Luminescence/0000755000176200001440000000000013605457601012673 5ustar liggesusersLuminescence/NAMESPACE0000644000176200001440000001742513604173242014116 0ustar liggesusers# Generated by roxygen2: do not edit by hand S3method("$",RLum.Analysis) S3method("$",RLum.Data.Curve) S3method("$",RLum.Results) S3method("$<-",DRAC.list) S3method("*",RLum.Data.Curve) S3method("+",RLum.Data.Curve) S3method("-",RLum.Data.Curve) S3method("/",RLum.Data.Curve) S3method("[",RLum.Analysis) S3method("[",RLum.Data.Curve) S3method("[",RLum.Data.Image) S3method("[",RLum.Data.Spectrum) S3method("[",RLum.Results) S3method("[<-",DRAC.list) S3method("[<-",RLum.Data.Curve) S3method("[[",RLum.Analysis) S3method("[[",RLum.Results) S3method("[[<-",DRAC.list) S3method(as.data.frame,DRAC.list) S3method(as.data.frame,RLum.Data.Curve) S3method(as.data.frame,RLum.Data.Spectrum) S3method(as.list,RLum.Analysis) S3method(as.list,RLum.Data.Curve) S3method(as.list,RLum.Results) S3method(as.matrix,RLum.Data.Curve) S3method(as.matrix,RLum.Data.Spectrum) S3method(dim,RLum.Data.Curve) S3method(dim,RLum.Data.Spectrum) S3method(hist,RLum.Analysis) S3method(hist,RLum.Data.Curve) S3method(hist,RLum.Data.Image) S3method(hist,RLum.Results) S3method(length,RLum.Analysis) S3method(length,RLum.Data.Curve) S3method(length,RLum.Results) S3method(length,Risoe.BINfileData) S3method(merge,RLum) S3method(names,RLum.Analysis) S3method(names,RLum.Data.Curve) S3method(names,RLum.Data.Image) S3method(names,RLum.Data.Spectrum) S3method(names,RLum.Results) S3method(names,Risoe.BINfileData) S3method(plot,RLum.Analysis) S3method(plot,RLum.Data.Curve) S3method(plot,RLum.Data.Image) S3method(plot,RLum.Data.Spectrum) S3method(plot,RLum.Results) S3method(plot,Risoe.BINfileData) S3method(plot,list) S3method(print,DRAC.highlights) S3method(print,DRAC.list) S3method(rep,RLum) S3method(row.names,RLum.Data.Spectrum) S3method(subset,RLum.Analysis) S3method(subset,Risoe.BINfileData) S3method(summary,RLum.Analysis) 
S3method(summary,RLum.Data.Curve) S3method(summary,RLum.Data.Image) S3method(summary,RLum.Results) S3method(unlist,RLum.Analysis) export(Analyse_SAR.OSLdata) export(CW2pHMi) export(CW2pLM) export(CW2pLMi) export(CW2pPMi) export(PSL2Risoe.BINfileData) export(Risoe.BINfileData2RLum.Analysis) export(Second2Gray) export(analyse_Al2O3C_CrossTalk) export(analyse_Al2O3C_ITC) export(analyse_Al2O3C_Measurement) export(analyse_FadingMeasurement) export(analyse_IRSAR.RF) export(analyse_SAR.CWOSL) export(analyse_SAR.TL) export(analyse_baSAR) export(analyse_pIRIRSequence) export(analyse_portableOSL) export(app_RLum) export(apply_CosmicRayRemoval) export(apply_EfficiencyCorrection) export(bin.RLum.Data.Curve) export(bin.RLum.Data.Spectrum) export(bin_RLum.Data) export(calc_AliquotSize) export(calc_AverageDose) export(calc_CentralDose) export(calc_CommonDose) export(calc_CosmicDoseRate) export(calc_FadingCorr) export(calc_FastRatio) export(calc_FiniteMixture) export(calc_FuchsLang2001) export(calc_HomogeneityTest) export(calc_Huntley2006) export(calc_IEU) export(calc_Kars2008) export(calc_Lamothe2003) export(calc_MaxDose) export(calc_MinDose) export(calc_OSLLxTxRatio) export(calc_SourceDoseRate) export(calc_Statistics) export(calc_TLLxTxRatio) export(calc_ThermalLifetime) export(calc_WodaFuchs2008) export(calc_gSGC) export(convert_Activity2Concentration) export(convert_BIN2CSV) export(convert_Daybreak2CSV) export(convert_PSL2CSV) export(convert_RLum2Risoe.BINfileData) export(convert_Wavelength2Energy) export(convert_XSYG2CSV) export(extract_IrradiationTimes) export(fit_CWCurve) export(fit_LMCurve) export(fit_OSLLifeTimes) export(fit_SurfaceExposure) export(fit_ThermalQuenching) export(get_Layout) export(get_Quote) export(get_RLum) export(get_Risoe.BINfileData) export(get_rightAnswer) export(github_branches) export(github_commits) export(github_issues) export(install_DevelopmentVersion) export(is.RLum) export(is.RLum.Analysis) export(is.RLum.Data) export(is.RLum.Data.Curve) 
export(is.RLum.Data.Image) export(is.RLum.Data.Spectrum) export(is.RLum.Results) export(length_RLum) export(merge_RLum) export(merge_RLum.Analysis) export(merge_RLum.Data.Curve) export(merge_RLum.Results) export(merge_Risoe.BINfileData) export(names_RLum) export(plot_AbanicoPlot) export(plot_DRCSummary) export(plot_DRTResults) export(plot_DetPlot) export(plot_FilterCombinations) export(plot_GrowthCurve) export(plot_Histogram) export(plot_KDE) export(plot_NRt) export(plot_RLum) export(plot_RLum.Analysis) export(plot_RLum.Data.Curve) export(plot_RLum.Data.Image) export(plot_RLum.Data.Spectrum) export(plot_RLum.Results) export(plot_RadialPlot) export(plot_Risoe.BINfileData) export(plot_ViolinPlot) export(read_BIN2R) export(read_Daybreak2R) export(read_PSL2R) export(read_SPE2R) export(read_XSYG2R) export(replicate_RLum) export(report_RLum) export(sTeve) export(scale_GammaDose) export(set_RLum) export(set_Risoe.BINfileData) export(smooth_RLum) export(structure_RLum) export(template_DRAC) export(tune_Data) export(use_DRAC) export(verify_SingleGrainData) export(write_R2BIN) export(write_RLum2CSV) exportClasses(RLum) exportClasses(RLum.Analysis) exportClasses(RLum.Data) exportClasses(RLum.Data.Curve) exportClasses(RLum.Data.Image) exportClasses(RLum.Data.Spectrum) exportClasses(RLum.Results) exportClasses(Risoe.BINfileData) exportMethods(bin_RLum.Data) exportMethods(get_RLum) exportMethods(get_Risoe.BINfileData) exportMethods(length_RLum) exportMethods(names_RLum) exportMethods(replicate_RLum) exportMethods(set_RLum) exportMethods(set_Risoe.BINfileData) exportMethods(show) exportMethods(smooth_RLum) exportMethods(structure_RLum) import(data.table) import(magrittr) import(methods) import(utils) importClassesFrom(raster,RasterBrick) importFrom(grDevices,adjustcolor) importFrom(grDevices,axisTicks) importFrom(grDevices,colorRampPalette) importFrom(grDevices,dev.off) importFrom(grDevices,gray.colors) importFrom(grDevices,rgb) importFrom(grDevices,topo.colors) 
importFrom(grDevices,xy.coords) importFrom(graphics,abline) importFrom(graphics,arrows) importFrom(graphics,axTicks) importFrom(graphics,axis) importFrom(graphics,barplot) importFrom(graphics,box) importFrom(graphics,boxplot) importFrom(graphics,close.screen) importFrom(graphics,contour) importFrom(graphics,curve) importFrom(graphics,frame) importFrom(graphics,grconvertX) importFrom(graphics,grconvertY) importFrom(graphics,grid) importFrom(graphics,hist) importFrom(graphics,layout) importFrom(graphics,legend) importFrom(graphics,lines) importFrom(graphics,mtext) importFrom(graphics,par) importFrom(graphics,persp) importFrom(graphics,plot) importFrom(graphics,plot.default) importFrom(graphics,points) importFrom(graphics,polygon) importFrom(graphics,rug) importFrom(graphics,screen) importFrom(graphics,segments) importFrom(graphics,split.screen) importFrom(graphics,text) importFrom(graphics,title) importFrom(httr,GET) importFrom(httr,accept_json) importFrom(httr,content) importFrom(httr,status_code) importFrom(parallel,makeCluster) importFrom(parallel,parLapply) importFrom(parallel,stopCluster) importFrom(raster,brick) importFrom(raster,contour) importFrom(raster,nlayers) importFrom(raster,plot) importFrom(raster,plotRGB) importFrom(raster,raster) importFrom(stats,approx) importFrom(stats,as.formula) importFrom(stats,coef) importFrom(stats,complete.cases) importFrom(stats,confint) importFrom(stats,density) importFrom(stats,dnorm) importFrom(stats,fitted) importFrom(stats,formula) importFrom(stats,glm) importFrom(stats,lm) importFrom(stats,median) importFrom(stats,na.exclude) importFrom(stats,na.omit) importFrom(stats,nls) importFrom(stats,nls.control) importFrom(stats,pchisq) importFrom(stats,pnorm) importFrom(stats,predict) importFrom(stats,qf) importFrom(stats,quantile) importFrom(stats,residuals) importFrom(stats,rnorm) importFrom(stats,runif) importFrom(stats,sd) importFrom(stats,setNames) importFrom(stats,smooth) importFrom(stats,smooth.spline) 
importFrom(stats,spline) importFrom(stats,t.test) importFrom(stats,uniroot) importFrom(stats,update) importFrom(stats,var) importFrom(stats,weighted.mean) useDynLib(Luminescence, .registration = TRUE) Luminescence/README.md0000644000176200001440000001055713604173247014162 0ustar liggesusers # Luminesence The R package ‘Luminescence’ by the R-Luminescence Group provides a collection of various R functions for luminescence dating data analysis. [![Project Status: Active – The project has reached a stable, usable state and is being actively developed.](https://www.repostatus.org/badges/latest/active.svg)](https://www.repostatus.org/#active) [![CRAN](https://www.r-pkg.org/badges/version/Luminescence)](https://cran.r-project.org/package=Luminescence) [![Downloads](https://cranlogs.r-pkg.org/badges/grand-total/Luminescence)](https://www.r-pkg.org/pkg/Luminescence) [![Downloads](https://cranlogs.r-pkg.org/badges/Luminescence)](https://www.r-pkg.org/pkg/Luminescence) [![Downloads](https://cranlogs.r-pkg.org/badges/last-week/Luminescence)](https://www.r-pkg.org/pkg/Luminescence) [![Downloads](https://cranlogs.r-pkg.org/badges/last-day/Luminescence)](https://www.r-pkg.org/pkg/Luminescence) [![Build status](https://ci.appveyor.com/api/projects/status/jtgqr9a6jajn02y0/branch/master?svg=true)](https://ci.appveyor.com/project/tzerk/luminescence/branch/master) [![Build Status](https://travis-ci.org/R-Lum/Luminescence.svg?branch=master)](https://travis-ci.org/R-Lum/Luminescence) [![Coverage Status](https://img.shields.io/codecov/c/github/R-Lum/Luminescence.svg)](https://codecov.io/github/R-Lum/Luminescence?branch=master) ### CRAN check status | error | fail | warn | note | ok | | ----: | ---: | ---: | ---: | -: | | 0 | 0 | 0 | 8 | 5 | ## Social media and other resources Follow us on [![](http://i.imgur.com/wWzX9uB.png)](https://www.twitter.com/RLuminescence) or visit our [R-Luminescence homepage](http://www.r-luminescence.org). ## Installation #### i. 
Requirements **Windows (32/64bit)** - ‘Rtools’ (provided by CRAN) **Mac OS X** - ‘Xcode’ (provided by Apple) For **Linux** users *gcc* often comes pre-installed in most distributions. Should *gcc* be not available, however, we kindly refer to the exhaustive collection of installation guides depending on the linux distribution. #### ii. Install the package Install any development versions using our *RStudio* add-in ![](man/figures/README-Screenshot_AddIn.png) ##### The plain **R** way To install the stable version from CRAN, simply run the following from an R console: ``` r install.packages("Luminescence") ``` To install the latest development builds directly from GitHub, run ``` r if(!require("devtools")) install.packages("devtools") devtools::install_github("R-Lum/Luminescence@") ``` ## Contribute The R luminescence project is based on and evolves from ideas, contributions and constructive criticism of its users. Help us to maintain and develop the package, to find bugs and create new functions as well as a user-friendly design. Try or write us an [e-mail](mailto:developers@r-luminescence.org) if anything crosses your mind or if you want your new self-written function to be to implemented. You are kindly invited to bring forward the package with us\! ## Note **The package comes without any guarantee\!** Please further note that this version is a development version and may change day by day. For stable branches please visit the package on [CRAN ‘Luminescence’](https://cran.r-project.org/package=Luminescence). ## License This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the [GNU General Public License](https://github.com/R-Lum/Luminescence/blob/master/LICENSE) for more details. ## Related projects - [RLumModel](https://github.com/R-Lum/RLumModel) - [RLumShiny](https://github.com/R-Lum/RLumShiny) - [BayLum](https://github.com/R-Lum/BayLum) - [RLumDocker](https://github.com/R-Lum/RLumDocker) - [RCarb](https://github.com/R-Lum/RCarb) ## R package dependencies ![](man/figures/README-Package_DependencyGraph.png) Luminescence/data/0000755000176200001440000000000013604167036013603 5ustar liggesusersLuminescence/data/ExampleData.portableOSL.rda0000644000176200001440000006575413240265321020656 0ustar liggesusers7zXZi"6!XS7j fcH=.2sěp3qcelRIY-'%-rRukDT:qzw=fb\Yk?E$I> ,y9Ȁ {ox"D23H^2_ >/6=o55R+g=4A3i C_WBJxĎeQS39ɫF]5C0E$X87@ %ew>:/ ڟ^U5po}1ߺ(lJqTǜl271F+\ߥ,H/!Si4 A_kP, vwʧ^' OאGn }{'݆b`xz(w[saxUP7N06TT?Q2QWzY'gk[^^k}s͕D^̕2'E -@v=7\ C};{T $#b$#$ ]|̫5A ~%fO{X8εY.m ziұa~ztZ*8Kmt:iX<}@n+ۛaQ&͎#;z}pG*RGce(㠰6EC_ʯ- I/ @֐A-m-H pBUF*Cd@{5S@ְ 75ރbW5S"Oy?K+q Șk |U0αP9 / ABH/.ql Eao,%F -cO KǮ? 
Sk7d6[ְfHe3cg " Rߣ!zZHI`3r?Zu*W81HM\k؅#J2U٩%dk M{ah^}&5iE +uj}O~ *>~(V!0dF=ڧ`ae` mpbR *(}"7bHW/ydⳗX(^ U0CqiÄ& \:"&"(qYuH U|^o<~qї{_'OV׶ |WˊZ8W}ZZ픁Q;$hp\*y7q BY \#$A۬ ~m M8}S$,hBX6+K`}V{_`7`bq ?Tƿ>ȍ-m'K$0(V&-14ۈb1XU 4KvYJ%S'/Ƒ7dJ95͋5n WR\rX:4]sG`9}ǘ xR05FA|:U+\5$i}!)/_녠F3!I}a"o( z_\ "ݒ'][=(ञx]8~D y$(cfYB݂ˍEw`ەȎRSWXvc81/WrLlSɊC%BAN$ \g5A`J#L8|.%@{i_I ye N>8"o`07pw4jh95 тKGL|D,J>Fģz+4zHaNއU0B4n"gy7Jq9ۗj\n3|S΄MF-h1qE`Ŗj ذspJ_&@u&BCX_@Ҭzu-  '@r҅9)~_պ5Hb *WT +ѷU㎜G@pʡ*q菘b>$gT؝!H?x2g(gHs~E}}a-1VYV!e'Dok$;'x`ƺ12i!%^âAwН>WsA2 F/b /L^?23F-C+!v}jW\<OA[i$|i0lB%tK'u?ŞÁQ'^}b&~1HEg&1;ӑy{u/ /¡H1deJ9AP-Qf%%'^ ~P?`e,#@O~ AVAo%VDap-;Xژ2,^~ U%SWױܤd<⮞*mGL8z}=L<3s3+wDFvQmymynpcK4.LA\~B3q5TKDL&O♯}ÀOD<3IIp7oZy}hZC+8>JڔnMT'$+&7`nrE'f5̽<"4ߨΊ?xB귿 ]VKƬ!{Ht@τed\+Ejf+MNѮ8 '^nSA_+s/GWlP\5օRryIޫ NDv9֟%1f^PN"Cv+Kn5:9|ޟn&?Ύ%?{*ԁIp=כ.1 A0Ē둼VM2S[gINۿ1NjlS17;zǤ]Hssⴐg٥zZ6*-#vRCDu1=届T{=&TR]sGމ0 zMBj۷z5-ZsJ̾ C )3t[x,{{FKӫ]j?!xwo{Ϋ81|S[o72"C!?'׭6BiڊXlQ9 `L:5a:4kc(NхinBV;qsO yzĥLZ5'oXU̍`JUEkQxK0vR# DI5[l1tGҐּZL!, -r<;o+qrcMbi6) LQ:?/ce|-4^t2M~ U)D[ `.P`Xw4B`4~cq&,5n3+Z./g$#"zB5WeTc3#}6" PDv 7!ʧ@Ƞ^ {1e<\\4XBY3߱1eCk\' )?uNN*4;\{|a#ȿ҃<b,:Fj`]"_9+e6?58joP̾XƛVq3 xu J,=i֗SR cAR߁Ԇ*,}CB~S'THI`OȀxZ8sGq¿z6s9zS%Am3K(KݡNI/-#Rj=E3S2z Nsg(4uFzv6{JE"ǎDPwtd/>5cr~-X} ۈ3Y;e.1 gpYs.}z6~.PVV\OuFs{&VncD\vһ1injNIYmV Ke, A8BNhOk!TAE2L5I N[&\jLvJ a s+Wj2A3x)e)c1MkUd Wmѵ0:):hjOP*m9,\?N,E,Ѩ"M{C^`Q?Z33TY`:;VMqM&sBz'vpFt+mkP$K%N +"chx|rwuͅ#DŽO GXs`i<@ "WOQ%*r_kUdcϋzĦҀD틝N< /oIɉQ@F8U%CGݬ$P^*w?ESD,ض2^gf iQꫤر41CǛ1 ُ U]۬LzDҸlWa.r׭D}]ҝDș^[})BoS7m5̚>X޹wG':fBӀR(ñEʏG_!I1Ҕ9(4ֿz< kPVXXZwnrE vm;uhdl&# ,ϒ-k*)tTp-uCpnZ888Ov8f$zt hUmC3BÁ%k@n9%ϚLCç9ڰ{ Cax۪5?PAeq{ǹU i Y uw"ly{|fj5DI#/_'lDd$( =)}x ~( 8WMv4B'}%aO? 
Ҡ@7=&JiػަA=܏x4V3Ⅽ1ܻ(@Z=6R[ W}# iw-pL-l!U_*vBa#^ʅ"'@I~O+sѹ WWY'}p x~bw걶t.Z_nUXԻw' 01 er:A9AL%ynu2JߠOƽ2P}ׄ#,g7BژU]Mo/CpjOS0Q #Hq^ 1;pqoBq-V Y4PqB:Qκ :Fe ?;4݉HRO=؉p+ȸׇz/M|fgR.S >C>97w?=Q$&\p]OFVh`!Z0i;A"aBJCAb{M)>3:x~64_ky ȑN:ob^Zyu,~ ?6o\0є'W)b-y%R=eP/_[%*ӫp~8PDG?V欩ÖĊܝl諬x&Y F^}ϡrt1,7 zMvqL JnpO 5~!-m:ZTۢ'$gڽ#yL!h GQKҍ0:ʶO2ݗD3ȧg(l7:=;&ygO ׈+)m{-F)V#Kmo+qvCd`|%%c|TV8M(tHYW;1Y@r5T 7–z&}l+\h8ߎAtdec"ϝRxZPeO!`ifvY̮?ֺaB<+]tPZZ}Gj-t4fnKgD/$0YFGe7~qO?%~fu'  j#܂G∛_:r@mbu~2c~Ht']QkO ;eNrxZWgXvE||[iȴ.DZ'c@ U95D|5cB!4i줆kQL$1K߹G]\:*[z01/ `,7 mMDӄ4NUKfzqJۯ0:6 2DS ٹ[ng,EϤZOq{|ÓTYӱIyEd"oUiI.Ba/@,. FH+JCM(әxmWUv UA+Hx~ JNj 43xO8 b^C S^b}|Nu n b! _ )BOv!G'`S6#36R&]ӰFrYڲrrwzWf9'$*`6`nV@^1Q"gF[)@^X1POzM9 cgn./.|I #S]^ƒZIa6uck,+ϣ*'OkUc }0ӯ2sO`Z|f9M W!8gJ"M"ˡ翮7%m#h]{ )& .w'Vv}9oDjFҩ&S9p]=BZ^=N`zq[lDɓnb).DIFtn9E9t:qShh Ͳ~ջf9u\`*$^vjZڒ!8U5=zb fr䋽]^b=.<$KD]3T;3ox34۩:I NJ ^dؿlNg\<89Δ s~00Aoybd';uL=ȴnrk\NTMœj Wڇ)k}P|` C̤A3Pd]32UnMiU2pu*4/_gb 3yu`}TfKî[~o[ڹLrص`F`f]˒L&ɝJ?J6:ENoCVؐqmeb;5L(pVY5ޯ}/Y"0:"Zan̈-,LC؈><8>?*& J Ǖr F-(kYxY, NRS݋~m4~p/U m4;=Y]eѢѣ}*jz~2]PU3az H<p 'QD=Pp% o 5"RYn,eI[i~J*cAc'<zeo`MoXEJޝJM^1drS9-Cz%ʶPzs&[z`1:T{`/s@%]c6sh.Oes& ;=/03⾄M7/Cx2@dsS|ı~NwXq=$2.79<hc0;쬅j\vp??fL4.$(|5vzRGs[#LǽVr䓧Vsзp":F~`V]pKSŽ|17i>wI[J5PԌ2'Z;s,."yT+^tNLA6mzLmL.37aۍXPQ|x/$FAy~ın#75re)(qa՛(ٽb*l߱dv}Z_+:\%XaA V[Rf9mLh7KԦfI+`0-_#} /BWr~{7K$\*1Ef#Gpw.l8;'R-izpN"Mm񣷯ɡV=xwُ2ـks cbTmðidH/wFL3ZPBV2lh2"dԢ5* B,#جLk[\Ott <1NιkP;fNz<}ɪ-AKtp'S'޷AK O~׭3_[qu.[0.rKSKInGa7GR]&OiXCOfN9Q |eI^u0iD$V 7uxo0^eYG9j!W~E&#uSPsƼn*?|ϐ!Mh i7$GqXB.w X}Ut_G K{WE_\ e wO]`LyDÒ͠%**$> vNL6_bK&:Ŝ1{jҕ4_l=N|joRA2oJH3!9BR\J&{[q¹dc~Nqߦan}XK^Ľlnޯ S;Ó;f Fr';{N $p:@R/}CfkyЕZceb3bh߾ 0^(}yMˠF-A&NBK,l6tJc~j%奮^0+xeb"x_+L_d7K w8[1{m/B'lgEZ z^ _mA&1A9>(YӱEMv2R?S?,?: z[Qc>6UDD8dtYpQ hiwP5 {bOөJ%Oxlo"]V&j-lJTɥ Tk^Y^MWw"nDlɂopFk\'7$th'l4wZ>5O"LѺ0~QE)-S_i8VH5;DRU4x`y9X@͋+Lįܐ8(B;4EG|L-W;sZ~I f'1Z-$.E^OUpNo^VJיYSy[2+Y:$kP5JQ]A`f$b ߷D۶(Rկu MPvOiB tz(oTm M3g6z\iBӫ&fmbߋKO Ӊ؂Nz av!inX_Ə]شN֫mr3&\f: _HR@:,%&ld=$> 5g~}i'gNvz]ZGHX @rOvIxQ1hZ0ZAd 
8:dX$^V6$yAߢ:5XIF*vXƽhTeT\smyXu5~F6L={cO+'؆",tfU 6(q,|ݺ(F]N?TAi`ѡ %KBANldQ(d >IO@ ^( .c`| FH\yJoOŭe>AKx/i׍Ѭ}(LREoju2(cC-ՁL?&ښ!uIze+JSיwb̪ɅxWMn W-%WQ!SGϻ DD̔.rY*W5y\/ɮd/ UX*;D2e[qO3}_F% sh]<%DSOBUuˍMԭ.c&OZ@x)NċG CA}j?Ec+sNAvLF&`w D%n_? J "%]RT\ʖ3U-Xޫm?YheÂ@=Qbd~^ma4 `66ѻlޱ1OME$Cn9bWU}v$[dGzqgn]$5'MOׯwK.1[T3/?=;ϕ%|kh4OXTzh81.lYڸ,L؋'AwvV;! ׾RXaۨ/H[&Հbjx_g(SI]M'&h\I55)V kczanD+ߜ\?lߜ me8ُVg6]gcW)9gSf Z&~rPN~R"8wF@Dk}zεTpӍCL)OIGLQt{Pj5`Q u=4F_mɌƆ7j Hbr# N1 uyÜ {,,E:erQTt&Gu-BʱR}yY/luF[D:)|-$Քا;!fj 24X*d+Ї ?FRN}{eوSLrtAKӚJ*T$5Wx$5X!K9Q |qݓE}6'ޤ.ܽc`;.0a~~6߮t[5 QNҙ0; H[>Ri6m]~۪`7]aWwG[5fDL|ghYj@`=Xc_@5i Ru\J?&?/{u^phtot;2hfn#Ȟ9W"IOoٸ P6ݽ]"xf'-ȭZ;U&Z*0 1eL1qfqzK]W\j+’yQry-a 7/"op[^12[{=eAyݛtS?mSWcVhN<DθB&;bSA(cKZ;_@tJH* _/zG~5XlbD9ᜅECkP\&\{tÍͮ 浪2]U=SCa '\){ -!Mn첥b7ߊשU17jGuD68}9o,ؔB?xHy~D8g65{tHm M,*m?Hk;@󥻹զƠ'$l@ZBziroÑN"JN [hu(䎸 En'I/e L Ӎ NY5d30Dл=& {rf_w{+ mh bKN+3]936Vϥc'E2Jv -|y6ifa[bb7:HŠ%,?D<~ fA(N ?+~nTmILD)$ź/9۠zˁ޷ζTEG^Љ2?KL}I]^-]O)/Y,.6*k^ T|AO}9 !` zԟż[1*d߼/+~S#kU]Dsn|\G4;ȋl3XxP[TBx>s?~tF$,|uukQYa@ eeo”# %)>,[~{^mVʭ2ia%!]=sD1UX(_ZY些8Bq1맷]{곂l1kbXXw@}%jF!c)\ CY R+"Wv b>+|F6܄h,iD  323i5:rhT6]\Y^YhzdUnj<%3dDPmGw0XNnZZwwm<9͙]gils 91j\CsqZ\>.f_̺ŚhG .gVksxq'D ǖorOގ/7^tkT+!cU2;?]iyrGgVX&WW g].;tMNiZ c\"R6!@hw-ڄuRX2r0qpJ8So4=#"\Cy)d/ÇigІ]3O|Pt?̬eK}3 @?<-#+e_[i9lWO!0HK<~XP wpPU9ʂޡ2Pgl] 7{!BN'i~H5k#Y.-g,Z8&9ב"§ϕ?* w7ϊc4Wp=0 ֯Y+RqnTF򇏘! 6yw驅H- ]=1-d-SDGɒb5Xh`]ưoC%=.潺 wq}ȇԎYx~hU f0jjg\[2օ]y?idžv@Po0f=`]n ,Vc4yn6׉s!u]#ɑ:b. `HCfz"X3ካ%RMHȉI)\guU*/>-@S1Lp[AjOƪrE, LZ`3g67S B)~CFӪ%ELƄ*|x"\b@4(Ha>ADURs]Z@h Y VnW}ES6 ;\ N80"I#櫤$㎞=yQq\oU j+zrG70q8+FC.r]ׁOyJ;\Nxė1-1 ^k,PLQ:]@wCbU30Y%MȄ 12o\j)Nw 0Ywwu,^.e8)rPAGNPeJf9&֭}## Ǻۑ +cx a* z5vG\N':$i";D +e{]mI><@Ըwef]Qw+vd7⯇;]e7$-j$ԥKIu2B!gz&̂Q^ ҡpDf\*9$uB+2_tPuZS',!8PnxP@ׄjM!RDT3)ʍ_2žP0\csjkkN8\[(  +[17tFWIIh}W. 
G I}uKK n$/ƈv}b;z]Dʹ @X.2jC}`*vZ!UڸBE],U {J <㬅bsg?U^5#9`wDa \Ӵ5.sژ19hq*G%ԜkH*vMR-օc#"T ԷYg=jOz`U"$e|ptO:J`Zϓ%U{1 LP /jwDocn2 >ޗi= 6Hc CBjQhP"SX_j`EHVz!@ߺ* F Lm^'y)\IԬ{!dUq=c@Oz 0CDjHWJUt57ksq+R1بC'!Os.4~ǂ30 `(X%T-f.@NOܨ/s㔢KYh,3h|0Z3QXqj_~l 6*Ս`M;,d*^P4OGbYS@&}40INiBp5LJ]gD=5Xmd nQ_Ap{&9㿻a&\Bz]I" @7H8Rf'f*{sBL'L}ɖW4WS ݢv<{r2ޢɬbٳEq?MI0mt9צ>!q:}Ұ6c>ٛ),6y=`o/g}Lu3tr}}4RyRqgwOPMK3ϙ,x0噾:EÏ:))hN 93-\G{:&g|٬uQ9%kaRq?0OS qGn9XFw-# (BؕDGrUxq4W} ,.)˗VȤy29,iH5n:ֳ^}`q^} c4N/Ա"p!t6[洠,Ƚ’oY(c$0&uu:S6[ȟ#'W3Ul}KЃ ?SRݿ;Fa_t?0 ~z}=ḿ:ʲ-`M/fK+ʣ AR :A9mb lxiĠlSgKiLb5Tu꽯h΃nCcN4s#F\FtTʇТ.FfXmlu~<  >,sm:JG}M ,?N -jZFHGz?PHi|m`W2#vD)=DʟͿ'3bI[Ց]%)DX ,o!"Ol{V^MiJ,޽sWBxBog:Mk6Ҋ=vx)]G=[tI:,|9ّaHr,lI(d"h}FmטkjyKMK{ Vsm aɟ }Cd#<\GãqF ;gD;,yWP!k1~FgCXR 3Pi!S*3(p9.Y@En@>?ab0K zP,>)ݘ-{EcϨըMsfG):epYo[f2Z!Q[$1u6%,J 槆%"g+?ĉ}mv B(l5X?*+WЫՂY}=Tws.,)o5?%dK;7np ד|'sù$^,HʾuÌmy&w!\H8wg"?,NGD6]!V L7f1<*յ]qoϒDCRQBV p jC}Z97.hZj/}5UG_E49K39jO.o`;8=!~tР\Gˎz}F<>}ξHPogXNFa 7GzgK9Sm=}bXǿ*3V'cy$"im9ao5lQ^|< YۂTlLw=JtG4V 4(@TaY⁹k3)_Vߧ$Hu"~w]4OaM'~&F2HINItSP zǑL`~~էZC9f"KOhٴD=S㟯16(y9|9S5XyB @1FݟeDΉ|ĆNN=}jި^1P':R?B`2~Qv @e3W?k;zWUVLDZH_f D9ȱTnsEZ"q8.oYG ([r̺N쪙sE||r´C Ӣn8sbVAyuôdf;yY2laRuָ)E #sfb]+9;>74|>p;lȒ $gWXPqQB<_.uy1$p뗯fl[gt_*U < >eyXaeX.Q7$?KribCu0#9WTh`ÒD\]LU(j.TdylĔJef_DN_j峗 YRcaOzFuu ?vzJJ dJL|Aڧ,fN=\){uXW'E-zl91`漋%ʟy#~Kch!GKJ!% g,k7T8iT-98QMȡ Nߞr'u'oqڙZ`\ `78]sIP@v^+∭,b>f!&y띻a(z!C}1r7N2Yo!0v7ߜ!wW6gc 3 "$)uSrDw!1' @p5pl ڸZi$ÊhA@N&;Fa{P`X{|g7+MpS[]5aU>VVGr6 [WƦϧ v솧ۡnB=ۼhlN O`!yB9#=)pNĪÑwT\{ߐ2= y1Hq'kԬ(4p%cF&hlsVc*$\k# Ů:{1vs~p}Ȫ*ezQ="xsXNTĽ ovWovG8c`U$"E0y+xMl_U$1?5 =k66?2Wn7g5%٤g0*q*{Z'hDA9t>5g/~Wf!;[^!#ZޭO'DE^eW&FwB(=A1w/#`Fd)y@6CUb {%:? 
z^s6Q _LJSX_ 'y#M5r7:lQwBr+j& Б5J_/gyj(2oIܫP+;_&*"Fy+mi=śzYi6WƘ<.?zN5ÎIy"]F5|qX⬣"qr S]}UOY}DS5odV.աbP"&d6:{ IvBB[O%u*c}rv/r\'djǷau{wq{AxqJ"̣NhMN8ig)B.)}Ea6CU_ٙnNYx.cxZw_j|8UZ/̛yg$v0R_c,\]Q8/ bX۔cB/&!Tb>>%eg& Rj.*f@I撧Ib=p|/#)@ % {&(AyYFDf8jIdz@v̾g?#$a߂G2*$;R%N MV=s|}dFG!v5hZ8m>T4ӏ]@R8ĊN(;Rưrt_5yf{1# c> ٨Ih0Į ETEI~*Ь]J_~&.Rq3lju3Ym:UhE)ys.9L*9Ԯ%> 1뻒Fs?~ ^udl1_ ]UQ^!x|okǼp;ea% Wu5 (X`@p $Ljv[/,hqBcQkM7`#O)Bn*3{Uk > ᢳNǖ<orYƫ}Vv#8pc~Tn] EVo*;ɿNKI nrf;f*S[9Vҟcucq%`Fn,lr4l<@A]3 D><3P\8`<@f>hT:ĥ/,$Ďf{s\?Ie4pI!8]Tf`Gi,4: w1^Ez5&{4Dr>b~~nQ%?㣞Hd0a9i` !6(ܵJ!PC~p1 r92g E%w}wOgqgH1G]>O>Yr(+KaV9Հ\G>1F쿯b%sɻ~5م]/WKl9;Т b\g6ryN.e7Q}7DGeDޘ־P}tl!gj5jV.Z.yP4'~(L%˻b}u qywʕ!k;az@iooχ-Pq> (^i \N;SxdItDۡ1i!О*'^?_+SQ2ߝR͸ܠ,;%؛۟ n:Qplk`S1b7/$rm=nD  ps~< +IM)A1 KpͣʌGסNnTњڕ abc#:%^Cd,Jv[s[5mE/ }&hHYt]}k{[%[BJ_| xIpՑ1~B1Ipɛ3mf< ص,zLO`b+~n^<ۇDdſD2V~q,M t0aJ4 [댘JXB8*z=.ƄCs*ɋj?8 6c֛bߪ/$TOl g86$ #$7I 0h"քz> uo>޸o(HhJƑQ99{Vv&'dZyݽܧ \XW:jQeקn4N>~uc}v>_o6a%Pv.N!?.^PxJ0CyDML$b@x a;.Z-lCR~4ZR94V&Kp\{kzAuk9XlET!t]$ڰX k -P5ݲj(>f#ssn]ci7of^b&ItQZ}݈Fk9Q?z,sչʂtc>d:*7EBĘ`4:e@!HZasZr&4l\ph.' 
M:[%.߰98Hh3V {$ u񷉣~4İnٖ6J.ŝ_P衬̄.3}H N%ŧ_!gbxX`9lL$+Ԃۤ&0G`IƸFVl(Y$ 3]ܖ7KG{Vp,]E:DZ%~ULSi3"s&2Fץݦ#ѽKt?%A2O0yQr:7"]3XfɵWK۳{}8?Iu!#ʯ=8K̜<[fN ?/ *pr=7r⧩ܺAAk:| A-uީe OCΘub޵jU H^'tud$ƍ* T }$<̒Ɍs@rQdҥ\_>W*TC1V#OYTn#]v+T>|/%KȘڮ_Tc'613(Na5=72/=~{2)u-JKkؑT{{V f/tL+$bfmz)F/ kzXe孢p#u"(D%& @.Ka Qft &\A{l3+DŽo3,jqɀȩy5Y`ʗ^x΂)V֤iL:|ڵ8.Cmp/ۊ=$ZZ I `b>0 YZLuminescence/data/datalist0000644000176200001440000000165313604173175015341 0ustar liggesusersBaseDataSet.ConversionFactors: BaseDataSet.ConversionFactors BaseDataSet.CosmicDoseRate: values.cosmic.Softcomp values.factor.Altitude values.par.FJH BaseDataSet.FractionalGammaDose: BaseDataSet.FractionalGammaDose ExampleData.Al2O3C: data_CrossTalk data_ITC ExampleData.BINfileData: CWOSL.SAR.Data TL.SAR.Data ExampleData.CW_OSL_Curve: CW_Curve.BosWallinga2012 ExampleData.CW_OSL_Curve ExampleData.DeValues: ExampleData.DeValues ExampleData.Fading: ExampleData.Fading ExampleData.FittingLM: values.curve values.curveBG ExampleData.LxTxData: LxTxData ExampleData.LxTxOSLData: Lx.data Tx.data ExampleData.portableOSL: ExampleData.portableOSL ExampleData.RLum.Analysis: IRSAR.RF.Data ExampleData.RLum.Data.Image: ExampleData.RLum.Data.Image ExampleData.ScaleGammaDose: ExampleData.ScaleGammaDose ExampleData.SurfaceExposure: ExampleData.SurfaceExposure ExampleData.TR_OSL: ExampleData.TR_OSL ExampleData.XSYG: OSL.SARMeasurement TL.Spectrum Luminescence/data/BaseDataSet.FractionalGammaDose.rda0000644000176200001440000000272613417222471022256 0ustar liggesusersBZh91AY&SY吷޹PM$4h@ @AMhI&4Ѳ@hh BC4&M44=M='3!144M 2hɡiP64 @fAOQ3Pe zFڍ1!MC@0 d4b ޔ4ii[d?+@`S6uՃ""sPO=!*$4QȲY$h[Y!'ݘ*$aW! 
1a׊ U\F7ÞPHU Sd-ijag ] H`4mAUmُB6j'ЄKRO@%C} >yMy+x~ *!W^M5Ei`1cE&ϡ\}tZ *kn6S- 5 ] BJ#LO`61#d &yUu*>,ekyxn瑲ɇsݟBN3ĺku8YA*L+Mna>OK<+(# gz@3Z$qy[HѤKGPh-=c"fU}> WWRpE"BQ(EqJ1*)@;P ~=|׭op=r0-Q))N{G, )E+gQZ>ٔSȟNp0N@As-%7BUy%}\XfE(9Z WM0&pI~0ft搡LCQ>a#w:;SC(NК lp1  FVPwِZBXGUOa"1DkbPa%Ozm@7x(Fp7[%J8Ą/}^`~ ' PCfWhi: Ӵgcmz׮i8^vŌW<m9.4Qi*>QqƨzZ yP(~}YIq(n/NOr%*T=_T)X(R2[Fq))nŭ8㤸Eq\T9sY/M[Eq(nŭYMq<Cq<KqGq|GqOq'@qP R Q S0 SZ[Kq"'Bq"Qz[Oz_PO8Q8Q818181@qx6P@q W_ \'x>Nr9["x O,OjK=mDPG.?E` H77/r?TKՌ'eY|ѱd>?*[ +CMCC@^>t]*v9zx(k4똽 Q֣4#7Z!Eg~K{$?"Ye,ڑǖ|pgֽ6Ii(\b,8e e]DFT}~[5oړ$N)]70ݔpP#>׬L^II/!ZI;fh݅d :TmU^gދ5򾼝52| "eb j7F_\voȘNԑ _d^3>szlt?~<?Ix9PR E:aD~IT~]&r `ġee6^ oaY0b,h.c-2`mKi (uͺ6O`T2{&^J00NRuViK(nL/t:{3/ 0XS?Lp.l[n1u06>)7ab?=e;{s]=44=_2@ 3Wk+Y}k `7+z)%ÓTv v/*&OBҬ|sD}ї|A.ϻS5aPin;M%ʋ>/5bMPν{rbF*s$}_%wrnN*C&ժ3~Z!^*quw KMA5udp;g3W˝ S58yuW ׄڠm*Ťb͌lJi嵲ҡъw 1]tDZ2qϸ*j6":q]YTO}}!sլ\ň+?]blBWlKxp9˥]K x!8?a 88ʈX:gVP!QW} 1diu<+P׾˖-Ns;^ue}GJf|'=F[y(y Fq>)|Nx!]Ğ8?]v-xc@V岱U?Ux,`vBٕ+W+׷-AzGc瀦W\(d@:I+D{"7)n;.zGw,XrA {@(/xbz@Q͗ȝ+!ohV1J?-zE)}åݗls V%4'ֻN)][͊:(n-Ďv&]:P-yK²WΙEl9P>t)U\Yz<Xߺg˜d٦_Sѹ}ht(c{ bBf"cmzȶ/_Aw_l*%\P<϶*? 
Weã~3XBRWfhe ,Ӈ+x{BT7PejG޼_[ria:T:|ػ?k<׈=C2n'YaXUt}G0\b#!p&m[v⿱`0"jxGFM7K'&8YfVVC=01xNw_ta9 K{1+ )lbp"`ձWc{wcAm@ [f_q*h̚S+9`xNYg`]\6h_83;8 OqNhZSn6< X)4o:xWaΙWۍerm|uûZHԤ[ Ewd/E2/t4 KEF=1\0 Fp.÷` ނ Fnmܨ`߆ kp--~XmԂQZ0H ƈh,[08C`d ̂!Y0, Fe,8ʼnSIPFHq)N$)N(N6Q&Lq)n3m-RV8i AH߾-If9 ]c~xٍ$4nFn;j]4r&觸o-о s63n()P>ѷ< }"2wL# Vٖg_APke{j-0WG%/8 wSq' pƓ]ݿM0RyLL Tv%eøPŇYϕ0.Q^a\J\ ?z3͡zň&@|c&"ziV@+ {)faq`P>6OM4\Bօrݝa ֒<}5Im䥜Ug/[Fܘ=YEFLOsiRyFwIɯ'noou|4t ~#I ?Z_®)і9 p* /9kTx#ql8oim5v ;Z[i]y_~ǜ$*mpSUYhTUu9s6Yc`W/\SsbLZOq'h) ik?"j领.jxHSq?vre(X"_d4$rrJ~F~\ +nK< fQR38MlFR?2"j/E/A3Ľ0 MK /eI*$i[dg܁'$~M-x"۫UE}{I.t( >.gOՅ'v]&;2n`PyPudJɀ7 ou `3AU{;hSo bn) zw%@?Ɲ|!OT#.\f.*d[@?\PXU"a\roā`kGc_j`kc}j͎ kR]L.s-aqL%J:OU}m ܘ+Y1'2m8ۀ]=Y_XpG/u5֐0 !,Q_\(h噦^ O.qehKj};;ChwLR(`UZ]޳{rQC5߮H"oO"E}t}tG_ƍX 3R~$N~͔(!]VV$ne<'p7HG NH.kcZ8O`%R R^$?>N% yaˎ:j=c}i Mp<.:RZfґ1aTlKBO]sBoH܁ͪED\f/{U!S r{d"v+уȵE;yYҼ}; ҤÛf/^q>س73AV-L+wbxFkaF$$hw kr fVeB~-o(2h?LZϰ.u^@{l!՞)>bQ0 @y}wgGo\( OC/v@ j60@;=Oj,{47Rgl_U1^՝J찟@cwL[}FfQC5_%5*q)'Iͼ$53KFr?YKԒ^}g~)I-/Oتv7dy<|dwLι~9MEc;gd{YD <ۏj\$,Wb3y,:Hd~!{I.JyyYDxc!ꑴfsδo--mPAQڢ!QޑUB>y,"IEW t6}D"ukB^性|VBq YgPbzdn-ym,rO撬 <8vf]: Ώ>ط1l r-:kC+Iҩ FĎ!Fc/j}-P / 4bzh*PKPΎM02:&. L)uallCꗥD8@ha~vUti{' 09K d: Z!BM8 􂋍2_6QӸ߫eYE-Rh]*VWcK=ԛϭz2K=^ z0\H|K52-#*eٙ%# o@t`춟`]ۑc c!9 <0~ZyP3.|g`w @g ,ta!@r9_S}cOzyK򵀮- K%m%ZTfW\VӚ`q+u%`rN'n~8v=᳆uއIdBLOpIƉ:UG* `,N@vc+@ryy*a5"WE5|MT-`mO4Yl@)L|u n4KLp9VuFMN  sns:FLu- ԻԱq@\~ P2++&',Q*<&WfVLM \ohBU32{oijs{AӾdo"}f: p1aM^ 9~^30-R!}B c-,?ϴN%l`c)_N-SXUO;4,nCc?r'9{pM3>>PCGB7ݜ%4z\U_`c5tQC;:w"}=NDd)ĉB*zv!̝/~Ežek' iGNpA$5HҎӐA<殌6]|F8-[lB6g [}!>r#PM"P3ëPX)YqJ¥C}) 5O j}U5qyؾzl } }ZQLo/YYQV8"T?"! :T$|4e0|q=жFk\abyoF^q88ڣÜMm3mݒd)o|t%m%N3aiGـa_¤9WtW;yX`I$wytz%_ ~[gg+eRײ"=U`M+uzzg`Kl$|hj25MR+؋9^v0/mH}7M}'h_!n)Cro{#s/ <ƙ1GN+i/j袆5ԷX$~SqH+$p 5A2vp ~ǢH|ݗJd$bӾli&D'$B6-jy Χ5@6ȧGzVBJ~wa u5"w d_1 cwGxC8>ǴaJzP'Y 4Z/XͿ:?GL>Ү2~}%d(p|aW;=E|û][?B `T0 `p` d(!v;0zE[0Z2OBWX!ο7W_po۔9Z|r@`ү+ ¬JNOSMteg^Y![Z/1\6tl;Zq^ _<=? 
xÃS-JLj 3lsj5 ֕ӎ51ybqP'δ 5{+{tR4QC5_6_-:!}eͯ<rL #BDvo!8'd 黏C?kGQEfw+ ͧz 9 H5shUJQvG[݄⦯2(+?85BVČPvd(\9o+֕zb8sv|Y[MBǴ5Bm7NBҜrm95\/I/Z8+'gM*ۂ p :T{I"|~ c7;#4_PQz2>zX@0lOooPU/:(Moֲ6 @H43g)]7Ý`tAm$;w>(p,|[U "ַ`ӏNJ@hG74v*pCޯJ.Hpr}i\"v4*2&zW?.޲ [Mo~+bB3!@܉$)|%k<2WCX-r?3r0y9[YDLuminescence/data/ExampleData.Al2O3C.rda0000644000176200001440000021124013240265315017375 0ustar liggesusers7zXZi"6!X])TW"nRʟu,=!z%mt=c@ [ t|y*5+-P}Pv|fCFK'<(SHe)D)##NZg7Πz"Wo43k4$vb^gG`˗(£i֎yc-دjIQea;4zX:}/h𔶠I&[0pk6sG8߹ RX(D.ǿ0zbBT]|xjrpXs6ѭ/9nfw|4=Jrz(zŘ~"}n9X%YP~tk ?1S,R_BzEBi"COPFm\VD6iuZU4n%~fªYo#ntEh;vAo|V`I՞]l0T!ƣ4sm岜6tM}fI,) <8{-P yJO;=%K|&c"a#TX1fMBF046񓍱*MmIEۙ}9[Be7}2 C{nA > iyiw[TDIBRRiZ&'n1oHa,&~V)ᦼ)N%ǦlUdPP|.oF(ԪPy$]5Z:3 կ25wNůӽL %!4|3>HL):"gŇi>q/bBvb!W4ʉ}Yn RzYK-#?rȓ'\@[4jK9bЍ [~ρVbGD wy7Q~4%sh[^J}HVl2(R @5S_U.BOv^"f|X3)ʉ~FJ*'PzSI(mHCȆi,-%i7]TnoG>VSčՑk sv!@qW =kzN%E+kL8g"R 43S :|d۝ER2g١#9z Γ'zw|dm{ŧdst}Ͽ&g5N]y=![xњA ЎIŻ)b!]C%D_eyڃwx+viM }=U)SNc t|diCXeVFm|@շy@w\/md]/v ĆL&NͳJ?0P&:"so}+,I/>!0MD8"AnuC=$k_7rdPh~InLuxDEڬK#)=`!LBΐB|K> 9"^Z^K($jt4y9gIn53 8kzq SE!іS86@%7kIʥxA 0=~1>ϚYk$a@;>kך^{ \иs^=ͦTXE#gM+JB>)=J~ {W+֔ü yYOXJ'_ yWJ!˚ tuG:<-$"X!*Y Y/yC: M k9f[F nK -i#kDԦJ>O4OR1K!x89X?]wDMۚ~zGqd]EySJS>݇M.mdGeA)Jw! 
Y]FCBK&tM{wq5W#@+@>xN5IBةʜ{|y+qQ#<+g"^\_>!q%WN(OMd*ǃ) g!>.q"bH7\YMeMN  i^:JK;SDx(g`Y0qT}pSvDxՐߒpT[x]%uD`^A!\͓dC1To}1}_!q-=bǒ*I|d{Z &gNI%p¯ K_X+z[h@5<~fK]#7TOKkܣz=\J/zBQi4i~;u^l qnf,TGO7-Kv/1ўW,e'QWŋ$A"lta-/pu@⵫.Pm#"!=YsLCn=Xe=^M |S XHS`LM#…5j_ߦ4%أ3w0 M41cl߄+ֳe "P;)&s{WkE~i;㻫_>v(a`&vV2jtB)ؠ(+ iגkhA,Q!d7ӐGGE%a5j* j"˷-JþEM]Kqj&IΦqj}lçW9Xah53![jlL ?jTW[ہQ I8Isb0T0,~>^f:BN&_BbL)9^%:P}.R3zz'CNϘa\L[~_*\ߢ 5n(vA>y2FN&O_#;1 yhѮMˣ75 E>(Fɤ# B\B b, f6J)V0׫AfV4wfk]ǧ!6ۭM.gG8ɱ+o=%jY"֗Ug/PonJ h0yĒ&4^Sn?4Nd!k3QC-e#Lw IX-~_\4ڷ4:#,PL:]?Zy8%Lۂ4}6Pr)(*O)%;JL 套vM#[,/*r6Uk13'9p&Wnqf^$gpP,s2Q'X|;\|5z6[?O*[+e _[-'UH^viއ9@Oda}[&6W, ^Njrx0RZG[F CM t7&Y@ځN`RbJ%Ί #!%6IYF6lC&MxFY $ps^KNF+XzTSw#tMO@,opTw̉jX}*oMk+SofSɳ1H}v>TDNɛz{Rsr܃Sx:}%1N]],Դ:wD5ōV8?j^g% z/.)oIIv.KTSp-Jd::mVgQEe[ymmh\1I(c 1΀GǥX&zCv5iȅiEÉd#D'SZ0!t: Rs@ZoѕˍXl̳T RV<U/xy yjr G; lZLprT5p߄: m C]}y<(^ו`}ksz-7h n@k.[aX$?,1BU'2hdS%P`ԾX~)!@ԅ5lG#ZySo۪=a8|$)9!id:#d-ʱA}>u!;roU2.=9/?BɪObj5Qa2ZA9#<:Ix)}X3D{ 7ANFcš ֨H’hI pԅ恞rHC+џxo-8!$Jw&_M[~BTR@sJ6 +1jK[_j_)y+#QCNIu9&0jF6|O&.mTPn}_^v 8ޙ4$5Lr.fރi+=NTaeE"J5oVKh1ѱ\D?/*eDun =9}]ըLՌzyXpPI% ӵ~:ǹtJt2lӀ>m:OeM[?5:"S@%){w/ɸTsF4oS<]$0Nb"y Y j7D/q|^d-N9Jҳ{HXlOՕe jc]+N׋Y}zbj}[ڭwCARdzqIkSXV#qa J[؋J9q pY͞PӲ#yrYDn%4_zl/pyK`(ꡒPh ʣ&r)‰tjk^ 9]/_Wy,G46 aN4Y0͊Y4#_Hz%e58h&}bLFAF荇u˥$~slP+gNNYE0rMa|PMq0.Ųf=;MP) 5,ucqN`IVDͺY9v~Ә6&ya9#˜5悱"gmg;[w_İ"vpoE:ѳcrfcV]x7Ӎ A4&5j8i&6Vz-C 6 Y@If{}vGLee̢kbKhmj'4WVzs1k$ Z+2ÞǨAq0TPwzuU DËN)*j;Bo4کaҳ^s̋ LEn5M.' 
B~s(g ju T0qTN0f[<[[<#uyhly L:h'R w`,v5`!( LX`˸o2O%+g<=1!F ,:!VLq)F[ HsBݏ:3bW*;| "!sȝ4hk gt'Zjk.240 sl:!P[EB lak;%yfd?b'00QĤY*O@DZƜcRR]ҫaHPxv?90@,Z_t".2eS@+S[< C,Bl\|Z5i˅:s>xf9}ɠ}!Ξ'=J9&1M^4tOL&mH 8g5`M?|ӛYCy 2y$JNpƮS]GsBղߗ zSH N:x]w.lݝ:gs2Vrqo2_Ȇ-bt\?׭ԩ?8aU!c|%Eo:WGgER.*[?kj?!81!#/듩 ~@*Jq~ǰ`B{[2.$`R8>w{܂\pؕ3L "Dk\-o1 q[(&B{( HӬja'@dv$A đc1RhkD;8,^UC"B?~^K%>4j7h&4Bͦ£=}:X&/Dg̤U7WH'xq7B^Xn<<%R}z@ M:8>޻9 G..z u6Cv.糨+ž'{2ZTKd=RRmޛ9|eǹ,RjL[laRfv~5i)ښKQ8SUH@8CЈv2TQPe_)8-@E6ag@ ^d19RAX<I)OL%S3Gip߁NTP+~W;D0 o54nѯ&+= ~#&o7X0D:Yʄ&??JkVx{3ĨI/rz}B dd4E6N0H% 䌟Y,KǘEH/[\CPo^~C&,_Nnr1Ҷ"wGϢ*dK~}`jH+.BJ 8 Eи-lv#6/V_݇b*uo@xOPƵYkA*Fq@lc8YC˭dhg ~L#@/ͥpn q% BsMD9{ߋG nJ~1(nؤTQ 5CX4y 7*lMwXygs광βڎڗf/_ X;Jׇ4^fw(5M1[:-.4va7vGy(DXw'[7ADX^@ɀ1}Q.gݦ_^)V^-ue1:T~)k*ȵՋ~lNMُϴa `u ipc [i~kL&w9qzJF5;+XVg8إitu(RhH1MD'<[^%?0av43۫Xi_T`,fGig(_kh`V[51Vy#{*\kyxvN@9yTh^ WE,tt\n;V c׷..+2IWFؐ#έw8kk)ȅqk.mOj܁}xTR3fDcYh?GzLb38QokjWBEI6"{]l'Ic=š':,D06rJȿVw2Xm&i mS`SoQn%ߗTMO$VL?W 9O,yam A̲h#pwߖ^ދ.&ZEU.*ϼ|4Cn3|%piF=:hmjDI};M}`KmVw}2ldٙgo/IVJ!4b'$RTnZGAU&G|8K[7nipf۰t=hq2 M?܄,vu&#]qY[Rf.4-<{$~AwGx)|ꑪc„ qç7R}Jб:gx |iyt~=1"$4%s\$?AWЕ_Mz}aYOF+ 8ijˣV+ȍH_{4rN$5 q|. ,2k mg'Z# };nck8piYنKs:)5aAyr7V!Bh^grhb>siq4&6?Eգ=z #7x@~$ɼXTFų*iة҅>VkE@Yf `2*׮7Z~;L66hL=Y1h`IϖV`Ք!Ct֫ZA&7㛏(MvJ`*K<)Ts{)1 yM"3$ůcz&ԈN[*iO;ϧ1~<(Z̃N]c7Ed{ A^|R8ԝjS5Mɜ.a';:͊XWsẂڄ8RB')pfrI:y~wFܹr^sڃħ!W{bmZ+#Hl/~}^\JoY v,ZkTx9_rMAF !;b"(.owm7bO&lb {ٜBdvl-M'JAFEyX1]Vx9#F4Zly~cw q#Kב8jEH20S_A?ܜas{weG6f3)}4G(C;Ox0MA:? b"yϑ1^mpU:kdg&dTOb +bw&@W$搳qzPjpkYb6 4's<(yv7=[À? )yZ5 W~I9 'lpaF=U\ܩ k溭3̌k'΀5ٹ?@ӥ/H6L#NV{&|"{oQ`Z!]P>u*Mc4wo:Ӝ2/ 1L>o /=սf7a0ɉì. z^ l A# A&+. 
JsrK$szDF2<$(/x!@9 F#G?4ϳ 0?qru `ٻ)pl#pg1W)~bGNV?8\g4Y@{oݿ *8k էAW&k$>ʑ]a8Ƅ d8S|54΋EVMz8sQfc1XF=$r}( 2 J8ävR@%j Rvr/@&?qN~-{t[Ҋr6YւtO3r=OAC ,E% t6M0긧kEn]"f}8ҭz=5Je-hJR9a-ŘИl7ЭT}2VڞmeU7aF~~ǜ~"` )'#EUP~3.1}ۢDB4 `{0m[U*5*}I1٭X߅ j+4X瘐ឿi5m"ygR%%Z.ԉ EH2}@% d^F|dHLAXq߹5)t%/}pkX7`Jp)Io]kˬ5qɛU)8Ӵ^=spuNe{I<*܊( +0{vdbj_8.~m@asE%Bk'!b}ߨλsu 8 ZAd(Ĉ̋ɢ;Ħ1dCX}q1zё1-Mϩ I fUUEFI 562@ELvŮs١`/$48Իii[OO^ {o|Fډoj,7`Yy5E"3S-HOVj=g%L/;ޗ%uJXe.Cp6l]AyWaZ\q@:'2b׺}'{υ"sX&5gc/r(@ܐRf,!mI:oY[u2U42T/z+fkM-&ȹ<6+K*O\2'Qg1n(xީC7~ [Jў҈OKM]ΩW(iT݇q A3 @ٖv`Ӳm r^n*D-)"qN=7: V/pDPτf=pa&Ķ\^9u,` yXmMq Qv9skl=3u~?KR?$Inf|QaTp麛ݏ'$MZtlpG>Z6rĭGRQ#ewk":1 7OHwSvr'&B Yڙ&6U `ǍŸl-V8WRT)=3YcET8=ac`B#\r,o횜x)X`GoX'9M}o)q ϶/TmkE MxHѧ?'>!'MVxJN][U*ҐSv ch&f ,,ž5h%~POH(C[w?.?ېFl˚KuF2ElUQ7>:$MqU Ntu,>#?*aر8 P*@y0 EN+&+Vd`grԫɹ:Z:M K](9BAN+s@Xt\>&@ժIPג4ɯn-=mgŀyNZޢNO_Λ`7q,Ii@ .,aJ3(/ZO4B` e܍79(Xcy'߬j@ ASP)Lxp+\:AZÅ* Z_C" 4$<Fz;-$5M>2BnНuQq_0l 3i8R!JG$U'תJOFԍ8ڏ2&pofY/E\(j& B-EA> *. yq kJ^rvs 7VN5Yڥ' SBikh]lHЁ߾~ /(N!8;ursff~YVsە1=\_SdV^u}K%o~qKYzPײKtPp.N Oi`}W&zM@Q/ ̅vƼl4z: q_"_xv2/']>}A{a*[gRKɧ.)[x[nfK/s<[D ^OIAʂ(ߞSh9BG7pI4ۧi"JUs[o >+M(NY貽Y1ON/zHHc Π=^ge+n @+XnѸ1+bD/1 ƦhM1 ,zHcV̆ ֘ 3e^f[d:mN@}]mnX ws@baC? *PQԱ"!]+Oh8MĮ "+%Sa1z%#is4Ɗbh/i #ܖx U*da4Ýq+J\[;zoXk\fBܓR2M-4Oo "Q#2z0cO ۚp ;ypJ[tVmojMEb)5mcP+ adab֯/e)FZhزw?V}QA7P\#:oBF%N/Z}ys(AH~<|zqR+b'63:J"{ru[S-y>b{ж0եa3)kތ&"9ؒYXq:4~4Z9Z,D:809SCO0H~;V((ȂTM"@{F!C= bW2vQiTKR$ϰ~hWeZ\zs{C?C ib +BW%5i97 `IȕFOMOI=773C aD]BuǠ5`d]b^$i&# ;a8HsYT|%~4ErM13Ub'b4ي 輻! 
]G9_^,S^42 ^#-bwм5)  mkŵW={;ʌu!!'{r+mji|r r7#~*p2kЉ!qÕd P u qF,.vj$kL4KhV_|L|`J|'W2ljS?XBl+5XiiJ<6rڸLX0R05 P%}pV;;hPI$>L &cs[;^6@UC)ȇgx߭t9nF\0:5s.>ݍE^ ,2*3P޷pZ& 92?AGEMf~i7nÏregZ+͘nPgAƔZg/Dvz0n$-:tpH}8%p< {]]/XT[닍L9В%\Mt;)J\Xmܰ\+d vm9FɛOA"bQĐ6h~YXHP5)`}2tFU L3#:I"لPIy"0XD43"E: ,Gq<<RG:ϧUo]!P,7%fo/b\;"-Vִ(!@T6 ,5D?3moݎ/=Tf;4ܵvT;>kl5'2+!w(ն4X j3ˀxVyMJH/a,7}U?K"^b!кWS#sTi0d.9)ka'`"B54hQBk_a5qS~C3Ix/Ji-surqU$ +$9By1)叢Uߥg& NV엟y}ќW5=2K7{L>p0[2dWAsp!eB(8üy s%e'#`@ĉxߩ.j ھi[ߡ`DЪ\`G1ǐ@u5iݵ̀궇A3:5Qfsڧ0ÃE'IkĽ99UgE k9ƻ{uh@Qvu.T,-iə|nqn?>A,e R۾j HBie%4rlǫ@ [y06lf]$ko\"wQ̉ /n_*ųN5WpxLL'6΍q?wpVt`O}^TX`iej(5@a(56\JߡoC21h>⬐P|@K)z]6j#27ZԋqЛVPc Ss(UJl()s.[6/)5(I3Ekt_( ٣ɒ8@H:y,!|IъpWH:T>1&8ar$Ç* }>ч,]^A[>\`-&knxSLI"9F9<%ĸ)$,oJ{;/ݿ" 5/3oG2}z6%1%2, q~(HЙ^`][NUI8,Sc) zAk$bW&WCud{%GIw >Ŭ=KcQI/xgR ]t&h0Q 蠌^ϏA v9DT%FGݯ*+߇ddKſ;/_hk-κnz(H+V7>&֝edK7_iR~v!sdP[<7"pʶ !@?/՛hH]zD"#30"+- $>}( Uzr.d3 -MԟIb8%C 6C k];SʜE=ؤx^:`]l`Y]A{SAl^\ 99s7H{VᬉU$j(Œ@iZ-rΈV{s( a[8w&$@3M/$m :8P<Ѿk.V,vL(n'YJ_g7*r#~CR#ȶ^ k]m| \,Z g7 O%2V0K/!souO{%_)<䩪~lMaԎO2m۸Rd3IG΄چ $bS\^wE@rb'6x4Jd|7w<X[ӍYA qwEӷnwaԈFMhvcRЧ] %LF$$:i(9=  SNWkasd4nBٶ gY膑X< } $IuSv]OG!E}sSZ0]q9 &P(/<e(jU|ƆKu0׸!ݯGԣSB˽:ֲT&͹* SnP'Ьueon7L_#89;, QTWdU (YNPҖb_ (bzʳC>LrD/L÷G vi0-Ԡ %$M=w-T+JrY]M- 7αН_.#@9䃊s->~5(Oa8.cTs[xT*oӾ9X2&2'֛lRyK6] cMqM.&=h#f|XDb/Ӌew0cԴ(3F#h?gJUt#Ad4{O: :PM4 ((1ʲW 0݌!i0 LS<|OdWɰ%"6߾h~ ! 
9)3ôt/Z/8vA:Y6AQ1 B և:/$^GC3# WQ1ZHG3lɪN[9#@ۙ*:9诨c$#vB^2+G [lMd[EC|Ñ([Qv DM<v |3=t5~={):YV[#3ٟ7uIk^kՖ1V[Rx߳$:jI'8ʤOb#U:/S5c<2`B3]*-} n$ mN87ޑQ8;o+:Q˱.X@ֳLuh[@5}O%Ht[^7EEn`c-^@۔k%޴bnWlD2Iy=@zࢬٕ#$zQh}9Plm] &1(B,ArLVkV*0`h*rCі=k> #pP7KoʋoF8 NP멠4;L9 ^ѬQжgj ~QuYwcص$3UXPj:s\":ob=yxI_Mdv4>m6QiAP0:B`qJ s}(*ڊ܇?o:2Zߵ}b1 ʛl } dDGk B ľQf)PUHv7Z` '* z%Ҳ}&av__ wد8I|7.ɟZ*OOiKRbbcf (U@vУCnkkB P 'Wg72KP0|꙽LN{җiO8Lc%s-OxV7'Lp@ H'Ouae=Y9Px-m[W22ÐUfEYXo(ի'05sVOSXsI4 *3fH6|^Kx$I-wBA8ŮIc-̅wkŮR96$b88Ц$mϋ5̸QFKX(5[-aFڋYfMP3w<1GfHSFah܌HͲ[$NE}I)!m}& O5y–kW$K e4rvI\M?)EE`GȈ1\ӊs]ݪp@_AIhaĪ^+ yY;n=~%"c@,KW5|; CV_*B\˜$ځȱ/֠?04SVdqAag(U^%'Zwk߃ľHvY.hY„ˉ"U>R;?h&k*}43> ߣ")9nױYYR nA QTjzqR@Ds!WeG K(WD{)M/#%g/#L$*q=Ŗ{Ř2u\[_ XS<=vǞ4O]OKjۅ)qe[C=TV0o…$ !P<Ӿ)KSU 0":'(AEN][LLl՛s%)915:2ߘ[?,v| tcf+wYP1">[!dbz150(#7ھ! > y1R9 1u!IL|E[GޣSJŕ~_Y秫OKbwp& +ZK0RoFVG4lS|Pqwm6k ~(#; <"8{ԕWn*GJX֟u+/-=8AP,C6>3U2eķI'1mL`jኳ춍|Lƒ1,<%3ٸ ;C1<|Lw![]l5&}c|@rO3F0F ýR(O'w`IX* RjM&#B?AF+̥+ VQ= mJج+"&ypԢGYbg5atJ:9~m߶ e! 6g~r(7PUc&I"53XlM$1hUF|G擝o6+{wʝ0jGo`UT-#l028JaAoMK^j5 MP$]u( 5i{ϛ"I)z#6/'$Y⯆3;9v&_>p?ܼ2 F9pͦ=GȽ0ky9-@IJxǶo~Ǽ EJ40fϨfDMDtྨFRP|03,A hNX?nE(sj=ԋkqpzs#&#a I>59CbCT!{ny{C+75P/Y}w gt}e߻U"̅HO+U|@Dhm"1Z3t{OD68Q[QؕZ(FRrSGr`Dm0Ts)8 X%2 &[i@L%ECXe'Pƌ˦3Tӈ2q2ho n+r)AƵ'C=X$Zmr71(ޡaSed}މz񬤑EpeTPwn:#ԆVx7q@FUyWE?ZL`xNv/PbF4mFCG4S17ri.Ze]Y> f*GlE!I-^D@2.ӛVu L.Οr(Q+d('D C@x;ta#Wӝ 3aXSR-jZo. \gm ?rr+nw'Bvԉ.JXTa8YBx=6z'Z|g y;{3[]XZr/9[J3[SB\v.{F!aaF.&cM$- Շ_ qF>Z%ϧ@L{vIi ,e?plsT ӚCC-ofzz @X/v6/{oRd{h~\ Hцb47n" !(GJ>vI41Q%aW2eH9+9AAٟGd-4S{Ж̼L@F{;8ctAY od2q/y~7>_?"s= s ShA. pP}ӭTI6s>b۽_+4^sϛ^:7Fq,η&3%,Ġ.-tw{~%o9:?2V]W`JDK1:kN)\aӑZv{Z @4Ӆ;&zu. 
a-bMⱭrRȀ|ӟcy ]s`ɯJL̥4=BL?jt  ʼ3&{[2X%Gn"KV ,_.Љ!{BaA~ڎk )V^Za8ap!J=e1"vӡ})rP>^r&2bAWz,K\yJw1_eI̮}-=" p9=U8W9dYHFlU~Lk]7DrS=4`T %!խzCs<Vq B@pE(ͧ hehsSPu&svE@!1é͉V2NgQH;$4m,ѩc1k<mF׼0Ҋ{妐G(B@["8i|70 "+x8dz*isP<5Cb:5x8pI [T#3vɲ*I}eIb2 :#PS1[L^ 6Ơ%~{EAN{2JFhh%9}kp)/kϢ?ӵ;M4`J+a\hK02{9}@QqRC[ۇnd>4˶@4Nw돈Q%ç$m堠cC)3#FjcZ4mq͵jdyE:ABᰉ%x{847Vy[-3.ubdB⭱Mhxyt)`Z:)Or1jzIs^ OTa`P㱜R,ޕpx#{5窗nY[sIn$Iz_r`)0 l̮H޼7yBO,$K40c~.t12*.Cdhmkeݖvz0EDMAN`$|8QQ+w 5p,ӆ@x"MN|J¯,w/[QDS|[y$\>/.Ja1űw_B I@S83QIW7" Ixu1ۂ't4@ ZSeؖh]=sh.!Y5)į4;AӋ BC-q5[[L~h1RNC47[u欺4@N%K\*%Qe7ZC2Cr$g$75_`}'N2.Uj-gJ@-H˓IjܹSEv^F0QC[\@PnC( 0_&m#酥C3m;yR3Uɤ< {|7Qr ^l1 ,@3sТúIߤ{wD))[)juWhC5;U s &>fzpbeXW,3I<s]pu׃|m N+ZՃņ)1扗l#!FfYm-<埪Izef[y2ebːB]D:hiۊs^v 5l8 jޑ%ɜMq]|&;z0Ec1 olcѨdΞo{H ԃ_PN+;w]/S=E/{RfapZu@~s)Sq6tkm,0*N*}[z㮯F T龚 b>r2p(V5OAo\njlZ"{.-sč+a&yMF@eՏ (nXX?+(։p8pf_26bPH =/MԋG_u߹J%u6% (u}!T}B5`8Ӝ=,r /`^p.:}rDn.+kM8WNF )0GV3c4|C~ `^IvT||t6G犓ұEe͸l?RJ-+liLQT靇JңK91=K-|xQ`^7[ n,eי27q] `T˱iZs%|WI)haIn.*6J#} OBA߇ld!Q6:p`HYuvXTM *xB,Lgxy,d}8@gO 3?$Swg{w.ش@v Z$aR tP]GRf`qS/.`O^wfXIe 44\:d~ܚI4/uc:/-JeCUyuք]*`Uya|V[LDJfmen!JT`w'I:PC*1ϓhuęLTn8qʘ'\̾" j&9"()ӵehrcC<ԁ*f!6O`_ ; UW= Љ,vSșmFi4cc"k7uN/P街KWy&w+pvH)m])Pz1h1]XyE,'vFH'.Y}`DKyf3CsNKWY=z_Ř&HAxS:`XMTe./rC'L߁U; oYqqK@~y<2{8b|&7ĮOHG$1ζvXB!vg!}XZ-[Z⨲N78^j6G-VVUߦ,%L Q FECAK2E4`K_EUt9,v:3a9^wBJkH0'O^m,9nϡ F%Ӎ*Mt\ $%T7}-K 5ܻhd0r `@Ý,RwH/W"L?@ ק\TVϯt,bĀg+˪ޛ 6=`N%36z`">2-Q#1 6Ȱ.:(!Ɓ};i ƩP;]"^k}v_+ks3 jW4%w궎:YmL Z@79*"~ӂ@6MZͰ;hbj?C TPHw9J2Nj ^gAOweq>{E4n;Lj2 '&'o1E:&6m}<8hmjFq!%Zr@D铸Ts6 VԹ\AmVq?Z)Z-Ʋ+3@c߾g 2׽o2&}ODIW 2伮e!xb +~\Z~b'!!1Ѧ=!aXFxVeĩ{Yi~nЦ!S,Ӵbq 7kUqvC!p)u_Mw.˳[?&o?@:f=e$]}\3hJ*YY%%Jg^8>zM|S7{U*9U,P0ogga']Չ!fR om6 /T@)/t A9}^ !>4FԤ 겋&Rʶd'#h9z "k0l%aFH&"qY{Q[Cowv#L%RM7.eY|`4LF"x|$M#kM*ƈ[a~3ɉ5MsOH@ dO, Ӣ8^j}7MQW5!EԘ؍2h)uYSȇ La @h߅-y15J`u=gT?Ϭ91x1^_F,_QU-HB9-/ϯ4#pW:/_L^>,BL҇ UʨJ_*lɡ^~VMR6Í\g=2< -uT !ߴzq}D{{ JlMGB1}FH8A ;*u-,|B a:+W'| lk޺KV}ƚ6oFn.exf vb^FDkIC(5HJ08tZ yVE@ 2}<#=ojmK0?W_>eg2T^!wu⚎OI9;G(]ӊW5"W?)fm/d9[h"<Յ4y&<ιi?!K)IGccXaW/G5i-Scm;QD฼%erJP8#o9/4`{3|F31$?&,I}EŤ R)FROH 8 
_.9"״kCkfyYh!OlE3xQ7r)1]bߥTç->Xz~L@6趛xR Y?DaKV) LF5E ?0On=y5 U0g>.Km用GSP@-Owm??\IxUD#Pv[֭'(dF2m _i[߽̃uLMDb X\`K}(9, $uMkٌB50F$.㸼 7`3x'ٚ-6>vDr >X>%yr&zmJ~D8]k܁߃c$bg5F:.gϚ˨V/Ԍq{#|qV-|Ҝ(L%T~\kfF+90M_p˴\RM,`=@y%Cqqlv6.yY.m!ڹφ ܙ%>q[ /-XǨpd Y_DR eW-UjZ`_y]gěIO Gky&b /I'oߛ{"%0R;4+SY8u{'Е]%\P:iR6< j"GC&JhhO%;iQO:h2IjzN;Ii5D/ DO;n\d&>mٹ@iaóW f OjVvMa},@Ɲ{ 7r 6١q\F&qnF ؾAI|Y݄i*hk  RDΧ y^h M#iYjƜ^ٺn͛)ZúoA Lɍrܗ.議"s6$;2 i:#Z9vyw5wbT:`vnƚυ[ޅِaZז)xKs @DZFv~㰯БM-HA9ݢV3)[ nefbʖ% ? )-3ژy&&y9A]h@Z`9H Pb]QOaYۗztv{Z]?ѭygI0JlqIH2/C .#1 (+Z9whpIO|O9NMY!8$:D!v]IOP)\ :BA-ٓ%C)pjsP $ ]v^r2lUj!ڔ(ȣwG{:&Yۙ3/_qg49~AGQ`y0331]BhK2j%Dd~..薶RcRyXJ8wX#9dxJ +$[*$fTiD/VLԝ}%JAɾ[ēImB֔ݝjTn {솷XL,X-̼VgđGiL* Ԫ#RV͑M9僎 ;Ʌb*̀>NѠ =7 GQ:Y?Mח{oʛpuѭ-3b٨եҒa7%ڧ~d:E' F8,7P|w{~t4kãz>;Q(}7ce-?>xysuDa3.’\}/T0aD3O,ʑ>pm_vwM髰0a'n+V-a*CbY`|FԓL?B⢾:]svWw(M&s6P& >drf {WpB fꙛ\f\p.{TH8֞QPHNFژ_hQXʁ ]m0+IJVU弞 M{C"?) !!e r-AYUt6?o0_ p~Iahq6#@ߤ׍ {-s:Q^~/ |JWNjh,E0+ypU"gQ8-ܷݗ(C G<9+YN&L(a\v025rm)#0rVkDoQTqFmN~k/ذ0Yȭ4VѮTÄhFul~ 9$Ñ򡘔7ai]QE 3"'\+Q:J4A#ho.yGTڗIyu |Rv;ն1qBi3_Vj=)6!8v[o$ef^LvZY;@˵0oV,cM`z %P>)HY+l;nFłBGd,_,|d#[pi܀o4鿯˵VGR^*~v$#E&x`qHCP&t.WVheYŮ/o0u t[ D~n &c*EMp; )ZP%'LYUra 6ҿjyD8wx G_}ޓD&[*\jGw{즙9/= q7h+ßES'̵ܶ@HmZatiMeAB-z7YB}"Ej"Pd rww͠NҖ&(>ºyҞ 61e*1]KBMBy[P| 7TeK:;Q g䈖P.x!?Y'O-EoCƜ47FqnBGh?% ͜ HI xޥp&h "bIqpJP`1 [FGɵ?h𭉺гnH[&nP*K#ޛ=}ŨԳGח̟cvF]1" D h >#u:EFN}C)Gy$W@뼶Lᮏ٢LPp.\8w4$ 1<,iˤ YX=}0o Aht3Z,a ãs3V/6?D=]F-,k1"z^N ]vG(NEl4I9RŇ~%?Pъ]RMۇ}:/6\]e*r89bt ]`Y _Tڥ 6fLt>a!(KP쇇1W>,zjP_Lg.4aR!n~8sC m9J4?4-u{Z*:h4o 3 h 3«=rWz[z[O0 DBJ A ^tYRwOn0'vYPJ®> 2ĴV=PZFtת//] E?(UC]dJsWi%JOb@=m$*UB@GriqCgƲ5xxa2WŗuDStY%L+ 3]RLX,ADM2ZA5[m<.f`:,erp~HC`R3rI.GW)$[3%BukC8e*ېLiFqb i<"kAv , YK(߫+bbc4P͵=24~`\DҮQxŊ?grNb)Kȼ@;[[tv3/ 4[B )ƪ ,K Dv uN%a{~`.9-JT =ʳ , cfmfu@w H! s;ݍJ%C+b1x%jAz*u`}t`ˍ1B 4T&MۖbЄ@2` Ր0I O|ɩw nZ/1yqkFcy;BGK*Y`HE`\iLzܞ&4O#D9bIe%-F+qw4%t!0:/z.HpOɅc"πvǺ0Y4`! 
$DC{C>|u@b^P/L.td{MF]"܀puCۨp;K|AmRe}ʆwJI4{a$~WxbTz 䔀yRA7Vjj6UbMgf1/LƱΥlq/gTMcgtW7 ׇj-.;H.V TC|7AFa^Cyc*ǥ ,RF+ٲT֤'7e(QAS];ߩPI5U|ő.5UE1"$b",g{n"{)1k9v*]T|nuX6yъT9(8ŨSJ3gidBcc_`h{IҌU ;%*5FDM YMk#Sf˷1l^}Imw)z$/^c|#DB`O,+ng'lʩTQ[D7S(2y5}ʡ抶o\eAȶQ4&~8ǬZG@Bd?pf!}MUs5fz0Q7o+U·\arh eyd=;Yٰ ɛ}AӰ*=m;a iWNV/PS >@˾` b}AXi?Nu{+qO,AS|z؞_OBQzZ# xZvwcwoPf, #f^ZpJBⓨwaG #s!16ڥ[Hâ4Чh .>7\SgL{BjޠTX^\jH(i Ț1} c!`rd[H1"$%RbD1` xk`axPt7Di@x܃x6 9ީjJ1h3^EnS )by#(J 턻pta<~ 2YI{5XYWUQ]HI$~ [V0 =:`cbEmBE#WWOx-~HI[P$SXUX>jYu1`w6udPql3/*!,vc [VA BTHrҺd}@ؐd4]"H2 s*?&|=Nd0˧D {k9}_tQ+98L~w&w,tnFJÊrжGhmwO2G;CE"eXٖv8@ ֢e/5{dpqIQ,.%S.|)EwWکߛrbɾ8 -G07lW/[+1TiicG`6|1.r' |EވQǑTz@gVzN6T6ԡ֥_{PERa GlT K*wñZRvERX5&ԧF#|< Fᬝf1#K/C/iG킥@ Nز 4_r9Q:'dbkS֭$Vfބ|G9"(U`ձ`^{Qtwv2)2bAu3𫟢:BoܜT=GĊ? c145#?q4%̱R@G4xTWŢf(;4?Qj`M4ht.0ε?x+p&z_sW㊼% 2C<ьr9 ށ'i 3\AV_e6bG\@rFF)u2Ɗ*PѴVLET)&4F6bۨ5+f 0fNwW=˨;[Y8NqtwЧv-Fȓyqp?^fWw{@#Y\TDYBj{?|qódU,#6Z>~+i`0+(Y.by6l%Ϳ֊M5Sw8 J(Mn/zYM%o,ˋ$- <+;M±c/~ 3[ h4!+K5'U>sҥqƗFdg=\[<[+eJG*n 1k dzSmtThs|2}QU]nfVPwaIC\Na? jl18rGlє kSuD.k& q `LJH_*x=&KEWX^vxLIG}-7zWz`\=Tj#o@]4Re\q =UP\/VoIgMSM8A#N:mѴ8Ve>ASy\~u l 6y |ȜXlzaDmVT/ 萦)a}S (qn/*Zr\Fcl}㊾8ZM^RwRavfK/Mn6aZ1IXʵrH4( YaRԉ.wx[Rw%Bz -Т^]HUAx_*p6pIg`?! :q>i r}NȩvRf?YXced÷.}h96z-gw(dnqX/(4CP1"o*71BIX㸶G}mԋE2r@ҩ8N;ntC!<4c ѕ"7D(t2:*NDm#G*F=޺8 ispOk`3b'Y]|;G}q}P4$vlȵ&Gd]g6l n7AGrf-Rup}e(a^/grsilMȜA&$#8o J~!^Uҫ/10lkX_}5ᣓ" X f#7 ҽC(7ANO1[9f%ׄ^ q7H|! \k!رdݲZ(GڙU_C<]8On*[w 7xw$_+BKAn3MPH& 6TtQ@@r䡠).{$i"]pP]ԁK"ͫ Ѣd2YiKB./sĮK-tKQ`eOc"ӛw>6.82 X0K#FWNH4{^9t [nvE;5+QY mr3!a_ŭN_AHӤkjs h@X`:Cw.;zNhJP; UYg׀SI SߗǢoƨ^= %;f ՚1>#eRo !l,yPm4L‘mI̟R?髈kB֝IQ'1:)>mW:1b#'"K^0x/{eSP>Twdޒ8msz~U3twqKN` &D3wxw:_*Ƨ(_QQ'|v N[Unc }CT,ʮK!96;1+B.NJo=6,4hrZfyL:8tF7&bUSzKi"0ݛjԚObV}Lf`j!+QmKp 3I(\E5,1P['wRv|>)rgDK]FY7~m{,D"Y:;L69Jg ZMiScYRSWR'q$o} B5o\}VJAqRܽ0V[v"2`r. 
>o)tA?213^bnVX&lb.'/}__qJްqh/d65?=9 d.;C; ,N}2B" FZb5V;yTEYZ摸vľWKaQX6k>u9̢Wt$ ײx75g2$FNKڈi'UA4$K+p3|CED]a۽ {IW 9lã80(~jkLN'k5Kҫ_ mw u}d 8FY"SĘ}/W.l:Bf 9 ih+r+"Im հ[K&-T];b8'S٭lp(2CN28 .+|i(OBr7heQ{K c4Yz-MbL^&LUחIv?5DUn߳]b_I=6w\g#h58$ *=xʲaH?9(p{!އgtpO `=N:Y}IDG$t=)c*/YvQ؎iI z3 q> w .8]vdrmTC߀x3P3X[rt&3R. #/&t0]w`*W{ Kq7f},t5J"'2[`U1n*mjb/rвGxTK '[i<M{ۥkW?"RdRNC:Fb}9REp83FFу慧Dyvgw*%㮴< (y2Y\%%sG3Ca&xmWU"I"ijQ&;S@%%c-Xx9yBkC9C瞁e?2OZ 1ǘiBE>y\/AqcUe6] (ҋ9ۑCɣں)N92 XuKOY#Tn(k;l %: ^i4p_boCY&l Dr!}s n/IIMf'A[Pâ?OcoP {.=LӮxF&\=Xe!fܨiM &M;nBPC6Yڶ;(!, sfFgO$Ű`f sEpVl}~)2wƲ݌,ZskE #ɹܘy d2?KB"B OEkxiJhslk57! R"%9 0IZ})Ń@g7ֹ%flEva Ϣt}3&?Ejww)3t=aB-q@@}Uc@4ʒ,jhtvRۃmf9M8~5:X٭+Y4HrmXIQ >A%duCɣT&k`0!.5i;2ܬvbg Z0 Eߋu":MLZqC?&0Ja >}wl?,%%Ƽ88lBgjR0tc2Ґ E:R 2~r:!SFad{==~M^.I1zlte׳Nĉ~)Yi?s4G8?~2U/uka. )'A-H,# )zO6{Q"q,RZ[CGͩmB1#D/mz^27 f[^ufI< QB7 Z~3mHو~KZy٤G:(k%&N[0i_ZӠw} jCog i@Nse48:Z5ɦҽ@nFVi]{ ֋˛š _Ljd9-"3ϫ`mӜ\tu=xT$5Y̵VzذS7Ӽ8PfIԡ%pe/t@Uėh?{ IMЊmm[G >לa؀NQLpM"z 1ݏl!k.i062rifjQeC!}RԽQ5f-բ fTNլ0ӝļ.JW=kWCj^>,zcO9oy5D6R^WR*rPe]/䳽e3*:ϐjz(\rfdJN,e\OzL`]"@IzFt)uVޝd;#)y j(់?QVULD7 B;@ӆc;[I*dN@oQ+j-/!3;aƠUs=en'\ 0>]e(lHtbE=+.ISR (ZQ,}[F#ŏhGS;h}6m7Qc$:eQ>czn|$ ؟USUV^t[$D嵲f>5q{qGSaJ8A=@A-hW{4gi)q'7~k0#F~v)l}iّH~y_t+h)6`#BwmYϞ!F⦠TqT}HsfqybaqwY)2J_>PW0;zI')~.7}4sև9g27',u j 8Ze-1qU<2lbG j=vPR̚M/pP=e0s9ik.j`Idr ufe`#KURV+O]bI +D6 ] өD| y{::- sm`bG_ٰ0T39DA ])y%/~c6#2d$t]1 ']Y3U} k#<$⹴_+}a֝Hr0 LmuQ( xH6p#K 2] O9"QӽL`FV9sVSyY4J+.} 5 Vbԣ jPȻ%WFtP'̺>W׌0=!@!ā;o)Y-MԑGP G^U M31]*ݪKkpFfԹyWȎ4mR?{b?K'H =Cr\8ͻKc[:CtTHMԦƓ6b/avW$9 J~ pN: Y 3b^cL $~ͮlͶjb .?*7gm \S/FċV={Cs)62 ݁ Eu&lg 5Y_nP)n1ć։"̶r]Ǹ_im8>2ᒟ<~{IP1&GU',@:RMyA"P v ڝ . $㺲S]BLN3M<]ԉT|W iWOȕ2_3;y JÏ9Bߧ}"{BOf3-} ? 
װJ@[(PqvWtIKiNvcE̕so?',4Dv|ܢ@s[´yteG(xT{feq'DŽه` f!HŴ/{Νw!R1R~?rb;fY/ïuS?^@{7NcvxmVKOINcu5iYw>>zϢ>N@ƻ{x,oTb1sP Gx 6 *ʨ=~*=Z!v^QyM S}HrDf^o$}^ƥ(4!e2/?*.O?ЯO^SKd% |&M*&K(L7ZWM @Oe{ګ^XG<*(&M1̾h1wɰaKgwMv2 ۔'I\7{C +̻t}տF;H]>(޶VD: "X 4*W, oȑKg I}x3 aBj WVvBJ>MvZ1Ig/sQNL +^6ή]ˤΰ @zJq*vlv璗R>5$J0OH̄+;3``e`D:`[zש@(pMt Ȣo>k+O tT$&-{:sI3>Q#ŠC6M5a`j֎"x̰uY %&Z:fpF,X?Fzds=ɲ@.d.hnD\o(3F]7ȲjN%!G2Ieˬ^x3.JJOJa]Hob DʞG*~oesoH=d }$mvx$A\ :mPŀǭ?il*$Hʳ҇9LvnwqDZibI1Drp=ŀBx>_6:RvA&(׳aj9x>\[RHJ|ZCK*WAz'C#Cm O$ZGw%RK`;dF]ǞOjP5F8%}y#T:}=ep[x07 yZɜJɣD;Vx0Gۻ܅xi3GGIʟR^<`^V-7wG5gmtt_{2buwգ~; +;>APo( <`dz| KmJ9.b 2ܾ7dMEQӔZ(ZD,8/=l5Râ,Vږzl{څ3" q^FlڰrA+(.by1;$ PG@AZBu)7ƞ2~?o{tWRs%>{#Tbg,k`'Fv#i辷,:ua`P|`ԫԙM/H$TKőR B,f|Y} ϛ1Q7T6Jx!VZv1DʆA^OG D7G< p[l$cIMʳF޼#gX|n ϫxr[+DMtR,Q򐙂.s?9؀pW&k&$`G҆L7Ū(:"K{e7:tlrbJP[^}7іUА4& ԧ/WJ,͙ b,#CSw9!N-ͧwUj $T:mh SqE__B vuT4Co8~5JX"nwr5"v\nj_s(~R!I6~DwᢵHLS3TKۧYժvFA$%5ʶـFmzl`h[eT[X8Uv b4F7s7WE_T aڞQE)d:d4׌py !m#p  @}| e~@{J[؈@0}h<~VHY\,4 ,:+ ܄bPQ34֊/7^-f xz/j* J( m'"o #W=ēXx#Jr0 (KF?z.Snx_ hؗoGp,ʆc*4L0:>vFs $+VWMQ@M/b WO":P6` mܑ4o8wD.,0bar=ܛKg9?^^ptilerQ׮@L.:X`߹6ovcNkl: <ÐByy('Z֐ʼnY[y3(tP6Kg ͳʭfV_SČ ASC&J+IBM;xa:3`eU0XA@wL Uѝs'45N)sv!r9kq /Fg{WWf%wnh|| ܎ X$dN5t waHZBWTod;>2צc({4@/yStf}'56򧯾5&w)A_sO %('/Q0}q_1)r2?سZA"E+X'fR7CG aK. ^ǔE\,:a%;x̙2KFЯ'+Mo*po톱V=̱&%bd>~Lft3̒!ϊ]VUR͛&P!;wdo_!l{r4XOM$,fp[[c&B>sT(T?]gstI< @e9RT%a"6(E@<{C!+Xq}m2cbafr1!Klx'-9Wz@+g] KZݡJ,:l&(ow쏒iWݳ3~!fs5Y[?T4r.mLTȋ5aݘ7;N0\-HrtÄQ$3+^{/v(L SMt)f7JPԚKG@^9f}%WePm y L<3wTI=WN?K+:*Ѫ tN#Rᤜ#bV(k`O,\[_T)w_`̉0scomOL7jΧjai|E'`YZE,5n@ zSaQrS OJکY߰ O44P8,/)[J-|SY2ػ\bh!"~sU/{q^ z_ ~tޥ&.+X|>*bm$lZ+L6Ȧ Ѹ"ܤ1Z\2:_ƞ=\i) Zy1Ut%. 
_uI̕$Gs'ЫbimK:h#y!~\|u\w©k4/k 3Ef|O_I $GJΊ,* V A_` b ^9/t#4I&,~<^[W$cOq ) b]hBJ3ı[Uȭ1Ãbs|X MP }##L_٬RX.09QS$Zop*;-1@I$RC)6\I_{m>ZDF%&>A, qJx]5_1Y MU{!9}pPֱmD(KB?2lZgm9B3 4.h6[c9v;wNdOeRSκ)T/8}Wː:k5cXe2 dAI@D3)fE2H^P3E"GR~36-K$s|xX8lmZXNq@bw_.G$'^M yʫL#)Gb g[:T4P*ɶ$+eZſ`#̕Ǡȷ$  UTҶlm|8 igCEXGX v5)g]SaY-Ah83K}PrM睃šEΔ  ^fR}g8B]Gh:PM=X(=+or'd[J>ǞxpYЙre60 f"Hpp|,GO(*cʰ@uaL9Aݭ6lXl_jxLs+4^XL*Lވ%ia dw 8z# ZD:RD aZ~](-,=qiS#!cM[~"6یJ./K_e2`мRI&&kVrl+uV Ba!uE4y\ZҩZ倵eG$XtH$17^F`S}} }6„ rJbTvgmDOmC?J]w_ͧuB!he `QĬwxkH#;gp1fn]~+@k)NP Q֯2 F_Ucc jphe|!l ,zoeȦJDN؟j C3.*CLHdz{!_yJrKyAGVBš 44v.%ɀJ+$Dr٧uoI-={Io>0LYJ}`VH< wgQ9)HǸ_GfXy33u!ӗ]: N}EUݳ5c=Yi^1n1a6'o\8Yh{}Jb[;m@:k B+ւC`.0 $'cYxq}1+s\5MA0cANґ[yzlV((ӈ>TdV yŋ& )NQA=^AZ3JVVJ_?^&tnM"BM/*l捼ꪞmoO9^T3NJt=c1Ӱlszoy[+KL[c9t$-#*j}Ԓ< IĹji.vWjeIZS#J=`urw>GT(CT+QѬV;{č{Y-0P]$IN^=ս}}gFVJy/m˶.j2.^؉-) j~%u1J=j3Fo+Tv~ ImݔjZNN`f{B$f>вp(;.~#뚻(mY:Xq@' {r^nW+DۃR~FEY]T%]ӣjB}DU.YQMzFcg9[1fb+ωШ[k0ŅWL'q-D_ވ3;V/^9 ukgt+ڃ0wЋ6!0랖eLٲSie$WDtG `ρ wz Вy?S=Y8 hgT.~5B>"Ϻ\9y̼!UsK?ut L >o3 t&L@ށRw\ժ媦X7@]Rt:Ǝ:,)}Tft ; ԊLk֐w*>U_wӲR_%XŃPP36s)`nnJ>B5ۈXUg/dߚl{nOVŸ٧r :Vk:/\'/oʩL!DcϪvٓ->cb31$փ#vkA,l);G@^t ,J"9Đf 6{z4e=%/8PUm_BqDz9U\]Dar@!(2_PV\1'$;Sm=D>hgߗ[ZmE M]sy-kW4緢qzvKh-*Y* lVY]ӉÄCh7ˊ>\sUS8)Q_\Ss] ^2h"o2{ݚY$U7^Xޯ)'L4Re2 w({lu͆m!?f27<>,i(3z٦:L,(A\|S#*DkT'`'zX$K.ęWl)(?AxdG辩pbɻn<6i=%HI YF^컔 >࡚7A¯ڀ⊍5s`{&bf@vF+TT]pՓdz[J I0 0[h&dG!LL?Vi9l]4^BAGڥLuN[I㖨CxNp٦y N8öׇ{-_|{dky'oH'%R*0tPaxW>gKHW"ng_z1~]3)nU;`͝wր䇏j|d c/ L%{oROI[!PG_nT{'7sSVRGY]iArf ٝN[L W['O>wcK@6Ĭy︅pLaypn˨2~T qS]~,&&J?z`Rl; w9rRs@ǵ@~eʊP_o{3HUoC֝IّQvE.0> *']s0%KLT4CI0&4W@,6H"+gr/QϽިRc7Ct\}iɿ%W_3?O/$4&"<ǝ0'.o ,Y{ Nu"Z>&Ui5`y"fiZ wCvm_uh.MǪEa1 S¥ c2oiQ1Mä5˚m# z*B'-Θrň5-B*@LnHңvU/9,`D#xw| @L K =q/82aёhWP R}T=: *ŬUCR͉wҶ:a؋Q~cVS Es-# lF.Egf7zLSԓ @_W"JGEk{0-,1嫸zs;X6WqKwy/ņ "3D" 5zM6\7/Ѣ%u k8^`F7P+78ZTSytp忿sv]( {:hJOv~. 
jnpڃhF㓃QOԌn}NWeĔ$ghY1bR|@v(b *~*ʩA Y$(+d*>GȜGflgY!Ƅ rzd -XBN"DIU5~-F]QgdK9o8"z6]€C"` 1K a~1,2xRKŪ:xtFU2 F1˃n{-d tTX+6WJjv f)A(J Ŝt>PW 8c?|`<}'1eTӸ{g_"o5ETvBc暄8䷔ Ѓ<3ki{q)Ua.Z zq˟NoN{ G0z|z[0o3ў(i ; (<}1[uah&Be)s,_UY(d'uz9矢eaPݎPm#ÿ=Qw 9膴NTd$JpG,5~׶c$ƜC.[-:*U0[@LM߭ucбx7m5hҽ14)I__C_MzѰh74Iʖ5a8kP%K9U ħg!H[~G: _X7*O.\8/LQ~\f! @cG7Ctg,517g֔~<AGbU.7=~Пi`5B3R69fp2MOdY8sT4;x6?p#>5\T [PuIֱSƀ3}/˶MO'@c1tf# U4gg.sprua-nm Z`G] Īף͋^7oظ=ҷ 󛦕vc.c܀6-ݵ*<gEa |~nU@rlc3ay0Ȅ/`7_hK1̊9)k>WQZ"fOKJƎǧ+Ey`f#JM'c3\u+| QWLop󐝎mQq038pe&+,9@V)^q%bn԰#jѿ;4䆝j썈';/Tkcҷ+Yyxe5I#2"nKVz^I8@ R|/7Rvo~XF X"!V~z<p~j*YҒ(@<9DVFO &7UygH- m`KG|f\ 6XĻ|bXb =] <( ՗g]\O&<-9d MoQ:0jU ͌񛾯9'a?y>mqfH:?B`(zm8Q dgH `aS,gIdW3`-,bz0Yku{A,ŅXC"H6&D1# $'` H:z*`#1ŗ/d"iR4ۘc= wK(DmB NcS.{txWg1sΧ ɭ Rgctɭt -"%bpݠKkܪec][sCP~ D9M3:A 7Hu)"[;Fv:lNگATq!( Ton -}y~i)W)'W) FpBB6I]F' 0"󭥄V)& ɟM dyKԁ'Tlf8"H̯!p >SP]'~C,{Ղmw8=*qM-E ˟ [tr%*^\Q` a/aB̞:5qC˶ՎYG'~ӯ?QA1ϥ>7}g/3[De'fJآayN<^Yi;$\ݗFfT91f3!Y )k Jn`r E{ن!&ρhJ7~d8K3xTףgG?QV1 4h<̑Ӡƾ2/꟣YhYZU>swl,י\n:nACTQ&w%bK Eզ @\bF&*}w v$'PU0UܛdR3  Z.&%)] ^-5`=' zy8H< q{F\g!nJWȪe-B$8VD70~ԵclNefY!.CFrF&/q{*rf8Nzt{E'vop~H]urm]g8ՖQla6R.8I j33\?W_2W$LPg=z.~J] Af }!BDǁ%Odq釧 N{$W۵esz, >q ;\ҾY$'۵*{}#ՏMB/:r<֦C2 o{] 7edNPW >`fI`n6{(͒nђ"AIlB*Xrʂd i~4 ߯7 KLvivƸ^ J; G8ޥ]ZOv'%6&Z$2c %G6 ^6ܰE%o 'rѦ 3엏mca!܉d[OÐ#0fD()g|0 =9oZ=Wm*% --P1"yh&\EEY/' E@TwJ2\LcK1>J`횄`ohζ)|{iOw!^\&4`̡8tSH&Lu; {2 NFЊ_Ķ8q˦M 6)kxDe}La?޼JרoJ_m,r@eY@w]4zs(;]H=6 YzQ 3d !C͵u`SP`DUk.I-ܚ*qs$$]S KatO`Lvc₀ahM((ewcݘXbT2xM>䱟yj}rǶk9 V b t d<GrucΥ驉q2"m%!rm84JsÍac#9J'kF[:}Q]PqSqX؃mppBԀJh]m6tC:tY%/poaUB/^qO'o<ضY@MKN7OxU@z(h59خ'bR,7Y 2萬$uH4aZ h9y}3oF)#M2p06UzV%r0g[TF L1` q(d0%#gQ/r`a@陠q}k뗙8uV}j>|;c2&=LiJh:/BλG?V!^ {ZVm[366vV2c}*H'aӠX(0հBzďYh,;b|} PU&|fں%FS-.^i&#]TkVBp3~xeMmN2mɬa2@-*j<ܾZڛ¨C7h;QiivպbCgSp+a; YD7-efJ<6s_]Ӑ.Õ6>5unTK׃U@ƠFN`C0[s87Gκ 0B, 32'P-igS`:G\k<)6g/խ׿&de>Qe(q٬II߰?.vqˁ=^YU|mt}2bήjn 5YJo؝쨫wx~TՉ@OT1Blbb. 
)X4^6O`.}frON;xlU>)Vu¨S("  BHjգZ!3+Ոm= yV%@We]5(j#.hon)Z-nJIų|e3m0p!aҙ*o<(^'|?<%=m*J)%\%\@Ft1ض1yM؀ #(v5B TA?Q՟ `$oJҚ rJPO@[U&+>q8[X}@3 K9TkWfDv5qɬBwVl¡ -w@-<7ch&^ ?fGBC'6,Ly$43"/XKN.y8Q~ҌpޝG+b_G&Yty')a$AB)CW47~Гy8hK 3&?^b%SZb1)tt״>|HN|}kg_{f $ݒ(7Ȉe{%7'x#wS"hqMC&$k2˨;0Ǐ)SsɉYǠCPg{ 8p1$79AzFlc_- H?[X/͍ӧ6R央6T+(3L61\Uxρz}\-?d^a8%gB0C+rNɩ1M戟b}BE(nIq&R)+ ?6Lr>h&gMAqXSzNs HrJ!sC=E\o~ģrx>L 5GcC*Z~Ұg#l; a(}B{}؏Um3ިV w}?6=x5q# yqh(7VD/#:! ´RRG"lc1_ .˓5#8!R_}V)_O$o~ٷ)  +j.jy꘿OR̥BpPnh1|9bks"ee_TR}Y6Q4-T9υoS= !VzdT2dѵ8vd^Aǎ\rђ,$24Z 4^vo`jNKW8^p fpcDQ1>*_MMԼꉑk+j;˔&I~K"UJ-k;)\*]Nq>Ԫp?K2(Q)*0]_gV1-<%bg&g#ʩk!s0Opg@Q$T OvҸ WT C<~>t>ܡljYLHPpz F$z}dF:8p =~F#̍$$6,5^G 0-}YCZ@hđ s NvW6Q̹3r_kn ^*J~?oHϲ2͆_'tY^vPa0 5gC s#4JA kçqx깐}iUiϕrD^Kq %hdD".vyʏ7qӦIR&8ܝzRZnV,'NIwھB2nQ=i%x':A1Gc>%IjP)+VѨDDO0oU$Tk۱OsWPk]kA$X=29]nuQ̢ʬU#״[JqiyY2 '`I+Y䏡*NםN6 5NenԌ4cz=eՌYm1'߇%KUyHD}Tf,]Z8Ն y#{;\B5m Sokc ۭ~5|헒WL$wc|';p#o/e+Bd{ dx ̞;Slx-!OrB^5<rM'ҵ:Wn{:J/7BlÂbMfI. *e|эqCh%xu?,Zg}*`w$$+sIDtFfWc!l&FFrG5mDEQyqS9#R^"L%>e+O_bQҘdu?!PK?LMMZKxX51Zlg"=v*qww׊%0{Wj{91f'UtrP9S'4KCï̓]='y.B9ՙ#yի; Y% jɮŨA&Erro鿺]S?\܋kMf۩0<5[`j'vbFb/>}^02Yduطȩir )Y(挤{)Ѽ%6!up1*-U*Joam)qnIve!쇃ጞ8o.8\d< @>8wlԇi hK=̀<ܘ@?ߤfҮƳe'cY̿o&$5#?4R'ui"e0[òʶTavJ@T; o(0YldU8Rj윙-İ'A\gHh "j%Ty-R=3=C¾I lP{vh1&Zy  g4;R>虜j5+/΄XzO'=ğwA#Ғڌʋ!wjfa gqdx*~' }aB0[G:и ?IGAn7YM]!BG˞ A*>0 YZLuminescence/data/ExampleData.BINfileData.rda0000644000176200001440000124116513240265317020530 0ustar liggesusersBZh91AY&SY!">8 *@(P@4 P̀i˥mc0Zi`vU͚wjk]nֶ4&tLeSUU" i{2[I٪t4[]w.۶-d ,CBdk4lV&IڥBKrc"ڻkatT +Vmĥ/3Gn[\vݴvt9dݱEc"F]m1JX6ܛgV;k-skT"v.۵m6huшR `ڶ['XPR6RXjܷQ%hѶfD+@[ m5 .nc:̻c&Tzy8]]Ҵґfڭ)U&lRVι*è'khֺt.٭*6ƁSB7ll$m[cL +%[e(Vآj6j5M,Y5R4l5KjSF(]5it&j A&K# @0ATjWfVd fI$R!kYRSmhiJ(U:ݚfcb"հ3* - @J:FU*%B6E(vaUڭd"u{UV4PLB!B!:jġT(PFT)95"G{uKEET :dj@2+Q 2UC0j2#@ЦN@JR""TJ 4D% D@JT T" )@ ( P +઀ PD*J(d(P@(  h#@ i@4ɉS@Lf&L  0UT#@ L4ЍOiFhi *@B E?U?SjCG&@ 40h LLѠa Fhd0TMM i=SOʟ?T=@2x4Iz4d0 0@#&h 4d`4dbM1 &AiM0cHiOcS=I5&ڙO2M mC?T2fSFMG=FdeS O)HdԊ?JR)JR)JR)JR)JR(J_ 4@4f 4 
YGi՟m{{s9s9s9s9s9s9s9s9s9s9s9s9s9s9s9s9s9s9s` 4 {{{{{88889ܥt pʻ|U˿"bqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqy99999999999999999(*qo{!PS~$ @ū/aag'+/1nv%"M ԅړow i"@y\eo;BDtcv}Lӵ~q{r9|n> פDȿ* ]̒hfp,MHe9AF3HLܩ5eMZqd rF OhMu*&J)24Z[)j3q .'5:0L.Mwp}gkKXݦu7ӌ Qw.R> AB"9͛7L(pՂ₂(hH$""4RFc8uQ+Hh!&/b!qI!Q)t*a_. uϱE˭Y6 ;\6i@s28m2FA"q$ 'I3$IR) 4L( &Gq]b%u!,ɘ$,H=aGO&'9!ai5RshC'1*XŦjO 9Ba,QJaBv!T3q/ԛ0ɊAev Il|TXYI4~Qܖh=T^@4BʙԘD ,T`~LZ,F%P *S@l&FС iCYⲙ)u)Bq),?d'}c›g%@Y6baXPҗ' JƠ.;@nH+lHII2niK9jgl(X-TMh|#Xɨe "`iZD%\iL[K`Xn{BnOY3\dB ,))'zΥs9Y4ȑ*(Ro\2F+p{)5dqXLZJ](rIBRah_whI\A^ JGSXf 4 DQhpdeg-;^P5"L-?+@ȄP]"r65jχ<1/ݿvwҐ& $M"0Xc(jLsqI*k>QaZP*D ft)#)ؒJh4t<ҳ)ڤoS Y0'N#1Fp2.΀V Ġ QTnA4mHTIMAY Qd{ȶ(ud R꠷Y+9* P9sS B!ov@5馚*X)Gy8f'!ީ*Ȇ~R ~L"Q1ȋbÕ<CGsTCq]jtIJҫZXD:,( <12 846fB`ϯG)bI"T8Cu$Bјʹ]kޠtdԨUcba1Xo&jp&j/ys(WFB,E IK'XeHkzEmzPMcINHPQ`ȡ@9huQT 筴^c*3=X|#5z)WB8VL:X<6YD&F4!eʲ 07XjwZrXhI-2bj-)îH) <\VXLUr>ދMk\WQ1ޙanQ4 Y7olmkݲ{0uLp d$" 5 CB,P;!RgHMbsEʒw5#sCM.2ɤdr0XDeEDGWqr`ZuRVכZHRPʪ(Qy { F.Ƣ-2yXV ĒrTiIaNK@JYQzV2u2RaPMv iB tNRFz7 [>Rc59*8% mj՚kTeIu{,B%AERHM՜C'p0q4a M4ҀFIBS2HyVP(@)8dUtiBh)КF76F{T4ֳ5@$e)D'usM(/ScX/ZB E +`;®ÓA7 B‚bpE9A5cyHd@*pJqhm|<K6!'uY(C9R@i]᱋[3* -'[ NB.աN*7 BhJI42m$ra5(l:AJEp/%v59P42uNStӝnVgaR>(3 =<ڒf95ޜ槼DS|qvp]!.SӮ*m{Sy k)n/ aN9W@-#K :;q 8҂]W=([Lt\ϒX64ʵ O*ẃܸ(N`,uGJQ+XBhdT Ԙ%&C)RdVBY'Ie>UL@s$'lFF^ESɊllץfL* OibA (iAPğ|f:B#B ^,ZU7pRѬyf@ Zjye0!x'JkoeVb̖$ ITuV3V +:YTT`a8eP X߮xatZV7紸k*`(Ԩm%y`##57s<(VNt'J;IL3S߷s&V0Z) xi>iV$z'óO# S^DSK0V@p2C${*NrC)aLPI&YvaIT1ȼtI0 2|&E!&еop+5-&s#Z# lg =UBJq9 jIf"l˳i+Za*R̾7%^V9R:k3`0G59(Ćl3YJbwzot"2pb'H : R.GǝvY3޳B\o+Uia%aa [kޭaYrVAƥ*'DAuVJo7e^:*u&$K4[^,=GߢA(3V$ .Ac,2۱by&zKM&:h"gnvQ_ 0D.مi #M(̠| V#SSrNFioL+4 h B Ƒ tͥԹՈIS,U3 +JІFa$l;2\Cu|uP,bzVU>I⌁4QHxlÂ؍kW@3)WFh;VDtXRFߔ%IL-;sCe=KLA]lqEd5c8UߋRA ckR颍Y}E+<9 c E…4W)iaHX%&7?%zq-!XZXj+ Jj(uP'=SJ֓+ZJo)fLF$€A^7}@Vm\R˝2y.bȁ䦤MWD,c:%Af2׻ mK(P#oZ+bݣ`h+(.EZqMd$N帟|#@u8ǜde7ߡǟ2|zZ _$dNkjȗF@DŊMp0xYIrîMJmaYќr+LqOl*gW\ꮔqszRھ]Ig 7u%oZ&@$(yney IWHPpD FdS\be "[Sd:[& iVaNIC|cj N3[1e;50:!:'eyD*"n@=ZA![4,MnX{ VbQ[W(P 
HJ1Z:<°E֗ӼH\iQIWfN~e=ZQ$x|L&C)#֧ftR\bT,LAp3̬'px:%EMDgY<=ڊRhT@@MI%RS%dVM1{سZ^|~U|;VBT0@J tz0q$)K<9J{ ]ciKM4z еDFIO)E{Ef)N gc;2S'ІM?kY~Xi$OWomY_zj-LV2Uï2 8ŷlCXrf]Q)P2hȽ53h eG?3y|avDCg72O 'XնkK,/Ӷk)cuAyP`Tz֠c;PuZL!ZqHwqP (4$VOTڽ뵳{dIV]w&YyLN5bk֊Fn?=XVtOlsTZHأ<4ZCwS":T̄9U0" { NVy>|OBqt~:4NE}o7n28]_r$ʯRWco6 5ttEh2ݟ^oʽ Ѻx3&z}Bo캪wҵ9(}$hԧ bCΨU KY*O M9ՙBqf03NQHj 2,Ih:9bh}*mz 7 s )ͭ%&%a@)Jm5LGRfo8Ƞ6 ARKIjI2ѽR,sYP#De\/$ `+f<٥-C JZrNm4qD~Fvb[t:LBN WapT;(tA#y5,N0-l1)2եJڮ@@h8xqѱӘ n#ثEX֏EPʄA@DVCetM4md)[}ը9vvU 07^7 ~P&yHYK, @E-#27U~:$Ӆ{{OԸ}1wnl{/DZ^b6 po;%J`j=Ht)4B.2PZ*_o-\7z:G,\'8d~0>}:×[bmeçuf)0߭'qH,8tuH&IѸ2ɋMEF K|[ I=r'ZQtD~= 7rm3d2a߀VZ&2dU}> o||~l ]H d * )(/I׹ra0'v)"?JfDH < ir WKEWk6 ȴQ!X*sG')7Mm}L7םTl4"rvįkEm)am',Ai +{ʢױ-@E K:HQyIQ0wfNpg fb7cP>YcObK_+kN[Ƀde$!Ģa"C^_PLRx]w>qPPR5/V NA*9,aqo|,zg$)tRPe":͞d}בi#3:ׯ-BuF6P7hT6PpSpQN0ˇ xh^s!rq{SS>l|6g5t4^8$WVcD-b°>&}a]8(Jt[7Q=Z-#S0gRSF˶tV%|JZOk$~(f18pg: 8I_@ vZGt+uOP%TT6_aKhLG@sxxe~۩Cߌ7& |w7?mSߘk"hteRef[YҬF+i#}jT“->xj \+Q!#GW`Ebe\A}MRHF2xi^ihlmdb3}+M= +4euelK^3u\[E6V. }[(m5qoQ\D?$N[گXVb ߂|}Qtʑ {yڛ*kO|_S q=}_u 2^j3f 2S66i6D(*>'6ͬ=" tRK[~?3*CkfM9 pc"VPFʀj?r8bوֹ\/ص+h:iZdTxqaJ#\ Ӵ-h^&+V*ֽb@DUӓVj6Zuf `:kǨ6 J*R3(R\\3S4\Txr1 ݑ!d4n[qM "Yz-+!_]r'L(\?ljS<-NfwY .$8Sn*8ا,z([O U fTgbyЛU1㶌J>>>*j۔R;ݼV ! /ՅF:H^+ABs ;Se:(w9t`'Zh-&cd"GC~,sPyw6Xu`9^bFj%8[@˙|64Mk%D~e`{{Vh՘ 8ݞXsn*8xsw9W͌(j?ZhЀdUL1{6+Cm ibɊ@1s*R*9uj䌖9Z6 UY9M.ߦRbӯBfڇ^bN$Яyo# Qq vEtLih?Tj.U Rs8K7=F` ?=Αg[SPZ8ss%Kk2MPnW*sVDGHp5f|lE}sL[NXR{2*5& \PDǻ3i x=k}'$.ǜ?M'UWtU:d!qfsNe*i+kkm%Z/~7AwRŠ~vx'aiSl~' {Ү+ j&m:=iAi&S# Z Шr UVz"7 lTWdD辥m6qL2c):+Ej [+HH8Hr/PtOJ*qwI[ew*7rRSD8>jE:٪@'hdc0%r2ÍzcHi̿-">iwq12D(GexWfҹȑd(i\,St8"2(-5Z y&xŽ0{e4x0CĦb0vȼ˹HRΛ- :W$K [>:;M@HFYm3*NF6+/F@l}'s* Dh 5 Sc͇?ڷ|%| l{╺ta1RAwqւ^FTeo1'miLM;MŨ!prIRUj:bn'j%a03DZ[^Axf齲[x'{\x(_yJt~E/s$n. 
$}ClN/i)kONjBS 'yV.52EEBRO!`p4*'-->!qOH2ɫq6v-i?%O{ B"H'zCb-840PXK&--kSxU"?28K<.y|bO5R_aVU@ƴp {;"4cWSҨNe[NC_6iN2+.xkwGJ4FqcOYđU wQRRS۞$q ad}eh'ہ`e9ѝ61ߺ\.1$aQtAd 2 ş{5嶇%2LqjA]ɸJvqhVNU`ȌS.hnA`ijpV٘P$¨Z$REutGmA0㍁Bn.?PdCEaqC3x.4Hi9c(`hΞXGTAع4i:(QX._]]+n&f@alٺroLqIIK+:lFjB3L$+D04R(a3` (tG$xK ?FΜf&Jz`(]cvM>.=v+sNO]Ayg3/iޝ`v4wqnv]ݖSt,Ѯs?!Jv-1 zoc+uthc.C_ eX\ ;?d'4 T\8u̕k{vzi}MFQ(4Aͷcѫ]Xm*V&aسݐ:DSLA;!v!7^o35"C\ fAA+^BC:KVEJ9,8hTa֯& _K<7M$%`**Ğ@eﶯt$^ttP,Ԇ̣!" ߲ gܬʒ .cABȋ@YS26UO?Zl"{+,Pۻ *ib|hG3enP) X_ AKo=XǏr2-y; yolxGé۞5Rco8%k(Y ?kHl ,z0s4ۥdC`V~,y#]\21Kt;7Je33^\6t<=w\\+uO5&  }e%DFlGۦ{Qb̦vBQ'XO$EE1J$AThQ񒈫V-K>۞}zM>X}M+L]9ʭBx]ËM1馚d|eXk""4H՛-`tb6o,,XޠyTw$<z/wVH GzO|;Ug LyՔؠf"5eI\!(AM!Mɼ7S[ 1@J_aQ$xqPC ״_C]WK?uIyW+?P Qqh~Y[lzO2af F0wR %:`{_!HYC$RkWc>iDewV`ǠDZ! + sK{8J_ Um;r]3?uӆfOE"%`dҚqcBb֕{֨߷řaHSHԒ5(I FDW6*YNZO:Mx6YqkVxs6 ŁbGUL*B8lS JʪosMWweX /qGN8+DNޛ-U5dW]:ҙj8&! Kc3rvxsU|{( L3(A2$д̚rɞRR`sdɉ mcD2bWGWz.pRN ( 1jȰZ2Ud m O*wߡػe 'Vd6"@yS+2zӾJ4j ;ScX=hXt$K2xL 2=W^h-I]q ^P/} ٻ4vu^щ1F ٱ5D@4RyYIHGUP}˵,ߜ'Koדٺ(t*)܇- a,4SR领֡ V>>M}^ XZM)ɸ4괇l&FeFT}c:ꥴ'F h>Ex AA2X'ԞR@5 `(ڵ6-EOf5dU̺DVUR,D "0t|+$ D)n+.J3S}猪Ŝv.RZFУGJV:Z,_[PaC91+^C)I!d)4R;N_?5>ipv'`eS.t;.ijoXsNV C@#Cl O{W|rՁ5dKeՌVA7=A{=?^JZֹ1[ǞJ]χvKS Η .7 8ift!2~j-ODҚAaG9؞Ј:lm" %lW6X9J) H#F=R&s`hǴ DŜk}HHw 4ČQC$l3֩B+3E:Cf'慹S/D[&m(tSW8-ԴT)>?B cwk%3 0hY ' {lTQRnvUz'_k/Ռ>7|jTï,+G}Ch r^Ik=DCo#FI3#9pdžH3#-FN7QFs^,RmY%XAon7u"z*ڣJ|d,Nhjp  5a.lVK,L~T10b#A7|ۺ HT!]CNrIoz޻uuXx=o* ,]D\b)Dz' r&o}2kl%!D;R[mFcnO,%J9Sk A81HxRٱm9 m K (kHln3}  /4(Accr0#3>A[@+~fBO##1ِPMeAq-SUdKnD> =@mO?p6@<+ű8\zjXPF! 
Mp>(U57ygx{gٵjسӋdefklv,ޚ68װe{VƓg塳¯~S)5[gnAˎ ~S+|0>@NV }9dN  {^5[u@Zۊ=s?x8"%%^5ũLU&eKLJt(ЧpĹ_e =-KC4YTTXˆ-7uRĪ=<.N&Z9|5^YNK+oFs43"f9nqOb [ M;iơ4`"嬣HMGMLC{ͥBxGJ0l)A6TmfR3>JvjR=-+uv+\S4,j=+ eV҈ZuЖΑIupd(u$"DAal-zM6%Kޮ hD'fbVkD0%;0Yx̂p@&m־bpá\HOlAE?X(#,CÝbI2 #;j[ l\L :4 *t"Y-bf"B ;Z<i֜~:;).zePRU zFﮆv3骱I0qdeEH=E8ozGDM@T UW2 Wxbl>!= 6&ſ) mc43P:pU;ji;ddg7d}x} YQ\KL)v!zR44sZmT N |fF>d#qx  5%WrɣB?# =ͩ͐'B v>}X(b=L ^5FZxv w Mj!k~yPċu^h$Q`z&f+fMMXDD+ Nw@]e͖즽6V*d iu>>ɜI}ti4Kv+J73:M1 l73nԺ-EƥPeH dP>DBDjn C:Oz>m]^_0?P8{1O*UUws+)йfJuB%k G7!o4lfcQ,NZ`}2DI >_tI *lp* 'Ψ((QD [ ud9&PʔU Ȭ AǙ}L(VmD+Q3 UF@ξnDj_ #N"Jl](iXpA <$F@AK Rvq 5)h={*`n;vDң+^>ҝ֧1؋lrK!W)'OUˉ]-kLbuMg Ca?pTR[xwB:􍲧fkYŎ߁αsEYfdߚ{$997\}p~{4WY5a`XUkptLط4 =qk QiT1cT*H,!L'R|zKtߔKOLOwv=DseX'N0^a#,+,,:<}O';HMD$̵6aA b` ƒs52Y @170˼ie3mFμ={xchW40gw1 6B 4e-▩%$k\,7l ^?# }! 5\ ҍH$B>Euc»yӚ?V-+qZB\ª *:UWz\S%S g:M҈EnpI͕Jb⛂63ϟ3}|,Xdz`'FZ-l Q*V 5B㭹X_oe,!~il)AGE3k[ۼiͣNph-=~GNk\>g?I1eT[ `Zh{XK@e3#RbXأӰ](‹ 1_eGimB&>V Ъ6h[Mq1;M0_? "Lȯf[5l#5Ј&^F!8E W 5c q;fZg2P1W֫' =fd>K3;EMci>U*u,mTij`va*X1_uֵcWёӓϿiQΞRL]R%KtMb\vk 3>ē wZq)};&~bGffZfj=Fynb67&/E ;^;15"*Ts '0Ɣ躡>tD-Qz>DxhȼN}Nzb5g S.H<,›vGvNTax4d3f$%}[;*7@x}[o9Db0pfH'SJ.S䄙ԁn,mڽTk+>֍t `PL4{挊_ǺtSԮ;|3n[MsG羽.c3H6VK}F[-oxI[lbh<.ճ3q-^N7 H4 SpHfE+udzK <['M942žMOήpν| dR7fN$&2Q6GzQoKu\ͪpҘ@*1BJmD*[@_uW:Bf -c0]{R8 TAp3ŜAm"L 8Ink3<-TE*JǚUXXU=7StB(## 2:63byh]sb͞61GDU 0* FU;)qz%D1t풃c+r}#E^L"؎xF{-5. 
d֗jF0F" 8o<{0!ŦFFNN)|1]E8 y^Ṷô2-dNZ?̃6z9eۡAۦnCaVJ@fp=ujDm5;b~qeG(їۉmd[x7e)WTxݩ 9^0](ߪJ<ʒđ|Ŵ MD?Hb|NnL}g\`D5y\vb\V*%ix VQyI9ٰB`} x&֌2@':l]m!Ej8}֧jZ` C5[ٻ"hҭ'440}4Bw3cРyo,v~ףy:ݬͫ9'qvhxkhhLt(7DQsT-g pӗ0<õs̰^E;VB FϹPCd&lvD x]D/p7VHgB۬g?]qt980m nCD{>=4j./7^2m 4gaDfGb'y$&ӮqJz2rboT\׮e7W'+vʐ_[ut4()l!M[e͆eaWD<q/hy`D0$Rze>WzNWT<?o "xoKUZ 8Wr&e7Q-AH=7k-U!xkLN#``X7OfP 6%߇N2zU%JJ2/IK0}i*)wYZmBCA~Uu˜ٺ?;f4FQz<꺚#~7lBl%|-5P {^ * aSYköR)wPaᢓ6m7c]gTCgw4='qø~m7jc3ՆE7Eo OL_m!dEnN<G'GδP$/)m-f8n(3y@42Cb Ãi&Xn 3C h0P%[X]o #/yؒy~qk|2iwZk6U@d}'\5z:e:,2sFC;‚Sa/~LݠJKǐLmҌ?> ;G}WG;=#PhA}WpDŽ-)]$OX6ٳ6ZtY ]$8&R}K!]ƃbTkk^c_ [Q2in1 'RMl*6z` ěCM^^bN\AԙOOc$7Y ||| A rU3Fms*r.l2b g$q )Dρюfm$>9"Ͷ(w`b`=> /s^TTxaپs:'!{EѪdw@EݕMbhTC߲H.iUs2ev=u{n` JE099^xDрEßve[ŐށxƊ1z=A4n S?k nE*G^dMㆮZ)`'ˬ-@,|rvPÌZAj)},Ff,wP/L)w,75`)k-KL0YىpPQ$xyF+٬!ȫ8ɥ49%lM *wd !䳉Ma19GmhKjq>\]LJKNHl*f oSē6Y@9[3ttoRyqG5= dzWj])c !+uH<Ȯ)Gt1(l#(z?Hٌ2 Қ@891 emPA58[ &'AnXt4ä ^x-1V QX34NƁUt `Ǐ@p4SPWs]Vhk(@|UtY70V̵7>X"IWTk78xE# Y@%hýNCW l)r<>z;Ti V{IµK!wڬbT00t=zp K˷^I͙dD7E5:KYëk+%31"/pw0ƴYovV^.-O!Su(cL5x6j@-@(!`tg ps+CңcJ0D|kH8ڪs7[l]U5J&:2E}tQ.c;jaC$ / $FJc;9BycܕۚW Hzw- @KpjJtߙ t^2T!nu|q8/r<ROO%WE<ͧ\c0=MzhAlx (P](9o2YRDOcQZ\RJI:Fl3Ōy9p<9_U^5Z9yu|JaT ;TADfδ a0V3r-RnB%AѶ]?U "eHnzX'ffUnb(*qe.iU@ t&fjqY)a,ZMD=Mg\tQ#'I%N_^◥Rp?IwU;Ut9 #9hiY7$!e;^I|2PyWpk9 $^79S$U)HW]lk4ў\55(ʇ%I0J\!.Ȃ30֋3Ib/5 npO6P&*Qp若tx%}>AuY*sl$mF[CC:OaGj6Ig1.9g?Ae~VMmt@PT]fJ s:I);Y S+wїe"WGC@{q/K9:aY4-qn9!! ]/T ̥O3WG],ݽ:9GaydXZ"CCn3p5f0\)t'~=T@j"0&$PUj.H9% >pɌ5$~LԳ׆etxOie94֩ݕHL棒 q7 "n\޳@ӖUեc lkeʤhUʇb'GNp0+8G-#puӾD1:! 
(ij뗪>&~8AX=}-m^ Λ.Ґd梴.e546-b1ɂGZ7=LrrXRW2orľNI"5Mcgy)g}+ 12i[07׸M>Y4UCїc =XIvB%vƆ-*Pn9э+-ax˗ěM^W# P-L47|3^PdKh 4j&d/X*x.a֔N8#y8ODZ i_T$g](vzߦK4;%(&*"t$`ƷXEsE84T,!}qpQN.eY~!DCbU8R}_!{~siH=(.c>yAרT<]HQ\eԇ R=}G@3ssw]2#A2 ]~o~ e/>D/3ߔpu ID(PEdjC!i&{$aݵieu2%ǁM5>@`' '{(a24W:Ϋ:@{zZv8ٷؑ}b R 2 z>k [αa0Kr2ymNs xU\+_n(Psfa`fv煠ib!2PlI 暛ަy]~ጛ҄SXYbHz J1O~9r4-\Sh@[290jS[jک`vrri E IgCj `)OS;xtOVɲHy2z)";6'c29ٜT (HB^qA+A푒A0~=_D7I^Y*s?w,*L~[{_~ɹRy]clt}!,sct)mA )%@KRYSHGҥDֵցu]z=bb[y)!i?}"H Ca[uU> Nu"2AyF5wCPoZ_z-\ 34#ĈIAЦ@Ry)apBi8Z{S@p`AJX7ҾM`*w>M衲(&Y&+`>=ls8e2ZUZW&QaSrBp*ޜ_E-aS PȌd5CN@޴Gp/ܓV\NW^_c&.j65Z9 ?_Q*`yʟ h6 2R=3SCC":vapAS>>P{Y>!j$۴( 'Β=} :7%|~}č-S./jB>}~QVA\5+I kw<>LZ(Cv쑧Aܚ q  qmaDgVM CB l ɮeJdma)Î}u"MГ@S$^ N7k;1 xgA,tdĚ"ڋF97tQV,jiCw0Б!ϰS 2tN[]EO+7){XfWpsy-OGϔq fg,B MU߶h-qƦtTЋP"_Y@2/^F[Jo\(f$ځ,ie81d%BJ"aymf˳)y'=n29EŒGݙwC4M mS-7C-/ IvgikOM7үÝx]mNA|94) ?5OKlj8% ^Gn!iٶǠ3&fC[M{GoVԥQǨV%d&19YI8սz' 8,hSuGHkF:i<5@E f[PѦ~ݧ(mޗb<{!-$AIt D2ÐO5#@;n*M i횓h.~bMnE*6rΰ\pL`H3*~Pl݄5Y]ujeJvO4iwP~$ܫY6C2rzG(M`vd#}l%0>DJU 2.LQ!,~5r?:qqֺ̱ km/z/.xܔp1$,ZDag  We)skTVOYB_^ E"N,ZX䕏(]Ko7R$yJf/ɂ CV&^)?'7g;m1-DT O0S,Q'N)d{92 ia6EO>ˉtA蠙dZ l. 
)(~9ƄK)!v1'tZ]L7×" J*C,RAbv1,K`!}* F "V |l.olMľ(p)b2a1 d:WR=imWT":2Jff>qJRvEuWD?y.ս>V;_OdL`v" }3/Tx .}W$=&Hh_:׫ik&7/*Gea.(wKWD·0]&7B^еhH fbSp#,4ƾyO'FnD1p~K'rNj) 4P"0A ށ'Y5dh7jVAknV$mVROAbηEsI7!uGŀs{Utn$}q 8F*O etvsARzҰ @?)<#䡢EpBxdʒjQ38w) ]`7,>!yjd ]khC&6.VWBGn CsLaeր߅2q[Z;P3M8;SՊMaH;5=jLA6g9hUP7J 29X`[eK$nF _v󃤇0i@7@0-+[.2TY 2q(\Do53:WjO92L6#-<4%P dX]F`!4.%|D'uB?n#n#3P+Y,bA{ѷ-V[h{-ױ)E}f1B #q 8W VK @[֏o ;|3ckuʯ_DŚ`YBücenYyyXp~e*9ۧIp8V;(HrGtzrQ5;Z>bͤF~]SHW_y^X=fVYfrc%84S1AԾ4@$dBtert(zN1ªLjwQf)0϶Z Q1Y]c2|QW̏ʭ'H\6BN99̒WHKFoG,HG4 I ݑewWcX50 szJڦ#1b!SBA|׸@Y6{)02~1ssgyF 6^&* I3b v |ynn9< J$Qs2tN []VeTT4ACف)9҅(T`@'(NG}8`T*(ˊ뮨eR20W;;K@j TeZLǪ uX\ؽJKxJyJ'~ӴF%I45S~Dc"DE0 8!á69j[#cs8 PL&OCTQɓ-^s޿:9r٬H $8MZA.[&?wtW1mÎ!ĥ SC-uՄaAӷg^QI7j&Wf ] hM—@䘅u&J`"hmԟ&<ID5!/Q H=gQ'yA1JHc;1ƥ>5omwE5qq3ECBȚAz.5\5ZDǦp^H Z>9UE8R4u!az+MnD&.+k]kY<MЗqBJ+\y@u|U~>$GtpmhwsZdwax:H'3`gK:VO~w󓪅 )8,UVggX>J%1Sa% K&q9zn4yߕF>/v x珒wဵ&ۮ=kPh! -~ 3h?_Ft#pkp <ŷ) j}91t!㦑!ϠN bon얺w4Jy8>R&avr GQ~##d$/Ktzl@eIcPhT!ח` ;Zs^{g)B)^,dw'Ȱ!QE>1GDjjFSk[|3s=+3f~wa%,9bnFNqz/; $x!)*#Rr$?) <Ǵ Tȵ  A&[h=z1"Gn^UTMIl$L1#=M1Ih~"-՚V4X(߶$4"ڣx]TV^ Ұ_J`#¸F3tk;v*3iV%&9N$(͏\  Yi&>vN̂i~C ]ZlfcE%d$|$gV VOYjt9/M,"A#UQ4r4,lI-9 ZKD^<^[vOP=,V1aFNzDBN/r[*-G=In/~r6nq--=kBgx5gȨ:rr%R&X,.! 6WL9)Naew7qP*R7X<@:@gb_/Ԅ/jܵtY1aĒC.Eift 9{ԣu@V=KfDͤ -0=.AM*Ey~fR/Ѥç+8g~]Oy(B+;À1Lѧ;"-L|*Ċi=\"cYи~>.rzGW.gJywU&4e[&+i]+8Rl( 6:A"Q+;=+< ]@ta=~3(0~p:5 exWQB3_czl甓jxBjKh< az1'sdč="ɂasmވ5hLp{-2uWU3R(1 E :*1U-hA;PUl[w2\Ѱ3UdzOჹ3/x>cH$L rϮj0aI'\ \58Z)IUx.QK( w6I=yQD@ #`{N} G:eqUlWH}pM Ьs#Tx! 
g=CO8|iSDBTF.rԜ =G:5dE}ì7rxEwC, @9T.', kqw' ^tHhg2~:S4߁FFDfM>QQwmlBb%!*,l9Pe˼ D8t ތU}k-;#?k]7S(^& LΫ5 IDtnܘ35egM?nћP-BV Ƞo$:4=")mGuċHdC^Z _4Ϙ]Ozp_⟈rي_~֪{"C!ھ e0=j3bٹܻ<]]JՔ19v6hk~t[muϩi-NJ72KE<mkp:βUqƖH\hwږCϹ&Dh1Z'+oF,/0]rȬNH y6gNYBth93X}L` cIz(M.E< 6T3c᜵rZ)`_!=|f48R u(묟W/14%:H3%g=KJGFx0  S8=cRSP3ot۸HbeqN#ߨElBݸ\b ̽tWSR569u^AWst=8tXZFcӘͲX*T.zmuYOBY0'IS}/YB ~ڟ"QεM@zsl7پ}( u5@TRK߬ [/r5گJ*/YjΘYjM.v5O)!5D*OeL9X7켹"Vc7SjsH8@5k>Fx"6eDi{\:8YAenPP͛o KUu=':bc>c{o)WIv%ж)~q~pΏ<0I"-gM<ݏ!&֒@BE@ T]ej9ZU.!kQ .>H醅179_:BmQԔP]I<}hRGzzh͚(k}w!ĥS¤*z mm(s٭b Sȣ,L3yGQm#=g٢'Qkץ-6-w\ykrJ38qتCTħm~f,mq7oV>;DVitC(NdF2dZ;~!-]C#Х+YFv6 |t.c۞7LG`? R2z#Ŋ%u7:-;|3Qͱf NK&~u#6`BSݳ;1wMׂɹPǣKNTg]16*o,6T>Y9zQyV8cE9Eco&F$g4JZ\xl>ǘ20)>uOn aޚD\Ţlkٶ6[JHH |MC}R|[F<"(NL<1nVf0mg)[\$|C O5D0aeQzTcw8@.g< ٶrv(0SeMyuLoܞ)f'$s1 L@Ͱ="D!,-uMVV?}oco(M:pKQfLB?W4z/, U5ߣ Rb~HoL1Efh]tr7Bb7c]_ Tཅ` u,ڙRC-Iұ  6ׁC7 ՊqE rAOH)Z,c<y K=&iCPͶ<уFv R$kF4SfdYUsU cVs=mנߣSV}iƻ>˰w$wblfvB9KDW8<}OQtmyM:] HQ}^}Au\1U& ` EOfDyRCJx)HsWR@n=ToRΚF-x`I;,eqTMvHI=T3{c $e"OWB=؎V}ڴ0G˂_>JIr iDO=n(;/Z,ڞ1?mWW_Ւ^U\v'"P &/q3*5$o`ZiZYH ,9x}\,1إ="r\:LFbXdPF-C>IA[Iʴa<xߤG&17[i`{0*FXOYЉE /E9\*PAcR0۹ZiG E_RygDھo%T [J٤| 4N!{dQ@v⛾KU#A3:m°&\6V vG ksgX . 
wwef{GD§U&tbΗ]uA(p,Ej2ik]:2IW7t_&nD+m7x:%o^y ÔW g,Q|erya^> Dh7sijW2VVf'h-WZs!#x~B7ER #!:S S >^!9f"S.g.[8v_|$ΣkU:ig!đ+ 2w5%@iˠK'F9׌DŽ ]x4.l_%R:%uA= ^IjMևFM C[Z+7 ;J3;^US.ۜ/E(RvHmc-2ҫW6Duz0K` SlA]Y ]ٻSk->e{4rzL=D5v IPxdˀf5$^#ګ yfm(¦ ա-)XsTˍ[,y;jqcSӓAXQHLb@%&qؽr>0^ _kUGȽ&<_`~&mRIkHb ,6=ZMb3Dt 8$Qs0uRQa⏫HL0E.ڹ(osEɶGeIHaOڤܪV NDu еDg SX)33 ]㰜wrZD$X}@``_U'Bu!$5p`B,Y^W ͹] xyml>xdjcC6G3 F7aD>= +OSĵ7l?:<$h3!JbݮI@ ΒQS@vH(6fQA,bZQߙi %7-`90#Ӌo~$'# `lsl0q@v3 <1`Z&?$?d}s;$nf)&zR܀ ME:?bFYac?.7G G~Qt؏+?Ut?9x̭pHXF2x(F*ԿA>awe2-4itA-'pHu N)Lnr[/ah X.l`pKrKg@ 9v%!!:(,3"H8F W6WKy ¨%KA~cӔ,p"N5F'd2alt-/H6qc$lTTW*=q%&H·$H:*xx.~.$ 99,lYAE_=2_GBH$nqs_B$I$,J#`:3KiU^9@ȯbG8wそ̷q~O@~՚[]l9lh@i>+Nɬ¾e8TKR6`DER07 M/R]6H\xBNet|8SMBl5I z#9FJ?;N0A0nE%Jދa\ 9lBJy  X޼sDy?orw:ފ PEXCV)H壻3^f]Q x 1o͞L3![:orb^2nSSlNƐ`sg*c1(4Xb 3XV;J3,B$&kU gvJu-hLEQ8ܿ$7|4Ғ٥pZ߇(E'$ʠJ{˷ R 8Б}ǢOf%kX#􊇔 ^1܀e3kp -&^+.tdݩϦ}d.ZbPl\=LH8B𜀝܂B `#N }/9n-Ǹ~Hm<94LY vvA ڵ.%.$ h"ࢣ"#mgcQ ,a9^<@!A dKr%N.`5"޹( 0PG “A!ooN*'-&`;I6|o{PA3aa!pf`۠/_x/+CTҠb=;RBj{^υiW(\=mRq9R^xUb4\׊[CD x4J%N>#j P a ]V\%Z-8)qvk(XԬvWވ: 3w[1x觤wEˇV_+EBXv*% KEH3yk~Di2XcuYnT8XG ϓrJ;Uۥ;~ڠ$C#0Qxt7 O%pV攝b'f͒ JI|b}q_*3ܵW"|KYܑM־ª+ brPUٲ)2x >bI`~b' ]uM*.f[ſ{_l1CW,K>HѺem}s~,q5$,M#f "4d"ڴSHOY#ucP*A{*}^upA/ : lD d:kc[R wc~$LlMO0bFb bJ}`$*ܺ0]}m@ؚ :v0U5cQ( Q}p@tj'­ih,8SwE#4P-y0P]e?fpd[y8}P <>]# P+ :̈$iE !ꍋLV4L]t*jj͸![KX2vmCdC;wje As٭d3CꚲvQw k+AЦǣ4; ņ>2v]NBEToz~'FlLU22 ~Xt DZ(OS(k6k(J_Kv.λ,U#DMx$r3:+A3Xþyqu=e$쪈{G`|4BL=+D|m5m^nbb3ro4'{떼hU%离˚hs~*.zo48-')A&zGl! +vI-T2<{EѤϦ-|{}5LU5⡐w Q_w<G$"d B\FWPC%51` ltHTPϷ:]kM^<4^Hs>߈fB l4zD&Í;n ף5Peͩ@ ?hU޼S7cըAvU]7W-[a4LYq@(b !񔶊~dP ԫZVN  xȵ$^! &wdrBS'AZ/A%r1y/6B|TgrúK;ڜ+ lҲetEU?,ضQk""ݴ8X}fza%ET)Yݙ%a +ZYѦ/dF:z[}dAK';p4I9Z@M;॒eզxB0wЉ02rj0̠a;-N? ѿO>Q͝`+%r k9JݧJawYE\MUR !k$/tpX'ؒת2`S z1{`和|w,jQI#98(zY)9̴w(C0}dkb+CX$ B`]|PwPB%jߊޚNs`h$j*A=\t< KuQG. 
*We^ӵ%ztD;|˿Zkr 8"^+Uf{OaiAp?F9P5nյ'D%v?й䷕<7nf8У >^/!=Zjވ|:&dNh ҄{RCwRb$v߆ T>񉊷z@u|Wh9vbmz֒jg|pI@cM *IRE- o.B^㴓"3L8dzAaED⎒B*5N :|Ygg8GVF'5ӒL}a #d?\ӅJ1\}bB(zΓ1 c/jzԔߌA`Ab׾5KDK N2Uؽ6`/j)^rol`Z-j{q+G^[-t0ߙD9S@J l(k;*a}AS*āJyz$bIncOBw׶ Ewi#„*q6nUAYue-8 PƵe -sQ*L&EQĖjE| =Q$'Wɔ ##^).UT*r4=ќ<`'5-5x0G!%jU w8Z<ƚ:I$:ہ3iQhB=NN !@˂)ޏY']+cxELWHn`3+FՀuMDIK)"ٜʲ"MFOU`5)e?׍+4"XMAep;+bH@Wwʲ61֭][|̟P@n' ?Uo#-Nhg}q/OL"6\ %8-TgtDr;zg#<\t $":9JM,:jb4ܣaSp-mLMyVN*vjR{QVcalT3w=xl YŨV=Ave7n̑Gp-N@!??7(;m1w+QT|hqT:M9G#8:eHHxeg >4TR#$E4>#m͠G>^2vY<;b=/o9uApzMKA(L'T׭GKk| 0!G{IN6,j~$y$<2ˍj O2/U&QnE~Bljo\hK aw. ڿ;Nhi2Wdvx{R߉O d. zaQ'٢',6O~f&(!ܫ{JvQ~5'"#kWijh]Hyac]TUa;"dK187(&svO47RqseA f>0vrIqay}(q!'F`<SϞ&E#{T ds*Y: nǍE}V+@_d/FcimTAQ5ƧTw_֔+fHxŧ:J991ƛ^ɔQ@R}IM̫X[}8 9-l0OAZq8tnUbonXɫ]i%cѶs9a]r$d?P|('@)}B[|Hݶ%Y!;ѫ֛)c{9iߏ0絸y-)huە[<\TnH #w9حEŔFVMGS4ڏ<Ǣ̭DaUc '!TP'&9uiw|ow&oyn7;CsJw\(өn*DK1BSlGH(@܉Ӑp.*{G+%2-VBTbe뫳ͺMT%Y5m/ 6GoR=% [3+s0wX #;B v>J-GSp<," ^fPnNZG@3/{c_[ Y$;`5 rKf Q"Z% f1)4[uYgjEk^+ ܽ{;'MISH>vvgP=yWOEPNt:W2h&ζV$zkF~P-xH;MxIQ֒[L(=?WJ[a ]&|L LnF@Y QmL +mWA 5:6{5T|jN#1pue+Ot-sLZH&zxH0~=jd ;2 2JiӶwT:qViZ`,˖r~;~-A @Bz#Kuͨ  4j2ZDAt~~?*5$> Z~ g4 L/BN꺁|9r@L;HH`a$`qČz@[0Z`^{~vC8cVg,9#3]YiVMvF Aa 054SФ*b[4Y@}s2̽5I*d eX3=~Zcu^:w >5?z",2`Hg5w* RdF]VE\ .B(Bpqy|[DEٮͤRToC&eZPt|Ff:hnb4{m"%4u3j]i=(꧷;^Mk(jmIԵv&|t\p}dxHjhٮDm6{/\\Ԃ&L'D/iMXWx ; ׄMrM1tTe*ʸ:16bZph#D/K,>CDgaq?@i[0U n"ʤ 7mݫ> F,n|j_hp|u]~`qxQWuA sRגB"\ɧR5"40yb idSq4\s@NjʸF*:B!q1 X\P)&>-H#(P 1W&6y7g;Q$v#kw:"8@4D_4{{+wGGcֶehk'eQAldln 7bzeL:npC w6ż Tt~,f8c (ё:#Cvb+Jٓe$G̻8q2R1 @h?>TPuHPxQJa}a-¨QHqX]0x:kVh`hdTHTHxxbK#)EȎ Z0m ƸUp19̳)ƽvU/Z̎mpx1+O"0t`+ >2Eؙfb١QFmCCQ9?5 F%ږ iY1eH%jV:X&vfGucXBn+I1$`{nCx.qTL`f?"B0SmqgK,w\Bd6HJ 5Fk?B槄{W۸møNIs92P-,ٌ%D1.;2N5 {KZίJ(Pt.fSҾ*;M `4i9P@6Rc+N)`WEczareWeKsa$gu;޻ ZX^\B$|U m"@ 0a1TpqJDe3l,MA#fR'thZnh*qJ@~-8SO rt'Iyf }RHDzȶjq[jZ;V,&rr BL~n6ƫh] |6_9ME.ĖMB4^n؉6ٗ-u HZ6j {U?uڳ.+F"ᢣ1_݂k,"q0落ɻӳVYUP=CX ul|aK`z.@J ƺg±RP%3wkVĘ[Ḡ*ז R"Bs1`39$'ѕ8[ɯ_-yS0w4wwnX#J8f!Kjo}V0zYE5acaô@Q9mP }mGFWR4b:Spo[M42 
6(2`|*k508jNr*Ml)BN֑P"Q"Yr4yina~OC:G?<!hٯ"on9֦%:?T ,VP/iwU,bVǛllrtT[6"qmj9Q'P {H>,P0Alq1ʠ>MCz Æ7%H;_~T3iձ?z.C,v ʞXM*}5Cq KZ1/28Jy춻AOi`uXCnj+ݪ6TB/ɹ~.~!S7<.^ *Jl *|FutrPa!ӁT)Y / sR8nD.%UEpL-J&&QCl!WH2䉓WB'UQuZSA l}zm]gj\aD ulp9FfN"NMZfY$)u^8}nC۠&Mpk! 7&ϊdf AgJf\y-_\%-r~5Oű.Gx,J}Ԛz})/5ZTD}8'^]4+ۯ[Vt^B8.Ast%g p@2l@!_z=[ [Z=:z)x)( nQ늘/WwxQ@,2[[jO7#ԏ'ZDG9U Ђ$4A;۔o}< 0/t>Hb9l^_nN&|-bP`*h]k4N w]lTD96^ da"Ԧ>kQ Pk4B ЌBt Cks6[VΤb:8]zF\9X(@61Ylo9&^/U}Z0 &o_y+LWaLN(B*+jXT P𺳯{ x1ڗU8`ƹࠔR:D5G=Q @Izx-3!N洗 (z'%&/iƅ"BG[d_xVT*Nos=} ڏ`J/NC y(ļ'ᑮ  L$EP0f Cp>)NE ,v8CYO>8ۘGblvrl)Q:3H3)H;t?"&=ژ9,ep Dm/72>WJ66sO#D'e|+^&]D T4]z;>GJ5hu@Mra4މ5ʐs[GAXY*anl0(rDRʧZW%"W}FR`02Yѩ>>kOvL׾_J`r,{.<3p_k_97$l5vF[a BݭHҐmЯ5< +PR3}ySRpcUH4 S3pKL 1ܱ9>xTM)*1sl@&󴭰W**[b$? [F JwʩotLbD`MBє ėGJsYB@kyX fo:$'Dؔ'ֺP+UM{& HhTRA' zS;cB${:CmC. Kqu%%~Ήo?JC!wwufm >ǎJ>{aVa5jȆICY/WҾ qQ .J\BΎpy!h *34?cq;kBUB-79"lY4aq4GsQ$Em"ps6ה{{ _8 j}ePčCvb"sn HyIjqa :ͳmswŻd<^~ޘ aFl=ҡ΢y8ط,5#ф]]ٹ _E0+"W 5G2#oJX54l'*ݘ5BBLPMZYT5 >2k;@$@MnFT-zAkK4pqr:?abw˶))sX %1 LbD=hPV-DF;V\U *;sw(rr3PuJ rasW-HنသîHq[n I32`tyTha1S,jk6o1U]8!7sA1̬,3$DJ 0'6X9X%L%}c ؼ/˃Qyڇ /fgXzw%6f\E)t57c u5wi"zX&`vRJuSK5<]Q5h7]h1B5rڨ(m/KK'OWUC̸QN۬mtthmoٚSp'E({U0eՄ^ .ᰪ;'c=z|k[pP,[.ޗ{w0eV"IN^h}mۖ3Nn>߅M- lCs#:;4ҫn.Zx@._^]^?bU$Xڀ'{Kvkx#PG_qqm2P Wv&.heg^=fMMW㶁JjKzsY7ձ)oeh jBFV:R RSZ(<sGiv3 ҏSn&`wxLډo8ء!+1bΫ cscMZEsG# 'u-b5K6rs9CLvĜGɞm-#]£WUl`iu=<@^̏;l-湶*QuH, f&󊛨Ck[O> ' S!\*C_φȈXQT_00dwqJ ~L{N3-xH̡k*m 1N[l§!m$JT?ayWAzmo_nvLtz]+ϦwTedA$n(6uoNH0 :`Hƥ[fWySqk/YQUn g;8=;LJ` R¡4nsSW jlQ}ܕTD2*}sY]/qS";VXfx`QsvL)2(:ON\%9{qchݜsPl]xqG@E5 % WGR"X_( ^p䌛Δ-v+p^Ri,naIEe`0Aؠ Hwh=lW,,Nw Pz/mi PpYWNx4i OaR^n6 ;"') o\"$w?/ SjAmc>/Fs65O26R Gw^YY9bьzQٳHBYY5FÁ^! 
d=ڪ+)т\=E,ň+Bc'ҧ.-I #)E=5uY# ( du)& r`JVd~8YhUpY^3\a[9j$45Q<\]qn0s"ib-`}0^1bv7Q;7|틯cv|D!&ըELRZ1/*&z8ۣ>+ŖnOd:sjm*"r1аoUhGj3o*c>alIV͔ܥ8 ]uM>d'kl(dY@GB(L3`qa"wM/L;`?r%v-uU?8_B G*5sl,OKO>ZUX!Qp9ˋz8gЏڑS']OjNH;u :쭋E+.aJK=YٳB5]Q,}5xv03!jLyO;6o3tyc/a ѐ+j3į[l0>J8Y -6[clϖ"JPO>g2VT!B't3ꍠ:f 4RY P3bw@S}3[14GYq%Vetp>U8yVWm BJmi6v[8S۷, ϥp (!l' 4"I{ e ޥ iNׄe %NxP(s"6BgnWǭYntU0+JkJ8{ 88) F!I%: '+CPG8M 9cPpJ-=QzYtlMog]fVUI$#dIPV5P ֋9UM-%'XKʡ2ڞdm@&ƶ[ (]59==5CzJ"V?%BJead4=*k6(^9'D2kuǛ<-v3,qHQ*TԺ(w,u3H4H aIu'&=\ʅW$_I2:JF~es< nNi;ѕ_si@4) 2[m]N>3[ $=TVSS\ p:bgnQppxhG:.͓Xm$6ߐU]֛[< I4|β>GeVߨݶq!:)W;1g=n}1DX":TvFq S!9 3d9aL2-8#{)j~H1"@2&9p7gfM9ֹ#zpl868"[P.IQ?me۫pT INk='Q{IPw"vQb )ŨTSqylv4rx,ypS1Kic&Llߙ7ÎꞰ"q_CX?݂!c>Nsf t| UūܘR*Œ95NJ @U"v2yWʺB]nCzv|GLd'.Nm:Fp΀=}mJk+챋ERbWyJrNv_*Q<oPx(H@a[ ^0uy1uN?8 mt>kz/+]IE`BH&N)9ݭDj#k3`R-TEu>ܒ7U~,_ǫB tUgl΍&ƻCf}󺮁D'܎C0CL/l !} HF7)EkPa-inAk]2㆚AZСS05=&1$DE-՘ ƄcǍ.( **X6g,Rӵ7WRYQ`U1VcdVo`:j<:c~7;g( r/HX|G2Eɻ c7 7>5trYFM-e°֔xSA ~β Sn%^ᡞ ؄fhN=S,ZP估M@Y}Y+y!OI^ơ2m"z夥(2L8@g3:W"q8n;:G;SVBi8 R1#WRtՏ虏19=Cfܕst?Yw"J (P/Rat8$1&X*ry(v Ŵ䱝L>W( %~2ҍS^V'\L#z%9=LM(A޺-T@#=8Zq',4".&,s5#I~hзU>H)oWXzkQ Wdz0Άpa' Ӈunpj}LK[t&:!(8T%71Jrj:.+3ZBVmORQ9rLΐ>\ݓfX30 jfSҾ-ر^I02MxҙXq!ƵG>&(c}H?E4la paށjُV. 4br8҂$.B՝B(?INJPmhmzlC#J]x$h?xamL`FH0r͛c@b$r} R0J^{Oy#69tTq%ID^\>oXI5G:LKh3a>z2Z)Ɍ:sЏWVyr ? spZ&B FwhJ},09lzPv~qɋ +stbwTrk $z4&Ss=̈k S"N;>&.:15ŧqOL}"2˛^Nā><U'9 "VN̏ 5 b#yZ*L,E3KqEvCIAϢg0]@EzM|dt-.}88.+ŭŦ{Uaɓ̐Z'MhvxF#_ D,FS]9X=bcpX[Niu,ABTi˃hvi5`!8r7fMS;A0:VyI,RyèR1#E4͇M&QVVCxO`tH嬥BPM "suŽZpFqrsE(H,h [.*$( gi"+{jr`x +{~T5ȬКpslk$CeF!F6.Rx 4v8ȶ"nW)m?X׀X>- MCb#܋#};6|F`C(TN [96OE6U;,(-YҰ(Ն;ɠӎۊm6}xgeBU@ +eO+KSr``Jw?3 <,> IgYB=+֋ֵ:Z\nfL 5*]٬$vx*2|<oVߺh"tlZ\AeH}Lعw J ݶn??*0T2Q`${ciY?ã=(fҿ+EvYt3o#d (Zdw2Ei /4RtELCK=>2e'p}Pul(z5P_44x?&`qvl ~$68%NH/NE%o\ 3x^\ϤӶoH|t}vfx9N U^:`y"+{QZdܗTCLӈ#暭~ =ŽDԂÝ˽@SgHmK=c Z3{G{`/= äɉ9M^Z$,kۇ83cwSK\A_|joYi԰ ޣ$(uIK= *DK)njeY wTZ:[s)Ia(w}R&$po9kkU&[*`fZ*벭Y\ J3[^y9^.jpS4gg7DlAٕw}䩿ʼw 좒S[8 ٳ&ㅨe9 1ҕ>:@r1/$Xđ a~55S0KKRkB0V` $K)=! 
H2P [,[xY1`D|:Ulc\.!XbIE*t:텭,J!WqڷCm+熢U5)'](# N-m&=wB DhfM;ݿ"-p1ޡSH+_gV:K5 `Eq?4*#EC8ho@knڑy]^`]Fy73/e+#K۫ ]U|C`A{QM-8jE%z]r6[^GONoQ > | f*`"WsdX{-QӞb Y-'bjL 9vpD,hmHuSqLd&fWR3L [yMse OhDPH  E+.V5XVdFK}hTMsV\( 7'}Ë}"E1!yZ) ޺BFru#vb4m^uЛ#]>H 2.vǀRjɾ1MbTR_1 ,J]uÛX$ލ_ QdA B52JD 7[\4?Y`{4j4PN% wIkg!eT`jީ] @%)j:s#x| œHv_# @3@@2gSԏª[5޷^>"*MBu7|ḥ,Kwv/Y{Z#BH=ե]`^t amy-c\IG(np@߿B YBwga=fC~;k#C쪷v/OGT8* K3?W?XɷL9BՓZN}[wW0ԡ)^530dMKɵWBqA;ߣQꫥ,rϦC?ր4Ck|nKKݸ/Wy?js+䇦h {Ai2ee3/'z^=@|X8ㄼͿ˥V{Tnm'HOsa;B_L̿+wYqf ̾t 5t>XL"C;ĂrceM z@b\;ș<@GGަx4J[0gdZe1b)!?,"`i7m_kjSJ)̚Q] %r13*T@KX J2A~МXdZmZԽVVɞLE t)A/Wg&H`-yGaԵ.uӧ]}Vǟ滰{߈7 3$"[C@uZ^x%XyJJY;gf,0E6.ЬWe[sNhv?#z%*]/SW%n5\(:4#[`y"9%Oɦ g W'֨/99I*$Hpi]{&vN֫ qDv.P_]YϰEj h`KRgLB+ЉKJe4.rԛivOZď3(sI֚>F6/$s-qawM 0)ٖql-*C!DNpY2&zAM$: U#\"Zy睯Pz鎪>hɄvWYrRN!#Z lܔ@JCU#2*gTJkixR]<b+L% Ei `Ws%~ Qx6_u Cȱ.jw<й! eQi+_,L%%yQ~!/@zB#>06w%(I~Cu.RC;jRzY)&Eݵb@1b8U3Uu*HP)&Oul8tԿ7?=mZb,,VX\,''"Ғ2ګ/3x fVlt.0l_^AP|smezTt_r~jhm)L+p/au+vU}i]HJY߈uwyZb"Ca,i t$)q$ /@4@!Tp/4DHX ؁4-ւ}_c%Ь!,u+%y)p("-[9_MtE%!fdca&ܾ benf*,Nj>7ri1۟GEc]}Kpϧ߳1ax)5h'!3m>KF_r`d}bOb*q43Ԭ|apg=q#Ԯ0^N**X\k g1,2jALb f_ -&.@%)I-n%4B3pnu %7D8"sN22| ~s/$Q7$.lzZS 42we>+"kF 0ww4`8y_c `:x:J¯ 9F kPΕp7M&$_x$Df#~_)mII5[N131zl`)QqJOtw=yDόX5 XFtL =D"ݥm;merɜswO+GGH§G#/ ?+/My)+{Py֋*j+_ep3{Mh ZŬ/bة77MDS7cT{H &)G) Ƒ#leqUPb/&lbU#{,uC~'E } Iams0}Boww*H?Kޅ~͏i4M2nlBvŊH&).p{cFQX|mS$GE{HqQ"S)j#!`-UBA9$%؀Ҥ @Ef>$%.vUL-z6Xakyy贎`/> L(Amה[@bN{RS5R/P1hs9,+{ oŦX^仉eDNHQ1GnIcA;(gn=p&O_(ۀYP=*Ԇ.D8)~W!W\ Rz5,$CW<Ƨ@;zt~$mIzΛ<89T#8VCrn8@?Z`ӢJ+24 z?l,q\ev6+R]* Jp&`N|R?J"\>A`Cg?zJ=;Lv`v! 
d=Q f47B3 8zfttx=kºLl`#n~9̤XbtZ9e*z!ҰeO , nL~))4}v~vccRO`O̹klW~Szmաқچb$XaEKR`0{GVhLwaI0«)Q*,)#:P-CIE +inԲ8̍u$y(}rёKA,u”Qi9WTpK[|⎫W1OAš٠]CLˢyyfd--<ߍM\6uw(G \>90~zm*MDM8g TB$t)hwwdNHE7[c=WԫC6T~NlMj[^A=F7kxapie\`Ҍӳzlʁ ^ j#"V).FXO)£X(RRyg&z>8Px;~;Zߣ>ǖFR ɳT(/D0cJ5t)ubk }!kP{8T _d[.1[BY yt"%,&v.8ǒ"ӽHoa-6" i&N#.TEL-6!P8"Ӯϑ< pvBZ4CO84˻ƟP7:^RoR.ڒUc҅]"n`Շ'܅ѧ.jeT"Z⨮ Q9g~/I^~)Wr0R;5*O`ZѶ.A-Xcƣ{(."Idʒ "AuҤ.1`!uҨl,hZlVŸkLXWڍ |֜"ЦȊ bt2]u4|nir9N186XK4jգi , 6]{QxP82b"rS/iyUB|fҪ{+#'ڊ h8l<ţN.Y| 9TĶ BgaiMS0r@#~nkMՠn`N3j[T7q/7)9Qv3N~Dh 2^нeZmt 0uD2y9ǤCmn} &yZzg/r kIL/}5XdKfXD:EB`IܹZ{+:;vĹM뷯D$ImTdٵ?7Ncn)^AP!iܢ#9~ ׇ ;eVJ&=>+ ntos?}4}`gҝ ĈTXRMoT N )*!Α<9(}+'r5ߌ}IuVw: C p9e3wiM녝V|#e$.;}}68Lq耸~+Nk )k ^YvEzxb9/g (4qS,L`*dU'u˗qՅ20]P]ϓ\R`l4]߼&MJ_Tk_^[u]]O7 qq|@g’ϰx: m:R3W`ڌ N0vTC w*WTc60|Z]>L3p{:h!'8=}񸄲6ӇLW,7QQΒޗVxGln#PQ .H ӽش֌V/ q+~jY\l~5ʨs݇'BCfiW0 u9)9jk^ %EZ<_<2o _E=hnޟӱt1P5({|@8}K*&*~a V=$~KE{/9dc5kSCp/"3- w oxV8%@"q^.DD팹e!UjtQRPtzݽ|3Kx!+tgމk T^uU<+ȹk.0<2)oR* jv.. hq8l 9r&s{ imO,@H^5j)9_H~W0xGz86K3$ ֧{z;7v||yHCMe6qύmfX,13y0CH.LISPg3*NEz1˒ʫ''wK._xP8t)cd_?x`O>UO>&z .5*""95NȲ8MEC%:!뵂2I@/@"EKI4ZR0 {mp ߉Ɓ`. %d.Oe\Eњhi!‰}N`2_vt]e eX v,-_Ȝ@ \Q{0q08 09hug9]3BDD5pެQ2œ-9ة֊9AL)du|bB(kMyKxYdֳi:C^T32rX=Ș4mRWx:SӣKJZ/ATG{]2L# er;麎ཚ`/agPa+ 6y70qX^=ݪߠ h@9z熽.nC}|Čko "Սqv S5sP$g Ôb2=Vfjy[Ϊ'V2'~f}ny#VA~ʝmʄx.6B㦯kq Vֻ]kE]en7^HacC2x:~ظt+ϟ&fbԩ*[Ǥ,vy&Za>}R-<Ԇ!bѫZZb7xm:E7;"i':2:_ ܉i3E7ύH.57`iXd5o |Hnum ~䲹g UB++Q(6Cн3JK8S9[UONݒ/U<-ш:[HʌB Nmg* TP\macA 7X>hm<|>{ۇ~wSljL)[l|2nrC|(Cx.NljȨ,`U?c'!+%b&8`'^6P*;sn(S^#wT9Js Y_Kג"hF=W >x]( l2U(R#b׆"0ž]+8oC @|@>YT:td@4C'~EQ+,{9|~1U{|>W:cVZ~kLү c;? 
xld.w,0@xWiK7Ih7J젨@Jֶ欷;xl1OpKw:>Uߪ1tRAW.@qhRg&> DpB*Sq۾W!@J@ìAĐ uO ^L FjHe[;0d.g5o٬XwX1} ʡCuJ,MTsj,?FBI]B`%hp0I'=V!LUOUȱwc`>dk޸Ap%hy`r3Gk1'ɧ3%ڦ^Xk04  Ӓ]az$jNDDF ž_)&f 2;gD&(;;Vign=@WPv(ijw㈜;&`x_1jYCAD0)ݞ۴M쨱;_Po H>qiy +!Ih0&12ǩUHL8 :T qqHء!j8Uxoz .R1x5[g* t}޽:},3]X ;+CzLVIcL"0ak,U`n3JFH4[tx: ]XNeghں.@Eyr}?m[qi4 zqbYti7ahoL] d47H[ZaWof@{C_^1usI& -p_ǞzfC=_5xm(_՞TrJS=+|ӗcDʭgpdYEyHMoZre`$ 33X9Hb(jGoPq#zױy:J4zz=IYR]]J: ^v/JYS87y_+s[84YR&14͗BO)ѭ "?+L.VClI͉#?;Z&!]]eKhPXɡqDfƠ & לh.R4,Ʀ82iafŚy~# Qh`Rm'!dm׋Xp 7U80ߕTZI/NIQUݮ!񬼜8*—A02܉_yXV<#"D2"b0YO> #҃j:4L&tmt˙Qrt Ve1D :a9m*5ܵdS#M@]"l$ZCǍ[ oъ-hIq.!9UOs9hU (l)0 ~-7|Ҁ)Ut-ߦ!P.7͝hFVw2*tq.([(xiϔ*jqRBGI {:P .fс7\ih2i:RrwD/ofz()[W d*X(j|FsZGebl]yX9emZyTy {\Y'Vx@nQaV&pҩNͦDuYI9R"❺|>^" SfRfms~ Ż"nD~t1~τ!ĢL`;F)jVO;iQR3v(X4˱9 A=A8X`q(Р X7R!wrySf"Bq. ˰SziT.>/R)DYc!t+d_kIu vpsKPk ^JcR&3d~p@7mM3 rj CtdaK3N|ScZQpQIնh!u( JU;8О^kjjV5Ըd3ȴ;ݸ: r+LLJ\vq+t=7;%G{KuTY# ΌjϙW'>09<"( Bic~4>ig)3$`װG 8A;7D5SǸb5(r5*jGMaÊZM;P揄u狹]`Zs<P# ufۨo!DxOI?=:tG&ѥ!jQ]}-ҺO㗩RUj)[%jU[(k̻XiEx5ήGGGjD[zaإCmu|3G2R$~}~=+e/yv5.`!jMk@(ldk;j*0wgRC/#Sg/lyYY OCtf°i=g+,-NHܚ!*-9IdrQ4ɎؤDZU._?}Ne ]$}`[l uLTOJ}tԐny7(>m8W߮!/&\NϘ='|JDkO+k_ 6'Bʢ U3\t- xX A`ER~^#zerDy85;wXj9 RVz8&9K>!3H2\ $zx⊔?)f ][Fbϴf`t3Z\"&;)dKqsWGRiڜ7HH31[̓aqUYj") 0 g2'zNKb쩌a%! F}Et?kp>髷ޖQJM҃9NpBc>Fu(,j ֮WE_>mP/ oJyhϨgx/r%N{i>%@C~ު6BO3pF]GՈf'_!ovI!9.D_ZӤ SW:Hs<fA,+!LTIs@5Er[}ws jKcL%@:IVÜιߋjE=(d5tTI[6_0v%ݿ1EMQ `ܫlf䊭=`h&4qi,MzJV7pl=Bt1ea>%pPaM SǶ "2}6W 4FejF|w&*A+UsBk\tf>^o9)s()aK9~/8q7Qzos,~+"t@`;,q#:V3!2]g~%|MZkΘ!4ǤuAyd pIJҭ];ǩlU鉲M \_??ꈧiI>o(FBxAI1>CIwP0&0R: XYBNtNNٞ?3>s%\լ50lz5QIr7x2dHÌnλ;ҁ[쩨ҫN'LcdvwK&ΈF@ѶIDXcS]Y, ƽ= SAcK4 O ;4f(#E^itӼ2^)׸e$Qhi|A} NŽ4 ܒ*3VPyX6 Q+'l5}1Qˍ6+Cgd)ܝI>l@=7x ,4:dYڟFLGg樀S4 FCz)s1ZyzË!5"m\l~~$yQgQ}nIֹ(k"k=f1@Ȫ^WCyT˖eۿe rj -E"OQЙ̩'WJ}q5o|ӥmko0 uN[6lD{9 1ڗm==]KKe #X[<٭zj ɐ׎}K+Sߋg:' 3t(=5~A(e+O-< UڑG.idN a߅w5 Г.xx`:hh.YN1БrBp] tl-)_5:P'^X*'|^{1\i?-GռjOHvv&MY7anLC1 OH^rqF*ߩ{:C/HRTItVU ];D`{U]Gƫ }IK[= X]nh _9?a 㽸)d}NNij6! 
js{PMmZ"LW=s3 "`Do0 z'lI,t¹f |.R7=t-3.oឯPN2ۺq$8;g\0.=0!q~d!eKS\9̳W@Koz *=muaSEQix<*'BFASԽ]_{&RQ↋,tߚ\}&Ui=.>wSSW \w 񟴚ƑFk?YqSk^tVX6|FG Y\P=ބf{9tw!yOz1$ob9mub3y[-_he=3 ;,Sf]P2*t7AQhyfJ.;=&w"L.65=|[K% P^Qie"ڲx>=|@npa|qۻ\0xTIQVmnmS/~Aӛ +g]ρ޻>\s<Z.U:=_ߤ- ޑScY7y'Nafm/C `t9R(ǃշ]6nPLz*dt(c px ÞRm) 1]ILQ|  "404~ -'kH62paV46[ VsI ܏@0&, "7`!pH3x=yb I7R)?=(^Q :oجxO/CzI2#Vl`r:5gU!3H.6Jd}5D%kAבt\5nTaFWA,=՚'bT\I1tz!;,$R`TXaWO|@*ٰe ̡DgҎ>NhUSUp ^.loIY")sdԭs({ J.`94>:{^#'KVM<pmi@eWZs`I+4 aS)%=%7d+~ BV 6'4-.\o1pnkv2;LxhČ}@o d E: FB}1xɵ7lP0-W2P{ag;SR\Vb53IJ{S;Dy`#F*+ U6"40= n*G&BOc,[x(ގy <MW7,eP5ys'jǀaxX,X]V>5JR vQ$VZN" )vICWm/s [#0DIqf3"`,Mza>N!bLy*Uv&w&s~)Qڢ{$I6 #9d4f= a.>3Ia\kXƅ0[k+,*7>/wŨ!)?geZ @/Cr \O/uۢ4`*;86/^6H9 [V;J&OB pcΒ&T58U<^Pb[&ֺuOQD.Wr5B!J6T6\o &>)I@ځQ:D_Mך}Z#$a)Ψ›6DE^? {-\lVipz7y:`ɕf.hZ,٧=| Hl=V[CLyZ^srr5ڎdk.j:"՚kpyj3YU wƦn9ƚ/ ~퐾?4 am&<߆Z`؁SpUX28<:c8ߊ[g C|Raʼn/jt+5PZț>CKr%L6&/g$Dֳ&u_۶A ]}!"۽z/\YJ;H:i'l(^O I]L٠AKU@T d noZ@Gη%c]⠍ukձ*ɎG+ js`伙Z︌kRQYiDVSjllbiG7ìvˆobUV'۷EזٜIT=uc{G(vB}I~u_ M0<sqkqWHwꗒU~1WZr,Ƿ[>9J@4O-Οu> w3CQҟCh*Ss<#G'Ruuon>]rƓ':aC` ]sU5{T4,Yb%l)cw[7LLnW?[K~h{s/wy{:U T{ylDۑ>&cxm-^C۽g\]ôo<1_oCnVK? Y?jP7h3{SW?'t~kbP\)k Zl&>~Lv΢HXF,), g9B҈4UTEne#aj%)yB,BZ7,.6F+Dw leӣ,/+ത rّ4eM#N ?2E]vjAK5^۟eJVx"22vp0gBse5PJ{4\:I*щ?l֞!C(TrZ0fUҞq:kz42ONnZ6ҍZ 9.PCVjbW+c̙ѿ*[*Z5_}g )aAMZCVrͲ uTY!!l8h-yi\d JfHk(rrFMd֟0Pbreh3SRϑiB C]Xbi R=2Ώ|Uwl 6 P1BԕN \LA젌]S޻'zB*iH}t\M̩A~sK;;k+j'e90u 4TC"ݎJd+\yP]bjM<9+s6`Vz]?z|b:-#,ts \9<i;4c| O7ZVO^J/|8;HNsm:EI&[{lܬJMM P՟E hGzsq@a .c-fqJSSF6s[aK3;l۫;eษ qn?S"DBM*|!Ebw)'%0Q;,]e| #u4UU pu谨@Unw2H_Ib)LWwUKm?0Z'ҭ/< N\7oI$b6a7\8 z{DS dž$ mW·4Ҧ#9(zd-44$g_sѠ>ǫ/4qߣ  f ˓2Xr:Hy,|j+ck.*8^|iTq tOfPIO~ Rq9);z+2)%:dD;}A[*4lQ#vyaXÜB3Dmbhÿ»mʻ`y9i@? W94>TTNR_r{ЎA[m*}9RV9ܛ5"N s"-xd5{û oJU ^l]!0) g V6zXZoa1Zr -ΦףgɂU>sc7d+~Ěd#|yv=Mq*ojAo1z!Dx|%\/DH}OQF OZh0Δ$3yP nY? 
JՇ5=0]C^u{osڟ$2 ԏHWD[RI vJ=9|µӧ!ga[4\6RX0j̧+=ʼn fTqϒ8~|09VJxjMi3I}6Ǽ3w{Ç\~g$[ysw(o2J  hŃYP"&X!>`(M%Kx[IAg2qx;Z]Jb)'uA6m}R\:5Tʌ1'!X1 ]= 3|ÕC؋m9 ׾w kZX~θ›^t}H-Qߐuֿ̽4(W K6ij٢VU q@j4`U#f CBXd?zyMѿd@߿u0kdXr1hm#nY샺5P `h"u`0M9(sZ.SI| ػ]x*@򵧆sHpϐ׷"_,)1R-[-Pރ4I jWoΛ%Gc0\N -R34 -e g۰ou_jWְgQ]08VDԤS"&[_T^|(kaؐ itRFL6]L8<42wr6dJzL'D YwPL1"&ZTFm4*4tl('yPd~޴]ZZ @| ]'N:T2ѥ%x=8ԱȨŞg'9gsEA"=W-\ (5]ş́]_w‰caED)A[L2`΢`5;쪆 Y|ghLr n78Zhu6xG4wl]TO*S\q4^}%KXcQUw هk۪1n ^Q<0<:ѽ$3 Uf]f V Y})<-D:ugN-S3w5j5 qk*CˀIEDkw%ԲU7 {Ul8NlWVV_i=PPlX@,94w^Ynj@_5Vפ,䀦qSE^hlH|M_T=q]3g=qOʗ Y4y\,u:h{P,8p-'ei9CXtKk#$ |ITtPM{b0M䤴6B;f`VUsfå7N(=Ͽˑ9\W7u7A—j֛C٢e]Ҝ3k\K"1ˆ80wL\Y$`o?ԊisYBi #˥"0m)d YIAkAF`EŽȏg4n'"n=Z>[DHj2LIԳ[9io$]ߪ?bU>t }We)(D|04_Āx-5ې$= D 9pC/W${ tinć/ Hؠ #v7Bm&!lSPr0ϩK0@<//sjL(jĪVP<KGffG("%!;lgudܴ@k"e96Bz9)79q*OkCYk@D@LSLE&ʸ%MrEr xR$6s]t%=An T4`dM"ƲZލfBdREe_.mljhMrr{M!Ci1<<ӗUrT cҫ3:vOu\*z\G~u׍bBsTg`L*A=K{;XcD֖%e؄]]Ba:}gubp6)aKbG*.oZiJ>NF`-.jɟhfnV*tkjD{RcYUv\Q֖bRt~ \lfPTB[e$9DLKݢ'#VC6I͞)`L]z# ;7^qHNMiVEjw_$Srq Qv uy)hcoziDI^p-`ebB)E1ł'zNTlaEmhL" y|-ts[ SK*m8>'Dw!Y>Qoq7hAzʷռzBϻǻk]k i$ OhT+]ÄpX@* -Χm86_ԐF@=t-g8`.\hj}}bb[3u@~q|˴1R B!3X)LBnuQE{p5^c1_e} % ꦑO)?V8`\8Bӊ}h8XeqcRa WZntZGk֦eiBi7Aǹ׆q豓 :) >6=_u:xERn>C s)y:-2岐i|m\ژʎNhG,Y\WuLF/j3e<<(=Zݼ,qIIg"ͳ2|Dk8x|}0Ay2([z>o>@wjǷEOT-mڡ(:g0XkǃL<=cͽN {jcCxE5aIGPo6|6 b -QrޥyL}{Bg"T[l8uKhC*n2Ux`w).IEr EjmY]SJͅ%ӆ>CCiyYLyB޾"`P@TBy]5VC,0:0K =^#anc6gR{ `Tk󲨛\7WOO!œ R"O`Js!x%ɨq`Œ!%1DoJvJ௳ +k>b5֋xy{;H77IyElCR"HsI†TߺnѬ[Y!2"ZY@l&Xv. 
#(B[v]BV"`]1 XOD\*I6D5Mx@OQ$o\<%5~B-hh[rNEȦʍMERV0bDS|" *5{EViw+:Z$̴uLD~qJ;'֌ 1Q!q;~<>n;uST u5s+[(B(S&r4Ծa!l:_lNx*-F6sY:@I4m[ƻtq<)AB w[<w2[I;_LG]a`5:9 CCVapy.kJ}"L 6F}%*y|r!VgUoJ=΁p@=8R"ϪZ2X >~}:4T ر:cl^GU G]OBIhVDD]kpQ]p66,4^I+V`U4=WYg<& Vf kŗj;qB+t$;:G+ =~Io'D]E0K:MSwɝYjגјw;rcCZaf4\{.j1%rku܅Ks+z%S7Ŵe܅OE]e\K=jg-t:JKKČ=И v`ٶD<8#UUZQիVbڊϫrhUgxӞpĔ=q.{/kWnvũ+.ᅮ"j=M -dU QRDD_(' L J'?deEʐq?92ё,^ 1`7|LKP22"+saf~:ks2$Dy saϯb7j\]}w/k9#U+oN7˅#2-<]?TL(bԊ(NƯ:D.E vЄd4@~+Aܾ=u8$qPmUixxpZg~ΕDT( c&8Dߌ) @NzH=n@:gL"'RD› ^m6ibv0LODh3 <㗗Wp/5Pyn}vot .6TFMt%;^|Sn5=5 Xp$uH m)XO{==1 )NqL0(UG(Tib'v讆V.h٘,+">l遍HM㈶j(.3 ʅ:%3zvGi LTS5)8[w]E*{:v[T'gHPO稰>d`qK0cX76 v6jixX2.(IߊczC~Q/[υEBB^6Tɯ|t˱.œTpu`{.`mq$?PSxd=n~"> ]'r;tV]x': ڥ{6^ ,ySDG{#};M& G:o 4̚.#xy\ٜWЋ /ꍷUE8p?0yqQ!ur!;:CבE1D.b>nj:dYJx>I9b>JtƣaA~zaŮ^M>\ϮBKm͞Ldh9IFO׃DMiDjk",f^/g|Lt9MĘd:%|0()rԭ'8te7Ik|NP:j6kR8P_%N|^'`&];uv|vYj1Cy7dvW ͐ ̢>ju!B4a}-x8_hs0W$i؟i0J2IU s(U%m^oyReUn魸V [ٝmqGխKASm阃5@0ΖrT͜ ׊Y;CDFTEzH{اsf!D (,cR=G"O#JGSt&gK=9Qѯ!nE}O;h[dXC2|ܽG،<hHk0 2頛/#EBd@ /û(lXa南Ye^#qɥӬWoOu; .몦6*jD\Yaʜ7O,1xRN3yl2csidJ+P7$5=uTV樯kۉ‚]:ռhvG|f˄* ; +g n OU8@Z|K{.pQ2(}p(;lc'nir";(q&ȸE}('a' xJg"ZAD|M6@+Q\j qgy!,VУp$'B1ߋ tnVzmC{/e&0 iޓp+?r5Y2lX37vNenv#fMF]GIßĘ5Pvܯ_}&HW}bzaFT_}Vx;}|Zxl,XmgхoH8^pK܀~4zcҫ!<}2wZǺ5}iI/_>8ܗ ֶ.}Yq`eNmV OjMrKn]0Z^V=|w\9_ >]慘ջYGbÅڔhO*|+&haH. ǯJDD^BZz{zf`0~&(G§mGaA϶{#hx.DG5 ɞQ=V++-fv77W?O*TBDT4Pm"78@)Fs8l?9-T 8A\=q"t&> X?[xXΝ6JfP̫VBMM3%aƹ"qEQ@ d9DӘ>IT:U~u dq .U 9}o,JDq"*]JE.$uNiv"9Kehg X-#-xpJ?{;K~i\Ƕ,dڠpb/`5xT}tDƟHg=_KGȱŰ*q*$)W딒--Z ZT62 \yg.lF[䈑xWdT.}r2N| ƞ5 9ۓazO6_GMˉ\WE ôUrMm+Yh>A-}}kEqGnQƍjcAp&V a QUN4Fmc}>Ϊ/5\-\rAfWX; xdPI=a0_QhLJ}"6h #e B[V"g1i~Q;akE7O/ >kNÛiI$ H}6l[u&Z:È6!P^\gt? ;? 4ǝܓe$L(pO(Mc=T Dic@&0uו`Wв]kdK!fDn:6WQRwu'jEFn#\@Ŕ%6N7h=Wv/J˦OhN1͙)FϰD;zɞ4~d ZN;,>U=>CnS@#mLޗkJE7CNg "aQCxb]t$q D 3ՕEXa?[L}~@ſ؜V/nXgY r!gߩ٣0w8w9W~<ob4ܙ,Ckǽ_Cƣ5UU D)}/B1ه Qh&W TӬi ͰA@Shg]>Ѳ|_x1=GP4xQ(;$ե.&X6O6Mlgnm{. ilvK(ʯQ ݱl))m 8vuFDŘt& K<_ +wR$?r⦩aG27_9"~/}$\@ zPہ8xǹ'Gb-ng\H{*A?j "K=S^+k2Dv`S? 
lV^o91 ޕL+k4u|B J/\$M}++fpqT)ˮNk^6hTTr{wdFVC^;K-3TFuD+Ps6/I]I!Ee u/ǃ,`h/l?$" h>/#ߎr-Nv5Ut*]xG~ZY=Q$pF̭A:﯍~~77%71)xQnU_77s009iNdc3X6,v88L+|c*a4M'(TXh==8oڧj G8@-OP 6X$RO_^!@$+pN1*f\EPEFQ@4pHƉPUS *4sP4@Re)W_ *8g0jk :ٌDeJ![axa e3;&}p5廉#*E6>hAk=0,<՟ܓ` /xvm ^/Dz IZO~JFAY'N{a.G)]VV߯ .L Lxߧ GϑUgChms5+i`}}-.J&I* ({. 8hWpT0m l04ztO' qU<=|ʑDPO>YޕK.瘨0K-+ub&ȶzw"bCK-~<ը~=?8Q "ABW*Ku篜r\Ta^+<.{ޡ+Zeuԩ5ݝ6 zHJl|8ԕbprBAC7e-q݋H~ؾ2p5suq+H3S (z![ǥD.pbX>gsoup5TOTtf@*j4+صGn`eQ94A ̊ЏӛO饞x®݄QݢVO^F=hxn[o^ЬwF-N:kI!x^.um'X\5m;ϧNNxWˊD\Ϡe}N^ XZ6®8PQv[u4UPN]aQ;xuv6]$ęWjPگH^^kLB~i`9]Ȏp#n]+uSNINuһs{5jUZzUJPNU ?!hSO6 :}X,l" & zGݞ;jۘQT]BD4 MR8rpVFDд}IưNȞ!1tUіl^kG>z!eqE< l60q̞R4`-޳s~r%Goes?K{Ԏ!0p̮A j_ƉC׿N$ ZN)cMs2'z}mQu I)t80x9=z3i 4^\Lk<#yAm S sO. p]m˿w@ɷ9ޒUNrtv[^'uozgYNr`~beM5{X562<3Ki8!Wxh_Y|zۦj^)@G?[%GM*sjc=+䵳—_)!~o/S"RN-{F#Jgvfq0y}ZP삻¼[|B8J驁Z-&ǹ{ㆺRv&h~=oլb&(G(~٬6cBtxyV;p>PeL_~־ zAc˒ԃF֬Ė-Ak&<٣KBawFlDے~sE ri *q X{q/ժ}$9eYPMBuC?P(yM7.r쯔@XJB9~;^_W/Ͷ\`r8JStsopeW_9Vhe^15s^gh&F^jQl@cy0pXT݇2 HtHmmOH :YeQ1)¯ߔa)=u|vO |oe?Wբ% MѣQ p*' 0qDZ@`VvG!Ilv_Njw= 4ۛ(gвz <)3˖KHm7KLJ-+Il.jQrpi3ݦfIϔ+{eJzj{sPs#̀g:^ìcSXhP%&o7}+OʨOu%C&- 9HÊW72JiuBֆ0*}' 1@E;0B5*婈Oހv$B AFlxaU/N{mϺzç5pYtv8<3.y-vb;|{ 2lHzZfMC/YJtlWy--gDҧzmB5 zZ|&UrRs=_}/уþ:7u9RXvY,%IU@}ۇzot)}J2p|u54Ϛg$XHMtxϗn-FEoW_#d"D}0Ldm \o{K}AW땀1df!ȁ/@S-p`%jq U_~q{(\m2~+ySEf_B툊UsZ:\?6mr<~=CgI֊Ԃ)E9-a4%y ׵rXB7X"=~REJk:C_6:(q9BPՉiJ vR ޖmO%?ux>7Y6mUfHzS]wm3)'@t>f%+dDd6V2aU-k枷y}dϯlwMiAnPT=h틚 vaM U0N%P?5z1fxD(/q`;ԯ>4 \zEoуe={P9B`e=x=WoT@ώ POltE IR t7<˟|B|y!yU|j~`QӃN;4abo9/%\W54.2ˤVBxơeSڂ _Ɲ]MEa1X8.Â[uz+QU%S,]߯f$4 rϙ8V:)W3߽fA2asH"\k أ9b^t)2AGkykƽ5l{v}V i}KwF4sl W@Dw ʣXER'y-٦)es ZT g vCNY͖G8M77R!T77Q$ݩn7 j4SÊUqTBK=76  >t1X$vPJ.]OKEy۽tݡH:s=<kf?Sp V.gvf73sMLỉ<6uT5Ȧ}g} x>Kӂo);.gw >!V#^Yh'VimkTٌ3 Ŋ(DBL}g°3ٍ?=IHV!Le4Y܌yO1(]~q!$"4m }Nեyb^-8^)r䊒R 'O$h1%uOr~ZoyU7kau,ޜmg?m~&ۃ:g~ EV71ї:Qo&wr W^堖SY~0s1`UG+hz1aAnݑMLO{~9>ʹ x %ci&T9L̼i?yi[.Mse\$ا2PDϖ \K maVa<(NJGٗdTl}u߶dwK:Z):ƇTS–k&z.5U(98uQsN6g=Ct`p͇?NuK;\@#ɷFSޖg83Gr30e[FK',bo]@ BuRΡJK#u!&1heRpаeU槃,*{j89afaNj Sm֚:b (K^ گ:ViQ'8UT. 
<"I]Ʋ@~a;DAVBv%.TΚ3Cb{秠x?@t˧{h3;Wz&\wjYey>͘M:UX`ŪWE8}~$`;RL?}zz&㍱b\z^JVCܐpK'ߎ=]0mYIW[}zeGpƟXǍ"8^^9 JU3jKO>\Cr7Oۢc=6pI{ ~¾[7U:A>9xѣ$XJ56cYVIy fga:Z-)s7/vI6hC5p4]c/X [WӮXE&kaT^R!'4(U]K )b,+q/s*h^yf4:U"Ezx>y;7\' qڏ[y3ӝ-χ˘yxυ2TVP}X.}p^!~8oB5Mȗм.a=2Y-){ᗌwt3)$X ;Fh^c#+S"!LpD M\H1-n[yd6^qӝ,s(U'YK4 /M60 ?M)`V#o=g Q*X%n~erT/ZA A*2U5F.$,Go[=ݝ@,ϵl N|:h&2~ )7m]n|X^twR0,[~ޘOSXQ%!6  =2.{OBɆ[ RT̲>gAmm_ 3{َ- gridmafʻ3+<ǩts)K{ߵyJ1[dfżÎd  |hVWoO~D۹I.@_TNE ׯQG"1~ϵn3MO~ggZcG;>1GVY*vZ9d7 ){gۗY`UaFbb ?!BDn]¼sxqes'B廽Tj1`3YXK;c a&E=d1#-ϧ# c`d^WsʧԳ@b)o^_9"ډmo!QDg2[ut8qW<l}i:hX&{Wwח(H+~߂WSgy#w-=xWh? 64zn>269nύn/G_A/|u5Gߌ-_^lԀ 1,Cf3(]/E}Aͨ;bj{vց{z#z#E/4'Z@CC60z3%sbT"y'{Ѹ,9ZҪθΣ\yUAT<$<:&5IO=2Z1էL\)j7"iz^TF4צ:X{uUoo M+9[lr"̛,\UR\ue7_i#{=!2Ϳ r.1_jcKaV%’ͺцR^\ F#j\Zeg%jw!!7W6gx@1أPI|70X齃zN`sF !tO70$oPίVT}2~2ӳ=.yWe o43<(!sR=]]n(_a&#y,qW~CW8,Y-^f «=M{c /DmJzEƬtE _M0ӕnrb7%~S~i8ָ zO>X$3jEpk[YWw}(}q8lAMuv >y_j?K=UHϴ+~Ab}{C\ιZgWsG8UԮVbcA*^WaatzߴiX~[fu>o}|X{Õ]}tN 51_0ÿO,?Q81"T Bn4IՁ4p* !!]E9i6j1(wpUJ|`m?<ܮ g|5Uzj =*vMVʗB_5Z}%ѿW_=Ϋ+Xfs W9vPPLtͼXs~B՟7`)AGخaZ浖Enz7,#Tbn]=VubJWcطxϔ XoDzC6i,,OqҌ8a[JkD B*T_!w>#:/R|#sx8mLeȜ^1U4:y|tb Y+,xYk5UId:?_r8a,t|u*so]LNbm? 
YU@J졜c<5h:$d.rD FgX^+?N *ћs9AVndF?]irMf+Ous҄n0Nh 6VfUdމ;1mk~\:sϢ#͔2t%3Ttk_5.Ho7˞;WEINbFIHj23y'@3,Exgpݹ=tUAwג6dWYR2>m3ezȫzvyug ˓IWKq{O59Cq .^c®\#mc:XI:Y^K|LJJ+ڊ.Tςl ܧ)GnMl;h i*X{DNhEJ c87R?KĒ6[ ȔgG$+ DrB+zH6C' )B JeX7uEm[ifMo2 g] T]4逴 z3kzcm\"Q]J^#y{||q#O>L[:—eG<*KmS{p7}q.@zJ9«)+ˤb Y^LO <ʲ;]5+,<,mIg[{V:_e4+~ee|/}b8 DŽ~5޿>S`|~<3ΏgFqy^=O[vG>~:{WW|&14GՐyZ}Q =-8X8,,<ʮNƁ l?|YCs8]dry`jsEܐtC=o˗GbgN)tYӴܐ[e|HI7^O2OnoQ"?]v2Q~tpc~_woTߞᙝ59J~p˽S~<}}=&kD/.`~ &Vevn8uk-9, )3XQ@>Me VOuiGϴ3]A(Wvk<;F?j|9wXquS;(:pZ{'_6SfO1p`LiEؘn1)j㗘-;3,-x='(\i|nǓ%׊4S??pNQ oiP O7[Ŷo~Qlm;ۯ`Z$.v9`.HE_efߙ{Z^Mۻͯ_W}zKlxcҪ3?/#,]*ψ|߻ˌ~x|Tzhm_ӭ޹y?Þ 9oe#%Z^Z$"C̎C!߿ؔ~]X+7wO>\dȭ{ Z*uTT?9UsO @U3^;f$-Od\>RWf$ Y̊TUP"\BeF XY"1EQ +#KVJ"oxԁ}5Ct/[PK,ÉA\J)LޏG\˕=ތQݰQ/p .Eւ='x>2i*va>m 6[ ?>~?OU'|w=-s04?C]VN^6?]חY3**Xr3?KO[W6D^{;I癖?O\lwpqZgM0rmg_-Z/;LK7_*ϱmO_o *>;}γQ{xھ쪴~ ^ Y 5|ϳҴߗ&DIQ Ll a,]B3V(lܒsRX9%kӓ]v#uAI}ΒhiGGiMs-he?$5[qpcڵ^Dɻ4qGkP,2-'n0S*NÍlJM۩qcFBy8%.G{$daerJ"f'uL.e?=3j1?[8To4=oRÅqĄ@g,<ķO,)/Yԙz1Wܮt]A1+:KqcVzKҠ=:dxw~9˜1%z0fQSTEXOJrw1gοwn9!;o]s]/M /k]\.x(%Ve,V龔n? HX)aY_ZjbS7\p?pu8UI,Y=y(Vʽ~!p\&8Ps|'V"{rA_ǜrv\g7gAM, F^l73K7~t2zLߥ~/O<=|_9iˇEoR_֨TmƾhQ>ynVh+sT*b3>) ھRxO)Iv;'+bsu+Ժ+u:Uj&ss(\*EMȭYBhpq,3Bd%S fxoVnF4BmrI3B% kP/nEt*$XB T"")n.^CM~h);8eƦj2+>)?x2l~Tge݅]E}v?OiW}b,[+ynǺ !htT?5/AOi.ÕtbتeeD+04n#/cT=!f_`^(4&TxV=#5_{T*9,8Sy ʿivA a2]QX)9 z8C6D!.XXР&֐%UXrUTKllY`T+A9Aykdd4;rZG OpϿ'_q_|;r*9_a's 44(EH~5l XUN􀺲?u/E9/qKW77/ߵu{\+Y ^^?~Mm S%;Շ Koo~?:+~ 濘ՏbBJpV_<:av5Yj`xҞ+GRW )[#S 뺆ǚ.[R6dһ(=ҁu[+jhP ܀υ J OLŦGfU4X JXಂ2tZTG# %KRN\p uHt5IP*\#`U:S!LJ \A`5.sJD1fC+wf<~tr/ [G_ QO%ggŜUUiW|4[B̧kE>>-礷Ms}oWLߥo/s|;>YNRґ7ȶu'z)"h'ue7zzQ:%1k;"j,` !ܩ)Ki,jWkC6ݖ/QGfUnY,"oO+pLj<.6\d:o}ѰGa^7} =cI<`R7'[֌EW\V|WG;sv, ܇Qίo}o5BH17kY#}v3 $2ڀbԡBTŠ2jцĊ0TD`u|u6X:#W80m]n"3AU!hZ]\*ɑ)jdTmGe3%9W8 řuWZתR ۉHor~<~hajf1a;?{N+_fp`Xy(ƽo=/Z(u'r_ì} ϯǷ>K-`w.==hJ XgƪzK&3SRL:˯s&U6D}͜Aݷwd0^srb|MRg@dI4#C*j]o.qKZ2WxoI9y*9O*ס]j R:DYEi̪vRZ6@5J_" -(9Eg=[??/bBa^(+Oz6+SKsZ>wb;eNuU{u4:hk/ y"\j J֬FUzP'U @A^4ou}qkN U۔.}t? 
:y~L} U|@_gc"el=9`9~Z?(ZNfh}mR@fw%y|M?'[BIxׯhfb-KV 2{VW+NJa&a #D!ðjb̆EKLr)b]blDU"DxK3YhX]zƛJ`fEP9vЏLme͙48vPvc36PJJRj +TRMVLާ+oߛۖZNU>H޳%~3c" $*Cڃd^Ńyޓ$2M}x [{Q`X⍛hr?n9U@X?hzsz~^MGa5#/ǡ<lf(uf ,"+% t<ʠ=sC\!^߬]c/rtUy/gW M9`:PX0p": bui<+M V)GNkE^9 u8)ˆPuNC6V_gG5|+s9?7Jp+c݂Ȇa[El((T[(VUu4vt}1iCC$`0WA׆>(RZߏ?uHmo])2}Z[==& K}L;qcğm'E LJINR ]$?_G=D/-| ~飧 mdvn/G ;̫@IO5z8RqMkЁᡚKb=wN%/;ͯK='誹n J:=a ӮADV&m=r>} Jv<xv7~Zxސ![+je!@d$ oSŃki4ܩypCaХ5K&,6Y3f 4exJ#%xJgrx5U6['u~:vB "łXZTQ+Èʢ)&!Pa7͒ IUl9Noce~O^4PcLWG3>|fD^eKW?̠=x,˼L|*ajM h~0|wGJ.ek{iWF'Jo.Pi%^%KJuSFwݙp+![1hr =[#l3ė˕!+*13E.e`;F&TbGeH,'i@B%4l\+.@T-~^P=6n~h{@6J\ F9{bduLS^}1]K J'̷1_NW"l)EIL>s[]Zct櫴[ÔB ,YFSwql%;@ƶ1JĿ-d}wIV -yU7Xr&[1^<':}jAYdk-D,R[JL>ڱp5C!L@  k"dP^LJW۔ر5}.)]<>Y2m_z~WZk~t̕ .j RXcJ-o0nD8LYI35I |`ψ875&\D KaHfI /&X:)fX&lHl^;dO=jX3aPXǬu" :Uj~ǰWW SY ] LS'% =JjhԼHP.M%ۜP~J7k S+KQ!VڶS]j;eේ [Y/gKӛ>Ne%'lP f!񠐩3D;JB(E"I$7 ,Mi)4NquP-lD 0-SUcPxTp$sODz?l~<'3; 0,,VkEr Nf}%, uw I+7گХ[ f",XGX{$s+TZ1u.a T- V:e~3.uAڅu md ~e/ Q*v&*GA(UEGcˊTB43AScDY dtk&=^VH48"H+&uD{5=2QfS0 x[$AEV! qm)FIUpC%AHM%0^#'r`X (rD)܉o@Lh6Rz 1`(f["y.kj(/m(1"@>U9B`s*T"{DdvV6 ,RL4JayZ'BtH'IA7pj*4Qdžn 3GAhl%Z 1d;h }t7^"VA(orˠ23+^gbɭfJI/ujʞ@nP5J ,=%dI"XY&p0ݣ0L0mBnd&iqmN/xP.V:,$ƥ" \jlZq|Vd}R7YS #jK%>\ǻUB"V j?[=x8JC`/սyx >~ȣ(gPa/N})`9^K4|m Hd>tzxs 87Mˆj@p(!c$]Huem>_"Mxjjp$,7Mpgzy$ 2/')D,dixYvS*vń'3?(\&, ~5 M( 3+MS]35TxRi[6NMtb[NJ Zִ*Ά;o6YD+(OAsNr9/1T.X ԓFZRcbK)bs5oM-؀rizPff\g2QY@#<=m!{'v#A C9m@TPP:L Ehb5AlմY#29^T. #&4HlHs%cvjlRwmEIc ֔i"9NiMI WTO E(ڎu3]j&ғkV$>O^MfaT dB_jDv-Rz;d qYE;u J:H6UѰv5JL`M ڝ,a'Z`)Ort 4 6 LEc G?݅I[Y1-Y+"`4A\ExhR_2nTGR20rƣhJPwG~ \ٖQ W͔IF^-PoXjb%:9K i( zmc42{$y } 5 â`͊ VI IK[b ZTYP!,h8aSCa0cD=0z[0vF0T$X|B\{L71̚Db+t(d|UQ l2))E``i" r7|/)(~U{XϰZo<)D? x@-`[IRT;&W i6JbU!r糀t1_X˖g8.d>*m)E >*V?|q"%+>iX(HTScdhYd+ gS<,K>UMZAɠa"B\u:^r̃"πb`ʸ'xk%R޴:32A}\V1ądIN6~ay:heez95 ˖ʟo&U5as@Z :Y. 
M)m%yU@2J᷸10xz`?*.2,GyBk _b`'YP#{S!(Xv٩\VxGu4`K;P)pY-T؄{ɡgZrj+[eu-:G KÂdـU nݞbtnթ%uDKXZn]'; YJi-}5hL%@*F뢘LӌM%EFDxqqU:%.; 1: jZY }+f]w1s{θ[baoD l$a5C,)(5.!RrYKr]5Zqt sD"?vت1 EPM{MjDk?_QiuLVu)R:CAMkʠ5 t(Wc?f|ÊrÒb*qƢ+a/mL f!xgxb-{fy5aw9xs2G97Ż,c(4pz&Z  n\Lv(v/Fe I@h uA{ xljB[>',;u ;M) B!Yb ,jbJ<8 cf>7A jtPir 6Fp.LQE #dBqͩ+e W8M 6zj vqV_Nf['p%W/#xā&b4,_!OuH>TqWsxp \)O 0jG|ܮFLRTj%r5|sVPDęiT/> \8OO[1.V0!). c֛F3[OLer*A/6!$YVʇv>4 *;*hc*KLH;NB*gBouaD(!r摰6pF$·av0 ovzUC~,2a6o('H5@vL Z-l'tebK!8$$d„ՔFOl=+6:W ũ7#}DJy#)hXa)@)L8BC![2Q],B[1tRwKmjstf;0?nk X#OEE.Cٌ5fMؔ%,)l^! 17 A-FVl] 5-@qQIQ5'8S*!ţwkVJPl1܍2L 5tQԞ5c"P"vwJ H=FXiU5C[$b\Za7ā8p;| E΄G'N„ZcX1 tɋ:R7iCZVV+G̜{xcK,F6v#/F{lK $ Z*.)a:d03S<0H KեeVFc,oU..(U8R2XN4]5-|h8dBʮA/p)dB#)a>ZՋXXm΋8B_!XKF2ERNFdB+ɝ,Z#SƋĴǟ5vVF᪇φr쑖eэQ`9bq^򨌉V=R%/ݎ' n{8/"ٴjZlT FJtn3s!qu0 ?҅.FWPBlIZغeZE&Vc1g0H v,TłHc\-qtKTsAz oWr+yh[o1ZQTjzb 5 =6F4Rp;gTzٚnh ޖ|G7U1fZxXRJ0w&΍f dq>E:QYI a*!l'(;9%A5jLbgSzUmPT^|%J;\m&ӯǒY##/((壊jfG~0 '&yCZvqսEކ|?S.YJLp6@Õ ye&IxN5 dfU aZ#IS|Q~h~L2&>[1X(Β N2ǭ >*A̢"%dMDPp)DMj[ƯEK3e؜ɲ0v)W *O"*& }^[KN;9NYKVfj˼\ϦUǀgs!Խzcٛ;;DL185#c<ATkrf(SUux}rILJ)w׎EL,;:!ma-g[Y\`1}W0-֥O,Vp dS̾rS%`8\ X^tox#Qe%\b\L$5qy .k!S)E"Ȟz"})IrcLP.># MmqDxvp1dˋ ah3b Z$|uȇD6|$\X qD&gO-Ղ E^rԕes87s-*n3SUVT76x؎@z"{K00ehŤT'0TZĢ[y,&kL"qv"pSŦ. b\vLRɬxB>5۩ؠ*lw,״ I6:\@z9F:XQSś·Lin^!V`NubZQebV@ă܌& q\hEU-cgč!*Ϋ8 QPdafأfdH|~ ZVyhb*yFTᢸix:0O'`%{U羍~ +¥vdl`5d 8jmSz’n sIT uN)h&ڊ:Gu4{W^^яe0l *Άst^N.Qwɛ By1kC.Jͬ]\ ]-3]Pf:;(aq̃]&Q~jMPc:U(R_i{35H`JcO-3j⮉iBM NwVZέV] 9>桌Xl6}h:.vR%һnF7YŦ;f )Ŝ1I`@|'5ݳ{#6ݹ8*귦d6Ȩ cɰ*,r屹QnD)Y c|yITlz43TpMUkKRsZ16Wʕ oy) iy GwD֗M+ue+ zM zɚɉV20DLsp1cQBrXir6گ?#uTN`M`U3 ZlijW! 
ցS)Nk^ҕ +'z«-}1Cԅ75 gbT\U5e@wbnj5͍EDktZeîՎ  FP,߅CS1ͭ;)#Yl]MJoGhU1R=4T(1݆E_y{fԐ鲵\x9hY_J΍*]׋HRY9֕o9&Ü= 3SSz:- YqNA[Ҁ݆{eouaSW#A>-&7͌A]rU#G0xؑCnFVN5rE.QoqS')sO9NI(!73B1lDI+E"S4TMeiIiIt©e9)G#t\0ɫ0e ":cM,,iv,e#6 yjvWq(F ~G/4TLUku)黧pP%8eSir%gW`Ǜ o2^g0@\@U8Y97"ug,.hF>ٝz; \yZZx{NHO59H|wɯC1vIKS,sGyЫHus|m+B͂S> 4 384÷123(-J<;zhh^:n6z.MLtVeӐӛkT.}h;˃`ql>VʀY5Tr>.:/d6f|MvCܨX+o)[=kŃ3HQqiq10G7;|.f-5ǒϿz a(Rhn1o+{MdVoh!yD;ۗk"A{Q[e܂/Kc;'T.8w?{nz1z72t&>d'FJgeFp-"CW&]k&<{lԡ $V!^t sWBQk#x /Zr7j /wR0n*μ(L%pc&204Vd3+-2?ppնX%) vB wuߕ!*)3`frDO;z\.xSԀeP3Ec.P+N륆o_ZYMЙ=*>5XnZbj0wܼE\Nga~{Ly1aW>lZjh$B-aT^ nxHJ8̙lE½eN9Ukؑieu7+mc'xT{6 1 rҪ;nJ(tb/!ʳVLc)j ָgCtܨS{ ]mUd'V"J^r7>_݋7j#޸}3`x&-ǯF?M- 4&ΕM`ȋװݸ2cp4\Е =Fkqcb vE[Lདྷg 3{ŒwG=;~Nuŕ;M8zwY葭e>/06̙6ә@{zGi8cPq")}, LVSQJz(,[K;)+:1P^賮аƂQ# \UYZ}GqՁ-e=_Wu[kyK<2گc&%KK~`#LdƔ֙@xm7[mc4K2ԾY^3}*8Pub d`!ZD;SLCg nbip|^?xyqiM ]Fo2o?ŒShu4hi z+1LMu_Pȼa,w\lRuH@Ig}BY!O\7+;b׈D=W?| ϥu} \3v%fS9mvP,\lŧssBbwùvkY̮''I:0m7Y.SS6TvFC2[/kc6E꛻C4*-& l` E2( Y5/vp_4DRA?ndԮs/FvOihӀ eXq?!lm#DX29Q)/+5 Cf#6A2`@ $ t.~cYcBK<$ CkL}$;Z A㝫\m8⋉eWj}]EI 2ƂyMy)M3SH%_N*lV 59ޙ<7%Ah3 <$w`kP1v:-v~#b TR*6}f}>2q R09ƽlKK@vD7eF`cr*sd=9L 麀 !}K=7`i_ޣ+r+m/,GӉۑÖsIxБ˪J||0+-}[6vw*{ieFl-fwƺ\\Ġx \?'D;tTgqaKf:S`v pu_yТlWs鷑54W1rƦȤV$ALZ %Ur8~{hh/p>Kn[" aGKYeթoԖ ,-d}?wc Y/XgIrndi#(y7/2!6,ے$%q)4LKS7'F*f%84*m 3#1V0&^͐H%Eu^@_?:gd^u&Ͷ/h3z,/EJDǹ x-ݚJѬǀսqW}t]%O®A ϶!4x4vPx y8mk*ݥ_1yכnq2epDOQf4 I?+_V)4^U2)yx}ruyrC<>"}6dG`M }Pnpfv]^?C &;t7G3_Ng=ߝvb'~cdsLەʳo>\SYK;ԑ8O#_J.7/hÝh:%3ZU.݁)G\q=ӖUcrl2y8TFyZcbXF쳢'?'YNaR%fx꠼7cQ%[׺Oy/g%Ѡ쵶0ݿC-xݯ[^?@"4Tg wt3ɏlu K18@b&6Qk6~ 73@I! OA7D x~gKv%u$ &vsmygmh=MPB"\`)Jx瞄$q9_)G+J0i <=lzlhM"b{\m]]nw/"Ce%aSPsDY#4׶5tt, 3 yot o){^NܷX[7_성H:vx`3~-]n2CZvX5w/1Zs6 &fʝF\fvnw 4cV. 
m{`xgVauy9 &{3e[p=[5W)p]Ip.N"chѸǩ2[V\wl 6kϜ(\\>&- uSv׊4iek0Dd%0')}f[ke|B"8fJpm4mV+t77\4/+g>GbҘN!U+ɇF-:Q35ٍ8Jg1*2mt8cT %R 4rdqMQWGq>|\}:N;] `4g w|{s"W3RʼiY8U6W=o3kfܵZދ΂H*7F,8;^vRm΃&VVO_h}ٗlE9p$G?b~`+3Yn'yKetQ*+t/N7Gvڬ<[Go'뎍& {/}og(jĝX#/.FDx|Nã,i)1FKǺe7QLb؊*K?MNǕ7 &NǩwZ~؁|^\Կ&W*x{^>mMhWWi.C~Ʈℑ?MF"(x0*?^lh~2}NG};H*Ҋ3H*ߜ#}& oalWhN2abezma :'.G/O$~gwr*'ʖkKNoպξ d_`{;$I p\'G/7J>t,>G<7ܰNSx91p.U/wWs;<ۉI]$.qJy:{ow:w!G]/J|5X]'ݑ_zE7uNwؠܭt|-Ϭ3z;+{E0iz%Or;O'ȅ~NDѶ-}.c)d|Nq3K?#sP0grrXZb !j=ikKiv +W,G~Ja?O.BjL5ҟz_Ow_^[Wyq}kuϸ7۝ϭN{/m3-GeYl ~cېM6=A ?ճW3}Q}^p)w'TWǮ' a:>r<Ũ֍}niF@!?#o K_yi>fOU;4-xU8`JPCCaR_K'/J{}, X S9nӒo:i`;+cyyoby 6:zA%I8i|7dzx.;N;d70<k;޷v:MVwŷ8ݓչ(jow ڎPҹ6q=~g&L.7GFGL ._ccgi%g/-r( !ߛ+Uq=/~la<ㇳƿLAl6a 4oMҾ/ R*p+Ke( 7zYeL!I=\1\ʝeg K֡,p?Ca_PHv=}ú<Oܑn¿P 2|ަ}o{?r+>G'&oR{63oAfOeCp ݣwB.X ޔWzH{?gO9, U]jw`PlW{W#Ҽ>T\|?pٝo/q*N]By߇h(>$3z* җlWe|v.?DgZw <{r~B+v4O2^z^^kgW CAuFRnz?svɗ77K}گN76'7{_a{?}n)3-doQ=_͏VT;C$.8vfv\.[:'x6߬/ ?It8)~gV낗]{<.5=ҧS֩6}g-7Yݧ7>=??kv]Y{P~}?+u&d֟5̭o>f߯brw9[韁x5?@$$"I?Џ ~ǵ >WCqiH֢ \'ީ>kUA~L{o|n/aϵ??#(Oi\oJ,ĘKaAh"m ,ޏ_o?1ysKqS;w7s&o8{_^wGuW#^pz&G~]ooGT/uxz;7? 
C/\=?|wS]럖\n^3|_(.iR/͊TΠ̪χyND/}?dV3=uanX._F??>;_ ws^oWoO#v=rut>WbJ?ZWz.<)Yُ7O~kZoޏ;=ng.[Gct?a~/|e˝jk~/j~WyA>@:$U3 ŗ>W>g68 }f]YݧxN7I[N}~Oȶ?{v1~{r{?TE@/Cus^e>|>th܇0~W(:>g|=I"̟9*}@G~:?rP_+H闁)XX|}rEY @(P??/ z~^s|_.~ov7$y8{8}O?7;ߕ|No?,3lz }%O}|?,-N77SUw+e~a}пv "eViMn[o1/t2?|Q_#l<c??{Կwyc~ozߋu/Z~__zf]fvʶR?_&mnv4LW ;~>z|~O~7g‚ lP y#_ ._ OKXன|3;H$ 0Wx`۔Xo(t_?/owUcWp?aΌu~*.{_]_:<-߻c?>'eJ| ?x ~@F~ߟ}ǂAc#rUݝ>>P{/w[YϞ//{vHi|7/>B>F1f]{#LM('dO|zǥ7p|^/y?a*!?j0~)}v^ޕ}nJ7 wŒPWZwSok/x|*o2iotꯇ(S_^n}[>N/4 ƭ?Ӹ|og~?FW-Z`M>wD WHB:V[kyvo;<<|o{R|~gj>8 'x!h8]% 3PzHYQc fKMo3&p}C9+@_uτC;^Uo_^όҫ=!T*ސ=m'² "@Iꄓ'lp2gk'Qk[ + 6G_qG/Q ߍG]OKCwoŁM?ڢ}?/vm_TAH;S@$$}^2ɿeVTc/ℱc}9gԀ\EEx>giOMJtr?E0@1.,Opb,6|6yڟ_ag $H H8XOLϣ/w;Pu7?8$"D!GK󿂟/?B}G~_y^{OoПv"@"@ޙ?!u<8?0m醫^H@H@=9OםK_|?(œ֠~GĢ*/'jv~?WTT?b/3A?}}_ƻ;KoA^__a5?Wb k:AD>?p|ϫ1E<~)XS_;Oz'?+)Ν'|/00@.QPRTP D'?Cr(b/"EUE~~}_W(#E~h"( B("}**H $ ~q;U??+ԯm .6IDTa8p$Ol 9'ޔ}&V2)Ikq)2L!Ue(FXUu4JHAD]d#' GVQ&%!D +A$RvQbq's D BC !Qi'pxJR ihqwv941cr--0I|pƑ$`"D'fh`h`I܉J\aPVC)!C! @$h,҉ RJ"#yW&rVI)JJI$9#نg}_}g~XTJ1(+[& m2$]kRa ,9_\}_k mt 8qkU@"""$P@ TEHETYBFADaDPdEQG:9""" idE @tX"-"," "B22"" "+""$ jW.1DTs e!$1H) R!IHRRD$E$Dd$IId$@AIBE B@.T*P*:*@I` " #HH""*  ȊHH ! $ "T ⨂ES "ȊH_D!D$)i  D$RA$R@PP@ AC䬆7MN 2 (Ȭ) )"H2$HȨ(,!$HI"+"H2(H- #HH(``>-dPB@TdBEDQAPIYDT,H(X Xn/g?7F@$P$D$UD$T$RD$R@7ŭg߽DH+ H*$!" ""(H*Ȣ# ((bL^wgN * " ""$)  B@U]mPDoH! +"R"H,qKiǒ DA$EYDP " Y$$ O՗|~#+(h*9*_$ADD5(k5Tu+5~o~hjb( 6 P+|/W7?[?';csD@d9R!}lZ 9_@ +) <}2Q + #+ uؠp"AmP33R8tq 9-L-#%Q '-?elEBȅx1}>6‚P M n &dˇb &h2f3ٙ_@C0C033{3<~*(ffcED00\0MڵsPLEE :@H8I{ճ&/ׯ^zׯ^t?|'|gUT@(;^}[=^J|S׿$P b;]׍,q@EmiH&NNG (".*"bad b . +yUW4-iAD~gb"+жDooɾs:~莎49Y HX-ceEQ ,Y: B.Ck@o/^<3);|뼧gDUP oۯ_}or"ȼ'3Wkσ)d>n)JЭ/)+}.ҲVZ]JRՕA0"`IHHFDd$R@IHĐBF I$D{2ȡ"ZV!! BB2 Hi$$Ҳc dFBAd@ eg))JQPPHMMQP@@ЄHEH̀@̌DPo_J]9o:G8u]6RJR$ Bh( *j"" BDfM đ FF E$dII BHb@HFI$dXHI $Y "0 FI$ BB@d$Y $IAI $H!BD$RAY$I+&ԅdj`Dh `"*HH`b (DDRRMہSr=7۲)i)"*&jDJf*I}ֵRV]]ě᱒$I$ " $` RIIA$FH I#BBF" )Ȉ&$E3BA1S#%CD$30"""$"2$"3= ̌HQ PHHt$ 12"! 
""P3D3"ЈPLLPA ADĄ PUDTȌ  }o37 ooqt^%u5 22$1#4R4K}u/.e_|e L.@@C3BdA@@ IH@@Dt2%"B"43Cֵ]$](]IukZRo"&w u)KuiKMHIUДĐHmM4MDD PHLt22B%##DĤ[*HHHK$kJHJ%&o5e5lJfJhDjfT/XV})u`a.KIph`*j*&k! ѐR+ukK+t!JֳfP/ip i{$BFF*bF"B*$hD`f0+KK_}j (]@ҍkF_u )HnVmڊ(av!zSMZoKkZ!)*uԥBJRdFBVBVV!+)+JH_ZԒ] RRTլ!}i*PխHDun[" (sNoC88^)uԺkJx"  *)hCRR7)7_Iu닮JPV2\JJun.(Beh]R mڂ*@iX1 Dr##,:"!)h&jF`B""D&@@hFffF @JHd": )"* &DF@ @$"DB2/YZPIJR_)I$iZRRnނ ;?NM6w)wUU'cU"#7`3sPR );D:#@+ ""2#)D4JFGAGW`b4pWuWsC *2JRf9 8 2 #3 "FrXHg Ru$%6!RpgTК@ʲa#V)"M$b"$M "EF ) !dĂ(Ä.!Sarhdd8D! ܌ T$*#G-!TVI,RFP0#g;0)j(ICPKFQHXآ@&T9(1IH  d%#P),F$ i&C D]̎ XF` "QR!)9#(#C+"CC Ie#Ò´J" Ė $b b ΢l X٢LRHB#8cURb0 $B"DnfD"Es B; J@ %"%%DT`8]XAZ:+ɢF"L!&yU$" iHMYJD(lHC93D K!RR2HHIdruxFF(DL&l*fpP"N2F DQUL*0''`Lji(FNn)C$1YA9403I\e%#a* )w[4tMaqA4 P bkN} Y, =~ ^o߀Hmmlm mmmc$ 0u$HZl@"@H %s~ n ")kDAI$XH$I$I$IbeAQ+DdYA"IEdYEd@肤$YEdVEdVEdVEdVEdVEdVEgQZdVAvEYdUVEYdVREI+͈%@ ƻ5O@[}F'>}q; n. ddd+z5LQA J56߯5ߎelfhhL>WzxI7.}}[N#8}y*cŝP%t'KAbZ3xxP^28 C߅;xnEܫ w\b^tn@n yLkؤhp$$ $d0` 0` sx!'vx-kh#iˏ*(XFf3<_frɾ &:D7]\&{[OG+_M{wDL3GxTCyGxt7x|~#teClm:G>>*sv||nK8uMIӣEKe[%mv] / Grnv<Q1Z_{#u-1P-lCQqp0S 012A #vd Hj3)ߝ^c4hѢDHN;'d5 2^ev9}zwu{=VUC8NoO^Vh.]/G!DQ5@:izn#w|j*p.w'w2Tw^`NoMTN^8:8=;\)kg3?mJv]kZp /I 4GM k=^kb(vfedJ"d#dY.gD4T pd3iU%!.>.#DQ p y^K}wB+8@@4:- c+!4ٳf͛4hѣF[-:Px؏r X1:ӳf*h#.Ynw<ۄϫ=fHtCR*g"@D5AXvgNjHպނ`* `8a:NSj$v^}%U##.3E3\<e-2@rW!Lqs3 8L=y*੃!@1\EH:A,t M*ah%[::ڙ35@HD$u7:/hH $5ZZXZZZZZZZZZYʾ bO=?}a J.(8y}>@B#!mdz|/jEJpppײn2r1z=GR7~%l7sq\@11cGA 05삖X8۔K69 DoV _Ǐ 3ļ y^ $r  7vN-%\]eڲdq/[0 2LLr?$.6UQ66D|"s@sS2yʥśs98]3qKk7~`bkuiay~^* ㉉_`Fbjۉ~:*E."g瀣^cxxS/)S0/4:| *[}-^xE4R|\\L6uP0\*y""`8ʘ")Sֲal-!c`vfu2" ?1AY&SY8.F^H@z͡hg˼cj^vl,vCE(UN)t4%V`RډJZ4J ]uїG'U5HM@UjJ%K56BH$uB$.ܩh44U"Sـ飪!lh]DBCAf*AI A@(4E-5"@@@ KJ4 2+JRu)Z 04nU86ƁG@5lha4@ RkdRJ kEJA5M H@mCEPHEJ.vmhPR65@ġFf2hkK`b5=ݙ C%4ց@+#M$mXMh 02цV*()6f kA-5k)ŭiB hZJj̓hZX@:u*WbT-VIR4EP*[jfE!j'HWMfjPQPM)TRT5U(I44)H62 Mu”D*t0Ts"D@%J*j)J֐B+Y Е4@PL)((R$ ];@ʀht4鮔( r4hFA N@   R@P @I  T!D*T$I"DU **E ( @ *~0M0C@h  
hd4&4iM04)``*U?24@M4& A24d4@&E6t".rjSe 9v)GN `Qe@Ȝ-M]ӿV$Ȯ,bQ5צS%h銐AU@O:[7}3֣̬o0 ((eRchܪضea_;,ۇ@.0Z6ċH*$oc%h̔2hISNR-* .bj\F ]f.k$k3dTMi<ZFqtZ r{<- #3DT $Ƹ 5Ʋ|(+';Ո [-/ :SXpR(O1kOK&鑸Z$@0QA=58T@{OKD/POD|@SrW29T7%]Q0!VF BU6IX,2 Sv |RH=$O7 s'! yX*G8(cly΅Sk[sM77稓hh\&1yZ ssOD qCk:I]gc΃ IqLߙEYk҃Մ-d}0mdCQ0Vu󛰇[!H$6Ubwo0D%IrNڦWyܢXP JS5nUi(SJL%rZ@C3Y`<(aKԛ+$v\Ԑ2Uy= c1֞mS薚ԙa}+ > /z Xƌ QH[  - 6*R蓄PBUB -͂ ,H; I+U0$`{9%ZCqݶbڌ(XcaX9]qS=Jȁ60GQ옅ηZMVTV@!ͽ}vO뭁/ +as}Kyy;s#ç] IeԬml3ǩ''6rt@q ܣxHLdKJ@vGBT PXVZo0G˯C|m%$3iS ]xbDLR"Gq`1#GTFcM2c м}f}gV2,u0f?fMX: ݩMZmgpD-F|M@0E>-m9d a(03(Sv) Va8G[^0$B:nwP5!=ٳ-D6F&;Џyk!ٞ _,b`԰G:,IlhPoF0 lk"r$"v) ""݈NMM /`Ӟmm()d +<} ^fJG݉oTvaGb62w乽Myۂ|>qJ~G.)9|v}RiPL)fv{+@5uQ] L`6l-pު[(b%{8dnD2Zm]y{K@GW:ͼ|w=2t܆Y=¾of4p•i KӒ 껠,1^6,aSb}bBR9Yv>c u*.w/mD'jlj8,A3 Xip^}J9ݯ][<-ǎռr\^I'8]i;;K6e0q+;#sӿg{=p\(9c4ۍeq^rD`GÙƂܽx@Xz;?`>LF=3w!GYgNV!(& wb0<--Fv'm*2 fDz2Xu%<͇Ӳ-GDuWtn#:8ʾ/<%m2VI c3%jAUc * *kr 04.X{\?cWb"<^.k!r\z$ro*dѫbvbW8b 'C'mvѧNS^]${lQz~M:9bظ[˾G>ڷ[gl_)1W=kA<_ EC8+ŏ|[oSp m>nu:qۮ3ĢgNb ԉ]GH.%X}95 !@qa ߢJ%h u&iD> й-%$3S1"б}:fe;=5ah#TFe Bޅ-MDn('(D>M>0SA1&k),J> m|gIΔ42Z;r!r|0u׷nչ'76[2v~U h[,Ѭh1= l,fegCfF/YW~G+-Z`[WTV&H/tO[~',rGnޣ!+ࣲ:|ҪMWFl^sTt| P0}h nWCKdF&db(o5~{spϝツ;>ADva vC;LZa$%"P̻-rX2Vf7wcWg~{:ٞh0#aѨy3~qO~<}t=`l2u-j:ۺq``ËaKo7͛$fH?1Azj( bLu>kBMAtGSgA ڶTF=SnBCN7|YjL}%z0lod}qWGJsfL@:]s.MXz([7[)nֻeƺjU.W"ap_5)&={cŸ|x/Y,s (ϱ,3N0Ŭ7[9թA[8BkIaFԒA%H ҪVFekϝԠ X!fM^>5s?n$.n5v ^ WN="yΕ8bUHB-Rzgmvָܝ|CM-6Tk_ܠ 0s. 
xR3i˯$nɝӜok֯ Mծ:]sGDxۆkmd~e"ǣǃYk>uj\r%,;R me~ uACߞY[xe vo7"ȆclO.9윉>9^z_o>^FsY'A )("JzGJ%YgW+P;#gQA>N=rͳ^tEK0.1 X~*[9ű-s+:ckl]BFpў%l/*uS+}'o*lTÊ #9>>c'Ѧ{_Msؼ8_CA󭈎Q0"&DhyFs [9:{8>2m ,\N)ԍޯd*U_#T o+=uZPj#WlT%aDLAR@u;4U$/lVf/5$x@a27H~擨;[ʨ0}sJTJ M{3+9^x\Wj3/0#sk̀V꘎GYW"^j #kutVਡkX|ZMtW폿GvNKgfgy73u[\&MRxo=XmJgJCv AȢ71 dYR϶CeA}(E veRGٙ1!jtܕ.V\#`s_Z3\-}~7wOa&MJᄥ'GC7ڡɈ~e04s`RñpoDD$^9Y\~\,: 2XԬ j9K qG8~L,4{yA՗`ː^;8m:ص"Ex2s&׍BX:I(I 3w&֪9F2A#P{or[؉L ~#}WGZbݐn!LfS*1`$u/rJ_>q=/7zͷ%m_ u˜|m.dn'\ϝۂ~l\G-)],ZJBYpv?YK?Sɯ1Ǧy>2&jR;NMu>{wߍnwƶMc)T~>hCVI|3<)q>|T/ʍ)u c?C$&ӴE8&UÜ|RűqOX0r@\T!PoO7qwbI2B2:F`[Uwc A`"G)2 6TvB=|`y7eJ8`V0>f>e9F"5m}o_D$|,M=|{f rBu99>AlXk?ɈqT|$ki eQܹCgf4\P=B2y\o1U;\kB1rN 0d^Si{M묚cpHGVb#Ed Eq޴ۑq"[BU-HKN삞3swhㄦ~J}Z1W+ %{UqˈGGyYo->d}+LS}}H;}tݱBuqv:_ȈLF!גarthbe6Rdߜي֐ LL4t'%^jTe)BRя081`G9N6%rH r7>V8 Ik S۹@+t,RLlYryR;:(4QTPLZ cRZ\D ̀BY`A4z]v R0΂A4̣l&kPy!{lNTv^ߗZuK|g7FA-C \' Q>b}",X#kgF/iڴwap8 -q6C?i,'9C}GsKb{C8h+O3|b rݵٴ%@)E=))8wu->ɇ;"5/r}>!uK_d>b_ͥ_ vv&u՝Tێ=$$N}Sگ mlB#@. Y2Bc[4"D}Zp-Ƣԗ]m`1i<@5CVB;Ry~޸(еpc\xic-KzuehLg5*9LKsT Ӛf{B\+M$@g/0j j7ރ2`dX_Y׍A yI( EYYh"g4K\DJaH &bJ hC.0 aAGqFAseuȑGbE῝7vBAm?4%=L\K11#ts7paʣjj;qlݹihRGx:G6ع{g8]m{+}sM2zqU[]_',X5ޮ+sN|??/Glz oh[9 "re{bw\+/&OAV܎'ǩT?3ADn*]+"ە(i]\ץşaH)EQ~X1JS,љ乶]Z"wab^h:utR^+Fj0!%5m״{cxDrx8AHFl@2Cۊ TV[X>P0fA0 '8=Rq;TTRzθE:!G-tu;_?S~d*ӿCEVKS[³s 4*ڵw+Ez:^rb_5]c<1zxÈƜևӏx kq,229(;b+jr#v3(WЎ*uyaA)e5 2Tp 7CtlaLh"wjuтsXbA'ل.kO'yUR0` ` ɒ-ՏNI{)g5A&"f?6J0֑ @mg] Kcd~\),AT˫/]@ hg><џqAq` 6^¢GOx$ 4X7 q Ir6#ANb-%Zjh3I|r ~#; W}xT#Eu˕Β0g1cf`! 
عmjӡ}"4DAW],% ;F%az'g[%^FmpE疷T2yN˾@v@‡qln'y4h1ѻ'؂Ҧ !eNto9 O<2ѹ>ӺpS`Þ$ r-CL9h%: pű+/ArulڝraJ8c ɖ4ԟ.Wik{Gq~Jq9ɪy𖤁H!Y׎% 6T"'A'a\9NV fͷn-y̔#H`r$?MIEćU?m9ٻ#YP|7\xjx`r;wf瑪wR<[O6̆s~K$ dJr@ V1Ns=nd}aRe>[9?GEP_ZĒ 4j(at 8\ĘE]*g%RiY|^`vjv `heQ͋ 2#a/v<-b$CM\(LF!˂ P1 mO.\ QϢP0<|>+t+H 0hVJY:@7*P2ۮ9֯;_7*Y%hq9&1v}D b3٫lJ>.fY&6^ӣZlW tlKP=-+bG}<~|b .QkƗjJ,ÞP6J@n?K;s7^⨝N-4rCPS]ZN-9S\]ڛ"yia mAc>¨>xa QM2dxoeܺB7T̟u]B,G[or:rg"`C\7RBz{t$tX=`@QRRD3b8_GY(bZrGp3$ȬdbqQ%'.]!ML J,@0]Mrsx{.1Fh#,e 1H̙ۅ)G@B27Xx"pf$&g< Yt\\oBÃZ(>ibl;Xўd4'2Hj, S7?zh\7;юJVV$I[+;ѿRPg!3 hɄnHrgjϊ2LW=vJiJςI;bt^lqQ1nxڢp*, >Ap@j(s;"iX]e0v4G:aX֬\F*m3nR֢r{HY)r!*[/J*U6vۢ-(%PVحqPDbyT؂C\UoQiğ,ZGA"nV uHVH% `e̙:#BfBH7Vgf鍘\E\P.Bf *m4;LU O^'De7H-%lNd* @Q\ NHUh+92L 𩽖y_Bs,݃8@AH>vP)cFc(^rh6ԜoCE׸xwazvݼ\(nJ ؐoV?d~ncM=tjp+ W0wxXX&DA3CUYn$Dx2hGfF, aa,zԞ6A:=0KlPə)l,UM&b&yOuZ {MZeGKF|=L_bZ,u_@R?,:uK6§x"+k!j9 s- k2 N:b\/u+;qopgD*z-×yQw6k^VLSR(H|Ρr'4K6-[I"I賱"3'=( m6XYLt"ITJV&mXhNk|N^3!Q,/(ǩT1 `?.Vc<ܻ rxz8wZ@JǚFR`d ȴ" 2 `'ԙN]6U5F`(SR*hTٰIвQK 4]cݟ>ܿw5(d8P#0Ht$טϊ4N6e}cyS 4.np,&Lՙ Xo4sFu'r$5oW;)$^w4I-`f"",Ȃ(y8U.c4nrqM^QCUlqDXAځ|,S[MLQ&QKO aƜ[ɋ{̄ro0MrR>h>zf* sX0wfYHPMTPP$֍6{Qpc;` `-9*oRxq 2S0= UTf̈́sQ9!r|A̦Lq)#lcOGA^VM/dzHbĊQH7M<<ؕi(elLMqee ,$cPt`Ǐ@2ʬ Iknj @ wEZ- |b('V>Bgv..%=Kf2 &'\bYr-pg?qϯ<\Ja~tbʋ+0ӏI1wcQ}2pxCV,&BcFl#g^[:/^^']T;2Ep.<✰d5d.\[%)㫋&Oar0 /?ļ:5n(^7M⭈d҇݉DHN;L@xlDzUm "Yq5z%ECҎ3 9h)4T ga*6&BsCyвc*p)ᒬl}/p` LzGbOAdh86Vu,oNvp9\v8` AAc\oKV5/33Da/a,Et!_)ehRq|AAu3ϩ!"7 'Oay.+26)c \@J'qC12~OFFW*iaj a|W(!;-[K tW|& BYHRM8&([ъ],xmBs4>4xbʅ ̳5 lumMAUNBzPd<"y.ScwFϝꈰN[`^lU9r$>k5U$-P&&'8yZBMG^ #؋hW.k& > ZDrbݺ dm {maUbƃ KW w=ud?>ӓUhf<%`_b,Ye^PXAyNF*Blf 2p"]"%.{!oŗH1 @ŃO#o1s>8%{.2U,њ@ XvbN[,Ћ:B$G;_tN N.Y/Y_]N_iM4мħ).{CK`E} 4"*|R.FlNzM骗,w>a4HY?[Z N@;qqW6#q@rI!W:mxHn hIt YѴUՊwb.$&ɏudWt 5_SO(BZ10M w6Q$hbA7?ިڭ-_ O.1JShb0\9+s _ӊnkAHJS0NXN4@:w@XYA8| ˟mtirm}Vy%%mi=v%6k^]$ñ$6*c+5srDVʭX:LQe;dstOA/ 4*@9dYȒ&!X/TH%EjՉ[&/(*1$"}XL0[`8}.g:-v@c%+pص9hM*E(,NmǭiX(YnO;7=Ĩݍrq70lA T_xPE)BԵ .30,Y \o\{-V!!Ö;眆5`$CxfIgP< Ėp;%wqd{5k*Lh)١1g\I6AcQ 
Weg!5lg[s1!T]8hv]P$8Ь؛9egSSkbf̦]\h׾ K:wEu]8.$4[].4(WDE;mA`A Pӿ}|b{[6 h Hd@j/Ӕtt݅3)LCgp&lp1ՇwdN6džpW7w#NEOU(p^i>b/uz,%exSp?5 |Ź` a7iT%I5H,0Z#  /Xk٢#GG{ģg+PӭzQVx@P/#VҹautZt fD5HG8g?n>ࡇDS<@qQfxk$2@n¤}$1evżb%L^X&>qO5X<[v-"zZT7yv`j-׋n2LRnci9t HKL@z䎅ƓE+*f (üal[wʦe*ez).S'yZh@]SI=q4эd` 7`!(_!s)`|sn}p:&)T\"rK޵P6]64JՉ F4ɷS׼y#`.O@ca;tHl蠕GuNhMxu,6_TMRċ\>82HaVfN JI  7NW2^_wڐ>\p v#sǑuetme%q Z-͆V1_a mZ+b 1^W *ex;WK'jX[Y˵~[ƻ|ꆻ{ndzAiIn=w'r:4'G-?DTp^Mܞwy'49]_fږyM؅sF'KיGN^_:a"쁈(izeAWfըndyv'ZTc ݼ[q,/v]b7h80}ohH#;P8 Ƙ_[}>O~"lj޲RjIwJ'][¾U _ ̲nu2ku ` q<5 %ϱ%(#iaRoʍʹ$.iLxdB53Lk 6cI < kװD}6|"E-`=)ۿzfAsBl: !qk3. kA!&"g2 q B^b6<(#lJ/j;KoI6-fN.qXkҺ m  F<;hpIw0̑Uvv0xa{ BGd|آRѬ!4*$ZRQ@+ܚ,#]]^Y!f0EW:(m" &ewbQ=V{O`cziq99rCDהJJѱ *EL9tR5s9l, j旁) EHړ=o#y̠υ"C,ccgUi]Dv :YZL0 BE5[TT^%dRN!.|hlqrT01g\WDJc#[vITл 0 {ZyEcCXEq=(0s"2cv 2(Icԋ95'[Ë/~N`Ċ=Bou!IXl$þ(+l\h[Y`=WF?Dyp5L 1hZ FJDbBhp`f E K/85.@|Ew-@^>FZabLsuՇN5%QY< !rd,.;??:8OMc$+#Sڡlǹ*SJ;{߷,*>fbQXcR asl|lH"kD%|#@>pW,dk=®,8Q^y^hL2,a EB<_T圆&,(g1Ӯ ,/4\P xs8ˆ& -ORj< ʏ_ PBϳZ(VsfȊI{ʂz@s ҉= ~fNc煭un/0d䷚YP *_C8־W}Ig/ Q/YYKEY<=>W GlԔs/1M5f9%^x#1 T#_S+EL[ai48X!BI?F"f34#׈t hPX0{AۛvRNf .Mpub{q/FF(!7 }W;Dmz"H[%kP昖P1 ޒ@(_Ψd/j%sY*龼ԡD2埉)hW5Dg'%ÿjYCgNNȯ5~nۜ1*A{P5M=cH[] 6AJ-~H@F6x6K [܄>l\=uaa|P@NtW dhJyp!do}I, Z7'LOʯ3DY@pؒ`fBr{(frlpM0'|zc=^$G fN`HudMפ`ȧ6 rۦcZ1n2 SBpfS \a%P Qcg cvpt[)OO˻m ]Ъ3F ?dXHݝm%BBdw~R3蔥1Z GPU]ַ7جs&P0Ҕ`#۞읯Jčq ^lO܆UwM{8rs(_lc ESe${L FT-ǂf 8=BL:^8S$+'dQ\h&v1!Kˋ3E&t,8gN+\nWZE^J#Ƶ$ޛ-Xςa$$՘Pqc@漻۴*ٽAv.Y"Y8mo]7[p\-ʝ9E@*` ! FlBqxhW:JYGQFqXOE{r|7I'%ʞ.W -*}OyMTWAX#bYD;*JW@ u.(b4+05i#Yc}FX*p# OdQ6EPyym6]L`}ǻG+ARD}D|̷p`ً:g<6ս*!h1]D#$1 .K y~n1AX9c*a,\epF` (x6ᤩ]oLeHdj\3Q͠FBiV 'tЀ@ۥ"P+W:nfQ)7otBҲ5m۬OMrSȁ,py&pT]9VyQZ\ssI1a M0Sg9-,tրq-blV- 9{u\(E^c&/Tek:sj_|^:ÙyN|P,`ÌIs/.D 혤~ɑ$%D#jft&(]m NZ}oaL2M%n, 6 YjHb)~x!n4PI7\3 A?8t`. 
gMv)jA!`,#pZ6p2VpxI#-լ>Uc53D{@~ĩ.:LΚp`ه6Gcc=ř,N5IN%,].v!• 90`sl?9A+nHE&FQ tKk!~JLYd۟yYJ'=Y2.I48e4rܴCI}H$\Gp>W6f0pd52WYtf9kmBڕ6!D B!9Rx=pȄT\ +>h7wMN:[H:+J9i#/*3YԉCJLYiuV}mAdUYK5ipxYsFH?Ǘ3!zM$'؅5&գL'> d5V@ rF*h2T ##).&="Y61lEfR2hpG痏\ӌafP}ªL2:PZE7)!854rC3&VO'+7v'*!ҫ:91^dL@@RQ2ϥ%|hSȩ؂-eZn*1x/)w-GxC+%B`P]6& *ځ t( my6}m:% :R!\bȄ_:כ'֏8h3H%iM>`s~O)9?F;%,Z28Ef %|A< z砚Xw^̙-1c}5Q>#I$h,R=KX#(~[bD D1IDd΃ejɤL &ԝ!F/~`VN,0B'AؓhC &Q +C=-?f[e:C+uKA"@juwDFo:Pe&w&g&x7Vh "{D=dbMjGA%2([!iwW$ 3r0D0镇~VϏŴ8 &  pNq݀S(&Nӳ1S[05L|E5q<[ݰ:O5L!@)H8\;=^?YUwL65~  fjQS+5GCb>!ZkU-N7%!`2&Z t8d>5\Q( ņ@A. k$ģd@H$neV) >(U=ީa:PX;-yϋ026qab7hVAZEOFCdӇ@51!7OvL֙ƃԂL ILrK8D9vfh%bj!ş& b_muÿvu5h.^c%*\6ZWNsLBٔ}(aFw5}F{9 W{cNfIS:g4mUi1dj͊c''Rq O=&'ʸOl8N 2yeUZpG5KOqkT/QeZԴk`GGM) QYP#Hߓ>|,"ԎePA7Zm`:w[_69n^'l`5D}Dmh] _,`wIpF@54z(N[ ^b2߭#+~L0"IDNy cb|;mV32^Ϲ""5W+Ĥ7)cuxs*&RCQ#zľ B^{+U )m1h'ڐM=C1OKBP#x3 抙JMKPHi#ӺjTjC!YyK] 3=sl`sgP·{`ʐ\.Z1%Ő wgK.I(k.I{f4}ݡAvnSŖ=-]l)JnbM`elRߚWu ݯY0pgLIC+e[};ɻ@m@l35|дc e:msVE,|g,"\ډ3]f8Yb.|ф )Z`ͻACRXaM\@3ξFu[뎍f96#og }m5@9kH%U p#]M91mp"'l8k3F:h\ 6'gAhrѝ[f'38&z-5 9vDS G2B&VT+F{Y USfH ÕE$S/{|=29f %'# 2(4? 
uY_GC6m7 4Co~ 77;4B V8}P!v?n9ah0)ݔRw#&IΛÑSs@6drX9>K04ɯ1p4<)cV4ia 90GژM|Ӗ ı1T Hgs&Ce±Wq _FH{mUP<ګM%$جG=Jb-S(4C Y'A~r ~4T\ɎyjT%H0[4vP Oh2z,msŠw>/kN8kƀbUSEq>l 9"ѯ|>h$HΎ= 8P,c&]v~pQs5ZNhlZC'!?2cR³$TI0ZMfv+ڼ:|SjQe!)(%ӺNsv< n){a4-slɓZ46!mrSP.T YL 4u.n!VCǬ5 \Bh"/s>?8':ZF귩lTDHLQhلҪЌ%XܻQ#l(jVWѵ92ƚՋD=U=oQuz PbTD)A U{jzCQ{X;\-'Blk ӻ>#'b7#ߑ@"})j9SdŮG+P'Yq5gDVĮZ`Dǀ:'_P-Nw09S0p&d|OksծvC薔V֋@'b \(akFY,q-P^ u|<ï[AWSf& zQ /!Gaw ak&"NR7+I0vR9dYF<;LA DahQp&)fJX !١~t)¼6nH)jAT>d]b- bN~3`8M8(JCcn 3x`VX]P":/_Q /jTyF(ʺABaC;FPY!($e#% CLXøꈁUpZi GvpQad =6$h'h':;ꩣJt`9V#kӒ(ʎ`n$S0LiK=)PL+]/ aKc #iGKd1CLP$0TFcX-Dv\wl@D8qv9k}ɉ?.,%]k.껞 ?6C#P} ZI_$c̔{@Zrf̪hw`z:-J yoUAsiN" K"<'Mx1Z.+3EYG*)l% y}1a١cW/KX+_H4i)ƛxs ҺitÝd1Ԋ;e""ϊ77+*D8(KT|߹{ٵs%n!h]8h Jqaĵl*SG; =ㆈ+ƨZړ㖚u( HUo&ȶ72`蛫Bw\"J=א_#6 5$+a,K޸?trCg&ұi5, Xo;% 0u9BC]F?&UԟW^)3)aj*PcC*6aX\cu{[\TxZٛv3P@׬*rcs s/y#Nĭ|dQǾ}+ %ܔDnM;+oÂIyA莀pFvn!y/=vz4)߆b 1aUEܝ*' y!2:Xh0i D w'7~2>*"dk57}_vJ*5%{os;5š$i|ë́'ae- f9_ Zcu" lAqpt_(DxA$}3 w>\[ 8 "ԨedUJbzxNpjd?PJAYLE5i$ hun1tEkKA$3ݸE\aj-|rf]ɐފ4 -$M ksaX f 1C ĪD˺7U8-f.ؕSi=,G{C %ñ+iPƽ+jA}¼]}`#0EPqz<h7+\dG)fC$`hd9 Sf(`U$f(sͦpzg+T+}RM$1I~LXتH?&Ԛ2eHI XE;.bMSIaFM|2ڹ= CC9YMʤ-w1E=vFIL! 
2/労Ƣ[/L"Q|hSClGLܔjDXE:kN`m4Vl(<% 8gc-h@i0?nˊl5jݶ17qNgpPo0xVg`!)]\pJ"~kӔAGQȿo&O>g8]HJR_TW9GVL:hDWͷc4 .™&.QZw\ʯFs< 07 x39jGjC%k]aKl e ~v<]~TQ>SwA=4gGH> ؉sU#Ǫ@Mk6ZAJUj+WzUtJ7 -Ȁec"$6bEWa=w z}^|2AP0mH-$nj8^Wm׸v.t@Vx rHa|`9d X4%P7gRSU)M'VN"HZjl ?D  >:_Zh}N,]"8*qNIo+ /Yxʾo)GQ@ܴ`{[^bzr(h0F+3w2DqgYv&E@,G'J-W&ڭ{7QXd"upNo@Q!1&k-6ܖ޹̇>4E ԡKV0Q9j'Znxn٦C+M:avQ0&Ļtsl&MӰiMKmQ1zeG3 #?( F2vd [}t@V3J1RX~<fi!I`]_R!jjҔ7 4 fo, B'HdI%ep%χ'8\uKԴܐ#=a"Z LVJFm ~҉%PW=TvdS2<|ӎR`aHZ$1(JK28|NYd0 ZfT:Ԧ=9%1z9䚕4(I4Tv-Jw\^fDxD9xx[(-1(Y3W)K9IGIL2  W'0uZn'`D-ڮ;Lj U(8,T#S1ql}]z "-],(Ɋ 3@),3V?`W Z #l@0ʈF$Ĩ=MIp,rN8\ibiK/*!~J vv z&"V'9\ϸYTMD [cI]njqK>qM5mZ$- vzP(& /@DQMD#^gX0Ǎ̈5ߑC' ^ {*ނӇe,vB!-92D/B LŊh,޾vٴ2W|~Vd ijVL(^ SIK4Wb#2҅) m[cOl$h3홓=Xps+ȴV% y6nυ`',3CԦ4.8]7 l["go͔yCՠyvw B|YD.p66@x,|ϚUILV -L^>A:u쭉E`@͵Ћ0l{nն!Xc)ҮG>OAAD,oY6ym\% LC[O,xQ15C³n\!jԋ1!6셎CVLڲ C}/I6IE؈dq*W3"TDc&H 8栻 i;@4ú3HB` B™y33SsM+ÓP#,dō-|[z` Kd7箙|ǀS/G(v0<<%H7w\nvgluRLsM2s5#Oa33M`E9IX5|4L-]*krN&܁81O`HOԠ59em[E/l6u @ф((:3UpAc[j. r,,{^D !44h:,5Q_"'d;e+Gh~]4Om\ED XIV](k-7G=b4juۋ]č{RE$C+ gσh OD)QN%Hl?}3]ݻr$ #K c8Iɓ,FjHS(Vx'<{"hda.~9kص<8"i0Z y9j݌]\۞JiWEc ;L{w@yt^pKD2?%m4+gG+DF] k}Lw H'?AP,TsUJH޴0AnppF~g-Rp@ӵÞpڄRt(k=[˲=~C0Wk$xZTHr+g+Q!~ ώ}0;|DZ_阠o=罻[PRjܴj~4{4´}F ˂#f/`xc O*2"qk).9bHs0O% }UB Z@V&0S2SuVG"\5DhSEH77ƶ K=хVehjFfq睡#q4Lj=pBXK̃KPRtӓV7c%JL8ښD%`Gojc+d2qDPMLťmnt>Jsty[4_nSPc>ӎcn~%]Ddv9yL2q9x1TL@dEZ┦M#+Kh$XUf0k-dbُ-\W>"#(#1zٌ2`fk>Tg$C5C(c:hr+ؙHP,2c ?  僝3G6M?d M,!%ڰ"fo/P`pB{\01`|D`9RFȤύ4;BQ2q:9d ( T9MyQR= vQrs)[Gj%¸@vb)jS1+Nn&F!}wV$scLp$ZQ8U"5Fy11w;YnI/ͪ:@2 ֲ0{Vdd$!|;ܡl/wfk  k\E\ q.˗9GRtu=E<%? 
3!e{z\JlRḤ-$n<#z .cq4euvsFKnxbԄ&e<g\#OD.9Ug{![SU@@,QC?y26؍u+@'>`a[$1W6.q|*2&A% QxFzFdcBkTxSqU-1gWq現WS+^eZ2#V)65Pǜ|'̧5 v*d@'iPT0S=2$M޹"z$@T`a)(x$RnD =493ȋ+Kp]'guhkǘ:UY\1H`7hK%]A}WN}qw2qT:$f]DϖmtCA˰Ķ J&QSЌNrldLZ jiNjҷ28a6+5)-zLO욁VȾ#Cb#V.um4]JOb7iLr0wa[+SRɭb(̸g@ݺ%JLc{p^x)=q2ZǞl 3_#AaAUߦcol"Dz'˲>YPzU[K㑞_YT;aĎsx׸z/=mt/1a7횶QpoE56,8gcPHw*loZ!tsʼ=|evl"C6JՐ4-iq 3<-^kBȡv{Z Lq8Mw&5?jA1up چ< iuiE̹(&E|-ԴzCS."ăcXk&@Y dREK44lG׿w=3xQbA&g_뷵ܺ\{&]0 z°2# ZJ cN~t=f`.94u"26:ҤV\C1!ώYM 朳2Ǭ_ZkĴfrx`) ςcTx w:}甗ņT!>T'r ϝRɷZ&'{&"|. @X1*-U3(&[w|!RVɛL? fHdOؕջ"'_'䱌9 Zp;Y)fGT{ؤ?, HSQ%gr@MSJKն%kR) $&R9YvArT7 j4ai[Kt0a|ސp%"6_[!F`;rbz1NܬOkRNƪʤRLȌ `%k. kqo[q1(e$'"z"c[a~UԶ?M*b™m Yxf[+Qt[n)5J٪t$Z6Ge7RIXָ:- `5#b<,[k&:!p y5:UfLT"BdQ0QxIjX²NtWNWW%ư<`RT.1c:Z^Wr8k #Ȉ#$SZrgTCU}uƽ7 {(V mS7P:zKY#3%y<|rd-Jܬ3ubVfx=[A nbD,e|W=ށ7ܓXr(Am~JzF#g*'!`7ZK>yDgvKe( %(@u2`NthiZ_grX; Xq#;/٤V"G _gqtF^Yc=ɼJhy[݇>WGd! 8@ֿ&1t e(qbM AESUNE B7Qf<V c9 Z;A-aLvB{X]^}J tFN3쟾%Br}]YJU$ދw嵠e#I.1k[}8UAI(e3kv똑JL!r2MaaqAS?60't= /@Ir %,u& UC.1mlv9&љDɲB1K9F.yKXQ7{@qQl$b}l7.?McJI/T[V3@TY 8hA9!ɴJ=VvZ[R7P/Pȭ^5ou|tJ˜`[O!ނ:Y> BjSVⷔ9<J-!e BS,Qج! 
/ziߔQE)Τ['d4aQV 91,9P-k_פ=?4W2æ><%5ڽa|H"#KǴ|e /ZV_#湛T T&0m>MY5f 0Gmc, S檓/GŰ+JW)LY@;SAX1-Q9QiLaBDET)E-"&\6M)!W޹`:7Qrc9Maj*̃&%}8dUqK{l /BI5ܡ]˳ Q?>tsn+yތ3YZ,2W#UdOuԚYZ u+NR⿯z]A閧Û"6OǑV(aPCģ/c lm9:Fv) \g ^ϔ91q,x0)x3O@$׊ 1{нUzȒ8]YK4T"#e= Rc\'Ԗ( 2L)/ ]Vt\.h*gUU=dUBh+QKq$4n}V:M 6:Q+> xY,9' |No_|Pu0 M6X-PQNr=zܴk$ ^Ȧg=Ly$ <é#*ĺb/ z,$ h[2V̂7hB\iwba+UBFr +ZLbԃ?l4s7Yfpɲ9)pZ9nUYCobiq 9KHB%ђ& z ^XL\ 4 Vb(HXXřTMM :Y}I> ./wwnD_@[bk"ΈFn|8xN "݄bδUVL(=" vO#,,ӝc Mq-f\5ogcl"EFg}\%KZ`CU1]4R3Z̜O&hC{SF-" QJgFw7?dԡ8s\C߈^<y’긛'Aɠom"yO{B P:Zh1WCRBٞ>~"+`Ft<` |>2"i3ƅ}."+@)[14(nV!-իaz V 1`T?l%qn92g.-ح+_`msfC@Y >,2M[P:^(4EZ&Gx9m,36 (Qu VMM/-D p.Y w`Y3,.iP@8ZLGsApNTklt If% GΡfΪp$z)̄Ԣøn|p3$BM\2U@)8zQodªG\ɕ*6 x"-t_06( ܾt UJP d_B;įy9r7"Ap!'e"R1~!B r,nm5-oO9??۳bۏh/09qyb,ԻSZve/M:$,~nPI_\|c?6:'md!JžB:擉6o }g㮝cW[udf͵/(zDo:"zkt+H  TCgVJ: S1 hev6NuMg8}rglg"o.= oVvx6cfpk{xϢ̌n$D"rqe;1zoͶÁ%FT$3@%G}IfF/4F .jfp]p NL~un/,N,%1O_JUb< i2EtZ<ץc)!VtxXDƾ40#Yc @P kB@A2sH%GGQLtۥQ+ \CR:)$nAȯ-_Q=a\F UGr!(aQ U I_w<Ԝp(ȠBVr_&Fkmepڡ6:¶Ahd#= hRC._ x܈]gEZ\ƢYm wy0_˷\Hi:p dX8& X/!` 5_c"겨\Tȭl+nڂѼ끅2DVS84š/U}!ź)ӋVAŕ拇#Uj9,Mf%A Rw q%eޡ\=''qk+- 㙼&yk"Gg1n$!:4Kr;J)sM5jQ'qFR0X AX[6"H|PHyP2g ͛幱_e䷻[U2"}(ĺt !d۵s(|fy$24L}":%ݭ3m/Cq C5-)wW*xⲬ68:κk`)ZAx9= #&Yv0?\I%c֪aQ t~zLro- p-rhH+ Y+X4^*_#vFBx!95ly쭽劘 % 5Kj" vϲ:$s1=RKQ(G3S'3Q :D̆!U Gm ^8J0z95mm( $h%Go;.6WXg/3"\6;wp,) [)c%HW~KO^CHeTtqdI A _obsajOUEZ^?4oI>*|7=|i8Gk ̥=cƬ̀ HسTMIG۟%-v!Accn8ŕ##Mw1H&@ qZm]kxad 7p Ng&\])9H{a@p&"[(liA7&8cq8?\bܘǘIsprH-8=SX4Wa߈պK-N/ǒO3 g\EɓV`L|_4DN%}V(_iFU[~ivado[Q\o[1\FۗGRc CļSΓ_8UljΘNH ޤm<,uɡGǰ(;>q3p)+!yB6 |/E#8pp<4FI krɏY>e :h(C}( E.xcj$"H*b w!`{yw@):aK2Lt~!R L=DʓcG) oF\kO( EBȆ*)Foe> 빠y浙]4# U$/IuTM>A9aJ@ԇqIuqOO:R?2gL9Cu|:\(,>}y.]WSupTΪ_&RA֛zW[){܁nIˮhL![J&Ghi֟3W20=wOnl4f ye^lY#2  XH:2oMOdw&G"p 9Gzlٍr DpRj[j0 @ZzD{'_ $.Ug4Z a =mO|2}a<Èe&c B@@|e(ٻ6Q5mF*H(qP;!a]B"{A2qx"B 1~* й<|+.}Tw`Dbpxh:K3:n3{ ک<Ocs5"TI0/m)y|Fz0F/F"tcNԨZN_HHlQ56> ǚX+=},ٛwk]$@I6K.{-钬0 I {P5CCcS̕k"KtKaF\@jJxWdv]Δ|:jۘPHL`lj1H}A5果h.dY65ɤžmJϪ&>A??) 
e$WȆy;C~~Fq#o4a~{7T[ hvS9[FҞ˭Ni`^ ߭Ƚ"9QSGN ^JbKII(7O4#+D ձT']L|6ix 0]2Ya;+ ,!TG ÜHV=6Au/ p`fACe)xJN+rrfd[n E/,f &`"a2Wc*1#"=;.K}b^E-ڹ.^(_e߮CG,hA<5|f Ijƨe5A2ټ[_24; ͜t,Cō(">GLdF;F8*iL#1 Z ^Ir"ÂBP516U$"-`v77PqKS+2/U\z[8RT*1}{:Xeql&)ST$o;({&VfW4j3G}(J{N dHi9hmyna,5x2{ ^c+ɠ/4~>~YI[P 5^`mSd" v Ng P,x-EDH,{UK "QDဥB{k55O?81 ŔJ^A6B9|}"AJ wtu]B9"ul'=m*aH2;$6 NPuYt 1Ϩ^U RP͑cȷ@HB̝A BX1hPh'cChBb^)z^$x5f{Kƛ*b(GF gɬ Gtz|&PxZ1 .bĠ!̃6s*! n h82"I@ ]fo gb|%/V&9?\9-԰]|iG]'SCFNcqM_e )+h d`z<4V\Rрkd!kt)|];@@&1T!`ۚ (Ν1;{,7/6jĀƎx2M5dŸ7'Qu<7$Z&slhVB$ج)_'ZX!^,qv Q\6iB_m wpl)1*E$ȴ!BoI%Ȯ]ضЦ:b4eRdQu.3Og>#"Ɂ QaCdֿk:3fޘ[;g?@oB<#~G>f!7+t;ρݑt!`Oz:pvI`/zE d4kofxlP}M ~6R-XgjAq0QV` 7>50,MA6;VjPSX۲ݞtibZ5=ևR kL1ML,E 52rbiwTl5.@ z l1Og~ ^N 9%F?  Pdhq5K#WⳙnxPDԏ?? p5NB=uaݨv-ۚ4{yq=J$ ⻾ƗVb 'l6!;69CUgy)MAW4] n"۬*Byf 6EUv 4[+qWo%cziAprqP,':b= %2&W?֢\ }KZaj,LE^ 5@?j$i,ػ ֡5/Rj?葏SEM\\:fӑ5 >Kˁ-f.*]z%@iU;aߒYKjxx NKe潴~NM~讆C+NgCr~݈ؗHH0_^s$ ]Vi 4-P[@n$Q6LdTj;~ʁuK2>n_Lh_> ~LU#2];E(`8e>c1ZON ">$P)xޮ.LgrGeEr є@Iϝl@DK TQo5h\]P9 MRH<41T28<}0^DGM;X A$_\;[ӇAl%%*6vWm`[EhXJZ5h$5ِ/?zW/wr@faI"FJ̋wFI^EKυ6O}C,GSO|Avm9Vy/[# yf^Rgn1[j,5`$⏏n [~ m)&2gGzx(U>D.O_XaBo.^ryGI;,e"zCd!i%ΨSa: BQ|)&G{sc~gPd d Q WcƖノsa)8SոVn-LdT?7GDaj#I-b[O ˭AFKR[C[b{w dFSU)MsxEsab><ۀ:US9CW@THBמ A %͑"rxc) fFb(u2ik#Nj6Őb\=kʕۘ-|ZBY|3PbyO  oux( |7t495&e0|BQc#"=0WF7ΉBktNtGhiA"'ѫգ%Xl^glDi+ ́n$=>-Cإ;"E+65GK4h:认%$Њ'"IdyTx9\/H0R0{V}>(3԰)V[72nr;3ZIi,g^փb]Im}̐6RWmJߋzdO3 !RgB M6ZY3=p{MZpPPtUQxNkf^N˿)&Fv Ce6Ņl~D 3bsK~" p6] suoHl% WE-Ӫ4"CkZF]`~O;HA"pnjRwp",Nk>2(9pGxDۻ4iaiTp2ݓiOr&\ ǣ{^sZZgMb.iw֚7%koX!;2 O[=L_uEcs -2GGƶGIGKj4-TG) == o'~uXܴAy='3#0 iSDfjj[O~*Tg/-= "%\Lvc?Ue0$+z~ٛSǛb&\|鱲ʠzxr WZpG5ͧ%3rK5CAnJLHE"Ztrʺ:fd5!]P$(B@)κ%`x2C:$If { eRI~<z:p!y|3}VNJfsou-4нz8I}(xRͨU}Gֵ(0vX}Q-X}XCޚ_{dZef>NbX%"(yxXV 4DLg^l7e^gC2pJʆ&~\+4j>'Q:[_۲jިc\'mM"74<9g 0.3S1ńtW龌ׂU`[˫.]Eu(jޡMlW2"camǬŲzL ,E Ip)?8w ]FeŽ~C x(9-M\Ь:>mwCS Ƽڒ92rT5'2=\[o>SHxu>4qq*/BceW$2udYz Z>|^+vLьYtj*^;d6L'v wo0]lTCf`vKFF? 
YAgU|Q|tML'=ȜE6>- _ |bwHjb4û;=᫽38!!%QF9->YʜO'ΓTӏDkM!p^[8/l`EWV=끾m h7Mr9 9ǔʵpHkt`U^_$-Eg&*mMX6v*3QA{ ' }ŭ aR1UL,д>/c霫x<٦fri* /6y}keˊJ6}KcOTׁ)\gḣ{zWG~Ӝ=0Y@:5VuaI=.лQ Bo-xN7sSE_?3( tEM*ɲra)JCIIrxoA4YF%7=UsJ@BLs^# 9@ޘhfcdNVF-5rK¨Zߩ:.`l-U I@Bg\KZ$*`tTN{oI*限#xK”8>bs)Jw]"wR ''{ِL$筎ӴPs85}P3^֛۞]fԙy5MR1A:zmDe?Ua!㩖dgmr=0eJvh.=(ax3\:gY=^b[KA3mv5{DguJ nIdUD r8%S p',c s`14Ag,CREM$Mt*UO%56m)B%ʜ_F|N+ 2G\f~,0CsC3Ͻpjmh.,Y"쌬 xl@7c]6 ۿJOtӻϭ(шĎ9pbEISǼnGWmu6]v_mJ4g7*ͫJ_blL=W 2L 2MOIw.T{]җAr8 }MP lTկ?X XFd`U*j 2{ʧLxݻ~Q#Ar]m y6aFE ZC#C]tȀ43gvl%)+ıvn)?6^99u3lFKQd÷ U׺d鍸 =^uٚ4! kQi99(Npj0P&JwxKJTqz$ vS0n o7`J']=[c1I ޶БkmEp\w)KE[ c[,, zZ;5Ѭu 2WG5&* YjE'ʗbhu[ \ B0}tH,ϞְpӒDY\-WP KKԸ$b'/Gu\kKs9 s"s jOhǼEpado@k|w$ы"[˥[3iUe/i\4*nAq$*y;Yqg\ 'C=!RF\MT>> 1 3>S!s,k I 1t&?3%O|x/ɮчdy@ !Th3TfIM903U Iv_U4{n0ֈcuCDK\H㦧LU.>hACeT~6jJ6uhuI0]:#*,[N/gGIi6:[oϗ,7iYYQvߺ6`aGV {@>(bP6R Td|=.V-8ۯlDPܺ!]t5d];z,Ǣ2V7)~iߍ&Zpе.7x mÈY?xa(Y7& KM ΃wJFiDD5먔\\e{A| WC(exgXxUNJr%lR>9e ԏ 㺞ﵒ1|7 7 ~7>X}E0(i_y<1oqԣ'»:KDJ ( I#PёM][';:A*jWkHJNNK ;h\rNJ\q_ۭӤ3? "mr>잻DaGK-ՓQՄ:Ɋ~PLosʴ\Jy}#?لK+o6'HRNE|J1C Se0?1UUQWkyˡڐp3-˭QpKnE2p,Y&H}g%m]NrLҳizwΰtS5ۣ@W߲k 9d'4柜z}y(A{}\([|.g ҅w(]"Y_|=%ٛu0lȨ>Jm*@&μznvա?5-=)&gլE||yx:{6 N7{+s;KDZ Z M`894Fz$(c5N>-IpI"~u' V;+= ?{'M11j]!al|+dͱ]D k_ɑ$b @yhd3(xQLHZڊÂDĥkiiBųVȡ 'Aaf~* m.$[y'eT)>sTd~x!RsuvOyKݣq^L;E: G`bq G 0)x8˭&(4K 0_c2Jz{p +VK, >8`Px?XX/Vr -@󛀀]$rkq욚2_TMЯss--wVb3r&YG{ bts֥yg5,NeavVU|UWڅ8c>ˮK S=9DfK;MoC<!Vmԡjgx/&xi t^0n>N@Nj5W9I]D>Iy`}$,ݑ L mR'웭Bv۸ĕs];g1Qoѹ8TMi)MhM=t?U-QC:a bui7c+\bIfۖG,]+*h+6QfmH1YIMN"Q5cnֳׂG$c!m &mV=vx vvg~Lo8l\—"T%n)fwV*>rvE1pg4ΐ71I!'\qj/xÓj۳[)`"S\*]8&>Wn>9;rdJhcڕ.3ax- \P| DHAzeLZZ *nEA8^'E`FkwԉUi"w[ѣVeiHrKu\hP"ȣTU搋JcR8jf&δv@3ߦCKlyҥ[,1 cm#QΟћG|}[ d5k@! 
F3V*@+w< q Ow^ uND1!շ*)9T"r HWzY*~1MNyue*{rⱽ.a1^~ޣ.Ҹ$>s:˾`s%ˍtPѱ1#;ͼRO쒿yaGy\s 9b8dƀ $k*kFQ^qwcQcƣ*SIV6Q[[l[LJ_H95Ń*!whͶn8|EnW$]B\ z>OLj +s#=Ԩ)1ii HjC.JDUH{>Eb^ZN 9,#ayB1|X]%gH$܏[Bs}dI ~0bN"|3yR}EyICbD6/)nL.xdB&l\)jQ})RG4lp؛s>z|Q@f:B&}؞5č|I=F@٬6eCΈԲAs֍ݝXrw7>OiNz,v#tW:L1 5473eϋ\ Q9FɑG^/z!six0wb+.}ܘU䈼 -ՆX4rQuj)s(G1%h|FPd#숨E|ž|zQn"hh(tL,wgz݀@.@A;Vy~鍹7o^WzeQL^`S `Ϝ/%=A3"_ Mx/P xR<:)jUP]. T&L=|75pF)X+DB XmVW*zh+?mwgasmKy'qGJ]S%o0kF r9\GsstaZQ^)a mB4{!:,UҪF0BT[RLq8 pF(cV DdNX Nc$ Ç6{8̻5y l 2 !$~z] Jm ,W(1,h S⯠Z6(4OBXpT'2}KO.4ťڛCA|x^K^>]oKt̋uP& )U7o#,Tnue^ &:ӝ`̲Qr[{X|=$B'# 73?uÏ0{K2og >2H*0!RRR$i֥'Dgex<io*d~`ОTʸHlFWɶvQ1dMtHl3q"k6a"!Ky7Ӆq~ҳ$9/)uVq|0 _+.q|x_op6KQ93EG ${j=0 2 OCRP&[*>! ի#9?i AŐB "U3@"$vmFmGv,^jEY±>a0H kJErf؋p u hX$q$R[)sDAYZ/&Lh0Tub(yaۣ xM?923VaK J%;y͚6ʆc^!\f/`Xτk4ߦ5 F@y6s]KBϢj$J!dEChPW<8Z[<_zi2jl[Deelno"Hq>F߷_a_~P$un9[EZk YpDBK{EPN:[ nspx(nO{6F P0v_-C} hG`kfp78iDQ:ǣ mA_TЅtnw m]h2HETB#3 0rj-4^$g 6h1#Utm AXObWR4c/% y8y$9bfKA撯Q`Y ( HZ- TcB,a(o;W8oNʡȯӧ"gT}$<`BxL~72Ѹ&o#02f,E<*="oNH9)m4o{W2d` ! 6uQ[ *\cBO&Eluu^Hԙ\dψHPe4'!x\^J/`-$L6_;*v7@ۈ s_c7̝IjM2X¢܊GȘv"@X5KLO/=ůͮ?Y()DˡzJb;ϗտLQq@<>B8dS/|TywdN'f&OGJ#rjy.*<ɥ9V ҅:fQRa6cfK~ļ{ߪMg| ,5G͗`1?rTp1k~Y9|l&cTvIecFGC[]s77B[ks1')oech ]14%1͠3y*w&I=D? ]1{Z\+1e"NJQi;KCg'5~2i{pddpŞl%DG $ߗ)-.kc+8CVxf:Ֆܽ]%Ѓ`yIX}Z`eyyKfJVM1-FMݦoPm7(_sT oiKfk٬?O}eߟۊ"9 B t֌FjDcyФRwM\`^P2#$Hg(( N^ TY=h #5:B JdS~Ź*Pc[A,*1fiv 5Z;ٌOT(Wl8UjEg5N#&8xT0YR=r0eYqGg/2",Gia'۝K$uQN807 `(/&΃4ם#zΧKe>Km8\LXBZ E̠] wb{pocc7J# V\,e4kt,,,ohs{ۘZ}wu ]Qd.qm,V̗Hs.,u(vB{js84@Aq_W'{8AeLz4LTʱLr&ˆ #FMl`jxk犃Ӻ ;Z} ;a:||) ` 0($Z^Hp_hbs2pRgL#⸶qfES W.(UcfXBw{`*ps,#*648jIz4_pU]kf}q{ZWdWfFy_\5No068C]tc.>rG@(ә(bO EiN#t9g9v@cϤEaH5fϬ+ڷܙf0 4F%lVΉ%,XT/=uٵc;fNqr׌>[ ]G=K4 s#oǰe֑wu/"`\GYsAyݔ3O|sВ<0;JV"$TD}OKd.[=i3ÊΣqNI.@bj=}WӑuH,Re!$o.ɂu9Ey X\%D]$@wۺ 4* lE?. 
cFm3yIfؼ 7hycvXj!vVf'k;F lCܐXcq˯v$ݞ_l +";8򬊆BM%eh_:vˆxVCIrђ-)vZn2Hۛ3xx94 g> ?o ̥Eynϒ\S 4Xs$бp7սzǦr0k*IfS`~0 {03γ8@-+W*+_+Fp@{?.t4bj`Xb%2sjj$*JZdхg<#I{&xȾ/bW&lAz VqP 4C$Kβ6x &nyc\lX`C,|e/\~1(m`ʣeD*]*uKiqcf3%zd}vLP6"@6n>x|YD:ǂ=`gBc?=cz̄EvXNh-u՞nlm: }N,#5TL:Yu hb f"qY -A2c=͘}Fc28w$2k]a<8& >Ϩ[jgMteE€j9~ʲq1؀Ǜ5>V߱H&VAb{r5BO`' 9eZ[T69/ɱAhAuςhDP`עNI(a/IxPVy{:#ԓWQeGPI8Q ' c )wݻ]tO +Efy|\Xq?c%=F25zș/ $@OưpE*Jb_4[) ּr}cR8/J-Upo\S]7oۻmB4[II7:ha ΃ _" a01v3RtQeau)v)!>?TM,91-w2QfրxY%I:`a(!b0p~Bヹs&MvXp;N6.bp<7j3<7ذ,,$HQᢞ2c6$ d$ϳ*$U)[ԃ%h\Cqtk2n/Au),zlaِ ѱP,c[ĝ9Ăo^V=iݠ*DbbA A M* - {"dHQ-"G.s?ϏZ+(̄ERn5Iw4;|EKB)ؕADI8 C@){BQڡ)]!Hh3` ̉~ĵ1lOz)L5\?E.dt)_*՝"ݒ>+gS`œE j w(ɮ 1t> @禯Y/ (%y>lIGUp}c DžzVSE LNgQzqWBG߾x}@ 9!MϲG@ fvDz`$91?{(zCJQ߆?/lIqlaq|ðxp69{\o۷+(K]!K;Kq>aW(2@l~%O|:y C⢮>g!TMNl]˾5TmP>:ޢ|{ 8v,MP=qڲ"?Fz)Â]]+8jltRZ@UbQ26>pw}nR^&WPe$Ab3p7F o z̅I $R4Ώi蚋Eԋ4w6q]5qLh$]: FXJ| MQwq\ұXA pd Mm  lߖ.SIq1Sf17PqO|~^Qփ5J AsqE]sIr CL^Lk ͇F`ݓ___P4t᝶z;QAKUn!ɞz>1HH[T"R:-3Cɲ3|- 1 %[)_c2֓y Ax[Qo)V ȄB TyEz } $rĆ7 l/]e\VX [ K]@ig] cOpTCNwfsN=~X8U=3vc 0aĠy`kY~~s 0+?ϵw:͂:ntȬ?*xs "-WVFQce}KqP.8ff ]"xnV ]SK5pfG-yQ5 6$Fkh1?483oCw]'Xj3QA1 Xʆ[ը*צgj c#O1LGYm.h~}|녬-m6O)ס09*쉵jo7[dYy]IeRKyNBqעD6?]c2HCu貎 $M;KC%z>;EXKg_9jڛ Լf n+?tLcOA`&i AW2'-%ظv <16]j;V,bͶVUIzyGi/ Hh*!~`W'TK K֢Ŋhͤ5aiAѿ5嫓% GyfΛ)zvD"z`9@Oq:a/DxKvgA[\l o_gU#`#;tjZe/.}?{|?xI7t/X>y'0__V'ǘ|vpBQc!Kb$o2Dc?teX'9F;3FEb]'ӿG=ͅW[sP Z|4G W{Dž;ufHNleSY|4udOTF"5= v hd(i1M{W3-t}{-w߉`vY*(ʤ"MdzmZ54 eF^`,/ ~\4śEc1ZX܀,s6> x.}iLzBw ?;~:i? d݉#@6Mws?0)~(=i} ),͓BH#MG c%В3X]8,gJ .7L>⋚]g-^J5[L9X,R.V gTdaZpďV93 ؓ}}B"L"":ı=qݻ _>QfB.RxCb!yV9Ң4X/r!_ŭjPra?Ef' μ`~Z{xh%fIGWyIJ "#INq/"N݃Qp8XͬxlZgk Xi$dqP 0+A)Lmcه"<9bE. =UjγajX`vQO|M,Yސ40Xcp8g}D:he{xX\)M|a!H} g:8Q)AKI&6Oq'3 ? 
V9pHs?} m@6em$g;# uLZ<5rkC;ۀo隱Y$\0 <|Gi9A'M6B ƠH2|L_Rafe>w%/@BD6U{Vem` *'"O˦]P&B hb +oHYV(4YfhyoO,n a㝞YnK ˲b$݆H;P 2#رc&ٍ Gq<=&]iʕDs<Mʶ6 AxR"{L}lw@0eH Q!txwi #r%zXvawMZa%fb̓dV\7 o%XYfV30h 7ۅ<S[Go>ltŨwcbbh9M1:b-$iX_C-ض hJC"Kn(]|qF0Ni.خf ϒ1_S ^\8_/ʾ`M+ %ݙR`k-T5 =89xO1(QgB7y:S`p@͇=Mo|m2k^W&-l}Huzv_GWLDu^<>t.Hf/=,J!yAl,VZ7ǜFdK6A0'.`3>)!IxrFtUyYνEWYB]py^Zڒ YrD9H=_D,O1n Ѽ99ۗ,ea&2~I>'e~/d<5dD.!IP)Eꂪ"3sxWB9$^szkn~hzB9x5ݬ9ԘTM!|U Q3Kk J Êg}\/,AYG vH8{ KG-ly2i(Fv-BE%xWdx)sx7??HB_DQ{ ,тTR_9((%4eZ<f %Bli)oN>/qʇ?tÕd,}ϣ2$VCUjE6RC7/h3r~1'Gz|JK@P{Ǫd&uϧSB.]w`z9BG 27@`l!Ϋf. jq!st ~ ΂OD_m *4#NzF[/_h~]o=$jߙzϑ|ThE︷{4r2c)؍Cm5ղp02\ C!^ tG"Vvu,]w\,_/mlu 4Kli)YUIQǤ|=&k*0Lp,6F*s럏s˦['UY'?Q>}C*0>7c7ZXOzX&Woa"Q ¿@@(Wt/ꎻhR7YKadSiULF t9ėlm(6BO@#gsbaدVn '/5+G, ?>oY+$\!dեxGx@Iu;?sNZ< VB,uzk2h{dy֐EJ&' pKDw2q3ա%X3,3Zd͌X/eStvMZ0PrQ= WE\lZQj,8Ug4x2!MZ02oܒ?Y/Vv IϞdXW#h&73#2%'ʪ( ZմfWB7sZ(dWE D|Rq[{a"5FɉeGISo{G!!$C1r43v`*9 3:uhȖ?W7sozfmFV}M`Fzdb0Sh &Hv-Gr\R/\hQJRMJKZAmȗip0 Pd}{цvcLJT7`vdݤ8_+͊^z,~شI{h&Rc*J ]Gh[^mMzY?(USRc04 Qbn|5o B%4f6-Y잞 z$:[;O2Q}MzSL3{9aT_O,.MZmTwѻY!*B$Kڞ qPv X8D:gzA̘\aE O)y-z9X馘?9_| >޴ کCO5KANƂ^1ӲӚq`b=IMM-tLiHC:FՀܻ/)x}aEeLYZ_exi<~[9ӺOoZ#E+wҍe6ad]>ڗ&(Ø-Yemui& t<;4W9|YR il L׾HX#TI#~m1݅Sk5AS,(6 Mh/xيR󷱖A 2Å7:. ːa?,i#j읻)\\Qq@QY(<\X'_zI] qo1@p(ΏzkU]M.2qs58s5ccyJRR(74ʆ05N ΥoQxMQm:/ڂ픻K7BAfk`0wDž} 92'F\')׵" vQ4 ޭz9/o|\_-TxA)% QO-IdPF8Ǿ8 E5 ' T1) rݼE!V;NqhB(Z] \%AzlC}6zBiRn 'ܩվD@<%g ɜ5Ӟ{]$FMP׫UIF$Yƣ-1e6Eǁ76%l.]{QN'4cS癨xKۋ Ȣ!w0͠ƽRΨ*T%e{7^kſU^0GWt00[YM Υ0 l.,1עė0fCm~>a^YKa4mIJ-^Om?Vެ EC߾L<5I[F×$J%[uhhU<+RYkB,$D *=-mρ?' 
\XGEHTfʪk<7L6ytfvgvzȵ -YqJϨB{Q C8(늓YOS.r[A_ Bky2{ !*VVy&+Țϰ!PB6yl[c@s6.q,=S ~vUFe]3˞98?G#8v8!maTMAEW o,p34_-n0Z|NR!훃t߂ƠǸ8S~ iO"Ms:Ƙ=:(`獵_Җ%r\_M٥-O ̂w4~WDVCE\55D 3+ ||OR[e:b*`\$AW,<-ĐXo?!`;dOFNM)#mCM}'=1P->/ɱO =vh 5A➖É?n|?$2L+L,qP/;}%YG!^$n m?d{Ϗb$Y55N9op&NP"րچ dYJ`v4ʫ.%h+ T٪' d AF(g\@ t!9@S%꽫?}Ė[h54 {3:,8Eb Az dGدĘ q-w>y9r1(UzR%YStQ aAjhRQ)Ch6C [cXOdBDڊݵs[(?M;Eىl| TPeIWC'a[€DyRhzj Ot<)f"]͏f84+}:xצfqxӾsBDҒF -ODSuGqJU*g)2b=As uqQȦ7>*s0Q:98T-қsD*^ͼ6qɕ<F<$ҐS}϶v 8+ySi{~IL}IcB`T[SGQ+υ?':,ZSC{;Ad,ÐvlHeF7 kԣU?0jM8Gƅø8$8 2a>*G)ֶ90=W/廲tҩLΧ/MԒU7 [^Euf/S7/㷼=O 쟯\Y\A`gT{9h>HJPb;zU=`/-PWj $9[.|jEe' =PsBr]V& Тq`bۼ]ʔ(3έ%O;? l?Ss8>\}5e ٗ+vffrɍm0`b`R/HlWz4ͭ e벱xяUզqk;uF5xacr3,CC,nh'=Jm[FiRߧ 5sd ܏`K/.)Jɢ[=8=7_L[_Uߦ'xPU.>܇x n2wQPŜ>ց,kPğ2Czlm}SKC;򘭇Wإ>FS2Zߥ&;2@ v9ѽ"@傘 EIq4ɞs DH"5~S%OkIQU2kx~gXL┚6>+?a>"N5┚%`vOJV~{f1#1$P# hk HDؔKJ6fh܌$-J윣og$xa+v!W,WÇ$~Ȟ}Yu=ƏStǹ|mhq(]wAbGv!{Fxtsl5p,ʫxޱD=Jr AX $1UQt26ok2Agn+> Z?]uaF$Gp0iL[ۦ8HBtHPGyIQ`up4 ϣZ mkY ir$%l9|bOԸ`R@m|#Oܢp_C5㛪*n[_όT\e5r`DH'sBݵ~] }52ø~)TQ(F9|2!8^3:wϻ h^<QVmkC̋_|@=Pp*Q&H 1}՚`?Ӿ?ep?_p?f/RmS CF͓nE&XkM0?#i 7L[\*s<jA[b-]%&E2| N^`$]恼9P_g Zf |@Es7~K8֛7[G}]et[_C?g]$VPi^|n"d0D9բ=˿J MTgFʌm6ғJdf_d5.W?Q!Nךy@ssmr$l:namy*ot9I%k-8(c1PR^II@CA7k_Z5z]!$ks~yol~x/J%$P`74ifЌU@XœͿ 7MdIIzrnɷZ= 1"X&gP pW F<Gd{wNiw+3“+&x~wC{!v"?@yC[fߠ/mVL.跆s TBtݬƳȴ Tt~gf-ycSHV Z'"'L49ܨ\)IJړt(h)zlUɮm)H$C&Ť9!*6у-0QEAxǼt`~8$%eBD ^*BIMf,iJ܃9ż4C[XVcx\b}\:(H.I ?#Ml2c kBb>7!.뿵uNFؗ1]q'^r1C.]Pa7 IB2(ve  { >'l-J!>ʜ2/L@OP0Hnk*Fl381዁eZ~9ysE!x?.mwОfy |+<ׇk~|0JV:veftsH_c\+:+1IhЭ<(Ɵ6~!~1Oi,"e<B3l=wwy(}'Ka2BA˺M\Oi 7ej"ە Un$7鳃mhq;O "& E(/6-–|轿^NR 4ΠmSlu~-.:Ca<'(Uh#%&L bz[ei*'EBh 0 @72 p9&媄f#laY~Dd]<^  M5Ϭ.3~ $0;dqxFqxeH$9&]@ bɺ1r:ȸ(ʅwMlM/ 3Z_QZ-!srI8ܠ>|̬SvƏD!ŁeaKa SE -mxeů&lMB*p1jS3r4DQ:&Y3:sAF"sS0]r$ q|쭻o[8;mxV LZ$ f:顠!x 16/jڒ'f+}TK|y;];ANqaojI@`J`*Xg&T~ ՎQȰ(d^Z:^FĞTnO;k֑=|7bN:}kV&i\-h|ޡ)}{8,]3 %z l{Vlifk7'w&#r--~ȢlXr2P7 ~JsXDO=)2*wHiL}>}HK`Ga*Y`Vy2&t^޴>yɐ3 HFPCCfY>Gc~`s1c6/u3Yۼw;u+niƺصٵx5ۭVCXR5 M… WZcNi-(LzwV<)У:;Z׳2Vjs>"kU&z%x״E27SYސwӌEs;]>V^R"Ybq$"<۟~=߻qIG֒7{~ZYbeimߔIv 
{o*DOF0=2vg{1]yUuNTں& _E98v^AQ4!<}M08=sj5ʀq$ZY`$O)7SO[' I=c|Cv膵{xQ9Խ*Ɯ2!Xowzu;dQgW{Fq-U$D7yA~C k{Gd9.է4„ a~&)#XiizYKPy6UL 16tpIxͺ r=6WPʀa`D?@CCuwǮ*3\]*q?ԞS h!3Б1/2qDXU򓔓 JTV mA_IBkiFo E2P_hY<-Š7de#0OԎ{ "NvR_"H+KM&*M;n&p\Xmæ`3NH<:"ߜk t~:kaC@GjuO6/.7$5m/32D/Z?\H"f{:-|\s>kH?%RW-|PL%>gP(E}"((g[@Yݹw9/G4.י>,5ńu' Hi!DzM3hoJ_"KS@֜ v 6Yp ZxB#IW(δk_o!jf,nvhkԖDjLkL"2H67Yº.##kS}7̍=|\&\Z%*}\ ]z1o Qt}y"~croowCsnv(1ڱr3J73|)Nqf]10- ;cxAWlʏUrJ ԻCf3k MR޹/ri+MŖc#΋Iv#_է^rTxC Oi}+8,8 3VY M8RȆ\IҌ"/<ʹЕ8~/wt_lz&r( 㻶gEݐ5bp[:Fmj3L]p^i1G1:V"` Eoh} ~SbxkvsJ?$V|G.@]Lyͧ}AFMT!>peaeգXC.|^~E#O8A|_ C>/T޷W-t>Vzq)FXR1Y{EyK*7δVō. P&^5d B 06ZLF'0J R06Ǿ@B y G6 xǼi ME=M1+/{f53I J#8?a\?nK[3m-A[Η>ػmpc}5e##0;iUH f1_\$L#!|.fe6M•d=8ms\j2oCw4SGۉX'r„_'轒cQ\TKJ>PuH0Z`9B%:z# ]c4W\tZi?8$ '^3pG!%^Rbb(||j/)d̜ͦǛ] @/ExʁxUE|ٖg`W$Ue/v,$#c8xz+a`gHm0kNn-Fqj)ZŊ^/(Ա1@+tTtA6IeK=8!.c|, \d c`ǯऎlh3Mu2!2lҧ |9^ێc6-"2Zzy~{CnOvl#mԀ4>O&i:PA~ 6y@U2$rg\dӑ'L)iۏHl>" y=) 0QVw xL8=Q ۿ)]T?OZ]Z%N8!BVO4UϜ2YtiH WXr5hL*jv堄v㢊ZC9kN(!yne"꜑SrWYq@+ ~[FdG_Iaa935b` cI ᳵHh|>2+dλicz8\!{.7eaܠxIc]_q>1)چО~!AV7;gإZq*U}ъݿ+Qu"V&C(g q$WD(7_\k9})BA%ЈkdZݺ+ q] kxkiA3L_wۡjqlĥf,xdW +(=$-y R<Ք&j V\#t_`bIh}9WRR\Rx6!-c"nLvۡ(8Q./I%y 7qj51ycXa[ __[7ix_?ƝЗ"TzT U炂L0:d8$BIj y!WT5qژ){o[F*,Y)=> ~hAZVTm0ScY{xb}1r6œ(,Gɤcp)|iOlb3PDHI ҦqU;?KiGkTa^GW`ɄWvEmdʞjNNW+pZa T#m߻lEls!dkz0br75C"t;I%G$hw>pAdJb\[ޒNszdRiՕXi~%pYI[sp%M t#N64pדMQ5CW o'`I5i#'RMW79A |4%x0E%av\_MXh!-tEǐ8æW 8%Z@H!(Gق#/v/h6So<<3UtGוJ\vD23B1Ϥxe~}³=66}70 3Je}&X?=`)iq׾/.Y tQs59UO^`T~t?;V ?OYF~$h%@@"/lpCy|ASTI_ d,`/VK59(cBE%$5 7I<}b:|f`VgM8Z H`zݷq,!!C֣Q"̷hͅ,& {NC'D(MYe`Mp8LC'ZV_˹뮡6o| 4Ϸu۫qc8ev-^y<]NDI猹ekM"3]E4Dc LP8mrpg_4hsM̔w9y uoI%./WdT`UL}]t&'G@PN34a'7creS|ϧ3euSOG[Qn}0(\Qp˲en yoH b[\Uq \{H΅v*H762#4s8aa`\ u4?%υM3F*t ަ]ϧ,o*c0z=!ETuU/za,utB(I:CAgG{s^ˬ"^ (>q' R;m0+έÍ =HwBҺ-Aܡ (b3 itH66:*睖rqH$U[7hX >UWВ~*ԍhA"8xnzYjrY,Ή҇B# HW'suh%b@X-؁=d}iJ@پKeG%xyz8I dy)$r9[K j0oڎJސpQпq4r}T`kH:ӱһXu}|ϛ׽N`kh@iQ2,'e ⨛2a+(gC5-’+=Ju$:檱+w/oixפtngk$&P9Z3Ƣ#oGоDqһAવԣ5Y0jR:In'uܹ]4y6t^tŅʼnѤeQq2u꥚]2+bG$(Z*fNqކjSz*,umP0Ț(( 
Ia:?Sǥwq\B!'V*7=Wk9:Ve_-ݒHyUƞ6E0[zRR鈳uour=_CILz>=`Wvr}zU|R[wMd{IZKG%=| %tgV e,Ys(1ߙ@B*-WA Cgo3~fQ"u ih9Ȝs偂S?f IEopIqs-peuZ@ >.337O r}z,# =W/_1MA%%-ݼQg</0pcH{&b^gՓ"wL*ltݫpF7ǡ+DQj|.7T=)4!0~ h8ϴ|]JS 猠߅轕B?.!Zd vJ uH~}tV9m9oCnх^Eb]2oB:)2N7 췀xB4~emF$&U`Pjso 8'L51kܫUCV1tXa=}r{Xf^ٯ jg>זx1 Q9scSpC 7lWJd@=A{|MؙY%  #0#~{~8nD}K(Ӄl66v šmFi,TQX B_QxnbXqK-SObϧIJgŤ턆´- ΝA2ήpaCa<P$_2m3ׅ ;hDUlbԇyt=7Q"YVOc[n y%ů,(ӃNJhX1>C\6} 2 #sƎ3?ߣ}}P&.{d30kT!B^ԤPѼtӴ_Ukb++zQUg_M 8A\eK=e@B{uFdk !;Eiojé.I}?>[&3ȧM:6CHy#XE[Z]ꕀ: rm^Tc-FvUpj[L/N `ïYk{Wi?|7N1R%6N%1 +X(=c-IU bbK_AB^/e9H\!j8 2Sj72w/u:idvG$U(ko.` yx#7Nh^!D?ÿ=Rh.mMbes0|R+C ux@ClWGB-}z訪Kt)7zX x K:E!3?ʧGľʨfGR+?"b7U?S ] ΊxPx9!q76.\`tSD\;.Zj*$X,g$NݺlSXAnmQ&ͪOW~l>;PW"p!jjwYd6YBBrgi79̠`"pBL,(Nǔ| k $Qj׶P^mH=#{FM`Tf$5X؞ v勭2?Ƞlbmv/ū =PBaom#bV2_і̟D O'ɣޢUz_*҉5Xe( C֠gvاZ=dl^,юâK2=SKI"Z; 9*Zm] unIw~<6pzށ=%3iïOa3Τ u8>bslUMo)y͝ZgQY'D}ԒgNjz 1i ~ܯ `uW"0VIb9 Fv_e{xwR"P8 ą˧RY52We9D) EE hCF /P5OT#lh~r+M7,CFP¿΃.xV8H06=^G[+Λzcԙ ;-D'p q qqI)j J~+s B^4][.V Bpt[Y 7\q+< 2c2!CV>c| r 츛T r]k%qa/eITmc8! >AN$OX"1h7r\Eۮ7.Y.EJ49$h'dvȳi#8$%:Ąs7NhKṅސn:gYqn=yo/FˊwO2Z8#|JGFXz%dH D+[@3SR4~A=4}UQM^e=T6=/USfhv#gUg<~to($ykZʻk+ղk7O<[~}5F1NUOyQUKcng_vwqE7Ol2xx2r EemPt}lt۸o!L Dcrz;P*%n8T`VW 7p5mOŞߛ|rNo_&ݷh?8UtG+)tu)ݚ:Ԣ; cr>_V?V'&rS3I@b}2}mʮÛIoE˛Wsl{i-klgpPh &alFb!x (i'nqw f(RtJ1C$0F>I^[ ;eq0:1݄.Q|tEO b2^>E7:?Ҕ*n\WF2s[c 3JLឲ8YonrN+Qjw.WvOk'gUK8~Ivl4SIҕh^ %.c!ɂb2h*[/QjIHUw.A{ke IP w)fv &g&u )Ҹ P0>4сsؘ1?=cvGJw jؿ#.H\N&^<*L 0#tIXUyaQa9(j]9F {׽5]~$_~6ilH4F缪SΞS~}{\"m6AU"PAz޹:N9q@o-L _KVۨw3a͵M3uoA9-"BGƞl%Q3BJ/ڱZ,a50)r2|Z)J] ]?ZAӕ)hU=nqnR^ZjG(oY@ /wlMtQTҨ{HÆ?z$] ʆfGCh_EPim<92fēcY(PpYhS{ ]~?R:zo3! 'Lh}0Ԁ3' !,P%LfѦy-1>k@[%o+އMP0cv/1?a[|XN8Ys^s _xjP wOi? 
a(ُ&i,E *nT6r'=_:<_Fh!(t+Z $#[r6ǟ3ӄGvV%$P.ɀ5WGa/IyL<:FY3f[`s!HxH+*We.nݜΈ9T) ?.3ͫsr֚^*E6{{D>,ex;U풼"[@bG4x"ឡ$w_kHiV 8z#i0]u ƿ3*%3=4X-WIEG[ڪ$I.l+&oʾ|LǼ# AIn_|0`e3ǠMk ^ٹdhNK{Qdڏ}T?n3fz'fjwUw\/M6\e[T >D=ga~!=NlQmy&[Uo_X 6l6۔ sƣ3=e&5 +J7fHn  ~dl9Nu۞fbԑ:$a6h> dK[-9t!;vfp(0Hk~Vʿ-uMM8?:lh; o,9b 'LфEt@^i8]bgh9uՒyvo*hHs臍!g.>.zѯ{>8eDݸR/LUfmt y9;QeIf v AO>X5 /!G!&:b>8B(uo;Lhg>&0rк>>8W 8^Hs#LQPYga6rlxdc$ѵ$= 1NEFw;RIxtA I?:'現yIU2}ġn ( 3yhf^zОJ7s70#S(cS}֌k5Y=pπfqq?YێKS]Sci?OBږA$tƼsnJbw1Oou3r(e<Z^}fUX?凔eLb:鏔Jq7 ^YoDŜ}{WnYE6{y@-dXU6 7^~,L*Wm*=*&mD 0MN8 ߛ𢦼4-IN7*|q SYi}uzWћ[ E ݖxDA5 Rkvj7x'r+)"b఼[V[kY |`n=ޅܶ9חm<ƺ{WׯN1? Q%XARL2usWq$&,3*+}V ]<|8&0]/%@uZo{wUu^9.Fw;Jpگӣ8tiVd_QT.ํ2"̨2q-^ nܵ=dÓNT'.},ULG2+w5ǧ VYxB`LUʯU}ռZkG8;}f[I 4~7Ak)k[]*lE\hե88=>qA^d%1,\zC =N[6'u8TujaM T^AZZ w.2tL:煊Jqh>] bi7'U!#Da[rGȯIk:]gQoQ \W3wvRO"KVW _9&ugÜRY@{U⯆h>ݻe~i!bt2+w#TZ N!eavK%6F[V*8va-Z SW ЮWʡp..7DHz{n+C~ɟH!٤ݷ ="j@!/[ [Ӯ W l͞WᔐCS$ҁvX <菄t^뵑;pOzGx?ƀ_l\s>^ $O!KS鉶vܩw|>n"@:HjN"ofyPުc? GoMۢ ӊVzx'ڏ/()nQ73YŖ˗t9J.<[}RSn_*2Kc]7]T"c 2T-3[ GGCe=TlW)@**̜MBj2ݤ|PGQ\};mZJmD\*~QwOEFi śx}t^4#m| Ҩk~Ds@b Mb#S>m&AO#;I~}|xq]tܜzfr U͂v\ ~:C/Ov䨰I-}Ó!od!!d#󢗢 BP:jd6ԙ"abN+=`^+~t($s`4c 6s +EGtqzZKLt#"㤐MiIe_G{'xAmu+&rVYrk/ecW+(}gTPkR]9r5Q(<0m̱6(jv n0ܑ=ܨ+C ?r^QqbQqcT]kW {x%w \| c8rwAYaijT.6Msd yba0dPn̤m[1u#.q:@p֮oǶ ,%dfpqʩ/pQp`!n>|㠋[|ogzTBw\" P?`. HȕE^ҭ}%mSen9 a+魑hY;R=6tN~{㮜TDi1#(9Glφ'o\mp3:4^#2x[LpPݾ8Ț%mÝp6:&aBh1dQ?֒:ob"Og>@T2}sgp~hFGXe &)~tKeioB{uH̤("5.&:,Qf]HmytL,z:"Mir]!ȟ\藸jc<㊧*FDlrzlMǷʯIgH,e$nLNX$.T > !gd$ɃlaNQ5'u>n]KA4eLvlS $xHu!&ZtǑ2w62 ZL@I lfb\ҥ=iMDmuNU[WU12b~v=;*DUdч#wG^'Ea7(REGc"sObG/m{ /.@R'Rǻ~'>%rU\9aySTÌ/? 
Dq4kgO-T?p.a`9!i,6`J.ZPf`u'OEB!C((]}CaP~(j8%.DӦRr!ǎ/|7 peH3ⶖGdjǙRbћʖR+53l*/;k̳0k374d f$jNl6FxYbAp_);z#$Wĕ?"wwUJFhx89ٕFZxIL`}RJqrV{.س|C]MYd,3ɓyeq-U8[Ձ 8 1{%C%omKm̭$St;t /)S$ex!7a':[iOvZLgfbF0}rbT v hq>f\CG8 kv(f 5ux|Sd/֠ Ֆ{o|l]1Umo6WρIA }xzI/cI%ֲ̉S-[aiO]ŕh>{zcn6]^:>Jeq*d7ޢo8ZQ]۠1^S eqJ+qFS{EU/,;q:.&sY" o`FPaDJ"{v}( r׿ .R%R/+wn͘*,MPB[dE|k\G&61>qZ:޹A;芎sMmQtkU$.zv\~J0knw78 LE;GRX GtqSu :)A=Ck} )kw4Km\ Q t"f[fgP6}.,Ҁ ߆]ɦZHhic<諦~ NA;!dL&<.`׏PVBr5< ,>3MkMjBޢ yvy^0S^C8`{=w) 9|N/z8e|~ز8_PPLMIh[GQ j@/q4a$ RJ+1_i@x68BN~͐IreSn{5r.Xl^q|R]r( MzL;LxK)*b۴(.o m /I$׊E2%Wo=xh{Pd4#9fҴb6B!8btV+9}beP@,};68OFeXgTO$C(=NMkŭV#ȍ= "SqH$#U*6dZ>-B(29jʦgJAH9@TLJTieIb[1͸%L90ڋ2#p~.5~.!l㿍\fг 0jn BiudXHuY7h&N6>婥 ݥq~fHBdt@'%obTn/:Pk)/-ݷcϕ˟x"3?.h 5rGKHw ;cK,w›ɾpXǷin67 L]Ha&9_#utsy@r`;Hw@s>IԸrfޜ~*fr78wڌK@e =sS׍KHoVs:K@lj:d'ǖV9,; Tƀ&kgVc`) ckdld ۻa'(q{7S ٝE0:9\%Eq'ѕ3NyG~>lTUU7u>bH5O=piArrw$+H87uۃ|'vWxg//'İL$x\:&,g y³WiA`xW|0GzKaz4ŽL%k{LOp-; 9 O1E_jtUZ{Mq8Tɦil)+InF}=Ս6]j%H@;Ja{}.EG5nR!8=cAUonYa~'߾&Ja^ݩ9: %|?qg7O}^TALSicph%8Nf|[lp!=dn5zQ&[['?{x *S|T/h9MZ0|Y;˽Sl~m%PjͣHn, Vm)밄2 #='QFaǍmΛxK قZOj{:V`wM?Z9D.k' x{'ȯPjOwMA%o{B'v7!c}?b 0][ :Eg pHrv/?K-x2@P͈Ӷ~ TTE Dy3Ȁ3$~_U9Ӷ(rmܘV)qժ11oj9!9Q] Ω0nu niBU054A9$; }nkz$*M؛YI; l…Q5uc AmҎ`u#iI^Gk}NvV14!=`qMLhϣˋW'8Fv3ID<1g%k` 'x1@$= 2z?ۿ}m[@<`^ʭKr)ᗇ4wògHJ9׮7/sP6 JDf%]酜3Y%ک )D}[ŮN8S0oʨj~bE+W9Lm~\S^S~G:>qqAKp`tz]t2wcFmM`K5a!{[ES?>.mN^I/Q)lfWV|" H8A:i:_]FGo$x$&v/o4F!P`Q'q>@fGO9FgMwfhoxnM!SN.\WeKݙFp"ARno).pUM9I)|UgƎ=Y$ "]uo{͛4ӝO': Y+6Oԩ$W*a o{.s?A _UշEOd[>04 X]__ UmQ~g; p?w?<JMnYE63 yϯr)cw} 6f^"o4%RY(>߻o:>3a|| }mٿ;{*Ǟߎ5{E)78󡻁)EmK*q߬??9h,櫠[\kf}WB4:8MF"@ZD0C ay0nPBT _ @O쒡?jTd+`B~/-0 qڏҶ|#OgD΍"(~'|}OlKi˴sXu:j}/IGrɳ,O4?T2񾜰dt \@ {Vs8)5^)CY~+o q)*y#~`{IφYmkO.) 
pĒdU4AΤhv!.z>F7>Ws ?OؠfltV:Sm:ۆhM^FSgOKuVu4u }lO 8uF7V ܡqmN[xl+m':(Rv#H 獫*A}~qiUƾW.$^V{(ଵ\kO£lOZwh1&Է h03hXS3cN6θp_w uNVe`=c ~Ѿr jӵIf'kNAbSc~27@&Ax ̀41O bR 3c/<*r<=-bktK s(W&*5^ r;;e%Vup/W)oĪ{\ZiKP~}纺/yTx@3KX%ycهp4[ct!0sj)~zLj(w҇Aq/8-uMZ߆svj [Dh{jcQk1pVzɎn~3nN`9;[~K|)$AW"L#}ϫy,T֥O>iLzDOӮtL 4 ](g,Y(]92ӢSkzC6.SmT(ypD%#¯].VL(ڈJBR7qNˢȡD qouet.7qXy|GB:A]=M#E nzsO=MLj U;ٷrۂA)f>O~xOZ'F~yx?[V`]'08~LWqˏBY)p\r>٨ v A&b"nI^-8sX+ǯ ;V m˒ ,MSbAtIAf߾α))Grcǧh)Wb+4.oZdtq; PIƧ*I=ܗ89Q"0=VE!hB_(&SV\ZMv IQh0| Ԕf\ QDt<2KU!xfGj&?6=@yzR-hg7'h[^0URԴMNϺ;ކCb?9WbeCC0'kwse@= J}#o~}䋟z\}Ņ"N$oh/l .P'Ԝ'ҟ11ӡD^-jy\;ڗZW5+MlwyQY[~qǾ>&)ߑ|uPӪsy[Q7 aEs J ߴ#^jC"X0Lj;YQ(4$fLYt,dÁg m:Fs\1[졺*#F~lF{:6`u t9O%+/`r0gLC9UZc ־SG} Aj?Wa tZtp"(/'OXOh|7˗R/.okoz+y|s ś3.URj.#)>%>R4͜{)efxNVm(:x߿+I@*v6<': 穀xDU6^L"3E.XGU5]xEPM~(L"]]Pzj삯| '.mh7gfWɒ,'؟dɻ70-EZ0<|,DH̱T@i35xK8W}㿭az=W|*--(&IH{xY~,oڈn@#f4C {q,PU.c$2)HNןAne">PB7v/Za=\4'qv6,0>.| Io}'HF2GX4Kqnu^Lx8äWQn %jCm҈T ʲ`Tmr/{\:)'bh5cQ& dǣK}e'jp]5={RƒǢnA <0i{ce,۴2?Y H!aƽrҰ\/loTGћ6_.~XRd_ A D7iٔyHb}e ?_bt%l>1ax"y|BmDS2_miJ`D+]<\3Hī/Úpdcu ,h=i2}y8t/rCd~2[+g 9(ZC,: TFqZx|Sx7M Lg3b86>,֤e9I$좱 0|'Q\eq$NBl\+˸9DA%JNkK?Q(IoVNj9p{ M 8*v"r[wgP0m4t͞'h-ŐU@n ָ3Ho L 9"L߲T*iqvW&| ^-W/$nTulWD]Bˊ=x9GEXfSuշo sE%pŠ: ՌLFSRvN' ޙfk"_U62e|<*{|F~vܿ\h8Ǡ46H݃pC:3GgG|i P]Gbbg]'еXr^-M= _㙤o_1 eOl?]ky/:ɽy^ĵ;QEB֯j+nBd`ACEТufJcx|+KlBϭm$y>TMoxǡ1dfT y'4}*oOURw(ҨvSX7 ,[CܿNO lzy` Qȁ)[0WeA`&pށ.E#㗽] Ysy ~1^iϕdsFQLDLq.ՇPs‚-3˻__:O1eEQSM~=>O=h9!V 'g{'2wk,| :ʣH:7MҤ޼i,C^tt1eCc̤/*/ǸrlUẼq{+ &0]rMժ?+)waLwlja)GK1wE`8gC|bml>~_K󚉰 MV#% P#"S'Au՘噵˼- TIjCȰ`DDw,T>Gn{֒?OCؤN$Y1`-- Tʋl /u [T2UC_*ޑY,geMȖ;Ԑ.ʚP|/;Ci:r9*' M39se/'% F.޻MU;2F+[dY~b ^PF:#q6π#ncDlnYjqIUͮ1& *)xc䴼9^_{%j2 o[.FͳzOýRW+嘍g mhW[Q6&Z)"J8ȯn@MQ6F"8% p{rKya|eܕ6 m\iLAuRE#lPZ_>FVRM4nZA9=tEfz'iv>KEoۭq:|#;Fفk4pvu#(7102glY=|"y0{H}skE]&<>/L}<ʫXr/*(Ǐyط1o 2ujۨs0.ˡ;]Y?|@AUy1;*UXsqY [JPcxV3%4/+ӹ9xw){-$ r!:%j O(Ԍ[(x$DEv_57Тun\YB=ZiZFyɰ}D! 
]A-dZnMenwU:f-uzhh^|Ddm U+|C#wn# "Ψ_3,̇zAINolae䉌bt[ih{k]D=箷QḰjkn 3R|ⓙ6u|~p.ZA~NUЯj ů̡VnMG+Uj^g+NdqcH_5]ˍF?Y=z7F+ P37~h~{)Z:I/TQf@5d&#E LU|Uq:,!cfhJ^9UoulGBTHL%GT3E].c((bMrR$~jGJ!v,qM^?@Fe?ÂWsϐ).L\ɕGX>1V(}}Sc=z`"U2<{цoW?9j֬ǞTgfCKd@1tF!k2݄Cmٟ/I#)@5Y| 󽜁oaTRm4UwgeaX9^2+֪@/|I{D zepoS0nԤ`L@7+w8t{ E(̴ֿ.y=2a)y(]WgO\$ -?rAؕf2VO ʑxQ˴xl} mw^ᦈ.k5Tg?IowX0#0'ߊ-6Ct!NJ1f۸;{2-F^? 6ninw.NSjO[o˥ Q.#kPeCy Zݜ2j?:> DKDHwb0{_l{B)u)>7*HDpDwݡa)$B/lX 19竩p$;,Y,^SR.,C6Мq( I>yQy ^CDY@®vXXrUWWTN" U֖JG0|=^Vb N\0^c/(ެ~)ǎ6QYl6dnJuo0- U&Xw5| 㵴_֏'C 4%%i)mSbrZ"Le\R~?NxƱ 4 30lS*eWv&w--=3 DzI-=~9Yދ~rO!RRhޙj3J5y{ҟ*2a7,eon4=-:5iY]Qe:'Q.G~1@~&NZ>/_\.^7h 'ytt>]=dT+Ksa^lY (3rILN~R32sXt^=5XIʊ郥Ӕ`(޸}N!gwпͺٕ'Ŏ *Nzw7O\A|[nUu؂M}VMj} p9l|>sιZcZja.yFkl mU3ccëmVW@s$tw`0NMzhd>Ɗ8j~QtUjC5`}LŭE ܲV1!t`P?y*j%jsX3Q{i/ͺ{xI=74˂~w`XjDv +XȇED ,WLEErv @бحVL@&`)A@640<+D/p#Y<#l U+D#g>j5?r>?FL?.tPQx IyMR87ݞȮZbL}ゲSBh[zn}w]$R<9g;Es X?Qe`5|wNU71F[Y( |aS^2Rr+~G$ڮ~j+,fer4Յzب_(sobJ`)h݀WAKzBؖGzyc,'X鴠ts8@??i 7:['~diQ9F^T vSa-vV}e x7n <Ҥ˵\XصtfXf b7}Oߑ,{ Ѥ5VѴ#g9OQ@^GÓ)ql~L-b׵oiUSQ'K5?JM] 8]T:obJuOӴXEDPOCi!sLx )wenw] +D[?拲_ O4__QZvV7:Ht;3;M?DwOٚz)3ثW>&Ugs,0ζRoh/ 4:EYJOG]NKUzXF>BPI*FG} $h?^8H rIP)RW5b}o|hOZʮa;oX֟2VΗJCt Nz[5_?jdɟir/:Enr<'dZ\rMeRկhI2Ǧ~f4o8u]g>/7ו{bXN0|\w/ۿcfIy# ~VW8?q;T=:pƟKHV,E>4bK'6p1 &B~.?Gέbʼny<4RcEs!By1OX!Nۯe vT`(h-Ajg!)G^fV/Wt~Iq~Un ҒtgّMqr"1,_)LXo [c']ݸTQboe)kVk>"5%->|@ t^IĨj!|tDLgU6,mxX[VՓ@rtoW6^J2#KE@Z>e._Rb3d,TOC~tyVin7c IF=ҽ ``|qSKkx#}Ni 8Q1X߇o,\@ U0GO8E{9Zix9l/8M͗zrqI3U J"ǯY_fDHc$DR%*}4|lݕ2eo,W¬f;җSΧ3k tX3u)E0E$θ4dY/;G=Dȱx4R0Rտh$WiݥY`䢉(4\Xr[}Bm1i'yMR[YSX qv[{@l Hf&,ŊN-?7ޛzMЍ7Fmm4aXOR}kǰqt^K 19sǨ>%lhi~d)q@M͉>>SBDzX-A.hiizv.O=+SW`0[\r]۟mD{$d (7IAoz6"Ǿ@mR}ѺF9={1}[4?:8~)޾R{8g<'#Ƀ{-M%{yVxjrP)ߜ!̢w/ʌ*F*rd| 璿+"!I5""JHܳ -o !7v}dW"+z8I̫]*CwV𢧯 }q>DdQ㣻-++-9Ոn=mk2u.z΃~O!)GqJ];7֋|_aIf!.XkR.;jZ"]9YCL* lU,U\ aH,lJռ*K%l$96_ ժOr[>&ǣ06ٛ;亳_* dM6ʒ&5=0Ad~"~#*ney':({rWzCJhIl7f]OHނVDq(ȿ^&nmV-պc=3Rφ)8Zsaо_UE$}ђxX=\Ci-L)-=(E:tyM:/# + Un4N4GRvt͞HΓw9r(gw5DZ&Z4&y8r~jZysa4oOʧL 7l2(#Kf0Aȍѯ*' G`9[[.PCK;)>Oni-p,;PMUݒH<##6+V&+Zy;' YÙ#<}Ϻ.j8 Ë3r@uvΩ 
ìK]N~EUDNe0W{||@{~"3BZqԹF-ԃ|JEbG@ 36Av&?/Hw*m9Bg2}mdpa~jvBL42z@-u ҲAui0š0-\Oe+m oi7 У2̃2b1Eg}æ/ӏP}Q(&ܸ.-`>Ud'$l՟|S=3ױh_snMssܒӭa7+>hpܟ0Os}&_K뿰Py?o>7FtA@VGzΗE@̝ɹF'1^fn{Ô#{i۞iχۉ㐃ʕ}]&euVes<۴@5p.<.s맳\p($+]ߢᤇuPTKOxUZ`_gO&%֙@}ȘB*(y)x.ʜT8GXu $3pxq%E5z&3IdtHe-4cv?->5ioV( ?B1v"$<1|MsڃTUg>;K Oq d@(B̅-(`?)S0%#]4[LE^3< s|0Hq̣ ): X7Vи/z@5{g/t %]= >8B"7]2Js}5x٫H0 CY 0teLmޅK._I V`O*N37fD7e.N~;^w~>8,kb;K,ɘuqa{.7rhƾ>oοe6Os𯒧LT ?N l'{A[{f|_?G/CuBxM?HV=~<`!!eN4bRzG E+|{!n9VO`GA6Aˍ];?孏ۖ&/Ӥ2! Ȃ87Jȝ 'M41К0}&B md6ŀ~3qt<\qܕr7 u\z{JU46Ӯ2i<^wn$wAuNXeS?)dtqYsPc()[yx< u!$x'Q&)/$zt kxaC^TD3o!S֣&(%X&v|YC )RqWI\P(S j4$TK[Ydqbaܡ*\L8fd[pVe9Ar V.w?d,po(_Lly뒇A^ev祐?,uu?ܛZ7p~O틉t &p1 5ROh0X%ܺ=ˊoh!+=5颜8]Z9d~&jƿлrq"h҂RQTLo`=޴1ƻʋT=J!)}t" /tj\r-QS]"N r*(n讛N,dJn{H 7n[㶔Z(~κ3ٵ㽲rU?[%xkᬏ7:){7nY*w\\h!]І  vAr'=o2JDmWV}]dWx8}w+/{xJ`x8"NȻazF:MOC(,gAiy 7/]`kZu^W)0#Bi.s6ﴭfؖvIoj>r1䅚o7-|sm/i5 G* (Z T/^fUA/o7/܀K_ A?Jɂ'!Sq̣Qt(8oL$oRR s'6ksEIݳ8oHuTj\9?DhnO\H :3^3RoBHVaYeiR 'cH EInY l[ADapmZP P xe_gJ\OH wT}yO5T"B렾seӦ?}%d"B:E_i|o"nvG27}L ôO7&eL1 ߑ~t"#@%Wlc\5VC>h :K#AAl~;pr0_qb+#]`"cv`YqV=`zKc5tNjF/aM$+ch d>xb.q߲Pϣ@='Í=&NEϔ,)dS@D} 9o$9$X;q^SIȒ硾?òGzYciknk4u+eY2U",y| x{G6}2oLG+M[o.\~.QƤٖh=ǓHLQ:z{̘綠Z hNk&Ih 8jTՎ#O~B6PA,1e`>9MX_l;cyA6@ދ˾I¡vZb>7Tr'ӊE/&#weZ[k pZ K)țM8mC @-$3YZQF8 +.ਣg)tFV>ATbOC$ʚ[ )5p2G͔*aݜXq=}w0(Ye#UM%`o Ys'xRɜ7~iW^n= >F>>h+z:(D?.,4]VT! IYrZajIn~y&LP7GOU0E?:vZg>oߟ/i¸!\^ ߹$=772X(< ,*6S3fӠq,J#a{i9Y E4(bC<"+2: Is>bpBVksԂkGpyA`ei O6'c+Ť9 \[ˈ6R6rAEey663mf8=[Nb8ziYbU^ըTYW l'Aݠxf$OzCւt?{γM!?W]_ANQhݺhgNR{.r6Q<]"%KɱNպ\~ ŗ4HOtNPl ;uXU-?QšWJt{Q_|5Q4?@\e: Кi{f3?҃sNY7(~ώ+e[l&bpdMDS%(`x3&@e"xKL C *N 5(||tA,hE3e$XNn +;o>ltFSk"<+Xc:˶r*]VV)_KNb#H~qdV,u fAMB@"ń909<ջg,,dF =뾹?Gɖqqqjf0 үdE{4}5t'\MBlˁ!k?.:`qHv/[?Xz/Hࠄ /jR}UGn{Qgi{^+Xb*CCEc,oUxM]sK?2N`7&m8 ؝ LA=C߽H#!䫣[Rw^+EOsr/wI~}kP ~ObxZhm9w6{m[ZePXj KϏXuݶ.n>sm4A EӓV!W=>!~ P76"OXrcϓ屹L@b8]mWVA@g|锤*hW#2"߾b#R~H6GA[{׺QmA=-fk EcqC~P/r<7"c"g4bQs:'o-Y~sq{>o{riK2Lݛ˵F޶邮c5}M+6Y!=yEᅓaG?s|,(P4$Gow9ẚꎊg;qnu𣼰l? 
o|&yݎ>Dˤ;'ArҬ_3N':4oI7Bo f$UuDzW"QkL6oP|M$(#w@WTb;D%+|{e2.EdPc JtCS>=hs'Z\lX2 #xYO_ w'Y>}MXE<g,ʫg@o&>Q(77O7e)7W7gǺUdrdXH4=U/io &8~V)iH c 9/\j a-tƶx]!;w8AΏ/m>m\?j~NI[=. '\n^ã^QB[.>~'+`0T9pRe,vyz2IݛΧ?f;b?qg3յ5<1Cie'Kf+aH+Eb{£40ߺ!!܃]򖎈-LY?OMfؼ(+҆$PҬ)OjPKry?hxPKDA=;{VTx=d"'cm=_cU@釔4Jgs(qB͞bi6I6W48UF>7?1gq@}t@[DzhS'>:d?%=DUh+ZbY+nO>u9n<E_קѓp' !,Mn!~-훁JK%A@0(:Z+?[0L;SܗI3x3Ȃr&}/~kq̶v܊\-jsXxY9CK#r.ǟB^sopKX@zc̺2ODk,w(=i ٘l1U\WSL r$D;et*)j `-RÜnÎ]5N4Q@|8BI#e+P%/zT~PCA8G $:ei \Y}'hX\SD#rp~ mCeOcۃ}Sʟǣlq$]b !܍r}.6þtUB' O=0 hWb ݤAK=[~j=rv Nq?-Yv̍v JGx;ɥ̶zyxǏ[-3xȯ_VTM|n[\Q+QE-7Uc5$h:R̕@p{H~IP- $LQ1pcYӛ&4JЃ@xs̓pvgڤP,l',[xF*Ї`D%q֕0Mn5qoH"{a3L7x_[(/Id5_ 9ir`7s W>nFl#Lyth2an˯ F1a_8y /T8a&Cx%U6[qܩl;U7_: 1ެF4_՛n#C1HOwHB nI_^2` ]D+LA1 zR~q+8zey Q !?qQ$|\&Ζ$WImQͰt!Xqzm%&_guWa`Ũ&ڤK!tZ1 郹&KWZܳ!5>ug7?\{r'\qe5q7S.۸M?'t<3?F7HC$7/ 9S1 k3V 9Pkj bşO%ZqY\ب(@jff,ɌHZq@F?&OgǍI*?ۦW{`°%_B^!m/X_"㘔,ji~N|;G$>MςI`x֯ :ƿRdg${{!K& e#U/L0QT0M /oW7?yx[0ٔ ۆlWKE3fϫq~d[uus>>S)K4uz߀ʃؼ! =G;"?Dy\G%D~;㺈wh9!{RW2h./7]Qb CsRNJDc^6MYIͨOJR 5z}_ʻ_~˅|u@;|5P^ >)3? 
"Wnm},^j:R>[i?{G0p5~ A;0Xɻ>}I0>I5 V%~/Sv;8D*.A&ngҏ7x ."7bӚ*oVs4ouUU_rkMyzIޏ~>Ãj($SHkaG@Bآn^4I?e%ntz #*GtؓᝲS{*a{Q x<)[X{㌼ PO]xa\2{7pW1H/]y 6> Q)bI[50ȮIFUÊ9˪>Y4j)$Te$''0MsRj@>t qR9!hew.` %\-+1yهE+BDI:N@<) aո< `r''K8nGk`ݕ|Չ42i-!kc1vƫQ7qaǔfs2~OpFxaLxW@'?M1_Sた+E-(i e߅1uEEXӨ=/Iy,th&d(f2rO /'TT"ݳn'Mx;Bqi#1k|^J+w _O,ΎJAz rKbi߆<m}Vƫ|;"0E4֪mO(f4!1y-_ [kMK寣<"c}$ZWmts߄Rj^:O'G;O7' E9 l[a lS(gGrACU5 g9 ,rVN6|+w\TnZ+8~qk$ bx;!1ḬF K~Wm%iR__>^q,T0> Y4 Nxx.ݓa0?#ha[w,\[[&~zFRid c~buo\D-#0<_M 0߳N]!Cx l]$Q9@oG)Eza\n=H  ^x}s*J;_{;=j2z>kح\յ\^ž*cnzǯ _4)۹ut>;QO`t6rRmۂZaVg8^х S4FW,&湒64L zN?n?G|W:`IE.w[zSK"{GH*DQkvӫS Ý2Dd w e$H 5H**C蛳ndX2QPdb 2DCTUO~iw:<}gn|qY]g㵱a#G̔(ٹϤk ~OH_Gʀ~_ʟ˂=)Y}۟Rӏ"~}d-,L~8rLv.Ţ $:a~m?_pkKcטhxhgu9St6⑯'\|n A'3ɗha$]um" B7ەʫe:9w ̑ij9g72&ۣ*H^-*nIJ "DQ-VIORb@Ȗ/IL>"#NTfnz_`,jz ipHX}%u~[;6 BU@`yE˷Rc)3г\?H$WŸ#ySŒOʰs!+ɻ\~XKe^F_"QU{t/Ry/!MIY:qi$ 6 圻w8fQ2e q<(G<ķôu'IA&z)Wad8'PD6|V}Ѓ|:tUytmY:D>C.=w^=Ybx44AL=&p|kj};l^[:ZmP?uл7z;x [ ªLSb=a)]w()\pB@W}{(#h)ϑl):υS&q/ 9IŦ-Rɘ]ЮM,Մ9 P+NpK{ydby FO<ZW V )Nԟ(CLJNV848ujYMռu"꣘Qb ApDCv IjzOXl&}F ,!k]oNm[&fZjZBwtU˽yCe%tZeӄZJ<:E"#|d~i?[ЀkpǏzH8$q[ i>)ݳ`&}j 1_XCaƓ>2 A$`ي<é $'TwP@L5;MۄXP򤎋r&OɰJ-;'uK+!=al-pc0nsE, S_qfNX Λz8d/+cd)QddjOwCk f,cK"cZ:pԲ=%-M~zZ.Hu]EEC7o}g-/ĴovvOsӌgk@yn^pև**uXcrٶ-F=z#uJdp-l@!XE);Ix8>Xŗ|b5ee2|w?.2t< .'.plN!{nJU PV$Z4+=Pӏٙ[oĈk蕖HP])m*h$f8u0TPKVEFIĚor=g.<)^)zGnXux~1z{jj/7jlnYYP/s~Pڹԑ+4tu`=G_*g y M; ~j7?N}> X)C?:  !G+{drifᴃI>@_i/ I&mW-'!> -ɱxd`ξVG,a ?, 6撳Rq`qrt"EE6F4ix;8(y2REXo@PAxa[0ae +/.9٥)v'<=MU~phxײ_&̇y2QVuGP͊ 5`hp\iPM໎f "juzs*{OAvipD HC,G9ݨo*1ͦ2eFl<΢ C{=adC Z>P* Q@Y0TLm\oh$K88mȲU> C%<HuMX`Eh )/Lg‹7 o WO7.9WM&h:_cy]{OZ=] sЯ'|6WΠk,wz tmhސ{ lRg}ω2K7 NMQ>ӌçWoe=/Rۗ?$թy4",fRjATT;-g3W+  }f!!`%P/3ǭ֪R^h;wiꍻT&kNՃ(A KoL |\rx3\qgE}oEdzO VvD<}|j4&ZltFݹ(vҐ;|M_vO em%8G\|9&tH@hA<+ V{I& N )GuQԌ-!! 
vO)svc̦ 3 dF(PjmE9?#B6Oꌔx(xZ-IGPk BYkoҿ_Ho&[՚{&6P*P򿒞8q&FާKLO곆~ ?T)W %NY GK1e9b9@zXE AfSA1t֎HerWXڇѠ\K{F4ҟ91 U:Ih3 A<3Xkuhxb"Pk :@ܵvX7fC1Ί1YtEs[Ab;mcs\ijBtMK :V suLIPe-)G{v4y6<8`cQ+8F$Yq.˗~1k,c*dZT>l!Nxk ~_cx\ueK?7PWLjlW\рQ{ƹ}wO޼!eGPSc!LqVDGrIpCקMR1}2k"^0tTdL tړkBD*1A2l(/KתVyZrC!b&KD2"*L1'y*l]k콋Ϧ7-pv>]O FD-۾T1H3)aC#(M=u';Kr8&;bu{\xk4ֵsG;-x۱Pɇ-i[ :2.[bƒg,Y,'2 W#B<31=myiS׍_fªHW/yGmJ&!]RQߊ讉 ȃ,Q B V 6Wݻ,L0qӗ>9P:¢tI5ڵiLsq4FLR9exPmYtqI#c$zj`ݥmD,AwGjgǥO0]|bBvB)^­lTO@ka2ċ%dz(mO ̶,' 'h -"hlQ! 9Za\N iޙ[%kV|0Atx:q. Br栓 H(C>ҩzV?*P=8Gj?OQeCɮg{Y-]c1ntcޥԇVO .~y&/!$w~U~\pT",(`5+ p! z;ʛ҄km8 /7?zvA_&{74PE`#YY3X"ی ^"Eȵ2T#UH܉ڿt܋eʽa-Q[9 ɧIc(H`¸rZɓ f)F;*&>3$4i\ HϞn[,to87>YcىICA1[%BMe%-*cg-d1Tܑ `vѼ+`VjPAφzf2k.|zQM5Q(6Sg4ot6djN n`%T92WAOpSg+V NA"6[{٧W.=/Jd}nps^{Dg#)ZоP㪐<.6BzSchSMrFuBf:u/^) W\u-^8l֣*0G|+AKXW;8\uӖJQfs@!WB 1|x.70 ^*Îⰵ9!4q8QkYy}58cyS=9%9ú?TsI|H$et.p|TN2Qp*4!=zi cxiF^Lh?fU âvIO6;2K0Ae.Y:i"qվT(eSN2"3)|;6YM+v]=7QZF?VEA3MJEZ6j|*h֘7qdb2Pt/M+[ֶ9oZT{1%g ?"Ppa7 Mt_|?*@}g ^^j,T\ʅG5Moi^[zrt>6,VpѝG.?Eʯ%&h]lssmӜVn{\#6\'D‚( ,TYST '-4Ӗ [^RP x$ Ya$*h<W9$O1K‚~܉.M/moZSI47+7+Nq+ [Zv,۳{p:u :Rl&|aom᪁)~N=7]状 6Q&lٺUP[]g C kCZ>aoBB,JVS& BXuTzIYI[0,,"C3.sv%| Sh) *` o_RjʋrSQDŽ As2jէ9Ԥl臹d52ru@gP*f)M4)`Ft K$!\='hʍ]=r0ď%QVSپQ-2+u|3&{:=ukÞMmzbeþG\@QfGFȔ Q'([ GuG;-;ԁ"zKr Tڰ`n he_PuJdP!SYR_3zeےY(uՎXl6142aK8xGH{~xT?wξ/z.D@plCyI A+p3%kgnE9n06:`0efEڰuEwig$tbd*fT.SPٵاt֣ԈqQl62Sclliɱy2o/C$LфRp=ƶN>=bR*(b I96ZTYYEEfUsŭ=&@*VX4 ^q"a=9@N*M&K}0ݬ`|*KiE=x R< ΁5$DHo(ĝt9֠5S9fҊ{>5]˩{^Q;B3c!h[%CɩVHś*(ѣ*AOktF 1!,M!hh;tcFzsA,4_C"haLe$wFI|X@R$0_'klT&i1N΋#H ڳ̍dav:dxt@+ "Sus`+/}B:g>6 H{,R$}u-!T]jLL%C0=t⪍db.U>kt\VߏK~29wS7Ӛә]j8G4'JfPp) "\CjZ&~ qrK"Jq0i ʷF)'4/,'qNjr̒W*i t )HXgdUh^fR-!+2Ў6QYdR+QVjg\6qj\Rj)8AЈ}YJB-WQJPBh[v_D}'څ<&nZdq0--e*il]%b @o s:rbWQC hG=ma +2j9䴕.˩,{.XVДMy%ep!Ȗ*TբNݽ?4ے7#&Aս(dג(881-<%:hrEdD"=oJaB: ߯. 
BƲ#Uq6;pYICIeb {ÝhZmFkȅYvt0F1e ۄ#,m1!+"0X:<\jE;E<#1_h؍g(kYd"TulE*<%EX2Ød*iiU0**`{NIVҳ5dYϪCFTBCTqMDjU!9`(jު76?ӟȀ"v?s05_3p|[_cql}v9߄T)s+_/`77^.5ipo_Zj uex sXnjܘN<%(li`JQs)8i(V4\͍7ΓC `4}N/nHE-,ߪ}m\E隉9)bRI/ k{ zˉL1X!E&F Fs<& uU=" ʚܧ纺äXCJ.dSC!Tؚ0%"86!% >m2>w&tىd 7Pf}vv>Z,ݑn-XGt =TSÜԗ#<--]Pe*\ N r Fl V4C.|[LqmSB&`'Anח֞ɑ>"0;LlogC$ti o #lkXdb!b 5 5g\ .mV^3pX^srVJ6hdRN-Bh-%zY7Pы>4Q,i%&+0ea1xBeę%zVD n9'ddƌ3'u6pn6t="խ/e7.V*m3p7*>$sC D+G𥼯iيj|U\YdzsrbҲ3.oUSȗLb0OHcbSv|'KD>kCtqCV+9r⦽cKT 6 {Z5JV1&u!4Y. "L"`V|3n PmA(afK~h8@(kd]P !hTaԮ=Ԫhi-?*i:rȹEl ssў]%5`4fdN`}yVͤ>e~@n/`՞`ֈZG D vxW_xWgtJmi]Mc*͒zLAi\K1%1}vԪVJ.b J/B[Q¡,Fg5S0^  祽fT]`1I٦0n83`>^qDܶ,{odwa0lc. Q"/#XQ]F>9k_1 >Iq ܸTxB!}JWXԧ~DH5bP3pIW V`:_RJ "J[ U;+;n Зl 3Ʌ.p/qVG Y٘e#ՕN>$VdZg='#Pjw 遭bT9bQN)J%{6G|tߢK0$mn"%H0oV[`0F%0bBpŽْPfDoOM);lk2m[b%!4\z+%zr1.渲lvUfHMH`\EV.)" aQmwe,+@)1h1ڢ $.)ۏkB|?5]\|~.-^n2'Y\oY@7i`h1p"sloaJ$UZlR&F,- Z5 NS^'rgs4<U4M-6m 5rb7V6`2"tW_Kyfm^P M)*L12H~ISTi+p֒N0idRfqФΥ&>̦n+@3`p?k%4e.fkw0&M#HlBQZgXcܻ"n9sk]t5b̐NhIEW o%A(%r[}#+@Hb*XpfKz aMwQhUe| ,Ft]TI"DitDU$G7㎩^%=VNi'^Y"bPBB<[ ҌǑ T%UrY$0H\ q+ (* yhڑE̙h(X˰4ӘIpZ3Z9ٽ9( ɢy5jȰ69xMKH]w*c0Vkde<%ք[Dμ>dخļ;]@aFYi=ovThG)8Y+ 򥳟^5T#AU}8jp󭹿Uz]Z &\)lj O(F7q!c[XhuGBw4 ggW*1_MkCx=*.,)Z-*++#2vLe"Ϣj»j!Ò,U 2jf6И箱D+`xn[6*_sP$ iF3tG\ks( hȍ FHSJq6?& aoZ7kL41g">tZH0D[L.ZQqEFC˱-iep1Îʍ+p+r· ٰOY-QZѴ P$IKӵf|P5Pչ"#I) 7ٴd䖘Ic\͋,eh$4N±Mid'iS, .~yDjpX|GaWÙE)\VT(f bR0CJ{" $T$ F `!cXpS*Q-6hśAI )tʼrU'423鐪b\#{`5.sOA{hHz;2 p.q0qE%!+-xxHa\cH&Fd$F3*q!HNPB#~( RIj*+)Q2"4bBȃ~61޼9H1LҝLYFC;hԵHcU=dhzܱ+ZmR1^C3bs!\.6#ý_=ӌJI*3iʏXKF$#hǃN=F5]vŮvm6Z1[᾽㸑LŘ[RHRMN>*M O.Kcm] i-4 c\ F̴ *ry/vk&Lvqv,iLMPA!þk  a%ˡEř#gۿ! MKvb)Ls_ҫ^ Ŭ]7o.nīԮaNEMgm-U. 
1|0^H+wjRkuKX-C{R-BdIAC^CgUVJþ1jsj7@ou _5cŷ]bd\SpRpZ+q6m S͕WVFC()kE-˦/iHNt֫w\UQJ|=@0G%wfU+~< I2W-0~ ݉xej$9kdBfHN^B(~9 spѿ bgcjH4Sϩ^kYj.a]Pb֖Nq2ح쬔HXv*ѓBSJ!#FF!wwF(*$g Eн.󐸘h&!&eS +!]ag*2Y B*Zo= PRרT,4L Œ-j.V'Ws #q.㖪V\yZ!s*(eDT,9W ZȂ.ff?*QYVm9zP/* yzb{Wּ:yqkſEz%cLAdGMXy 3aAJTz!c"]Nq9Ls.쵬 I5h=A(1 c4E;%DV6Ծ+`ºU]rEL14C VKK#ns:>qM Q]ՊrL1GmdXy;KOy2o}8\xJ&OCF܏ Ȍpi<3WZ]"levVBng1YUÝL +r`xqa#R-Hf#nFeJ!z!ל[ETnň\\!S9bT6H?ڻxu;1᛿rS%Y|},nc۴ρ}BwvjC50i"W*,˅тW6VEV >^ytxrk>2sw8=(Aa0L0_^%1}eEV\KqSڱ*i 5ttV'Y@{"IJU^>R*+{$F5_USU",,Wvߓ~&N={mlW.|VT:?Q3-A XtX\x/# 8 DvV_Bfs&φ9AuT1jgEaU7jaAǒ 0唥x4y]&~?\QXoΣ|ս-KY:4/vRRTo6燿y ͜hd~ `7uޥ#=[lc5S@a& B1W"i#nZZgVWq>$@noW/s yvZ0#Gg)d~g,^BWc]|\gجٛce1NƮt3y(^ $qlEܽbĸ@{K_%26HZ.ی&8r4W9м+eu\U}WLU%5^M"C$Mn̚jX˔PZ/vS_⫆7" Dp š;d]l;ɿ.4 HD"0k c֟\SBfň&.' 0Z >j&Nh{ }o^iuVăJT/ę⅃=g_i62]S_GA**b>/VAP=+X۬770wNLLVs!Ǖ߯W|v=4SFk)S;ULVVuJM>Jyj }[dlоf˔ ~,%Su|sOu0!{8 o&4@WKi9/tqs6g`xVqCYn\7}DsO W%lsгijd>N)$)b$@YiVx3L L%dؑmAH\D*I3uExsyg=e:dy tRnЖWK?fn =1Q37IUK-6CC$z";v!p.pyY/w;1u]1WGvztr(PbveWbbُv4؂{d;g{tu-kyACo.u9Pq6:ϦpqTcW nEWeɃ?&|46"dxNww]}OU:QN+ٳ:rorU>NJ[]xNKd+9>˒]S/3Y6=svjQ90Aa;?׼Y ɘEX7Vz κM-q3>9?VB3biKwpxfl!5.n7GBC-2+]ϨRtVź{ؓ g[鯅 \y=nyZa3 {lY[z/G=uf!^JfiOxU,p8Kzu/ ؋RR76 ^2M'6?%6ß`5UkZo /^ۥ<,dzi%g|p\;gʾ\N+SIV>1QyswRtc19[l_ g#z"ޜWwH vt$a_~\Ѳ3t /p2w;x_-d33\'9xzomd_%՛Ɂc2g*b, ujo#et]m )FLGI{:9s۲/UTy^_LeF;g/8,gs5UW7j}:r !ɿwןgS1E!3z Z_n#yT'qee6l?/dmϵR{W-8zx[#K?V<63 G5)r[).`+;&or8;Ĵ֋g+^J]e3MwF.q\{J:P"[r%);j-n9}>Lo\Ĥss}Quk[f!( mnj+l`:\-[1$דp9/H?q7MxVNjuϟ)CcuWkOڿ3Vkj?)~E::]tϧnč.LEalgHMJefzJn&o0](UesoUNl0u $^c??g :іO Cn}ww]v?&"`ryw=l:i^oBk|\7m-{;p8Q`3.Mh%C2?k=Z[1[-қfj{Jic\/y| ޗrk<-8\m[ޭ[Y 2M8*]pd#kf ?a:pp VXO9Hi3!I: 4v;r;x[~]8;c兦doW6B`C]mϢnuJ9O<oW}Vg%J]aobpzP; IⲚy@#%OW 5X%\wɯ{v ؇ӕ\ cFx3Fr:#I.yFň/:ZqM[]ٽ~׺!/ ͤg7.[ ~y~{hAyԼx0._ӽy'd_N?A#l}¦guzl^N[oO Ig6K@[X?|)ruS7F05f 'd.Z}5ѦAߩo6a 8?{w,+ohbdt*x qp0`[+}s}}22E:Vaa]L4Uy9banf;4<^t_w]Y2hC UKniOz\e'{=8R*kJ*lV}Yq1!< <-$2wxZ#1׶XXC떼/[h2XdD[ɕWħk;K 2szV({B<tbN߼=x9t ˒+D zZڄ"~I涂 d6pą9 ;|m>}1/_V1gZw}fsU>͊ q,HiS4a?['Q>5I>vIw$gZѱέy?=H~GP=/yC}GNnҖuڝp 
Cwpl\~Jԛ3$\HQG.ZM.=1t{ w9fM+Hvc3Oyu|8{.-nĴgV^É!gѬ/+l&,^Dg<~n[߫O5JWp= N'w$>w~Io߅h@[ܾx]_mo9dzE,Cyy/{_"ײWӫܺKsK[">PRkj z=Z}4A[}',q6hZg$ob_=Qt?̏|d4ZvVSsrp6g:k&3BNOw8΅"~>CVdzӯc#Mk>zTYUk?%ow=3#tKn+YVWZ?7COe?zCӗx<cH+`'BgKҩԉOP iS?bӷ}=}Sl#Qx=iw{k]c<ٿ`<{1*ImQJm?_g#w#pu8)b{Xv(y.k 霥3Q&2Ȅ8 3 kS2, L tz yW.gOJ Ιqp2VYΏv}kTq|N5̟b?%o[wc.Mp rvz0kKK [#cum*ym!+jΦr/_ Xz:i{jԡ(ptv?c/<~nC43bAzp /9;;&몁sWzρq379 RAYG=p'$W[ߡx?T[I>tw;{S_H}wVkW 6m,mb=ػWvZ=Z<}tiޓzs:t|]uu8hY|l? 6 5o3HomlY~W1uW|4;o_g/3ϥ!ngz|ngG?ŇFG3XOkEpı+Ա[<1JYdp#{xdU5Σ:~oֺgM\7t]x-eE"u? U(xZuA0}'?볻S|xΓzLcSэK} &LdR.l~Hk^ON|}Z/ o]ng؊'7Dń2}>Eʉp+V+LiZ/̔wc3;:7}$`{|;V1wx+wfKv<^PQSMv La {COWEg]#iķ8/~|E^3raX\ ~v ovx 5?\= LVK~VXnѻՓ̋OKWO_xfs6l^ -XCyqu"2Z2Ќ5co;c&C}L:֧wqщfx2kGr zyǟ_*{|~to'mn/nL{*J< }>LS9ʓ9}hHiڸ+vä7jʽv}w]]V^gxvnd\0'Z$⺁Mϻξ9:0frXyo;;z,wg53աڹ@?S ǫ=.6釰g/_x7bFG_}Q6pгz~7u}%aNs.OJ0r.o\u4^馜>Ln$/oiJ{c|Kom]l{3tl`.wgmޡn~>?7}-/*:oȿהG*q|+_ψ.a,)sNu.x >氋k=sP]}? ;=t5&ϲey}?F;fۨ2oʓ\wZp' |u];'gu{zpTs_K>ˈ?U{n>w Or|_^^ێ&7G{y_|{}o/sn;w;>OWJWG$}^SB˽ vKJ?/̟k׹g| u }܏_UO|^7݇.WC#I;oRs9x!`;qT%S:ܽ{Dz̞udO lrmrpl^_Cօ_wwR$}jku,?o/9ut9{p#;oaAlqo_ܿ|awiv-c *a|3?X`ywgk;3ؼ_"w_'íl/ywawߦ];/P| fϼ^f9!x^IYwbpt$p>~'.fݗο\;GVW4.Kr}K%QZ;5;yg_'e3Aַ>OMυ1m?c~Z)~o-R{;k}N8,َ37ѯh~ ap;x{~gak?jk,Q w&C3U3{;/:[v_ɕ2y<]w5.ѴV3fq/X{p_U!Cly^שw/v5w>, ?Q_5~~'Y.>GYU4ϗ4_OGsix?/~d']]oBW#{ц0}3_GyK'Wއ]__uV>&bw/vO}躟~RUWG6.{=KOGloH}o&阶O_ӿ4>5>_e-Scz_|lZ o+BUYit{Ÿxf;o>܀KMsz?)~IWgo~yӏؓn{w﷝?ܯJicSo{o#bz{Yo߿~wϙIo?qB܏{?W~ױN?=s/>j; }GwSO~8%wn?-B >]Zߩb5OhX?'^#ϼZ\Υ_;M^Kl|O+jvk [:~߶!NOm%XK󻎪q}>o痝Qg{WV>߉D}}8{\/WI>ZFʩ=c#S?~o޾>g4w-}Wv^_;h|~~7{7>os A7[}/=//[A3>򺖿k.}׾%w1}1wOBSUA`]^zq&{LmO*Sw_^'zDg/~?--7πG?Vn&^*gs y{ϳ_Tnp<>_VgBOwu~?}7 _Oشʁ1P?oa}2>7*%fcpR~3? M󸹌-Rr5?WsO}cqv/w׉{JO6tUj&7<'mv?3>o]x gWP\}{4*o[ou>w_u{ݣ/kF,rC;}]AGeƦz;m_bCk|^h7_pn7n*{?{T|xo=z3c/lwN<#}CcXIY~G#{3?{^b›Ps7/ wW;~O 9ߌk `? 
/?4=/c}+?__wֿݧz6|ϯa?~zGo}vy?W:N~wwԑ,7E.]__=3₿F sLc7@>q}.w}wrCz@Z$W'{_{=y_(L{?+TO*_Osr'pOG#pp}LJ7&JgOPp.8+ {,Dio 7o1G׿hɀ|?}Pahg8 } | ~[~>W($OGk~>O>;Gi~%.Π=Wqxy}&l_So_]hG#?_ߏ?蒑?{c_{o/7{Au_{E~ߤ󪪟^>{_Z/?//tkM W}_m'>{ǯκT{+^%?g?T?uK|g񾇱`C?I@#}Am䤏1(㷲 {?uGqsRTRQR;}*CvH??^*U_Q{8 l*Bv`HN0%#xT}F~ WLRc GH/SIR=XBD(~Wf%aX b1 ``1Jc)c0R1&0Tv￴Oxc X1,b aF1C%X–1*1,a\Avھyc0XŌ1aL`1`" &00b%1L`ccSLb* Tcc +5s?H3,bcXı;cc&1A"c1cc`b V1X†1I+6F__D 1CcS ccLaU)P1bc a0I&1 `#6o_+*1 `A)Xc%c) ccc *0qcs $*V0,b!UC)1wb*va!Up;mTcc Bc$cc QT1U1T)e;\~I.cV1! X'f1cbTaliοi?%1110"cc SSF v> b&1x^>ǫ+H"`JV1"9w׎7EIcJ"%LPڻ]D2G,-Jqc0T-m~ZBHǟ BxƷ{)* m5 Urrs΂jVM2%[Ug~ˆQW(u6)7Mxxxz#Š p'=DIhUONg"u'Hw]~AAj{]"*%`˕o_r#in7oHEh:wR ;T(܍sn}nT6WA!XI[O}}ݶ*GE]#ӿR8.7CqNp}*CJ2&c77>*Riy7 T7: M4rys˿] M|m[Ml<NjrqpUHM*:HMڵ7k9Hʬfn/CemZ5ϵHj:7uPխ]U SQN8PgO{4c-9߷ZS,mRsqqVд4ׇ>X{U$[MżR]=*ҭ 8u{٢Q윳Ϸu Firow>CHiZ4gAwTu]^>I*U05~UJFk.kŸr,^)%M8=}҇Jt~oAEiiԁhg)Jg2龹ie.7߽Гpov䉺[[PMyH~O)C88q@ZM4从y9jVw{V[rUnն**L}WU"VuWW_>>8W-{ʅܫ9:>?BK=7#p8_?$ڭ6ߏ)5a6߳gÊ\\L10&edfew۩&6.|N]+%#*YuH[qg*̌\=?⪮*qN7ZI.'hwU't]˸x fL#4{N⻮]kӸFnDvJB3UTv`ZP4M-Z럅U \o\J  5_>uij盤KƀjZןRIj֚q*^WJ>\[Rus5㴫dnǏS>o+pRSǧIiM x?%m~-QN7~TY3&oW?ڣqnl=}x%pW_qTp.oÏxJpW 0㮼7!ob;~_TZ|.'/O0rS4xuHDtN/8qBYEhit t㋌Vl 뿏Mm碣]kbN9k5Diiӯj^9RffnW 89^&Vw5&U|||wOqUqW}N8Jҵkɯg2<K2gK5 #,{jyTfYrskf$ˏkt:CoG^}R˫ѨV^. VIzY30EҮӟ/U֛>HSSVns 5,+.޷ݶf3vFѫ^{UGyPqG}[ 2^2\3Ef9^pHfg];?czQp$tCoWEtdҘs}=^>#t+e~N=+ݽ㫮[eFѲ nttguPVI{EϟBE)@B4J )%"EU$ UTP: P FLIEB @ Bz @> @  %"X[̴Z10M0 M4JxL6M0i b2 d# 4 @LS(̄M&i4  SDE@jE!5U %JF OSi3SzjiP2`hCAMC@ #@h C٨GA (SEu}FWc2u $k[!0K}*jFYWuMzڎpIƖ YJ\iEԪ"=:xŲ0h|. 
1 \kDDA`a qiϏK5%0O0 ZH ˴ZKLjz"'-n 8Q-oY6L•7@ęìUӥC)LRj-2i~ݽO~{$z!Q$,AyčE E!gkkd}>•v5[5GGU MG$#H)'ƶ7@ -Ru&pnb][ih)2W@O)8f<þ6 l#S'n'$KIMLU1Z}Q 0=D'uCFa-JaqY \¼HԊ#e чbKLBmP @RՇĵfqbblf0T9nir6*duS!6Ja%d8+/^s(͝ySMBP41Z0A]$kF)H7s r4Aq*C,nsCu/}W&`qKfDf,lV fQ"5&uA ]xy.""mxs57I$NVtَ.Ae\IH`A"Djzk@Ƌ|J ;ǜA(|9gGyܽ<-i6m y$tN* #"d<ى)K4ff :jߦkbN240f MfQB\!^ ނ9-v̯m%\c}1=0RQ!EJ1"C=Wh<ǂr䴄Jd9ԯ6}s ގǨFR {ـ6zm:6NA$y c h/ [Da 6D"19}\_R42"A.gu0Q "LIv `Z9sk8Q@N<1`F`k!UlHvS8F|4l,rހ*\P6&&A$=!"}Q>uyן${~NH4AꚧEԽbs0.:e%tigCtMwOِ{BxsrwP@8;jnq (G̼4کαJ !hNT]! OqY ABZG yNA^m8aŻ@U dbd #I1~7' Hs8bDŽni$4#&ӜCKZؘlvrq4fq@DLjUZ{pAMU ccBHd_uQlcLyqeRq^! R&F8`%EXiLNhSӌ͂-R{$ |,DL*A(Gt=H @^ʛU'X[Q g㥝QƂX #TIн1.έ"&T4Ó=OsAdqBnw|!/a9>d~|L0gVW) `_lo8/wNwMcL[=cʡ$ qw a^ɚ~"Y wcF"͉9giC￷4Ω¡rVJfqrNuG_+/џqOP!@+RiH=p"3hUZ0 wc8p!!q ًSiLTAF :csگX] ⹗áz@<~5MMVN4R"k bֳAbb@j5"lOg.x|=%'2Om8y9BH*ndT;\:F72"5&YiC> My bsNfH =Anf=ҚeHEQa0RDr6atqwS> =,H#Ԟޡ 3^Z!BJg_n߂=<ύ8x2)d2ӺhaǘKᩈ3[:,ԯ! LYµd:Ub G1 KpluU\1u; vgr*w-ֶyCyu0&tH|ZOPbC9to4紽 {鴙eOH-<"9dN*I[524M!N)@fz}nU[,\+q,]tQh'HML1G3A>| $K9&^!NOM`uFnlQ"swVsMz,|FRV lMewolC֛ H|J? 
B YxB8o$(8pd&a(Rߟ>-DW4E c!lB,֝~9Odyïs6 cJ ruc6""U\A"XӍaؒ+MsApo \6]7;ܬFd EfP[)g$HQmxX " kmxTݑ87s#%7\kq6櫤WQY=g `|;yN>8D[&d.&dOMfFa $q-03M2u DD)Aޫڮw|/?}v< ߘW`[עczD l;jP^UOM cdJcNswvZkYkq99~a73t9<Zf$EueqIau"vx6w|Mw4%/fSƖf="qI‚z8Q-[JGĨ:q^gd?<ݨ 1cMqT%5CB͒j(NןL瞎u=Cު{u.INdtpzU2¾Mr0JgG͢EW" U=O!: eAB4X1gX&{4U1[t+SI6ms/"28*& <=6%T&혆uqu!m(kPT5kŤH k^#Sp vcqǸe`!C %+,߿֋8'RDQYXO3dAb꣛}-ArJj* q5[q;OqIY pEH9M0f7`ro̲3 }ޞhLBvB&piܓ<|kx,<@,ndޅ9U7teF9hF)kC^E3Ƹq<9J !W6"L;\~exIA O)6_n_ZޕF47$kgPj GInH/2 t9)(T$nKo;wugCs}_^9Ҏ\rY&iZ of>,K80 U^dDZ#Va>C.fQ4|Hsׄ{%wUK:;gϬww%4wwa L,0W,ܐ]tlF{ByStTc0&yG-$IGIZ6i]$ҵ73+II0ٮrk!bkc{9M-\>\.@V3@hIp hY(sř3T| >d҅]2H@ǟ$(r|xcm@$CE*n^f 92[8Y:5gY:j5~aW Dp-En"ݎCxPE:YA[2.&1b&DivO|[M6>]mJ@ӨJ$mzYf)QahXUDNs/U$PGčD,șCw\幑Y'Km}qOsLd(c, y+EV$atYy$I5QW!+/8n @̌ l:Gs͒˧QO7gdi<`Ӂ2,;wcA\A*,3p$r6w~|sAzM*B:9sFA3"2ynryږ(9xGнd#w,r+lTP||Q(Dwمr|Ѱs7꿂O~-qg{ͨx;Hr>Җn8>3d&0uU(k05Ć_Jv=4Vћ^Y )v B85XR$fI\2CA~L8W*js߯:MG@q0hi N3У)k]"K'\CXA`,iosg=nJJRQ2W@?%N{ϤR)uNx>ΊGGMXno3Q.π @*mfc.0xӹֳrg @Mo$±J7 Ì,XFI5W,[sLIg8RJ>a<Br -`"+S-.> P4Y*,_Hd3YLaalĆ/B~7Ag .xjGTPbKpM8/ Ϛ7Cj\d:O-֣9 v4#>(J=' yCIMa;YW.g9L*$ rM"ṩ (JVu"*fvo+Y>.MӴ[r8Q $ ykS`"zmJv@eOf{E_5{߭/W~eԲSb]1J_5>Cg}]|wޏ~vu|QM֏{'74ΐh1weGF\D~hxRu3Ɓ;ڮ:X~Ph a3M8 4ܪYA{:G&dbgޘl#5<FyllEacl8G9P[{vH-d,Sv%w6>O(v~5th('SϾw>nf={;grG߹艣? /0/nƓ8LiF&1 #G*p_t}"z>zCWbCX {yۆN+I U8Plviơ8=Y, tfqV{Ddb͇NެqsFL +0bAQEM0ɯ8:Cϖ7h#Og2ENz#bA%8\:eg@NC}IL5Y"0s=9Z*ʶ+qn߉V(79sfQʇUp[{^RB^ɟFd=r*&7Ht^2o05]!ëCskԒ#||Ʌz&h90-rB}yhw/o 'Ƨ ~ #8R[?#H Z؂+a SуQτxe* .J-x9, ,(}V皽RGg8=:T! 
܃=Z,7r#[䙊|]Ӵ"SüEN5jRb;ṝ=L3aMȁf*4utY[)Ns!j_}y0twϫ8թpck%|Є.s'L\!Սej|O'BA-/<29eSȇVigH;RՔRtL%XLki^HM7MŖ&+8VTWWmW,I9_LQ4䮫ɇ ix<9WeU|ge%` IY' tkJb.ay?]m>~[3i%ngIzG-5etns-Z2N5}c9 7,;|0Nn}M"clZɿ`CFeyYV9[%0i9ՎH5uCUd-z@Z+ NV 4#)<ʉ-X0l:Vm)'RKg(:YfCt3[KLZ9z#uIF3ො :FG[Cv5c3֝ lQk9+lr6k4e0x| YE[;z2PgVt1l IrQZ֝2b 6W9kirE4υ*D/^Nd5CWur;#Gs_)\"Z.to7bЦ&[9ˈ-:SR.ve'\&P^z䪑L j&КIhaɤ cs-g(G'E6]aM[.)WEf++!(*#7s^jih{ _]ij$JxI{TbqcL 7ݧ s(W!nhF< ifwl(=2˫ ˽a.|텟,:,sƸrxsm_e~,폢DЉ)*ڼ雳;;oJ15n>%\U)=TB DzY(F2 ' (L> d)EBRU$`hyI!p^0u)I Wj<ӮY/je O7Fwc>4nRizE5D5_Բy8EQDe9휓r,R $_rTKdʓ/D鎙#FqH՞pY::О\F.6(NNuJ??*u{7wu:~##vc7^׆ҴES{˦xǀr;vۦ}im ^=) J\:'~{ޟ{P }'ȧO>zMr4];r-;<BoyWI ;kN]>VNo XQ+. r{Ǿ|ɮlSϮ?wjm~<\FnIxϜ;'z7>>j  z0mSעFk?~o#^~OWG$@ݾ'FhGVU~p~rWm֘eҿ_]s6WǯMuw]S5L~$ߘ1EcR8yFMX::&qbٛTKڳH;Mz :AgޕQdk 勨x!'OCtw4wncKope5Izu_"ϟd|jT6S">=dT$ OU,(n%VeI o^p~y$dz;S;_dݭ7[6yU(bqeȊ%V)Xaf [6W>4 l<:f!)ёDyk/3hמNl *J2q9c˳NPąjJx9`ΑvgT>NsBvє2GӉݬrsٸ5İDK'o C-zQ::|e&Ln}`DBˋ2lz sMѦ E%&̔6d (ORDE;,-W V@06 =C4ÓƢ7pܫ&)3 O ~+F?/!sBK+i;6̴/T B6yEB/olgNUDq:41.}3d1ynAnʩ?k\pCI@%0F9{vR8z&qhW#PNk鱆RqI5&FTB`}+XhgtY7[(gѦ֫^e!Ɇ勼iž Fo"/!ו?}@@.F-Ӧn/Fuإ,aYP[ 1jq T5+Tmѣ*};B~ll^( ө( A)rύZpS<c:g]/YJHQNHpMũ)h3*Ҵf}cvu8rb3prga㌤jaR ^DQ٩޷%S"4t(ZA@lMcKm˪VeuoEXM:0LRu0t'AٌLE! 6K=n7cԐU5B7ӆAz#m_HɴEe 2Rr<-8g{% E57?n][xCFb@؟䏒[}SX fC[C>c7{wjo~pC8[3KSt'^* |m6 }M,d) 1;"]v[@7$||,"r2SxŒ9=2ڎ#~gQ]_wr#w2/6]/3|uW]](Yݢs9i`0+#ҫYm`XFi4D gfvOQqRvbwW&^!;GKG7|jY-$^AUuJA{yZ1 ţsv6GgADgZ8me`ˍb, 2^+;w)DRqֆ62&?h0/.ȸmYuz3ɚ|I懄>4q_~Ci.j8y[ʅC7a>FF$uu0 l矌]]1SN焜fC^zF9lr :ujAB'?\+PAn`-/z|`s)\S#10 A9l}8;?]$Seih<DptI)7Gښ6yuX(" x\HAVcg e[X($D<2kf`_rF%&0P+¦/`KqEV \ɞ5C[qcko`׳ 1u}uש؎{_bvq^x)~sϷmSW 96*Wy "8yu5t ץ"v~{Ovz7,?/II0NVO֟lS?E.{oaƜ5_-~Ӂ: 6[^өBz,Ya>N{)|uOшBH JoNgr;l_3 @wwJmf.жn[yeK[zsUWW#t>IK?Ge+Š=Tt+~~WmGPG zք{$D Ku ~p"yK<8dߧ^S^ix.bĐ t;kj?\HrX^'Q~}?#)"}'^%JT_,3ޝ <$SN]Z=iIkThr;>6r/+;חVdԥn]m]2i%i4 .2ذGyn).OO%X@@D@2'bc _.+6ڠ× -]g"!5EwE# W c/} 1_sԄ^̀/Knz\a >}"mo.2Z{! 
hBMw-w(/k\VKS+fuIu1TUGQ3Jձ'1[`QIU0~,[8MDDDsLLiQϻ5ڴ\gY~RT~L`j#nkVPF|]/7e黷ٕ#[wJGZq'74dk" "y 7TeZ^ D2rrB!?\96s09qTd'%V#K@弼^7!vwSDއ)~TXU}$Gߒy:;ȿ It0)u7 . j@@h@f3R]uk0ey_MV_/ȤnwSvj@G/%P #(G)vu(GUĮ1ݮkê6ޚ""V34xzhz޲15^,}Bxm.Ja[=ߝaDI`;x|z3\2@;&j &6D㱍LͤmwZ D\v99|",4P[M`.Bۇݷvnt橶'l^X?C___OW4=<4\ƪi#LlqėO槻cAEr)*96g^o_QUK<+ 4<:- FMK|O·kƺUh|S! H~̿B[]ՅFj|VJK?Sx#y{uzپO+NZ˟^s&09mM}uWwFU4+'g HB>}K\PU. GW^iJKmWY$7RcP0l 'x_W*7[D{ks6 5I𵵵 gQu?meggXFJuג@9i)h7{u2^鬗3{+8 ,_)9ZQi6`k I['Yom9T_{R\MsDҕxkIT9/#ut\w cK?MqR=[rLwpu:5}|j IS˜yc頦%5AX,9b F `ј9]v~Wz}~_;QPߏ9iJZmο,l^,MghkZ) qnq|Te@82Ikܐ"kKWsdm3y"2yI:hC\l@lp%2@et~ZU}f!D7i19Ӹ i66GڕMx$I-=kɔ"|EvHXXz  L&\bs1Oҏ(lo$O/HC9>jC>C/J}2}_y/@0?)O{ f!$Dqk<[Q "g4` 7_-pH ~fP, /R Kȁ'ϲof5TST!EUS% PF GM @rNJxM T PAB1B y 4I(Z9$11DKJ\cAT--դ4 huEFDP% ̪lekJh(Ehik@h!J'EPjlCAš J)Қ(B Bh)Z$N#JM'hJiVi(QU:2*< - C@P14%4@RQE E"tZttSTB @ģAtH 4 4^mN44P,8E4Г44 !@  H*/K7OgWNSJlJ UJDnGsH&h E cEUAzwNS& DD88:9cpTS^.n/?B w"E|G'u/e?(zFSG??}@~=s@)쎹Wʢ=1@\)n;cP")(PuGGGG9Yk>hhGȀH0 Ǐ %NR")", JeJzRJ/pR0ZkA\ǗTAp˽)̄"q8PS}? 3 a?r 9G?] X y<ǧ}w'vB*S"Ol=V F=J0"&&LNu0ǀDǕ :>:""@C߾@L/} {Yq"us/??G͎6"r # bbbcἈ(!}tǀq=" ՝=?ZṼoT/ۧPN{!>=P<ǃtyo(HS;z<]IGx<{@XJd2vЈwiXy{J/߃|o{W]gO/'%҇~/u컾n`<\HXʽ[- o;w}wVC޻2!{.ǧ.n@Xt\3O}SUCr5py<1?z1l\˙s;f&&%+N]MR"^mLNš4;(CPr=}sҏ~wD1@I>"L鏥 Oi&D!9Zg̍`b *S7u~<KC`dQ1E2Arhh"1p ^.Vf<a/. M?/ }y|L'ZH@LH%b IbYx@"f`msyGsv÷qϪ:s3Yxy|Qy/xq*KZk; UpR;"ȶ* 2 5G>&(ux~$A~c1OZ@S7=z-xZ<7@9=OS>N_|>KQ|@1_N uì(( >'DM#Ts» xD,V@zpyӘa^ɠ_<>Z\q9~C`(y:`V  (J$JRV`%"PD ǩf). cS,O7ݔH "15u3"O[_*D*U /}Ȯ/{ըiDxx|:0i{Ph@ nqL8BqD!:4e H̙iHxF#dDpV<"Po |Iz\Tw=:T3nb 2mY c>c~[]T"@ǫCtss D1<`08k=ofx'N7!ҿw B"#{Z^刘 ͹bQw R*±.}P_?[F0hx@LOY 9^RX -|i Hd5ELNmb&"` 3mb+G|~G p8 \cِ3O_2Bݨ篠0&mciO{7j km" mVb 0g`\^D$Co{U0S!2˯)R@am>odDR~mb!k Z=?PUAJxw? 
$sw F_g0&WҼ@9q$(T{Ɂ0$X~{HHJ<𛄾DL*6=F `6KWl{XV"}1#.鐘 o4HDXXJlD1glRgք(LLKcLL R{8јbs))P )˔BreZ7}aak`uq G1N "=ܾD r[x.~DR,WY&moV!_ ս@n~NꈂCĄ@Fg)$#< BpIF$?G Rs`ր=3`.#QRAz5zT!BF  uL'^ٟP^7΀EB,Ƶ~V הeϧr3mi1r35)syZԾ4 AxB-xkm~_T}?.L2 @H c.bT}&1MIJJ KVz9|ߋ#1tqso,y4oG.p!NLuminescence/data/ExampleData.TR_OSL.rda0000644000176200001440000001243013456140362017517 0ustar liggesusers7zXZi"6!X])TW"nRʟA,ʿBJƼy $ 7ZUH2"F4$^HK#דGAa]\DGF}3}bΏ`y >+x5@8&#oMMs7Bk{@WomXע*zl$ ԢR+CBfIp'VGj4Т] b"Ѿ+`xJ ophȅ~;#`%XK4j"<ꊈY"u$.ZA,$U'c0~Rv!!a'瑒W93_9l]̡ 7g~ $&Tau Ɏe`d,i`W@V̠:|Fv {#4t4qm?"GSsV"A3 fo,:!krTaOeI Y&֦LpM4VY}%ɬ3,k9OfE =J֥񴕵=8otHb^`%xV/;o P̶F/ T?(`XYYؖk}X\X{>hƐ7-EF;_xAx/Ak@ ۺ6Y#b4'PU yX#)-І#yX\OA~ޤ#^0Wf,HiuIC_U/$i z^H6i?)tVs- t0tMV!g𽇤E=q /Az)B@x wxDҋ>68+%%@*.k/>:͓)5 +޾"|r(v^yO%6!Qÿ3#veZ9ׂJ^~=_]e{#W 5,R$G;6Ύ@`ݭ[%sAj?Od8*'4uDI϶9aܧ@i}i: '>m4qWgMG#%!@*Yx'Vǰ.#NKrjG?ARS4wNM='_mRŒC*!Xroy=#1˷$g^dzq jÌ ʗP7Yڷct&2ڣ{2 L{ކo#o-8f3O܌5 M0m>?dȤ X6s,mFކLObAb0". '[0_4/H.9ߨ=Q;n[ )c*8*Z;SG-ʼn.[zRsl䴦ͪ|k]"!rU~dkZ@QMs%xcuL6s: Q%^T޹D r`|n߶}'Y]'6M07̑^IvKodٷL ֖. 1Nu4"J& gF-xTU<{]'g,_b f'E8pa}&sl.72 swk5 pT(RͰK9‹O'J αf շmX/_68tI?7\O"{|mPKV@gl#΀dOE6dZрΦU/JO:풂ׁDZUS=1LS:5IpID֨ 0(Jԁ$/x~:`1B }g.!֨> {c*n6+KGstvYi P̴X G 3p.~.| ,I2)28^q`RɊV{y5r!JkByC\6?GeZWL߹P~#p%V8D\&5.h?iK:n1/q %H=A284LK:|.@% a .wjٷ!K΂~07Nu?:EN0uNl"eR%y{!x8x9?o.f&S.:+QqRue؈ 3=_hHIJvh2 "w{RځpzGW/E5=xdtkT2@ =7I(5vV.Qrp>gԽ C}G4JدѨZ=`V';8WL_e,MX]T̴oj#˱Hz eXoHkZE*mPjD@OD`"YO[A% 1`@S]7o6뫎֬DZx~*u:wU#]ި_6urqv4)_z@d6T~*VqhHG{y*d7y%=+ ԾnPsO i|nzs <~ ]X6"7|QK|JV$rN:kqYtHC (@'C4W#4z2G9m[ ^;7Z0dƤO]?vϩq~ zkP(MYcP}D5ݕc8.wwd~kw>*|z7-| Vŋ@KuQBWeJR>ȼ3Em "q5f]T^JhO3BGFGm9?A#,J+EXr1V jxbf\L<Y ѭ\|je 3QnL ⪮-Ō6_u,H/>gUwE&w. ]8[힘/08^qUMڒ1E[nn]Ÿ5NL0h)9rQ{;*+t"AO9XdUxx5VuQW1qֈfɽbcON7 [t}( Ɇ wDȇХEHCCW2K$$pA82q)Qx cyi¶ Q~%B-<69O+?xp\ϐ~ڇ^ktEU 3Wn_5! 
M碁|JƞAX"8w:}6vrgTq?\pj?zV$]AZr»Q.k} iLHDNCŰ5K M`b/MȌשQUcԾ巈/':,k uQUFF!6QS΃l0 9|zy(C1Z5{@XnnIK#t>oItc/1Z{-+uh46J b/4xGY[мlJnXVa$0+^j 7WMjaU k@yAiˉ i uXYS6bA?Cy4BX%|bk%Jx;wG:9h>Iw~%Xt`G-0j}6 gS&ƔӠ<\|+Ajx޹f}F[*[QC`Ӳrn\DH>E0#ϲ%wɯ`j*N{KQѲ?ݏDbH?rv¢3$UJ -^#@}KBHHb]e@. d԰{15zryAby4Qrd&-bk [NJgn;>QHaO*k *xZű˧1)i'SNVF|rFON\!38ዓh@\G!ިȃh-eõV:*$?J JPݼ: B $Nz -57FOp3 ѭ@c9M,(1vW(䤁7 9CS)؞֗j@O8JN|b6OL\5t?_YR2Bb5n)2<+V#¤BKpPNnâޭ;Zgx:G&&bLܸ9bPL۾;V"l=2O?mJU} jS V_Wn4%1boʛUohe(4?|m@k }!(<]ZBQFHND]fmB$T0)>0 YZLuminescence/data/ExampleData.LxTxData.rda0000644000176200001440000000045513240265317020151 0ustar liggesusers r0b```b`f@& `d`a>!.% I bv0`<r/00p~APdOV/By{8X$&:ylHr4>}e'"[oiy~&]NIK?w~ XX~;m\p$KWAQ 6% d` f`0b74%`OBY\SalP@@\ kQQ~L6$(fdrNb1H W 0,Ҋ),/׃| @3 uLuminescence/data/ExampleData.Fading.rda0000644000176200001440000000533313240265317017650 0ustar liggesusersYgTlHQ,Fi+ذnD$6lAP(zΪ[b% ewأp6Ͻܣ?7yyց BaYħVRa}ԓk|1־ఈ [: Kr$C?'G7{vjh0Mk`-0rm,}W w(M$?-.q6`Y%B=YOjQбVskrZůԍ 1 xڠpu4t:>$ ^Q:?O0];`An7uf`69 1A _F~ )ۮ[ ۗPc(1.%c닗5OHYqtis[O;jo2;t(袝ucԗ}@i < '{ [7j'AjOA%12mj;7{;=e?HS/EhԷ&lovЧXLK ʴg˺ WĮ?Lϣqy<E_Kiߪ98 x6a0b*x&X)\A9}\/(@eܚQ#使j 8bȡD;hLuH Z kzL5Œc.=%YЬ3vɫ d s^ΑhB+S=4ښ2npAfB,V=@I0JB=oNБqr*W89|ByY OVPD}YM.UzyM},%bfAa;Ye®(<͸&(dC=2Aq]慊ȖG^bvHP-ΌsnU#HAU3j?]9o" غx?d+1_tj(7u V̼ h%Э-$>1;Ӽ]r3w2ΗgC?,Wgq@;ޯ4#tp}XIбkn;wd)3 K)̿LU^U>YpF EO%qC8~-AJ꺚"[fː#?rMXmY?N- +8`2#dRw7_@OP.??LIDyg2HȖloHrdذ.Ә<.]z/3J} 'l?Uҕ" cߨq2\n_qv(6$m;,y@δh{5&!mIO?Q׽+FΒM". 
H 3tgWv k7BHǁ382Zұ=4SUZt !}a`*).ⲐfȝJ50AK\n/c2Ft 29D*w D\.%T+ ~u42iȯ0Fi칚Dm<2ddz^=W]~hl$ H$=CW8cl%psJV@&@ o8 ?m6ǹ$WB&ݜYnip#Z;)< B '2+^2g]یn+ςvwރޣcOXm&"%oȎu'6r)5e۳@N̾"WRp{qyԧ Ԍ]a ,r˒s)uWRt@وWg%ȕ6gwr&FnbK}PL z69*7b@ɀ T͟ǁYHdȆO'P1;ȵ~gqiLy$^QJRp0zש1DMэ "8 p20y񣢠l~=^O*+/%[$(okX^j-Ffns-/26d"kL7%פU/=rU?+E x_~ȟY%Df`}U'*4XR:MV"r3G7jiQLw &oo'8S\NOATV,]Yx {Dꔿ/14 #GB>-zk0oFq걭H(s//B"ę=_!_8;]Ͼ߯7u-dIςwWڒ;  1/ 8DFv^pd + Ԓ47G"Q+Y 5b-[3ITbrL3S'GC ƎT@Sg2JUD5X~LZDNF'\+ہgivљ3 BDFyDG<1*W:wXѢ ovZ8C$D| y);j-#Լ:I'uYb<4PD|`7awO gwu"5lƬ <^|W~N1I%Đr_X)e ?lQ>l1vVt;jS|Bi e )T&Eb7:ok\Lsݚ:\A];w3| /fL .H46Pk3T+㗥S l7]uʿw)gfXD)h3:^gsXą:v Pж`IfCWX)>{?8iԨ:~T k[A݃ X繭UhMϑi4KiK"z1V CT[vq8 䂻-@reچfNgvG*`!Ȁbr,X \'E [i ӗSNr1 PIO*q(63+=W8ydHkԧLjCB  Hgvx8 YaZ;"NA'4h}co\t#EI]<\TQ2ek"~\ekSL}"fHi?uF=Z8ɾ w(*1 5ۻgx;Y8*Y 2 tMjq^bcB\4SxrFXF-V'1Z MtSdJ0 yC,pك3 [kd;PBiƆʂas *sq(n_ CJ)uyXO'.ɄJe!)z67т\p=7u'?.^~)(*KX[}DKJó^9? QH`~m>6_hFH`!΢gPl~][j.չAN3zeLx|K%sIr^GDzCڷ"O|T_T]]mMxcOq{[ȰUi*"rWzgVZi#tx=Kpfb,wmSe*w~#!}~f6q%5(W-v/" v'[=TM=H:(ywWpϞ k#6S#r »t&7%3ᦨ(xhozu6iR 9e`ɔ.q5X̤9 uF^Q@x,퀤#%,/ńsũU`$Ң(yz)zt){/v? p0&;n\0k'Cz52U(.D ~ޮ*avhEfWy1g*Fi-sβ O(ٖ.%Pj_!'n='݇Ŧz<S"/>+ x!y5ٗaM(uVII s%s9YدE1ٺe#;&k9Ljƺ\1e-+QEbZd?VN$r[`GqzlyQ ~EKfƃ@9EzQuI^c,(\ɴ~j)ñws䋇g-xgwm #]CnQODnj`bGӐSfxF(Rru 6=jr}wQq @?l@i4~~+ڂS-FQd+8T:&S65קSI1zr T`\2whbUK 0+ʨG%Ɋ׿( c{Quf JnHSK]j!/"`tu%H~bl4c.}$vaA}Ѩȃh;+15"10 YZLuminescence/data/ExampleData.DeValues.rda0000644000176200001440000000156013540751607020173 0ustar liggesusers7zXZi"6!X 3])TW"nRʟ + ,ʿBJy $ ?bz0z^gVh^aOy2.[:#HvGӕ7Rm emzK!8#ҵ~Ɯ.>5CyuR^S\+o8c=G_ɘ}a>FaztkWMu>tӺƐ"$aD3<[٢ӎ"\>0;Sn{  >Iiի.UcژK(6}-{[fs?I+FX>EOj%I0S:4z7 ה77LS _։ - }baեc ?0&5~.E6;+j3-$eL3KA ]-ar_qŸhX󷖄wKi8 3g{/PGu4n{bq) 'S=&ZU5l,{88KpUON~u! -$B Mi{rG2Z-͌=u䶱&{>ыZ_.v&eZw2y܉LcZpebsh*LFQEM ژEݍɆoG$TU|* z4Z6lUX~WZy=l$ˣ=m OkL2xۄ˚E4m3דү~d0ck }`{QhFH&D_@>0 YZLuminescence/data/ExampleData.FittingLM.rda0000644000176200001440000002413413240265317020315 0ustar liggesusers7zXZi"6!Xk(])TW"nRʟu,=!zBtiXP͗/0'&#k| X>: Ea҇)! 
Wq䤿@hN~ʕ5"1 MS8WxhZ7 |y֮6}3 XYF؛h11]#ls*M 6 |r@b8SY?~VS=V)x5T g9X`ug9 *)B;pЋ}#j]gUǢyH\"T'O9qhݫ~,_VfBbҾ 2}gkMsdCp !Qv~T+D~$|{<ʆ) ;vί[ x1@ڕ΀Th "H]4.#gh/ƝM2О^ekCjy:76de%_3DȢ`7,J]|AFKM-}- bUcSXis>!ki,MULb$1Oͱ=US _p ȻX\_д}T`K[۫KKbMz6ruf\*di|Ņ󟅝BkADCBY^_z oӜtߢh~| 7UsQgt>{mj>1'YB'q2f|L+mXkSu8x̝ 9>l6(kڑ.]` G֨#1h# OT4]iKGݾY<B%GQ)Ui7@n ䷴%lu|:{#FKd=vFPy!;qv{&RhbzRE}2,JزPNvhYـ?;x= gOAL'Nֺ0vħQBIw@W-3OMgPp/0U\왾P߫SbwQ%E~i &oɞz:]vyZ%|\~#{q;FˎJ3T3z-c4EҴEU$ojy8653NE]!jXHZ=TaN`Zk=0uiTJ-_͖0l kvSV#<;uFѰ)DsN#8_$H]  HͿ$wb&!O~{}WR+ՠ=b-aXwI,ia^5u&^-+M۞ t9 P W"|8G{ ۞X dT$|~cT*4 ~=pot[,xg荄{vx>O#=' 6EN l/]B^REpZpς<'mcB͉8Jя xgpBig5*낚DJ(^yD^@ Ubou.qXTpKO- 6~;METb 'BL 1)͋y'KT`b5 qӗuK~BOf"ƼY(!hכZՒWAMdA Noj2&4;b05x4xxcWٽS"}Lg ˒(`+\!-6۵8ӸMb5 .]ɵcHNt/#$9O;D C{F{ >g(0~FMkSelRУ>;)A`+RGFuzN \p>p "zo $3~i}Sߡ%=Є1н0#;QGJMAB,RN;Wo ub gD:^F>C3S?j>-O2[Vo * ng|P) Op) 9ޏCiHC7s/O얉yˋoW`P_qTԢ{{83oϲ JiÝ{Dspo g ]j$vJ3pe#RfW!mp<(dY[TN c\]霘Ɛޑ RqMRU:لo"ɝΖ++ |7@cKhP"Wb=P@N{A43-F.Ç&2yzsۘds`8NRG_̀90^dK{,V sΨ0 (фad$_rEyφAfa0u- +2^Ž|V&VFFi4}zx:mY2 ۥ&(:\( 9E+>JCJ붟:8g 듸ݧ?72ˀ}? 
=;a())0uʸGZUN(qlL|nb_ij.!s8{PQ.>MA$v}C3%}NQ[9WIy|61!;he^avQsD[ :L]m,~O<> |4YJߔQN@ *Ry#γAV\|HPt0+p2c$źO:%0`n<**rzBRM6~9|Vs6rE|rߤҪg8G=;r"|b8\ CLY ` XRW"Dg~9(n3]#d7p6E˱rX1zvzOgGbR,0o* @%;&5Ǚk]s>A|]u#zbqP p䞢9tՓ+8ꔩ8't2l]r>w[%u@uqrV7fC 1`%]ն[$_{ -rƜz^n9 \/tzW2BMM4#8N#BbrqJIa?TP(=>0vpb\~`H' yzK]+mzf { &>Q-Uy@g.C>׎Tɽ>M7jdpyq U8=OTL]8(=T K%R-)X cp o1''~PdNj6~ȞAm2*xj^e)*Gd !*KqOffbZ+R MjM3B8 K'thA(SI Ƶ[M5c3PEi>WiL?J6d3?:(~$4~$s娔K `)VawO8Iw R!y/_V&Z ָWݻ* 3PsEQZLl;-G Rn:J-_2 19=YM=,nKynp 6 O<䥌* nQNd6N9|?bB`[%ǤMP0_*?"Xo]~ӄ45uj)rIس\qOZ ";a;TuC8zkf4H[U#HѬ-9b}KV$DBxm HOPpOÙ;SaIՎD+3ɂp$47juR*IG %mVbTرj0Kv]<@\%l(&@O0'C.|}.X2衞eWRr!=%e:N;6}icc0P$=(֔tjXGC+Y!s%%V"Ui*ܣsTW\u:\cu% ӕoiۮaoaEY:|p&x7{A, 4sOEҹbS럕Cl-Mi4+Amxps"II eqOV4IJYe%~O=,t,ɳ]t Aq{s"o4Կw|ץ/;(Ω ̐Sq!Dxnu/pD/L]E09T?gXWB+~-:@Jc5&/C,=)qOZ!78+Cxu\6X ͦXaq[{ )&ڬxnsdg'z)eO~JN$i~'Mo2fIPG[S֯ ̊OAx`CWf $zry!z2T:9]6  &jq#yk*& Ԏz<q]:Z^8Unav$?Ƞ-E%d Fȟ rd" мZ1 : Y c]7;_[@ّ5( Vl6kW"iu8#I1R{\2lE7/|0f>Cƅn,qy4m=mL٤|9h{깡ϯlTPyfZIOC.qsWнk Ra =5\e1+] 7dE\aD R@lDtDl^>NG0K@9qVXsּ:,|f$Y6Z6DO6W(7yjBb~G+djAf[0 7,k"IsѺtIk_T4[]e_BR7M/oiO z)`)2)&EQt-Άx[X&`u %g$TEpSwr&{ otKHًZଥZXf'!82PG +2H5-QG p=YKnѶ7۔˹NppG(%TD}˕`|1c&+[Cfw*qvQJ$oRpH)A1鼭]ޙ Yw_{ܸs%٤QlӚȃA [&*f7L`t61F":߉7-lN faB)XPs!wMNm!ZkfYU(#xġҶ6)!/6Bj,,)kf( &823z/[G 5D?\~e)]'Q[Mz;q(1ͅ X%dD":Ly"e}Xj9LCt>m^T;9~PBuqRUW4IDJ!*1d~x&9, lMO~wk}3>oӭm#eй H/jYMHd(GVqԗ_EH(!#ݔcQf^зK6H8,X 8 zz.?D`1h}AD{Zjr.ֳ3a@wR!6ڇd3e ?DNIA&Sb2\4_nݴ B" 2}5ؽDLըItdJ=K$ ĮxHQ CҾØĂ^O M)[~ARJ^ƺ*?@~m:kutP2,Amf7Oe,ORI0&l鳍 > `K(::)c\!P&m?R4-*}tQU S٨K;B20NDKb t1sEQL f)V-\ͯ=0; #͘O|7_χ0|ȫs\B`)0A0T>6j Ҹ/qI{`a!)] ~@ E͓Ϟ $ _QRLV ik#`z!̍E=B숬[F)Q (gJ+C4wP*=\IzdfH?C0+jTA Cg|^|Sza }rd޶GMY~)&zPt,g3>_?d2Cb\sF>T2acRI'UTVs=?HGTu~d>]'bQ1eb6"Ma3DUc-ۭɩd 9JHO+03"}K~אYR,G0aH#OIi?KG/ ,&̥wG "[ ^mz$)px =C(=0` 5*@}m;b gWF_el Ru(*SrQLP|#GY>I Q Q4LN ]ʱ8!uW͞8|mt'ԷY9u`/ToAj[%^C97`{0 YZLuminescence/data/ExampleData.RLum.Analysis.rda0000644000176200001440000001543413240265317021124 0ustar liggesuserszyXw22d( > *K<w$1L<#2(yD [ks~y뼿z{w|-de%$$$%K1$~$1 s{mEQ+F00,"4*+4?# dSd 
C|o%F;+:"H^fF8x ոv2q d=.Х|aA(9~lW.c 1C wxg2`AB kuMB#Bfaϙj.Ckk'-O `oa<`rc+NY`Yar^k7fh?U61_L*՘xHy,ߡ=js^F܀PAXh&i ogcQrWatF<y]hW`ArRZ{J ,)jٌsZ˵ciiX!;jX73ww vQ{[nQqK) X3|F?a9Gq,=a-gzDءp V+H%aaŅXoQukI6C W.aW.IcePڧXy^\a Ny^kjq°cpUNcؕt̋U<'Cc{εCvs} N|9~.*wBRz/U V^tk…V`]jΖmX] +[Pk귒iQpJSrnRW(7Uf$íh p2mRi|T31k5iM[5k?kQץgva킙=X[,ky]/ ;!knC.f=ǂ/ #*xms>֬zrXے4xZg"?i|&w`xӳl_lTB,?Ǝ%5S40/V$wjT"@9L#aȌ2bx,h$.jzXWtSdS:І@ W8!Pՙ4]>KLCE^~/_Q"PGS'Wqn:;^ZQ A=z:pĈ#x F6eU#`eY#li= y^}!.#d*zl拐a ,r51Bg5v~P8^W*rvb^1לEqS\;XS5a  lq+6g kLOexZ,ke IV]AdlY#G0i*GC\ڥn{*POە|I3G#\(e";Zev" 3Ũ6ŸyD X(p"fgѿ.`DwUMJX/Y)/FDey( q}3"jW'LBf#;͈372Ԟ)"-|i"5y^"~ "EV "hWC8#e6`RiҮc?^ˣh+OA9I9+BܣҤ#@Š S F[H7!Vc}2'[聪wb_ߧ:cF,E|Ci2 z AYeDK$Lzp >zX ˵W(EoOgf$M OC¶:4W J݊9v\QNzXw8<|,u$j]^q b^@SҌGH `iل H,ywF$in׍ٓU6H|n $ITT#i)kx+{_|_u%UȘk\gHZ>$^\ I'5/Ͻ]sU"#6RHnl1d@1vV$\dkR okd8ɩkԍ 9[GɻIɭDdy OאI$]R^:W)rk޸!EuC..Ftq>ߐ09;kBʁZ >ˑrVSRwp)/+N@D}HSppR*ݞTˀeNQHu+ZUj]ʭ7d n~^o|7]+R4/} SZ`}uwYN o͑,Sm[ >aP̤M׬[n6"}ۃ ҽ2렊g&uHR<݆t҇]}RUxEJm(G-^OiX+H?I|,W-ŃDzkUgvwg?9nZR`˪2 pW]@k:k6 fw =IZl}ebkv*;ĺ #^p>1W;A`go>#Al%U`V\.죎_]qC4,ؗ#f}~H؏XEހf`ܲe8R[;"tJ>8j;t a(; ^Μk[$5cNq8#vMIxUWy]pBJ>8;11lxpRc|38Y붗#2p.8;(= Ͽ޾$Ws]P L\́jZӄ(Ԟ>$ ';Ps习((;@A%/l Gn(H tQI-8BR犂t?LyGU`MpDpU˗E#-?=yŸM21דz] 8dw0ZD~ޱ~gF ~iA3~,{P㑟E~6 _Eg+7yژNB =J[!%~ ĞWZOko@}y/Kλ+;A rxtA3/@xR>]:S=<#bw6JJ]6?bDȌvS?)~$orݹy$?&PCfv okŸ{k37JDn;ϐ[KsoZw)r+,l"q ~ȽoJ#Da/$EXJ?ʳtmC?w30k(vJOL:wRz亾}\-IAMdJJfyxd笗 oUH"87Nq')p/EV6K0ODOxX|9" ҿ뉝Oļz+GQ~=/TeBZ|ص0}w܃8S='2 6%_fQyT]S;'?P<+n}x#MeHcHS^iYEmt^m}WtZvAt䳈>+ s%EDeSUD)$N"鏢Ȁ(ʡ]/JE-_Q '&L\ю܃3e $y$Eϫ0pD9+T?HKSzڧDv$" Ƒډ%"O{D+b0J7c:#Gn8TA_X\@dN7tXyAjL:.B'r@$O"A*l;])Nj+a9y)R;{W?iF7߾egG_!gezޢkz LniPH)t&u)d~&z0ԓЂԠR߅Tn!Ɛ|Ja?jaj ttCUPPAżA BGV?zg? 
]EmWB7ΡB |\j"U; T:A?M]sx~$C퐦 R;tI H?_ ʽ,ߢJ%_ SM>CHϛHi&9) <.<'[%@PSɌQyDÆTB*!(#~$S]y&]o#Htԋ0뱠iwryTOr o |H PTʿ䳀?A)xQ?Rx 0~ē||R?O0ğC-(>UJ7%u P"FgO =tjTǔN>AOMNqNK`´:'|wQ=ǁ ]o'qΰ>o jg#?_ψW>7Կ^y4=ӹ&S<,ܘc#d{JZ}h^'{KSyD/^>z t 'xߑ*5{AMzu<+L# U6c.ǿ0H4\ Ccs6dJI\xx'_QڣEm;/G7w%'E;9{ !^3sn`RT_e_d~@_Q+*/_I7LCY﯈zE@OoάY&&Sq% O ]-h(c3E EH%Luminescence/data/BaseDataSet.CosmicDoseRate.rda0000644000176200001440000000154513240265315021256 0ustar liggesusersT}lKQnƖd&'l,y+F Y6ijA|DDcb2Yjel뺱 ALzs+^,q~{t:aT:FŮF%0&NQż}ÚGsbao"}dXaYsOؿ*8XV@Tjg oJPu]:ڤwuȯ'3\ЀSH&\Od'}mf#rB iЁH*"% > M1)z}aHp˟C8vwRoxhߛiK_T2})! M@H\s ۅ[ 5ЖnY#`ϨCU H@p6xޅZ @c e%c3fXWVf&N ?b?Iq_*tl?2Xbx#vD<8h4(4ceݼ`Uz0y@u^Y[tǹ[YiD00mYpLP(lXZJ/96E#GʚV|Bo#z<5̩L~m""wJ|kG,9/G(ҧi+/Ք=h4Ef#3 fcF4xa4#OFdS"C̤GF&9\n=uMVg7 U_vuMnjh9#j*h8O/V]/Vs %*j[٦oއ39!np; y )TY^£8ƺb_\NTBR hYvXE;ٴxy|aIb~z^Luminescence/data/ExampleData.ScaleGammaDose.rda0000644000176200001440000000110613417222471021257 0ustar liggesusersBZh91AY&SY*Qa>įo0C)lclJFILM0b1h4i 10 L&~PPi4 O@hhFhFm @4  4@4CiPgB6  #ڀ( SkPeEE7TSsJ<=r9ڕy ?^ ?Se0 }IPDI m$/[o!Â6c]H iг`J04p @SvI%LU nB"c 0fTDC>&"șF&o,tQlrNK!!Iq w#[Y%x3`;Ew U1!K9YAw HrMtm}+|Ŷ-I%A?AMؽ%QkL#玛|QՔ2DVF&(ߥ§ TJɹ#Q;9S"?OiXKaH"#.p U)Luminescence/data/ExampleData.LxTxOSLData.rda0000644000176200001440000000147613240265317020533 0ustar liggesusersKhAI$5O4}i ZŚ( EHh[UJ=͋I=8ZP¾TFSj"%`HxDžOHGz_?39ǽ4K+ӟ?K+WB/D/e/2zeUЫWErzjҫWG z @㍜D^fz"""ZЋҋҋkJ^6zuA^'.z]uKK?A?$$$$$=MϦgӳ9z=c<(KL.rn)G P]{D ΡN0sJ,>3 Pj}'&nN( N Ϡց)m`6. 0ҜG3fP ^'XUAg-(ny*Z˂8mx`<36<nH:'L&::!#X+OL/uLY6{B~n_Ŭ]01k4j1n6\< Ϊ^In0byV0fΡvKM(Ud̸̙z&o\u};5ޏ~0f?Փsެ1@e'y Luminescence/data/BaseDataSet.ConversionFactors.rda0000644000176200001440000000157113417222471022062 0ustar liggesusersBZh91AY&SY$Dm_m߻eKy,j$&jzOS& h0i24' 24 3CMɠ=MBzCm M (IF$4L@LE 8P9om׀ , g9lkn=0tVQTgVs|fU8h>tI#g9 AHHҀD]B@Luminescence/data/ExampleData.XSYG.rda0000644000176200001440000016120013240265320017240 0ustar liggesusers7zXZi"6!XC])TW"nRʟu,=!zKnbry}aM8&İ֤s0ɣHEr]! 
+}/e dmL_Ƅvi(\>p@D4wAS e,|@Ĺ!hQpDC[1!BgJh"CRRd0mNh(d٠#8Z&zdu{aCOo.Qgƛybm1өB&_|L{dۛ*-\Ӕ=:uͮ_2zEZ6V#8虨MzK2Q?TTG"D(b'drooFA"n {GjhG>=qH hm ?(tTy5]ڣ 3' ej>:$m nܻ =榠i ya/,8:_S[xts휉%oi,IH#jQ1,C H ,MmL/8 h%2JxV`ǏyjKa}1= g ;:j%'>Qt"3zO{`sO[uI΋{/*f#X7͆ʿѕ8Fy[ c( 7\d0[U?-o1*.VQS-%ޯ+dwc$zBp&Q- jLYU!5Rֵ٠N uXI'Ay~Lf[&, L(xʯTj#[6g]Us@b s]Q=lnvd%42/_`vxTbNH*ExH Dzm׫,$yh2Zoc|w",Egy{Aq9jbQ\O)QNhE-` ]OҖ>ǑHdᬀ.jx/l2o Wv1_sDdCJŦ"2?xg,OAbELZ;5(oB7W Uj_ndՙ59sLSKT\>L ']<%і d>c35Yӯb_H6 BYi½i |ST+!R`O3ȭbXW$RrN%<ņZҽ@)3EA(Fo[$Q-8l_0Ԁa'qfXWItpr!4- T)ܸZR0ReGB"!O~d]n=R݄'v',2 \`L r\\QMzj#))HoM~^n_dA壘#% ?wj R i`ٵ3-%a{*dB\8P |ƨp!e|an a8Zل [GZ lCv!"no(76˗>W! |^(ZQP Mpz* WBGuc 5w~ Tg. +Lm6a# m./xɥYe@C mJģ/O(D2BٹG68EW#"MystwI@ -E md 3|\楧ʱ_ev{zᚮ>pp![ cDcPbgQEj]$!:$oQ'1hSH;۷?&_2%CpB|T֩>ס9-ѐRE[pUB;nW_?t.EZاaC_c "X оHbTy{VNrx,qِ]Djҿ8IˢI5(C29geXuXpmV^yr02<# jy慁;?))$s6B~*ɉcxWp)Le6( 7́EEke;k ԕc/NkǦL"Fѧ:*Bv##DWK8~.ײA 5Oe,̮odIw/h!L gEWOn6b  E(/㩀TYJ8vx #.P9c@Gg>BtY6EU:zl Y:Dι*+ڥ* ƳEm5M=aw[aոhZo8A̜+O/&klM "w5T3c=,s,Xtj:+ E_1mh8O-)K&v\hKX؏AM&(](EKd ?6s_}HU/hL[wl<#t_v4d"3pB0.%2ȍ ROVa_sy^I&0Y+][r8ڮCP sS]㘟DQ1ҤTرO=quUYd#)dR-K ֱ(G_ZOZg@Z4vO}\6##inR3ΟK4vx?+80wHOv"ySkjum]XvU1~5׆=u7J$ d_CH.~D^]v?PA-fkREX<+b|uq?@w{_QThLޏ |]D&q5 tRAHՅPcm!N0)7m)d:*dbŷ=^C$=)0ajb]l0׳d~&,Щ<{ zq@vE'@9!~ 3~Ep1<%p#@\ƀPΑg"B0X$uvz)c22eK&)G'廖Krn/X_OfkfD/6DfPivRN5#UlӨcjۊ* WFq*zQ )NPWfab*[E۷$XGZ7NJ?}+ÿǜ{[,Wt]a7S#ܫfo(ٹMx44eq +xd/(+V]_@?P>*|xgH1Faew>9ߦηʗ*_`FD !{ď.v9#oUq֕>/=~NZvx0_W.?ȋhP}ui 1<#3ɣx+-ƿRAh=3b4>zY"WK"K\F:N#J"~SW[ԹV.a$<ɆF/g1 E|,iMf*[A%}c?`L+c@).m DKɼu7wTF(z -jCN!AE9kYRӝ鈋;;)[u[j^PS.~ l[9'bGu*YRV&^"Wqs@ҳZtEU)|)/}En^1<5ޱo?EѹȘQq|Pq?֛i)l):n.a8vQzk_(>N 峽XrIVT'18Q;_n%<#jGYt @;cq(J9#iR!k' HB7a B.5`Ѐg.9}1[y?= F7I0pFsXT!eai*Wbؓ;}kdI`nop ȲY6{q_8^y/1(9W\;X/6@Ӎ%a˅ڈ7jrz=&V)*9&#_.hst@8b +e3*MCP@V/ҝyAiĢ^!.7is7iRrw:z&aS/qby;/'ms-Z/O3S·Gq;%8%t @x"[3V<_:dQGIeϐޙIWs$jP)jziޯͧHGeks a8aƠg{bMO8e]")؎DiYs#BqV$,(I#QX"$|R_DJYψ|OӒD,wRO4σFS㚧A_[ͣYՉh}Sot4 bUBU+z4HqG1qfjm_X&2U[rPFx*/ :X*)7 2sSõ͒zR$aZJ|=;~Tz5K "%;PXGQO 3FL=0m׻㡽}.r3"dJ 
^LS^t=5O^^lPXDyS)ԑÛӂ9*O2@HeqTG2I2IIgԨ(t]-L+'A6 wyX@d%%vH{x P|HxK 5ǂ0و˽_woB3W.~8gY՜+$޿wv^̤ٞ$9'!;pͥQNs<I&M/6:O--'q+c/Ӓm ObԤ؜w1+(w' -ϓYmz܂C8Qb?zQ?m+o܅#=|.+ ܧw.(-pNވ@!P͸rD'إG_8,0 |řY iKSnn Ig@ 1:CeBe3@}^LEcEȟ3Y!_@- JZN̑|J'L7;:JO@-6HAP@_̸y~%4KX˳Vj݁3~[ޘ~fhiĝR5 Mt_717Yj)-%ZZAHOKr4uZ [֕>&S?.o^L>y&?*9bEx~øhӉ., ~'`xă\wFq OjHAq.Lj9saK{A"L=x';jM ;莳D9n[d6rRtǨ`9TE?~>֬tL}x;ˎۑ^,0[IԧeBZ xH'&ʛx矂yevmnc~ Bu%*hR_ X< Ϳ+dp ܤ(\Xk`g2rk+A$k b^SJ.ƐlcDs']FZo-^yr!?V_`7|='Hz^1UoQ`/5, ,&KU2+yx vIW!Y@pR `7^kc\vw;*< )1(QcOt>HB 02Pr54`o|/*QxϡnQ9 rX.g b}fhP,͞(]8I.4s磦2"_3L !v؊tvnNB1(:*tD6KxpWlVTY4]$vQŌ arMOO Ԇ:7En7v^cT_SstaW)$TfB]t 6uT) pE.OSdlCRl,X̞&+rtSոj_` =E*s lѯxOB-si8$m՛FC@B>A9"|+ T$o`ĸQ* q~<M$P|ZvZo A旚rE:WGO%Rڤe?>n<~LͨD/e- ^Ӄ?ny|yO&`|Z.!vU;T})`<;,iUDڅwn< s80676&f7&/6Mh ,G rSX2CkAB|!&@R,G9e k$uCx@ZeP.[Bx)8穣pW ekd*a`H&(ndfUǥ|8"LXxcQJt@&̈́xMD 2ސg>;P%J@`H7F WuyT ǀ1PE%m%Y]ƸtY >1g4oK,މOG$5F.A[h[H hUq GX QbK{5!M[7kܼm(,Cįx9,7FZ麠ft\ADRt+'kǤcefDzAq"h,j8^s¾F#B3qh ڤz)4ڀ:pPG\UEZհF72nomK|Lqp\i`Xõ^0ښU/C.fIf3"i-\H7}I8u#hF|op=KnT9M-3-~8.j<1]AXyЩTB:i_ݻ*B'Z3pl iw'd)U?ʫIpCX(iz[KhȀw@Zj)r$&:DrxwMHKUY=>=ezĆ=\ȯG #fOht!H 7?EPрރi,XP\3IݓtQlZlfn@َ%%קm֋{})I tAzoni~)=>"YhJюகҤ mwGpf%~!Ýs5Jj8{=X-M)~@Ĥ|g<Ӳ _k≚ pQF?F똙0([bJVW/ |] ~//ldHxTp\)}kKC=:Hqȟ{+Clgşh97g@P xdW.c[]?Pʨ/?&a2OWF83E\Py`;BME?j^;D6u˱Ò?n-m_oq_yx EӸ>E%hq?2s;VS;ȫ.t=a[)D@ƥZA3Vse հ\)^,4t ejY( otq$vm_)hZU F~IIT&lQ+cJϪW16^cg}Xe ZfLAT`EaQ[+mJl Ng QV8Xw =7K$&?6,gqUX I)"_![k@-Sx'e;+.ɇ-*z@ c+a>J .usbխWJ±r5G9|#I}auQ#)5c}U^}Gµ?蚓Uk'naعiXӠ/ V'_H>-/);J';T1ZϽͩ #su]Xޜ!K}U I_pLJ# \*UFGG073ڊB;"^O|k趨%.%uDu:Fko*P%blvdaѺ@ʿy ~~o. 
V103ەY@a<ʼnGG*>э`l* %9U^RF*1Z6TyOީĵAdLnڣ#͋^W%U6}&ŏ=%^BF:Ox;/GVE$sb.p{8)JI;וk3FHXM5pB# ndFG%h5˪3OM?1Xz k5eM}ɐ }Ȥ7:ѐ{MKs/BWZf:cX1\DF &}ǗL#th,[A`nݍl.5BtY3 䜱,EGr#`O:'U{XI4 *6?K|,2O^dM+-@p~D&1t͛]薳jWa ?y=?G\UT=̬ $|V*U8QehMF[{ T?n7XzJFg{@ "GpL0ICe䨕ӻ_pC^f-[%7,]} ҄p*69wP+JJ3Pȣ|T3bX!XALJahH꽻.S}He(pZW'CohVD:SPO[2vP~8n"xg~y'μrQ1N4_K@S)'l[Pr`hKс®RlE;rb|-BT'?d#LOied\ Jl`V0:B>F$yʌ Me8TKEq45B9c[óJqnR_E>B`R.?= zx ;8m\M÷ B7@(>ʲ39 ^&$Tbm $  ;hͮo)]uP"VheWvϦF}ۉCƱ!R7O<]:GD JLvSvO.>N7vyҭ&`# AM͋(#|9a;̏8C|),8 YaZ/ +2{Ge4o21ٺz VD a<@ʮCjxa_4,y \]io^[%Oq5v'T~ޢ̅Hy>F<"̞@ *(6 swt`-:#mzܫMtsz,ɴGc/²Ck``-\1VbaO_rTx$sJJYZ劦M%CpkP4R:{~%Y=Lnmo=`++1ptYSO摴vF8xvh(C^8+!;n̋y)BdkNhcZx0wG8swoI#czy;V?#/̝c<+>ZHc7b#i*RCQ.j7^^6)Q|J([*a9B}ocUK[V]s.-B*vt[+Ծ31EAF7*I8S$7Diu%%1~16,G`%$\ij7 4ç#}쨐qCA0jm|̈(a,F@p"33YRߤhuPW|h&*ĕ BU]ys'#?-P¶QImP-)99sUk\%"1C--΍XTUN)sg6F}|-P8ےhD%J ^D{8hfN`0!AIf!MdJAݝ>ER_ǀ6c̞Y ma%uVW3)z^"+sȺMPW9fv'.}}S0sD!J-}UQDʚPmLM9L ЙBgw1QHcAauv$1ὁ=J"%Dte! E*7n*%FxwF]ZA};gݣZ 5[#<ǚ*̌v)-;媜Ag+}{ˇ(q@Ʈd qNN>*2!6ƹD7D%2W RA=΃ˏYI83w#f{F9Kq Oq2_{-JM =Xk}cV^R9a]W7FnHc߫! AunugA|\ubE<WӉ_t 1rVj<_3oPaFm gBŏB7JBÒx P["1`W*Q2= *w\a3uTwX.h ]` T%8,^)ƀ`+c0mR.݈)p}nxEn9~ԺzdqQٱ巼1+3P 24b#\> Dp? 
A)mm tt 𨀁!s:0(0%T6VYPQ !!{)"c{ f2,|H8iCI*N%iә(wos%[^=JLJCq,P1 lھ[ ٳ<ܨUP^RM<.|,-M08B܉cHC v^i >ŠMw Z)-?"8B]}q?rMG,9QgǤD~ACMDxPVCj'G ـH,a?Y/5¨)!ςT[[Ǘ_^=wBZm[ T>兮Mσ8i#8p tǥJȩ=X+Qg( J-ޏJRbFڭjgWA1YwӟU]Ch$aDY4ٙ|E)Oo0Nmj4LrRAyұ9qIuJa(!$Q6k/PrGĬl:K;Uq"59L2pA5>źx}HtUCUd=y*!R;iŋfqc;WLsnΛȸPԑʈn Ta`N`#E/TB#GYsqǍ";䞏rKe_iSc~+JJuG +Pl]>RJ2@T_AǷsZEdΔs+XA<17L8y=<'6ue}#Pew\g5iMTvoi6r3oNK5az!/ s4-re(9*]} qL<0hXSQe1a%DWrU^p 2ni[,[;aB;rt  u1˷Nd8ݶvF7EXl-46M}mht[~Zmr^u{p=恀^'XnZq C4R}Bqu:_#^([|祧Qt *K]^1KS_ crh+'/xڸ%@; 6'35XXqz@jLMc"eq=Ok<7 *T~H^77zL96"]w鍳T'访 N1D-K5X>s''O(,7~s7 I-U5` \]wALfE$ ` 0d|n#ꕒ'wB(?fƘm5J{]pM]bͦD|Dt/]J].6INY)r7(2<2RlDJwT'Xiٺl{B`TcXz^.tЍ9g)LL+4bf,䣮u¨588r0M!/[ c}].n5M9CMפ3OoҪ5؊:D {+Ɂ?.J)\Hz)\e 1;YWNJz0/?LMu^5-ߣ{V`O*HP#0_v+I_t/N:ice%OU^M~U(n”UJ,p]qB"KMĹCG}k!%=aA0q+b(z4'H~G`={իu~L`pOKu!>2#[Th9*}9vOᱨ_}u&0WrPP,F,;!UyJ#J)Iμ3 Et2#?z13u 5N\vbr229ƙ-}1dSM8Nn i[n7ʚP2uM*myUMp25.5mV*UL=0iUh%3Ϲ͋^T4 >h-7DmU\ARhxJDLrڑ0HQW -d6l}gխ5V n/WS$/^Ypܢ禃T"auYYWi1ٿ5v -bZ¶Y 5^f 8X}tev8R 6|eVA+x4pG)RJWVb8ZfnsoLsb@%#^ kX Jkk%$Jo1 cX9:wwILOϣO˧wQ.:_ڕ#*ro- O )uiL۪c [6[*ߔ!-ʎ [)&}(N cCŊ쾐C_3MsS402:,,iLW h3do56R1Z@}.?ؾEXyCtzIOrz8:r:c sԴ6{㓇&Lpx_*f* غjEM}րXaHpt)\tnN70d%ւ!]>RŚiӘ~fn~S$Gkf- !՟g@.cƛ0"iP4cA:p@&ԠEH}vxn<\_X_#ZB6τ 4%8&shpbu5B90B2iDWmiQJ*-!~_h".X5Igԏ!TfBOPA%XZ_NE k$ԧO?*ss_@yZT9}p<)j<+sWO#%-uЗzfZn,egͻX'ԍZ4}k‚^;e8?tH%M9Rgu-"|,ʔh%ߵ3:` UqesӪ]i*ߎigK7i`i3' /״Nezr#@%mjõN;4B`9#*4 syMṯNO`UE P ͔M^BOm|0[%t}X@Ty%*r)'m9J0~i j5ho!`y'$!@X>` NUQ9,\V3hPˊì+rp6ɃU#1zo+iW Ecam`ԓg&߰0Ck 揁 |ǿc$bha66ӡ!G-WXM_{-;F2.0r0bCYk@a!XȼVx8e $dztۗ9{d0s0ˑYm{'I0ޞ Ir(!׷Svb0 xꊫK]\4>|7ѤQj"syNxtW9,bM9..n1B"p! 8Y Ll5 s6wLT=j^ˣ7H)d&)N|1s^̇:1H*+M(&|ܡ#ބ^ķ'ky۷A?8 ?JjֿNvvw)-J[hfBKu=3NԊxMOAhf9wr̪kV;8޸pM5~G TΨ76Jݖ _ȅqPh*5z=Jfx{4ZFB)ZsR?0 %V U+Ui6}{*G%H*`v4,ED%_sl 0c8U6%^jSYʖ ,B!=ZhMO9*/)e&?|̇{:g8@AbG l*""G`#>ɤzR{(}`YNZ/hOqorG(_y?#qr{0c2:?"pߐWP@v+W_dE28D&9{AeYm vv) kKg(7FIZ#~#rUr9}Q5%aj! ؆ ppEj 9 s/[.d%-bh~gGb0$Mx掣m>>i~h2+hR1@!Of8!7P, K-GRhL;5%<fI0|,]mWf1탇lxz*pEUZL schȧM0:iJls:I@_D QZ1%&k5zK^z tX3Yd+UzV,Ծ)v"[c=?>9ByۼFb|'&=QF`. /YY^^. 
(v[F:4i2G 6yA!-cjyZg{+F!ĤϘw D2S*=:e"#v}^2S-H!A lAqzRĘlxU`]a7υ{ Sg)+%%r eP_ 5${ZxjUM5*fIgQ,;-[ :D4 ,9} m\g>_{ސP x "U$+N1ϡ} X'%=B)tP^>@@IQ$=7O:zqm(ǛM 3RpAvu]%GTCi * @ifO]g˪\9[F֒h[FxH] Wk))ƥGɻL< w WҴޞ\!8+hBqYMme]dp碷<2ZBݷ  5 vS 3mN)5*[z# ^Qj>~٧R ē[ %~vSL{_H̯T 4F~4xc{/21q% EQ`%Tc3=So37wƽؑG᫻RUT:DJAK-ESWLcY1z^ ;/Z>%ږK+Qr5 4Gռs+[.DF")ۊnd3†"bcևkD23ɰ8iW bU hCkQvAA0/]2;䱖`ô/B *r}/<gQ.ብ|@qs $dHd&1'l j}8TCNT#Y)fX;g5YkB$1@)*WGC"WL~6oMJHѴ0|?w5jErE~ V:9E ьPH6kQǪ HcsЎ 6_v;D}tsA!EH9:r8(+i5a?* phie ػiR}ma/%e5 WiCQN Y? Vi|ȟ$dCaLI-;R0|s ^0vRǦ:җeQ(}Y1^$` FI#s]0g!#Qӵ3yE7WvN4ϡyAG`Hj-[?^.H7ن n %:Ӥ;0\`mlȻnv;E#q>LX#N1%l*.S:MM($*lȼY d9ę=r򄨟Yufȉ>{|(pBEpb 0zlQYDcoy/R$^=&pn!Jޕøyk!OQC^r{e#.X]%ūL^3V=6a״|XK(7LЇdڢʠPqA wfV>̂ч\yaA^iˠվ՘xC*ztk/,bW_-y+X&$-4Ŵ)u _gJrvlKu9Zj ѿ(a:xѳ2$yGe2;pAvuyb;`ʧG,:0jyO?`"ٗg.`\%H`)i\ )16F(bH#ml7E Ԗev-PDz0Uw|A/)VuZFcB4 's%4<{HT BhH e6]ŇuD.RAV\ދ4?D96&7nY* 0IٻM)ŲaLPkwэ/.UiA9Bجє%-K Ƿ qNõœV0eԋڱLU!;(w!6~zˢb̶};6Tޮ٠rӀVUz70 ,,0?~!j"ĊLpd+RCYfp({=A4< +Kd)IMzifUg(LckiC+CЛZ]hFx!HG*呌ٳ*y9ʑG2sqWNxJ A3q'Rd$1DECJUOQ٥޿CS"cI>P7vM@k~B<ɧiRo4JNJdBH[B8AB@.к AM]3QlSdVT;6:bM0|G"=sRi;x25sLr[-m72KhōS_E_%y|J+`%s1eLq>u@/Ɓ:GL;s,8TվqQZC [PJ`i&z7W8+=_ "T9XbTtgXd:3ۋ]%kFTsߤOԏvѩvXijBw gn0Di¡b4nFc38]HγV*%2U~'򲀶vF㖂ۓgF(wxҴ & VҚJ(aX<xn"wXټ=pPh?Hxшg圑J-)#Ia#ulh'ssx j ;S996 گc7ɃNЭa -D|m/-{qv)\dKOLV߽N? $zYˇK"I9= cCuV"b1^]6q rf)@NK0Bhgc7yx7iJiH]O+I %Dxbƶ`Gm0gӜyd|sJ7dNJʆ볔p6QZvU"!xMю+롻֙A| Ͻug a?TܴmҾTYRm߰n{m㑜SpFñXJnEfݖIAq(kʆl|iSUetaG{'Me@DW6kS.st)'m~}f4K-缗@I_)ҲV,EUar]^GQ8/{Q? h8&''rp5hQ)"n'ߤS Ś yQ*#Tbo ő-7}QdEZr"WJDh'5{ٛ+#DZ2{:PA9vF2.?ϵ[#!Cی٩rd`doypfI.-Z NXyGשp@zF47[Р^>O@H[rx휉ܓCՑKDƣMt=[ p YIP?HdcmM} SC#G0qGikd~C24 јC7ND rz<ވdQy'p#X:i=XjVG.Y!(TX`$R9=4 LnQ!.{k-&`{K"fqP,)t|=Y--|V@< +%j~z6C {80jrcv2y g rz8Pf w[ 'Ky'kg;&L0WuV&ژtxg:$h>h%㜡s_NX'01Lי.n3Xξ!x .CV#!, ![vY 0g'łٰ4ޢzg #b \\n>Rr "#KGo}cP&p.,!+=C&%a.6ʮe5۷@X؝$ȴosL1ޥ+,XB\95 %8fYL2|w 2:FρṼpqM b}ԩ[pb=g~cY#&Nڽ. 
:>\GeIi >:N]SPԆySd~>A @cS45>ƪ P-vHawFZը\*3eUuXt1'$B5GLm6N(ȫRv NC2sōV=jrDJJ Tխ-QH̯*Gl/db+fOo u/zӷ>7#w7= +NSp%5pJK:u 0B)>UڶX.*W{UG?A 6+ǽz]sYxͦM 1"Wi15Mfp+>~.=MɫA;}P},hc8obt{MޚD uCȏn,YH 3#8\O 2asa@Nm29.Pzo<MqMN1nnY3O5J&HwdGё&QA6L&h 3TW4<HItBʆy7XYe5p!2>x |[`J32-bI{P>˷Y"?-.҃5S3к3SQ3.pnCpZ%( U%YGlX'I]~d9<][?0F p; p7D^,"!'چ&yDfȟ"׺?G/w8 6ҖPQj+vǖ9{2Ȉ}\9Sѝ±ġW]K ̃-mܺ6E᥋Rг4l h VJ6 UE( #?L#) G`sbF.↢Kط*{)\4Z7l Q9j-"q4*N(ky%lv+@ˮȲaT) ``ܰNڋ+>AQVg B[0)w0 ''6(՝3?6Y˺\oB{WMF9XV5BV2 a)hk N '؞p3 V Xٙ` xTIN~l+ܨ${/U S詫KvCYˢVƯBҠZzM;goph^_n8Eٷ/`&2#5:5f{(WZ Jt"S:Y]IY5$wS\B=]fEΉ ._)R<6%A4V81BGiJxKQl9W$;M'F *}=aRi7d-.EOf6Ӊ}fo=7}U-r=I/(QpS'Nz~FH+p#擈܊'^DTsbXVSg|6j^<*Mʯ&r^MI08rkԣl:Y,nە;E9Gٚ&m$ E/s)ZBf66Utx[źeTqFY`G 4HF сS ׂߊU\URw){oFm ,wfMechl?愯;_Iy|J[`4ْXL"iL^w^ :u9dА5N@Yux"dHs lD sT]ɾ~]W}HN ఏ4 `uȫ5)3C.SBH^V(H=0sPAKyF{ [.%l kM8>@n,"6u[? L+<,x`T&v!zkCW^Јݒ@WosRenY~P_e):KBeX#Ö>"ѻgWY}Kf}Y |X8M'(̸7owpօtBRW 5flqKϒM?^D(Nj(xoe8/`vSU9BT:yf&-e.|-6k.oݕcxhTzhpTqsz(,;{E+D f5M'ҳ:e73)i0],q bO \ST 2Q&WYw|c>qyϟ4 i-:F͛C.'mh5r\s̓cX;RJ;ka#ĠYfn}9fOЄաV}|xi3%p(F"LIN0FQ*[8Y  %"O& }}lfAHt? 
sϞq,HRXկ5"jRPz)a,Ll&AcdLi|ms_ Ɗ3p/[ϫ 6{2?MWwbOKb0j'Vqω3x5$l9/ưF'FZ CԠDv'ngq%%M+evGc2lH{8mTLjd;x8k)]b7bv*{K=E/)kCÃ"$'͛u~S q2]3=u;X=AjX=7+DDtUCifJzE:&{B0rG31a-My _5G *2]8wժtٺߵD&)ڛwB"S9/9# <}(jVE sflTSTĦJqi%sV_q*z| JM86a@ח^}?&'U6cnنufuA <9rfήdR |(!4sd#B"ꈴ{" Pч`7=OΉz5,1ͯQ݄8K_ = v/@Š5ۘ^D?2'je2yw&ڪ~o=`N9klȧE2"rŃO `;⏠kffb `GőY@Pa :];i,DxZ$(Ͼ-wPDd;dLmնeߴ驚X^mQtN2 <ƸHecO,Zn'>6hS,OىIy(:qCawIvd}z5q/24d,Yق@"Flj"L%hz_mJU(k,KA"J$cg5.Y yL j]cZ;x  \u' "Wj1Qdz|^mMrBxSݘPl0yKJo?4SyjO7ۂic߀RF|ZxOX㢲-tIٓ<[b1nr%'dU={f4oj#UAr,c] =[% }z7CdkQ 2'ߍǺ$4wF+B!dBt6*y_:P3:_!^.htn6BZZtkÊǾ葁 ngX.0hF8 +;'>vMN98^kڙo4cp}K^iR'RfضcIp; ƶ 87ɷ194A/Z҈)]Ҡ&VM~/Y]!BK*Rsd}EJ~%&  yԒ,KueFzDz< D_\'mEsA}@m0\˹ Ģ}m2ڸ,91£gw(nWTC;[_["jH^ Quhr0`]( 畇d@:̥,!A@N<&J^v~RîrRKa-h iPauOP'QɱYAE:=c ar,TBUʒWcRv~łU@[BU4,պ8$<.E:MʺNhz0Tnҭ7Gȼ!Y5Mb gl(2 oX{S =dayTCFxl ;B>ھ[4ldtۚDQlXU׋IdE0Bx%T!}/;}H3z=?;Ym'F26>+Ϻ NT5{$m:k0^4hMn+wDDO9:5W[c܂*Nb;$뿼sL0bKx *ӝ|j{WHQ~ŝdO Q^ ޏ[RNf"]jBlDV0K$ns"歇//7Ki 8Jijٜ+;aM5L%4#A_N4m IyƸ?v!|t2b'K}o% -'xmЅ$Qoi0౫y|\U[Ir7K!T[!5}zw36KibpwoJ6E6\`Exxq# q, dоƂ#7쒜,}K~`00hKڠa~^nF Jh'[."Z N#g5J;.Ujj$2 4ckdy`@;IéAE\oT%gJg H٧8'8[Rĩe8|%oCU-7R lSz$.?l'(?/dфpe},*? 3M3\=~q؆.OX}KxMoW&CbH7S|\ۀUXa2X˦$YS^(`+ ^=ߣg`bb7YK0X_#?k{]ƈ$esama/=sM)aԁדƧ?|2y݌5Do| #gNKon6yС=UHMQ yڞEc'u( \evF}tOH(c: L'ɩ'6/g$).++Vz(|еj/[E-Ao ?+f `bJ=qAiYWǷ$;;sEc)hد#L IvB ? `-^K)ɆۨʄWoJ \!7F'N> .i.4K!AXˠlu[`Q .Bɞ&t)d΋1?~+-p]֌2$0 OB.džGֿ=&NQ'vtvjJkJK-&ĔDX]ێ_k֓Xt|QH\ofWV8PCP~CPaabKi=燒ӷ;HXMє1hAH]D'cZ-* ؾ62jJw45b㠢ӳ< &Bxv} gߥ$ (8jn]v7SFB,,VNCGUO M ӳX]9B0%GOc|-Hac3m82[I{_}/#6'Fϻ-PbfFިUCVX͌nC xHfϞF8.' TBH(Lg7}Ck_9i Y,.F~V1Lp^IJߢy%{əTW4h@Ƀ#ۓ! 
(ɹ6ti!1Ϻ2< ;e=]TK^!N<0EJ yhƹ \m) }{NCkEl>ˌ@{0[W_d䓎A/uT?21P2=&U%R@}tvu -.c]GJ0;n+8*㏂!*X@+&_dDIH8kSTИH S|Z]3sHẆD(#ҝQlgž]"'庅 ^׉O뽉K=7_s!oCOuGtrmI+@EM㨿r'Ŋ| &=dw)Q`dkQ2@ !SZACI=w`Œ# K]gL*CKF?%xgq[ ً~RKa ut.碑-WqrNo(ti3kiecwz b15o2ܬOs#s`FŶ^kz6{0%&'% ;u-kSE4t[ZAa`aٜ8ʱ`=Dz'Ҝ*m<~N?jRYXSdE\@򮇵 FItX/x s![8̑bSHRY/ @.4I #@?EDd5b=~AOۣ=uD|{F1;_2E3 0F;^nnV5rAgO] B E."8jüX/O_& *rՅsCR)c+4;=e#fv}2!9 Uu "fQ޼oW3C';~Gc_"zsT1^T!8 [ax4d v:8x a@.N>'8'ZtN#/;oY#'E7@Il?eB%1 Arz"u^'vfd] Mazj'Zu]^KQ MxqPJ f[NhGf^]Goz.'hMM`d_*Ce5n*Dk@D[W׏uAQ/(( x-ZO|6?ӞZDhj\6*0/r&>{4q(I<MuS=;_dmr+lCS@@T kRR΁^}{X o͋5L Gy39m4МTwc@E?묡I*`{!u;q.C׉묡DARi5e"icrP=Y}wտ ԰P -1kc(B%sH;%TRdӳYjVgs"5<^*;b-Dy|xn,r96^p 3{4;4{`^0h~ת].9o |6ybLu +熃u9֔ 1ܭ.^(Z`dW.4Wa0Y˙d"7}3<Ϟ{lN;l{T;O_QB9,ׯ#M0 M; #ٴ#Hy`Sqp7]$t䚝BPWIMEڄͲs㥀~yR#ѫG@WWx1LtNLxd-cнZyx'QX0ɲ0>{)4*f#0HO> Fx.O:^T2"|D{uzD$ksr&Y׽jS[ܒ&3Iz PF4>a s c>`ѧj/HZȻaڠpa3N S;w V) !EnB겙V/P["|>sh@#'Tz b>3"/Cb;ᔷSP bJOȔ+;d|1 _6+]$<(m ]dzϽwփS . |BnB M]^K ߿T_#h7 봄0P+`Gj,a̪\b|4ǜ#gYnvK@W3d`Q0ei0 n= 쐚W8.6aEmRŤ{}OD#:x}'ByzYq^ḃ,i_bFYw_:)gO3^S23"7zJđhvr (b{P 72fx D194GY<ϧGN5,ɋ"UL+0:NR߈nh𸲸iā_<*_Yp1͛@G@Bx"]B$/}{(lnWF bښjay'mϯ ʺp&z P4Sm,Q2Q=sHl5";~Au:Bѳ< #;"#6tv-)1m~lF̝Ʋ+k?Bև mz.Ev#JF}*@ٖ E˝|"ߡe>?'SC}|n*ɠKrW}%ݼfR*<@!B@- M8ouպ1ѤJY<!ObAqbM>ϴ U<<e GĿ={'t /i,5>kN<3#F5gKR^13:'H:lenMYs:M#f q=s?7 -*{2_vfWV_*c)_Lha5Ph@Z`Yޡ)AY%,_GG4ܹx#NI |bxf Q&}wMMIe VB1|Ee 2h8noGޗ^K+TqZK gI{1?RZ99$#d&mg&kֻm IQHOٰEV7͊$Q۞ =xfOTr#wum0.aXB| 8@ov4 maNEWe.z\K)6?go=9$qW)yQJD ۳*vA*wB1yi{Pq0qi"oGoXuG..mw@/iUQbGgsUȼ9Ҫ<8M;bE$NgL-ZB^}Ea~[mQIr !CM",UgL@\spUjao+BL/fG`ːzGWJ&œW~x/>UoZ.X싉T%ͤeud,VTl~b)eRZ5}X.K]ߞui4&"elY1X x |xW׉#r!G ɕiB#pe:mOmk!21֏)U.^ZUHT6tȾtش? lo|Ca;2;@ =זk{[

V^ 36`TcCF#56Y< шsPfB@#Ò:huԼ%$˙w%GsDžզ@HNŽ]TY"Cr|.7ƶr<++μ1.;Mq؅zs 5͘5|zS99# `vvvyd9nMrmuݸ! ́#e>r^h\w5q:" F0Ab!)Hq@ҦXgy-%2]WVŸg" J<3URq,W*x5>1<>3]ؐ~Z箲q*ۂ~_0:Leu6wN9D='%9|QmQJ'y^z f OW79fE+DmhOd38D?޴CzFf@ۯ]P T]*H#VS&X7*$4DwޯA#%7kT`z.tAN^G]/Ԑ|rV$g6+KbR#YtkgsIŊ8LCW՞ fGd0 5 Iq-\#/&zPݻ:![pV脝\N )@%m ⺑'WL ٳOO1Nlo1zw#: @642&#\ )d,^LXjn!14bB*K#q/d8:hTvTua#RawK CQ1m@3 nbHK'Qm9T,vW`Y`\p[. ЌjTϱp̿4a=j+b>V^ WP|\r#^Yudv#V:{"~ql-SIS&@@x+̵o͜g u(8NE>6h:((š.T6b#%8s\yo KݦYe WXf+{QF/:o$wCɩC6_Pm+,[ikcFI}_\_KRUc `4nOCfM hni>*J; JI$s?ź, ؾ|/nH/}?r^5ܘ+yh[%ŹdMŲ_[/QM3' d *VnfK.-, Z[Aj:AjivlH ƉSbZ:KW#SMf^<|uة%TY<癟84R"iq@}"b2LIa-P\`R+u$bDm_x`+H ^(0b.-~?$YxG~U|(=G&_@)'.ڹQ m˾ QNo7$K9ʤfFIԗw3 ΈƅS#%.q;甛r-z5b@8FzWYYJzCj􄃭}ˋ7!Ky< M9r_':6xt$&>B%F!<ԾaL5b}[u=cI׍p<>bQ{K˃n( #rpMn[ÒFeYv\!/<?h_ꘫ& F}McNcyзWgdy'V-n2yfinPsE]Ddސmp.dt؁ʨmfs+@s!,S~MH(v\:|W6b uf{w[/3lzPuIV% c ?/N9vW YCչvI熌_?Fm,"YdS&`bp;4!p$e)V0OZs:[>5;Wm>nKIDpdADU3mٻ8b3 *(&x̭e};m ɧ]v68nLMPuZRQ.ueXU 7Mz=,޴d5!e;U;=#+<>Sl띧Q}M~t~k REW٠7d 9dHꬊzQz'|xɽ0սm gu]lk 7E˸JF&! 9u+p-:?!g&9B9w-#GQ+#eǕJ;P ^|okH+2V₻dCAA#  ɒRNNe^fG3suN%r͡/MQ['X`ҞICYGH?tdp}Mx ΉNK|<ƏV]ǀɽo!3 V,œۈR;mcwrMϧM,H| t_~][J6GA2} yo4Wf.ۡҤH È*;CJOPdC=Av(v10XǞ5c#EMؖrr[B덫iZI-GG(wKyȎm`Wz 0a %8B#;ӻE0?E&.p2)OYo:|hE;b*w/nlk -/V[ QtݙKg7wbKq.x#~T.NE;JC>Fjt{۽ug<=(PBWPDN^_XgHCoofI:2n' o 1 z/8E;`*hYfv ޵Φ_IDu WtD,y@lה 9n{BZEM,unw;e3|!JKXjRo*H^=\QcSo);AJnG=3 "͜4Z&RuY eys|h!WWȎ0-nyX#;y B-()kѽ< :ƌ+1>^`SQ q-Ϟv #+1'ӪA0U(k[riz|eWkYnIE ܝYDUQm Sk\p- TV"NlI_b:@㽾֙V`Wm ީ=q{6O Zj,wv\({;=&!y3k6bV7SH9[D)@NhlrFΧ* fF/z%kPxKȷ>tjzRZoCOZ$ _h=o1O9R]a>8 9F^/=/<YVIIPDuB,)lon$@#3u0"2w^ztWG܈X`K4 $x8BZMJʟD/Բ-oaPװjec~(C;oEw r-qwd{1\b#2ǢQVшk2YhV$lq^1Nr!*O:6ܦ~_۶ J0zMHk!_shFpe Ƃ g f:- ^yӿ5.3X# L wzqfhHGA@[yy8 ˵/MXQ=6 ŭmh"nRSϚDfic6S]+zȮa8Ԣu!#}9 8+-w3ElG]!:,r)k:(oq'ۦ8c8Zd31n# ώg8D,jvέe΁d UK KMe(<A7eS^:,ENé $sIN1ʪsN0ѫTn>!S*ry;Y:Z%uW, 5ýբg0c=b=RxщDᳺj"*UNƀ^!C)ah~ DRH?ҳ{Hfu{:.KMGI)lXĴbqgwMtr.)i3KHeVt|Qs\8-&L!(29e,B >*Ud҇#Ej7Eh QF OiCY.+v 6 Fj6LLȼKs >dWzO]p)5XőXrPLO=X=ϼQøZ(6U,4mF/C~}5^9PC7\R7U[I$n:(lq7(%i,䁒~<".-q7b~a54R9o O;̥q:+I,t#{ɂ?$ 
}z<|5KܙiGsP%&CGA1;2WtC&d0(bCU[Ә_TpYؔfGyܮ -F`ph5Nth\9*{l=' )3֕:82z ȥߧ$*;Linrh?<~JB 9:`@ntB )ϰrbmnjA-ٹ+8e]S>mj` L"X˩Q*ebV+Al0xxb*nuțn#jM+랖c"s+RO<.LZu5hfN_uZP]MG׸2s=;m z ͱ'E~oO"q΃-nLT#:7,4YYg" -޻7}ވ$r>aӨaBHzP[ 4r>syj\}sQdR8==`v%!4ѿPƺK~mI(+8g$ʠCd6L)j&淯Q"+~>Te6[d4 P!6Dc0\pVi7^.$S6d5{ NJß\i/-F=:bԛ7@ʷ*5~0}`o|*CuӞMsȽzCl0 26Q-Q8.m!FwLT@|4oTP=?v<#N_'R}H~ģVl)~!q}vA~@kr+<)5&H5C9WU'NaJr|(w˸aN; ZS=߷66TT~UEӊ6 ޥSy6dM> 5! Dw&[u@F;ہ=ETeσ*Pn那^w[ xČ^L 4L7[Q\k@&ށ- NR=+,fm7Lۉd|;ouU>]͍bv bpo"T @,,}mSz&A \X~Ԭb+?P$a҂BOOU>Rjt;(Xxw[fjN ɟxh4q/g|ĦQt\pbͲ*TH'K ~H |kJsУaЃ7cjlɬK'rHUi}p8HGcϛLi.^A㺃o}~ ~IZKĈ]12#@Al7Һ#{C纁f#LC.޺t&tzz2yKǾgJbV;u%xrҀ&nZX<$vg]b +,JYV3c5B ,D5CJRW9ge 9B "&:*E9ehul~X%Vǖ:, PGT0s%@x Z _/Is>*7^JcNg (F=` &"zށfZtnvn9O?$(ey 27_gah&HXoaEpA%3#+soTp9k_ToX!]m+>\CpRtqMe Olڂ% ,۔mv4;Қm yg)L)O^)DD5)do bE| |^1vwgNJAai z{$>$ o9 E$DCӗCvvyZgJ㗪)Ϭq@}ab5Ur&ǫ2%If 캥`3Ე]R;֥d!&t>YNH(Gk) D=0|yo H >% A#0f Ouу bZ7/0ڐPDmT۵@wLʡ2)M,P@LXEY\g6=E{jN) ֛B@yb4}jRά:JZYaf˷4o53j` H5AVj08$w]8~h*YW`=8V; XiMr[mNC?NqFƸ\o >2, &CAx>8wrc1]q{`2($R^9ͩm10b9X{ҹx]}Nhu0qe_QϸeQ R_Eʥ5T}XjHږ7&Qf@WbU'}<a~1L #'.#[\z侁(Y +[[G].olT5ZL(v{k7%=0eN3QIbBV%$r묶 -s7MXj=$! "EwPBcڞOsIh$F&]U.GЍWJ5s+U$ b3KUSC%(8Ecb (eZKaހ #>K/x7M5c8Ekh"#M@j|}DJzT\Z|0xMˡaUMl4 <)%Օo"d}) $6x [#./Zl0UGfY?>Fg[rG nwˌH݊ej:|O.OGnw3[|쟐&%ԖM 鳽˻d6pGRN3MB &o? $rq)+f),dS N# KT K&c5^ [V@jHdCX;@#[ne64}#yb0-̳L7%H_uq]'TbA;{4bPx֎ *(p4 ü. 
ъj0':)ot'JЅIn"Nsv8EsORsPNeڔIoS5*~؊VN4lm0c qo™^íy層qлGY֕Z&u|MfkcɪfG;WN3VˏM<^OMy4X,͌L>m7y(ncg76 $8?OP~3Fr4 TYYe a}!puXM6`5J~ߨĞWT}PNKG9A*YwPf_P \;hpVV HֿŌ`w>93XcJ+01f(cl| fCQu4-E u@WZqG;#@NAbHbsA`Uf0Բ6;JF4Fx,k/IkRW /"FR0@EB%;ۓZdD8Hv,{P8Dvp$g+o3@$hw B=+ -oZ!F:E૝ 1LS1Uz@Z;z.Pq*adQ$.1J,%x|2_呉M)4ňѵ*".W3OɚX@7eݢ%`#OMpoUx{cQc2@5(`  ά4ycR0`>ٍfߓ\FG(N41sFۦ\wאY*bXǑ U=hܪescyc{Blxvin͏@o?z~f0$":#}'-ݎ )"#/@9t"6m,K4^:s|b OZ)sdWjW N)wtp *x Qf'#[#{hU2h薐{ EvQ;M mN~S@5TPY* oFr'ñV$[Su*e9tP~PU# {H٢-Ea2dVK y?j^}b56tnz]iG [Vt;*>=ymA\; [`ne -X ʏPԂm^3J_qXzI`(2} vX\k{AA49%''mrp32!o"R#TTȲYau5F[HK;Yn_v޽٠JC╇9٣B8MW m 6WCFZ#p6eZ1OcspY% s{ӡqb^ zac~|I"fNd .e>cP{ji@lXďR6+]tnL} IJQTDnJ{z "oC%Iݡ 7ALX̒@{;Qe`A ܶ}(Z'Iݥ1E6jeu_XGn͞dE07BIb54c-2_+u>~|܍f^TA DXCLlΨNc?#o \*s"](Bdm#?=$3EaqJinP䚱[Lvi"B- Rq OPVwӘwB"0!h.F,Grzj(W-$3'ilFa 8m۞/e^1@g86b-/s[%ʨGr%Ԣ93 Q>76´{"pO QED rouKt[}Ss :fENN%%|ʒ~MD̗4ZBpx("[R":Q׿U 8 :۹v>~gMadLid0H6lviL31l34@ԉ$k#FwipYWS;l>jmҦz-,З 2ȋv@:Sw|O.؍xc1 t.+wjN‚/3Cq-,=qfc;QpdӿZ1v+iOQc5 c:rѴup7mnB&DkgwPpI%+ih@i0i&=ݢBܒ;pvRZ`Q>v$Q\7&߳7A0H_e6@V=1&)v-S7C#}^[8Shc7z<PX ~a.#2C 4FH{GxpkY5)H:e)efbgu؝sp b$`UH`"Ioc$,]aCa=РoC|voY5op{]y.) b&XQsh>6wf uf_jOE)8O4<];`9 OuyWc+s[C)`Xq<_#EQX*RAI.d[)A,+I6ߵa2ç`z}Q O!ѲN^aD~mrTEkПVEsǣJςi&e+`9RIȢSՒYÌ%=+BY D ]b1'4I >6qR,?~# ׆+J5]s YUƓ][b,)m&cuC:{IUXivʿʱ&G4LE}uSQpE*T}kO ˾-_3$0R lݵMPP};[8T^%Ѹ]p{C3(=/oc&l8M܋:O)n,cTNlڴLbTŃEЇ?+#}C}OօgYOS'"I蕱.l[e3e~Ѽ;^L:r-d9ʸ=!KPy <ϒݏVp]074ȹD6X@ -k)&P[Ha۽9u:G^2t6G2. 
9${Uv`)*R['ێ獂Y#ud56B,Rq[2."\ ݒ&{׆UꇺSPֻQk<ݒ$yƛà{3z xnB96V"b ΐ~{%Դ*I02]\whc7K^=!A2{_Bt; 靘u>a5B.rO g(-lWPOؐL*+%M;ʂ϶h©ML^) -Z)L`ŻkvÇT\CO~}<\ 44άOS~?7?"j]Aj4j9M#Nm"C; \<+i=ZL{Ctw zVkb#I%O}: 4SҴEM1o}#I0H7㌿Ldy$E S!CäפL1ku"HLӹ'Z6gԟYU m>El\JFJ{p\߀ K􎖊I,–͡\fU5m\w(3X@|~W<'_-ؙ|s K2ȇn]}7RKɸ%5yZy'ekD1eHzBawrƋJ0 YZLuminescence/data/ExampleData.RLum.Data.Image.rda0000644000176200001440000037736013240265320021236 0ustar liggesusersBZh91AY&SYSmo>!HE %BR %(AI PDT ()JHT *%*)A@"!((RHJ P)U$R(URE*)UTEHUP @ PH P@@4RART("@ (( PH(ID)D(%"**@APPPDJPH*)J%$R$RDQHA!"DJ$D$$U$E "URII"@P( (hUBE"J $E |@&&L(eT 2 L&F&LM)L& T2d&4 & `&MPMMh=@=&M@US4КhLЌMhhѠA*! ='ODA@MI$O60B@4JdƄ4Fj hzh 4 4hhhzOӻԪY%+E[CB*ouة=8•_KD*p-=f ̪$d"AX2zɽ=+$u."PJ@}JAd,܂hT,-` ("(:ԅ)L$Y!#2}IJ>  20?G%3(  `3 ?VG2!ф W3wS{zwF?l?<'D4qV p ;Ac\l(sF9l|푍gnq-c7I0668'DHF`#koF5B>#hQ"$XClGf'|.pD_]wF$3؂4~MJr&D *(Q Que\XX7#8'KJ0%>$QTQU ,qҔAHF+l6 Qj>&B"N(CXA6Ш}޳rfG $G2Op)n Y:jpa Ifd15K4CY4e ahK^u8-a=7tc >#bCh?({%A6]AI 7-!:H[Qq`%1p8Me}̱]W@Km6{sj~* NC"iP"Kf@1?<[lLslW#DwE_FQa0 5ByiUV>&wDB jzQAO'>Ÿ́{S.|pƫi%!gI-\% -emA-UyI[p&|uD`@FiAfp4|--.f @8nV&$D|T9<1s#).|I j|s["6Zz]x5zA"׌2D"maG\jeLlP ޡ׌]{a!2l#3J0\=2+616jVV(G2jal:W:)΋S5{Rc;DԠ^px+C';hyGY:..go>'0P$YA N/`IⷮĤvEU%1Yp.-T6-Ry[j&ZMœ1R߁!ȾA~YABY HgEb1ăk]8Cz)[ ۡS (d[6 YHHh;T &+P.\m޺(ե}wvsP=\103@V+oGЊ|W&*8v*{Lݓp,oS,0Iǯoii<\ U^B"/\Ih,@wbˢ)G;0޻Cc"XeBq9q}w¾g K2~+AyJAE֏w@pЄ~hczk@_b2Pnzl75 wK{:u! w? 8޵Zk+bo  O=Ҿ5⎅fl5M8* O+{ oQLzcl*N$=7yv-?֣&.E^]o1by}xu^2TeajVz-j1!vJ36OX>[}:&[⺖Fy1Z:!Q30Tpz0[1Ӎb. 
5I7׻{ B'H|o!<|ͽASu ߥ  Cl12~^s4:ۭi.δwT5Fb[(mU4vjd{ZI*-AT =LvAӎDzjt=9RJO%va!Řu6řal¤W#EF![όu;K,=gڑkQ\Fs^)K8/g[=4*Ȝם]e)RgC??yRš*TTv@H^z1 tc%z1 DwJ'+W5ΚՆ4~ȾG(g+Qdž:Dz2{m sKUGpxV@r0'zZ^:hΔ6߄ʷ1Cnbn.NGfCQ!21nWs~E/ n20=gfҍ Vn %"*G+zrP4 }iOJROlZbQ9gaJyj!^ihqcu[r!]3pG@󾑍.`K}8 l8K5)FP{tv 1r ezT=ְiیZJB>w!Om{#ܑQȎXɻŭϡ=UTDL"dlH\; cd٧1pdۤ|wk1H7 g/}}%;p+դ-P鈻^;ԥ%ʐ^F=R X8.2i ލSKo#!'f nFki߻Xy`W9<_藊 5 x.g{6ȉV$E:-U=J, ͖ũeFń['p0rv7lWm5ร!<2V\M9mW O 0 dpDjeo EYӄM F_F ?V@6 HTG_wzҐQM͒:U R溂;pW6TUhz2݂z?"n :Ce 5hr\vW٬bmĨb]7 dW'G,n<~U)1QpShq;pa L1~6^4$S>3ⰾШW2m '2p-ص-Z ] MߣYM6$TBcdu+lLc^4KM91]6M/\]l%xG",),hp^'P1SATk#w\9˲vcp߲Vt*!8͒4H5l$Iء\!bYn͛'cc#~deZ&X1f[)!ѬX;;?lNSW=[" H$S5Ic$7foqAcabo&;J2d_DR . E\ 1vx52C{,⊧ F+\Ȝ&:fURsJ r,dgN^3U1[5&@0a⃺kXMlv1g{f!S'V өUq6A ]6;׿xwq, rKSY y| a 멋n}_M -:50QPGջ;-7C-s6Kr?kV?n}ڄ^v>w L CC5Sl3Q<OF&=MmPkaT $Ϟɔ-t'x+$\t s9wJY CKUJ}La<^hMq S"lMD5m[4U]Rټ-13m* JZŹŪHxz :T`ȇF5ѡ.q5=RȀ;R%&MBݹ@>λvT?mSfpUM*%؝@;*ٽ˱wɯӑ4lW=glh6Kye;KIwM pCAaRHhrR Gly.Yv*jicn.j!l<_^4$^dh؊,Frh,˄Fࣧ`43ּ\4tQJrp,cz@`KY6#>Tz *hv'H41ȬΝ "x0ee`eH$G,㠧$9 Rf_bY+2 r$8^1 [3{8jngJo0;~a 8Ew1ZITl+KjzQ8$]WqC7/qAKb()<-f~,hW7BPlǿW[ LdXaymu;c_A#g Ux= :A'ZlfeV6Sy`jL~eدUlFJw!P+DZ XnncH/t |taAQ*Age*tlaGd7*M9Bpb?5xfƙv<: d_u*sBadž]Ok Jͧw9d  Wĥ(4+T$3izsr&w)Խ쪐fkG1[d*!K{h;wQG9 0p dڤ5G]j& ǫYA7#,D DTQ9& GnFLJx9vp<{hWm$ a~xRxilwL0t/gƬ P ^PTR q>yޯA8\-! c,U*X.ZO?h!0 F ӯͨ9|>C_P*g]~ n/6F7rX%H;zp=Ѥ,NNaGg\Uo,h2-namFl=WEEJX9c9I=>՛n>q.]HvB vU`5Z 0hSRZ97p7O)WE K辉 Hw?jy3CJ'm|S,b) VszP&^4(T* ol,ىBJAFUf$CQoz7 N2_եb-%\K6LNp=~S&Zpq|;V002=5nR m66owxާPh3`q4nVmmHRi^27.hq6Y4`%JC@qegۘ3P'+/sWym[+cfkT Nj[OrŁ@ԕ,k7H,x [0Q;jui`"_ObF~x;::?Oou@|\_{̫U݌!I->4!X0zY1: gWHc rz ~\Eusݾw-W*R]#R_^XeS". &d]FW \ He#^3&ȴQ}=0,2:*Kn1hAlG\&omX ;Lt/ dǻ;uw$٧P2-Œ~٩$C36TeJ~V1&j=QhG.Y}7#~O~ڴgƃk8c^:SB3%^t$ӵ %Okɪk@*#*ǔ!JsYiw>.=?1kBK Vs>f `X,B wFLą}|}oe:a_ C3]ݳi}bd&HD8>P8)6;nJ |nO;D6kЈKm>&U~)Jg)#Soawoa2|bbud((uxWFx\ۨA=eF$NCwmdO=6UY! 
!ezB5)-G7˙0$FF!~TwE ^𒶞a񰹝PDzcW?N\V9i e5LPx0<xC SXԡ VϑAl4m/v j(KM/*Q~ξU`̎r6xBڄ:a)R( keR G昃\bp=b_WxMR9\9>l^{s<ѩGIꙫAh&#oVRf/ ڛ20p3ޢ٫ZO+$͌FK Ûj׃66A Cj28+!=q:l-uJk2U!Q$̂7wշ0 6|"D2m+锚BEx s-rmMBMY@ųi8TOxevdŬT&}喴}ܘo#:, 9DL$O< -7Ŕ=(Bi "./8lTEي>sɑ|k∞RYU!3&A ԭWFcYr1O6Q&fSr9=ɪWZ?(~O3g0pq⸁a= GӢso"ƩH} ȼyao =}.bO/h+>u@e'Ac'KQ7ɴza7IAP+w[2rfLX 'dlÕiLlwpŤtm45u #7p5_mDQe*$(e̙[(cKQ!HZRYAݦ63e% s6F@J4G`Mv$q(~RƖYcV8remO>e?Ne(A3 zV̻~lҵQ^ĩt=|,{Qlm~: *|xn͛4W? Arg2m7VynV.dy΋[aʓv0J3%[nyrA.|m.;\xv4ߧtk`gbCI1UO4 ǷVXY1JlǦehk@@z<#1J '.#ZJEK5nG!%IqTJg3ƎF#o|I}q>XFIK25 mƽg[B`"^,5چ1Mɴ hgƨy>:*Fd59e.^2a=Ҵ ɗ,M{VXP&ZBBM~TJ![sJcx-[;xLpyxBzQo-@a-ؔ*6zڻJeH%,9oG@'ٱ;ɧ SnX=|ɴ3.kىDVR꯴&|MAZ>+805 1aEqdžvL0 H= /y_m?g!NiBeva.W{X=7!N4ZljtYγL/L*Il.D3zM*&}/MݘNMbmnfM ek͠Ml\}uB𣥸bwin(s^I٪.V܅%~dli`::R]z ҅UmH8#&DBpuG7:2ʜ ϧa/n(C)MȎ{ V u2y*z2k^ <ܼm}uro7H|' r܀D2 C-|lez"댲K4(hT9wΪPl8 s!b#@=!.f C[Z$ *ղV)I" }0qkmqN x|n'N"Efʒ6dJ}pe[5N %X8&^ȸ@bAuټj_MߔڀSZ=Tv,Dz6n_p;Si!`;yc;Z( <"R!Y Qtah< o#9in"l@K;e'r[%E8l)A5̟NgdR}M fϨglMx vhwsWؓ\U JIep4!ޮq&ݬ*>;v6v i ^R .y4jbZtx0m}x딮pj,Cm2D걪yE@<ɈЦz8Ӻ[gV[l*f_F=q` 7e-˰&gGYſJ͎wo ѰᩖxXVwg@#x(cWjA(q@sfd8Ԗd[1-[ D/f'lʤn跤-F29H]q r4S9Vsb/e~hV} 36"5@EÑ3ed@Lv|i!3 }"-žn:wIJcl5M!؆֛}$IyyE.mV^\<2RSxB}]!'8DYJxJȿMv&J{ [<1S~07,to{56v_VIQ %*\fa| z>\6ծ~QnY}q&xarKOmy\4!M`R0pXT9G(zjNa|¯I"# Xo.njJn0p-6M "U374VEtxv:v3v] jJ15+OR)aÖ7aܲ![hyk7wwQ DA}Hb]hid7bf_v/tیR6}^WR!a8^SnO)|Bҡ;n:-Z4R XrAÖu Nդ16bFGmlj&2dCkq|dcã#5qI0^]@Dgns}&kH E Ar1FeXmƵV .&:STvlAԮ)kNMm]Y^cࡈ%x 6ʸ<eY0wL=/א*.J?hJCq'M`xa9h\0CY*M$@9F`fV^=ehD*΍$C>p50910F('`zi;:5N\#0 XM":CaHC߅)JQI%QWZ nr- J0a%?Dca&$Nk?כXkUs,bXоH剃akJm:^j sQv^bQ\EnhX ="lH3c 6H)NXsI)vc/2cᆫC_LKkЂCRϝ=^T`9$'pCF9kfYh@qe ӈ NV`PԼ[ie%CA1_T߉s!qSa9<^{YJ_ o(6ݪ\?+x4KOnػ@F׋k-uyf̻"F+lxQaL+ c&|+𑁃LTKVG(/ֶ%ƫu5б^ZArrvnYaQo0V+[$!#쒓ޢE,:ޒrROZ(5n5-.kc]I}V#dZ"~JQFP!M>оy4tb.Phځ7'$zßq?ҷ;U.20sL"/z| Z2+l{7WAb%K:t=srP=ԅn}8$}'F&@[=ZRDUHR?XZКwKOZ[Θ*͠ӾՙAϦ8!$T &MjX0u^##"yp4qVP(uh<7 A #X~GO$LEi_5g +Y:J NH}P[&)*=:RxPl"_Z¥`T/|E*7&[3{d|ʨԢ. 
ֹ֤ж6XGdZ x#&5fсNmBRFȹ3TyJO-(|u m&aZ6۸"RaAa=)kUۭ# "ƞع CiQ%@h?>g<;%rx} R ><}s9]ry-CZU]WXentLsJu=y:e%lSI{z% 47Tk$:3LaPR_2~K=̣';T=ME*08+ĸg 1 p׺G{J6y\0-*zQY:%oZ`)+=W #r</m&]Y#f5#6Y7Qb3-1jױh|+1rx#1jn ٙgrlK$aƞkV\ڲ7}A0e{[6ԑ Fve_vtn!Rv8O'Y@ հ&5#C_kōMA{:p c@kVmރtc_W7ŃFIǥekA#WPaz2ԿqQLȀH k5)F( 䴠6@Ce5AۤM3[bx؊X$oABXs"؞ǁH񡭨y`{6 .ZvP4`uɄBdGd6G`Q SuDFX҈z=?4[< jh{Bvul]Dim. к!JP]/+xosTٱ}ALfX3TU0 yD&cԗ Ӥ&ȡ0!"Nz!BT^t5bح-銮xΕη2Uv@f4lP@q9lDZ#76_'{ĹQ0/E370 L3$5Kp6vVXNX[[ś.J̔D0n@<1LxN1'UA%]zvY8H?ۨhe(1^x$WqIQ4pP,YJP\ 0-F`Y,M|*^ʍ#.$@-f`!" AaKCAٙޥROeD# Rd'HL2JG@"l Xg&;\ %'! 쳳 +1 \-`Lo 9r92`ȨcqVsbݧPk*pUÆ 1\)}7@ (T`cg OA$8+Uӛ21 ,>lT՟Ir0Y_Txo@/(4DT`)XLO} s.a:>)۰7c:vΝ:"+5INtf"t Zt:7أlu$Jq;L}6|gxߚdd@}%չzzH?r7qWLau0|k\!\5)).Nw3 4sUϹM4(bE *ֱp6HS|ZkGʽy3ߪXa ^eKIrZd$V2T$ŧ`+tf ;MUSLMs C*5߸h?zO}"ɻu:O<o*ݾ/oGFcm/;C|,@i\<\F)rUwGznjaN=C7d[n=x5 [Uo_߅;8}M ҧ^Yۍ)(fEJ_5Tebd]'/$t̠h;ix͟f jcuۉٸk%lŒю>OɌ!7# wץٮ}qpf'xHPWq?|-2p~E聺22&zO]o[/nFfgjѕ'O(yDXk6OUJ{zӨi?fl$`Q$ver4W*{OEQ&9;~['92z@ 鰊,OX{ͦyTǨ_ʀlP9nN/sYn?|C⾹;1#&lT+/ 蹭"4z sN311JQƷ>c!%WXqy< n-SƜHFNq#U'!K^MUD?dMq):a&r|:#W)}uVg]ί#$p/-L{.B ݥ~CqhkD+'VPv8Wv.{ u2M=eF@Pa,-!e/|{uϚ0$N^uy<"K q!1;UPfU6xԥe /Sp5]⏩Ҫ?vk|FJ3QrVm0F(GJ oۛ): Ձ5Pc~ tYQ<Q8yc+U^^. .P̱[\4NF,IuVwRe'T2[ ~]9%Sh>ȫy#k}.8>N4[/H;Emh{d235nHxLAfoN)_F kŃcT{y *!FѲkn#jP̰ΰ%nIx%ﬗބD, סz]/ŀ9( ګyuGz -ʐ׾^]C(.ͷռ>/Oլ&1Μ)& H_b:]UrX]I=!9Ο'{otݐpU|LW\`>E c=-L铴s2 |<6- qÜO49 I=Jst ڝcل{K M^pcBF Flx=~@+'.-@ 1yR/#"frdᄫ9;Rh諊n>WB}$U VBHOr(Rl !X3jc`8~5|3 }nlZ\埂/!oamTrZ am} E\@Ub08U(8/nQzhv P7?W#< '&G㵕&֥t57 K·tlWm7l,S::z6Y'ޢᣟw]=N{V_d}_j9b˧O{7@`h\~歀K*Vu#W]b {gAy. 
)O]8 ydA1GTcxDS%jX<ŖX'T 6FR mI* .lf(^J=#hؗԥ3-NVf([ozak)[ej0j?Bq -|a `DAb}1~I4'qvټEk59O䵆Uˤ֜AVԜ(ߏB|ȸbm(Yb*2z6bšZtR ͞,@Ẓq-՘wDjSd3µp E犢J0kJ)*bSo2E0QgQؖHC7VM"N-}ՏJ92F6$!llTtPv Pwg@;HI%bBp c*Vh4eij)+Jm 4#4bFkiK} lܺ7ưh7྅ZM8" XQ0BmTaC3G9*4$kn.,nS:v׍[3<_3#bYփ .qXP4S$re\,iu6 xLa4/QC' !U)k$\rRExvL`<#zU&8ys1h io͛޺95ڋ0A-[xϘRKnOrSdtaxK.[N.zS!#K+eՍx]0Iz֠0/8}t" 0E>IL(mRЊs2W{|]oUmۢ^%T}jk'X +ceטo9?Z̏j㥌lU;RFx/9kyL"xxK\(%>>T`6z*C3sOU6;`yD=Vx\,6d{\:C<0s%F?ѬސC4Xr?DsiRsP0[{37hO-SxMB2vn~jot@-e{|_>t`c~ЄK = eq,~7]JuTr@lKeKoG}>ўI a\@W}8H|XH *ZDmeyu'.:n݁]L0}!|@KukkR\hRd=N(#iG}~.F͸IG[A[d5y5HL%z5\Vw cO%$@FrM;6x}G U%Rg1o BhF+I!ݾN=LМ@HHB={}֌K*V1ݗ[WTe %ۧT(ۇ9m_W[$Zԗ"\;"ڛĠD lu^<:U=bug\(BB(ڔmn:N$kd/pHsP=7<s࿆-,"& )|(qyovk7+T+k1Ic9&򰝓\DD=OhcՒ#Ċ\At Mk|ɟK#gV~o]γ dM4QÊ\jz4 GI%:,' bN+i6[krHm@ԭ $C&Ǿ%/8A]lFL <ޱvxUǾr촸c-TzI[ L8ĺQlA{u\ՅXXEC0{of.RiU}|& ϯZNmKr#U']ih&vL,WevڍƗ0be `羳= UFwEa(@vTIL6VeM+0+hn`B )oMW9߭Jf D:{\SL q!'$ )w6-;/ 4yf٠mL k2v9%L85f k%3aFh\lt|wY 1@9vyh["2] fS9~v+*=ő|zySU;$ !B X3Hl2Z2KxIuXf08~7_m޹=y]9ku5OG)~?/C_P >Нs^0IZȀ`zѰe&&9:dC%v$gm"Fyȷ)XX/R:Y* [#kNQ=U?f5' `G)'Q %ϚlܣX㖫=ݵcF/s-@ɜgv|(Kh: ?=23WCWnu7πמw̍g6Nc֑-h%}K2Թf4^{.<'}IB&-$mIX_ -+ q,"j'H9Io$,7Ҭ;xh mq9*!vh(E[[.񇁁QzLTH'0H/$_fJ3,x*S@͒Af#m!T4Mm"$wF[vב=ڈ/z>lngk;}K]CbU G0\9fAqCn LBQrSܱyMފ?o#U"[203\uʁ[xT̂|HC-ߤkE11c@j t|'0s = s$:>:j;5p8.i ۉNG.P  wͺ 0gogZj1S.Gv*8v[\:]*XzU*4S8@J&5\#~]nD̚@iӿLoo&;Q!6>n]r@$FchX6{&,Ma6Wd#y 交/eZ MtATrQ]˃{1z'ƌRV~Hd+ujTeRsbMCzFݠ6pE:gcKrPdkAF;Y@}l!,eJ(K L>lRQ?x&k+Yyh65jf1;LC1lo1屩09JMa0݆Vk YхF>nOs1PA5'jyɁZ% JX*1 #K&%LH&|Q997U}+j 1UŶ':|XT LHћ1 Q41փ앟\݂ ]I{{FaCYnɂ ,yru#(4dv8wk<"I! :j? 
\觀Z#|!GSMNc]diD`lpRχ5X<E!i L.Ր<#lv*R`=L<b|~+ L{M"CK6N0r|op$Ŀ=ZL'BSV,Ř LDာKYV5gߞV`A8VX&DPUm LRoat+ImH*#vMD(S.0_ɪ9)<%92#ȦQmƻ,\9vmɸ젼&(>XP!,%=ϑι($Gpt0:f= b\ɚ.21S [ܫKMܗI#ueR8x7%D,X߲%ɦKa&@=cV}qB_G\'+hVZ\S.C& n)]MJ6!]'t@ f,V ~f >˻=③m?yMnYPi:Ƞ19/S k#B ADɩ" BP(J~XOC`9ah3;Z?fH8e+B؂qtN"҃ը}P )6( Bnpl t$O|:-'Lq!Ƈf]Tr'0á^;@^rJ [wʯ{Ӥ-<)T)Ǟ9c 1y[eih7rSu#Tt ʸ>C/dEJ%cX!5-) F<Î wbi_tSWL blx2(ɃfV)LE478K',gab[f^b~s\ {jsjNӓ/|82RN#ȑ(`I5/7V` z&<:uLdLU-t^.-ʎ7as} \P5\4]: G]f(1lwX6%wM$LHwj+8JL [=9a x E Kv??ՐVբ!@l͢X ly>ڱ} 5.3`ob|9n]J̠GudWs^ bPa? wk5'RKv6 ݱA ;k\iB .?>ZР&o'E0'J*֓$<O9 cD3|,ҒժϨ\_3{傻^;TL8hqH?khM&IbS|BӟJ?FTŭ^̄pL1Z9B0+܌c`$5|.f^>$0a4'm ˑx]@znؘI*~vaNU4wlKS.D}GDDRX,pbDZ̦~%u ~}x}XsŐZ# ڙX煸q,d O9lzHڟ2@>寊X成IHF5[h]^~ ~ ѸVf_z`*onpt"PVCņJI:Ĉq3!M\&$b8=/g e_~Ui=cJePall,i^.!$GʣO0aM-(l(GUU,÷㺣(xTt]N?'"ӀR--XBxX p"-p\hbNNϱÚGYٶmtnsN9d KC+eHVeL" ,1js5b[I >IhQPsӖ w4dE/`CA@bx8"blyYN@-/mzu墂F7C\ڕϼ_ !ca<'ia0D[ؐqo9$b`i!eO3L&!UJ92cw >cr^ݬ7-҂ iBP:2Wy7JJyl}@hF@{4yW,叭>߰Z6(a߻$/}wd=RvĒ"XvI! KA@둤`훴郄-KxL@GcI}feT͒oSiF st+"23p"vW!oEx_k d'݅>FK5xt[1a쿶wZܶZr"bLIlgFw%3T|rᝰPTœ_}M߲xE;ݮUn*̪L8SvgWO)~#h\_{R#?>j|!{pn]Z^#_ł/9IlT'Vjׄ&=q$WX`mH`:&3~HvEKu4(ظ X̕wϺ]Tř ) :v,$WFfw fW2rՇSA[ zW!.gor_4ńyl*?Yd6%˸R ӊ@96 P8G塳x&[ݱ[z1+|}z |3~k2l>i.~B?p?3 l"%ù=.?^˞}aKb`Es_Ff?~߼̜#uޑaO>`_8F+p2*oI]'ppߵ'Ǣ/3͜?R~([~=C?< Lla"[A# pNu^ $ 9΁@LӔ%Eŕfp1K-'cbS]3@!#YL('PfO5טQچh7nV@>8^ * ~]“8v]xLŸ+r"$F(K!W_7)>gxwG'$V:A x-EP+]3SѱշЍ;!qb!Ht@.Dg F<5JPE",BNsHH<'}l@?|zۖ,dnXJ)!tu)S!sgiQ}eYsZk$J/'D EYBIpYcZ9OydH# v9 9=>_{PxDos _K|~ڝOA٫vKil4&^+}Jz`J87B~ktX ؾy_x79˱tGu ,\z X8_+x2!Mde̓0/?q CTY;7[DN6C%C)*I (+2:9C|G<1Pp2'`iNG &j . q(n'W*|0["*h ${3?Q J)=B'Pehž {ACD">q EVߔW|%?K^_\q > ?"sD Vt.GQ6 y6#S6Dk{w?j+ͤ;Bx=h<HB#)DE@L`pabdvuXQ2H& v< WƈPa(E. 
[ d!˞~ȋ$w9hd@f̲ʼh4\Bub4]ȲGQ-&JD( ŘjRřl bBipJL:m^Ys؍A+o zzG@Dwy'I:$΄p@aˆk.{4"fh9 +.u "pP§)GdZ43nwSG񴋷P\/qxCu:р¨K7iȤC5pz}h]Va̓ӓ~ݴ4fHEMxo$ةz_צe#+E4)/LOR58d`J{5Nb4DN[D{DK,W%J @`Ö"ix6PyȎDB1l(0lO͔$)vÌ Yڎ@E'qXFr&T 5F* wNi-_H=CB b]Eug&ƇMIh\J3y܅)f4JK13.j'Ej9ƉY CLL *˺pbl\n$N КHk6٭(l 88(YM:*&Jo~^m69g{U*o cb{]~ğM9JS-CQ96f"(7H\ Z0ȠVmE|nwvI#ӗ>i^:ɝc{G}$Uj` G#tƎRwj୅kV`mrWCsHyZs㡰JgٽUq.)]@-_51G :tVpkiF P(I ;ܖ#9alFPpRJ$A}RȆ$dvE9=ժ/ؗIn; gͬ-a%TJ\mKmH0xwrs&oh ։nXě({8,EG ݳTXcI`ui8ل{v`QzT&I1H45`S\$Swq)Q-tQ`m-RSrgzp)Bnn{Er[0T7_l\3iɍhFp| pA_Dj !ЄfN^#֤6!y Teh*U #u5SYRrx+Ȧi|‹x[.tp"'־LE56OPz2]3-g5jЇM&GL\e$C͌)0eMAdlU;Ab nQolԻQ)*Ulo>€"cjbEj*XrB "]N9a:BVlmb$pnc6H [.Y) (V0 .elAR)Sl,]׹9tBd?Jաh3?Zsc6lJZtAAUr0(4P{:ͽ9d2-PlC*@gprasG(΍MGt6{[-9L$D`XcSQw <&DW߹-u`RF0r C^Pq{}@L6숊h0PdV5"X1K)J 80p(s3-+3tUhϜ3㥔/e5\[D)X0+vVi/w#|"\nw6SWqH iN*.֛ݫ="( x w29e/S 0HT@o$(yT*L>ehiWw%i1~Ө\eJh1怵BbY2|Nw @qft h 'q#FbneT8&x`8j'ׁ֨: *:fT'x; }i*CR(ѫZ?Jf\tT2=v7ҍ_ԯ`f6~`лk00䐳p5<|XQm*.m7O6βE$F]atVTȄ\Ǣj{v+1}f;Km=]v w~QNN>'V[}0slќ.ܽ ³mЌ2냅U&<Lv El6(#"*t\`& Eu `ٳe\/uNJλݩ[ -1;~vSd#H{dK<­̫X) | 2YbP(xyeI@M¿E?ؙI<g̉-"z2+Ҡ×bJ{Ca4%luPW4)/pNN8ƃ +hU,p aJ?5!|I/\-#sGtM0mB hBZx?CC &9:& dцx!ecCU|\*8/6m`=( _}@I^0ߋYU*W3fHC1N9f"5иq2L1l^ίrZ)@h8_ _K^5S)d ]p.nr^]/?L֩Sd|%I]n5l(nc{K5,wŽ1`:I[af|{] }}AfS-sjl6^ؗZ!ئ)qo tSi3,!E &<Qd|~ .0A&!8/FDxoݠ!L9K KvS͛Tsi,yiā}ZEhVpG/O!t<$.XkcXvk,5|"xrE)cJʮX%`!E†^iexȐ(D#Lv^-K0DY{[d^}ۮ؞땛ڈIuTH$m>-G*2R-qgKI--cݼ5ʶSDW {@6SD+olx-|5-A>IkEPzogI+XܭSKԆ'H>Պ(0\"N m_@DQ{!z<%<4 OUFHc0ATlb gxM(XIDRZ"^HQ[E6 m]*JyJT~b+CbkW@A]YURSfr2#[pj۳G)3M|@Lee+ nفvheRCI$g5 ꏇ5_w"-e+urБS$m($jeAcY:.OFe1-VŁ3[RnY2%ۅ{pŜݹx}~%N|]b@F1DuHZ:HWb侠T"]Л@4Ry"BYJ+VĖ -Ի3TՈDQn'rl05? 
&d=8{|3h'n#M|Zbӿnr"ëJ2.6[V@?FKO}9(]:/-qP!#)B-oYc8p;ēɦan߄sZt'365AZ| {Vy|w7QXXI]ΖٓWSuɦck) NpFXN|k4Laleec?qOL% X4e lm 6$^Gmӥ7@xY Ŝ3ytp# [v) :Y ^]=]#JM2YvL6-كAÁ6kB^ాCR,^ja0G8̈5HQ`tW39Ѿh-ɧ'b6g[ȿgզ& !̈ `J}M|%'SFVg ,X ['mC4 Au.?MO_f>3Y,a*cPǻTj!.aZfJ4HF}) Lm)S ~SěrL| (ϛ OCE~HSLP+Bkp/ŸOA7cZkk9b/ý5i3qBPh{0ua'>t&!o-EY.f:G໩qb$$|Z/zOtXyC^tf~`@6*f۳SFn~m{ =Vg v;nSm7w?C'[WO-C[MĮg{`YMQMy߱u 1jO2*̍%GmHh@tXϑS#:~/up&@BW*Uɠ7OLD[AtJVE""1sX><n 43ަa|@Fٴވ3,f8N< f: 70y78Zx_ݣ$99]U \N5 OԱI0_r£*#O {ՔdnV ɘqG(=3ڧpSDT!%+)_So(Fφ7S"LZbHn E4@\, t{dN,0 P9-wNE$%ߒf˱/3˦~@'xűsu_5pup|+6ƽ`Wf-0c}KP@&ݻfi, ?aiH+0ϣaip(#$yJO[TÎz{#FάkiȈ#zfRQ0"%I ?GLW}0v$q>$K^FL30bAdQClV4"l+SQsCNJ%&3BP!l 7eڢdb&E;-:NZDƗ1l0.8jĶ.DVƆqg84n,-9A]jS"BE0$ėvXd861FAG `#AR=+S HPYj4ȍU$\Q9݃0Cvbٚ4%?4;K_u 2H묶9Z 濈d-6B~P%j GqU,H>REV=X}WibΟ-S<|$A!yFE\2Re ư ;B*bP*L)x ƻV nEG(,zMw7{o]*fѩY#vˀs =]ZQC `ŠIbp06$xEU!Q䪈8$\[(&a\Jai JrUN`1h"MXV5,{u6CuWd`smpX+U:"u'醹i`l+:+5$@ f%kUOv:l)U_*7Cv _8hdx7_#mMSw7N-*f+}֯V8,bxj|Jؔ Ghf q-+{x턔kJ@0S_$L*GWGrc1z@l[!<ʥ>~?:kp~3r~?Ev_y\+@Gp'5v`p@@ RVvIHI|.>}a$kB TP2 f!Tn#|PU}qTi!m!ݮ4 + r.0 hOJJZl4Rm|.eC=y'р#`b2;ވrȈ(#hƝ3?ZQF~hR&bMv#7DL}r+o4OьwK!6ӨPqjGDcӤ!̀lbLeQ2벙Y d(t4fF}xe{ -C8#ZRj>aӼƼ*q&Nʾ+TJOA [hZG#~-"]Htĕ|2:x5ATx8d?lϠ DϢ*h19vZ;;-7HK&yU{O@kJOC\`sȣ4x:l#{萯Hd|0DPj:;UDXϲDpBw!9)8aKAjzUacQF' ̉RZq. RD]!'1p- HJ`e\2R,x((lLMQex3݆Y&e VkCWt =6-U#B?(4S@*vlFXa&)XO8G"|a1yxCzq1 ]dXx"k!ʄh0N2J>D&FtbͶj[.|~{c9dTnE1 Eq |zWA "SHc71 3^JW^0YwHV16wn(>lo7mp*Jk-o*+PHh`s7#>\Az$m;^>DF84+ Ham?Bp Pm~/Y=DZ؁RjmyS/S\&q#~#<&l X0&oK*3|(W?Za$527_588Рe(~4\԰ X)`WT8Ժ cJp{iXBdy!@NV1U]p'셃M},f$-{DXx`+'nb>%tbeI[/9퓐ح?.g\ON c"t $88(M5<(궂t | }u j,Sqq.eNJܑL]R Y X5Fi8'M9!<6[oq}Gt&`-PfVi"c* tHeewdmo(bo+},GksIM=mگ *xW`! 
r{HW ˸yqGRt=B[9,6o˶0(  +ݫ>8 T`|'[P>`ӯ 2+֌Ui#ѡ>3eX&QQd'O.[ rRBI!$!ZK_6>t,C1`8z%΃Į` hM׸ŞA"zduإ%VS8$vd(6ImDX޾ ]^ d vj yW)*PG6oLI7V5FO` 8H՜4fOZƮ|\m Rt_(lsqy8O?;ȫioɀL WA+ͥu۸@ rțuP"x]Xݡ gSiqPm ̦wU0P {;ri lugus"P Ie2?&oXp(e:w^qznՎ0DiEuG_C@Qy]x +ߔd|o '\@.VgV VJy~P%Ǧ$77eA'R1}t*(E\q BgG@Q΀$bXXNY ^5Z H+V'{'31)]9_ܒs!}Y'=koU ^1)Z3PT%E/Ei7!w3+ H{k x̰㓱"*n"#忮Fɖcnk<2F N gD!v^^۟~qɤu}PȖvUK0Ԛf\΃g>% ^-YXu dI9c-fn1Grt 97(GO.4 n%6σ(V+Gnp$b&y5[4? L "'jT2b} _9Le[e!F=;}jVgH*\ 8e[b^$q$y0dH>4IyGKoҙsgAX70}WȜ׳ْ ~c?Y%: cT.v Kidqd (ĹV#=خ3ZIƨ0-bsˇa%!M/RNE*9UOԑ5nAU ]Rdو"6;Ms[:\6)0L)E օf4#JHcttUgY/˥̬*j&iVT׹0ri]2dYmO4&C!Zt,P/Fnad-jdWm-KlEy#d~?2\SktAۑ#{(f% 9CO:(Uդ6N J>F_xr[E"y ][ &Z Z X#pn̓U딬 䞩f҂VL7h9mtkuIXS4K4`F'*sXRdp<な4w|hdiҺ 9=O_ Io rcfIe fNNMYF"0Čp^NMljcn* CdU4vGKMM==.W  C}~"E÷^7d h_vE{Y[2.(Q2S~@HLrvF>|;mf4c=a5;gZ1hr, 1" p'm =+8'e,ؤ '} 0=ȝ D <$#/Du8ܫ$d0qM|Q`?w.d2΄ L\Y2/ [9p^WD.գC=E"<#Ӕ̄m,!;xN7ؚ|C=m|EВShqݚt]|Fo;cpvhe̞<>7=juŬ*(ekA{19 +5dq) @/jY-`KOG1ԇSƛpRՠMP`2ԜKDh]{04jϮ!tbpÖNycbe_҃t#EO:nl?9f"*NYd#I6=+#tX3Fx]-6zoROucUyC81@Tg[1}ܴS#yQeLSWw̌85t}4pS5M ԦAXHVZMB+ {;dJ𳮣5ջݝK3pq=.7mćO kЩއY(̏I13ӷL4[r-/xCg~7s0>?3Jmm8qQ~9X߱wb< Pq ۞ChT|(U9qPj]kH >nPSW v6u Wg,R2^ni\jmkkhU4N9ndm/Mxpe g*JOP#<p6?%QZx  O͗b0wi7ZEVuc7wȥL)FR =GlfU<z͹<ʌNv:?Gr%gartf3+r 3'uETNVcy .ԆcPڶ2hKafljLx|=7,^{Oz<"fM1zx3v5<ٰ]얥%TNٳ|_x?U~>hlUb2m0cڼKƹ뽔-4-9 mևEVcaX o5܍$ApeR5=R:!-7}*O1_%{iZq͜_nt(Mv~:nD?"xJrg*:< Pߒcq"q|9W4V{}'*fE)UJ"r`݌ikY7aե'LeF[Y>^ؿy{l|~[Ym]*%[=&*NuW|НyJ XRE_IUWN{I9+'?j3rNR<^E%7e&b~f AskhRs!]$, 5l6#k%K-NV!iQY>Ѥsa.N[cUŸg/%/Kio0 ̿C ,gmǜBg ;#!d{bGbqqˑ!kB[Pu"-n3D_Ov{b[ M.`-r.RxvT9Q0%XqDSL`U9ɂ1EB("Bڠ7'HzĤ.njo M UwJ7G)=_LT1tusH#nrA!>`5rdk˥ᜌH+?Gg`hn+wF/`鮀ɻ4ۻ {4YNT:g:$5. --qq$>}?}п9!U+YtzVo[Ay*)"'8\dqEg&c# rĀсõXW IFMӃ@a5 q _6!a1kFhchmL0BB@- CMgVxo(9cS܀?LU+b0OG)z~$f=c]L{)Su D/il:A#tC2}44bR^ܧrXb>L#ESI.gM29kk3:+|2_8At N]j犒8 ;C C^k_R1:룡HJtHy a^{9/Oҫ^S0'Ne7LpN$"S{HŤ*[!^>s<Ja+@ ` >;4FҸJޮV69ib`U0ۺ67tyA$gB 0H?Fqƞh; $s{(+OXvt3ش]t ?8pkdk,.4e [! 3'GAjLp 5Q ["4-oϏ{h왣Tb dn;j++S-hk];fHB= (TRa3 tVƜ;(@(.}dqc')63E+S}lʅ"BG_p,Q2BPp! 
g>Gh s.6R}xqFOmčL{dsec KT +J*j\ŨخxoD:/U :NW&3{ޣ1_5f.aӓO,ځzNdVyE-jf$*%)JT!$!LU (>C8kppt^ww Y'4eSH0o/WTI%8=A FZL|{/?rLHBQjO$$z m2 i|ERju?0H3܈GD}˛mwxoO/{QYS}Ҋݪs{5Wcx{j?f),l4Ziy 3S)Ð_4s Ftԣ?iߺUVzfФ~5?޸z6.gvoܮ&KD ^PrYz?UÝgu.< /q-Vđi߇e (7zpo=Ql=L=%>G&|ABҦ Zc۫IRVAf%Vmz~S3xzQoIăi.`L[ÄZSńaPDgPׂsvFm5óUX;(dDNFr@hPmj:Ȓk?_g`_X;+zߥ, k,1b=V:+Zݙл*j% WBW|n X%F Pꊃry4c$6㠿[gb1>Um:J9^߯}Ϸuh&jC.Č.de^fa G6٤?J1tFQW#r+f:%7\DŃ#d|Y|_YVq=MpB]g54u' 4ȢC@# GIyyOp*?Gx旡`/ wEҸ=,{]2#7?&:aJ3o=4"$ ܆V~쨎+Gټ_ ip`g/aȁvQtS4k HGSqE>cfA wρFjV'o[.ߠψ٫X;!:0u᡹,{ȜX M 1p2#ҟ:~u}d+@SD(M1C5no??X<%':N4 +q?'$] GsŴ50sʙX!cOk4! XoǺ qP1} (,= -R)6c'|_Y=CC_I滻l]\1A? ă`3Lڿbho66P.I kRKOR4]CyBe=ᩤI;*qإӽڤԭ! m`A92Z5~hTj8,D9ϻίVN*&RRB0wT5)3#"(Xn\t}mM ;$E-8| ꉴfK9;ɀ~qW U@ww>"ԈjD`k+TiUܯX #K%¤Ҳ[r"2`Ƴr>\ r2%bFa(Ñ1E "Rꋗg]xUO]T/{/LRbۯo&rY⑼zwuYEz1ܥ/A C龯yz 0);`-59 )Nwvzp[xi`K:TYQ7X v9W>Q4Z}?WqCԽVju2Km˨ 5+ Mq"V#/QFN.&g/|wy RhCfkd''Kʆv{vwb29g(s=2Ұp{D3We, {q2Ԛ=2p gk` o.~j?A})XmU:9܀# wy_L#A>rدPcB'>e?B"Vb燴ಣw|Mޖ|]2%%Fggxgg`_BXcS?C~ {w5`;uiI"5!0I !&f"߯`OZɑg  Фk -N, ь' -Nbnǫ c(G?v5MoY1#Eđ\ѡCws#'X:"ʩ3X~f  G謬0N{eЫP$I6nL +/`Y nRh"=KA b pW ?f(m{ctYX;$9Tu8x ASaey*"!(Z*Ux(}Q38<_ͺ{_sm}FMO9K5;NxIݢWO>*ݙ׷t9ԍ:\]hk$ӫFCЊձjN1)V-ޟ |-eAs%%YBTgx_u"]Od+1ACzIaua^u3C}Jm}j/ClDaebP>ŏ^~+$:h<sA2K HDues%Az=5oFO+gC5Km A,$2O@΅υKLWGU!Yˆ} UMQt[0j ֘y{}:gnվ6iUD˴7txµ\1uX~y,h}Ӟvt=d'' 71XSJߑ[}ޭًbqj(]t"o6v}_)/oިT$9JH\swwz˟R(x׽w Ev v+Q Jw[%'cux0=/eImXӀ'<ඍ; bU?H!yꂵ8a5:T g}HdM s_g [>kvߦL0iB\IjTݔMڌa%w@\:ؤ,g3[G\tRxQ& L*ɵD%Gm+DL.*i)U =Pߍp~ڡkZ2|HWER@O"/XIUqGf ܑ† !Cv\xwx;苳'dΰ-dO/|{j"T7]IcHTr&;\p xQ̟KŭRA:mп\x Y=ӓJoxg/~Wʙ;+׺lʜh.xKgzRCW v̀ %&r@ը{9Y*Sl" Fv' 0}s{ӏ>xHNuALhU$)MR(2s0}RDj#눾`O%7PqƓpADurD؋#EۍiJhK'( Y}ʱo%51B$$R_.B@?YGmIy[* VJnP6CTK;1f#^mPuԀ,6YurF"6G&8Q tg>ծb$k ŚQL\m4,'u3Wuj*D#NEAaJ*J;?#甇a]DWQG*ҥ iiH|s4ꡲ1Yg"ͫN!ip} 3M cWeYRkX;Q1dX/dͮ>h? w!]ւ xo5USﯝTa$kSJBX ݓcuo['5yjsbgJ[CuHutqB2;xH-z:{{ ~gȹ7MMO‡:jל?cA\:nH4RkëZPĂLԦe¤>b$8:I?<:WaV h8. 
D7%5*ILcET?YO5Di'(tV{A5 /hd%jHqLKhNB"Zz_@Mh` H%@k9d~_z0Vo|sO8>w4!^+~ /rX9|SH>TqB›Gv3iķTJ\z؆:NNi.LIjE/JuWv<8܇t0BBe7 &ᒹ-enG.Є$A% c]{yc>9] 1/1/D6؎_fZ#m5_EDRDD/j$[mUX\ Xevx0߽36` <0c\%wJq L9P^ѸϑaH+!iZj#amgXGEGJ|lEgrsFC|B#^tB#x>^ラ}UD̤ӜWvŅelh[ׄ"DDBp,op%: 0\l菀sMb#vSbǙe*l#-yd!g/o=o]Y™@'}ں&2܆{?Dr 9JOK QAk#Oo%^E3JnSסr1AtLUQ[ {EG} 5^ c{K~HG\T` o4nP5&b”l'Dih1py{cnq.L.V)!uUޞ+%a+-gJ1OM,BWu/=u=9:j.cIG(.ZЮdh /îB)ӀDYMquk#|E4:#7+#١$l OT4] pktް鳒OU|<%gjs=iw0`Ev[P3>%fo s'd_R-eH3JW"ȽǪX@,rAJ9$X%DYH=w~?KWaLMZ^#j,&S7k:p,9-8hVCEQu6j5#_G/CU6x3R:ƩFPRA[e#8/#G_-UaWv6<+KCl"XsAtdhf*NS\E\-B Fm;B7't>v'{Pns(oxn<'m[f8uD- w;/Ƃ;2T)BU#PB! .ЋڈZ$\^ /L+|OY v Ub UuƨUK $GߙC~2VFT}R3Ňċ$l tw~go?jVQtUYR_,#MMDB XB,ό5jM>? 2) o=Ӓ u"NrP2e+= ]P6c(7aH`.ud!OE2ZO 6zL n9Ma!D3IIjQ|tBh<ذA"j(x [?qK3T&NR'=k0|\%pY3zXVmuJ[&T`uFN7_}eb0ee6K,\ F&NHlw0bȹDY4-hf~J(d@EzzzVLyKp &YLdA _iY)q3ӛ튫;9UY[~Ą ]KXq6|bH3_$qYrh|J φ.t"G_U{މ$-UtX42CĹ8#6 D3U4W0RҊ4\5cvLI8OW܂N u *殛6DVlhWY4$s6ti2Path ehFwnTDs^e?{>>=VlJ0%_VcsB籉6dcT O8`/3 {s%OdXbZhWp4+s~8{4f<:8 nL)PĩʑjYs;$j$Y{m5d-}"I+ bKMV]<Ƕ4Ņ%R1fBW\]os5m8-cm<?7HZgJjlߵ? ҵ{ C<҄o] "# %Rfs[8}nO-f}&)o;NbʟO|!4)ߐ T#&S1 =y׊2 xrZuu[vɗ@ N1oM,ڔG:7W_;$֔`쭽Fv=W{zB\zͭm0ݯO.xȳ|6hk^TWX۳ϛb[fLKꗘ?ߗ{7fb;Z=U#3X>_+dQ|E8դ _kzI ^7e Z>Lj 50)de=]h)xo7Gηk5Lc@k6аF[J»:(.5.*^&l+k*u\Q9:O'"h`]Uv(e=XJ+,BpIGeޏҀjh4,̴p %Qj:| T=Z-;~Q՘j3JqjJkĥ( ɍ֑+z‰O/PBb&ҷ&[0a0>wdSD(%9.m=|{rceAj cл؂w tѷ7Hj'V?5tF"q"6a8̓U,=X,k_e֘!LOjzh%RmLKw㸳H{vct'EN6_a⧸8%P]fqx̓ʞ ~YADZ,TUkcRtwHl8Y=.<ѹEyt흻 ])o n"/<ԷHb6LVkB||Ì JVuS + lWà@4q8 }įu-. +@=&~J)zøjdl;e#w&;QoЗ~ۭ?ƏxSUُu1U2 g'2v~v^K"us5ƒa(\_Y;(B/幻ԅEH_nbj*.YL6g>5OXձՎ$[uGJ<gI}zpqܽT`gt"ۇǟ&E<ޯZ֙`\[~f'\Vɱc2 ;-&`%~NasFYEg*_*cDy3yE'f;vLjv4ʏCCAJgԲ9I#ZN}OjA&0.}5bmwp H獽Vlxg% 3BO A7ԍyt6b}g\'Pnk52bҷ* ׋T)Z:U=虚q*s6D]9z$zW*rcSۃĆ&o^oLY҇ l6!EJ4@hB\BLtTYe; xG_D:K QܥҫZ%дW.$;s-w dⅳ37Wׄٴh? 
#ڽ Ŧm5_tBVƱ}"U1 ^bUNZ*ƢrtELT˱<VZ\]ݣ[Y6rc&(fYhArՉbP.798t) &q40/RPZ~&G2{ئe!kXHYQK$fW-DFxRyIl` XK^rCN_?ĐMbBt):֍hˮЦC4ͦ"xL-C ]y17Z4F+Zj'JCm2댬W-yƩ^Ĥ$<[aCojѸWč5jSWjFu=|kⰎvJ*%"(Yxkq{+Sj2ņORN>/O:dBX1X*"`If1p-FJDYbRЈ65rB1{ b T/WG Q\OӘ;ؒ嵑Q_b1H%Y]a% 9 TLnG`Ĕ)5/PM`IqNt׍U&*޿5έb&Լ&RlnpWOz~R[y*/SA<Q`ӂ*: )b:m'n1n%5o)5 Cb6|AebýPD[H-,ˠ/ PCC&^ly.Ii/ rMI *54D;\xGXYMeԭ#C jZJuH BbzXO0"fd] n|qb8&X䩌 d'cRʢѐ)YnsSǙ inprWܗm#*_d~ahd%4V7g1 ZwQ8x.h:vka7(Tapj瞍<(&9m`hq uDRq7y-ϥ[u@D2^ ԖNIHKҹ۸|gW\w) ӭ~CboM%u{q)>/NKz?uפeZt0wDZ.IVwOqº/8p>}xpYKB/WL䖖wVd}x6Z {( d5(ma@WsM[60:כjY8/qTΤ2K<1 BCA uЈt/ό/3~]>9O|}]Aqp|n-k#FzX+gNXF &XbnPSbۛ%yj0Aӕ}BHl0%\xV w- Uܖ\ / j!x`AjP=f^FMYS쭘iK]s!,2M.]nIhDz\l~\_| m@b~kTwSmD6˘3 Dy,1aD[ 2rn[{m·_8RIWp /duMz͋-Nwǿ+Ja^ш9C9S50ߞEy*3BIS&BiϪ$ns.W["=7 -|ڲ^vQԈee)v6!|TEJpv cNb<`@ހ }/J옑+1,Qf麄@;ٹ =+SgY\Hj;grRGH`$|'dq!%X&:KV.մwmzLN؉ś_yH<hVf ra!ba3wdm/YEަY/|`c<%")ڿnƧp"]Y(i:Ya|̟-/7Va等 -i%v(ϗ٦8SOF<+A ZcQrĶ7Wl\gv(/ܜC3'T&/b@kd(ĂCX|ޱ~N5 XM1j b跙%I7ct|5+Nf9@jL aHUrI5Um>9Ey{_6OeKnplV'Wz#0e,8@NUn=<׀/SΛGO۴9$UWK`a 1tY_C)I[] uQ !Zz|]^M.[*c_Efsg/=c,lP|,a*lI]ƚLҾO{_'}Ɵa-u9B[sz[ZITiJR#e hHP9ZH ,a #dgE$}9_4 ۊD!{ː3V?oK(iwȘI]0cU|@&:t@HL4H X7S>WS?S~n|s:u8-ڌ]v@=sL0Qj Վ&]bSKa9qyIWĄ<nl~JR\"kj,f5Svzwq&xB,W X1pwHLCx052w51p~!) >;Nk{AHr[@w&t?0GJ~ixN ʉ=N (%-O錦n%Ÿ,zBcFȝZʈq&sϑcuuDTwu,Hޕk %eC" ܤUd'I0[U@>C#dLW!ɇR+6=}԰bbuL0[T^n0-h;uwM4޿qv`.> 1ؖ {/M<gB|wbX꽶:`'Lj++iӿbbz==D/b y`3D,yiXJ V shĚR2FqQ<5 =CDz24xB6 IOIW55N$7 8i! 
3j(YIe2 艘*ͼYpN^#K] l>n@wQ?rqd2i\@y{,ܴS.d4N \pঌՒ6%&H'TRfƃr"W(KxN7B\/<TxMWfأe: &*4&N!y|MΟFpHRgLq#z .RWeRBjcFʁZjf]=۪yf(>+/!7maظn@9)vioGoG; U88}Xfڻ͝vsPPXl ρ?7ߟ7y!+|xv,puߝxϬn.RbJQVj|P5RfK"H@X3yØ>ds̤_@g&CXuirrJoS6tc"ӑ9=%aK댥/kY=&bi~`1Ө> TrO銎Ӭy-s^J~*>%~gSu'YTw mK/>3\^;31ףO`ӓSI!n_Q{:4*~q>5Lwxc3q!Oc$.pr\c!6 V- 7PY䎱O#!Rvjc x>ӑ[/@kG`q}k5BM54ӗT>I]tntrу[e_Wf}_ 18C%@4`K3y bvdS>̲}k2$94[óO} wSRlGyASDJ3ްE6 =6=C{.GG9~h}9a̻>vOԥR+IKđ.SZy7t ^pv>ŠǸE?5DD[ PKTAPAjGZɻꓪ6fK\-A3wj;g~OoفW^ҙv`xQwV/y̻c9%NlӞ r;eS*9"{IިI(޼ ҧ#oѱromU}_oeK1uz'E ISϚ#U飶}o@7LKLr7ޔEzJ%V0=Ņf`^{I;_Տ >v0 7%r--"zv^M:h HE籈ds(ҕ#G{GRW]@_4NP D&)F5I.xțyXobЦ 9~t^D5{p*_wM}1,sSrX͊ruܫoswϗqxŁ]1=1 medfL*_ H2SvZ񼏯x8VP$`U2m|ZKYcm9 YLMd5 .1r%!9z^/Ψ Pl$;#hAה[“,эjy, Y!ke =jb1dƶDOݫWI+W SpC8Ml_ .$˶Ft~/&I>8'(6,TYLxHU}hAp1F='Wh^@VMYW! ?@d(.4==k~^y)iT$D(6 1%MRZ}zm.N̍0DPfFG fq,P)@tUvao\. lq ^CɑMDSȣ 2\ȁ߆JijmZ5b1~GOҮǭ+Q?/Dez{:ݲ$$nikhģ[Ecpl}ӵuthc㹭4NTgsҩG?ѳ"A'F,t}LDĈFEhώ&1%T$~h4`vx?<ؾWxPH<_HݡpCXt:Hx8HAM ,4)ST+A?OExn"59362`B?jag=ߝ_{̝j*a%w|swm}Z}:^"S3$(XqCrؿƐpU m18~gL bjBAQ - )MBEN(jD'^x}>Y,Cn̮k&T%㸋فC)U}"c >$8/^*h\FR҂+K~KӰ_ ϡ BJQwˣ85Cҙ:T_O{JRUQˁ=GV8>P׈EgLGN"x+o5]u+~iȢ9g4}n_0F"%D57QH[lR}3ϐEuZj¯5MԲ H.,PCO EU5, UP`IirvIB~GZgG 5BפUΉMXf&Z /+;8 9:4H-&ّН͋$ @/*7@ìBژG['L>(%P)I++:?&+v#θ䥈QJX($T^%!#2?>Y%E9(HkDfWOq+N 3s !6*zSq"CƃU!MY8B5 F[MVwAE8f XDCeݯJ';j,æ*@aխ"=-:Jeq~`!y$!HV@AtaOIjP "Ot24ZjbP'K6(7R*. hLk\؊&qģ9 i^m"+D5p#897WjbFY4:x^tM'.d3 ^t/Veq`y|/Fvi8Љ^ +u !z(jŵϛٟ^--4["*1 ! 
QVEB E hNU5֋رVp2Re#vO7涯O RH Ŕkoziw)$5q(Z}d+ZI/jT6Bs) Jo] r e,@u.hbu@˖UPc㒨Ŧsq^x;BsxR0;;Cr1ҤH 5k`/I,^Ȣզ2`@- @K "ohfc D~ӻ2y x`@swtgJ-FP#(,D,$n6L ˏGD'yumG:2/;.g39 l]7'H3-+#*k FwN!cw_p\?+`Ə7 W0Y k_hpQ074+c&7v]٢SR&u_?{FAFN y}mJ# mw*@ۚ\Z9G2@.^&{X&4<yM79v̥K?(]Dד̒("0k] "9$x c2ҟ=ɲ !r؛?md$:Ecs#mϽc,&L(҆q󓣶C7psnT(9Kmv։}q;,%sȿ iȠ]Q!S=sT/@% >; NTmB3 ̐oOg$y˄oIL~:иQFA槴 T(bC͍+pwpqk JR;#mN@l˶D]8sjGAˍ׺@N"ZOQ9JUA `i)1ϵU k $YnA/ze)I ٿmΘ8EًQJQ1ǟԺ"jzC fL0=<SKnFHe&M':Sxw?6*k]bOrg,͵1D2c-$CVΊ/c.PV?(+hPpU82G[ސ6 :D,+P&W{1Uhd5( ddQH-PsJ+h?db@}ITGiuQNBJԖ/ e0u^-f @>TK-\NnrQ%4 Gg@Va% O:@V@JwХd4k?>cDT;.q7(ej\@Ӯsk@, &َ4aw{hn8;H VoI#@jl%e0dl>/}?\βVPvhЭf&ǗdEM-ܺv(.1ї ȅG%C= \& z*$v. &0%~FPc$ʯE|,=)YR&!_1{kA}%BJJ1:Bɘ+H\GֻNl !;Js 5r+E%҉Tʫ4ɿ vm9uЊB+-EO8V!&%KhO#/' QD4<,kjp տ៷& +_!NΊ c7#%ɺYՅ <đ_G~G?IԐ[9 oKn8Y O*5K]R&lʵkGmzH|3}%ń@HirO[DIPWR4cSij3 't&-|!rXc8bE"#WvSv7# Yv[B?}nC3)x.D ptT1#lZ$pAE,,D'AB v5ǧ-3,3f#Cd*v}R i ڰuRtXD+iHvTMU%Z+N)Z.; Wպ(u *'R:lrY Fޓ Gk[ 4[2:[y;,l 4@azr'd9^"v㈥[XyXg2qTLȹq Nro )("! G d#٣3&k!C gЩ|5s).\x\U ظr:XE4Ԭx˞r$\PCt\Lyd! xmPY3Inx%%Igpepo2S͉ ze;OdyXa,g)AE3<`[ky8 Y%0ܧz:FJbb+d5!*sdド=& Ii_%bgyuE4`x{VfB*K* )̶"P$r; QzH{p/. dr)Iqz 8ax Zf~󾘼 OI"{zkTR]#ܰ4tE&Gzj-k5x@'e>@?^4&G/A`͒ö;Z t» Vx4@q^FSի1SNJOs$FhqU/DI;7!A.!R0xy05'tUlυ`.8n1o Tt!Va}$Yp}Z`KsK# BM_ۼ"4L gׇC`KGaK|! (|ǒ+p.hq`~#f1_CS/⌭;Rg@OB!AIN, `b9AjElj/RiZJ(4^(hAT sZlY~iyA`> &"Lo#[u&Qz7ͫ`s,t7jx7B,In1Iv\Qff.IQLtt"DԮIH<dVQ=0~1(A%̰*zIA2vQO pt2||! 
!!ؔWJޛn^5MFI&bHᝧ7(,v٩RY3-MlMlV{rl|E]E\;tփFWOgePmoospkצxYzv|lwYnWJƹ@!lM\yF(cX/\ BR:u=TX N姀1Wϥ!"N0Q<u+ALoK)w{D]9LX {8KNU7/WCނz۵r pA`k NMAT,^ hޙqK]MR a[N(גtskZw1#W7_2o ]#7=ݲ{Wc$0Q1ɸ}ZVLda1QGE \RJ!L|>oxr͍^8m{yژþ{p w_}13R!~^_/ =T=C i" B@@gufqzpUIuIiIt^;qb^S>}ңtX @[Z8cJ;cy ps"LN,B"w|``ڇ%388{>˗ȭS}݉O?q2 m5.IrWXF]rl"tt}U#$Џ $4\I6,@IFZvCd|fG/Ohí.p' 6\3V1:zwAMy5^pPb+ۚ\lYmcƠRaz"v%@b ;\Z| ',bȨn̈#axj`5 v"0gd2T˼"W+]*M.إÛrYoljr 5 dsna*1*w,0&ͥ({gC^Qo8,e3J6zrߡ!`B0yE'zg4-$ ie(u5L]'IekCBZA}a^O/-,F"EKgQ.ťPNioR.A?v Ҋ!2S[i6qݴ$>5#-BToԑEv#_m~ `stɪ(p li_Ќ|9{W0ulf.pM٭{$6mĜJl(}@M"nV V S!%r<+IC_( ]\oaEwq?nEƯe\ ;KfP C}tWAG'IZ>ؘjF:^~ќ 'Rz fLjB**D<GӖ27ape@d  a& `D'gDBoNQǂRUE^dI;:s9XF2P v<+V*'E s$LW.jD_J""3}^hַQ>8FEPE%9\uQrL&@q?T ~u/p}lrs;) Y,0мDc'rwsm\x.B]Ĭ Iu<)arן..pJJ@7= ׏XNwtdjx@&,g]OYV6Ͼ 2{a$JefhD܂܃6X4r>c\k,YܗNdrn-j`/݂ D.qGZ,Bvz mXD:NFؿo]Zh2],{y&0]692~'rIUhs*UMSDqb尪`,(raۮsӅc$&a_\!M^ ӷa`w[GRB(N^B2 u2O>ffv B^L$NK0(\hu ڄʆ[f  YZyt>7!MY&̑ d兤kx(Ӄ̘k93Lyh,+*ȁt\1u\iU"~^:!d\0 l|Y-Auyp[g^C [0N /L( >*) l0!ĩ 4D5IJjJRy d@ЕxBhSp?@K0:|>K(nH@]e5TP53?\a W&bd "tg);!EB;g;q;c 1eg?gehyO26b T #4R6{ψ!mH:DZ ?ok)1c3aT{Qwd͚qx z| 訹9!jđ"@ۧM7JH55%$c[&POکNJK>$ Ĉ$GeEǼutWRL 4iJRO QpNýωzR3=VV;ԃњ#/ Ձ'ubJӃE@Ś3^Z">KRDْpԖe+[9弧_{GUYkeo^2 HƀoÜ  mBv+ DAմH شoF`] G։a./ t=ϛ\Qǀ;e!EF:-uGDMC;<*hW2 J%yŗ$k<՘#\Lg3_MwUP~[PIO /zu. A\l%A5/ajc-H%>迲7-N9]S8+.R&a4b#fC#'8+$o E 1ZDA$A`|cg@Au25@[[ЍN*h6/YikX{URhu9f)JrOIL,p %D@. ;"o`Y)6?MO&dhck~ysÿIi1;ڮX)nꀊ%MI;?e1%=V!(SSQ}aHlk{a|tJ/X`J7kU3d'Gr^];hA&#F ىtcyԇ*iOVɮx3`: q*mzu)P|* p u ?4ak\ ]xIlv9lFS+71" 4oȉU䩯tu+}$%C`ZPuK5/p/m{KP|TZ+iP ѩ7MTNۇHgN@ T])ō*+Sͯ<SM睊8 &Nbϐ^iڐ =I6% ;D ')aCFѳHՂD(Qĉ> O@2;MSƐ ]1L*{XiI>Kք7&n8 Ń! ILJC#M?@R۲{x[;&u/GAY?t3J:{72YE((3,ӊ˔v6&ՐZPɛOaָC1}pˮ9lmKJC| ؿ",1 *GA6:¿4!rSLlh.2`Lc-R.)41QքiZ$1)Uwci{`]\(`*Zf:U/&&Z?EaoOS9Z}aWXGg[$ <#yÞ֌p;`ϴ#8=*an֮Z "Tk*I>dB9֣t F1T "Yj I3i[*m..n`!lkS햨`R? 
P`3 S|R5mk9TX }H2|MNҀ^Q AZ4N+,0X40&<qLZO/:=NŒ .YCkaS} FۛQY%reRu6Y}(VkC #VL=c"xE)xFy'fo#CP "O#_K7sS7t%͚cx_%jH" BF&o0)4HfQlH7kz ׎AǬj&)??6+aY!3֢D@3´`192Htwwtk2]'b.e˻FXM2PYD~*q,GJO'i-ېb{AٍTp/ ;d)PӐ4#΅R\M,荐W0)QP ߓcDfBH #$:1.K9‡K0Ns-Du8_Ԧ8OpFp &7 >?)ɁQ'd)x[9?`H*F"wC?d,W=km?z}ۣm $:bquBvx6;,j]iz xwkTT8i+S?m6sR<4?R_6%"Q&֞ZLAJx6ɳXzq%ʦF@28r\]|K5M%[|RU(\tNT\iJ5Xz"qO3wI-No>K"U4A!5pEd)n|%f@_TF9~@FnNJx<&.Z>.D_"%928 ",VpaL 0׷tC2A'shFfkJ/kR+4æjYidzͳ ݻ&d?`ZTVaBF<)24%R9&KSzZ{1j@Bq)o@wU?udz Xik 3 83J<)a,vIMgxIv"JR=|PwZ^V7HS0sz[W^p;F R&P hnpA@Qbȿ-g}4$.NI iEKF!{|&˲$ZNA qؖ00i FzI#dhGJ_STT&^! SjA5(@E"FeH^IQO Qr|K5 A:ec 7 0B   jRr}2R H^ a-5K7EPD 囈ƒ9oUPW14<#`b .\OzPTZ0:3} `7d\/^ ;i5f> ?5 YH1%*؝SbYFeĵ=lJccShgkHI]Ll m}AO|B#ۯ0){V8>ە]aiu/|yqW542q|AH#) nV-l+6!)@ɽ˶uHMD*n)M :ᑫ C-Y2|ߟm= `6Bt+ù@lI T7|̅quk sٰ+QkҦf,yc/ݱ\)ֽͭ(" 9lLGhZ·FUU ~8sQ*~7ۺ{שH֚Z4A9*H3 %HպuCHkDR(/5Zx`}yDb<=/LwKpQ"eCQbo`y%ֳ->*KNbQ}S~_f*#Xs z@qMNEIC*QK˦QWP iΆu̱Tܿ{f0%D _It{?_)n\@?jx^0bWpyRZ&R: e$[]Z7"*֒5{OQ5k*{WAsW/򬎪 B l<]o9ZM6f 6k 54Z.ߜEL'+Wi^ V~C4NEsSH'sOnP;"Ɖ/2լ=R7B QU? ?+O#,ZO=VywӋƌaLb-`vyο!)>c|8sLJR?\IfWZUGЫSz>֣ څ?׈ >!!t`+fia9!m( rR{@?67_/1@uWX#8+ [Jqv-o"˱D%#h.%=,<8G!'a*d_21}*a ն ](2(:ǷmQXxL*TdF.mUmbId{qXR1hŨpWuUC\ॡeɦȰ(Zܢyܭ«XKi~S̀ȥs$bԐo6TDf9Hp#qezo9*. _5H=hltB` i = yYu=̹ g2(5X| *dqࢵ3Yؘ|p&hnN!,,,HLxZuqzA:A̖]Dg '"MzХ%:MG|z拚)\wXMz &w4EQZY,CVy %d]OdFm^VQ:~͙Q8uYn7)|:JQBuD3 )Yt9^ٶ2bhƮv%,|eZ)wq 1aKr#) Hb5I!5xB &XdBLaFy~A} gS_2w*ߓЗCs+f̹=`݅1 E|Y=ĩWB ;MCOf"XÇ05*gEgW~@FG.# '5ViP&[go!S<]Wn9B]4y;jw-JJ3r8O30 _-vUG*_*1J9F"{M~OUVDW'1oo#)oq!rwK) Q K=/4V7@`4])?3x28>8ǺpjT6IAZhq*@ aԎ9Ĵhq%[s;()4-EAɹ)#I3/˳ȽARJ:I.9ȀnBu-~/QanLb)ʫ%8Ԏ<;V#=(uiz/uDz+]w/)]K% H&'Y3d=,ضd \65>IԘV8T1PZ"E{4V^%& ?^"7E'('9qՍVl|&6(S[R 5ܕ! 
v;AӢ%2= 27|w٧M4Hub _CmH,ܳbVlk2Sdx= Lr% jV&w"qC_9$ 5.5h1kvuDfhY 3_{*PLzFRsriu AScm uYLj=~3FVXi=ӧ㸧NxSpèt)K XBlPI# #ȝԿTjz:α aKU07OPVf G,5ЪᜯEL|0EؓZ]/QYC=( 0KNBDFcfv):1+xV|z|nA;/.Q쑓剂VIac1;& [{g5H`V!5=qk~_ ŚZAf/ΗX0PEyc[  dჇ ۤ/SdOg~U5 IE4h;;5/c2 h yǩw Zk}hO/D۫GcHH%TNO"oxSw~J/YL9j1R˹=mI1K- F]P(Oϩk;E0!:7U-/xaAeZzď ( D]*k 4Q$dr#,,vR)@4qdTC圧GgEMi_ǹ dC8,IJ;tx_FsWR@JMX4Yw'`u>pzUApt H%j =k$oXxwAbWMp8)BȑX\T[v.uOZ 9 XfD\,HV$Er.TZwGۅ*l{ՕQ&8:!%@RzW[0_-湆 r+bh(R]5mJU#}c_ԑL^`Oɱ/EՈJf=CAZ1dYQ|1+(B;}mC4](JMjdS'@12~ Ќ]ĒgU!I6sx( ȝ*K MW=PAEYYR7zI;VCD|hnbov;3|w͍GþjWbɃ)O&w*E?a,?mWwPsF“hO7LIMSN!VXD@#Fu0`G+MufL 1iUk2D!mU[}&d0'⊁4AP“ZT`yyNƞ$Ve),ɔA{TI!_ m&ڎY4 .$P$YAQBy$fxRGv֓ɈMœ7C*sǕOmo(!Vڂ_e)6D!mA W.Vb*h._eiҘQ-ANͦd4GD5=oф喔hGNGGudOO%SIZɪ dW2m;K2>SyL؎v9>S#\ʉ 0#QD1Dz bG q@Q$5?m>>qLQP5<(;V!05X ooD!$ Lb܋G8kvg:%LQJzHV4ЉT$]Ϧ\=?|vv+N4-, 읊ۖ8˔6.esy]=/tS7$ Uƍ7aw7( '9~Rtd@6]v;sN$[ Ɂ;ZKQ`%u׆U/AAMUr4Ȟ:3غO$aiDI!.%LƄo~X[G6t52.%Lջn;+' 6>B:N՘Z"]rD92in $ `s=h똃<`MqϮjc#K{bAA0b =1I tC' .D^% =4.&}R #оG{Ӛ ]g'atЇ_tfb"e݁rHeUˁSvŕyA7_zUѩ1%G8Ye&+#O"—$) +.,n#mde{]JEE[# C5Y0eutk)8tp)\VT[!ZR "MpI`O FA Zc&&X,zJ! 
q4TԜ~*s*?u<%&l/5Oѧ>kmm6 /PROymoS-Nd z-2 C~BmERMEST[ "Zm1"'=g !m|GK:, u 'cX #ooFf["h;Y!6Ks|ybv-*A0#aVg>_/P./i(0UB_#T9֙ٸ)3sUV}"Ɂ!R9U|I!lMI9VW ]ؐ<0)"1!lG л} cpO`/ NM&ԶDv.S!eLjƓbg[%@~YYٓ*t76!/̾p@̧!GAˀ@AwY d~1il%,9 @D߈~BZ`$_}Q+tϓ2t]C*d66X7A.ȕB:x]Ɵ{MH Tv07{bkaPI%2\έ|D$y@W |6jBU6|Q4a AzD/#ɂjF`K_X]3d71]?9Dб{Q:M"L"/8yDpXPe2(gd^Hb<$T9$}KB_2ߘ'prPrHeu˰ϯH%o3 austQS4]3;$BC60us5b3Wr!blЧNyB%S׆$ 䈓 Y[q!(?gQLvN)U31!p^8gLpugVy@8ڹir =mU_7^ Bw^%ҿqijfhob@6%$uzTgWMĹ[Bz5;eN0 8UܛGq7,}FEuvu%?AOD!bn.wqzڮ{y$ p),!O('jwX"3* VƲCNh|$[B;XǾb|B7Wx[A;0CZb'/ѯ sɈږ8짼+K%nc1J62y ;X%8qGHxa gju6?QdFLh"b|J s1]ʼnN6QPdeq_PZ6՚cEL dB$Sm:?M{gy"Q\HS,N۷(3EDr'[AdLFJV ![gx񴼚,_e)g[=gOn5/W_ BJArl)du'0O2E,!<7|CϾ1¨?拯ѺL];j2CmRLb-T Chg}FjF7LԖ0t q u97NQ++**^1 6k)TgvNlc:fCO ]9{oRbBCٰmi7dkI%d6}ar b6IBf:)rC'ċmj X[-H\',/$|A2IV**X<RD$VC>&Q<:% FLÌcxDԘb̷DmmĕnZ/TTG9XO$>prS[b9|=M7ְ&yz̎IH(G࿒KƄϥ?`]?%Lc؇l']jRn!nH=[pdu+=D!}ĵ|s3A?T, rzq_+)C˦7mSȼ_.1ogFxA'4 ٗ}rxtU;B&;ӅxPC-Br| 7}hܤy͵zolSW-^~/# EiPmE n#$cKx& ³BuA~b KmK+}#roF~T@g:VŷCL a>Pj={fr,tƐ9sՠfOJ6-} $Ʒ<2U1h˖2cWKH{uZ,MJe٧(k=}>g&sn rMzh. A'nApf; -wO+k&Ys|z7UMVe^*4*Nw;S#Is_9n ^jna-&Ty M?2ZFdbK=AVd͟c3$sGM)ET73KVn )v'r3W\sʼn"j&r[fu{MJL*SpR^-kn8AG\R1{ttaMm+2 8,]/;9 ̽h&DwiQ/RJ)%0V=vJ b rؽ[ۑeGO5n*N)yyLe8JֻLqE0E݃sDbH_Lw0q{'[>,^Aeio.CxZ.ΐR}1)LxJ>EK)~(̉;~Z+p=_nXkX8"DQ7R3gͣȺ?: 'ͺ2K CIy o@Zg|(Kº%ߜMK$EGti N-[)w2,mḍ-rȲ0MAG$J3EbBsjGGR#}xN =V+JC{A"?#a;. ,*='AN#[FNȠQE^urpT{SKz |/KDX}A@44{¤Lރ.\M;aCGq˴2a,9 (: Q*}GQdפM"o!ptԏ6G2Z<7C*s QoR#ihI>0dcZCTY&Sc%k,uF^0 S=e1" j^^؋i?8Jgl ^5xxjI巎A ]1~Yn۾dPzjYVsqbTuܢs)3p6 *CVUSmJF fk6+Y?gYl{P6 Z>7yeY(n%9EiMZ֖DN@PxA^!5F$JebTɍ֖"3}B77 p8d,52]Z:(vb5I㛔j@8xI;#? 
؛ ̘[27*'$[~q_8[-#S a3wD?xzlF #Sag7qQمS{zF/ Y iHƜx+xϿ SAB:|X'2[p7'b% udKu pXm|a ʖzڃ&:(A'u\ƾ!(=l|n2m|Jk雍?E+{BTnsi 訶C~u8 2#4e_bB2j\27z[ }.ooL)g~5ڶ\\:%(uƒ{D F) 1IأV42;^Wh~̃tJ"; w^18]ZZ Dr\2ݐ>Db'P~J0/{'p ~DCۥB$oÕڷD\Gfr I1E?hꧼ8qI4 +Cn/ƅF] 3׍VqUp;Rpc6Rh8uв٨# 2$Z~ǵ Z6 |wт9̭U+Zs#=PoP]N\O,m ؟EPDdxX]·0G4e&9 eR 9qu8XFͱ?i$ ﲶ^+>iC\SKZa.):5<6rK{ ]:An&,t5Rқ̶ >5\ϰ!i }|C/!WoS]xI5b{&O:DUeתvTZ.HSZeH@)cdѨ!VZ4d־bJW:Jo4 ٯ7AJZYJ<%7z],̉EiI6d:@"'B0?hPQ ꚍ!FZlB&X {]/߁1ޟ*% A(/e`cIޢM>z&~TAe­Lz X)\W_W=h?K,@*?O PfڳݪUh2YHԾiT U.kh+HCp~Flܼ yqUxs k5,uSnjX-vfFȓu85gL*AF*\tZ/FW>ݣ[d:+A d4~`g <&HL0;mmF,Z).UḲ9<ӳ{O>T4,U@׹1S!*Z>舳2J.#'To@]EkxP6ݤ@T[l6 -84 JYҌ~Z$"㽙]k:gR3hްoQq&sHE揎ؑ˓9D{)L"i*YHevPؔ}_ Y aJکZezKa&w̳(w,%ü"sFykٻk91^8B#r*Jx\D"pFҡH>g E\%%Z#bT6q=Po[q+8P`]CZuy຺遲<87#rAaܸjQ}4_ r]}Rm-#% @-9 +6*GXw#gHU0&B DB/ sJ4ੁQӌ}Ǹ-m98OGE\/ h'nĎ,@+}M!$R.^|63P!Uc MpNړMȬULX_%ojG_k1z㥤nm.s&_2&-/G|eHƼ}daMZV|F_B08vj$,b1}*k8"_,K 0j aH]DN^;=L\l;5`ɬ^eⲠA6e'fYB(RArC3(F\OYv) *1_oNbzqӧKz[w$ɼ,u0V(1j6 TE5d\Xgbe>!y,9. t[ܫKM!.{I[8QS*͑fͦ(?ćxmȾ:33^tي4U<,Xyġ BE${l)C4_h7@*)(%^Ŋv=ΊD/]YRY< *=^$UCg *f?ĺptIRP+rSЭ"3[cQEM_Sr vy9)A5װQAtnN1zN)yPXӰP`i`qڲ)[ab@$u bwnK2CpDj.aƧߧ _68z րem߿z2f]kɴ읛[Ym%[iIFqaCtn7QM9qKfRz*%aݫ2 |\fjfͼرNW;vKV АNݤ?S)c؊8OKͥ}-yZ.ٽ$?DV 6RtOP{Uw|p'25Vf*Yh. yjq3meG]+o ${`kwC> N_CW ^52b( %Xʜ6dxw y4N\=Wh7 -faZřCt5 la[Y6 )YOئzO  ;8.y8ۯ*VX+v} S]fK>'[p"׌\ >o< +HЩa7..tK/Ji٦vhiO֝"C ?Md(EŇh #Ez ޸'1kZ>#QfҜ[=#1rF'ƲŘ*rfYWOڸRAT?U}R e*S pfD$"]GӓGJg6_rX7rT˜E@u+d1lIgC9Mn |[ +DD?-+)#wDž0- BDA RjoS^'Sڷ 4ˤG %X *tŝ.P/*^O%rqs'8VQ4?>d7)2 6PrT8h1 'V#x<W1/b'ݐ5jA 2#„H=](݌}=x5^N[ˉ7 p-Ϧ׮M1j[jH:L:j ڦP;`~_*p~M,}lH4{8'ZjA6C}ݻq_Qv^tO?P{S EW˰0AǑFDfU^?\4Bfzz@ܾCOHVSlrkm-RIlC՛e[iU!v+N:_Óhεoh^ʚ'z_޺ CHv~)2ش#6Z_v-D$]Y`}"w}f%B,1]/yDe(:aZzڳlN`=nI] YLq[[keYܸck%y_Âp5{~IXM4(|zjdO"oug,Jf:fNMhQ./,.Vhr. 
6$6d4E6YoE #q,MV]x"V!a5O&H0$ ɳ Ij/P}P%N/-sUa!mNu4Ͷ;*~ϋ,^HitM9QqaFA*{JVKI7%{#!"O;,Hb'r JHH$I½]Drk [i:Qנ9S4[Fcd.HgB?"jz8Jt9,/{EA$}~ O|YIoõަgٹ3~garyE4VQ8*QQ~f$c 4q~=F陂0+wNaW } l$ @;dD7>'#J4p1^pYU[ZQ/_ZEB/KmhC0YgbI)a@&aV;MeWaS~P;2]CZUn-Lz&^.4DGUi!NQqOtWpqƮ'B)kӰ?"/׋\4&YAX3u"[GCKzb*Von3+)Q1*zlwY>ڳ):+?vCV&hgNZ;U"Y82K,|>.AE̓z92ECOg|!'r5C'byx"4i.㣼zFC94f#]h oCS$̠&IhĒ*X ]m3ƨmE"`>Z8;Xܞb4U * mQ*{}m[)UM˨IPⶤh'џ{Hj1COhz# r`XIZ?zH8G]tJG `*4HRF ƺ1{j ::TY`EOI_`. ɦ|%#tooJL:Bգ@:lK*fq{o Fm9&MtCj]v 3&:Yg }~ucODP7̄9!" e 3O(7 0=Sf}άvs ۤ_j7F} _њ0/5)fs=lv6_Ҩ;R./ֵT=ڧӡ7yA^;nR- V^iSy2JURF AJk#VMZHL 4@jJȞt\70xu6L&kh7h\< Yi]uq+fDV'Mj].%b5[ M{iO7֑UD2>V-TΒnHY#𗲪mA񿍅FNsf]N qu8۾2B!hڿbi] 6zëeqP^,fbs8{gP"l!0T|/8E#R \S^J6;rP;ON:zla]|ZMl :$,~z{/ ud=CNĆH=z.,sB$:z?c溞tu)x6|-֖T1t<<~Ah]؃1I38@xKo0Vż/zR*JHJMI޾O:E4Q Nx+#rAwAg(y1 ' B~om̦%&ץA=pQG! R~<r<А3w 娲)Ҥ fz1xr}Hx?V9-E۠'c Q7zrϚd!w7[<=rbqnw}0=&_!7ey㨾a\e>DŽ,Ts#5c_vm>@mj7$cZi ved$ҳc1Idx4yI,Rob a\jћ0ٓ+ɫ vq^Q=c݂|V7@PVR1I*5p"Tb/_pvC.wZw0}Up1b,x5'$L_QL)fwEMӗDF6l`*tD͑]7d"!CV=+V [rrY[]+C>|}N8 'PQƥȭYUEP{Cǿ]=ӝUwZ5b Y{ސ<̆(Zf g vIuݞ9'QѷWG_vB>_7ۤtNS5к-mVAGOLE&ZN?@ tVybg ~m搈PəsM C ODDzwk'y\He/IšEV/Jf@Ome1  R[ޗe`(gr}g7^T]ij} 0\>|"4{(_qlE,¥RV|z*5O1p330mȥ[{>r*+!?S?D\!eP5D&1"Bl!{I>Ƅ|;YDLnGke_j|̑A ^)aʆSp'+ s vT8kص{9XCtC9\jK3oZ_,wlM̟sy$T-R`=l36, 7\SD&z164X]G0hoG~:=" xb uyUF_L}kC(g-UފcWu)FfgN$⧻柄4\)iRnsDc* d;!pcBDz4xǛ;Sg&G8Q4w8sZ`BdzU${ʫa"(wDsU(hG3>OX5Vt+!)ρ~a\$Hr˪\U +9CQ;.G곽:CGr!8D:GOPtA ?)~9zFut׃g";x/9zfSLzfБӄIy_y?FK޾$j*V5LCӬmr5 WWo>Z-%ԫX:X_T*gF- ^[_OLrlǸ#uV9I}=?X6&*^p}+T&@#elq%/;0-MwPZDmHpl5XDtgHci")sa!Ѝz`ٓq;diޭn}3G,ø1[rk_hcD[Ps9bH Km>KNBvhe`n6YUlZy%ly7+ ܡ/ ,;4WZؗ~'I6E/|cC5lrxPrQG)2_czщ_p:њ̦sXՊ$^,[h'5+V!\.zLV;dJs[7:;vk)vvtFH+W4vMEjqX"zQp!R\1')E]'%n)]hTey~fEz>i -g3SKV9nz˲>Y7me(}U(kK{]_g6W0@c⓴,<$9% G:[f.:[QxVr"fګW˅}fVX{Bđ$#CT4\A1DX7 TĤc{FZv@j}8xJ QFgKL$1L5Bzy%dž'QC&56@j)cK4d>Ha-JwQGH@0$Hzrzt4:r@*U?v!}/z?Q d"$:D$BBџK$?ڟ?.F,)?#2iZ.뺿%YW1/ }lMЄM!EMl@ȖY"fR2nٽmښ+NuWzD5_w~/[g_̒@=X@) BB9(Tکߺvvwr֯yGp]oGx! xd$$( ȊMɺP6إ;h2Rwnfg_d$TRKDTT"  ‰ $ZE0b!I& d0N\f5r9dM2}0' I! 
dJ@1d04!bD0 V T߰qQ:2y|;?ղI̐I !QBM6)ɵ.MaXmbFhU9Rse#]v=7GgB^H)$ ɬx=tօTwb2z\{߻^, 0e$0A00PPk%2 KXniѠ׏4oG/o LU@""$1b Bb1!dLG*௯PFGŬ5 r@uI l`,:5ɬd׬)5U (huT:6Zt22[MrC[kc[CX*b)5^KCY,Y5v}ǻ|~wml!ͬ"# HmЁX/w_) ddd2)/29@ʑArE6LkZ=Ä!>ɈC I&ETbd,4'f3()EeWZ{n\orxgBflbe 0` s!Zj!*VWqQ\ݟx?Np̒fE$2s@sT &ba3 9VhY ʲaʶyv}w{ߣ>Gol6@h6٦LڱT۵Tt/HlZ{*QŰ5sߋ ZCTBjb1jB"iaԉ PSLSRNS4QEGUV+ۣ'ώݟϐ(+N{I`d60H"N7EIOOeS[Sk,0BhAE D >_O{uMp\X-U. kbPRiNYSF–i ZcG~w_d p Y"#d" 9:%5 ڠX抱G5UgR ͙͌c|=ϳNN._']OeI!$D8tUE,8BZ5S#GN??{̒ar @SI&(dB'*JYށzC:TFejSt+;Y}o?킀I60XٶPY66jmm ΀R0,#n :./Y ^TB7z5 VAoGXVґva P Di"X)jb5 @UFCN/AR j67ښAfhݣ_bΎ.-^~8!MN$a%$`P8( F76xYLZ-k8jٻ˂ y_rz6i fTRKHmUU0Rm63V7Pڂjm5EhkWc_~mw>_wBlll6@d6)IalI3-fĺ_'?w:000 !0dV`0 "SU 5*5{jUUvhަ};~Tj$5BCPHMMKY.PQRp)C[5蠦%kn{ekw~O7߻BL( ܐ1`,"L&HKb 2Y 5VfLW_#7|?e@ $r"Y NA2FR2 =UVJk+w/ٻ⯓̐$ Pd "0̙DQ5UK L2;w~w{?!@0L!  *#H"B&0% Hڪ; ꪢOGaYc|_0)n$!e ̂()7)K;UbnU7MZ vem.L7n4mnݺH K`S Hf lE` 6Q C]-rf0]"8-c&b vȇL33oR!!fj3C3)m<<͉DI AP/re:C%M'긺 ޝeUZo&x#( xdUUx^]4/0ݢ+ ?f^B&??T"D8:lVq,Y8x.&3mj8su-?1w |}7'q$ V@X La @Ŵ1,¢c*͛Pڌphq꽂ϫGg9GE7 dP7 &7@7~atfSj+zv!o^/9oM@Vooo:֨uwJ:kӽk-i(䷫=>n?>?K.Ώ>7OqB$܂!Ĩ$Y8Y'& j*8’#G,Sp={y}|$N!xh)P8$ECHaijS,$CUbtn+{~w~8<($ 2@P,@8X(p+' Cc̪7n+ ϗhї}ـH} bd`) XðeL2ALZ*%-!Etv<[ݞww<||܄ͷCwHHBII RR+UIаc: zd:rQT=6諆s!a仿<=ϩԘa$HL Ր,/DP4a{f qR|~\>^Ǘ?ug֩&rB@̒^,Y3@)Ő8NDʳ*fNGd)#YZl)/w{x6m $D`Hm 64dQI1ۃmECk,ءumIz Uv[y}tw^8N8PXcjC ǮI8mJ)4qZR}' ;|;7ڡ0Ha!RZK0`RD0&0cU 0SLY8S-Vڊy'Wlcc'NcAM$l!˪I8 8"P=fGc)8 ړjzhdR ԢȯIqny{'8P$Ipr"TX8 LZCpܨ6M2WzWX~C}` `1RaPD,0@¨e0C/fXYL+ t;߿d!& &* b$@0"j Ubl)z^ޯtS۽_mg#Y'$pY8X728 XXuM nV1`pY-[mڭbcs]L $"Z" !LinPo e`#/OUo]|w$ȒIe$2@ (2PȒ'7e#DS"UrKG+`kݷw~œ< I2! 
O jaHpX' + aI7<<2*2\8^)jGSIwo߄$ `R P0DY0A ֒„,axkU֫U W{?>uB:t, ׀uE\'\]Okuz]zhJK%){}.7\͐$Mؒ*},CbD 6d&|j*fT.V{iLW ?@!5 hZ2Md ՓX.Zv.)Rj]ZܿAq-ˣoT%⢡/@a/Pkx~[]b=ZVG?˫9<O HC9 EI6gTaaa66RfJա(S[Il6Qy>87HHrI_,&Ȫ(l ]rT3+6&] m1 9͝yag/twutT>c]:?`I!e0"Caܒw'Vh'rC;ݩm{u]'a}ܞopus__ίѰ 3c(TbaIӆi-BعUQxE+gf{һ|6GOns^7O$Nt T2KBsk:%RUo:y'0su'w1]|^#Æ\n'㷽}]7~N-z@PUPHvR-!]d-Ƚ碣Qںc>}\nd(]~tJR)R>H /Q !_箏['`$v$@-@BhDQV,z"O.;F؅]RG]W?_;ɳoWӽĒt=2{*_&0ˤi}ѕ*+3^Qs鯧ݷ~w{?ù'm>ǽ tID @=9'DB= XW:ikr[RW2tTKpx9ýwG}/y~?-ВILaq.Af$n přhB(a<AقKQqt X9*T?u.n!z,JFC*AwS~]ߖ;G5^;g|dڒDP76nn%&IZnBVT4"t>.@8T'Q@QT UVb8CUI$1cUd0iŸ^/cmU[=}k_\^goWK $WEa:Δ*: -!'B QAcE(E9k9wlWW Goz{zV:[/KSz!+FNP I2JNJ)+ RÖrT*g-Wg.r'3˂A;˥L}@8a=VBtPPb,8ĒrKh~5.: CpE(!ܻ H d܉+uD˫mUIʬL~vؙD$R() 2RɋP1LK A+*ɒzL 뀈@ 4}1uqn[togN=c[S9s$@d̒?2jzT%L|]r[g{wx?7V!>Oُ'PT8VgSsHkunhl˸,p}^?s.]xo7!$`NF#䢠P 9!Ȱ䜇'#8df:GOF7r/~wL?2U u0I *0D*ѪYlek6i|_O=Ą q` "D `b.2uڅؔ^F W-U-K/6|9 $,)& $*(&{$`bK"eR+Jֵ+|=/"R P)d/X"Ix/4ئ%"Pm\|=gn{HI  H`)ʒR0 +ChF^eZk/hx`9<2L$2H %!.Mh jH“UF.6џ{׿;r2@Ddd"*(,1d0!zN8J-%kutogo-݇&2I$bb1&2RG ]XZֶ~4Z_^Os{?~Aa$&LPDXC$d"C$0HL޹*дLjZr3r۸}^38BC0P&h*I oAL3%͂v̰ڜ%kEo}ܡ! 0ɀH(&B꬙E%`Yk1C%U9o4xh %buڧz^߆.&D$҄ =vUoͦIcLLAeou57WUv6a$֫ !MuE"Mj,CS uNjUU1)s;O7]y9O@P&p4H^&k |l,ml[1EΫeV}ݕ˹KH%켨UKؒkթ5]"=֪)淭`B@'R !%Mk(Zibhݷ<}_G8Ē@1A 1&!p"& iL0TRk*/'-ը$ j5Eu:[TB]qwGӤ ҈M$ i@4Yd-ZֲJ߱i[/wpp}Gww7&樛Y&A`Q7@܈fpM˖"ôwPK賡˽x>^r@%HK@ /zI| 佾M X7‰XZ޽^Mol;>HBBI&f&S4=aM&d64[3)U[J9'w#Wտ RI1@7Mⱁ=roŕ֣~uIV'Eԏ^7N5\^kv{o2rI "(9B(e$P1IYTe&VhhrcY=fc`^^I~dS/{(MQ֒\:\^ll I l zgP%lg6d6<)9u sO8<}1 bLE .)J YjLacR<(LLPwoL)eZ\ZڍXL5yǽZ5XFMIkbCX(Mh!5KB K4#Izָ;}crvlHI{4U3M6,,]I[7&cM5 my8<_xa  `LTb,V .R E]eac~ooo}01 qa,ʄ1",@ĆC8b%Uхk*V%Rݞ[=/a 0U``"L0HX`. 
YTjtZ.=S}w7^H~e |%/|5‘ti}r8rۺvc HHLQI1FĆ(&$1$ֈa1Cuo\Z˿iny~KB)ƌ $hdP8^98xJ@-V94[u98x7=3von}?lMڄ*@HfC-+pa.kJP[2UYYUL2ov{o/s{Ӕ$"HID&@d e!*,1 rj/5T13Ͽx;esu}v$ؤ$b,@M6 a0Od,,67UeQUJBͷnǣ& LP$$dEk jMFBrɨ5կ];k|/D^1T "1(bSQcfX2(*3* x;{0W}ϫd6 $ RHRQj,d͌0ŕT!!B`0 !_RaVe) NԚ[ZUY_x@/$5E^ R@7/ zQuuP:vqv{<\__x/PJcU`J05 QUrc.?tϛ$PCL,B`Yb, j%6e& QCRaÇ6~o?؂$Oe`D \M(gP4l*aRdhKջWj51TCPPjQRjH1a7IRZ5Z=uM']*w0G{}?G00 QP&"TXc!@b0"cj= SF8[I `׮րS, dQ pֱ t tSUgRi-Z:uc;vld1 CLTJY H_,\j%3jbˏ/BL )E5 ,` 8LeSYhZZ^t'G _?ѮI\$TPMkkE&k5Z,WQ5U|V-cq|֐uY @CyuΰumְU{ uXWڻֺ\w+qx~?ϨMJ I)l#!)5,U5J!HuS%LEu+P)m/WtzsroI 0E PPI*fQ@oY%+UJXx.nc?.>s3C5@S9 Vp I̙fRga1EPfI:Pu( NIqz i8=)UmT4ZVm{MVwv^}Bhi``ڄ C4 lDEԛ/Cv0]kpN×m...gٷ0@z`C/!sz%:jj(vlvTrLny>| P!M)a, $;y+0ZH ukx㫮Y]su;HI@7444RMnDFnCl6f꩷:zťM>+&Ϗy~=p b & `끬SY!:Қ#ͪޖ넄BuSKxu\i7iߢ-Mu5x4{uڷwy;=b)$2bđƩ j E$( (Lɓ`"YeUS "4j>,;@HocY7 o|FC~P*C~w?gt0XL  0FL-i*0 FuR_Tz5nn{,{+!RLD7P%`Q}Uqۢrۓݽ j JIIR`bɩd5$Hi]]wBj+H]4I;'I!@ T oEd HjIdʢM2^nVv6x=K/l!Hl ,dCc6)L`\*Lڢi=^ߏ?O>lM!#!jMR!6eMPZjlU6Y,7m69y5w!,1E)* &"P4k_CWxph& C!YRbXĘY˧-Eˮtiї|/w~rPkB,!B+ k hj 5.uݞO>/ 2Tb JBc,PI 2!fRa )LFUXNχo&]>w>21LLR(*@bHa"Ɏ3d:h`ceźeÆ'/o{o?/cCq$/ nbBn-[lCtP)]Mw4xϣ=|Gn};m$7;Z&, „n7@f,>uoEx_!bS$22H>4! 30aV#_o5Ij=jQ"Hj‰׬Y h5Ekuuƶ,woyw?qBC6>fldf+&ci r#&Q@,90+}]<ݪ?HCL i C~JXiRixVFuءP6hjGG}?!$0 I8P D `Aj*`%U#}|,ܗMnZt~ra$@˜C^L4&05=6Csq[{p-4$A$뤤҈ BXiRiZmM\.o/qfI8HfB )sXf,XɔD2shckZΛuϯjpq{}}IaZH*MJ`R*DXJilU )p2ErY٧!۶E޷>/rc+?_ $$E0D ԗ5%:kj[o120 GZLe~%bBpncE튧U+w;CdB@)ScIM2M6 "ɲXu[iKЉYgF~Ϲ[}_|@fZE I6)6jl%2m {VpeV)ꩶlK3dѷy66N_G_I^̛BIHІ0&[!dvԵYDYټ6%DO>[K?w;?G;ܑN$6Luminescence/man/0000755000176200001440000000000013604173345013445 5ustar liggesusersLuminescence/man/calc_CentralDose.Rd0000644000176200001440000001160513604173244017122 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_CentralDose.R \name{calc_CentralDose} \alias{calc_CentralDose} \title{Apply the central age model (CAM) after Galbraith et al. 
(1999) to a given De distribution} \usage{ calc_CentralDose(data, sigmab, log = TRUE, na.rm = FALSE, plot = TRUE, ...) } \arguments{ \item{data}{\linkS4class{RLum.Results} or \link{data.frame} (\strong{required}): for \link{data.frame}: two columns with De \code{(data[,1])} and De error \code{(data[,2])}} \item{sigmab}{\link{numeric} (\emph{with default}): additional spread in De values. This value represents the expected overdispersion in the data should the sample be well-bleached (Cunningham & Walling 2012, p. 100). \strong{NOTE}: For the logged model (\code{log = TRUE}) this value must be a fraction, e.g. 0.2 (= 20 \\%). If the un-logged model is used (\code{log = FALSE}), sigmab must be provided in the same absolute units of the De values (seconds or Gray).} \item{log}{\link{logical} (\emph{with default}): fit the (un-)logged central age model to De data} \item{na.rm}{\link{logical} (\emph{with default}): strip \code{NA} values before the computation proceeds} \item{plot}{\link{logical} (\emph{with default}): plot output} \item{...}{further arguments (\code{trace}, \code{verbose}).} } \value{ Returns a plot (\emph{optional}) and terminal output. In addition an \linkS4class{RLum.Results} object is returned containing the following elements: \item{.$summary}{\link{data.frame} summary of all relevant model results.} \item{.$data}{\link{data.frame} original input data} \item{.$args}{\link{list} used arguments} \item{.$call}{\link{call} the function call} \item{.$profile}{\link{data.frame} the log likelihood profile for sigma} The output should be accessed using the function \link{get_RLum} } \description{ This function calculates the central dose and dispersion of the De distribution, their standard errors and the profile log likelihood function for sigma. } \details{ This function uses the equations of Galbraith & Roberts (2012). The parameters \code{delta} and \code{sigma} are estimated by numerically solving eq. 15 and 16. 
Their standard errors are approximated using eq. 17. In addition, the profile log-likelihood function for \code{sigma} is calculated using eq. 18 and presented as a plot. Numerical values of the maximum likelihood approach are \strong{only} presented in the plot and \strong{not} in the console. A detailed explanation on maximum likelihood estimation can be found in the appendix of Galbraith & Laslett (1993, 468-470) and Galbraith & Roberts (2012, 15) } \section{Function version}{ 1.4.0 } \examples{ ##load example data data(ExampleData.DeValues, envir = environment()) ##apply the central dose model calc_CentralDose(ExampleData.DeValues$CA1) } \section{How to cite}{ Burow, C., 2020. calc_CentralDose(): Apply the central age model (CAM) after Galbraith et al. (1999) to a given De distribution. Function version 1.4.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Galbraith, R.F. & Laslett, G.M., 1993. Statistical models for mixed fission track ages. Nuclear Tracks Radiation Measurements 4, 459-470. Galbraith, R.F., Roberts, R.G., Laslett, G.M., Yoshida, H. & Olley, J.M., 1999. Optical dating of single grains of quartz from Jinmium rock shelter, northern Australia. Part I: experimental design and statistical models. Archaeometry 41, 339-364. Galbraith, R.F. & Roberts, R.G., 2012. Statistical aspects of equivalent dose and error calculation and display in OSL dating: An overview and some recommendations. Quaternary Geochronology 11, 1-27. \strong{Further reading} Arnold, L.J. & Roberts, R.G., 2009. Stochastic modelling of multi-grain equivalent dose (De) distributions: Implications for OSL dating of sediment mixtures. Quaternary Geochronology 4, 204-230. Bailey, R.M. & Arnold, L.J., 2006. 
Statistical modelling of single grain quartz De distributions and an assessment of procedures for estimating burial dose. Quaternary Science Reviews 25, 2475-2502. Cunningham, A.C. & Wallinga, J., 2012. Realizing the potential of fluvial archives using robust OSL chronologies. Quaternary Geochronology 12, 98-106. Rodnight, H., Duller, G.A.T., Wintle, A.G. & Tooth, S., 2006. Assessing the reproducibility and accuracy of optical dating of fluvial deposits. Quaternary Geochronology, 1 109-120. Rodnight, H., 2008. How many equivalent dose values are needed to obtain a reproducible distribution?. Ancient TL 26, 3-10. } \seealso{ \link{plot}, \link{calc_CommonDose}, \link{calc_FiniteMixture}, \link{calc_FuchsLang2001}, \link{calc_MinDose} } \author{ Christoph Burow, University of Cologne (Germany) \cr Based on a rewritten S script of Rex Galbraith, 2010 , RLum Developer Team} Luminescence/man/calc_IEU.Rd0000644000176200001440000000604113604173244015337 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_IEU.R \name{calc_IEU} \alias{calc_IEU} \title{Apply the internal-external-uncertainty (IEU) model after Thomsen et al. (2007) to a given De distribution} \usage{ calc_IEU(data, a, b, interval, decimal.point = 2, plot = TRUE, ...) } \arguments{ \item{data}{\linkS4class{RLum.Results} or \link{data.frame} (\strong{required}): for \link{data.frame}: two columns with De \code{(data[,1])} and De error \code{(values[,2])}} \item{a}{\link{numeric} (\strong{required}): slope} \item{b}{\link{numeric} (\strong{required}): intercept} \item{interval}{\link{numeric} (\strong{required}): fixed interval (e.g. 5 Gy) used for iteration of Dbar, from the mean to Lowest.De used to create Graph.IEU \verb{[Dbar.Fixed vs Z]}} \item{decimal.point}{\link{numeric} (\emph{with default}): number of decimal points for rounding calculations (e.g. 
2)} \item{plot}{\link{logical} (\emph{with default}): plot output} \item{...}{further arguments (\verb{trace, verbose}).} } \value{ Returns a plot (\emph{optional}) and terminal output. In addition an \linkS4class{RLum.Results} object is returned containing the following elements: \item{.$summary}{\link{data.frame} summary of all relevant model results.} \item{.$data}{\link{data.frame} original input data} \item{.$args}{\link{list} used arguments} \item{.$call}{\link{call} the function call} \item{.$tables}{\link{list} a list of data frames containing all calculation tables} The output should be accessed using the function \link{get_RLum}. } \description{ Function to calculate the IEU De for a De data set. } \details{ This function uses the equations of Thomsen et al. (2007). The parameters a and b are estimated from dose-recovery experiments. } \section{Function version}{ 0.1.1 } \examples{ ## load data data(ExampleData.DeValues, envir = environment()) ## apply the IEU model ieu <- calc_IEU(ExampleData.DeValues$CA1, a = 0.2, b = 1.9, interval = 1) } \section{How to cite}{ Smedley, R.K., 2020. calc_IEU(): Apply the internal-external-uncertainty (IEU) model after Thomsen et al. (2007) to a given De distribution. Function version 0.1.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Smedley, R.K., 2015. A new R function for the Internal External Uncertainty (IEU) model. Ancient TL 33, 16-21. Thomsen, K.J., Murray, A.S., Boetter-Jensen, L. & Kinahan, J., 2007. Determination of burial dose in incompletely bleached fluvial samples using single grains of quartz. Radiation Measurements 42, 370-379. 
} \seealso{ \link{plot}, \link{calc_CommonDose}, \link{calc_CentralDose}, \link{calc_FiniteMixture}, \link{calc_FuchsLang2001}, \link{calc_MinDose} } \author{ Rachel Smedley, Geography & Earth Sciences, Aberystwyth University (United Kingdom) \cr Based on an excel spreadsheet and accompanying macro written by Kristina Thomsen. , RLum Developer Team} Luminescence/man/fit_SurfaceExposure.Rd0000644000176200001440000002057213604173244017725 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/fit_SurfaceExposure.R \name{fit_SurfaceExposure} \alias{fit_SurfaceExposure} \title{Nonlinear Least Squares Fit for OSL surface exposure data} \usage{ fit_SurfaceExposure( data, sigmaphi = NULL, mu = NULL, age = NULL, Ddot = NULL, D0 = NULL, weights = FALSE, plot = TRUE, legend = TRUE, error_bars = TRUE, coord_flip = FALSE, ... ) } \arguments{ \item{data}{\link{data.frame} or \link{list} (\strong{required}): Measured OSL surface exposure data with the following structure:\preformatted{ (optional) | depth (a.u.)| intensity | error | | [ ,1] | [ ,2] | [ ,3] | |-------------|-----------|-------| [1, ]| ~~~~ | ~~~~ | ~~~~ | [2, ]| ~~~~ | ~~~~ | ~~~~ | ... | ... | ... | ... | [x, ]| ~~~~ | ~~~~ | ~~~~ | } Alternatively, a \link{list} of \code{data.frames} can be provided, where each \code{data.frame} has the same structure as shown above, with the exception that they must \strong{not} include the optional error column. Providing a \link{list} as input automatically activates the global fitting procedure (see details).} \item{sigmaphi}{\link{numeric} (\emph{optional}): A numeric value for sigmaphi, i.e. the charge detrapping rate. Example: \code{sigmaphi = 5e-10}} \item{mu}{\link{numeric} (\emph{optional}): A numeric value for mu, i.e. the light attenuation coefficient. Example: \code{mu = 0.9}} \item{age}{\link{numeric} (\emph{optional}): The age (a) of the sample, if known. 
If \code{data} is a \link{list} of \emph{x} samples, then \code{age} must be a numeric vector of length \emph{x}. Example: \code{age = 10000}, or \code{age = c(1e4, 1e5, 1e6)}.} \item{Ddot}{\link{numeric} (\emph{optional}): A numeric value for the environmental dose rate (Gy/ka). For this argument to be considered a value for \code{D0} must also be provided; otherwise it will be ignored.} \item{D0}{\link{numeric} (\emph{optional}): A numeric value for the characteristic saturation dose (Gy). For this argument to be considered a value for \code{Ddot} must also be provided; otherwise it will be ignored.} \item{weights}{\link{logical} (\emph{optional}): If \code{TRUE} the fit will be weighted by the inverse square of the error. Requires \code{data} to be a \link{data.frame} with three columns.} \item{plot}{\link{logical} (\emph{optional}): Show or hide the plot.} \item{legend}{\link{logical} (\emph{optional}): Show or hide the equation inside the plot.} \item{error_bars}{\link{logical} (\emph{optional}): Show or hide error bars (only applies if errors were provided).} \item{coord_flip}{\link{logical} (\emph{optional}): Flip the coordinate system.} \item{...}{Further parameters passed to \link{plot}. 
Custom parameters include: \itemize{ \item \code{verbose} (\link{logical}): show or hide console output \item \code{line_col}: Color of the fitted line \item \code{line_lty}: Type of the fitted line (see \code{lty} in \code{?par}) \item \code{line_lwd}: Line width of the fitted line (see \code{lwd} in \code{?par}) }} } \value{ Function returns results numerically and graphically: -----------------------------------\cr \verb{[ NUMERICAL OUTPUT ]}\cr -----------------------------------\cr \strong{\code{RLum.Results}}-object \strong{slot:} \strong{\verb{@data}} \tabular{lll}{ \strong{Element} \tab \strong{Type} \tab \strong{Description}\cr \verb{$summary} \tab \code{data.frame} \tab summary of the fitting results \cr \verb{$data} \tab \code{data.frame} \tab the original input data \cr \verb{$fit} \tab \code{nls} \tab the fitting object produced by \link[minpack.lm:nlsLM]{minpack.lm::nlsLM} \cr \verb{$args} \tab \code{character} \tab arguments of the call \cr \verb{$call} \tab \code{call} \tab the original function call \cr } \strong{slot:} \strong{\verb{@info}} Currently unused. ------------------------\cr \verb{[ PLOT OUTPUT ]}\cr ------------------------\cr A scatter plot of the provided depth-intensity OSL surface exposure data with the fitted model. } \description{ This function determines the (weighted) least-squares estimates of the parameters of either eq. 1 in \emph{Sohbati et al. (2012a)} or eq. 12 in \emph{Sohbati et al. (2012b)} for a given OSL surface exposure data set (\strong{BETA}). } \details{ \strong{Weighted fitting} If \code{weights = TRUE} the function will use the inverse square of the error (\eqn{1/\sigma^2}) as weights during fitting using \link[minpack.lm:nlsLM]{minpack.lm::nlsLM}. Naturally, for this to take effect individual errors must be provided in the third column of the \code{data.frame} for \code{data}. 
Weighted fitting is \strong{not} supported if \code{data} is a list of multiple \code{data.frame}s, i.e., it is not available for global fitting. \strong{Dose rate} If any of the arguments \code{Ddot} or \code{D0} is at its default value (\code{NULL}), this function will fit eq. 1 in Sohbati et al. (2012a) to the data. If the effect of dose rate (i.e., signal saturation) needs to be considered, numeric values for the dose rate (\code{Ddot}) (in Gy/ka) and the characteristic saturation dose (\code{D0}) (in Gy) must be provided. The function will then fit eq. 12 in Sohbati et al. (2012b) to the data. \strong{NOTE}: Currently, this function does \strong{not} consider the variability of the dose rate with sample depth (\code{x})! In the original equation the dose rate \code{D} is an arbitrary function of \code{x} (term \code{D(x)}), but here \code{D} is assumed constant. \strong{Global fitting} If \code{data} is \link{list} of multiple \code{data.frame}s, each representing a separate sample, the function automatically performs a global fit to the data. This may be useful to better constrain the parameters \code{sigmaphi} or \code{mu} and \strong{requires} that known ages for each sample is provided (e.g., \code{age = c(100, 1000)} if \code{data} is a list with two samples). } \note{ \strong{This function has BETA status. 
If possible, results should be} \strong{cross-checked.} } \section{Function version}{ 0.1.0 } \examples{ ## Load example data data("ExampleData.SurfaceExposure") ## Example 1 - Single sample # Known parameters: 10000 a, mu = 0.9, sigmaphi = 5e-10 sample_1 <- ExampleData.SurfaceExposure$sample_1 head(sample_1) results <- fit_SurfaceExposure(sample_1, mu = 0.9, sigmaphi = 5e-10) get_RLum(results) ## Example 2 - Single sample and considering dose rate # Known parameters: 10000 a, mu = 0.9, sigmaphi = 5e-10, # dose rate = 2.5 Gy/ka, D0 = 40 Gy sample_2 <- ExampleData.SurfaceExposure$sample_2 head(sample_2) results <- fit_SurfaceExposure(sample_2, mu = 0.9, sigmaphi = 5e-10, Ddot = 2.5, D0 = 40) get_RLum(results) ## Example 3 - Multiple samples (global fit) to better constrain 'mu' # Known parameters: ages = 1e3, 1e4, 1e5, 1e6 a, mu = 0.9, sigmaphi = 5e-10 set_1 <- ExampleData.SurfaceExposure$set_1 str(set_1, max.level = 2) results <- fit_SurfaceExposure(set_1, age = c(1e3, 1e4, 1e5, 1e6), sigmaphi = 5e-10) get_RLum(results) ## Example 4 - Multiple samples (global fit) and considering dose rate # Known parameters: ages = 1e2, 1e3, 1e4, 1e5, 1e6 a, mu = 0.9, sigmaphi = 5e-10, # dose rate = 1.0 Gy/ka, D0 = 40 Gy set_2 <- ExampleData.SurfaceExposure$set_2 str(set_2, max.level = 2) results <- fit_SurfaceExposure(set_2, age = c(1e2, 1e3, 1e4, 1e5, 1e6), sigmaphi = 5e-10, Ddot = 1, D0 = 40) get_RLum(results) } \section{How to cite}{ Burow, C., 2020. fit_SurfaceExposure(): Nonlinear Least Squares Fit for OSL surface exposure data. Function version 0.1.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Sohbati, R., Murray, A.S., Chapot, M.S., Jain, M., Pederson, J., 2012a. Optically stimulated luminescence (OSL) as a chronometer for surface exposure dating.
Journal of Geophysical Research 117, B09202. doi:10.1029/2012JB009383 Sohbati, R., Jain, M., Murray, A.S., 2012b. Surface exposure dating of non-terrestrial bodies using optically stimulated luminescence: A new method. Icarus 221, 160-166. } \seealso{ \link{ExampleData.SurfaceExposure}, \link[minpack.lm:nlsLM]{minpack.lm::nlsLM} } \author{ Christoph Burow, University of Cologne (Germany) , RLum Developer Team} \keyword{datagen} Luminescence/man/plot_DetPlot.Rd0000644000176200001440000001410013604173245016340 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_DetPlot.R \name{plot_DetPlot} \alias{plot_DetPlot} \title{Create De(t) plot} \usage{ plot_DetPlot( object, signal.integral.min, signal.integral.max, background.integral.min, background.integral.max, method = "shift", signal_integral.seq = NULL, analyse_function = "analyse_SAR.CWOSL", analyse_function.control = list(), n.channels = NULL, show_ShineDownCurve = TRUE, respect_RC.Status = FALSE, verbose = TRUE, ... ) } \arguments{ \item{object}{\linkS4class{RLum.Analysis} (\strong{required}): input object containing data for analysis} \item{signal.integral.min}{\link{integer} (\strong{required}): lower bound of the signal integral.} \item{signal.integral.max}{\link{integer} (\strong{required}): upper bound of the signal integral.} \item{background.integral.min}{\link{integer} (\strong{required}): lower bound of the background integral.} \item{background.integral.max}{\link{integer} (\strong{required}): upper bound of the background integral.} \item{method}{\link{character} (\emph{with default}): method applied for constructing the De(t) plot.
\itemize{ \item \code{shift} (\emph{the default}): the chosen signal integral is shifted along the shine down curve, \item \code{expansion}: the chosen signal integral is expanded each time by its length }} \item{signal_integral.seq}{\link{numeric} (\emph{optional}): argument to provide an own signal integral sequence for constructing the De(t) plot} \item{analyse_function}{\link{character} (\emph{with default}): name of the analyse function to be called. Supported functions are: \code{'analyse_SAR.CWOSL'}, \code{'analyse_pIRIRSequence'}} \item{analyse_function.control}{\link{list} (\emph{optional}): selected arguments to be passed to the supported analyse functions (\code{'analyse_SAR.CWOSL'}, \code{'analyse_pIRIRSequence'})} \item{n.channels}{\link{integer} (\emph{optional}): number of channels used for the De(t) plot. If nothing is provided all De-values are calculated and plotted until the start of the background integral.} \item{show_ShineDownCurve}{\link{logical} (\emph{with default}): enables or disables shine down curve in the plot output} \item{respect_RC.Status}{\link{logical} (\emph{with default}): remove De-values with 'FAILED' RC.Status from the plot (cf. \link{analyse_SAR.CWOSL} and \link{analyse_pIRIRSequence})} \item{verbose}{\link{logical} (\emph{with default}): enables or disables terminal feedback} \item{...}{further arguments and graphical parameters passed to \link{plot.default}, \link{analyse_SAR.CWOSL} and \link{analyse_pIRIRSequence} (see details for further information).
Plot control parameters are: \code{ylim}, \code{xlim}, \code{ylab}, \code{xlab}, \code{main}, \code{pch}, \code{mtext}, \code{cex}, \code{legend}, \code{legend.text}, \code{legend.pos}} } \value{ A plot and an \linkS4class{RLum.Results} object with the produced De values \verb{@data}: \tabular{lll}{ \strong{Object} \tab \strong{Type} \tab \strong{Description}\cr De.values \tab \code{data.frame} \tab table with De values \cr signal_integral.seq \tab \code{numeric} \tab integral sequence used for the calculation } \verb{@info}: \tabular{lll}{ \strong{Object} \tab \strong{Type} \tab \strong{Description}\cr call \tab \code{call} \tab the original function call } } \description{ Plots the equivalent dose (De) in dependency of the chosen signal integral (cf. Bailey et al., 2003). The function is simply passing several arguments to the function \link{plot} and the used analysis functions and runs it in a loop. Example: \code{legend.pos} for legend position, \code{legend} for legend text. } \details{ \strong{method} The original method presented by Bailey et al., 2003 shifted the signal integrals and slightly extended them accounting for changes in the counting statistics. Example: \code{c(1:3, 3:5, 5:7)}. However, here also another method is provided allowing to expand the signal integral by consecutively expanding the integral by its chosen length. Example: \code{c(1:3, 1:5, 1:7)} Note that in both cases the integral limits overlap. The finally applied limits are part of the function output. \strong{analyse_function.control} The argument \code{analyse_function.control} currently supports the following arguments \code{sequence.structure}, \code{dose.points}, \code{mtext.outer}, \code{fit.method}, \code{fit.force_through_origin}, \code{plot}, \code{plot.single} } \note{ The entire analysis is based on the used analysis functions, namely \link{analyse_SAR.CWOSL} and \link{analyse_pIRIRSequence}.
However, the integrity checks of this function are not as thorough as in these functions themselves. It means that every sequence should be checked carefully before running long calculations using several hundreds of channels. } \section{Function version}{ 0.1.3 } \examples{ \dontrun{ ##load data ##ExampleData.BINfileData contains two BINfileData objects ##CWOSL.SAR.Data and TL.SAR.Data data(ExampleData.BINfileData, envir = environment()) ##transform the values from the first position in a RLum.Analysis object object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) plot_DetPlot(object, signal.integral.min = 1, signal.integral.max = 3, background.integral.min = 900, background.integral.max = 1000, n.channels = 5, ) } } \section{How to cite}{ Kreutzer, S., 2020. plot_DetPlot(): Create De(t) plot. Function version 0.1.3. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Bailey, R.M., Singarayer, J.S., Ward, S., Stokes, S., 2003. Identification of partial resetting using De as a function of illumination time. Radiation Measurements 37, 511-518. doi:10.1016/S1350-4487(03)00063-5 } \seealso{ \link{plot}, \link{analyse_SAR.CWOSL}, \link{analyse_pIRIRSequence} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} Luminescence/man/plot_RLum.Rd0000644000176200001440000000544613604173245015661 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_RLum.R \name{plot_RLum} \alias{plot_RLum} \title{General plot function for RLum S4 class objects} \usage{ plot_RLum(object, ...) } \arguments{ \item{object}{\linkS4class{RLum} (\strong{required}): S4 object of class \code{RLum}.
Optionally, a \link{list} containing objects of class \linkS4class{RLum} can be provided. In this case the function tries to plot every object in this list according to its \code{RLum} class.} \item{...}{further arguments and graphical parameters that will be passed to the specific plot functions. The only argument that is supported directly is \code{main} (setting the plot title). In contrast to the normal behaviour \code{main} can be here provided as \link{list} and the arguments in the list will be dispatched to the plots if the \code{object} is of type \code{list} as well.} } \value{ Returns a plot. } \description{ Function calls object specific plot functions for RLum S4 class objects. } \details{ The function provides a generalised access point for plotting specific \linkS4class{RLum} objects.\cr Depending on the input object, the corresponding plot function will be selected. Allowed arguments can be found in the documentations of each plot function. \tabular{lll}{ \strong{object} \tab \tab \strong{corresponding plot function} \cr \linkS4class{RLum.Data.Curve} \tab : \tab \link{plot_RLum.Data.Curve} \cr \linkS4class{RLum.Data.Spectrum} \tab : \tab \link{plot_RLum.Data.Spectrum}\cr \linkS4class{RLum.Data.Image} \tab : \tab \link{plot_RLum.Data.Image}\cr \linkS4class{RLum.Analysis} \tab : \tab \link{plot_RLum.Analysis}\cr \linkS4class{RLum.Results} \tab : \tab \link{plot_RLum.Results} } } \note{ The provided plot output depends on the input object.
} \section{Function version}{ 0.4.3 } \examples{ #load Example data data(ExampleData.CW_OSL_Curve, envir = environment()) #transform data.frame to RLum.Data.Curve object temp <- as(ExampleData.CW_OSL_Curve, "RLum.Data.Curve") #plot RLum object plot_RLum(temp) } \seealso{ \link{plot_RLum.Data.Curve}, \linkS4class{RLum.Data.Curve}, \link{plot_RLum.Data.Spectrum}, \linkS4class{RLum.Data.Spectrum}, \link{plot_RLum.Data.Image}, \linkS4class{RLum.Data.Image}, \link{plot_RLum.Analysis}, \linkS4class{RLum.Analysis}, \link{plot_RLum.Results}, \linkS4class{RLum.Results} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. plot_RLum(): General plot function for RLum S4 class objects. Function version 0.4.3. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{dplot} Luminescence/man/plot_RLum.Analysis.Rd0000644000176200001440000001122213604173245017430 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_RLum.Analysis.R \name{plot_RLum.Analysis} \alias{plot_RLum.Analysis} \title{Plot function for an RLum.Analysis S4 class object} \usage{ plot_RLum.Analysis( object, subset = NULL, nrows, ncols, abline = NULL, combine = FALSE, curve.transformation, plot.single = FALSE, ... ) } \arguments{ \item{object}{\linkS4class{RLum.Analysis} (\strong{required}): S4 object of class \code{RLum.Analysis}} \item{subset}{named \link{list} (\emph{optional}): subsets elements for plotting. 
The arguments in the named \link{list} will be directly passed to the function \link{get_RLum} (e.g., \code{subset = list(curveType = "measured")})} \item{nrows}{\link{integer} (\emph{optional}): sets number of rows for plot output, if nothing is set the function tries to find a value.} \item{ncols}{\link{integer} (\emph{optional}): sets number of columns for plot output, if nothing is set the function tries to find a value.} \item{abline}{\link{list} (\emph{optional}): allows to add ablines to the plot. Arguments are provided in a list and will be forwarded to the function \link{abline}, e.g., \code{list(v = c(10, 100))} adds two vertical lines at 10 and 100 to all plots. In contrast \verb{list(v = c(10), v = c(100))} adds a vertical line at 10 to the first and a vertical line at 100 to the 2nd plot.} \item{combine}{\link{logical} (\emph{with default}): allows to combine all \linkS4class{RLum.Data.Curve} objects in one single plot.} \item{curve.transformation}{\link{character} (\emph{optional}): allows transforming CW-OSL and CW-IRSL curves to pseudo-LM curves via transformation functions. Allowed values are: \code{CW2pLM}, \code{CW2pLMi}, \code{CW2pHMi} and \code{CW2pPMi}. See details.} \item{plot.single}{\link{logical} (\emph{with default}): global par settings are considered, normally this should end in one plot per page} \item{...}{further arguments and graphical parameters will be passed to the \code{plot} function. Supported arguments: \code{main}, \code{mtext}, \code{log}, \code{lwd}, \code{lty} \code{type}, \code{pch}, \code{col}, \code{norm}, \code{xlim},\code{ylim}, \code{xlab}, \code{ylab}... and for \code{combine = TRUE} also: \code{sub}, \code{legend}, \code{legend.text}, \code{legend.pos} (typical plus 'outside'), \code{legend.col}, \code{smooth}. All arguments can be provided as \code{vector} or \code{list} to gain in full control of all plot settings.} } \value{ Returns multiple plots.
} \description{ The function provides a standardised plot output for curve data of an RLum.Analysis S4 class object } \details{ The function produces a multiple plot output. A file output is recommended (e.g., \link{pdf}). \strong{curve.transformation} This argument allows transforming continuous wave (CW) curves to pseudo (linear) modulated curves. For the transformation, the functions of the package are used. Currently, it is not possible to pass further arguments to the transformation functions. The argument works only for \code{ltype} \code{OSL} and \code{IRSL}. Please note: The curve transformation within this functions works roughly, i.e. every IRSL or OSL curve is transformed, without considerung whether it is measured with the PMT or not! However, for a fast look it might be helpful. } \note{ Not all arguments available for \link{plot} will be passed and they partly do not behave in the way you might expect them to work. This function was designed to serve as an overview plot, if you want to have more control, extract the objects and plot them individually. } \section{Function version}{ 0.3.11 } \examples{ ##load data data(ExampleData.BINfileData, envir = environment()) ##convert values for position 1 temp <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) ##(1) plot (combine) TL curves in one plot plot_RLum.Analysis( temp, subset = list(recordType = "TL"), combine = TRUE, norm = TRUE, abline = list(v = c(110)) ) ##(2) same as example (1) but using ## the argument smooth = TRUE plot_RLum.Analysis( temp, subset = list(recordType = "TL"), combine = TRUE, norm = TRUE, smooth = TRUE, abline = list(v = c(110)) ) } \seealso{ \link{plot}, \link{plot_RLum}, \link{plot_RLum.Data.Curve} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. plot_RLum.Analysis(): Plot function for an RLum.Analysis S4 class object. Function version 0.3.11. 
In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{aplot} Luminescence/man/sTeve.Rd0000644000176200001440000000252413604173245015024 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/zzz.R \name{sTeve} \alias{sTeve} \title{sTeve - sophisticated tool for efficient data validation and evaluation} \usage{ sTeve(n_frames = 10, t_animation = 2, n.tree = 7, type) } \arguments{ \item{n_frames}{\link{integer} (\emph{with default}): n frames} \item{t_animation}{\link{integer} (\emph{with default}): t animation} \item{n.tree}{\link{integer} (\emph{with default}): how many trees do you want to cut?} \item{type}{\link{integer} (\emph{optional}): Make a decision: 1, 2 or 3} } \value{ Validates your data. } \description{ This function provides a sophisticated routine for comprehensive luminescence dating data analysis. } \details{ This amazing sophisticated function validates your data seriously. } \note{ This function should not be taken too seriously. } \examples{ ##no example available } \seealso{ \link{plot_KDE} } \author{ R Luminescence Team, 2012-2046 , RLum Developer Team} \section{How to cite}{ NA, NA, , , 2020. sTeve(): sTeve - sophisticated tool for efficient data validation and evaluation. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \keyword{manip} Luminescence/man/RLum.Data.Spectrum-class.Rd0000644000176200001440000001313113604173245020365 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/RLum.Data.Spectrum-class.R \docType{class} \name{RLum.Data.Spectrum-class} \alias{RLum.Data.Spectrum-class} \alias{show,RLum.Data.Spectrum-method} \alias{set_RLum,RLum.Data.Spectrum-method} \alias{get_RLum,RLum.Data.Spectrum-method} \alias{names_RLum,RLum.Data.Spectrum-method} \alias{bin_RLum.Data,RLum.Data.Spectrum-method} \title{Class \code{"RLum.Data.Spectrum"}} \usage{ \S4method{show}{RLum.Data.Spectrum}(object) \S4method{set_RLum}{RLum.Data.Spectrum}( class, originator, .uid, .pid, recordType = "Spectrum", curveType = NA_character_, data = matrix(), info = list() ) \S4method{get_RLum}{RLum.Data.Spectrum}(object, info.object) \S4method{names_RLum}{RLum.Data.Spectrum}(object) \S4method{bin_RLum.Data}{RLum.Data.Spectrum}(object, bin_size.col = 1, bin_size.row = 1) } \arguments{ \item{object}{\code{\link{get_RLum}}, \code{\link{names_RLum}} (\strong{required}): an object of class \linkS4class{RLum.Data.Spectrum}} \item{class}{\code{\link{set_RLum}}; \link{character} (\emph{automatic}): name of the \code{RLum} class to create.} \item{originator}{\link{character} (\emph{automatic}): contains the name of the calling function (the function that produces this object); can be set manually.} \item{.uid}{\code{\link{set_RLum}}; \link{character} (\emph{automatic}): sets an unique ID for this object using the internal C++ function \code{create_UID}.} \item{.pid}{\code{\link{set_RLum}}; \link{character} (\emph{with default}): option to provide a parent id for nesting at will.} \item{recordType}{\code{\link{set_RLum}}; \link{character}: record type (e.g. "OSL")} \item{curveType}{\code{\link{set_RLum}}; \link{character}: curve type (e.g. 
"predefined" or "measured")} \item{data}{\code{\link{set_RLum}}; \link{matrix}: raw curve data. If data is of type \code{RLum.Data.Spectrum}, this can be used to re-construct the object. If the object is reconstructed, \code{.uid}, \code{.pid} and \code{orginator} are always taken from the input object} \item{info}{\code{\link{set_RLum}} \link{list}: info elements} \item{info.object}{\code{\link{get_RLum}}; \link{character} (\emph{optional}): the name of the info object to be called} \item{bin_size.col}{\link{integer} (\emph{with default}): set number of channels used for each bin, e.g. \code{bin_size.col = 2} means that two channels are binned. Note: The function does not check the input, very large values mean a full column binning (a single sum)} \item{bin_size.row}{\link{integer} (\emph{with default}): set number of channels used for each bin, e.g. \code{bin_size.row = 2} means that two channels are binned. Note: The function does not check the input, very large values mean a full row binning (a single sum)} } \value{ \strong{\verb{[set_RLum]}} An object from the class \code{RLum.Data.Spectrum} \strong{\verb{[get_RLum]}} \enumerate{ \item A \link{matrix} with the spectrum values or \item only the info object if \code{info.object} was set. } \strong{\verb{[names_RLum]}} The names of the info objects \strong{\verb{[bin_RLum.Data]}} Same object as input, after applying the binning. } \description{ Class for representing luminescence spectra data (TL/OSL/RF). } \section{Methods (by generic)}{ \itemize{ \item \code{show}: Show structure of \code{RLum.Data.Spectrum} object \item \code{set_RLum}: Construction method for RLum.Data.Spectrum object. The slot info is optional and predefined as empty list by default \item \code{get_RLum}: Accessor method for RLum.Data.Spectrum object. The argument info.object is optional to directly access the info elements. 
If no info element name is provided, the raw curve data (matrix) will be returned \item \code{names_RLum}: Returns the names info elements coming along with this curve object \item \code{bin_RLum.Data}: Allows binning of RLum.Data.Spectrum data. Count values and values on the x-axis are summed-up; for wavalength/energy values the mean is calculated. }} \section{Slots}{ \describe{ \item{\code{recordType}}{Object of class \link{character} containing the type of the curve (e.g. "TL" or "OSL")} \item{\code{curveType}}{Object of class \link{character} containing curve type, allowed values are measured or predefined} \item{\code{data}}{Object of class \link{matrix} containing spectrum (count) values. Row labels indicate wavelength/pixel values, column labels are temperature or time values.} \item{\code{info}}{Object of class \link{list} containing further meta information objects} }} \note{ The class should only contain data for a single spectra data set. For additional elements the slot \code{info} can be used. Objects from this class are automatically created by, e.g., \link{read_XSYG2R} } \section{Objects from the Class}{ Objects can be created by calls of the form \code{set_RLum("RLum.Data.Spectrum", ...)}. } \section{Class version}{ 0.5.2 } \examples{ showClass("RLum.Data.Spectrum") ##show example data data(ExampleData.XSYG, envir = environment()) TL.Spectrum ##show data matrix get_RLum(TL.Spectrum) ##plot spectrum \dontrun{ plot_RLum(TL.Spectrum) } } \seealso{ \linkS4class{RLum}, \linkS4class{RLum.Data}, \link{plot_RLum} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. RLum.Data.Spectrum-class(): Class 'RLum.Data.Spectrum'. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \keyword{classes} \keyword{internal} Luminescence/man/Risoe.BINfileData2RLum.Analysis.Rd0000644000176200001440000000750113604173245021524 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Risoe.BINfileData2RLum.Analysis.R \name{Risoe.BINfileData2RLum.Analysis} \alias{Risoe.BINfileData2RLum.Analysis} \title{Convert Risoe.BINfileData object to an RLum.Analysis object} \usage{ Risoe.BINfileData2RLum.Analysis( object, pos = NULL, grain = NULL, run = NULL, set = NULL, ltype = NULL, dtype = NULL, protocol = "unknown", keep.empty = TRUE, txtProgressBar = FALSE ) } \arguments{ \item{object}{\linkS4class{Risoe.BINfileData} (\strong{required}): \code{Risoe.BINfileData} object} \item{pos}{\link{numeric} (\emph{optional}): position number of the \code{Risoe.BINfileData} object for which the curves are stored in the \code{RLum.Analysis} object. If \code{length(position)>1} a list of \code{RLum.Analysis} objects is returned. If nothing is provided every position will be converted. If the position is not valid \code{NA} is returned.} \item{grain}{\link{vector}, \link{numeric} (\emph{optional}): grain number from the measurement to limit the converted data set (e.g., \code{grain = c(1:48)}). Please be aware that this option may lead to unwanted effects, as the output is strictly limited to the choosen grain number for all position numbers} \item{run}{\link{vector}, \link{numeric} (\emph{optional}): run number from the measurement to limit the converted data set (e.g., \code{run = c(1:48)}).} \item{set}{\link{vector}, \link{numeric} (\emph{optional}): set number from the measurement to limit the converted data set (e.g., \code{set = c(1:48)}).} \item{ltype}{\link{vector}, \link{character} (\emph{optional}): curve type to limit the converted data. 
Commonly allowed values are: \code{IRSL}, \code{OSL}, \code{TL}, \code{RIR}, \code{RBR} and \code{USER} (see also \linkS4class{Risoe.BINfileData})} \item{dtype}{\link{vector}, \link{character} (\emph{optional}): data type to limit the converted data. Commonly allowed values are listed in \linkS4class{Risoe.BINfileData}} \item{protocol}{\link{character} (\emph{optional}): sets protocol type for analysis object. Value may be used by subsequent analysis functions.} \item{keep.empty}{\link{logical} (\emph{with default}): If \code{TRUE} (default) an \code{RLum.Analysis} object is returned even if it does not contain any records. Set to \code{FALSE} to discard all empty objects.} \item{txtProgressBar}{\link{logical} (\emph{with default}): enables or disables \link{txtProgressBar}.} } \value{ Returns an \linkS4class{RLum.Analysis} object. } \description{ Converts values from one specific position of a Risoe.BINfileData S4-class object to an RLum.Analysis object. } \details{ The \linkS4class{RLum.Analysis} object requires a set of curves for specific further protocol analyses. However, the \linkS4class{Risoe.BINfileData} usually contains a set of curves for different aliquots and different protocol types that may be mixed up. Therefore, a conversion is needed. } \note{ The \code{protocol} argument of the \linkS4class{RLum.Analysis} object is set to 'unknown' if not stated otherwise. } \section{Function version}{ 0.4.2 } \examples{ ##load data data(ExampleData.BINfileData, envir = environment()) ##convert values for position 1 Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos = 1) } \seealso{ \linkS4class{Risoe.BINfileData}, \linkS4class{RLum.Analysis}, \link{read_BIN2R} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. Risoe.BINfileData2RLum.Analysis(): Convert Risoe.BINfileData object to an RLum.Analysis object. Function version 0.4.2. 
In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{manip} Luminescence/man/get_RLum.Rd0000644000176200001440000000562513604173244015460 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/get_RLum.R \name{get_RLum} \alias{get_RLum} \alias{get_RLum,list-method} \alias{get_RLum,NULL-method} \title{General accessor function for RLum S4 class objects} \usage{ get_RLum(object, ...) \S4method{get_RLum}{list}(object, class = NULL, null.rm = FALSE, ...) \S4method{get_RLum}{`NULL`}(object, ...) } \arguments{ \item{object}{\linkS4class{RLum} (\strong{required}): S4 object of class \code{RLum} or an object of type \link{list} containing only objects of type \linkS4class{RLum}} \item{...}{further arguments that will be passed to the object specific methods. For furter details on the supported arguments please see the class documentation: \linkS4class{RLum.Data.Curve}, \linkS4class{RLum.Data.Spectrum}, \linkS4class{RLum.Data.Image}, \linkS4class{RLum.Analysis} and \linkS4class{RLum.Results}} \item{class}{\link{character} (\emph{optional}): allows to define the class that gets selected if applied to a list, e.g., if a list consists of different type of RLum-class objects, this arguments allows to make selection. If nothing is provided, all RLum-objects are treated.} \item{null.rm}{\link{logical} (\emph{with default}): option to get rid of empty and NULL objects} } \value{ Return is the same as input objects as provided in the list. } \description{ Function calls object-specific get functions for RLum S4 class objects. } \details{ The function provides a generalised access point for specific \linkS4class{RLum} objects.\cr Depending on the input object, the corresponding get function will be selected. 
Allowed arguments can be found in the documentations of the corresponding \linkS4class{RLum} class. } \section{Functions}{ \itemize{ \item \code{get_RLum,list-method}: Returns a list of \linkS4class{RLum} objects that had been passed to \link{get_RLum} \item \code{get_RLum,NULL-method}: Returns NULL }} \section{Function version}{ 0.3.3 } \examples{ ##Example based using data and from the calc_CentralDose() function ##load example data data(ExampleData.DeValues, envir = environment()) ##apply the central dose model 1st time temp1 <- calc_CentralDose(ExampleData.DeValues$CA1) ##get results and store them in a new object temp.get <- get_RLum(object = temp1) } \seealso{ \linkS4class{RLum.Data.Curve}, \linkS4class{RLum.Data.Image}, \linkS4class{RLum.Data.Spectrum}, \linkS4class{RLum.Analysis}, \linkS4class{RLum.Results} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. get_RLum(): General accessor function for RLum S4 class objects. Function version 0.3.3. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{utilities} Luminescence/man/scale_GammaDose.Rd0000644000176200001440000002575013604173245016750 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/scale_GammaDose.R \name{scale_GammaDose} \alias{scale_GammaDose} \title{Calculate the gamma dose deposited within a sample taking layer-to-layer variations in radioactivity into account (according to Aitken, 1985)} \usage{ scale_GammaDose( data, conversion_factors = c("Cresswelletal2019", "Guerinetal2011", "AdamiecAitken1998", "Liritzisetal2013")[1], fractional_gamma_dose = c("Aitken1985")[1], verbose = TRUE, plot = TRUE, plot_single = TRUE, ... 
) } \arguments{ \item{data}{\link{data.frame} (\strong{required}): A table containing all relevant information for each individual layer. The table must have the following named columns: \itemize{ \item \code{id} (\link{character}): an arbitrary id or name of each layer \item \code{thickness} (\link{numeric}): vertical extent of each layer in cm \item \code{sample_offset} (\link{logical}): distance of the sample in cm, \strong{measured from the BOTTOM OF THE TARGET LAYER}. Except for the target layer all values must be \code{NA}. \item \code{K} (\link{numeric}): K nuclide content in \% \item \code{K_se} (\link{numeric}): error on the K content \item \code{Th} (\link{numeric}): Th nuclide content in ppm \item \code{Th_se} (\link{numeric}): error on the Th content \item \code{U} (\link{numeric}): U nuclide content in ppm \item \code{U_se} (\link{numeric}): error on the U content \item \code{water_content} (\link{numeric}): water content of each layer in \% \item \code{water_content_se} (\link{numeric}): error on the water content \item \code{density} (\link{numeric}): bulk density of each layer in g/cm^-3 }} \item{conversion_factors}{\link{character} (\emph{optional}): The conversion factors used to calculate the dose rate from sediument nuclide contents. Valid options are: \itemize{ \item \code{"Cresswelletal2019"} (default) \item \code{"Liritzisetal2013"} \item \code{"Guerinetal2011"} \item \code{"AdamiecAitken1998"} }} \item{fractional_gamma_dose}{\link{character} (\emph{optional}): Factors to scale gamma dose rate values. 
Valid options are: \itemize{ \item \code{"Aitken1985"} (default): Table H1 in the appendix }} \item{verbose}{\link{logical} (\emph{optional}): Show or hide console output (defaults to \code{TRUE}).} \item{plot}{\link{logical} (\emph{optional}): Show or hide the plot (defaults to \code{TRUE}).} \item{plot_single}{\link{logical} (\emph{optional}): Show all plots in one panel (defaults to \code{TRUE}).} \item{...}{Further parameters passed to \link{barplot}.} } \value{ After performing the calculations the user is provided with different outputs. \enumerate{ \item The total gamma dose rate received by the sample (+/- uncertainties) as a print in the console. \item A plot showing the sediment sequence, the user input sample information and the contribution to total gamma dose rate. \item RLum Results. If the user wishes to save these results, writing a script to run the function and to save the results would look like this: }\preformatted{mydata <- read.table("c:/path/to/input/file.txt") results <- scale_GammaDose(mydata) table <- get_RLum(results) write.csv(table, "c:/path/to/results.csv") } -----------------------------------\cr \verb{[ NUMERICAL OUTPUT ]}\cr -----------------------------------\cr \strong{\code{RLum.Results}}-object \strong{slot:} \strong{\verb{@data}} \tabular{lll}{ \strong{Element} \tab \strong{Type} \tab \strong{Description}\cr \verb{$summary} \tab \code{data.frame} \tab summary of the model results \cr \verb{$data} \tab \code{data.frame} \tab the original input data \cr \verb{$dose_rates} \tab \code{list} \tab two \code{data.frames} for the scaled and infinite matrix dose rates \cr \verb{$tables} \tab \code{list} \tab several \code{data.frames} containing intermediate results \cr \verb{$args} \tab \code{character} \tab arguments of the call \cr \verb{$call} \tab \code{call} \tab the original function call \cr } \strong{slot:} \strong{\verb{@info}} Currently unused. 
------------------------\cr \verb{[ PLOT OUTPUT ]}\cr ------------------------\cr Three plots are produced: \itemize{ \item A visualisation of the provided sediment layer structure to quickly assess whether the data was provided and interpreted correctly. \item A scatter plot of the nuclide contents per layer (K, Th, U) as well as the water content. This may help to correlate the dose rate contribution of specific layers to the layer of interest. \item A barplot visualising the contribution of each layer to the total dose rate received by the sample in the target layer. } } \description{ This function calculates the gamma dose deposited in a luminescence sample taking into account layer-to-layer variations in sediment radioactivity . The function scales user inputs of Uranium, Thorium and Potassium based on input parameters for sediment density, water content and given layer thicknesses and distances to the sample. } \details{ \strong{User Input} To calculate the gamma dose which is deposited in a sample, the user needs to provide information on those samples influencing the luminescence sample. As a rule of thumb, all sediment layers within at least 30 cm radius from the luminescence sample taken should be taken into account when calculating the gamma dose rate. However, the actual range of gamma radiation might be different, depending on the emitting radioelement, the water content and the sediment density of each layer (Aitken, 1985). Therefore the user is advised to provide as much detail as possible and physically sensible. The function requires a \link{data.frame} that is to be structured in columns and rows, with samples listed in rows. The first column contains information on the layer/sample ID, the second on the thickness (in cm) of each layer, whilst column 3 should contain \code{NA} for all layers that are not sampled for OSL/TL. 
For the layer the OSL/TL sample was taken from a numerical value must be provided, which is the distance (in cm) measured from \strong{bottom} of the layer of interest. If the whole layer was sampled insert \code{0}. If the sample was taken from \emph{within} the layer, insert a numerical value \verb{>0}, which describes the distance from the middle of the sample to the bottom of the layer in cm. Columns 4 to 9 should contain radionuclide concentrations and their standard errors for Potassium (in \%), Thorium (in ppm) and Uranium (in ppm). Columns 10 and 11 give information on the water content and its uncertainty (standard error) in \%. The layer density (in g/cm3) should be given in column 12. No cell should be left blank. Please ensure to keep the column titles as given in the example dataset (\code{data('ExampleData.ScaleGammaDose')}, see examples). The user can decide which dose rate conversion factors should be used to calculate the gamma dose rates. The options are: \itemize{ \item \code{"Cresswelletal2019"} (Cresswell et al., in press; the default) \item \code{"Liritzisetal2013"} (Liritzis et al., 2013) \item \code{"Guerinetal2011"} (Guerin et al., 2011) \item \code{"AdamiecAitken1998"} (Adamiec and Aitken, 1998) } \strong{Water content} The water content provided by the user should be calculated according to: \deqn{ ( Wet weight [g] - Dry weight [g] ) / Dry weight [g] * 100 } \strong{Calculations} After converting the radionuclide concentrations into dose rates, the function will scale the dose rates based on the thickness of the layers, the distances to the sample, the water content and the density of the sediment. The calculations are based on Aitken (1985, Appendix H). 
As an example (equivalent to Aitken, 1985), assuming three layers of sediment, where \strong{L} is inert and positioned in between the infinite thick and equally active layers \strong{A} and \strong{B}, the dose in \strong{L} and \strong{B} due to \strong{A} is given by \deqn{ {1-f(x)}D_A } Where \code{x} is the distance into the inert medium, so \code{f(x)} is the weighted average fractional dose at \code{x} and \code{D_A} denotes that the dose is delivered by \strong{A}. \code{f(x)} is derived from table H1 (Aitken, 1985), when setting \code{z = x}. Consequently, the dose in \strong{A} and \strong{L} due to \strong{B} is given by \deqn{ {1 - f(t-x)}D_B } Here \code{t} is the thickness of \strong{L} and the other parameters are denoted as above, just for the dose being delivered by B. \code{f(t-x)} is derived from table H1 (Aitken, 1985), when setting \code{z} equal to \code{t-x}. Following this, the dose in \strong{L} delivered by \strong{A} and \strong{B} is given by \deqn{ {2 - f(x) - f(t-x)}D_{AB} } Since \strong{A} and \strong{B} are equally active \verb{D_\{AB\} = D_A = D_B}. The function uses the value of the fractional dose rate at the layer boundary to start the calculation for the next layer. This way, the function is able to scale the gamma dose rate accurately for distant layers when the density and water content is not constant for the entire section. } \note{ \strong{This function has BETA status. If possible, results should be} \strong{cross-checked.} } \section{Function version}{ 0.1.1 } \section{Acknowledgements}{ We thank Dr Ian Bailiff for the provision of an excel spreadsheet, which has been very helpful when writing this function. 
} \examples{ # Load example data data("ExampleData.ScaleGammaDose", envir = environment()) x <- ExampleData.ScaleGammaDose # Scale gamma dose rate results <- scale_GammaDose(data = x, conversion_factors = "Cresswelletal2019", fractional_gamma_dose = "Aitken1985", verbose = TRUE, plot = TRUE) get_RLum(results) } \section{How to cite}{ Riedesel, S., Autzen, M., Burow, C., 2020. scale_GammaDose(): Calculate the gamma dose deposited within a sample taking layer-to-layer variations in radioactivity into account (according to Aitken, 1985). Function version 0.1.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Aitken, M.J., 1985. Thermoluminescence Dating. Academic Press, London. Adamiec, G., Aitken, M.J., 1998. Dose-rate conversion factors: update. Ancient TL 16, 37-46. Cresswell, A. J., Carter, J., Sanderson, D. C. W., in press. Dose rate conversion parameters: Assessment of nuclear data. Radiation Measurements. Guerin, G., Mercier, N., Adamiec, G., 2011. Dose-rate conversion factors: update. Ancient TL, 29, 5-8. Liritzis, I., Stamoulis, K., Papachristodoulou, C., Ioannides, K., 2013. A re-evaluation of radiation dose-rate conversion factors. Mediterranean Archaeology and Archaeometry 13, 1-15. } \seealso{ \link{ExampleData.ScaleGammaDose}, \link{approx}, \link{barplot} } \author{ Svenja Riedesel, Aberystwyth University (United Kingdom) \cr Martin Autzen, DTU NUTECH Center for Nuclear Technologies (Denmark) \cr Christoph Burow, University of Cologne (Germany) \cr Based on an excel spreadsheet and accompanying macro written by Ian Bailiff. 
, RLum Developer Team} \keyword{datagen} Luminescence/man/ExampleData.FittingLM.Rd0000644000176200001440000000212413604173243017751 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \name{ExampleData.FittingLM} \alias{ExampleData.FittingLM} \alias{values.curve} \alias{values.curveBG} \title{Example data for fit_LMCurve() in the package Luminescence} \format{Two objects (data.frames) with two columns (time and counts).} \source{ \tabular{ll}{ Lab: \tab Luminescence Laboratory Bayreuth\cr Lab-Code: \tab BT900\cr Location: \tab Norway\cr Material: \tab Beach deposit, coarse grain quartz measured on aluminum discs on a Risoe TL/OSL DA-15 reader\cr } } \description{ Linearly modulated (LM) measurement data from a quartz sample from Norway including background measurement. Measurements carried out in the luminescence laboratory at the University of Bayreuth. } \examples{ ##show LM data data(ExampleData.FittingLM, envir = environment()) plot(values.curve,log="x") } \references{ Fuchs, M., Kreutzer, S., Fischer, M., Sauer, D., Soerensen, R., 2012. OSL and IRSL dating of raised beach sand deposits along the southeastern coast of Norway. Quaternary Geochronology, 10, 195-200. } Luminescence/man/get_Risoe.BINfileData.Rd0000644000176200001440000000300713604173244017713 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/get_Risoe.BINfileData.R \name{get_Risoe.BINfileData} \alias{get_Risoe.BINfileData} \title{General accessor function for RLum S4 class objects} \usage{ get_Risoe.BINfileData(object, ...) } \arguments{ \item{object}{\linkS4class{Risoe.BINfileData} (\strong{required}): S4 object of class \code{RLum}} \item{...}{further arguments that one might want to pass to the specific get function} } \value{ Return is the same as input objects as provided in the list } \description{ Function calls object-specific get functions for RisoeBINfileData S4 class objects. 
} \details{ The function provides a generalised access point for specific \linkS4class{Risoe.BINfileData} objects. \cr Depending on the input object, the corresponding get function will be selected. Allowed arguments can be found in the documentations of the corresponding \linkS4class{Risoe.BINfileData} class. } \section{Function version}{ 0.1.0 } \seealso{ \linkS4class{Risoe.BINfileData} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. get_Risoe.BINfileData(): General accessor function for RLum S4 class objects. Function version 0.1.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{utilities} Luminescence/man/GitHub-API.Rd0000644000176200001440000000564213604173244015532 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/github.R \name{GitHub-API} \alias{GitHub-API} \alias{github_commits} \alias{github_branches} \alias{github_issues} \title{GitHub API} \usage{ github_commits(user = "r-lum", repo = "luminescence", branch = "master", n = 5) github_branches(user = "r-lum", repo = "luminescence") github_issues(user = "r-lum", repo = "luminescence", verbose = TRUE) } \arguments{ \item{user}{\link{character} (\emph{with default}): GitHub user name (defaults to 'r-lum').} \item{repo}{\link{character} (\emph{with default}): name of a GitHub repository (defaults to 'luminescence').} \item{branch}{\link{character} (\emph{with default}): branch of a GitHub repository (defaults to 'master').} \item{n}{\link{integer} (\emph{with default}): number of commits returned (defaults to 5).} \item{verbose}{\link{logical} (\emph{with default}): print the output to the console (defaults to \code{TRUE}).} } \value{ \code{github_commits}: 
\link{data.frame} with columns: \tabular{ll}{ \verb{[ ,1]} \tab SHA \cr \verb{[ ,2]} \tab AUTHOR \cr \verb{[ ,3]} \tab DATE \cr \verb{[ ,4]} \tab MESSAGE \cr } \code{github_branches}: \link{data.frame} with columns: \tabular{ll}{ \verb{[ ,1]} \tab BRANCH \cr \verb{[ ,2]} \tab SHA \cr \verb{[ ,3]} \tab INSTALL \cr } \code{github_issues}: Nested \link{list} with \code{n} elements. Each issue element is a list with elements: \tabular{ll}{ \verb{[[1]]} \tab NUMBER \cr \verb{[[2]]} \tab TITLE \cr \verb{[[3]]} \tab BODY \cr \verb{[[4]]} \tab CREATED \cr \verb{[[5]]} \tab UPDATED \cr \verb{[[6]]} \tab CREATOR \cr \verb{[[7]]} \tab URL \cr \verb{[[8]]} \tab STATUS \cr } } \description{ R Interface to the GitHub API v3. } \details{ These functions can be used to query a specific repository hosted on GitHub. \cr \code{github_commits} lists the most recent \code{n} commits of a specific branch of a repository. \code{github_branches} can be used to list all current branches of a repository and returns the corresponding SHA hash as well as an installation command to install the branch in R via the 'devtools' package. \code{github_issues} lists all open issues for a repository in valid YAML. } \section{Function version}{ 0.1.0 } \examples{ \dontrun{ github_branches(user = "r-lum", repo = "luminescence") github_issues(user = "r-lum", repo = "luminescence") github_commits(user = "r-lum", repo = "luminescence", branch = "master", n = 10) } } \section{How to cite}{ Burow, C., 2020. GitHub-API(): GitHub API. Function version 0.1.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ GitHub Developer API v3. \url{https://developer.github.com/v3/}, last accessed: 10/01/2017. 
} \author{ Christoph Burow, University of Cologne (Germany) , RLum Developer Team} Luminescence/man/ExampleData.XSYG.Rd0000644000176200001440000000606713604173243016720 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \name{ExampleData.XSYG} \alias{ExampleData.XSYG} \alias{OSL.SARMeasurement} \alias{TL.Spectrum} \title{Example data for a SAR OSL measurement and a TL spectrum using a lexsyg reader} \format{\code{OSL.SARMeasurement}: SAR OSL measurement data The data contain two elements: (a) \verb{$Sequence.Header} is a \link{data.frame} with metadata from the measurement, (b) \code{Sequence.Object} contains an \linkS4class{RLum.Analysis} object for further analysis. \code{TL.Spectrum}: TL spectrum data \linkS4class{RLum.Data.Spectrum} object for further analysis. The spectrum was cleaned from cosmic-rays using the function \code{apply_CosmicRayRemoval}. Note that no quantum efficiency calibration was performed.} \source{ \strong{OSL.SARMeasurement} \tabular{ll}{ Lab: \tab Luminescence Laboratory Giessen\cr Lab-Code: \tab no code\cr Location: \tab not specified\cr Material: \tab Coarse grain quartz on steel cups on lexsyg research reader\cr Reference: \tab unpublished } \strong{TL.Spectrum} \tabular{ll}{ Lab: \tab Luminescence Laboratory Giessen\cr Lab-Code: \tab BT753\cr Location: \tab Dolni Vestonice/Czech Republic\cr Material: \tab Fine grain polymineral on steel cups on lexsyg research reader\cr Reference: \tab Fuchs et al., 2013 \cr Spectrum: \tab Integration time 19 s, channel time 20 s\cr Heating: \tab 1 K/s, up to 500 deg. C } } \description{ Example data from a SAR OSL measurement and a TL spectrum for package Luminescence imported from a Freiberg Instruments XSYG file using the function \link{read_XSYG2R}. 
} \section{Version}{ 0.1 } \examples{ ##show data data(ExampleData.XSYG, envir = environment()) ## ========================================= ##(1) OSL.SARMeasurement OSL.SARMeasurement ##show $Sequence.Object OSL.SARMeasurement$Sequence.Object ##grep OSL curves and plot the first curve OSLcurve <- get_RLum(OSL.SARMeasurement$Sequence.Object, recordType="OSL")[[1]] plot_RLum(OSLcurve) ## ========================================= ##(2) TL.Spectrum TL.Spectrum ##plot simple spectrum (2D) plot_RLum.Data.Spectrum(TL.Spectrum, plot.type="contour", xlim = c(310,750), ylim = c(0,300), bin.rows=10, bin.cols = 1) ##plot 3d spectrum (uncomment for usage) # plot_RLum.Data.Spectrum(TL.Spectrum, plot.type="persp", # xlim = c(310,750), ylim = c(0,300), bin.rows=10, # bin.cols = 1) } \references{ Unpublished data measured to serve as example data for that package. Location origin of sample BT753 is given here: Fuchs, M., Kreutzer, S., Rousseau, D.D., Antoine, P., Hatte, C., Lagroix, F., Moine, O., Gauthier, C., Svoboda, J., Lisa, L., 2013. The loess sequence of Dolni Vestonice, Czech Republic: A new OSL-based chronology of the Last Climatic Cycle. Boreas, 42, 664--677. 
} \seealso{ \link{read_XSYG2R}, \linkS4class{RLum.Analysis}, \linkS4class{RLum.Data.Spectrum}, \link{plot_RLum}, \link{plot_RLum.Analysis}, \link{plot_RLum.Data.Spectrum} } \keyword{datasets} Luminescence/man/RLum.Data.Image-class.Rd0000644000176200001440000001100013604173245017576 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/RLum.Data.Image-class.R \docType{class} \name{RLum.Data.Image-class} \alias{RLum.Data.Image-class} \alias{show,RLum.Data.Image-method} \alias{set_RLum,RLum.Data.Image-method} \alias{get_RLum,RLum.Data.Image-method} \alias{names_RLum,RLum.Data.Image-method} \title{Class \code{"RLum.Data.Image"}} \usage{ \S4method{show}{RLum.Data.Image}(object) \S4method{set_RLum}{RLum.Data.Image}( class, originator, .uid, .pid, recordType = "Image", curveType = NA_character_, data = raster::brick(raster::raster(matrix())), info = list() ) \S4method{get_RLum}{RLum.Data.Image}(object, info.object) \S4method{names_RLum}{RLum.Data.Image}(object) } \arguments{ \item{object}{\code{\link{get_RLum}}, \code{\link{names_RLum}} (\strong{required}): an object of class \linkS4class{RLum.Data.Image}} \item{class}{\code{\link{set_RLum}}; \link{character}: name of the \code{RLum} class to create} \item{originator}{\code{\link{set_RLum}}; \link{character} (\emph{automatic}): contains the name of the calling function (the function that produces this object); can be set manually.} \item{.uid}{\code{\link{set_RLum}}; \link{character} (\emph{automatic}): sets an unique ID for this object using the internal C++ function \code{create_UID}.} \item{.pid}{\code{\link{set_RLum}}; \link{character} (\emph{with default}): option to provide a parent id for nesting at will.} \item{recordType}{\code{\link{set_RLum}}; \link{character}: record type (e.g. "OSL")} \item{curveType}{\code{\link{set_RLum}}; \link{character}: curve type (e.g. "predefined" or "measured")} \item{data}{\code{\link{set_RLum}}; \link{matrix}: raw curve data. 
If data is of type \code{RLum.Data.Image} this can be used to re-construct the object, i.e. modified parameters except \code{.uid} and \code{.pid}. The rest will be subject to copy and paste unless provided.} \item{info}{\code{\link{set_RLum}}; \link{list}: info elements} \item{info.object}{\code{\link{get_RLum}}; \link{character}: name of the info object to returned} } \value{ \strong{\code{set_RLum}} Returns an object from class \code{RLum.Data.Image} \strong{\code{get_RLum}} \enumerate{ \item Returns the data object (\link[raster:brick]{raster::brick}) \item only the info object if \code{info.object} was set. } \strong{\code{names_RLum}} Returns the names of the info elements } \description{ Class for representing luminescence image data (TL/OSL/RF). Such data are for example produced by the function \link{read_SPE2R} } \section{Methods (by generic)}{ \itemize{ \item \code{show}: Show structure of \code{RLum.Data.Image} object \item \code{set_RLum}: Construction method for RLum.Data.Image object. The slot info is optional and predefined as empty list by default. \item \code{get_RLum}: Accessor method for RLum.Data.Image object. The argument \code{info.object} is optional to directly access the info elements. If no info element name is provided, the raw image data (\code{RasterBrick}) will be returned. \item \code{names_RLum}: Returns the names info elements coming along with this curve object }} \section{Slots}{ \describe{ \item{\code{recordType}}{Object of class \link{character} containing the type of the curve (e.g. "OSL image", "TL image")} \item{\code{curveType}}{Object of class \link{character} containing curve type, allowed values are measured or predefined} \item{\code{data}}{Object of class \link[raster:brick]{raster::brick} containing images (raster data).} \item{\code{info}}{Object of class \link{list} containing further meta information objects} }} \note{ The class should only contain data for a set of images. 
For additional elements the slot \code{info} can be used. } \section{Objects from the class}{ Objects can be created by calls of the form \code{set_RLum("RLum.Data.Image", ...)}. } \section{Class version}{ 0.4.2 } \examples{ showClass("RLum.Data.Image") ##create empty RLum.Data.Image object set_RLum(class = "RLum.Data.Image") } \seealso{ \linkS4class{RLum}, \linkS4class{RLum.Data}, \link{plot_RLum}, \link{read_SPE2R} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. RLum.Data.Image-class(): Class 'RLum.Data.Image'. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{classes} \keyword{internal} Luminescence/man/calc_ThermalLifetime.Rd0000644000176200001440000001265413604173244017777 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_ThermalLifetime.R \name{calc_ThermalLifetime} \alias{calc_ThermalLifetime} \title{Calculates the Thermal Lifetime using the Arrhenius equation} \usage{ calc_ThermalLifetime( E, s, T = 20, output_unit = "Ma", profiling = FALSE, profiling_config = NULL, verbose = TRUE, plot = TRUE, ... ) } \arguments{ \item{E}{\link{numeric} (\strong{required}): vector of trap depths in eV, if \code{profiling = TRUE} only the first two elements are considered} \item{s}{\link{numeric} (\strong{required}): vector of frequency factor in 1/s, if \code{profiling = TRUE} only the first two elements are considered} \item{T}{\link{numeric} (\emph{with default}): temperature in deg. C for which the lifetime(s) will be calculted. 
A vector can be provided.} \item{output_unit}{\link{character} (\emph{with default}): output unit of the calculated lifetimes, accepted entries are: \code{"Ma"}, \code{"ka"}, \code{"a"}, \code{"d"}, \code{"h"}, \code{"min"}, \code{"s"}} \item{profiling}{\link{logical} (\emph{with default}): this option allows one to estimate uncertainties based on given E and s parameters and their corresponding standard error (cf. details and examples section)} \item{profiling_config}{\link{list} (\emph{optional}): allows one to set configuration parameters used for the profiling (and only have an effect here). Supported parameters are: \itemize{ \item \code{n} (number of MC runs), \item \code{E.distribution} (distribution used for the resampling for E) and \item \code{s.distribution} (distribution used for the resampling for s). } Currently only the normal distribution is supported (e.g., \code{profiling_config = list(E.distribution = "norm")})} \item{verbose}{\link{logical}: enables/disables verbose mode} \item{plot}{\link{logical}: enables/disables output plot, currently only in combination with \code{profiling = TRUE}.} \item{...}{further arguments that can be passed in combination with the plot output. Standard plot parameters are supported (\link{plot.default})} } \value{ A \linkS4class{RLum.Results} object is returned along with a plot (for \code{profiling = TRUE}). The output object contains the following slots: \strong{\verb{@data}} \tabular{lll}{ \strong{Object} \tab \strong{Type} \tab \strong{Description} \cr \code{lifetimes} \tab \link{array} or \link{numeric} \tab calculated lifetimes \cr \code{profiling_matrix} \tab \link{matrix} \tab profiling matrix used for the MC runs } \strong{\verb{@info}} \tabular{lll}{ \strong{Object} \tab \strong{Type} \tab \strong{Description} \cr \code{call} \tab \code{call} \tab the original function call } } \description{ The function calculates the thermal lifetime of charges for given E (in eV), s (in 1/s) and T (in deg. C.) parameters. 
The function can be used in two operational modes: } \details{ \strong{Mode 1 \code{(profiling = FALSE)}} An arbitrary set of input parameters (E, s, T) can be provided and the function calculates the thermal lifetimes using the Arrhenius equation for all possible combinations of these input parameters. An array with 3-dimensions is returned that can be used for further analyses or graphical output (see example 1) \strong{Mode 2 \code{(profiling = TRUE)}} This mode tries to profile the variation of the thermal lifetime for a chosen temperature by accounting for the provided E and s parameters and their corresponding standard errors, e.g., \code{E = c(1.600, 0.001)} The calculation based on a Monte Carlo simulation, where values are sampled from a normal distribution (for E and s). \strong{Used equation (Arrhenius equation)} \deqn{\tau = 1/s exp(E/kT)} where: \eqn{\tau} in s as the mean time an electron spends in the trap for a given \eqn{T}, \eqn{E} trap depth in eV, \eqn{s} the frequency factor in 1/s, \eqn{T} the temperature in K and \eqn{k} the Boltzmann constant in eV/K (cf. Furetta, 2010). } \note{ The profiling is currently based on resampling from a normal distribution, this distribution assumption might be, however, not valid for given E and s paramters. } \section{Function version}{ 0.1.0 } \examples{ ##EXAMPLE 1 ##calculation for two trap-depths with similar frequency factor for different temperatures E <- c(1.66, 1.70) s <- 1e+13 T <- 10:20 temp <- calc_ThermalLifetime( E = E, s = s, T = T, output_unit = "Ma" ) contour(x = E, y = T, z = temp$lifetimes[1,,], ylab = "Temperature [\u00B0C]", xlab = "Trap depth [eV]", main = "Thermal Lifetime Contour Plot" ) mtext(side = 3, "(values quoted in Ma)") ##EXAMPLE 2 ##profiling of thermal life time for E and s and their standard error E <- c(1.600, 0.003) s <- c(1e+13,1e+011) T <- 20 calc_ThermalLifetime( E = E, s = s, T = T, profiling = TRUE, output_unit = "Ma" ) } \section{How to cite}{ Kreutzer, S., 2020. 
calc_ThermalLifetime(): Calculates the Thermal Lifetime using the Arrhenius equation. Function version 0.1.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Furetta, C., 2010. Handbook of Thermoluminescence, Second Edition. ed. World Scientific. } \seealso{ \link[graphics:matplot]{graphics::matplot}, \link[stats:Normal]{stats::rnorm}, \link{get_RLum} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \keyword{datagen} Luminescence/man/RLum.Analysis-class.Rd0000644000176200001440000002036713604173245017507 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/RLum.Analysis-class.R \docType{class} \name{RLum.Analysis-class} \alias{RLum.Analysis-class} \alias{show,RLum.Analysis-method} \alias{set_RLum,RLum.Analysis-method} \alias{get_RLum,RLum.Analysis-method} \alias{structure_RLum,RLum.Analysis-method} \alias{length_RLum,RLum.Analysis-method} \alias{names_RLum,RLum.Analysis-method} \alias{smooth_RLum,RLum.Analysis-method} \title{Class \code{"RLum.Analysis"}} \usage{ \S4method{show}{RLum.Analysis}(object) \S4method{set_RLum}{RLum.Analysis}( class, originator, .uid, .pid, protocol = NA_character_, records = list(), info = list() ) \S4method{get_RLum}{RLum.Analysis}( object, record.id = NULL, recordType = NULL, curveType = NULL, RLum.type = NULL, protocol = "UNKNOWN", get.index = NULL, drop = TRUE, recursive = TRUE, info.object = NULL, subset = NULL, env = parent.frame(2) ) \S4method{structure_RLum}{RLum.Analysis}(object, fullExtent = FALSE) \S4method{length_RLum}{RLum.Analysis}(object) \S4method{names_RLum}{RLum.Analysis}(object) \S4method{smooth_RLum}{RLum.Analysis}(object, ...) 
} \arguments{ \item{object}{\code{\link{get_RLum}}: \code{\link{names_RLum}}, \code{\link{length_RLum}}, \code{\link{structure_RLum}} (\strong{required}): an object of class \linkS4class{RLum.Analysis}} \item{class}{\code{\link{set_RLum}} \link{character} (\strong{required}): name of the \code{RLum} class to be created} \item{originator}{\code{\link{set_RLum}} \link{character} (\emph{automatic}): contains the name of the calling function (the function that produces this object); can be set manually.} \item{.uid}{\code{\link{set_RLum}} \link{character} (\emph{automatic}): sets an unique ID for this object using the internal C++ function \code{create_UID}.} \item{.pid}{\code{\link{set_RLum}} \link{character} (\emph{with default}): option to provide a parent id for nesting at will.} \item{protocol}{\code{\link{set_RLum}} \link{character} (\emph{optional}): sets protocol type for analysis object. Value may be used by subsequent analysis functions.} \item{records}{\code{\link{set_RLum}} \link{list} (\strong{required}): list of \linkS4class{RLum.Analysis} objects} \item{info}{\code{\link{set_RLum}} \link{list} (\emph{optional}): a list containing additional info data for the object \strong{\code{set_RLum}}: Returns an \linkS4class{RLum.Analysis} object.} \item{record.id}{\code{\link{get_RLum}}: \link{numeric} or \link{logical} (\emph{optional}): IDs of specific records. If of type \code{logical} the entire id range is assumed and \code{TRUE} and \code{FALSE} indicates the selection.} \item{recordType}{\code{\link{get_RLum}}: \link{character} (\emph{optional}): record type (e.g., "OSL"). Can be also a vector, for multiple matching, e.g., \code{recordType = c("OSL", "IRSL")}} \item{curveType}{\code{\link{get_RLum}}: \link{character} (\emph{optional}): curve type (e.g. "predefined" or "measured")} \item{RLum.type}{\code{\link{get_RLum}}: \link{character} (\emph{optional}): RLum object type. 
Defaults to "RLum.Data.Curve" and "RLum.Data.Spectrum".} \item{get.index}{\code{\link{get_RLum}}: \link{logical} (\emph{optional}): return a numeric vector with the index of each element in the RLum.Analysis object.} \item{drop}{\code{\link{get_RLum}}: \link{logical} (\emph{with default}): coerce to the next possible layer (which are \code{RLum.Data}-objects), \code{drop = FALSE} keeps the original \code{RLum.Analysis}} \item{recursive}{\code{\link{get_RLum}}: \link{logical} (\emph{with default}): if \code{TRUE} (the default) and the result of the 'get_RLum' request is a single object this object will be unlisted, means only the object itself and no list containing exactly one object is returned. Mostly this makes things easier, however, if this method is used within a loop this might be undesired.} \item{info.object}{\code{\link{get_RLum}}: \link{character} (\emph{optional}): name of the wanted info element} \item{subset}{\code{\link{get_RLum}}: \link{expression} (\emph{optional}): logical expression indicating elements or rows to keep: missing values are taken as false. This argument takes precedence over all other arguments, meaning they are not considered when subsetting the object.} \item{env}{\code{\link{get_RLum}}: \link{environment} (\emph{with default}): An environment passed to \link{eval} as the enclosure. This argument is only relevant when subsetting the object and should not be used manually.} \item{fullExtent}{\link{structure_RLum}; \link{logical} (\emph{with default}): extents the returned \code{data.frame} to its full extent, i.e. all info elements are part of the return as well. 
The default value is \code{FALSE} as the data frame might become rather big.} \item{...}{further arguments passed to underlying methods} } \value{ \strong{\code{get_RLum}}: Returns: \enumerate{ \item \link{list} of \linkS4class{RLum.Data} objects or \item Single \linkS4class{RLum.Data} object, if only one object is contained and \code{recursive = FALSE} or \item \linkS4class{RLum.Analysis} ojects for \code{drop = FALSE} } \strong{\code{structure_RLum}}: Returns \linkS4class{data.frame} showing the structure. \strong{\code{length_RLum}} Returns the number records in this object. \strong{\code{names_RLum}} Returns the names of the record types (\code{recordType}) in this object. \strong{\code{smooth_RLum}} Same object as input, after smoothing } \description{ Object class to represent analysis data for protocol analysis, i.e. all curves, spectra etc. from one measurements. Objects from this class are produced, by e.g. \link{read_XSYG2R}, \link{read_Daybreak2R} } \section{Methods (by generic)}{ \itemize{ \item \code{show}: Show structure of \code{RLum.Analysis} object \item \code{set_RLum}: Construction method for \linkS4class{RLum.Analysis} objects. \item \code{get_RLum}: Accessor method for RLum.Analysis object. The slots record.id, recordType, curveType and RLum.type are optional to allow for records limited by their id (list index number), their record type (e.g. recordType = "OSL") or object type. Example: curve type (e.g. curveType = "predefined" or curveType ="measured") The selection of a specific RLum.type object superimposes the default selection. Currently supported objects are: RLum.Data.Curve and RLum.Data.Spectrum \item \code{structure_RLum}: Method to show the structure of an \linkS4class{RLum.Analysis} object. \item \code{length_RLum}: Returns the length of the object, i.e., number of stored records. 
\item \code{names_RLum}: Returns the names of the \linkS4class{RLum.Data} objects objects (same as shown with the show method) \item \code{smooth_RLum}: Smoothing of \code{RLum.Data} objects contained in this \code{RLum.Analysis} object \link[zoo:rollmean]{zoo::rollmean} or \link[zoo:rollmean]{zoo::rollmedian}. In particular the internal function \code{.smoothing} is used. }} \section{Slots}{ \describe{ \item{\code{protocol}}{Object of class \link{character} describing the applied measurement protocol} \item{\code{records}}{Object of class \link{list} containing objects of class \linkS4class{RLum.Data}} }} \note{ The method \link{structure_RLum} is currently just avaiblable for objects containing \linkS4class{RLum.Data.Curve}. } \section{Objects from the Class}{ Objects can be created by calls of the form \code{set_RLum("RLum.Analysis", ...)}. } \section{Class version}{ 0.4.15 } \examples{ showClass("RLum.Analysis") ##set empty object set_RLum(class = "RLum.Analysis") ###use example data ##load data data(ExampleData.RLum.Analysis, envir = environment()) ##show curves in object get_RLum(IRSAR.RF.Data) ##show only the first object, but by keeping the object get_RLum(IRSAR.RF.Data, record.id = 1, drop = FALSE) } \seealso{ \link{Risoe.BINfileData2RLum.Analysis}, \linkS4class{Risoe.BINfileData}, \linkS4class{RLum} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. RLum.Analysis-class(): Class 'RLum.Analysis'. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \keyword{classes} \keyword{internal} \keyword{methods} Luminescence/man/read_Daybreak2R.Rd0000644000176200001440000000474013604173245016661 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/read_Daybreak2R.R \name{read_Daybreak2R} \alias{read_Daybreak2R} \title{Import measurement data produced by a Daybreak TL/OSL reader into R} \usage{ read_Daybreak2R(file, raw = FALSE, verbose = TRUE, txtProgressBar = TRUE) } \arguments{ \item{file}{\link{character} or \link{list} (\strong{required}): path and file name of the file to be imported. Alternatively a list of file names can be provided or just the path a folder containing measurement data. Please note that the specific, common, file extension (txt) is likely leading to function failures during import when just a path is provided.} \item{raw}{\link{logical} (\emph{with default}): if the input is a DAT-file (binary) a \link[data.table:data.table]{data.table::data.table} instead of the \linkS4class{RLum.Analysis} object can be returned for debugging purposes.} \item{verbose}{\link{logical} (\emph{with default}): enables or disables terminal feedback} \item{txtProgressBar}{\link{logical} (\emph{with default}): enables or disables \link{txtProgressBar}.} } \value{ A list of \linkS4class{RLum.Analysis} objects (each per position) is provided. } \description{ Import a TXT-file (ASCII file) or a DAT-file (binary file) produced by a Daybreak reader into R. The import of the DAT-files is limited to the file format described for the software TLAPLLIC v.3.2 used for a Daybreak, model 1100. } \note{ \strong{\verb{[BETA VERSION]}} This function still needs to be tested properly. In particular the function has underwent only very rough rests using a few files. 
} \section{Function version}{ 0.3.1 } \examples{ \dontrun{ file <- file.choose() temp <- read_Daybreak2R(file) } } \seealso{ \linkS4class{RLum.Analysis}, \linkS4class{RLum.Data.Curve}, \link[data.table:data.table]{data.table::data.table} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr Anotine Zink, C2RMF, Palais du Louvre, Paris (France) The ASCII-file import is based on a suggestion by Willian Amidon and Andrew Louis Gorin , RLum Developer Team} \section{How to cite}{ Kreutzer, S., Zink, A., 2020. read_Daybreak2R(): Import measurement data produced by a Daybreak TL/OSL reader into R. Function version 0.3.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{IO} Luminescence/man/ExampleData.portableOSL.Rd0000644000176200001440000000150013604173243020277 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \docType{data} \name{ExampleData.portableOSL} \alias{ExampleData.portableOSL} \title{Example portable OSL curve data for the package Luminescence} \source{ \strong{ExampleData.portableOSL} \tabular{ll}{ Lab: \tab Cologne Luminescence Laboratory\cr Lab-Code: \tab \code{none} \cr Location: \tab Nievenheim/Germany\cr Material: \tab Fine grain quartz \cr Reference: \tab unpublished data } } \description{ A \code{list} of \linkS4class{RLum.Analysis} objects, each containing the same number of \linkS4class{RLum.Data.Curve} objects representing individual OSL, IRSL and dark count measurements of a sample. 
} \examples{ data(ExampleData.portableOSL, envir = environment()) plot_RLum(ExampleData.portableOSL) } \keyword{datasets} Luminescence/man/calc_Statistics.Rd0000644000176200001440000000620713604173244017053 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_Statistics.R \name{calc_Statistics} \alias{calc_Statistics} \title{Function to calculate statistic measures} \usage{ calc_Statistics( data, weight.calc = "square", digits = NULL, n.MCM = NULL, na.rm = TRUE ) } \arguments{ \item{data}{\link{data.frame} or \linkS4class{RLum.Results} object (\strong{required}): for \link{data.frame} two columns: De (\code{data[,1]}) and De error (\code{data[,2]}). To plot several data sets in one plot the data sets must be provided as \code{list}, e.g. \code{list(data.1, data.2)}.} \item{weight.calc}{\link{character}: type of weight calculation. One out of \code{"reciprocal"} (weight is 1/error), \code{"square"} (weight is 1/error^2). Default is \code{"square"}.} \item{digits}{\link{integer} (\emph{with default}): round numbers to the specified digits. If digits is set to \code{NULL} nothing is rounded.} \item{n.MCM}{\link{numeric} (\emph{with default}): number of samples drawn for Monte Carlo-based statistics. \code{NULL} (the default) disables MC runs.} \item{na.rm}{\link{logical} (\emph{with default}): indicating whether \code{NA} values should be stripped before the computation proceeds.} } \value{ Returns a list with weighted and unweighted statistic measures. } \description{ This function calculates a number of descriptive statistics for estimates with a given standard error (SE), most fundamentally using error-weighted approaches. } \details{ The option to use Monte Carlo Methods (\code{n.MCM}) allows calculating all descriptive statistics based on random values. The distribution of these random values is based on the Normal distribution with \code{De} values as means and \code{De_error} values as one standard deviation. 
Increasing the number of MCM-samples linearly increases computation time. On a Lenovo X230 machine evaluation of 25 Aliquots with n.MCM = 1000 takes 0.01 s, with n = 100000, ca. 1.65 s. It might be useful to work with logarithms of these values. See Dietze et al. (2016, Quaternary Geochronology) and the function \link{plot_AbanicoPlot} for details. } \section{Function version}{ 0.1.7 } \examples{ ## load example data data(ExampleData.DeValues, envir = environment()) ## show a rough plot of the data to illustrate the non-normal distribution plot_KDE(ExampleData.DeValues$BT998) ## calculate statistics and show output str(calc_Statistics(ExampleData.DeValues$BT998)) \dontrun{ ## now the same for 10000 normal distributed random numbers with equal errors x <- as.data.frame(cbind(rnorm(n = 10^5, mean = 0, sd = 1), rep(0.001, 10^5))) ## note the congruent results for weighted and unweighted measures str(calc_Statistics(x)) } } \author{ Michael Dietze, GFZ Potsdam (Germany) , RLum Developer Team} \section{How to cite}{ Dietze, M., 2020. calc_Statistics(): Function to calculate statistic measures. Function version 0.1.7. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{datagen} Luminescence/man/calc_AverageDose.Rd0000644000176200001440000001277113604173244017111 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_AverageDose.R \name{calc_AverageDose} \alias{calc_AverageDose} \title{Calculate the Average Dose and the dose rate dispersion} \usage{ calc_AverageDose( data, sigma_m = NULL, Nb_BE = 500, na.rm = TRUE, plot = TRUE, verbose = TRUE, ... 
) } \arguments{ \item{data}{\linkS4class{RLum.Results} or \link{data.frame} (\strong{required}): for \link{data.frame}: two columns with De \code{(data[,1])} and De error \code{(values[,2])}} \item{sigma_m}{\link{numeric} (\strong{required}): the overdispersion resulting from a dose recovery experiment, i.e. when all grains have received the same dose. Indeed in such a case, any overdispersion (i.e. dispersion on top of analytical uncertainties) is, by definition, an unrecognised measurement uncertainty.} \item{Nb_BE}{\link{integer} (\emph{with default}): sample size used for the bootstrapping} \item{na.rm}{\link{logical} (\emph{with default}): exclude NA values from the data set prior to any further operation.} \item{plot}{\link{logical} (\emph{with default}): enables/disables plot output} \item{verbose}{\link{logical} (\emph{with default}): enables/disables terminal output} \item{...}{further arguments that can be passed to \link[graphics:hist]{graphics::hist}. As three plots are returned all arguments need to be provided as \link{list}, e.g., \code{main = list("Plot 1", "Plot 2", "Plot 3")}. Note: not all arguments of \code{hist} are supported, but the output of \code{hist} is returned and can be used of own plots. \cr Further supported arguments: \code{mtext} (\link{character}), \code{rug} (\code{TRUE/FALSE}).} } \value{ The function returns numerical output and an (\emph{optional}) plot. -----------------------------------\cr \verb{[ NUMERICAL OUTPUT ]} \cr -----------------------------------\cr \strong{\code{RLum.Results}}-object\cr \strong{slot:} \strong{\verb{@data}} \cr \verb{[.. 
$summary : data.frame]}\cr \tabular{lll}{ \strong{Column} \tab \strong{Type} \tab \strong{Description}\cr AVERAGE_DOSE \tab \link{numeric} \tab the obtained averge dose\cr AVERAGE_DOSE.SE \tab \link{numeric} \tab the average dose error \cr SIGMA_D \tab \link{numeric}\tab sigma \cr SIGMA_D.SE \tab \link{numeric}\tab standard error of the sigma \cr IC_AVERAGE_DOSE.LEVEL \tab \link{character}\tab confidence level average dose\cr IC_AVERAGE_DOSE.LOWER \tab \link{character}\tab lower quantile of average dose \cr IC_AVERAGE_DOSE.UPPER \tab \link{character}\tab upper quantile of average dose\cr IC_SIGMA_D.LEVEL \tab \link{integer}\tab confidence level sigma\cr IC_SIGMA_D.LOWER \tab \link{character}\tab lower sigma quantile\cr IC_SIGMA_D.UPPER \tab \link{character}\tab upper sigma quantile\cr L_MAX \tab \link{character}\tab maximum likelihood value } \verb{[.. $dstar : matrix]} \cr Matrix with bootstrap values\cr \verb{[.. $hist : list]}\cr Object as produced by the function histogram ------------------------\cr \verb{[ PLOT OUTPUT ]}\cr ------------------------\cr The function returns two different plot panels. (1) An abanico plot with the dose values (2) A histogram panel comprising 3 histograms with the equivalent dose and the bootstrapped average dose and the sigma values. } \description{ This functions calculates the Average Dose and their extrinsic dispersion and estimates the standard errors by bootstrapping based on the Average Dose Model by Guerin et al., 2017 } \details{ \strong{\code{sigma_m}}\cr The program requires the input of a known value of sigma_m, which corresponds to the intrinsic overdispersion, as determined by a dose recovery experiment. Then the dispersion in doses (sigma_d) will be that over and above sigma_m (and individual uncertainties sigma_wi). } \note{ This function has beta status! 
} \section{Function version}{ 0.1.4 } \examples{ ##Example 01 using package example data ##load example data data(ExampleData.DeValues, envir = environment()) ##calculate Average dose ##(use only the first 56 values here) AD <- calc_AverageDose(ExampleData.DeValues$CA1[1:56,], sigma_m = 0.1) ##plot De and set Average dose as central value plot_AbanicoPlot( data = ExampleData.DeValues$CA1[1:56,], z.0 = AD$summary$AVERAGE_DOSE) } \section{How to cite}{ Christophe, C., Philippe, A., Guerin, G., Kreutzer, S., 2020. calc_AverageDose(): Calculate the Average Dose and the dose rate dispersion. Function version 0.1.4. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Guerin, G., Christophe, C., Philippe, A., Murray, A.S., Thomsen, K.J., Tribolo, C., Urbanova, P., Jain, M., Guibert, P., Mercier, N., Kreutzer, S., Lahaye, C., 2017. Absorbed dose, equivalent dose, measured dose rates, and implications for OSL age estimates: Introducing the Average Dose Model. Quaternary Geochronology 1-32. doi:10.1016/j.quageo.2017.04.002 \strong{Further reading}\cr Efron, B., Tibshirani, R., 1986. Bootstrap Methods for Standard Errors, Confidence Intervals, and Other Measures of Statistical Accuracy. Statistical Science 1, 54-75. 
} \seealso{ \link{read.table}, \link[graphics:hist]{graphics::hist} } \author{ Claire Christophe, IRAMAT-CRP2A, Universite de Nantes (France), Anne Philippe, Universite de Nantes, (France), Guillaume Guerin, IRAMAT-CRP2A, Universite Bordeaux Montaigne, (France), Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne, (France) , RLum Developer Team} \keyword{datagen} Luminescence/man/methods_RLum.Rd0000644000176200001440000001706513604173243016344 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/methods_RLum.R \name{methods_RLum} \alias{methods_RLum} \alias{plot.list} \alias{plot.RLum.Results} \alias{plot.RLum.Analysis} \alias{plot.RLum.Data.Curve} \alias{plot.RLum.Data.Spectrum} \alias{plot.RLum.Data.Image} \alias{plot.Risoe.BINfileData} \alias{hist.RLum.Results} \alias{hist.RLum.Data.Image} \alias{hist.RLum.Data.Curve} \alias{hist.RLum.Analysis} \alias{summary.RLum.Results} \alias{summary.RLum.Analysis} \alias{summary.RLum.Data.Image} \alias{summary.RLum.Data.Curve} \alias{subset.Risoe.BINfileData} \alias{subset.RLum.Analysis} \alias{bin.RLum.Data.Curve} \alias{bin.RLum.Data.Spectrum} \alias{length.RLum.Results} \alias{length.RLum.Analysis} \alias{length.RLum.Data.Curve} \alias{length.Risoe.BINfileData} \alias{dim.RLum.Data.Curve} \alias{dim.RLum.Data.Spectrum} \alias{rep.RLum} \alias{names.RLum.Data.Curve} \alias{names.RLum.Data.Spectrum} \alias{names.RLum.Data.Image} \alias{names.RLum.Analysis} \alias{names.RLum.Results} \alias{names.Risoe.BINfileData} \alias{row.names.RLum.Data.Spectrum} \alias{as.data.frame.RLum.Data.Curve} \alias{as.data.frame.RLum.Data.Spectrum} \alias{as.list.RLum.Results} \alias{as.list.RLum.Data.Curve} \alias{as.list.RLum.Analysis} \alias{as.matrix.RLum.Data.Curve} \alias{as.matrix.RLum.Data.Spectrum} \alias{is.RLum} \alias{is.RLum.Data} \alias{is.RLum.Data.Curve} \alias{is.RLum.Data.Spectrum} \alias{is.RLum.Data.Image} \alias{is.RLum.Analysis} \alias{is.RLum.Results} 
\alias{merge.RLum} \alias{unlist.RLum.Analysis} \alias{+.RLum.Data.Curve} \alias{-.RLum.Data.Curve} \alias{*.RLum.Data.Curve} \alias{/.RLum.Data.Curve} \alias{[.RLum.Data.Curve} \alias{[.RLum.Data.Spectrum} \alias{[.RLum.Data.Image} \alias{[.RLum.Analysis} \alias{[.RLum.Results} \alias{[<-.RLum.Data.Curve} \alias{[[.RLum.Analysis} \alias{[[.RLum.Results} \alias{$.RLum.Data.Curve} \alias{$.RLum.Analysis} \alias{$.RLum.Results} \title{methods_RLum} \usage{ \method{plot}{list}(x, y, ...) \method{plot}{RLum.Results}(x, y, ...) \method{plot}{RLum.Analysis}(x, y, ...) \method{plot}{RLum.Data.Curve}(x, y, ...) \method{plot}{RLum.Data.Spectrum}(x, y, ...) \method{plot}{RLum.Data.Image}(x, y, ...) \method{plot}{Risoe.BINfileData}(x, y, ...) \method{hist}{RLum.Results}(x, ...) \method{hist}{RLum.Data.Image}(x, ...) \method{hist}{RLum.Data.Curve}(x, ...) \method{hist}{RLum.Analysis}(x, ...) \method{summary}{RLum.Results}(object, ...) \method{summary}{RLum.Analysis}(object, ...) \method{summary}{RLum.Data.Image}(object, ...) \method{summary}{RLum.Data.Curve}(object, ...) \method{subset}{Risoe.BINfileData}(x, subset, records.rm = TRUE, ...) \method{subset}{RLum.Analysis}(x, subset = NULL, ...) bin.RLum.Data.Curve(x, bin_size = 2, ...) bin.RLum.Data.Spectrum(x, bin_size.row = 1, bin_size.col = 1, ...) \method{length}{RLum.Results}(x, ...) \method{length}{RLum.Analysis}(x, ...) \method{length}{RLum.Data.Curve}(x, ...) \method{length}{Risoe.BINfileData}(x, ...) \method{dim}{RLum.Data.Curve}(x) \method{dim}{RLum.Data.Spectrum}(x) \method{rep}{RLum}(x, ...) \method{names}{RLum.Data.Curve}(x, ...) \method{names}{RLum.Data.Spectrum}(x, ...) \method{names}{RLum.Data.Image}(x, ...) \method{names}{RLum.Analysis}(x, ...) \method{names}{RLum.Results}(x, ...) \method{names}{Risoe.BINfileData}(x) \method{row.names}{RLum.Data.Spectrum}(x, ...) \method{as.data.frame}{RLum.Data.Curve}(x, row.names = NULL, optional = FALSE, ...) 
\method{as.data.frame}{RLum.Data.Spectrum}(x, row.names = NULL, optional = FALSE, ...) \method{as.list}{RLum.Results}(x, ...) \method{as.list}{RLum.Data.Curve}(x, ...) \method{as.list}{RLum.Analysis}(x, ...) \method{as.matrix}{RLum.Data.Curve}(x, ...) \method{as.matrix}{RLum.Data.Spectrum}(x, ...) is.RLum(x, ...) is.RLum.Data(x, ...) is.RLum.Data.Curve(x, ...) is.RLum.Data.Spectrum(x, ...) is.RLum.Data.Image(x, ...) is.RLum.Analysis(x, ...) is.RLum.Results(x, ...) \method{merge}{RLum}(x, y, ...) \method{unlist}{RLum.Analysis}(x, recursive = TRUE, ...) \method{+}{RLum.Data.Curve}(x, y) \method{-}{RLum.Data.Curve}(x, y) \method{*}{RLum.Data.Curve}(x, y) \method{/}{RLum.Data.Curve}(x, y) \method{[}{RLum.Data.Curve}(x, y, z, drop = TRUE) \method{[}{RLum.Data.Spectrum}(x, y, z, drop = TRUE) \method{[}{RLum.Data.Image}(x, y, z, drop = TRUE) \method{[}{RLum.Analysis}(x, i, drop = FALSE) \method{[}{RLum.Results}(x, i, drop = TRUE) \method{[}{RLum.Data.Curve}(x, i, j) <- value \method{[[}{RLum.Analysis}(x, i) \method{[[}{RLum.Results}(x, i) \method{$}{RLum.Data.Curve}(x, i) \method{$}{RLum.Analysis}(x, i) \method{$}{RLum.Results}(x, i) } \arguments{ \item{x}{\linkS4class{RLum} or \linkS4class{Risoe.BINfileData} (\strong{required}): input opject} \item{y}{\link{integer} (\emph{optional}): the row index of the matrix, data.frame} \item{...}{further arguments that can be passed to the method} \item{object}{\linkS4class{RLum} (\strong{required}): input opject} \item{subset}{\verb{[subset]} \link{expression} (\strong{required}): logical expression indicating elements or rows to keep, this function works in \linkS4class{Risoe.BINfileData} objects like \link{subset.data.frame}, but takes care of the object structure} \item{records.rm}{\link{subset} \link{logical} (\emph{with default}): remove records from data set, can be disabled, to just set the column \code{SET} to \code{TRUE} or \code{FALSE}} \item{row.names}{\link{logical} (\emph{with default}): enables or disables row names 
(\code{as.data.frame})} \item{optional}{\link{logical} (\emph{with default}): logical. If TRUE, setting row names and converting column names (to syntactic names: see make.names) is optional (see \link[base:as.data.frame]{base::as.data.frame})} \item{recursive}{\link{logical} (\emph{with default}): enables or disables further subsetting (\code{unlist})} \item{z}{\link{integer} (\emph{optional}): the column index of the matrix, data.frame} \item{drop}{\link{logical} (\emph{with default}): keep object structure or drop it} \item{i}{\link{character} (\emph{optional}): name of the wanted record type or data object or row in the \code{RLum.Data.Curve} object} \item{j}{\link{integer} (\emph{optional}): column of the data matrix in the \code{RLum.Data.Curve} object} \item{value}{\link{numeric} \strong{(required)}: numeric value which replace the value in the \code{RLum.Data.Curve} object} } \description{ Methods for S3-generics implemented for the package 'Luminescence'. This document summarises all implemented S3-generics. The name of the function is given before the first dot, after the dot the name of the object that is supported by this method is given, e.g. \code{plot.RLum.Data.Curve} can be called by \code{plot(object, ...)}, where \code{object} is the \code{RLum.Data.Curve} object. } \details{ The term S3-generics sounds complicated, however, it just means that something has been implemented in the package to increase the usability for users new in R and who are not familiar with the underlying \code{RLum}-object structure of the package. The practical outcome is that operations and functions presented in standard books on R can be used without knowing the specifica of the R package 'Luminescence'. For examples see the example section. } \note{ \code{methods_RLum} are not really new functions, everything given here are mostly just surrogates for existing functions in the package. 
} \examples{ ##load example data data(ExampleData.RLum.Analysis, envir = environment()) ##combine curve is various ways curve1 <- IRSAR.RF.Data[[1]] curve2 <- IRSAR.RF.Data[[1]] curve1 + curve2 curve1 - curve2 curve1 / curve2 curve1 * curve2 ##`$` access curves IRSAR.RF.Data$RF } \keyword{internal} Luminescence/man/calc_TLLxTxRatio.Rd0000644000176200001440000000720513604173244017056 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_TLLxTxRatio.R \name{calc_TLLxTxRatio} \alias{calc_TLLxTxRatio} \title{Calculate the Lx/Tx ratio for a given set of TL curves -beta version-} \usage{ calc_TLLxTxRatio( Lx.data.signal, Lx.data.background = NULL, Tx.data.signal, Tx.data.background = NULL, signal.integral.min, signal.integral.max ) } \arguments{ \item{Lx.data.signal}{\linkS4class{RLum.Data.Curve} or \link{data.frame} (\strong{required}): TL data (x = temperature, y = counts) (TL signal)} \item{Lx.data.background}{\linkS4class{RLum.Data.Curve} or \link{data.frame} (\emph{optional}): TL data (x = temperature, y = counts). If no data are provided no background subtraction is performed.} \item{Tx.data.signal}{\linkS4class{RLum.Data.Curve} or \link{data.frame} (\strong{required}): TL data (x = temperature, y = counts) (TL test signal)} \item{Tx.data.background}{\linkS4class{RLum.Data.Curve} or \link{data.frame} (\emph{optional}): TL data (x = temperature, y = counts). If no data are provided no background subtraction is performed.} \item{signal.integral.min}{\link{integer} (\strong{required}): channel number for the lower signal integral bound (e.g. \code{signal.integral.min = 100})} \item{signal.integral.max}{\link{integer} (\strong{required}): channel number for the upper signal integral bound (e.g. \code{signal.integral.max = 200})} } \value{ Returns an S4 object of type \linkS4class{RLum.Results}. Slot \code{data} contains a \link{list} with the following structure:\preformatted{$ LxTx.table .. $ LnLx .. $ LnLx.BG .. 
$ TnTx .. $ TnTx.BG .. $ Net_LnLx .. $ Net_LnLx.Error } } \description{ Calculate Lx/Tx ratio for a given set of TL curves. } \details{ \strong{Uncertainty estimation} The standard errors are calculated using the following generalised equation: \deqn{SE_{signal} <- abs(Signal_{net} * BG_f /BG_{signal}} where \eqn{BG_f} is a term estimated by calculating the standard deviation of the sum of the \eqn{L_x} background counts and the sum of the \eqn{T_x} background counts. However, if both signals are similar the error becomes zero. } \note{ \strong{This function has still BETA status!} Please further note that a similar background for both curves results in a zero error and is therefore set to \code{NA}. } \section{Function version}{ 0.3.3 } \examples{ ##load package example data data(ExampleData.BINfileData, envir = environment()) ##convert Risoe.BINfileData into a curve object temp <- Risoe.BINfileData2RLum.Analysis(TL.SAR.Data, pos = 3) Lx.data.signal <- get_RLum(temp, record.id=1) Lx.data.background <- get_RLum(temp, record.id=2) Tx.data.signal <- get_RLum(temp, record.id=3) Tx.data.background <- get_RLum(temp, record.id=4) signal.integral.min <- 210 signal.integral.max <- 230 output <- calc_TLLxTxRatio(Lx.data.signal, Lx.data.background, Tx.data.signal, Tx.data.background, signal.integral.min, signal.integral.max) get_RLum(output) } \seealso{ \linkS4class{RLum.Results}, \link{analyse_SAR.TL} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, Université Bordeaux Montaigne (France) \cr Christoph Schmidt, University of Bayreuth (Germany) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., Schmidt, C., 2020. calc_TLLxTxRatio(): Calculate the Lx/Tx ratio for a given set of TL curves -beta version-. Function version 0.3.3. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \keyword{datagen} Luminescence/man/analyse_IRSAR.RF.Rd0000644000176200001440000004730613604173243016645 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/analyse_IRSAR.RF.R \name{analyse_IRSAR.RF} \alias{analyse_IRSAR.RF} \title{Analyse IRSAR RF measurements} \usage{ analyse_IRSAR.RF( object, sequence_structure = c("NATURAL", "REGENERATED"), RF_nat.lim = NULL, RF_reg.lim = NULL, method = "FIT", method.control = NULL, test_parameters = NULL, n.MC = 10, txtProgressBar = TRUE, plot = TRUE, plot_reduced = FALSE, ... ) } \arguments{ \item{object}{\linkS4class{RLum.Analysis} or a \link{list} of \linkS4class{RLum.Analysis}-objects (\strong{required}): input object containing data for protocol analysis. The function expects to find at least two curves in the \linkS4class{RLum.Analysis} object: (1) RF_nat, (2) RF_reg. If a \code{list} is provided as input all other parameters can be provided as \code{list} as well to gain full control.} \item{sequence_structure}{\link{vector} \link{character} (\emph{with default}): specifies the general sequence structure. Allowed steps are \code{NATURAL}, \code{REGENERATED}. In addition any other character is allowed in the sequence structure; such curves will be ignored during the analysis.} \item{RF_nat.lim}{\link{vector} (\emph{with default}): set minimum and maximum channel range for natural signal fitting and sliding. If only one value is provided this will be treated as minimum value and the maximum limit will be added automatically.} \item{RF_reg.lim}{\link{vector} (\emph{with default}): set minimum and maximum channel range for regenerated signal fitting and sliding. If only one value is provided this will be treated as minimum value and the maximum limit will be added automatically.} \item{method}{\link{character} (\emph{with default}): setting method applied for the data analysis. 
Possible options are \code{"FIT"} or \code{"SLIDE"}.} \item{method.control}{\link{list} (\emph{optional}): parameters to control the method, that can be passed to the chosen method. These are for (1) \code{method = "FIT"}: 'trace', 'maxiter', 'warnOnly', 'minFactor' and for (2) \code{method = "SLIDE"}: 'correct_onset', 'show_density', 'show_fit', 'trace'. See details.} \item{test_parameters}{\link{list} (\emph{with default}): set test parameters. Supported parameters are: \code{curves_ratio}, \code{residuals_slope} (only for \code{method = "SLIDE"}), \code{curves_bounds}, \code{dynamic_ratio}, \code{lambda}, \code{beta} and \code{delta.phi}. All input: \link{numeric} values, \code{NA} and \code{NULL} (s. Details) (see Details for further information)} \item{n.MC}{\link{numeric} (\emph{with default}): set number of Monte Carlo runs for start parameter estimation (\code{method = "FIT"}) or error estimation (\code{method = "SLIDE"}). This value can be set to \code{NULL} to skip the MC runs. Note: Large values will significantly increase the computation time} \item{txtProgressBar}{\link{logical} (\emph{with default}): enables \code{TRUE} or disables \code{FALSE} the progression bar during MC runs} \item{plot}{\link{logical} (\emph{with default}): plot output (\code{TRUE} or \code{FALSE})} \item{plot_reduced}{\link{logical} (\emph{optional}): provides a reduced plot output if enabled to allow common R plot combinations, e.g., \code{par(mfrow(...))}. If \code{TRUE} no residual plot is returned; it has no effect if \code{plot = FALSE}} \item{...}{further arguments that will be passed to the plot output. Currently supported arguments are \code{main}, \code{xlab}, \code{ylab}, \code{xlim}, \code{ylim}, \code{log}, \code{legend} (\code{TRUE/FALSE}), \code{legend.pos}, \code{legend.text} (passes argument to x,y in \link[graphics:legend]{graphics::legend}), \code{xaxt}} } \value{ The function returns numerical output and an (\emph{optional}) plot. 
-----------------------------------\cr \verb{[ NUMERICAL OUTPUT ]}\cr -----------------------------------\cr \strong{\code{RLum.Results}}-object \strong{slot:} \strong{\verb{@data}} \verb{[.. $data : data.frame]} \tabular{lll}{ \strong{Column} \tab \strong{Type} \tab \strong{Description}\cr \code{DE} \tab \code{numeric} \tab the obtained equivalent dose\cr \code{DE.ERROR} \tab \code{numeric} \tab (only \code{method = "SLIDE"}) standard deviation obtained from MC runs \cr \code{DE.LOWER} \tab \code{numeric}\tab 2.5\% quantile for De values obtained by MC runs \cr \code{DE.UPPER} \tab \code{numeric}\tab 97.5\% quantile for De values obtained by MC runs \cr \code{DE.STATUS} \tab \code{character}\tab test parameter status\cr \code{RF_NAT.LIM} \tab \code{charcter}\tab used RF_nat curve limits \cr \code{RF_REG.LIM} \tab \code{character}\tab used RF_reg curve limits\cr \code{POSITION} \tab \code{integer}\tab (\emph{optional}) position of the curves\cr \code{DATE} \tab \code{character}\tab (\emph{optional}) measurement date\cr \code{SEQUENCE_NAME} \tab \code{character}\tab (\emph{optional}) sequence name\cr \code{UID} \tab \code{character}\tab unique data set ID } \verb{[.. $De.MC : numeric]} A \code{numeric} vector with all the De values obtained by the MC runs. \verb{[.. $test_parameters : data.frame]} \tabular{lll}{ \strong{Column} \tab \strong{Type} \tab \strong{Description}\cr \code{POSITION} \tab \code{numeric} \tab aliquot position \cr \code{PARAMETER} \tab \code{character} \tab test parameter name \cr \code{THRESHOLD} \tab \code{numeric} \tab set test parameter threshold value \cr \code{VALUE} \tab \code{numeric} \tab the calculated test parameter value (to be compared with the threshold)\cr \code{STATUS} \tab \code{character} \tab test parameter status either \code{"OK"} or \code{"FAILED"} \cr \code{SEQUENCE_NAME} \tab \code{character} \tab name of the sequence, so far available \cr \code{UID} \tab \code{character}\tab unique data set ID } \verb{[.. 
$fit : data.frame]} An \link{nls} object produced by the fitting. \verb{[.. $slide : list]} A \link{list} with data produced during the sliding. Some elements are previously reported with the summary object data. List elements are: \tabular{lll}{ \strong{Element} \tab \strong{Type} \tab \strong{Description}\cr \code{De} \tab \code{numeric} \tab the final De obtained with the sliding approach \cr \code{De.MC} \tab \code{numeric} \tab all De values obtained by the MC runs \cr \code{residuals} \tab \code{numeric} \tab the obtained residuals for each channel of the curve \cr \code{trend.fit} \tab \code{lm} \tab fitting results produced by the fitting of the residuals \cr \code{RF_nat.slided} \tab \code{matrix} \tab the slided RF_nat curve \cr \code{t_n.id} \tab \code{numeric} \tab the index of the t_n offset \cr \code{I_n} \tab \code{numeric} \tab the vertical intensity offset if a vertical slide was applied \cr \code{algorithm_error} \tab \code{numeric} \tab the vertical sliding suffers from a systematic effect induced by the used algorithm. The returned value is the standard deviation of all obtained De values while expanding the vertical sliding range. It can be added as a systematic error to the final De error, if wanted.\cr \code{vslide_range} \tab \code{numeric} \tab the range used for the vertical sliding \cr \code{squared_residuals} \tab \code{numeric} \tab the squared residuals (horizontal sliding) } \strong{slot:} \strong{\verb{@info}} The original function call (\link[methods:LanguageClasses]{methods::language-class}-object) The output (\code{data}) should be accessed using the function \link{get_RLum} ------------------------\cr \verb{[ PLOT OUTPUT ]}\cr ------------------------\cr The slided IR-RF curves with the finally obtained De } \description{ Function to analyse IRSAR RF measurements on K-feldspar samples, performed using the protocol according to Erfurt et al. (2003) and beyond.
} \details{ The function performs an IRSAR analysis described for K-feldspar samples by Erfurt et al. (2003) assuming a negligible sensitivity change of the RF signal. \strong{General Sequence Structure} (according to Erfurt et al., 2003) \enumerate{ \item Measuring IR-RF intensity of the natural dose for a few seconds (\eqn{RF_{nat}}) \item Bleach the samples under solar conditions for at least 30 min without changing the geometry \item Waiting for at least one hour \item Regeneration of the IR-RF signal to at least the natural level (measuring \eqn{RF_{reg}}) \item Fitting data with a stretched exponential function \item Calculate the palaeodose \eqn{D_{e}} using the parameters from the fitting } Currently two methods are supported to obtain the \eqn{D_{e}}: \code{method = "FIT"} and \code{method = "SLIDE"}: \strong{\code{method = "FIT"}} The principle is described above and follows the original suggestions by Erfurt et al., 2003. For the fitting the mean count value of the RF_nat curve is used. Function used for the fitting (according to Erfurt et al. (2003)): \deqn{\phi(D) = \phi_{0}-\Delta\phi(1-exp(-\lambda*D))^\beta} with \eqn{\phi(D)} the dose dependent IR-RF flux, \eqn{\phi_{0}} the initial IR-RF flux, \eqn{\Delta\phi} the dose dependent change of the IR-RF flux, \eqn{\lambda} the exponential parameter, \eqn{D} the dose and \eqn{\beta} the dispersive factor. To obtain the palaeodose \eqn{D_{e}} the function is changed to: \deqn{D_{e} = ln(-(\phi(D) - \phi_{0})/(-\lambda*\phi)^{1/\beta}+1)/-\lambda} The fitting is done using the \code{port} algorithm of the \link{nls} function. \strong{\code{method = "SLIDE"}} For this method, the natural curve is slid along the x-axis until congruence with the regenerated curve is reached. Instead of fitting this allows working with the original data without the need for any physical model. This approach was introduced for RF curves by Buylaert et al., 2012 and Lapp et al., 2012.
Here the sliding is done by searching for the minimum of the squared residuals. For the mathematical details of the implementation see Frouin et al., 2017. \strong{\code{method.control}} To keep the generic argument list as clear as possible, arguments to control the methods for De estimation are all preset with meaningful default parameters and can be handled using the argument \code{method.control} only, e.g., \code{method.control = list(trace = TRUE)}. Supported arguments are: \tabular{lll}{ \strong{ARGUMENT} \tab \strong{METHOD} \tab \strong{DESCRIPTION}\cr \code{trace} \tab \code{FIT}, \code{SLIDE} \tab as in \link{nls}; shows sum of squared residuals\cr \code{trace_vslide} \tab \code{SLIDE} \tab \link{logical} argument to enable or disable the tracing of the vertical sliding\cr \code{maxiter} \tab \code{FIT} \tab as in \link{nls}\cr \code{warnOnly} \tab \code{FIT} \tab as in \link{nls}\cr \code{minFactor} \tab \code{FIT} \tab as in \link{nls}\cr \code{correct_onset} \tab \code{SLIDE} \tab The logical argument shifts the curves along the x-axis by the first channel, as light is expected in the first channel. The default value is \code{TRUE}.\cr \code{show_density} \tab \code{SLIDE} \tab \link{logical} (\emph{with default}) enables or disables KDE plots for MC run results. If the distribution is too narrow nothing is shown.\cr \code{show_fit} \tab \code{SLIDE} \tab \link{logical} (\emph{with default}) enables or disables the plot of the fitted curve routinely obtained during the evaluation.\cr \code{n.MC} \tab \code{SLIDE} \tab \link{integer} (\emph{with default}): This controls the number of MC runs within the sliding (assessing the possible minimum values). The default \code{n.MC = 1000}. Note: This parameter is not the same as controlled by the function argument \code{n.MC}. \cr \code{vslide_range} \tab \code{SLIDE} \tab \link{logical} or \link{numeric} or \link{character} (\emph{with default}): This argument sets the boundaries for a vertical curve sliding.
The argument expects a vector with an absolute minimum and a maximum (e.g., \code{c(-1000,1000)}). Alternatively the values \code{NULL} and \code{'auto'} are allowed. The automatic mode detects the reasonable vertical sliding range (\strong{recommended}). \code{NULL} applies no vertical sliding. The default is \code{NULL}.\cr \code{cores} \tab \code{SLIDE} \tab \code{number} or \code{character} (\emph{with default}): set number of cores to be allocated for a parallel processing of the Monte-Carlo runs. The default value is \code{NULL} (single thread), the recommended values is \code{'auto'}. An optional number (e.g., \code{cores} = 8) assigns a value manually. } \strong{Error estimation} For \strong{\code{method = "FIT"}} the asymmetric error range is obtained by using the 2.5 \% (lower) and the 97.5 \% (upper) quantiles of the \eqn{RF_{nat}} curve for calculating the \eqn{D_{e}} error range. For \strong{\code{method = "SLIDE"}} the error is obtained by bootstrapping the residuals of the slided curve to construct new natural curves for a Monte Carlo simulation. The error is returned in two ways: (a) the standard deviation of the herewith obtained \eqn{D_{e}} from the MC runs and (b) the confidence interval using the 2.5 \% (lower) and the 97.5 \% (upper) quantiles. The results of the MC runs are returned with the function output. \strong{Test parameters} The argument \code{test_parameters} allows to pass some thresholds for several test parameters, which will be evaluated during the function run. If a threshold is set and it will be exceeded the test parameter status will be set to "FAILED". Intentionally this parameter is not termed 'rejection criteria' as not all test parameters are evaluated for both methods and some parameters are calculated by not evaluated by default. Common for all parameters are the allowed argument options \code{NA} and \code{NULL}. 
If the parameter is set to \code{NA} the value is calculated but the result will not be evaluated, means it has no effect on the status ("OK" or "FAILED") of the parameter. Setting the parameter to \code{NULL} disables the parameter entirely and the parameter will be also removed from the function output. This might be useful in cases where a particular parameter asks for long computation times. Currently supported parameters are: \code{curves_ratio} \link{numeric} (default: \code{1.001}): The ratio of \eqn{RF_{nat}} over \eqn{RF_{reg}} in the range of\eqn{RF_{nat}} of is calculated and should not exceed the threshold value. \code{intersection_ratio} \link{numeric} (default: \code{NA}): Calculated as absolute difference from 1 of the ratio of the integral of the normalised RF-curves, This value indicates intersection of the RF-curves and should be close to 0 if the curves have a similar shape. For this calculation first the corresponding time-count pair value on the RF_reg curve is obtained using the maximum count value of the RF_nat curve and only this segment (fitting to the RF_nat curve) on the RF_reg curve is taken for further calculating this ratio. If nothing is found at all, \code{Inf} is returned. \code{residuals_slope} \link{numeric} (default: \code{NA}; only for \code{method = "SLIDE"}): A linear function is fitted on the residuals after sliding. The corresponding slope can be used to discard values as a high (positive, negative) slope may indicate that both curves are fundamentally different and the method cannot be applied at all. Per default the value of this parameter is calculated but not evaluated. \code{curves_bounds} \link{numeric} (default: \eqn{max(RF_{reg_counts})}: This measure uses the maximum time (x) value of the regenerated curve. The maximum time (x) value of the natural curve cannot be larger than this value. However, although this is not recommended the value can be changed or disabled. 
\code{dynamic_ratio} \link{numeric} (default: \code{NA}): The dynamic ratio of the regenerated curve is calculated as ratio of the minimum and maximum count values. \code{lambda}, \code{beta} and \code{delta.phi} \link{numeric} (default: \code{NA}; \code{method = "SLIDE"}): The stretched exponential function suggested by Erfurt et al. (2003) describing the decay of the RF signal, comprises several parameters that might be useful to evaluate the shape of the curves. For \code{method = "FIT"} this parameter is obtained during the fitting, for \code{method = "SLIDE"} a rather rough estimation is made using the function \link[minpack.lm:nlsLM]{minpack.lm::nlsLM} and the equation given above. Note: As this procedure requests more computation time, setting of one of these three parameters to \code{NULL} also prevents a calculation of the remaining two. } \note{ This function assumes that there is no sensitivity change during the measurements (natural vs. regenerated signal), which is in contrast to the findings by Buylaert et al. (2012). } \section{Function version}{ 0.7.5 } \examples{ ##load data data(ExampleData.RLum.Analysis, envir = environment()) ##(1) perform analysis using the method 'FIT' results <- analyse_IRSAR.RF(object = IRSAR.RF.Data) ##show De results and test paramter results get_RLum(results, data.object = "data") get_RLum(results, data.object = "test_parameters") ##(2) perform analysis using the method 'SLIDE' results <- analyse_IRSAR.RF(object = IRSAR.RF.Data, method = "SLIDE", n.MC = 1) \dontrun{ ##(3) perform analysis using the method 'SLIDE' and method control option ## 'trace results <- analyse_IRSAR.RF( object = IRSAR.RF.Data, method = "SLIDE", method.control = list(trace = TRUE)) } } \section{How to cite}{ Kreutzer, S., 2020. analyse_IRSAR.RF(): Analyse IRSAR RF measurements. Function version 0.7.5. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. 
Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Buylaert, J.P., Jain, M., Murray, A.S., Thomsen, K.J., Lapp, T., 2012. IR-RF dating of sand-sized K-feldspar extracts: A test of accuracy. Radiation Measurements 44 (5-6), 560-565. doi: 10.1016/j.radmeas.2012.06.021 Erfurt, G., Krbetschek, M.R., 2003. IRSAR - A single-aliquot regenerative-dose dating protocol applied to the infrared radiofluorescence (IR-RF) of coarse- grain K-feldspar. Ancient TL 21, 35-42. Erfurt, G., 2003. Infrared luminescence of Pb+ centres in potassium-rich feldspars. physica status solidi (a) 200, 429-438. Erfurt, G., Krbetschek, M.R., 2003. Studies on the physics of the infrared radioluminescence of potassium feldspar and on the methodology of its application to sediment dating. Radiation Measurements 37, 505-510. Erfurt, G., Krbetschek, M.R., Bortolot, V.J., Preusser, F., 2003. A fully automated multi-spectral radioluminescence reading system for geochronometry and dosimetry. Nuclear Instruments and Methods in Physics Research Section B: Beam Interactions with Materials and Atoms 207, 487-499. Frouin, M., Huot, S., Kreutzer, S., Lahaye, C., Lamothe, M., Philippe, A., Mercier, N., 2017. An improved radiofluorescence single-aliquot regenerative dose protocol for K-feldspars. Quaternary Geochronology 38, 13-24. doi:10.1016/j.quageo.2016.11.004 Lapp, T., Jain, M., Thomsen, K.J., Murray, A.S., Buylaert, J.P., 2012. New luminescence measurement facilities in retrospective dosimetry. Radiation Measurements 47, 803-808. doi:10.1016/j.radmeas.2012.02.006 Trautmann, T., 2000. A study of radioluminescence kinetics of natural feldspar dosimeters: experiments and simulations. Journal of Physics D: Applied Physics 33, 2304-2310. Trautmann, T., Krbetschek, M.R., Dietrich, A., Stolz, W., 1998. Investigations of feldspar radioluminescence: potential for a new dating technique. 
Radiation Measurements 29, 421-425. Trautmann, T., Krbetschek, M.R., Dietrich, A., Stolz, W., 1999. Feldspar radioluminescence: a new dating method and its physical background. Journal of Luminescence 85, 45-58. Trautmann, T., Krbetschek, M.R., Stolz, W., 2000. A systematic study of the radioluminescence properties of single feldspar grains. Radiation Measurements 32, 685-690. } \seealso{ \linkS4class{RLum.Analysis}, \linkS4class{RLum.Results}, \link{get_RLum}, \link{nls}, \link[minpack.lm:nlsLM]{minpack.lm::nlsLM}, \code{parallel::mclapply} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France) , RLum Developer Team} \keyword{datagen} Luminescence/man/calc_FiniteMixture.Rd0000644000176200001440000002041013604173244017505 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_FiniteMixture.R \name{calc_FiniteMixture} \alias{calc_FiniteMixture} \title{Apply the finite mixture model (FMM) after Galbraith (2005) to a given De distribution} \usage{ calc_FiniteMixture( data, sigmab, n.components, grain.probability = FALSE, dose.scale, pdf.weight = TRUE, pdf.sigma = "sigmab", pdf.colors = "gray", pdf.scale, plot.proportions = TRUE, plot = TRUE, ... ) } \arguments{ \item{data}{\linkS4class{RLum.Results} or \link{data.frame} (\strong{required}): for \link{data.frame}: two columns with De \code{(data[,1])} and De error \code{(values[,2])}} \item{sigmab}{\link{numeric} (\strong{required}): spread in De values given as a fraction (e.g. 0.2). This value represents the expected overdispersion in the data should the sample be well-bleached (Cunningham & Wallinga 2012, p. 100).} \item{n.components}{\link{numeric} (\strong{required}): number of components to be fitted. If a vector is provided (e.g. \code{c(2:8)}) the finite mixtures for 2, 3 ... 
8 components are calculated and a plot and a statistical evaluation of the model performance (BIC score and maximum log-likelihood) is provided.} \item{grain.probability}{\link{logical} (\emph{with default}): prints the estimated probabilities of which component each grain is in} \item{dose.scale}{\link{numeric}: manually set the scaling of the y-axis of the first plot with a vector in the form of \code{c(min, max)}} \item{pdf.weight}{\link{logical} (\emph{with default}): weight the probability density functions by the components proportion (applies only when a vector is provided for \code{n.components})} \item{pdf.sigma}{\link{character} (\emph{with default}): if \code{"sigmab"} the components normal distributions are plotted with a common standard deviation (i.e. \code{sigmab}) as assumed by the FMM. Alternatively, \code{"se"} takes the standard error of each component for the sigma parameter of the normal distribution} \item{pdf.colors}{\link{character} (\emph{with default}): color coding of the components in the plot. Possible options are \code{"gray"}, \code{"colors"} and \code{"none"}} \item{pdf.scale}{\link{numeric}: manually set the max density value for proper scaling of the x-axis of the first plot} \item{plot.proportions}{\link{logical} (\emph{with default}): plot barplot showing the proportions of components} \item{plot}{\link{logical} (\emph{with default}): plot output} \item{...}{further arguments to pass. See details for their usage.} } \value{ Returns a plot (\emph{optional}) and terminal output.
In addition an \linkS4class{RLum.Results} object is returned containing the following elements: \item{.$summary}{\link{data.frame} summary of all relevant model results.} \item{.$data}{\link{data.frame} original input data} \item{.$args}{\link{list} used arguments} \item{.$call}{\link{call} the function call} \item{.$mle}{ covariance matrices of the log likelhoods} \item{.$BIC}{ BIC score} \item{.$llik}{ maximum log likelihood} \item{.$grain.probability}{ probabilities of a grain belonging to a component} \item{.$components}{\link{matrix} estimates of the de, de error and proportion for each component} \item{.$single.comp}{\link{data.frame} single componente FFM estimate} If a vector for \code{n.components} is provided (e.g. \code{c(2:8)}), \code{mle} and \code{grain.probability} are lists containing matrices of the results for each iteration of the model. The output should be accessed using the function \link{get_RLum} } \description{ This function fits a k-component mixture to a De distribution with differing known standard errors. Parameters (doses and mixing proportions) are estimated by maximum likelihood assuming that the log dose estimates are from a mixture of normal distributions. } \details{ This model uses the maximum likelihood and Bayesian Information Criterion (BIC) approaches. Indications of overfitting are: \itemize{ \item increasing BIC \item repeated dose estimates \item covariance matrix not positive definite \item covariance matrix produces NaNs \item convergence problems } \strong{Plot} If a vector (\code{c(k.min:k.max)}) is provided for \code{n.components} a plot is generated showing the the k components equivalent doses as normal distributions. By default \code{pdf.weight} is set to \code{FALSE}, so that the area under each normal distribution is always 1. If \code{TRUE}, the probability density functions are weighted by the components proportion for each iteration of k components, so the sum of areas of each component equals 1. 
While the density values are on the same scale when no weights are used, the y-axis are individually scaled if the probability density are weighted by the components proportion.\cr The standard deviation (sigma) of the normal distributions is by default determined by a common \code{sigmab} (see \code{pdf.sigma}). For \code{pdf.sigma = "se"} the standard error of each component is taken instead.\cr The stacked barplot shows the proportion of each component (in per cent) calculated by the FFM. The last plot shows the achieved BIC scores and maximum log-likelihood estimates for each iteration of k. } \section{Function version}{ 0.4.1 } \examples{ ## load example data data(ExampleData.DeValues, envir = environment()) ## (1) apply the finite mixture model ## NOTE: the data set is not suitable for the finite mixture model, ## which is why a very small sigmab is necessary calc_FiniteMixture(ExampleData.DeValues$CA1, sigmab = 0.2, n.components = 2, grain.probability = TRUE) ## (2) repeat the finite mixture model for 2, 3 and 4 maximum number of fitted ## components and save results ## NOTE: The following example is computationally intensive. Please un-comment ## the following lines to make the example work. FMM<- calc_FiniteMixture(ExampleData.DeValues$CA1, sigmab = 0.2, n.components = c(2:4), pdf.weight = TRUE, dose.scale = c(0, 100)) ## show structure of the results FMM ## show the results on equivalent dose, standard error and proportion of ## fitted components get_RLum(object = FMM, data.object = "components") } \section{How to cite}{ Burow, C., 2020. calc_FiniteMixture(): Apply the finite mixture model (FMM) after Galbraith (2005) to a given De distribution. Function version 0.4.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Galbraith, R.F. & Green, P.F., 1990. 
Estimating the component ages in a finite mixture. Nuclear Tracks and Radiation Measurements 17, 197-206. Galbraith, R.F. & Laslett, G.M., 1993. Statistical models for mixed fission track ages. Nuclear Tracks Radiation Measurements 4, 459-470. Galbraith, R.F. & Roberts, R.G., 2012. Statistical aspects of equivalent dose and error calculation and display in OSL dating: An overview and some recommendations. Quaternary Geochronology 11, 1-27. Roberts, R.G., Galbraith, R.F., Yoshida, H., Laslett, G.M. & Olley, J.M., 2000. Distinguishing dose populations in sediment mixtures: a test of single-grain optical dating procedures using mixtures of laboratory-dosed quartz. Radiation Measurements 32, 459-465. Galbraith, R.F., 2005. Statistics for Fission Track Analysis, Chapman & Hall/CRC, Boca Raton. \strong{Further reading} Arnold, L.J. & Roberts, R.G., 2009. Stochastic modelling of multi-grain equivalent dose (De) distributions: Implications for OSL dating of sediment mixtures. Quaternary Geochronology 4, 204-230. Cunningham, A.C. & Wallinga, J., 2012. Realizing the potential of fluvial archives using robust OSL chronologies. Quaternary Geochronology 12, 98-106. Rodnight, H., Duller, G.A.T., Wintle, A.G. & Tooth, S., 2006. Assessing the reproducibility and accuracy of optical dating of fluvial deposits. Quaternary Geochronology 1, 109-120. Rodnight, H. 2008. How many equivalent dose values are needed to obtain a reproducible distribution?. Ancient TL 26, 3-10. } \seealso{ \link{calc_CentralDose}, \link{calc_CommonDose}, \link{calc_FuchsLang2001}, \link{calc_MinDose} } \author{ Christoph Burow, University of Cologne (Germany) \cr Based on a rewritten S script of Rex Galbraith, 2006. 
, RLum Developer Team} Luminescence/man/bin_RLum.Data.Rd0000644000176200001440000000400213604173244016305 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/bin_RLum.Data.R \name{bin_RLum.Data} \alias{bin_RLum.Data} \title{Channel binning - method dispatcher} \usage{ bin_RLum.Data(object, ...) } \arguments{ \item{object}{\linkS4class{RLum.Data} (\strong{required}): S4 object of class \code{RLum.Data}} \item{...}{further arguments passed to the specific class method} } \value{ An object of the same type as the input object is provided } \description{ Function calls the object-specific bin functions for RLum.Data S4 class objects. } \details{ The function provides a generalised access point for specific \linkS4class{RLum.Data} objects. \cr Depending on the input object, the corresponding function will be selected. Allowed arguments can be found in the documentations of the corresponding \linkS4class{RLum.Data} class. } \note{ Currently only \code{RLum.Data} objects of class \linkS4class{RLum.Data.Curve} and \linkS4class{RLum.Data.Spectrum} are supported! } \section{Function version}{ 0.2.0 } \examples{ ##load example data data(ExampleData.CW_OSL_Curve, envir = environment()) ##create RLum.Data.Curve object from this example curve <- set_RLum( class = "RLum.Data.Curve", recordType = "OSL", data = as.matrix(ExampleData.CW_OSL_Curve) ) ##plot data without and with 2 and 4 channel binning plot_RLum(curve) plot_RLum(bin_RLum.Data(curve, bin_size = 2)) plot_RLum(bin_RLum.Data(curve, bin_size = 4)) } \seealso{ \linkS4class{RLum.Data.Curve}, \linkS4class{RLum.Data.Spectrum} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5050, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. bin_RLum.Data(): Channel binning - method dispatcher. Function version 0.2.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020.
Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{utilities} Luminescence/man/convert_RLum2Risoe.BINfileData.Rd0000644000176200001440000000410413604173244021475 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/convert_RLum2Risoe.BINfileData.R \name{convert_RLum2Risoe.BINfileData} \alias{convert_RLum2Risoe.BINfileData} \title{Converts RLum.Analysis-objects and RLum.Data.Curve-objects to RLum2Risoe.BINfileData-objects} \usage{ convert_RLum2Risoe.BINfileData(object, keep.position.number = FALSE) } \arguments{ \item{object}{\linkS4class{RLum.Analysis} or \linkS4class{RLum.Data.Curve} (\strong{required}): input object to be converted} \item{keep.position.number}{\link{logical} (with default): keeps the original position number or re-calculates the numbers to avoid doubling} } \value{ The function returns a \linkS4class{Risoe.BINfileData} object. } \description{ The function converts \linkS4class{RLum.Analysis} and \linkS4class{RLum.Data.Curve} objects and a \link{list} of those to \linkS4class{Risoe.BINfileData} objects. The function intends to provide a minimum of compatibility between both formats. The created \linkS4class{RLum.Analysis} object can be later exported to a BIN-file using the function \link{write_R2BIN}. } \note{ The conversion can never be perfect. The \code{RLum} objects may contain information which are not part of the \linkS4class{Risoe.BINfileData} definition. } \section{Function version}{ 0.1.3 } \examples{ ##simple conversion using the example dataset data(ExampleData.RLum.Analysis, envir = environment()) convert_RLum2Risoe.BINfileData(IRSAR.RF.Data) } \seealso{ \linkS4class{RLum.Analysis}, \linkS4class{RLum.Data.Curve}, \link{write_R2BIN} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020.
convert_RLum2Risoe.BINfileData(): Converts RLum.Analysis-objects and RLum.Data.Curve-objects to RLum2Risoe.BINfileData-objects. Function version 0.1.3. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{IO} Luminescence/man/write_R2BIN.Rd0000644000176200001440000000755513604173246015776 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/write_R2BIN.R \name{write_R2BIN} \alias{write_R2BIN} \title{Export Risoe.BINfileData into Risø BIN/BINX-file} \usage{ write_R2BIN( object, file, version, compatibility.mode = FALSE, txtProgressBar = TRUE ) } \arguments{ \item{object}{\linkS4class{Risoe.BINfileData} (\strong{required}): input object to be stored in a bin file.} \item{file}{\link{character} (\strong{required}): file name and path of the output file \itemize{ \item \verb{[WIN]}: \code{write_R2BIN(object, "C:/Desktop/test.bin")} \item \verb{[MAC/LINUX]}: \code{write_R2BIN("/User/test/Desktop/test.bin")} }} \item{version}{\link{character} (\emph{optional}): version number for the output file. If no value is provided the highest version number from the \linkS4class{Risoe.BINfileData} is taken automatically. \strong{Note:} This argument can be used to convert BIN-file versions.} \item{compatibility.mode}{\link{logical} (\emph{with default}): this option recalculates the position values if necessary and set the max. value to 48. The old position number is appended as comment (e.g., 'OP: 70). This option accounts for potential compatibility problems with the Analyst software. It further limits the maximum number of points per curve to 9,999. 
If a curve contains more data the curve data got binned using the smallest possible bin width.} \item{txtProgressBar}{\link{logical} (\emph{with default}): enables or disables \link{txtProgressBar}.} } \value{ Write a binary file. } \description{ Exports a Risoe.BINfileData object in a *.bin or *.binx file that can be opened by the Analyst software or other Risoe software. } \details{ The structure of the exported binary data follows the data structure published in the Appendices of the Analyst manual p. 42. If \code{LTYPE}, \code{DTYPE} and \code{LIGHTSOURCE} are not of type \link{character}, no transformation into numeric values is done. } \note{ The function just roughly checks the data structures. The validity of the output data depends on the user. The validity of the file path is not further checked. BIN-file conversions using the argument \code{version} may be a lossy conversion, depending on the chosen input andoutput data (e.g., conversion from version 08 to 07 to 06 to 05 to 04 or 03). \strong{Warning} Although the coding was done carefully it seems that the BIN/BINX-files produced by Risø DA 15/20 TL/OSL readers slightly differ on the byte level. No obvious differences are observed in the METADATA, however, the BIN/BINX-file may not fully compatible, at least not similar to the once directly produced by the Risoe readers! ROI definitions (introduced in BIN-file version 8) are not supported! There are furthermore ignored by the function \link{read_BIN2R}. } \section{Function version}{ 0.5.1 } \examples{ \dontrun{ ##load exampled dataset data(ExampleData.BINfileData, envir = environment()) ##create temporary filepath ##(for usage replace by own path) temp_file <- temp_file <- tempfile(pattern = "output", fileext = ".bin") ##export to temporary file path write_R2BIN(CWOSL.SAR.Data, file = temp_file) } } \section{How to cite}{ Kreutzer, S., 2020. write_R2BIN(): Export Risoe.BINfileData into Risø BIN/BINX-file. Function version 0.5.1. 
In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ DTU Nutech, 2016. The Squence Editor, Users Manual, February, 2016. \url{http://www.nutech.dtu.dk/english/products-and-services/radiation-instruments/tl_osl_reader/manuals} } \seealso{ \link{read_BIN2R}, \linkS4class{Risoe.BINfileData}, \link{writeBin} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} \keyword{IO} Luminescence/man/merge_RLum.Rd0000644000176200001440000000466413604173244016002 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/merge_RLum.R \name{merge_RLum} \alias{merge_RLum} \title{General merge function for RLum S4 class objects} \usage{ merge_RLum(objects, ...) } \arguments{ \item{objects}{\link{list} of \linkS4class{RLum} (\strong{required}): list of S4 object of class \code{RLum}} \item{...}{further arguments that one might want to pass to the specific merge function} } \value{ Return is the same as input objects as provided in the list. } \description{ Function calls object-specific merge functions for RLum S4 class objects. } \details{ The function provides a generalised access point for merge specific \linkS4class{RLum} objects. Depending on the input object, the corresponding merge function will be selected. Allowed arguments can be found in the documentations of each merge function. Empty list elements (\code{NULL}) are automatically removed from the input \code{list}. 
\tabular{lll}{ \strong{object} \tab \tab \strong{corresponding merge function} \cr \linkS4class{RLum.Data.Curve} \tab : \tab \code{merge_RLum.Data.Curve} \cr \linkS4class{RLum.Analysis} \tab : \tab \code{merge_RLum.Analysis} \cr \linkS4class{RLum.Results} \tab : \tab \code{merge_RLum.Results} } } \note{ So far, a merging function does not exist for every \code{RLum} object. } \section{Function version}{ 0.1.2 } \examples{ ##Example based on data from the calc_CentralDose() function ##load example data data(ExampleData.DeValues, envir = environment()) ##apply the central dose model 1st time temp1 <- calc_CentralDose(ExampleData.DeValues$CA1) ##apply the central dose model 2nd time temp2 <- calc_CentralDose(ExampleData.DeValues$CA1) ##merge the results and store them in a new object temp.merged <- get_RLum(merge_RLum(objects = list(temp1, temp2))) } \seealso{ \linkS4class{RLum.Data.Curve}, \linkS4class{RLum.Data.Image}, \linkS4class{RLum.Data.Spectrum}, \linkS4class{RLum.Analysis}, \linkS4class{RLum.Results} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. merge_RLum(): General merge function for RLum S4 class objects. Function version 0.1.2. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{utilities} Luminescence/man/plot_RLum.Data.Image.Rd0000644000176200001440000000677313604173245017556 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_RLum.Data.Image.R \name{plot_RLum.Data.Image} \alias{plot_RLum.Data.Image} \title{Plot function for an \code{RLum.Data.Image} S4 class object} \usage{ plot_RLum.Data.Image(object, par.local = TRUE, plot.type = "plot.raster", ...) 
} \arguments{ \item{object}{\linkS4class{RLum.Data.Image} (\strong{required}): S4 object of class \code{RLum.Data.Image}} \item{par.local}{\link{logical} (\emph{with default}): use local graphical parameters for plotting, e.g. the plot is shown in one column and one row. If \code{par.local = FALSE} global parameters are inherited.} \item{plot.type}{\link{character} (\emph{with default}): plot types. Supported types are \code{plot.raster}, \code{plotRGB} or \code{contour}} \item{...}{further arguments and graphical parameters that will be passed to the specific plot functions.} } \value{ Returns a plot. } \description{ The function provides a standardised plot output for image data of an \code{RLum.Data.Image} S4 class object, mainly using the plot functions provided by the \link{raster} package. } \details{ \strong{Details on the plot functions} Image is visualised as 2D plot using generic plot types provided by other packages. Supported plot types: \strong{\code{plot.type = "plot.raster"}} Uses the standard plot function for raster data from the package \link[raster:raster]{raster::raster}: \link[raster:plot]{raster::plot}. For each raster layer in a raster brick one plot is produced. Arguments that are passed through the function call: \code{main},\code{axes}, \code{xlab}, \code{ylab}, \code{xlim}, \code{ylim}, \code{col} \strong{\code{plot.type = "plotRGB"}} Uses the function \link[raster:plotRGB]{raster::plotRGB} from the \link[raster:raster]{raster::raster} package. Only one image plot is produced as all layers in a brick are combined. This plot type is useful to see whether any signal is recorded by the camera.\cr Arguments that are passed through the function call: \code{main},\code{axes}, \code{xlab}, \code{ylab}, \code{ext}, \code{interpolate}, \code{maxpixels}, \code{alpha}, \code{colNA}, \code{stretch} \strong{\code{plot.type = "contour"}} Uses the contour plot function from the \link{raster} package (\link[raster:contour]{raster::contour}). 
For each raster layer one contour plot is produced. Arguments that are passed through the function call:\cr \code{main},\code{axes}, \code{xlab}, \code{ylab}, \code{xlim}, \code{ylim}, \code{col} } \note{ This function has been created to facilitate the plotting of image data imported by the function \link{read_SPE2R}. However, so far the function is not optimized to handle image data > ca. 200 MByte and thus plotting of such data is extremely slow. } \section{Function version}{ 0.1 } \examples{ ##load data data(ExampleData.RLum.Data.Image, envir = environment()) ##plot data plot_RLum.Data.Image(ExampleData.RLum.Data.Image) } \seealso{ \linkS4class{RLum.Data.Image}, \link{plot}, \link{plot_RLum}, \link[raster:raster]{raster::raster} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. plot_RLum.Data.Image(): Plot function for an RLum.Data.Image S4 class object. Function version 0.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{aplot} Luminescence/man/write_RLum2CSV.Rd0000644000176200001440000000713413604173246016470 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/write_RLum2CSV.R \name{write_RLum2CSV} \alias{write_RLum2CSV} \title{Export RLum-objects to CSV} \usage{ write_RLum2CSV( object, path = NULL, prefix = "", export = TRUE, compact = TRUE, ... ) } \arguments{ \item{object}{\linkS4class{RLum} or a \link{list} of \code{RLum} objects (\strong{required}): objects to be written} \item{path}{\link{character} (\emph{optional}): character string naming folder for the output to be written. If nothing is provided \code{path} will be set to the working directory. 
\strong{Note:} this argument is ignored if the argument \code{export} is set to \code{FALSE}.} \item{prefix}{\link{character} (\emph{with default}): optional prefix to name the files. This prefix is valid for all written files} \item{export}{\link{logical} (\emph{with default}): enable or disable the file export. If set to \code{FALSE} nothing is written to the file connection, but a list comprising objects of type \link{data.frame} and \link{matrix} is returned instead} \item{compact}{\link{logical} (\emph{with default}): if \code{TRUE} (the default) the output will be more simple but less comprehensive, means not all elements in the objects will be fully broken down. This is in particular useful for writing \code{RLum.Results} objects to CSV-files, such objects can be rather complex and not all information are needed in a CSV-file or can be meaningfully translated to it.} \item{...}{further arguments that will be passed to the function \link[utils:write.table]{utils::write.table}. All arguments except the argument \code{file} are supported} } \value{ The function returns either a CSV-file (or many of them) or for the option \code{export == FALSE} a list comprising objects of type \link{data.frame} and \link{matrix} } \description{ This function exports \linkS4class{RLum}-objects to CSV-files using the R function \link[utils:write.table]{utils::write.table}. All \linkS4class{RLum}-objects are supported, but the export is lossy, i.e. the pure numerical values are exported only. Information that cannot be coerced to a \link{data.frame} or a \link{matrix} are discarded as well as metadata. } \details{ However, in combination with the implemented import functions, nearly every supported import data format can be exported to CSV-files, this gives a great deal of freedom in terms of compatibility with other tools. \strong{Input is a list of objects} If the input is a \link{list} of objects all explicit function arguments can be provided as \link{list}. 
} \section{Function version}{ 0.2.0 } \examples{ ##transform values to a list (and do not write) data(ExampleData.BINfileData, envir = environment()) object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data)[[1]] write_RLum2CSV(object, export = FALSE) \dontrun{ ##create temporary filepath ##(for usage replace by own path) temp_file <- tempfile(pattern = "output", fileext = ".csv") ##write CSV-file to working directory write_RLum2CSV(object, path = temp_file) } } \seealso{ \linkS4class{RLum.Analysis}, \linkS4class{RLum.Data}, \linkS4class{RLum.Results}, \link[utils:write.table]{utils::write.table} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. write_RLum2CSV(): Export RLum-objects to CSV. Function version 0.2.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{IO} Luminescence/man/use_DRAC.Rd0000644000176200001440000001067213604173245015326 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/use_DRAC.R \name{use_DRAC} \alias{use_DRAC} \title{Use DRAC to calculate dose rate data} \usage{ use_DRAC(file, name, print_references = TRUE, citation_style = "text", ...) } \arguments{ \item{file}{\link{character} (\strong{required}): spreadsheet to be passed to the DRAC website for calculation. Can also be a DRAC template object obtained from \code{template_DRAC()}.} \item{name}{\link{character} (\emph{with default}): Optional user name submitted to DRAC. 
If omitted, a random name will be generated} \item{print_references}{(\emph{with default}): Print all references used in the input data table to the console.} \item{citation_style}{(\emph{with default}): If \code{print_references = TRUE} this argument determines the output style of the used references. Valid options are \code{"Bibtex"}, \code{"citation"}, \code{"html"}, \code{"latex"} or \code{"R"}. Default is \code{"text"}.} \item{...}{Further arguments. \itemize{ \item \code{url} \link{character}: provide an alternative URL to DRAC \item \code{verbose} \link{logical}: show or hide console output }} } \value{ Returns an \linkS4class{RLum.Results} object containing the following elements: \item{DRAC}{\link{list}: a named list containing the following elements in slot \verb{@data}: \tabular{lll}{ \verb{$highlights} \tab \link{data.frame} \tab summary of 25 most important input/output fields \cr \verb{$header} \tab \link{character} \tab HTTP header from the DRAC server response \cr \verb{$labels} \tab \link{data.frame} \tab descriptive headers of all input/output fields \cr \verb{$content} \tab \link{data.frame} \tab complete DRAC input/output table \cr \verb{$input} \tab \link{data.frame} \tab DRAC input table \cr \verb{$output} \tab \link{data.frame} \tab DRAC output table \cr \code{references}\tab \link{list} \tab A list of \link{bibentry}s of used references \cr } } \item{data}{\link{character} or \link{list} path to the input spreadsheet or a DRAC template} \item{call}{\link{call} the function call} \item{args}{\link{list} used arguments} The output should be accessed using the function \link{get_RLum}. } \description{ The function provides an interface from R to DRAC. An R-object or a pre-formatted XLS/XLSX file is passed to the DRAC website and the results are re-imported into R. 
} \section{Function version}{ 0.1.3 } \examples{ ## (1) Method using the DRAC spreadsheet file <- "/PATH/TO/DRAC_Input_Template.csv" # send the actual IO template spreadsheet to DRAC \dontrun{ use_DRAC(file = file) } ## (2) Method using an R template object # Create a template input <- template_DRAC(preset = "DRAC-example_quartz") # Fill the template with values input$`Project ID` <- "DRAC-Example" input$`Sample ID` <- "Quartz" input$`Conversion factors` <- "AdamiecAitken1998" input$`External U (ppm)` <- 3.4 input$`errExternal U (ppm)` <- 0.51 input$`External Th (ppm)` <- 14.47 input$`errExternal Th (ppm)` <- 1.69 input$`External K (\%)` <- 1.2 input$`errExternal K (\%)` <- 0.14 input$`Calculate external Rb from K conc?` <- "N" input$`Calculate internal Rb from K conc?` <- "N" input$`Scale gammadoserate at shallow depths?` <- "N" input$`Grain size min (microns)` <- 90 input$`Grain size max (microns)` <- 125 input$`Water content ((wet weight - dry weight)/dry weight) \%` <- 5 input$`errWater content \%` <- 2 input$`Depth (m)` <- 2.2 input$`errDepth (m)` <- 0.22 input$`Overburden density (g cm-3)` <- 1.8 input$`errOverburden density (g cm-3)` <- 0.1 input$`Latitude (decimal degrees)` <- 30.0000 input$`Longitude (decimal degrees)` <- 70.0000 input$`Altitude (m)` <- 150 input$`De (Gy)` <- 20 input$`errDe (Gy)` <- 0.2 # use DRAC \dontrun{ output <- use_DRAC(input) } } \section{How to cite}{ Kreutzer, S., Dietze, M., Burow, C., 2020. use_DRAC(): Use DRAC to calculate dose rate data. Function version 0.1.3. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Durcan, J.A., King, G.E., Duller, G.A.T., 2015. DRAC: Dose Rate and Age Calculator for trapped charge dating. Quaternary Geochronology 28, 54-61. 
doi:10.1016/j.quageo.2015.03.012 } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr Michael Dietze, GFZ Potsdam (Germany)\cr Christoph Burow, University of Cologne (Germany) , RLum Developer Team} Luminescence/man/plot_RLum.Results.Rd0000644000176200001440000000405713604173245017316 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_RLum.Results.R \name{plot_RLum.Results} \alias{plot_RLum.Results} \title{Plot function for an RLum.Results S4 class object} \usage{ plot_RLum.Results(object, single = TRUE, ...) } \arguments{ \item{object}{\linkS4class{RLum.Results} (\strong{required}): S4 object of class \code{RLum.Results}} \item{single}{\link{logical} (\emph{with default}): single plot output (\code{TRUE/FALSE}) to allow for plotting the results in as few plot windows as possible.} \item{...}{further arguments and graphical parameters will be passed to the \code{plot} function.} } \value{ Returns multiple plots. } \description{ The function provides a standardised plot output for data of an RLum.Results S4 class object } \details{ The function produces a multiple plot output. A file output is recommended (e.g., \link{pdf}). } \note{ Not all arguments available for \link{plot} will be passed! Only plotting of \code{RLum.Results} objects are supported. 
} \section{Function version}{ 0.2.1 } \examples{ ###load data data(ExampleData.DeValues, envir = environment()) # apply the un-logged minimum age model mam <- calc_MinDose(data = ExampleData.DeValues$CA1, sigmab = 0.2, log = TRUE, plot = FALSE) ##plot plot_RLum.Results(mam) # estimate the number of grains on an aliquot grains<- calc_AliquotSize(grain.size = c(100,150), sample.diameter = 1, plot = FALSE, MC.iter = 100) ##plot plot_RLum.Results(grains) } \seealso{ \link{plot}, \link{plot_RLum} } \author{ Christoph Burow, University of Cologne (Germany) \cr Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Burow, C., Kreutzer, S., 2020. plot_RLum.Results(): Plot function for an RLum.Results S4 class object. Function version 0.2.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{aplot} Luminescence/man/plot_KDE.Rd0000644000176200001440000001576113604173245015406 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_KDE.R \name{plot_KDE} \alias{plot_KDE} \title{Plot kernel density estimate with statistics} \usage{ plot_KDE( data, na.rm = TRUE, values.cumulative = TRUE, order = TRUE, boxplot = TRUE, rug = TRUE, summary, summary.pos, summary.method = "MCM", bw = "nrd0", output = TRUE, ... ) } \arguments{ \item{data}{\link{data.frame} or \linkS4class{RLum.Results} object (\strong{required}): for \code{data.frame}: two columns: De (\code{values[,1]}) and De error (\code{values[,2]}). For plotting multiple data sets, these must be provided as \code{list} (e.g. 
\code{list(dataset1, dataset2)}).} \item{na.rm}{\link{logical} (\emph{with default}): exclude NA values from the data set prior to any further operation.} \item{values.cumulative}{\link{logical} (\emph{with default}): show cumulative individual data.} \item{order}{\link{logical}: Order data in ascending order.} \item{boxplot}{\link{logical} (\emph{with default}): optionally show a boxplot (depicting median as thick central line, first and third quartile as box limits, whiskers denoting +/- 1.5 interquartile ranges and dots further outliers).} \item{rug}{\link{logical} (\emph{with default}): optionally add rug.} \item{summary}{\link{character} (\emph{optional}): add statistic measures of centrality and dispersion to the plot. Can be one or more of several keywords. See details for available keywords.} \item{summary.pos}{\link{numeric} or \link{character} (\emph{with default}): optional position coordinates or keyword (e.g. \code{"topright"}) for the statistical summary. Alternatively, the keyword \code{"sub"} may be specified to place the summary below the plot header. However, this latter option in only possible if \code{mtext} is not used. In case of coordinate specification, y-coordinate refers to the right y-axis.} \item{summary.method}{\link{character} (\emph{with default}): keyword indicating the method used to calculate the statistic summary. One out of \code{"unweighted"}, \code{"weighted"} and \code{"MCM"}. See \link{calc_Statistics} for details.} \item{bw}{\link{character} (\emph{with default}): bin-width, chose a numeric value for manual setting.} \item{output}{\link{logical}: Optional output of numerical plot parameters. These can be useful to reproduce similar plots. Default is \code{TRUE}.} \item{...}{further arguments and graphical parameters passed to \link{plot}.} } \description{ Plot a kernel density estimate of measurement values in combination with the actual values and associated error bars in ascending order. 
If enabled, the boxplot will show the usual distribution parameters (median as bold line, box delimited by the first and third quartile, whiskers defined by the extremes and outliers shown as points) and also the mean and standard deviation as pale bold line and pale polygon, respectively. } \details{ The function allows passing several plot arguments, such as \code{main}, \code{xlab}, \code{cex}. However, as the figure is an overlay of two separate plots, \code{ylim} must be specified in the order: c(ymin_axis1, ymax_axis1, ymin_axis2, ymax_axis2) when using the cumulative values plot option. See examples for some further explanations. For details on the calculation of the bin-width (parameter \code{bw}) see \link{density}. A statistic summary, i.e. a collection of statistic measures of centrality and dispersion (and further measures) can be added by specifying one or more of the following keywords: \itemize{ \item \code{"n"} (number of samples) \item \code{"mean"} (mean De value) \item \code{"median"} (median of the De values) \item \code{"sd.rel"} (relative standard deviation in percent) \item \code{"sd.abs"} (absolute standard deviation) \item \code{"se.rel"} (relative standard error) \item \code{"se.abs"} (absolute standard error) \item \code{"in.2s"} (percent of samples in 2-sigma range) \item \code{"kurtosis"} (kurtosis) \item \code{"skewness"} (skewness) } \strong{Note} that the input data for the statistic summary is sent to the function \code{calc_Statistics()} depending on the log-option for the z-scale. If \code{"log.z = TRUE"}, the summary is based on the logarithms of the input data. If \code{"log.z = FALSE"} the linearly scaled data is used. \strong{Note} as well, that \code{"calc_Statistics()"} calculates these statistic measures in three different ways: \code{unweighted}, \code{weighted} and \code{MCM-based} (i.e., based on Monte Carlo Methods). By default, the MCM-based version is used. 
If you wish to use another method, indicate this with the appropriate keyword using the argument \code{summary.method}. } \note{ The plot output is no 'probability density' plot (cf. the discussion of Berger and Galbraith in Ancient TL; see references)! } \section{Function version}{ 3.6.0 } \examples{ ## read example data set data(ExampleData.DeValues, envir = environment()) ExampleData.DeValues <- Second2Gray(ExampleData.DeValues$BT998, c(0.0438,0.0019)) ## create plot straightforward plot_KDE(data = ExampleData.DeValues) ## create plot with logarithmic x-axis plot_KDE(data = ExampleData.DeValues, log = "x") ## create plot with user-defined labels and axes limits plot_KDE(data = ExampleData.DeValues, main = "Dose distribution", xlab = "Dose (s)", ylab = c("KDE estimate", "Cumulative dose value"), xlim = c(100, 250), ylim = c(0, 0.08, 0, 30)) ## create plot with boxplot option plot_KDE(data = ExampleData.DeValues, boxplot = TRUE) ## create plot with statistical summary below header plot_KDE(data = ExampleData.DeValues, summary = c("n", "median", "skewness", "in.2s")) ## create plot with statistical summary as legend plot_KDE(data = ExampleData.DeValues, summary = c("n", "mean", "sd.rel", "se.abs"), summary.pos = "topleft") ## split data set into sub-groups, one is manipulated, and merge again data.1 <- ExampleData.DeValues[1:15,] data.2 <- ExampleData.DeValues[16:25,] * 1.3 data.3 <- list(data.1, data.2) ## create plot with two subsets straightforward plot_KDE(data = data.3) ## create plot with two subsets and summary legend at user coordinates plot_KDE(data = data.3, summary = c("n", "median", "skewness"), summary.pos = c(110, 0.07), col = c("blue", "orange")) ## example of how to use the numerical output of the function ## return plot output to draw a thicker KDE line KDE_out <- plot_KDE(data = ExampleData.DeValues, output = TRUE) } \seealso{ \link{density}, \link{plot} } \author{ Michael Dietze, GFZ Potsdam (Germany)\cr Sebastian Kreutzer, IRAMAT-CRP2A, UMR 
5060, CNRS-Université Bordeaux Montaigne, France , RLum Developer Team} \section{How to cite}{ Dietze, M., Kreutzer, S., 2020. plot_KDE(): Plot kernel density estimate with statistics. Function version 3.6.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } Luminescence/man/RLum-class.Rd0000644000176200001440000000463113604173245015721 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/RLum-class.R \docType{class} \name{RLum-class} \alias{RLum-class} \alias{replicate_RLum,RLum-method} \title{Class \code{"RLum"}} \usage{ \S4method{replicate_RLum}{RLum}(object, times = NULL) } \arguments{ \item{object}{\linkS4class{RLum} (\strong{required}): an object of class \linkS4class{RLum}} \item{times}{\link{integer} (\emph{optional}): number of times each element is repeated} } \description{ Abstract class for data in the package Luminescence Subclasses are: } \details{ \strong{RLum-class}\cr |\cr |----\linkS4class{RLum.Data}\cr |----|-- \linkS4class{RLum.Data.Curve}\cr |----|-- \linkS4class{RLum.Data.Spectrum}\cr |----|-- \linkS4class{RLum.Data.Image}\cr |----\linkS4class{RLum.Analysis}\cr |----\linkS4class{RLum.Results} } \section{Methods (by generic)}{ \itemize{ \item \code{replicate_RLum}: Replication method RLum-objects }} \section{Slots}{ \describe{ \item{\code{originator}}{Object of class \link{character} containing the name of the producing function for the object. Set automatically by using the function \link{set_RLum}.} \item{\code{info}}{Object of class \link{list} for additional information on the object itself} \item{\code{.uid}}{Object of class \link{character} for a unique object identifier. 
This id is usually calculated using the internal function \code{create_UID()} if the function \link{set_RLum} is called.} \item{\code{.pid}}{Object of class \link{character} for a parent id. This allows nesting RLum-objects at will. The parent id can be the uid of another object.} }} \note{ \code{RLum} is a virtual class. } \section{Objects from the Class}{ A virtual Class: No objects can be created from it. } \section{Class version}{ 0.4.0 } \examples{ showClass("RLum") } \seealso{ \linkS4class{RLum.Data}, \linkS4class{RLum.Data.Curve}, \linkS4class{RLum.Data.Spectrum}, \linkS4class{RLum.Data.Image}, \linkS4class{RLum.Analysis}, \linkS4class{RLum.Results}, \link{methods_RLum} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. RLum-class(): Class 'RLum'. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{classes} Luminescence/man/analyse_portableOSL.Rd0000644000176200001440000000530313604173243017634 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/analyse_portableOSL.R \name{analyse_portableOSL} \alias{analyse_portableOSL} \title{Analyse portable CW-OSL measurements} \usage{ analyse_portableOSL( object, signal.integral, invert = FALSE, normalise = FALSE, plot = TRUE, ... ) } \arguments{ \item{object}{\linkS4class{RLum.Analysis} (\strong{required}): \code{RLum.Analysis} object produced by \link{read_PSL2R}.} \item{signal.integral}{\link{vector} (\strong{required}): A vector of two values specifying the lower and upper channel used to calculate the OSL/IRSL signal. 
Can be provided in form of \code{c(1, 5)} or \code{1:5}.} \item{invert}{\link{logical} (\emph{with default}): \code{TRUE} to calculate and plot the data in reverse order.} \item{normalise}{\link{logical} (\emph{with default}): \code{TRUE} to normalise the OSL/IRSL signals by the mean of all corresponding data curves.} \item{plot}{\link{logical} (\emph{with default}): enable/disable plot output} \item{...}{currently not used.} } \value{ Returns an S4 \linkS4class{RLum.Results} object. } \description{ The function analyses CW-OSL curve data produced by a SUERC portable OSL reader and produces a combined plot of OSL/IRSL signal intensities, OSL/IRSL depletion ratios and the IRSL/OSL ratio. } \details{ This function only works with \code{RLum.Analysis} objects produced by \link{read_PSL2R}. It further assumes (or rather requires) an equal amount of OSL and IRSL curves that are pairwise combined for calculating the IRSL/OSL ratio. For calculating the depletion ratios the cumulative signal of the last n channels (same number of channels as specified by \code{signal.integral}) is divided by cumulative signal of the first n channels (\code{signal.integral}). } \section{Function version}{ 0.0.3 } \examples{ # (1) load example data set data("ExampleData.portableOSL", envir = environment()) # (2) merge and plot all RLum.Analysis objects merged <- merge_RLum(ExampleData.portableOSL) plot_RLum(merged, combine = TRUE) merged # (3) analyse and plot results <- analyse_portableOSL(merged, signal.integral = 1:5, invert = FALSE, normalise = TRUE) get_RLum(results) } \seealso{ \linkS4class{RLum.Analysis}, \linkS4class{RLum.Data.Curve} } \author{ Christoph Burow, University of Cologne (Germany) , RLum Developer Team} \section{How to cite}{ Burow, C., 2020. analyse_portableOSL(): Analyse portable CW-OSL measurements. Function version 0.0.3. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. 
Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{datagen} \keyword{plot} Luminescence/man/analyse_Al2O3C_CrossTalk.Rd0000644000176200001440000000670413604173243020364 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/analyse_Al2O3C_CrossTalk.R \name{analyse_Al2O3C_CrossTalk} \alias{analyse_Al2O3C_CrossTalk} \title{Al2O3:C Reader Cross Talk Analysis} \usage{ analyse_Al2O3C_CrossTalk( object, signal_integral = NULL, dose_points = c(0, 4), recordType = c("OSL (UVVIS)"), irradiation_time_correction = NULL, method_control = NULL, plot = TRUE, ... ) } \arguments{ \item{object}{\linkS4class{RLum.Analysis} \strong{(required)}: measurement input} \item{signal_integral}{\link{numeric} (\emph{optional}): signal integral, used for the signal and the background. If nothing is provided the full range is used} \item{dose_points}{\link{numeric} (\emph{with default}): vector with dose points, if dose points are repeated, only the general pattern needs to be provided. Default values follow the suggestions made by Kreutzer et al., 2018} \item{recordType}{\link{character} (\emph{with default}): input curve selection, which is passed to function \link{get_RLum}. To deactivate the automatic selection set the argument to \code{NULL}} \item{irradiation_time_correction}{\link{numeric} or \linkS4class{RLum.Results} (\emph{optional}): information on the used irradiation time correction obtained by other experiments.} \item{method_control}{\link{list} (\emph{optional}): optional parameters to control the calculation. 
See details for further explanations} \item{plot}{\link{logical} (\emph{with default}): enable/disable plot output} \item{...}{further arguments that can be passed to the plot output} } \value{ Function returns results numerically and graphically: -----------------------------------\cr \verb{[ NUMERICAL OUTPUT ]}\cr -----------------------------------\cr \strong{\code{RLum.Results}}-object \strong{slot:} \strong{\verb{@data}} \tabular{lll}{ \strong{Element} \tab \strong{Type} \tab \strong{Description}\cr \verb{$data} \tab \code{data.frame} \tab summed apparent dose table \cr \verb{$data_full} \tab \code{data.frame} \tab full apparent dose table \cr \verb{$fit} \tab \code{lm} \tab the linear model obtained from fitting \cr \verb{$col.seq} \tab \code{numeric} \tab the used colour vector \cr } \strong{slot:} \strong{\verb{@info}} The original function call ------------------------\cr \verb{[ PLOT OUTPUT ]}\cr ------------------------\cr \itemize{ \item An overview of the obtained apparent dose values } } \description{ The function provides the analysis of cross-talk measurements on a FI lexsyg SMART reader using Al2O3:C chips } \section{Function version}{ 0.1.2 } \examples{ ##load data data(ExampleData.Al2O3C, envir = environment()) ##run analysis analyse_Al2O3C_CrossTalk(data_CrossTalk) } \section{How to cite}{ Kreutzer, S., 2020. analyse_Al2O3C_CrossTalk(): Al2O3:C Reader Cross Talk Analysis. Function version 0.1.2. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Kreutzer, S., Martin, L., Guérin, G., Tribolo, C., Selva, P., Mercier, N., 2018. Environmental Dose Rate Determination Using a Passive Dosimeter: Techniques and Workflow for alpha-Al2O3:C Chips. Geochronometria 45, 56-67. 
doi: 10.1515/geochr-2015-0086 } \seealso{ \link{analyse_Al2O3C_ITC} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France) , RLum Developer Team} \keyword{datagen} Luminescence/man/ExampleData.ScaleGammaDose.Rd0000644000176200001440000000117013604173243020721 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \name{ExampleData.ScaleGammaDose} \alias{ExampleData.ScaleGammaDose} \title{Example data for scale_GammaDose()} \format{A \code{\link{data.frame}}. Please see \code{?scale_GammaDose()} for a detailed description of its structure.} \description{ An example data set for the function \code{scale_GammaDose()} containing layer specific information to scale the gamma dose rate considering variations in soil radioactivity. } \section{Version}{ 0.1 } \examples{ ## Load data data("ExampleData.ScaleGammaDose") } \keyword{datasets} Luminescence/man/plot_GrowthCurve.Rd0000644000176200001440000002653013604173245017256 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_GrowthCurve.R \name{plot_GrowthCurve} \alias{plot_GrowthCurve} \title{Fit and plot a growth curve for luminescence data (Lx/Tx against dose)} \usage{ plot_GrowthCurve( sample, na.rm = TRUE, mode = "interpolation", fit.method = "EXP", fit.force_through_origin = FALSE, fit.weights = TRUE, fit.includingRepeatedRegPoints = TRUE, fit.NumberRegPoints = NULL, fit.NumberRegPointsReal = NULL, fit.bounds = TRUE, NumberIterations.MC = 100, output.plot = TRUE, output.plotExtended = TRUE, output.plotExtended.single = FALSE, cex.global = 1, txtProgressBar = TRUE, verbose = TRUE, ... ) } \arguments{ \item{sample}{\link{data.frame} (\strong{required}): data frame with three columns for x=Dose,y=LxTx,z=LxTx.Error, y1=TnTx. The column for the test dose response is optional, but requires 'TnTx' as column name if used. 
For exponential fits at least three dose points (including the natural) should be provided.} \item{na.rm}{\link{logical} (\emph{with default}): excludes \code{NA} values from the data set prior to any further operations.} \item{mode}{\link{character} (\emph{with default}): selects calculation mode of the function. \itemize{ \item \code{"interpolation"} (default) calculates the De by interpolation, \item \code{"extrapolation"} calculates the De by extrapolation and \item \code{"alternate"} calculates no De and just fits the data points. } Please note that for option \code{"regenerative"} the first point is considered as natural dose} \item{fit.method}{\link{character} (\emph{with default}): function used for fitting. Possible options are: \itemize{ \item \code{LIN}, \item \code{QDR}, \item \code{EXP}, \item \verb{EXP OR LIN}, \item \code{EXP+LIN}, \item \code{EXP+EXP} or \item \code{GOK}. } See details.} \item{fit.force_through_origin}{\link{logical} (\emph{with default}) allow to force the fitted function through the origin. For \code{method = "EXP+EXP"} and \code{method = "GOK"} the function will go through the origin in either case, so this option will have no effect.} \item{fit.weights}{\link{logical} (\emph{with default}): option whether the fitting is done with or without weights. See details.} \item{fit.includingRepeatedRegPoints}{\link{logical} (\emph{with default}): includes repeated points for fitting (\code{TRUE}/\code{FALSE}).} \item{fit.NumberRegPoints}{\link{integer} (\emph{optional}): set number of regeneration points manually. By default the number of all (!) regeneration points is used automatically.} \item{fit.NumberRegPointsReal}{\link{integer} (\emph{optional}): if the number of regeneration points is provided manually, the value of the real, regeneration points = all points (repeated points) including reg 0, has to be inserted.} \item{fit.bounds}{\link{logical} (\emph{with default}): set lower fit bounds for all fitting parameters to 0. 
Limited for the use with the fit methods \code{EXP}, \code{EXP+LIN}, \verb{EXP OR LIN} and \code{GOK}. Argument to be inserted for experimental application only!} \item{NumberIterations.MC}{\link{integer} (\emph{with default}): number of Monte Carlo simulations for error estimation. See details.} \item{output.plot}{\link{logical} (\emph{with default}): plot output (\code{TRUE/FALSE}).} \item{output.plotExtended}{\link{logical} (\emph{with default}): If \code{TRUE}, 3 plots on one plot area are provided: \enumerate{ \item growth curve, \item histogram from Monte Carlo error simulation and \item a test dose response plot. } If \code{FALSE}, just the growth curve will be plotted. \strong{Requires:} \code{output.plot = TRUE}.} \item{output.plotExtended.single}{\link{logical} (\emph{with default}): single plot output (\code{TRUE/FALSE}) to allow for plotting the results in single plot windows. Requires \code{output.plot = TRUE} and \code{output.plotExtended = TRUE}.} \item{cex.global}{\link{numeric} (\emph{with default}): global scaling factor.} \item{txtProgressBar}{\link{logical} (\emph{with default}): enables or disables txtProgressBar. If \code{verbose = FALSE} also no txtProgressBar is shown.} \item{verbose}{\link{logical} (\emph{with default}): enables or disables terminal feedback.} \item{...}{Further arguments and graphical parameters to be passed. Note: Standard arguments will only be passed to the growth curve plot. 
Supported: \code{xlim}, \code{ylim}, \code{main}, \code{xlab}, \code{ylab}} } \value{ Along with a plot (if wanted) an \code{RLum.Results} object is returned; the slot \code{data} contains the following elements: \tabular{lll}{ \strong{DATA.OBJECT} \tab \strong{TYPE} \tab \strong{DESCRIPTION} \cr \code{..$De} : \tab \code{data.frame} \tab Table with De values \cr \code{..$De.MC} : \tab \code{numeric} \tab Table with De values from MC runs \cr \code{..$Fit} : \tab \link{nls} or \link{lm} \tab object from the fitting for \code{EXP}, \code{EXP+LIN} and \code{EXP+EXP}. In case of a resulting linear fit when using \code{LIN}, \code{QDR} or \verb{EXP OR LIN} \cr \code{..$Formula} : \tab \link{expression} \tab Fitting formula as R expression \cr \code{..$call} : \tab \code{call} \tab The original function call\cr } } \description{ A dose response curve is produced for luminescence measurements using a regenerative or additive protocol. The function supports interpolation and extrapolation to calculate the equivalent dose. } \details{ \strong{Fitting methods} For all options (except for the \code{LIN}, \code{QDR} and the \verb{EXP OR LIN}), the \link[minpack.lm:nlsLM]{minpack.lm::nlsLM} function with the \code{LM} (Levenberg-Marquardt algorithm) algorithm is used. Note: For historical reasons for the Monte Carlo simulations partly the function \link{nls} using the \code{port} algorithm is used. The solution is found by transforming the function or using \link{uniroot}. \code{LIN}: fits a linear function to the data using \link{lm}: \deqn{y = m*x+n} \code{QDR}: fits a quadratic function to the data using \link{lm}: \deqn{y = a + b * x + c * x^2} \code{EXP}: try to fit a function of the form \deqn{y = a*(1-exp(-(x+c)/b))} Parameters b and c are approximated by a linear fit using \link{lm}. Note: b = D0 \verb{EXP OR LIN}: works for some cases where an \code{EXP} fit fails. If the \code{EXP} fit fails, a \code{LIN} fit is done instead. 
\code{EXP+LIN}: tries to fit an exponential plus linear function of the form: \deqn{y = a*(1-exp(-(x+c)/b)+(g*x))} The De is calculated by iteration. \strong{Note:} In the context of luminescence dating, this function has no physical meaning. Therefore, no D0 value is returned. \code{EXP+EXP}: tries to fit a double exponential function of the form \deqn{y = (a1*(1-exp(-(x)/b1)))+(a2*(1-exp(-(x)/b2)))} This fitting procedure is not robust against wrong start parameters and should be further improved. \code{GOK}: tries to fit the general-order kinetics function after Guralnik et al. (2015) of the form of \deqn{y = a*(1-(1+(1/b)*x*c)^(-1/c))} where \strong{c > 0} is a kinetic order modifier (not to be confused with \strong{c} in \code{EXP} or \code{EXP+LIN}!). \strong{Fit weighting} If the option \code{fit.weights = TRUE} is chosen, weights are calculated using provided signal errors (Lx/Tx error): \deqn{fit.weights = 1/error/(sum(1/error))} \strong{Error estimation using Monte Carlo simulation} Error estimation is done using a Monte Carlo (MC) simulation approach. A set of Lx/Tx values is constructed by randomly drawing curve data sampled from normal distributions. The normal distribution is defined by the input values (mean = value, sd = value.error). Then, a growth curve fit is attempted for each dataset resulting in a new distribution of single De values. The \link{sd} of this distribution then becomes the error of the De. With increasing iterations, the error value becomes more stable. \strong{Note:} It may take some calculation time with increasing MC runs, especially for the composed functions (\code{EXP+LIN} and \code{EXP+EXP}).\cr Each error estimation is done with the function of the chosen fitting method. \strong{Subtitle information} To avoid plotting the subtitle information, provide an empty user mtext \code{mtext = ""}. To plot any other subtitle text, use \code{mtext}. 
} \section{Function version}{ 1.10.10 } \examples{ ##(1) plot growth curve for a dummy data.set and show De value data(ExampleData.LxTxData, envir = environment()) temp <- plot_GrowthCurve(LxTxData) get_RLum(temp) ##(1b) horizontal plot arrangement layout(mat = matrix(c(1,1,2,3), ncol = 2)) plot_GrowthCurve(LxTxData, output.plotExtended.single = TRUE) ##(1c) to access the fitting value try get_RLum(temp, data.object = "Fit") ##(2) plot the growth curve only - uncomment to use ##pdf(file = "~/Desktop/Growth_Curve_Dummy.pdf", paper = "special") plot_GrowthCurve(LxTxData) ##dev.off() ##(3) plot growth curve with pdf output - uncomment to use, single output ##pdf(file = "~/Desktop/Growth_Curve_Dummy.pdf", paper = "special") plot_GrowthCurve(LxTxData, output.plotExtended.single = TRUE) ##dev.off() ##(4) plot resulting function for given intervall x x <- seq(1,10000, by = 100) plot( x = x, y = eval(temp$Formula), type = "l" ) ##(5) plot using the 'extrapolation' mode LxTxData[1,2:3] <- c(0.5, 0.001) print(plot_GrowthCurve(LxTxData,mode = "extrapolation")) ##(6) plot using the 'alternate' mode LxTxData[1,2:3] <- c(0.5, 0.001) print(plot_GrowthCurve(LxTxData,mode = "alternate")) ##(7) import and fit test data set by Berger & Huntley 1989 QNL84_2_unbleached <- read.table(system.file("extdata/QNL84_2_unbleached.txt", package = "Luminescence")) results <- plot_GrowthCurve( QNL84_2_unbleached, mode = "extrapolation", plot = FALSE, verbose = FALSE) #calculate confidence interval for the parameters #as alternative error estimation confint(results$Fit, level = 0.68) \dontrun{ QNL84_2_bleached <- read.table(system.file("extdata/QNL84_2_bleached.txt", package = "Luminescence")) STRB87_1_unbleached <- read.table(system.file("extdata/STRB87_1_unbleached.txt", package = "Luminescence")) STRB87_1_bleached <- read.table(system.file("extdata/STRB87_1_bleached.txt", package = "Luminescence")) print( plot_GrowthCurve( QNL84_2_bleached, mode = "alternate", plot = FALSE, verbose = 
FALSE)$Fit) print( plot_GrowthCurve( STRB87_1_unbleached, mode = "alternate", plot = FALSE, verbose = FALSE)$Fit) print( plot_GrowthCurve( STRB87_1_bleached, mode = "alternate", plot = FALSE, verbose = FALSE)$Fit) } } \section{How to cite}{ Kreutzer, S., Dietze, M., 2020. plot_GrowthCurve(): Fit and plot a growth curve for luminescence data (Lx/Tx against dose). Function version 1.10.10. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Berger, G.W., Huntley, D.J., 1989. Test data for exponential fits. Ancient TL 7, 43-46. Guralnik, B., Li, B., Jain, M., Chen, R., Paris, R.B., Murray, A.S., Li, S.-H., Pagonis, P., Herman, F., 2015. Radiation-induced growth and isothermal decay of infrared-stimulated luminescence from feldspar. Radiation Measurements 81, 224-231. } \seealso{ \link{nls}, \linkS4class{RLum.Results}, \link{get_RLum}, \link[minpack.lm:nlsLM]{minpack.lm::nlsLM}, \link{lm}, \link{uniroot} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France)\cr Michael Dietze, GFZ Potsdam (Germany) , RLum Developer Team} Luminescence/man/convert_Daybreak2CSV.Rd0000644000176200001440000000361313604173244017655 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/convert_Daybreak2CSV.R \name{convert_Daybreak2CSV} \alias{convert_Daybreak2CSV} \title{Export measurement data produced by a Daybreak luminescence reader to CSV-files} \usage{ convert_Daybreak2CSV(file, ...) 
} \arguments{ \item{file}{\link{character} (\strong{required}): name of the Daybreak-file (TXT-file, DAT-file) to be converted to CSV-files} \item{...}{further arguments that will be passed to the function \link{read_Daybreak2R} and \link{write_RLum2CSV}} } \value{ The function returns either a CSV-file (or many of them) or for the option \code{export = FALSE} a list comprising objects of type \link{data.frame} and \link{matrix} } \description{ This function is a wrapper function around the functions \link{read_Daybreak2R} and \link{write_RLum2CSV} and it imports an Daybreak-file (TXT-file, DAT-file) and directly exports its content to CSV-files. If nothing is set for the argument \code{path} (\link{write_RLum2CSV}) the input folder will become the output folder. } \section{Function version}{ 0.1.0 } \examples{ \dontrun{ ##select your BIN-file file <- file.choose() ##convert convert_Daybreak2CSV(file) } } \seealso{ \linkS4class{RLum.Analysis}, \linkS4class{RLum.Data}, \linkS4class{RLum.Results}, \link[utils:write.table]{utils::write.table}, \link{write_RLum2CSV}, \link{read_Daybreak2R} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. convert_Daybreak2CSV(): Export measurement data produced by a Daybreak luminescence reader to CSV-files. Function version 0.1.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \keyword{IO} Luminescence/man/analyse_pIRIRSequence.Rd0000644000176200001440000001774013604173243020074 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/analyse_pIRIRSequence.R \name{analyse_pIRIRSequence} \alias{analyse_pIRIRSequence} \title{Analyse post-IR IRSL measurement sequences} \usage{ analyse_pIRIRSequence( object, signal.integral.min, signal.integral.max, background.integral.min, background.integral.max, dose.points = NULL, sequence.structure = c("TL", "IR50", "pIRIR225"), plot = TRUE, plot.single = FALSE, ... ) } \arguments{ \item{object}{\linkS4class{RLum.Analysis} or \link{list} of \linkS4class{RLum.Analysis} objects (\strong{required}): input object containing data for analysis. If a \link{list} is provided the function tries to iterate over the list.} \item{signal.integral.min}{\link{integer} (\strong{required}): lower bound of the signal integral. Provide this value as vector for different integration limits for the different IRSL curves.} \item{signal.integral.max}{\link{integer} (\strong{required}): upper bound of the signal integral. Provide this value as vector for different integration limits for the different IRSL curves.} \item{background.integral.min}{\link{integer} (\strong{required}): lower bound of the background integral. Provide this value as vector for different integration limits for the different IRSL curves.} \item{background.integral.max}{\link{integer} (\strong{required}): upper bound of the background integral. Provide this value as vector for different integration limits for the different IRSL curves.} \item{dose.points}{\link{numeric} (\emph{optional}): a numeric vector containing the dose points values. Using this argument overwrites dose point values in the signal curves.} \item{sequence.structure}{\link{vector} \link{character} (\emph{with default}): specifies the general sequence structure. 
Allowed values are \code{"TL"} and any \code{"IR"} combination (e.g., \code{"IR50"},\code{"pIRIR225"}). Additionally a parameter \code{"EXCLUDE"} is allowed to exclude curves from the analysis (Note: If a preheat without PMT measurement is used, i.e. preheat as none TL, remove the TL step.)} \item{plot}{\link{logical} (\emph{with default}): enables or disables plot output.} \item{plot.single}{\link{logical} (\emph{with default}): single plot output (\code{TRUE/FALSE}) to allow for plotting the results in single plot windows. Requires \code{plot = TRUE}.} \item{...}{further arguments that will be passed to the function \link{analyse_SAR.CWOSL} and \link{plot_GrowthCurve}. Furthermore, the arguments \code{main} (headers), \code{log} (IRSL curves), \code{cex} (control the size) and \code{mtext.outer} (additional text on the plot area) can be passed to influence the plotting. If the input is list, \code{main} can be passed as \link{vector} or \link{list}.} } \value{ Plots (\emph{optional}) and an \linkS4class{RLum.Results} object is returned containing the following elements: \tabular{lll}{ \strong{DATA.OBJECT} \tab \strong{TYPE} \tab \strong{DESCRIPTION} \cr \code{..$data} : \tab \code{data.frame} \tab Table with De values \cr \code{..$LnLxTnTx.table} : \tab \code{data.frame} \tab with the LnLxTnTx values \cr \code{..$rejection.criteria} : \tab \link{data.frame} \tab rejection criteria \cr \code{..$Formula} : \tab \link{list} \tab Function used for fitting of the dose response curve \cr \code{..$call} : \tab \link{call} \tab the original function call } The output should be accessed using the function \link{get_RLum}. } \description{ The function performs an analysis of post-IR IRSL sequences including curve fitting on \linkS4class{RLum.Analysis} objects. 
} \details{ To allow post-IR IRSL protocol (Thomsen et al., 2008) measurement analyses this function has been written as extended wrapper function for the function \link{analyse_SAR.CWOSL}, facilitating an entire sequence analysis in one run. With this, its functionality is strictly limited by the functionality of the function \link{analyse_SAR.CWOSL}. **Defining the sequence structure ** The argument \code{sequence.structure} expects a shortened pattern of your sequence structure and was mainly introduced to ease the use of the function. For example: If your measurement data contains the following curves: \code{TL}, \code{IRSL}, \code{IRSL}, \code{TL}, \code{IRSL}, \code{IRSL}, the sequence pattern in \code{sequence.structure} becomes \code{c('TL', 'IRSL', 'IRSL')}. The second part of your sequence for one cycle should be similar and can be discarded. If this is not the case (e.g., additional hotbleach) such curves have to be removed before using the function. \strong{If the input is a \code{list}} If the input is a list of RLum.Analysis-objects, every argument can be provided as list to allow for different sets of parameters for every single input element. For further information see \link{analyse_SAR.CWOSL}. } \note{ Best graphical output can be achieved by using the function \code{pdf} with the following options: \code{pdf(file = "", height = 15, width = 15)} } \section{Function version}{ 0.2.4 } \examples{ ### NOTE: For this example existing example data are used. These data are non pIRIR data. 
### ##(1) Compile example data set based on existing example data (SAR quartz measurement) ##(a) Load example data data(ExampleData.BINfileData, envir = environment()) ##(b) Transform the values from the first position in a RLum.Analysis object object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) ##(c) Grep curves and exclude the last two (one TL and one IRSL) object <- get_RLum(object, record.id = c(-29,-30)) ##(d) Define new sequence structure and set new RLum.Analysis object sequence.structure <- c(1,2,2,3,4,4) sequence.structure <- as.vector(sapply(seq(0,length(object)-1,by = 4), function(x){sequence.structure + x})) object <- sapply(1:length(sequence.structure), function(x){ object[[sequence.structure[x]]] }) object <- set_RLum(class = "RLum.Analysis", records = object, protocol = "pIRIR") ##(2) Perform pIRIR analysis (for this example with quartz OSL data!) ## Note: output as single plots to avoid problems with this example results <- analyse_pIRIRSequence(object, signal.integral.min = 1, signal.integral.max = 2, background.integral.min = 900, background.integral.max = 1000, fit.method = "EXP", sequence.structure = c("TL", "pseudoIRSL1", "pseudoIRSL2"), main = "Pseudo pIRIR data set based on quartz OSL", plot.single = TRUE) ##(3) Perform pIRIR analysis (for this example with quartz OSL data!) ## Alternative for PDF output, uncomment and complete for usage \dontrun{ tempfile <- tempfile(fileext = ".pdf") pdf(file = tempfile, height = 15, width = 15) results <- analyse_pIRIRSequence(object, signal.integral.min = 1, signal.integral.max = 2, background.integral.min = 900, background.integral.max = 1000, fit.method = "EXP", main = "Pseudo pIRIR data set based on quartz OSL") dev.off() } } \section{How to cite}{ Kreutzer, S., 2020. analyse_pIRIRSequence(): Analyse post-IR IRSL measurement sequences. Function version 0.2.4. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. 
Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Murray, A.S., Wintle, A.G., 2000. Luminescence dating of quartz using an improved single-aliquot regenerative-dose protocol. Radiation Measurements 32, 57-73. \doi{10.1016/S1350-4487(99)00253-X} Thomsen, K.J., Murray, A.S., Jain, M., Boetter-Jensen, L., 2008. Laboratory fading rates of various luminescence signals from feldspar-rich sediment extracts. Radiation Measurements 43, 1474-1486. \doi{10.1016/j.radmeas.2008.06.002} } \seealso{ \link{analyse_SAR.CWOSL}, \link{calc_OSLLxTxRatio}, \link{plot_GrowthCurve}, \linkS4class{RLum.Analysis}, \linkS4class{RLum.Results} \link{get_RLum} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \keyword{datagen} \keyword{plot} Luminescence/man/ExampleData.RLum.Analysis.Rd0000644000176200001440000000247613604173243020567 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \name{ExampleData.RLum.Analysis} \alias{ExampleData.RLum.Analysis} \alias{IRSAR.RF.Data} \title{Example data as \linkS4class{RLum.Analysis} objects} \format{\code{IRSAR.RF.Data}: IRSAR.RF.Data on coarse grain feldspar Each object contains data needed for the given protocol analysis.} \source{ \strong{IRSAR.RF.Data} These data were kindly provided by Tobias Lauer and Matthias Krbetschek. \tabular{ll}{ Lab: \tab Luminescence Laboratory TU Bergakademie Freiberg\cr Lab-Code: \tab ZEU/SA1\cr Location: \tab Zeuchfeld (Zeuchfeld Sandur; Saxony-Anhalt/Germany)\cr Material: \tab K-feldspar (130-200 \eqn{\mu}m)\cr Reference: \tab Kreutzer et al. (2014) } } \description{ Collection of different \linkS4class{RLum.Analysis} objects for protocol analysis. 
} \section{Version}{ 0.1 } \examples{ ##load data data(ExampleData.RLum.Analysis, envir = environment()) ##plot data plot_RLum(IRSAR.RF.Data) } \references{ \strong{IRSAR.RF.Data} Kreutzer, S., Lauer, T., Meszner, S., Krbetschek, M.R., Faust, D., Fuchs, M., 2014. Chronology of the Quaternary profile Zeuchfeld in Saxony-Anhalt / Germany - a preliminary luminescence dating study. Zeitschrift fuer Geomorphologie 58, 5-26. doi: 10.1127/0372-8854/2012/S-00112 } \keyword{datasets} Luminescence/man/Analyse_SAR.OSLdata.Rd0000644000176200001440000001510013604173243017315 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Analyse_SAR.OSLdata.R \name{Analyse_SAR.OSLdata} \alias{Analyse_SAR.OSLdata} \title{Analyse SAR CW-OSL measurements.} \usage{ Analyse_SAR.OSLdata( input.data, signal.integral, background.integral, position, run, set, dtype, keep.SEL = FALSE, info.measurement = "unkown measurement", output.plot = FALSE, output.plot.single = FALSE, cex.global = 1, ... ) } \arguments{ \item{input.data}{\linkS4class{Risoe.BINfileData} (\strong{required}): input data from a Risoe BIN file, produced by the function \link{read_BIN2R}.} \item{signal.integral}{\link{vector} (\strong{required}): channels used for the signal integral, e.g. \code{signal.integral=c(1:2)}} \item{background.integral}{\link{vector} (\strong{required}): channels used for the background integral, e.g. \code{background.integral=c(85:100)}} \item{position}{\link{vector} (\emph{optional}): reader positions that want to be analysed (e.g. \code{position=c(1:48)}. Empty positions are automatically omitted. If no value is given all positions are analysed by default.} \item{run}{\link{vector} (\emph{optional}): range of runs used for the analysis. If no value is given the range of the runs in the sequence is deduced from the \code{Risoe.BINfileData} object.} \item{set}{\link{vector} (\emph{optional}): range of sets used for the analysis. 
If no value is given the range of the sets in the sequence is deduced from the \code{Risoe.BINfileData} object.} \item{dtype}{\link{character} (\emph{optional}): allows to further limit the curves by their data type (\code{DTYPE}), e.g., \code{dtype = c("Natural", "Dose")} limits the curves to this two data types. By default all values are allowed. See \linkS4class{Risoe.BINfileData} for allowed data types.} \item{keep.SEL}{\link{logical} (default): option allowing to use the \code{SEL} element of the \linkS4class{Risoe.BINfileData} manually. \strong{NOTE:} In this case any limitation provided by \code{run}, \code{set} and \code{dtype} are ignored!} \item{info.measurement}{\link{character} (\emph{with default}): option to provide information about the measurement on the plot output (e.g. name of the BIN or BINX file).} \item{output.plot}{\link{logical} (\emph{with default}): plot output (\code{TRUE/FALSE})} \item{output.plot.single}{\link{logical} (\emph{with default}): single plot output (\code{TRUE/FALSE}) to allow for plotting the results in single plot windows. Requires \code{output.plot = TRUE}.} \item{cex.global}{\link{numeric} (\emph{with default}): global scaling factor.} \item{...}{further arguments that will be passed to the function \link{calc_OSLLxTxRatio} (supported: \code{background.count.distribution}, \code{sigmab}, \code{sig0}; e.g., for instrumental error) and can be used to adjust the plot. Supported" \code{mtext}, \code{log}} } \value{ A plot (\emph{optional}) and \link{list} is returned containing the following elements: \item{LnLxTnTx}{\link{data.frame} of all calculated Lx/Tx values including signal, background counts and the dose points.} \item{RejectionCriteria}{\link{data.frame} with values that might by used as rejection criteria. NA is produced if no R0 dose point exists.} \item{SARParameters}{\link{data.frame} of additional measurement parameters obtained from the BIN file, e.g. 
preheat or read temperature (not valid for all types of measurements).} } \description{ The function analyses SAR CW-OSL curve data and provides a summary of the measured data for every position. The output of the function is optimised for SAR OSL measurements on quartz. } \details{ The function works only for standard SAR protocol measurements introduced by Murray and Wintle (2000) with CW-OSL curves. For the calculation of the Lx/Tx value the function \link{calc_OSLLxTxRatio} is used. \strong{Provided rejection criteria} \verb{[recycling ratio]}: calculated for every repeated regeneration dose point. \verb{[recuperation]}: recuperation rate calculated by comparing the Lx/Tx values of the zero regeneration point with the Ln/Tn value (the Lx/Tx ratio of the natural signal). For methodological background see Aitken and Smith (1988) \verb{[IRSL/BOSL]}: the integrated counts (\code{signal.integral}) of an IRSL curve are compared to the integrated counts of the first regenerated dose point. It is assumed that IRSL curves got the same dose as the first regenerated dose point. \strong{Note:} This is not the IR depletion ratio described by Duller (2003). } \note{ Rejection criteria are calculated but not considered during the analysis to discard values. \strong{The analysis of IRSL data is not directly supported}. You may want to consider using the functions \link{analyse_SAR.CWOSL} or \link{analyse_pIRIRSequence} instead. \strong{The development of this function will not be continued. 
We recommend to use the function \link{analyse_SAR.CWOSL} or instead.} } \section{Function version}{ 0.2.17 } \examples{ ##load data data(ExampleData.BINfileData, envir = environment()) ##analyse data output <- Analyse_SAR.OSLdata(input.data = CWOSL.SAR.Data, signal.integral = c(1:5), background.integral = c(900:1000), position = c(1:1), output.plot = TRUE) ##combine results relevant for further analysis output.SAR <- data.frame(Dose = output$LnLxTnTx[[1]]$Dose, LxTx = output$LnLxTnTx[[1]]$LxTx, LxTx.Error = output$LnLxTnTx[[1]]$LxTx.Error) output.SAR } \section{How to cite}{ Kreutzer, S., Fuchs, M.C., 2020. Analyse_SAR.OSLdata(): Analyse SAR CW-OSL measurements.. Function version 0.2.17. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Aitken, M.J. and Smith, B.W., 1988. Optical dating: recuperation after bleaching. Quaternary Science Reviews 7, 387-393. Duller, G., 2003. Distinguishing quartz and feldspar in single grain luminescence measurements. Radiation Measurements, 37 (2), 161-165. Murray, A.S. and Wintle, A.G., 2000. Luminescence dating of quartz using an improved single-aliquot regenerative-dose protocol. Radiation Measurements 32, 57-73. } \seealso{ \link{calc_OSLLxTxRatio}, \linkS4class{Risoe.BINfileData}, \link{read_BIN2R}, \link{plot_GrowthCurve} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr Margret C. 
Fuchs, HZDR, Freiberg (Germany) , RLum Developer Team} \keyword{datagen} \keyword{dplot} Luminescence/man/verify_SingleGrainData.Rd0000644000176200001440000001302313604173246020313 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/verify_SingleGrainData.R \name{verify_SingleGrainData} \alias{verify_SingleGrainData} \title{Verify single grain data sets and check for invalid grains, i.e. zero-light level grains} \usage{ verify_SingleGrainData( object, threshold = 10, cleanup = FALSE, cleanup_level = "aliquot", verbose = TRUE, plot = FALSE ) } \arguments{ \item{object}{\linkS4class{Risoe.BINfileData} or \linkS4class{RLum.Analysis} (\strong{required}): input object. The function also accepts a list with objects of allowed type.} \item{threshold}{\link{numeric} (\emph{with default}): numeric threshold value for the allowed difference between the \code{mean} and the \code{var} of the count values (see details)} \item{cleanup}{\link{logical} (\emph{with default}): if set to \code{TRUE} curves identified as zero light level curves are automatically removed. Output is an object of the same type as the input, i.e. either \linkS4class{Risoe.BINfileData} or \linkS4class{RLum.Analysis}} \item{cleanup_level}{\link{character} (\emph{with default}): selects the level for the cleanup of the input data sets. Two options are allowed: \code{"curve"} or \code{"aliquot"}: \itemize{ \item If \code{"curve"} is selected every single curve marked as \code{invalid} is removed. \item If \code{"aliquot"} is selected, curves of one aliquot (grain or disc) can be marked as invalid, but will not be removed. An aliquot will be only removed if all curves of this aliquot are marked as invalid. 
}} \item{verbose}{\link{logical} (\emph{with default}): enables or disables the terminal feedback} \item{plot}{\link{logical} (\emph{with default}): enables or disables the graphical feedback} } \value{ The function returns -----------------------------------\cr \verb{[ NUMERICAL OUTPUT ]}\cr -----------------------------------\cr \strong{\code{RLum.Results}}-object \strong{slot:} \strong{\verb{@data}} \tabular{lll}{ \strong{Element} \tab \strong{Type} \tab \strong{Description}\cr \verb{$unique_pairs} \tab \code{data.frame} \tab the unique position and grain pairs \cr \verb{$selection_id} \tab \code{numeric} \tab the selection as record ID \cr \verb{$selection_full} \tab \code{data.frame} \tab implemented models used in the baSAR-model core \cr } \strong{slot:} \strong{\verb{@info}} The original function call \strong{Output variation} For \code{cleanup = TRUE} the same object as the input is returned, but cleaned up (invalid curves were removed). This means: Either an \linkS4class{Risoe.BINfileData} or an \linkS4class{RLum.Analysis} object is returned in such cases. An \linkS4class{Risoe.BINfileData} object can be exported to a BIN-file by using the function \link{write_R2BIN}. } \description{ This function tries to identify automatically zero-light level curves (grains) from single grain data measurements. } \details{ \strong{How does the method work?} The function compares the expected values (\eqn{E(X)}) and the variance (\eqn{Var(X)}) of the count values for each curve. Assuming that the background roughly follows a Poisson distribution the absolute difference of both values should be zero or at least around zero as \deqn{E(x) = Var(x) = \lambda} Thus the function checks for: \deqn{abs(E(x) - Var(x)) >= \Theta} With \eqn{\Theta} an arbitrary, user defined, threshold. Values above the threshold indicate curves comprising a signal. 
Note: the absolute difference of \eqn{E(X)} and \eqn{Var(x)} instead of the ratio was chosen as both terms can become 0 which would result in 0 or \code{Inf}, if the ratio is calculated. } \note{ This function can work with \linkS4class{Risoe.BINfileData} objects or \linkS4class{RLum.Analysis} objects (or a list of it). However, the function is highly optimised for \linkS4class{Risoe.BINfileData} objects as it make sense to remove identify invalid grains before the conversion to an \linkS4class{RLum.Analysis} object. The function checking for invalid curves works rather robust and it is likely that Reg0 curves within a SAR cycle are removed as well. Therefore it is strongly recommended to use the argument \code{cleanup = TRUE} carefully. } \section{Function version}{ 0.2.1 } \examples{ ##01 - basic example I ##just show how to apply the function data(ExampleData.XSYG, envir = environment()) ##verify and get data.frame out of it verify_SingleGrainData(OSL.SARMeasurement$Sequence.Object)$selection_full ##02 - basic example II data(ExampleData.BINfileData, envir = environment()) id <- verify_SingleGrainData(object = CWOSL.SAR.Data, cleanup_level = "aliquot")$selection_id \dontrun{ ##03 - advanced example I ##importing and exporting a BIN-file ##select and import file file <- file.choose() object <- read_BIN2R(file) ##remove invalid aliquots(!) object <- verify_SingleGrainData(object, cleanup = TRUE) ##export to new BIN-file write_R2BIN(object, paste0(dirname(file),"/", basename(file), "_CLEANED.BIN")) } } \seealso{ \linkS4class{Risoe.BINfileData}, \linkS4class{RLum.Analysis}, \link{write_R2BIN}, \link{read_BIN2R} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. verify_SingleGrainData(): Verify single grain data sets and check for invalid grains, i.e. zero-light level grains. Function version 0.2.1. 
In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{datagen} \keyword{manip} Luminescence/man/length_RLum.Rd0000644000176200001440000000267513604173244016164 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/length_RLum.R \name{length_RLum} \alias{length_RLum} \title{General accessor function for RLum S4 class objects} \usage{ length_RLum(object) } \arguments{ \item{object}{\linkS4class{RLum} (\strong{required}): S4 object of class \code{RLum}} } \value{ Return is the same as input objects as provided in the list. } \description{ Function calls object-specific get functions for RLum S4 class objects. } \details{ The function provides a generalised access point for specific \linkS4class{RLum} objects.\cr Depending on the input object, the corresponding get function will be selected. Allowed arguments can be found in the documentations of the corresponding \linkS4class{RLum} class. } \section{Function version}{ 0.1.0 } \seealso{ \linkS4class{RLum.Data.Curve}, \linkS4class{RLum.Data.Image}, \linkS4class{RLum.Data.Spectrum}, \linkS4class{RLum.Analysis}, \linkS4class{RLum.Results} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. length_RLum(): General accessor function for RLum S4 class objects. Function version 0.1.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \keyword{utilities} Luminescence/man/set_Risoe.BINfileData.Rd0000644000176200001440000000266713604173245017743 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/set_Risoe.BINfileData.R \name{set_Risoe.BINfileData} \alias{set_Risoe.BINfileData} \title{General accessor function for RLum S4 class objects} \usage{ set_Risoe.BINfileData( METADATA = data.frame(), DATA = list(), .RESERVED = list() ) } \arguments{ \item{METADATA}{x} \item{DATA}{x} \item{.RESERVED}{x} } \value{ Return is the same as input objects as provided in the list. } \description{ Function calls object-specific get functions for RisoeBINfileData S4 class objects. } \details{ The function provides a generalised access point for specific \linkS4class{Risoe.BINfileData} objects.\cr Depending on the input object, the corresponding get function will be selected. Allowed arguments can be found in the documentations of the corresponding \linkS4class{Risoe.BINfileData} class. } \section{Function version}{ 0.1 } \seealso{ \linkS4class{Risoe.BINfileData} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. set_Risoe.BINfileData(): General accessor function for RLum S4 class objects. Function version 0.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \keyword{utilities} Luminescence/man/ExampleData.Fading.Rd0000644000176200001440000000641013604173243017306 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \name{ExampleData.Fading} \alias{ExampleData.Fading} \title{Example data for feldspar fading measurements} \format{A \link{list} with two elements, each containing a further \link{list} of \link{data.frame}s containing the data on the fading and equivalent dose measurements: \describe{ \verb{$fading.data}: A named \link{list} of \link{data.frame}s, each having three named columns (\verb{LxTx, LxTx.error, timeSinceIrradiation}).\cr \code{..$IR50}: Fading data of the IR50 signal.\cr \code{..$IR100}: Fading data of the IR100 signal.\cr \code{..$IR150}: Fading data of the IR150 signal.\cr \code{..$IR225}: Fading data of the IR225 signal.\cr \verb{$equivalentDose.data}: A named of \link{data.frame}s, each having three named columns (\verb{dose, LxTx, LxTx.error}).\cr \code{..$IR50}: Equivalent dose measurement data of the IR50 signal.\cr \code{..$IR100}: Equivalent dose measurement data of the IR100 signal.\cr \code{..$IR150}: Equivalent dose measurement data of the IR150 signal.\cr \code{..$IR225}: Equivalent dose measurement data of the IR225 signal.\cr }} \source{ These data were kindly provided by Georgina E. King. Detailed information on the sample UNIL/NB123 can be found in the reference given below. The raw data can be found in the accompanying supplementary information. } \description{ Example data set for fading measurements of the IR50, IR100, IR150 and IR225 feldspar signals of sample UNIL/NB123. It further contains regular equivalent dose measurement data of the same sample, which can be used to apply a fading correction to. 
} \examples{ ## Load example data data("ExampleData.Fading", envir = environment()) ## Get fading measurement data of the IR50 signal IR50_fading <- ExampleData.Fading$fading.data$IR50 head(IR50_fading) ## Determine g-value and rho' for the IR50 signal IR50_fading.res <- analyse_FadingMeasurement(IR50_fading) ## Show g-value and rho' results gval <- get_RLum(IR50_fading.res) rhop <- get_RLum(IR50_fading.res, "rho_prime") gval rhop ## Get LxTx values of the IR50 DE measurement IR50_De.LxTx <- ExampleData.Fading$equivalentDose.data$IR50 ## Calculate the De of the IR50 signal IR50_De <- plot_GrowthCurve(IR50_De.LxTx, mode = "interpolation", fit.method = "EXP") ## Extract the calculated De and its error IR50_De.res <- get_RLum(IR50_De) De <- c(IR50_De.res$De, IR50_De.res$De.Error) ## Apply fading correction (age conversion greatly simplified) IR50_Age <- De / 7.00 IR50_Age.corr <- calc_FadingCorr(IR50_Age, g_value = IR50_fading.res) } \references{ King, G.E., Herman, F., Lambert, R., Valla, P.G., Guralnik, B., 2016. Multi-OSL-thermochronometry of feldspar. Quaternary Geochronology 33, 76-87. doi:10.1016/j.quageo.2016.01.004 \strong{Details} \tabular{ll}{ Lab: \tab University of Lausanne \cr Lab-Code: \tab UNIL/NB123 \cr Location: \tab Namche Barwa (eastern Himalaya)\cr Material: \tab Coarse grained (180-212 microns) potassium feldspar \cr Units: \tab Values are given in seconds \cr Lab Dose Rate: \tab Dose rate of the beta-source at measurement ca. 
0.1335 +/- 0.004 Gy/s \cr Environmental Dose Rate: \tab 7.00 +/- 0.92 Gy/ka (includes internal dose rate) } } \keyword{datasets} Luminescence/man/report_RLum.Rd0000644000176200001440000001670713604173245016220 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/report_RLum.R \name{report_RLum} \alias{report_RLum} \title{Create a HTML-report for (RLum) objects} \usage{ report_RLum( object, file = tempfile(), title = "RLum.Report", compact = TRUE, timestamp = TRUE, show_report = TRUE, launch.browser = FALSE, css.file = NULL, quiet = TRUE, clean = TRUE, ... ) } \arguments{ \item{object}{(\strong{required}): The object to be reported on, preferably of any \code{RLum}-class.} \item{file}{\link{character} (\emph{with default}): A character string naming the output file. If no filename is provided a temporary file is created.} \item{title}{\link{character} (\emph{with default}): A character string specifying the title of the document.} \item{compact}{\link{logical} (\emph{with default}): When \code{TRUE} the following report components are hidden: \verb{@.pid}, \verb{@.uid}, \code{'Object structure'}, \code{'Session Info'} and only the first and last 5 rows of long matrices and data frames are shown. 
See details.} \item{timestamp}{\link{logical} (\emph{with default}): \code{TRUE} to add a timestamp to the filename (suffix).} \item{show_report}{\link{logical} (\emph{with default}): If set to \code{TRUE} the function tries to display the report output in the local viewer, e.g., within \emph{RStudio} after rendering.} \item{launch.browser}{\link{logical} (\emph{with default}): \code{TRUE} to open the HTML file in the system's default web browser after it has been rendered.} \item{css.file}{\link{character} (\emph{optional}): Path to a CSS file to change the default styling of the HTML document.} \item{quiet}{\link{logical} (\emph{with default}): \code{TRUE} to supress printing of the pandoc command line.} \item{clean}{\link{logical} (\emph{with default}): \code{TRUE} to clean intermediate files created during rendering.} \item{...}{further arguments passed to or from other methods and to control the document's structure (see details).} } \value{ Writes a HTML and .Rds file. } \description{ Create a HTML-report for (RLum) objects } \details{ This function creates a HTML-report for a given object, listing its complete structure and content. The object itself is saved as a serialised .Rds file. The report file serves both as a convenient way of browsing through objects with complex data structures as well as a mean of properly documenting and saving objects. The HTML report is created with \link[rmarkdown:render]{rmarkdown::render} and has the following structure: \tabular{ll}{ \strong{Section} \tab \strong{Description} \cr \code{Header} \tab A summary of general characteristics of the object \cr \verb{Object content} \tab A comprehensive list of the complete structure and content of the provided object. 
\cr \verb{Object structure} \tab Summary of the objects structure given as a table \cr \code{File} \tab Information on the saved RDS file \cr \verb{Session Info} \tab Captured output from \code{sessionInfo()} \cr \code{Plots} \tab (\emph{optional}) For \code{RLum-class} objects a variable number of plots \cr } The structure of the report can be controlled individually by providing one or more of the following arguments (all \code{logical}): \tabular{ll}{ \strong{Argument} \tab \strong{Description} \cr \code{header} \tab Hide or show general information on the object \cr \code{main} \tab Hide or show the object's content \cr \code{structure} \tab Hide or show object's structure \cr \code{rds} \tab Hide or show information on the saved RDS file \cr \code{session} \tab Hide or show the session info \cr \code{plot} \tab Hide or show the plots (depending on object) \cr } Note that these arguments have higher precedence than \code{compact}. Further options that can be provided via the \code{...} argument: \tabular{ll}{ \strong{Argument} \tab \strong{Description} \cr \code{short_table} \tab If \code{TRUE} only show the first and last 5 rows of lang tables. \cr \code{theme} \tab Specifies the Bootstrap theme to use for the report. Valid themes include "default", "cerulean", "journal", "flatly", "readable", "spacelab", "united", "cosmo", "lumen", "paper", "sandstone", "simplex", and "yeti". \cr \code{highlight} \tab Specifies the syntax highlighting style. Supported styles include "default", "tango", "pygments", "kate", "monochrome", "espresso", "zenburn", "haddock", and "textmate". \cr \code{css} \tab \code{TRUE} or \code{FALSE} to enable/disable custom CSS styling \cr } The following arguments can be used to customise the report via CSS (Cascading Style Sheets): \tabular{ll}{ \strong{Argument} \tab \strong{Description} \cr \code{font_family} \tab Define the font family of the HTML document (default: arial) \cr \code{headings_size} \tab Size of the \verb{

} to \verb{

} tags used to define HTML headings (default: 166\%). \cr \code{content_color} \tab Color of the object's content (default: #a72925). \cr } Note that these arguments must all be of class \link{character} and follow standard CSS syntax. For exhaustive CSS styling you can provide a custom CSS file for argument \code{css.file}. CSS styling can be turned of using \code{css = FALSE}. } \note{ This function requires the R packages 'rmarkdown', 'pander' and 'rstudioapi'. } \section{Function version}{ 0.1.1 } \examples{ \dontrun{ ## Example: RLum.Results ---- # load example data data("ExampleData.DeValues") # apply the MAM-3 age model and save results mam <- calc_MinDose(ExampleData.DeValues$CA1, sigmab = 0.2) # create the HTML report report_RLum(object = mam, file = "~/CA1_MAM.Rmd", timestamp = FALSE, title = "MAM-3 for sample CA1") # when creating a report the input file is automatically saved to a # .Rds file (see saveRDS()). mam_report <- readRDS("~/CA1_MAM.Rds") all.equal(mam, mam_report) ## Example: Temporary file & Viewer/Browser ---- # (a) # Specifying a filename is not necessarily required. If no filename is provided, # the report is rendered in a temporary file. If you use the RStudio IDE, the # temporary report is shown in the interactive Viewer pane. report_RLum(object = mam) # (b) # Additionally, you can view the HTML report in your system's default web browser. 
report_RLum(object = mam, launch.browser = TRUE) ## Example: RLum.Analysis ---- data("ExampleData.RLum.Analysis") # create the HTML report (note that specifying a file # extension is not necessary) report_RLum(object = IRSAR.RF.Data, file = "~/IRSAR_RF") ## Example: RLum.Data.Curve ---- data.curve <- get_RLum(IRSAR.RF.Data)[[1]] # create the HTML report report_RLum(object = data.curve, file = "~/Data_Curve") ## Example: Any other object ---- x <- list(x = 1:10, y = runif(10, -5, 5), z = data.frame(a = LETTERS[1:20], b = dnorm(0:9)), NA) report_RLum(object = x, file = "~/arbitray_list") } } \seealso{ \link[rmarkdown:render]{rmarkdown::render}, \link[pander:pander_return]{pander::pander_return}, \link[pander:openFileInOS]{pander::openFileInOS}, \link[rstudioapi:viewer]{rstudioapi::viewer}, \link{browseURL} } \author{ Christoph Burow, University of Cologne (Germany), Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France) \cr , RLum Developer Team} \section{How to cite}{ Burow, C., Kreutzer, S., 2020. report_RLum(): Create a HTML-report for (RLum) objects. Function version 0.1.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } Luminescence/man/ExampleData.DeValues.Rd0000644000176200001440000000437713604173243017640 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \name{ExampleData.DeValues} \alias{ExampleData.DeValues} \title{Example De data sets for the package Luminescence} \format{A \link{list} with two elements, each containing a two column \link{data.frame}: \describe{ \verb{$BT998}: De and De error values for a fine grain quartz sample from a loess section in Rottewitz. 
\verb{$CA1}: Single grain De and De error values for a coarse grain quartz sample from a fluvial deposit in the rock shelter of Cueva Anton }} \description{ Equivalent dose (De) values measured for a fine grain quartz sample from a loess section in Rottewitz (Saxony/Germany) and for a coarse grain quartz sample from a fluvial deposit in the rock shelter of Cueva Anton (Murcia/Spain). } \examples{ ##(1) plot values as histogram data(ExampleData.DeValues, envir = environment()) plot_Histogram(ExampleData.DeValues$BT998, xlab = "De [s]") ##(2) plot values as histogram (with second to gray conversion) data(ExampleData.DeValues, envir = environment()) De.values <- Second2Gray(ExampleData.DeValues$BT998, dose.rate = c(0.0438, 0.0019)) plot_Histogram(De.values, xlab = "De [Gy]") } \references{ \strong{BT998} Unpublished data \strong{CA1} Burow, C., Kehl, M., Hilgers, A., Weniger, G.-C., Angelucci, D., Villaverde, V., Zapata, J. and Zilhao, J. (2015). Luminescence dating of fluvial deposits in the rock shelter of Cueva Anton, Spain. Geochronometria 52, 107-125. \strong{BT998} \tabular{ll}{ Lab: \tab Luminescence Laboratory Bayreuth\cr Lab-Code: \tab BT998\cr Location: \tab Rottewitz (Saxony/Germany)\cr Material: \tab Fine grain quartz measured on aluminum discs on a Risoe TL/OSL DA-15 reader\cr Units: \tab Values are given in seconds \cr Dose Rate: \tab Dose rate of the beta-source at measurement ca. 
0.0438 Gy/s +/- 0.0019 Gy/s\cr Measurement Date: \tab 2012-01-27 } \strong{CA1} \tabular{ll}{ Lab: \tab Cologne Luminescence Laboratory (CLL)\cr Lab-Code: \tab C-L2941\cr Location: \tab Cueva Anton (Murcia/Spain)\cr Material: \tab Coarse grain quartz (200-250 microns) measured on single grain discs on a Risoe TL/OSL DA-20 reader\cr Units: \tab Values are given in Gray \cr Measurement Date: \tab 2012 } } \keyword{datasets} Luminescence/man/ExampleData.LxTxOSLData.Rd0000644000176200001440000000113613604173243020165 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \name{ExampleData.LxTxOSLData} \alias{ExampleData.LxTxOSLData} \alias{Lx.data} \alias{Tx.data} \title{Example Lx and Tx curve data from an artificial OSL measurement} \format{Two \code{\link{data.frame}}s containing time and count values.} \source{ Arbitrary OSL measurement. } \description{ Lx and Tx data of continous wave (CW-) OSL signal curves. } \examples{ ##load data data(ExampleData.LxTxOSLData, envir = environment()) ##plot data plot(Lx.data) plot(Tx.data) } \references{ unpublished data } Luminescence/man/BaseDataSet.ConversionFactors.Rd0000644000176200001440000000253613604173243021525 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \name{BaseDataSet.ConversionFactors} \alias{BaseDataSet.ConversionFactors} \title{Base data set of dose-rate conversion factors} \format{A \code{\link{list}} with three elements with dose-rate conversion factors sorted by article and radiation type (alpha, beta, gamma): \tabular{ll}{ \code{AdamiecAitken1998}: \tab Conversion factors from Tables 5 and 6 \cr \code{Guerinetal2011}: \tab Conversion factors from Tables 1, 2 and 3 \cr \code{Liritzisetal2013}: \tab Conversion factors from Tables 1, 2 and 3 \cr }} \source{ All gamma conversion factors were carefully read from the tables given in the references above. 
} \description{ Collection of published dose-rate conversion factors to convert concentrations of radioactive isotopes to dose rate values. } \section{Version}{ 0.1 } \examples{ ## Load data data("BaseDataSet.ConversionFactors") } \references{ Guerin, G., Mercier, N., Adamiec, G., 2011. Dose-rate conversion factors: update. Ancient TL, 29, 5-8. Adamiec, G., Aitken, M.J., 1998. Dose-rate conversion factors: update. Ancient TL 16, 37-46. Liritzis, I., Stamoulis, K., Papachristodoulou, C., Ioannides, K., 2013. A re-evaluation of radiation dose-rate conversion factors. Mediterranean Archaeology and Archaeometry 13, 1-15. } \keyword{datasets} Luminescence/man/ExampleData.LxTxData.Rd0000644000176200001440000000143313604173243017607 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \name{ExampleData.LxTxData} \alias{ExampleData.LxTxData} \alias{LxTxData} \title{Example Lx/Tx data from CW-OSL SAR measurement} \format{A \code{\link{data.frame}} with 4 columns (Dose, LxTx, LxTx.Error, TnTx).} \source{ \tabular{ll}{ Lab: \tab Luminescence Laboratory Bayreuth\cr Lab-Code: \tab BT607\cr Location: \tab Ostrau (Saxony-Anhalt/Germany)\cr Material: \tab Middle grain (38-63 \eqn{\mu}m) quartz measured on a Risoe TL/OSL DA-15 reader. } } \description{ LxTx data from a SAR measurement for the package Luminescence. } \examples{ ## plot Lx/Tx data vs dose [s] data(ExampleData.LxTxData, envir = environment()) plot(LxTxData$Dose,LxTxData$LxTx) } \references{ unpublished data } Luminescence/man/RLum.Data-class.Rd0000644000176200001440000000126613604173243016570 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/RLum.Data-class.R \docType{class} \name{RLum.Data-class} \alias{RLum.Data-class} \title{Class \code{"RLum.Data"}} \description{ Generalized virtual data class for luminescence data. } \note{ Just a virtual class. 
} \section{Objects from the Class}{ A virtual Class: No objects can be created from it. } \section{Class version}{ 0.2.1 } \examples{ showClass("RLum.Data") } \seealso{ \linkS4class{RLum}, \linkS4class{RLum.Data.Curve}, \linkS4class{RLum.Data.Spectrum} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \keyword{classes} \keyword{internal} Luminescence/man/plot_RadialPlot.Rd0000644000176200001440000002554713604173245017041 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_RadialPlot.R \name{plot_RadialPlot} \alias{plot_RadialPlot} \title{Function to create a Radial Plot} \usage{ plot_RadialPlot( data, na.rm = TRUE, log.z = TRUE, central.value, centrality = "mean.weighted", mtext, summary, summary.pos, legend, legend.pos, stats, rug = FALSE, plot.ratio, bar.col, y.ticks = TRUE, grid.col, line, line.col, line.label, output = FALSE, ... ) } \arguments{ \item{data}{\link{data.frame} or \linkS4class{RLum.Results} object (\strong{required}): for \code{data.frame} two columns: De (\code{data[,1]}) and De error (\code{data[,2]}). To plot several data sets in one plot, the data sets must be provided as \code{list}, e.g. \code{list(data.1, data.2)}.} \item{na.rm}{\link{logical} (\emph{with default}): excludes \code{NA} values from the data set prior to any further operations.} \item{log.z}{\link{logical} (\emph{with default}): Option to display the z-axis in logarithmic scale. Default is \code{TRUE}.} \item{central.value}{\link{numeric}: User-defined central value, primarily used for horizontal centering of the z-axis.} \item{centrality}{\link{character} or \link{numeric} (\emph{with default}): measure of centrality, used for automatically centering the plot and drawing the central line. 
Can either be one out of \itemize{ \item \code{"mean"}, \item \code{"median"}, \item \code{"mean.weighted"} and \item \code{"median.weighted"} or a \item numeric value used for the standardisation. }} \item{mtext}{\link{character}: additional text below the plot title.} \item{summary}{\link{character} (\emph{optional}): add statistic measures of centrality and dispersion to the plot. Can be one or more of several keywords. See details for available keywords.} \item{summary.pos}{\link{numeric} or \link{character} (\emph{with default}): optional position coordinates or keyword (e.g. \code{"topright"}) for the statistical summary. Alternatively, the keyword \code{"sub"} may be specified to place the summary below the plot header. However, this latter option is only possible if \code{mtext} is not used.} \item{legend}{\link{character} vector (\emph{optional}): legend content to be added to the plot.} \item{legend.pos}{\link{numeric} or \link{character} (with default): optional position coordinates or keyword (e.g. \code{"topright"}) for the legend to be plotted.} \item{stats}{\link{character}: additional labels of statistically important values in the plot. One or more out of the following: \itemize{ \item \code{"min"}, \item \code{"max"}, \item \code{"median"}. }} \item{rug}{\link{logical}: Option to add a rug to the z-scale, to indicate the location of individual values} \item{plot.ratio}{\link{numeric}: User-defined plot area ratio (i.e. curvature of the z-axis). If omitted, the default value (\code{4.5/5.5}) is used and modified automatically to optimise the z-axis curvature. The parameter should be decreased when data points are plotted outside the z-axis or when the z-axis gets too elliptic.} \item{bar.col}{\link{character} or \link{numeric} (\emph{with default}): colour of the bar showing the 2-sigma range around the central value. To disable the bar, use \code{"none"}. Default is \code{"grey"}.} \item{y.ticks}{\link{logical}: Option to hide y-axis labels. 
Useful for data with small scatter.} \item{grid.col}{\link{character} or \link{numeric} (\emph{with default}): colour of the grid lines (originating at \verb{[0,0]} and stretching to the z-scale). To disable grid lines, use \code{"none"}. Default is \code{"grey"}.} \item{line}{\link{numeric}: numeric values of the additional lines to be added.} \item{line.col}{\link{character} or \link{numeric}: colour of the additional lines.} \item{line.label}{\link{character}: labels for the additional lines.} \item{output}{\link{logical}: Optional output of numerical plot parameters. These can be useful to reproduce similar plots. Default is \code{FALSE}.} \item{...}{Further plot arguments to pass. \code{xlab} must be a vector of length 2, specifying the upper and lower x-axes labels.} } \value{ Returns a plot object. } \description{ A Galbraith's radial plot is produced on a logarithmic or a linear scale. } \details{ Details and the theoretical background of the radial plot are given in the cited literature. This function is based on an S script of Rex Galbraith. To reduce the manual adjustments, the function has been rewritten. Thanks to Rex Galbraith for useful comments on this function. \cr Plotting can be disabled by adding the argument \code{plot = "FALSE"}, e.g. to return only numeric plot output. Earlier versions of the Radial Plot in this package had the 2-sigma-bar drawn onto the z-axis. However, this might have caused misunderstanding in that the 2-sigma range may also refer to the z-scale, which it does not! Rather it applies only to the x-y-coordinate system (standardised error vs. precision). A spread in doses or ages must be drawn as lines originating at zero precision (x0) and zero standardised estimate (y0). Such a range may be drawn by adding lines to the radial plot ( \code{line}, \code{line.col}, \code{line.label}, cf. examples). A statistic summary, i.e. 
a collection of statistic measures of centrality and dispersion (and further measures) can be added by specifying one or more of the following keywords: \itemize{ \item \code{"n"} (number of samples), \item \code{"mean"} (mean De value), \item \code{"mean.weighted"} (error-weighted mean), \item \code{"median"} (median of the De values), \item \code{"sdrel"} (relative standard deviation in percent), \item \code{"sdrel.weighted"} (error-weighted relative standard deviation in percent), \item \code{"sdabs"} (absolute standard deviation), \item \code{"sdabs.weighted"} (error-weighted absolute standard deviation), \item \code{"serel"} (relative standard error), \item \code{"serel.weighted"} (error-weighted relative standard error), \item \code{"seabs"} (absolute standard error), \item \code{"seabs.weighted"} (error-weighted absolute standard error), \item \code{"in.2s"} (percent of samples in 2-sigma range), \item \code{"kurtosis"} (kurtosis) and \item \code{"skewness"} (skewness). } } \section{Function version}{ 0.5.5 } \examples{ ## load example data data(ExampleData.DeValues, envir = environment()) ExampleData.DeValues <- Second2Gray(ExampleData.DeValues$BT998, c(0.0438,0.0019)) ## plot the example data straightforward plot_RadialPlot(data = ExampleData.DeValues) ## now with linear z-scale plot_RadialPlot(data = ExampleData.DeValues, log.z = FALSE) ## now with output of the plot parameters plot1 <- plot_RadialPlot(data = ExampleData.DeValues, log.z = FALSE, output = TRUE) plot1 plot1$zlim ## now with adjusted z-scale limits plot_RadialPlot(data = ExampleData.DeValues, log.z = FALSE, zlim = c(100, 200)) ## now the two plots with serious but seasonally changing fun #plot_RadialPlot(data = data.3, fun = TRUE) ## now with user-defined central value, in log-scale again plot_RadialPlot(data = ExampleData.DeValues, central.value = 150) ## now with a rug, indicating individual De values at the z-scale plot_RadialPlot(data = ExampleData.DeValues, rug = TRUE) ## now with 
legend, colour, different points and smaller scale plot_RadialPlot(data = ExampleData.DeValues, legend.text = "Sample 1", col = "tomato4", bar.col = "peachpuff", pch = "R", cex = 0.8) ## now without 2-sigma bar, y-axis, grid lines and central value line plot_RadialPlot(data = ExampleData.DeValues, bar.col = "none", grid.col = "none", y.ticks = FALSE, lwd = 0) ## now with user-defined axes labels plot_RadialPlot(data = ExampleData.DeValues, xlab = c("Data error (\%)", "Data precision"), ylab = "Scatter", zlab = "Equivalent dose [Gy]") ## now with minimum, maximum and median value indicated plot_RadialPlot(data = ExampleData.DeValues, central.value = 150, stats = c("min", "max", "median")) ## now with a brief statistical summary plot_RadialPlot(data = ExampleData.DeValues, summary = c("n", "in.2s")) ## now with another statistical summary as subheader plot_RadialPlot(data = ExampleData.DeValues, summary = c("mean.weighted", "median"), summary.pos = "sub") ## now the data set is split into sub-groups, one is manipulated data.1 <- ExampleData.DeValues[1:15,] data.2 <- ExampleData.DeValues[16:25,] * 1.3 ## now a common dataset is created from the two subgroups data.3 <- list(data.1, data.2) ## now the two data sets are plotted in one plot plot_RadialPlot(data = data.3) ## now with some graphical modification plot_RadialPlot(data = data.3, col = c("darkblue", "darkgreen"), bar.col = c("lightblue", "lightgreen"), pch = c(2, 6), summary = c("n", "in.2s"), summary.pos = "sub", legend = c("Sample 1", "Sample 2")) } \section{How to cite}{ Dietze, M., Kreutzer, S., 2020. plot_RadialPlot(): Function to create a Radial Plot. Function version 0.5.5. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Galbraith, R.F., 1988. 
Graphical Display of Estimates Having Differing Standard Errors. Technometrics, 30 (3), 271-281. Galbraith, R.F., 1990. The radial plot: Graphical assessment of spread in ages. International Journal of Radiation Applications and Instrumentation. Part D. Nuclear Tracks and Radiation Measurements, 17 (3), 207-214. Galbraith, R. & Green, P., 1990. Estimating the component ages in a finite mixture. International Journal of Radiation Applications and Instrumentation. Part D. Nuclear Tracks and Radiation Measurements, 17 (3) 197-206. Galbraith, R.F. & Laslett, G.M., 1993. Statistical models for mixed fission track ages. Nuclear Tracks And Radiation Measurements, 21 (4), 459-470. Galbraith, R.F., 1994. Some Applications of Radial Plots. Journal of the American Statistical Association, 89 (428), 1232-1242. Galbraith, R.F., 2010. On plotting OSL equivalent doses. Ancient TL, 28 (1), 1-10. Galbraith, R.F. & Roberts, R.G., 2012. Statistical aspects of equivalent dose and error calculation and display in OSL dating: An overview and some recommendations. Quaternary Geochronology, 11, 1-27. } \seealso{ \link{plot}, \link{plot_KDE}, \link{plot_Histogram} } \author{ Michael Dietze, GFZ Potsdam (Germany)\cr Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr Based on a rewritten S script of Rex Galbraith, 2010 , RLum Developer Team} Luminescence/man/set_RLum.Rd0000644000176200001440000000517113604173245015471 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/set_RLum.R \name{set_RLum} \alias{set_RLum} \title{General set function for RLum S4 class objects} \usage{ set_RLum(class, originator, .uid = create_UID(), .pid = NA_character_, ...) 
} \arguments{ \item{class}{\linkS4class{RLum} (\strong{required}): name of the S4 class to create} \item{originator}{\link{character} (\emph{automatic}): contains the name of the calling function (the function that produces this object); can be set manually.} \item{.uid}{\link{character} (\emph{automatic}): sets a unique ID for this object using the internal C++ function \code{create_UID}.} \item{.pid}{\link{character} (\emph{with default}): option to provide a parent id for nesting at will.} \item{...}{further arguments that one might want to pass to the specific set method} } \value{ Returns an object of the specified class. } \description{ Function calls object-specific set functions for RLum S4 class objects. } \details{ The function provides a generalised access point for specific \linkS4class{RLum} objects.\cr Depending on the given class, the corresponding method to create an object from this class will be selected. Allowed additional arguments can be found in the documentations of the corresponding \linkS4class{RLum} class: \itemize{ \item \linkS4class{RLum.Data.Curve}, \item \linkS4class{RLum.Data.Image}, \item \linkS4class{RLum.Data.Spectrum}, \item \linkS4class{RLum.Analysis}, \item \linkS4class{RLum.Results} } } \section{Function version}{ 0.3.0 } \examples{ ##produce empty objects from each class set_RLum(class = "RLum.Data.Curve") set_RLum(class = "RLum.Data.Image") set_RLum(class = "RLum.Data.Spectrum") set_RLum(class = "RLum.Analysis") set_RLum(class = "RLum.Results") ##produce a curve object with arbitrary curve values object <- set_RLum( class = "RLum.Data.Curve", curveType = "arbitrary", recordType = "OSL", data = matrix(c(1:100,exp(-c(1:100))),ncol = 2)) ##plot this curve object plot_RLum(object) } \seealso{ \linkS4class{RLum.Data.Curve}, \linkS4class{RLum.Data.Image}, \linkS4class{RLum.Data.Spectrum}, \linkS4class{RLum.Analysis}, \linkS4class{RLum.Results} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , 
RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. set_RLum(): General set function for RLum S4 class objects. Function version 0.3.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{utilities} Luminescence/man/read_PSL2R.Rd0000644000176200001440000000603613604173245015575 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/read_PSL2R.R \name{read_PSL2R} \alias{read_PSL2R} \title{Import PSL files to R} \usage{ read_PSL2R( file, drop_bg = FALSE, as_decay_curve = TRUE, smooth = FALSE, merge = FALSE, ... ) } \arguments{ \item{file}{\link{character} (\strong{required}): path and file name of the PSL file. If input is a \code{vector} it should comprise only \code{character}s representing valid paths and PSL file names. Alternatively the input character can be just a directory (path). In this case the function tries to detect and import all PSL files found in the directory.} \item{drop_bg}{\link{logical} (\emph{with default}): \code{TRUE} to automatically remove all non-OSL/IRSL curves.} \item{as_decay_curve}{\link{logical} (\emph{with default}): Portable OSL Reader curves are often given as cumulative light sum curves. Use \code{TRUE} (default) to convert the curves to the more usual decay form.} \item{smooth}{\link{logical} (\emph{with default}): \code{TRUE} to apply Tukey's Running Median Smoothing for OSL and IRSL decay curves. Smoothing is encouraged if you see random signal drops within the decay curves related to hardware errors.} \item{merge}{\link{logical} (\emph{with default}): \code{TRUE} to merge all \code{RLum.Analysis} objects. 
Only applicable if multiple files are imported.} \item{...}{currently not used.} } \value{ Returns an S4 \linkS4class{RLum.Analysis} object containing \linkS4class{RLum.Data.Curve} objects for each curve. } \description{ Imports PSL files produced by a SUERC portable OSL reader into R \strong{(BETA)}. } \details{ This function provides an import routine for the SUERC portable OSL Reader PSL format. PSL files are just plain text and can be viewed with any text editor. Due to the formatting of PSL files this import function relies heavily on regular expression to find and extract all relevant information. See \strong{note}. } \note{ Because this function relies heavily on regular expressions to parse PSL files it is currently only in beta status. If the routine fails to import a specific PSL file please report to \href{mailto:christoph.burow@gmx.net}{christoph.burow@gmx.net} so the function can be updated. } \section{Function version}{ 0.0.1 } \examples{ # (1) Import PSL file to R file <- system.file("extdata", "DorNie_0016.psl", package = "Luminescence") psl <- read_PSL2R(file, drop_bg = FALSE, as_decay_curve = TRUE, smooth = TRUE, merge = FALSE) print(str(psl, max.level = 3)) plot(psl, combine = TRUE) } \seealso{ \linkS4class{RLum.Analysis}, \linkS4class{RLum.Data.Curve}, \linkS4class{RLum.Data.Curve} } \author{ Christoph Burow, University of Cologne (Germany) , RLum Developer Team} \section{How to cite}{ Burow, C., 2020. read_PSL2R(): Import PSL files to R. Function version 0.0.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \keyword{IO} Luminescence/man/BaseDataSet.CosmicDoseRate.Rd0000644000176200001440000000600113604173243020711 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \name{BaseDataSet.CosmicDoseRate} \alias{BaseDataSet.CosmicDoseRate} \alias{values.cosmic.Softcomp} \alias{values.factor.Altitude} \alias{values.par.FJH} \title{Base data set for cosmic dose rate calculation} \format{\tabular{ll}{ \code{values.cosmic.Softcomp}: \tab data frame containing cosmic dose rates for shallow depths (< 167 g cm^-2) obtained using the "AGE" program by Rainer Gruen (cf. Gruen 2009). These data essentially reproduce the graph shown in Fig. 1 of Prescott & Hutton (1988). \cr \code{values.factor.Altitude}: \tab data frame containing altitude factors for adjusting geomagnetic field-change factors. Values were read from Fig. 1 in Prescott & Hutton (1994). \cr \code{values.par.FJH}: \tab data frame containing values for parameters F, J and H (read from Fig. 2 in Prescott & Hutton 1994) used in the expression \cr } \deqn{Dc = D0*(F+J*exp((altitude/1000)/H))}} \source{ The following data were carefully read from figures in mentioned sources and used for fitting procedures. The derived expressions are used in the function \code{calc_CosmicDoseRate}. 
\strong{values.cosmic.Softcomp} \tabular{ll}{ Program: \tab "AGE"\cr Reference: \tab Gruen (2009) \cr Fit: \tab Polynomials in the form of } For depths between 40-167 g cm^-2: \deqn{y = 2*10^-6*x^2-0.0008*x+0.2535} (For depths <40 g cm^-2) \deqn{y = -6*10^-8*x^3+2*10^-5*x^2-0.0025*x+0.2969} \strong{values.factor.Altitude} \tabular{ll}{ Reference: \tab Prescott & Hutton (1994) \cr Page: \tab 499 \cr Figure: \tab 1 \cr Fit: \tab 2-degree polynomial in the form of } \deqn{y = -0.026*x^2 + 0.6628*x + 1.0435} \strong{values.par.FJH} \tabular{ll}{ Reference: \tab Prescott & Hutton (1994) \cr Page: \tab 500 \cr Figure: \tab 2 \cr Fits: \tab 3-degree polynomials and linear fits } F (non-linear part, \eqn{\lambda} < 36.5 deg.): \deqn{y = -7*10^-7*x^3-8*10^-5*x^2-0.0009*x+0.3988} F (linear part, \eqn{\lambda} > 36.5 deg.): \deqn{y = -0.0001*x + 0.2347} J (non-linear part, \eqn{\lambda} < 34 deg.): \deqn{y = 5*10^-6*x^3-5*10^-5*x^2+0.0026*x+0.5177} J (linear part, \eqn{\lambda} > 34 deg.): \deqn{y = 0.0005*x + 0.7388} H (non-linear part, \eqn{\lambda} < 36 deg.): \deqn{y = -3*10^-6*x^3-5*10^-5*x^2-0.0031*x+4.398} H (linear part, \eqn{\lambda} > 36 deg.): \deqn{y = 0.0002*x + 4.0914} } \description{ Collection of data from various sources needed for cosmic dose rate calculation } \section{Version}{ 0.1 } \examples{ ##load data data(BaseDataSet.CosmicDoseRate) } \references{ Gruen, R., 2009. The "AGE" program for the calculation of luminescence age estimates. Ancient TL, 27, pp. 45-46. Prescott, J.R., Hutton, J.T., 1988. Cosmic ray and gamma ray dosimetry for TL and ESR. Nuclear Tracks and Radiation Measurements, 14, pp. 223-227. Prescott, J.R., Hutton, J.T., 1994. Cosmic ray contributions to dose rates for luminescence and ESR dating: large depths and long-term time variations. Radiation Measurements, 23, pp. 497-500. 
} \keyword{datasets} Luminescence/man/app_RLum.Rd0000644000176200001440000000216013604173244015450 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/app_RLum.R \name{app_RLum} \alias{app_RLum} \title{Run Luminescence shiny apps (wrapper)} \usage{ app_RLum(app = NULL, ...) } \arguments{ \item{app}{\link{character} (\strong{required}): name of the application to start. See details for a list of available apps.} \item{...}{further arguments passed \link[shiny:runApp]{shiny::runApp}} } \description{ Wrapper for the function \link[RLumShiny:app_RLum]{RLumShiny::app_RLum} from the package \link[RLumShiny:RLumShiny-package]{RLumShiny::RLumShiny-package}. For further details and examples please see the manual of this package. } \section{Function version}{ 0.1.1 } \author{ Christoph Burow, University of Cologne (Germany) , RLum Developer Team} \section{How to cite}{ Burow, C., 2020. app_RLum(): Run Luminescence shiny apps (wrapper). Function version 0.1.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } Luminescence/man/calc_gSGC.Rd0000644000176200001440000000672613604173244015512 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_gSGC.R \name{calc_gSGC} \alias{calc_gSGC} \title{Calculate De value based on the gSGC by Li et al., 2015} \usage{ calc_gSGC( data, gSGC.type = "0-250", gSGC.parameters, n.MC = 100, verbose = TRUE, plot = TRUE, ... ) } \arguments{ \item{data}{\link{data.frame} (\strong{required}): input data of providing the following columns: \emph{'LnTn', 'LnTn.error', Lr1Tr1', 'Lr1Tr1.error', 'Dr1'} \strong{Note:} column names are not required. 
The function expects the input data in the given order} \item{gSGC.type}{\link{character} (\emph{with default}): define the function parameters that should be used for the iteration procedure: Li et al., 2015 (Table 2) presented function parameters for two dose ranges: \code{"0-450"} and \code{"0-250"}} \item{gSGC.parameters}{\link{list} (\emph{optional}): option to provide own function parameters used for fitting as named list. Nomenclature follows Li et al., 2015, i.e. \code{list(A,A.error,D0,D0.error,c,c.error,Y0,Y0.error,range)}, range requires a vector for the range the function is considered as valid, e.g. \code{range = c(0,250)}\cr Using this option overwrites the default parameter list of the gSGC, meaning the argument \code{gSGC.type} will be without effect} \item{n.MC}{\link{integer} (\emph{with default}): number of Monte Carlo simulation runs for error estimation, see details.} \item{verbose}{\link{logical}: enable or disable terminal output} \item{plot}{\link{logical}: enable or disable graphical feedback as plot} \item{...}{parameters will be passed to the plot output} } \value{ Returns an S4 object of type \linkS4class{RLum.Results}. \strong{\verb{@data}}\cr \verb{$ De.value} (\link{data.frame}) \cr \code{.. $ De} \cr \code{.. $ De.error} \cr \code{.. $ Eta} \cr \verb{$ De.MC} (\link{list}) contains the matrices from the error estimation.\cr \verb{$ uniroot} (\link{list}) contains the uniroot outputs of the De estimations\cr \strong{\verb{@info}}\cr \verb{$ call} (\link{call}) the original function call } \description{ Function returns De value and De value error using the global standardised growth curve (gSGC) assumption proposed by Li et al., 2015 for OSL dating of sedimentary quartz } \details{ The error of the De value is determined using a Monte Carlo simulation approach. Solving of the equation is realised using \link{uniroot}. Large values for \code{n.MC} will significantly increase the computation time. 
} \section{Function version}{ 0.1.1 } \examples{ results <- calc_gSGC(data = data.frame( LnTn = 2.361, LnTn.error = 0.087, Lr1Tr1 = 2.744, Lr1Tr1.error = 0.091, Dr1 = 34.4)) get_RLum(results, data.object = "De") } \section{How to cite}{ Kreutzer, S., 2020. calc_gSGC(): Calculate De value based on the gSGC by Li et al., 2015. Function version 0.1.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Li, B., Roberts, R.G., Jacobs, Z., Li, S.-H., 2015. Potential of establishing a 'global standardised growth curve' (gSGC) for optical dating of quartz from sediments. Quaternary Geochronology 27, 94-104. doi:10.1016/j.quageo.2015.02.011 } \seealso{ \linkS4class{RLum.Results}, \link{get_RLum}, \link{uniroot} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \keyword{datagen} Luminescence/man/get_Layout.Rd0000644000176200001440000000414313604173244016050 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/get_Layout.R \name{get_Layout} \alias{get_Layout} \title{Collection of layout definitions} \usage{ get_Layout(layout) } \arguments{ \item{layout}{\link{character} or \link{list} object (\strong{required}): name of the layout definition to be returned. If name is provided the respective definition is returned. One of the following supported layout definitions is possible: \code{"default"}, \code{"journal.1"}, \code{"small"}, \code{"empty"}. User-specific layout definitions must be provided as a list object of predefined structure, see details.} } \value{ A list object with layout definitions for plot functions. } \description{ This helper function returns a list with layout definitions for homogeneous plotting. 
} \details{ The easiest way to create a user-specific layout definition is perhaps to create either an empty or a default layout object and fill/modify the definitions (\code{user.layout <- get_Layout(data = "empty")}). } \section{Function version}{ 0.1 } \examples{ ## read example data set data(ExampleData.DeValues, envir = environment()) ## show structure of the default layout definition layout.default <- get_Layout(layout = "default") str(layout.default) ## show colour definitions for Abanico plot, only layout.default$abanico$colour ## set Abanico plot title colour to orange layout.default$abanico$colour$main <- "orange" ## create Abanico plot with modofied layout definition plot_AbanicoPlot(data = ExampleData.DeValues, layout = layout.default) ## create Abanico plot with predefined layout "journal" plot_AbanicoPlot(data = ExampleData.DeValues, layout = "journal") } \author{ Michael Dietze, GFZ Potsdam (Germany) , RLum Developer Team} \section{How to cite}{ Dietze, M., 2020. get_Layout(): Collection of layout definitions. Function version 0.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } Luminescence/man/BaseDataSet.FractionalGammaDose.Rd0000644000176200001440000000156413604173243021716 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \name{BaseDataSet.FractionalGammaDose} \alias{BaseDataSet.FractionalGammaDose} \title{Base data set of fractional gamma-dose values} \format{A \code{\link{list}} with fractional gamma dose-rate values sorted by article: \tabular{ll}{ \code{Aitken1985}: \tab Fractional gamma-dose values from table H.1 }} \source{ Fractional gamma dose values were carefully read from the tables given in the references above. 
} \description{ Collection of (un-)published fractional gamma dose-rate values to scale the gamma-dose rate considering layer-to-layer variations in soil radioactivity. } \section{Version}{ 0.1 } \examples{ ## Load data data("BaseDataSet.FractionalGammaDose") } \references{ Aitken, M.J., 1985. Thermoluminescence Dating. Academic Press, London. } \keyword{datasets} Luminescence/man/convert_Wavelength2Energy.Rd0000644000176200001440000001146513604173244021041 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/convert_Wavelength2Energy.R \name{convert_Wavelength2Energy} \alias{convert_Wavelength2Energy} \title{Emission Spectra Conversion from Wavelength to Energy Scales} \usage{ convert_Wavelength2Energy(object, digits = 3L, order = FALSE) } \arguments{ \item{object}{\linkS4class{RLum.Data.Spectrum}, \link{data.frame}, \link{matrix} (\strong{required}): input object to be converted. If the input is not an \linkS4class{RLum.Data.Spectrum}, the first column is always treated as the wavelength column. The function supports a list of allowed input objects.} \item{digits}{\link{integer} (\emph{with default}): set the number of digits on the returned energy axis} \item{order}{\link{logical} (\emph{with default}): enables/disables sorting of the values in ascending energy order. After the conversion the longest wavelength has the lowest energy value and the shortest wavelength the highest. While this is correct, some R functions expect increasing x-values.} } \value{ The same object class as provided as input is returned. } \description{ The function provides a convenient and fast way to convert emission spectra wavelength to energy scales. The function works on \linkS4class{RLum.Data.Spectrum}, \link{data.frame} and \link{matrix} and a \link{list} of such objects. The function was written to smooth the workflow while analysing emission spectra data. 
This is in particular useful if you want to further treat your data and apply, e.g., a signal deconvolution. } \details{ The intensity of the spectrum is re-calculated using the following approach to recalculate wavelength and corresponding intensity values (e.g., Appendix 4 in Blasse and Grabmeier, 1994; Mooney and Kambhampati, 2013): \deqn{\phi_{E} = \phi_{\lambda} * \lambda^2 / (hc)} with \eqn{\phi_{E}} the intensity per interval of energy \eqn{E} (1/eV), \eqn{\phi_{\lambda}} the intensity per interval of wavelength \eqn{\lambda} (1/nm) and \eqn{h} (eV * s) the Planck constant and \eqn{c} (nm/s) the velocity of light. For transforming the wavelength axis (x-values) the following equation is used \deqn{E = hc/\lambda} } \note{ This conversion works solely for emission spectra. In case of absorption spectra only the x-axis has to be converted. } \section{Function version}{ 0.1.1 } \examples{ ##=====================## ##(1) Literature example after Mooney et al. (2013) ##(1.1) create matrix m <- matrix( data = c(seq(400, 800, 50), rep(1, 9)), ncol = 2) ##(1.2) set plot function to reproduce the ##literature figure p <- function(m) { plot(x = m[, 1], y = m[, 2]) polygon( x = c(m[, 1], rev(m[, 1])), y = c(m[, 2], rep(0, nrow(m)))) for (i in 1:nrow(m)) { lines(x = rep(m[i, 1], 2), y = c(0, m[i, 2])) } } ##(1.3) plot curves par(mfrow = c(1,2)) p(m) p(convert_Wavelength2Energy(m)) ##=====================## ##(2) Another example using density curves ##create dataset xy <- density( c(rnorm(n = 100, mean = 500, sd = 20), rnorm(n = 100, mean = 800, sd = 20))) xy <- data.frame(xy$x, xy$y) ##plot par(mfrow = c(1,2)) plot( xy, type = "l", xlim = c(150, 1000), xlab = "Wavelength [nm]", ylab = "Luminescence [a.u.]" ) plot( convert_Wavelength2Energy(xy), xy$y, type = "l", xlim = c(1.23, 8.3), xlab = "Energy [eV]", ylab = "Luminescence [a.u.]" ) } \section{How to cite}{ Kreutzer, S., 2020. convert_Wavelength2Energy(): Emission Spectra Conversion from Wavelength to Energy Scales. 
Function version 0.1.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Blasse, G., Grabmaier, B.C., 1994. Luminescent Materials. Springer. Mooney, J., Kambhampati, P., 2013. Get the Basics Right: Jacobian Conversion of Wavelength and Energy Scales for Quantitative Analysis of Emission Spectra. J. Phys. Chem. Lett. 4, 3316–3318. \doi{10.1021/jz401508t} Mooney, J., Kambhampati, P., 2013. Correction to “Get the Basics Right: Jacobian Conversion of Wavelength and Energy Scales for Quantitative Analysis of Emission Spectra.” J. Phys. Chem. Lett. 4, 3316–3318. \doi{10.1021/jz401508t} \strong{Further reading} Angulo, G., Grampp, G., Rosspeintner, A., 2006. Recalling the appropriate representation of electronic spectra. Spectrochimica Acta Part A: Molecular and Biomolecular Spectroscopy 65, 727–731. \doi{10.1016/j.saa.2006.01.007} Wang, Y., Townsend, P.D., 2013. Potential problems in collection and data processing of luminescence signals. Journal of Luminescence 142, 202–211. 
\doi{10.1016/j.jlumin.2013.03.052} } \seealso{ \linkS4class{RLum.Data.Spectrum}, \link{plot_RLum} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} \keyword{IO} Luminescence/man/ExampleData.BINfileData.Rd0000644000176200001440000000376413604173243020171 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \docType{data} \name{ExampleData.BINfileData} \alias{ExampleData.BINfileData} \alias{CWOSL.SAR.Data} \alias{TL.SAR.Data} \title{Example data from a SAR OSL and SAR TL measurement for the package Luminescence} \format{\code{CWOSL.SAR.Data}: SAR OSL measurement data \code{TL.SAR.Data}: SAR TL measurement data Each class object contains two slots: (a) \code{METADATA} is a \link{data.frame} with all metadata stored in the BIN file of the measurements and (b) \code{DATA} contains a list of vectors of the measured data (usually count values).} \source{ \strong{CWOSL.SAR.Data} \tabular{ll}{ Lab: \tab Luminescence Laboratory Bayreuth \cr Lab-Code: \tab BT607 \cr Location: \tab Saxony/Germany \cr Material: \tab Middle grain quartz measured on aluminum cups on a Risoe TL/OSL DA-15 reader\cr Reference: \tab unpublished } \strong{TL.SAR.Data} \tabular{ll}{ Lab: \tab Luminescence Laboratory of Cologne\cr Lab-Code: \tab LP1_5\cr Location: \tab Spain\cr Material: \tab Flint \cr Setup: \tab Risoe TL/OSL DA-20 reader (Filter: Semrock Brightline, HC475/50, N2, unpolished steel discs) \cr Reference: \tab unpublished \cr Remarks: \tab dataset limited to one position } } \description{ Example data from a SAR OSL and TL measurement for package Luminescence directly extracted from a Risoe BIN-file and provided in an object of type \linkS4class{Risoe.BINfileData} } \note{ Please note that this example data cannot be exported to a BIN-file using the function \code{writeR2BIN} as it was generated and implemented in the package long time ago. 
In the meantime the BIN-file format changed. } \section{Version}{ 0.1 } \examples{ ## show first 5 elements of the METADATA and DATA elements in the terminal data(ExampleData.BINfileData, envir = environment()) CWOSL.SAR.Data@METADATA[1:5,] CWOSL.SAR.Data@DATA[1:5] } \references{ \strong{CWOSL.SAR.Data}: unpublished data \strong{TL.SAR.Data}: unpublished data } \keyword{datasets} Luminescence/man/apply_CosmicRayRemoval.Rd0000644000176200001440000001141313604173244020356 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/apply_CosmicRayRemoval.R \name{apply_CosmicRayRemoval} \alias{apply_CosmicRayRemoval} \title{Function to remove cosmic rays from an RLum.Data.Spectrum S4 class object} \usage{ apply_CosmicRayRemoval( object, method = "smooth", method.Pych.smoothing = 2, method.Pych.threshold_factor = 3, MARGIN = 2, verbose = FALSE, plot = FALSE, ... ) } \arguments{ \item{object}{\linkS4class{RLum.Data.Spectrum} or \linkS4class{RLum.Analysis} (\strong{required}): input object to be treated. This can be also provided as \link{list}. If an \linkS4class{RLum.Analysis} object is provided, only the \linkS4class{RLum.Data.Spectrum} objects are treated. Please note: this mixing of objects do not work for a list of \code{RLum.Data} objects.} \item{method}{\link{character} (\emph{with default}): Defines method that is applied for cosmic ray removal. Allowed methods are \code{smooth}, the default, (\link{smooth}), \code{smooth.spline} (\link{smooth.spline}) and \code{Pych}. See details for further information.} \item{method.Pych.smoothing}{\link{integer} (\emph{with default}): Smoothing parameter for cosmic ray removal according to Pych (2003). The value defines how many neighboring values in each frame are used for smoothing (e.g., \code{2} means that the two previous and two following values are used).} \item{method.Pych.threshold_factor}{\link{numeric} (\emph{with default}): Threshold for zero-bins in the histogram. 
Small values mean that more peaks are removed, but signal might be also affected by this removal.} \item{MARGIN}{\link{integer} (\emph{with default}): on which part the function cosmic ray removal should be applied on: \itemize{ \item 1 = along the time axis (line by line), \item 2 = along the wavelength axis (column by column). } \strong{Note:} This argument currently only affects the methods \code{smooth} and \code{smooth.spline}} \item{verbose}{\link{logical} (\emph{with default}): Option to suppress terminal output.,} \item{plot}{\link{logical} (\emph{with default}): If \code{TRUE} the histograms used for the cosmic-ray removal are returned as plot including the used threshold. Note: A separat plot is returned for each frame! Currently only for \code{method = "Pych"} a graphical output is provided.} \item{...}{further arguments and graphical parameters that will be passed to the \link{smooth} function.} } \value{ Returns same object as input. } \description{ The function provides several methods for cosmic-ray removal and spectrum smoothing \linkS4class{RLum.Data.Spectrum} objects and such objects embedded in \link{list} or \linkS4class{RLum.Analysis} objects. } \details{ \strong{\code{method = "Pych"}} This method applies the cosmic-ray removal algorithm described by Pych (2003). Some aspects that are different to the publication: \itemize{ \item For interpolation between neighbouring values the median and not the mean is used. \item The number of breaks to construct the histogram is set to: \code{length(number.of.input.values)/2} } For further details see references below. \strong{\code{method = "smooth"}} Method uses the function \link{smooth} to remove cosmic rays. Arguments that can be passed are: \code{kind}, \code{twiceit} \strong{\code{method = "smooth.spline"}} Method uses the function \link{smooth.spline} to remove cosmic rays. 
Arguments that can be passed are: \code{spar} \strong{How to combine methods?} Different methods can be combined by applying the method repeatedly to the dataset (see example). } \section{Function version}{ 0.3.0 } \examples{ ##(1) - use with your own data and combine (uncomment for usage) ## run two times the default method and smooth with another method ## your.spectrum <- apply_CosmicRayRemoval(your.spectrum, method = "Pych") ## your.spectrum <- apply_CosmicRayRemoval(your.spectrum, method = "Pych") ## your.spectrum <- apply_CosmicRayRemoval(your.spectrum, method = "smooth") } \section{How to cite}{ Kreutzer, S., 2020. apply_CosmicRayRemoval(): Function to remove cosmic rays from an RLum.Data.Spectrum S4 class object. Function version 0.3.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Pych, W., 2003. A Fast Algorithm for Cosmic-Ray Removal from Single Images. Astrophysics 116, 148-153. 
\url{http://arxiv.org/pdf/astro-ph/0311290.pdf?origin=publication_detail} } \seealso{ \linkS4class{RLum.Data.Spectrum}, \linkS4class{RLum.Analysis}, \link{smooth}, \link{smooth.spline}, \link{apply_CosmicRayRemoval} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} \keyword{manip} Luminescence/man/analyse_baSAR.Rd0000644000176200001440000005223213604173243016401 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/analyse_baSAR.R \name{analyse_baSAR} \alias{analyse_baSAR} \title{Bayesian models (baSAR) applied on luminescence data} \usage{ analyse_baSAR( object, XLS_file = NULL, aliquot_range = NULL, source_doserate = NULL, signal.integral, signal.integral.Tx = NULL, background.integral, background.integral.Tx = NULL, irradiation_times = NULL, sigmab = 0, sig0 = 0.025, distribution = "cauchy", baSAR_model = NULL, n.MCMC = 1e+05, fit.method = "EXP", fit.force_through_origin = TRUE, fit.includingRepeatedRegPoints = TRUE, method_control = list(), digits = 3L, distribution_plot = "kde", plot = TRUE, plot_reduced = TRUE, plot.single = FALSE, verbose = TRUE, ... ) } \arguments{ \item{object}{\linkS4class{Risoe.BINfileData}, \linkS4class{RLum.Results}, \link{list} of \linkS4class{RLum.Analysis}, \link{character} or \link{list} (\strong{required}): input object used for the Bayesian analysis. If a \code{character} is provided the function assumes a file connection and tries to import a BIN/BINX-file using the provided path. If a \code{list} is provided the list can only contain either \code{Risoe.BINfileData} objects or \code{character}s providing a file connection. Mixing of both types is not allowed. If an \linkS4class{RLum.Results} is provided the function directly starts with the Bayesian Analysis (see details)} \item{XLS_file}{\link{character} (\emph{optional}): XLS_file with data for the analysis. 
This file must contain 3 columns: the name of the file, the disc position and the grain position (the last being 0 for multi-grain measurements).\cr Alternatively a \code{data.frame} of similar structure can be provided.} \item{aliquot_range}{\link{numeric} (\emph{optional}): allows to limit the range of the aliquots used for the analysis. This argument has only an effect if the argument \code{XLS_file} is used or the input is the previous output (i.e. is \linkS4class{RLum.Results}). In this case the new selection will add the aliquots to the removed aliquots table.} \item{source_doserate}{\link{numeric} \strong{(required)}: source dose rate of beta-source used for the measuremnt and its uncertainty in Gy/s, e.g., \code{source_doserate = c(0.12, 0.04)}. Paramater can be provided as \code{list}, for the case that more than one BIN-file is provided, e.g., \code{source_doserate = list(c(0.04, 0.004), c(0.05, 0.004))}.} \item{signal.integral}{\link{vector} (\strong{required}): vector with the limits for the signal integral used for the calculation, e.g., \code{signal.integral = c(1:5)}. Ignored if \code{object} is an \linkS4class{RLum.Results} object. The parameter can be provided as \code{list}, see \code{source_doserate}.} \item{signal.integral.Tx}{\link{vector} (\emph{optional}): vector with the limits for the signal integral for the Tx curve. I f nothing is provided the value from \code{signal.integral} is used and it is ignored if \code{object} is an \linkS4class{RLum.Results} object. The parameter can be provided as \code{list}, see \code{source_doserate}.} \item{background.integral}{\link{vector} (\strong{required}): vector with the bounds for the background integral. Ignored if \code{object} is an \linkS4class{RLum.Results} object. The parameter can be provided as \code{list}, see \code{source_doserate}.} \item{background.integral.Tx}{\link{vector} (\emph{optional}): vector with the limits for the background integral for the Tx curve. 
If nothing is provided the value from \code{background.integral} is used. Ignored if \code{object} is an \linkS4class{RLum.Results} object. The parameter can be provided as \code{list}, see \code{source_doserate}.} \item{irradiation_times}{\link{numeric} (\emph{optional}): if set this vector replaces all irradiation times for one aliquot and one cycle (Lx and Tx curves) and recycles it for all other cycles and aliquots. Please note that if this argument is used, for every(!) single curve in the dataset an irradiation time needs to be set.} \item{sigmab}{\link{numeric} (\emph{with default}): option to set a manual value for the overdispersion (for LnTx and TnTx), used for the Lx/Tx error calculation. The value should be provided as absolute squared count values, cf. \link{calc_OSLLxTxRatio}. The parameter can be provided as \code{list}, see \code{source_doserate}.} \item{sig0}{\link{numeric} (\emph{with default}): allow adding an extra component of error to the final Lx/Tx error value (e.g., instrumental error, see details in \link{calc_OSLLxTxRatio}). The parameter can be provided as \code{list}, see \code{source_doserate}.} \item{distribution}{\link{character} (\emph{with default}): type of distribution that is used during Bayesian calculations for determining the Central dose and overdispersion values. Allowed inputs are \code{"cauchy"}, \code{"normal"} and \code{"log_normal"}.} \item{baSAR_model}{\link{character} (\emph{optional}): option to provide an own modified or new model for the Bayesian calculation (see details). If an own model is provided the argument \code{distribution} is ignored and set to \code{'user_defined'}} \item{n.MCMC}{\link{integer} (\emph{with default}): number of iterations for the Markov chain Monte Carlo (MCMC) simulations} \item{fit.method}{\link{character} (\emph{with default}): equation used for the fitting of the dose-response curve using the function \link{plot_GrowthCurve} and then for the Bayesian modelling. 
Here supported methods: \code{EXP}, \code{EXP+LIN} and \code{LIN}} \item{fit.force_through_origin}{\link{logical} (\emph{with default}): force fitting through origin} \item{fit.includingRepeatedRegPoints}{\link{logical} (\emph{with default}): includes the recycling point (assumed to be measured during the last cycle)} \item{method_control}{\link{list} (\emph{optional}): named list of control parameters that can be directly passed to the Bayesian analysis, e.g., \code{method_control = list(n.chains = 4)}. See details for further information} \item{digits}{\link{integer} (\emph{with default}): round output to the number of given digits} \item{distribution_plot}{\link{character} (\emph{with default}): sets the final distribution plot that shows equivalent doses obtained using the frequentist approach and sets in the central dose as comparison obtained using baSAR. Allowed input is \code{'abanico'} or \code{'kde'}. If set to \code{NULL} nothing is plotted.} \item{plot}{\link{logical} (\emph{with default}): enables or disables plot output} \item{plot_reduced}{\link{logical} (\emph{with default}): enables or disables the advanced plot output} \item{plot.single}{\link{logical} (\emph{with default}): enables or disables single plots or plots arranged by \code{analyse_baSAR}} \item{verbose}{\link{logical} (\emph{with default}): enables or disables verbose mode} \item{...}{parameters that can be passed to the function \link{calc_OSLLxTxRatio} (almost full support), \link[readxl:read_excel]{readxl::read_excel} (full support), \link{read_BIN2R} (\code{n.records}, \code{position}, \code{duplicated.rm}), see details.} } \value{ Function returns results numerically and graphically: -----------------------------------\cr \verb{[ NUMERICAL OUTPUT ]}\cr -----------------------------------\cr \strong{\code{RLum.Results}}-object \strong{slot:} \strong{\verb{@data}} \tabular{lll}{ \strong{Element} \tab \strong{Type} \tab \strong{Description}\cr \verb{$summary} \tab \code{data.frame} 
\tab statistical summary, including the central dose \cr \verb{$mcmc} \tab \code{mcmc} \tab \link[coda:mcmc.list]{coda::mcmc.list} object including raw output \cr \verb{$models} \tab \code{character} \tab implemented models used in the baSAR-model core \cr \verb{$input_object} \tab \code{data.frame} \tab summarising table (same format as the XLS-file) including, e.g., Lx/Tx values\cr \verb{$removed_aliquots} \tab \code{data.frame} \tab table with removed aliquots (e.g., NaN, or Inf Lx/Tx values). If nothing was removed \code{NULL} is returned } \strong{slot:} \strong{\verb{@info}} The original function call ------------------------\cr \verb{[ PLOT OUTPUT ]}\cr ------------------------\cr \itemize{ \item (A) Ln/Tn curves with set integration limits, \item (B) trace plots are returned by the baSAR-model, showing the convergence of the parameters (trace) and the resulting kernel density plots. If \code{plot_reduced = FALSE} for every(!) dose a trace and a density plot is returned (this may take a long time), \item (C) dose plots showing the dose for every aliquot as boxplots and the marked HPD in within. If boxes are coloured 'orange' or 'red' the aliquot itself should be checked, \item (D) the dose response curve resulting from the monitoring of the Bayesian modelling are provided along with the Lx/Tx values and the HPD. Note: The amount for curves displayed is limited to 1000 (random choice) for performance reasons, \item (E) the final plot is the De distribution as calculated using the conventional (frequentist) approach and the central dose with the HPDs marked within. This figure is only provided for a comparison, no further statistical conclusion should be drawn from it. 
} \strong{Please note: If distribution was set to \code{log_normal} the central dose is given as geometric mean!} } \description{ This function allows the application of Bayesian models on luminescence data, measured with the single-aliquot regenerative-dose (SAR, Murray and Wintle, 2000) protocol. In particular, it follows the idea proposed by Combes et al., 2015 of using a hierarchical model for estimating a central equivalent dose from a set of luminescence measurements. This function is (I) the adaptation of this approach for the R environment and (II) an extension and a technical refinement of the published code. } \details{ Internally the function consists of two parts: (I) The Bayesian core for the Bayesian calculations and applying the hierarchical model and (II) a data pre-processing part. The Bayesian core can be run independently, if the input data are sufficient (see below). The data pre-processing part was implemented to simplify the analysis for the user as all needed data pre-processing is done by the function, i.e. in theory it is enough to provide a BIN/BINX-file with the SAR measurement data. For the Bayesian analysis for each aliquot the following information is needed from the SAR analysis. LxTx, the LxTx error and the dose values for all regeneration points. \strong{How the systematic error contribution is calculated?} Standard errors (so far) provided with the source dose rate are considered as systematic uncertainties and added to final central dose by: \deqn{systematic.error = 1/n \sum SE(source.doserate)} \deqn{SE(central.dose.final) = \sqrt{SE(central.dose)^2 + systematic.error^2}} Please note that this approach is rather rough and can only be valid if the source dose rate errors, in case different readers had been used, are similar. In cases where more than one source dose rate is provided a warning is given. \strong{Input / output scenarios} Various inputs are allowed for this function. 
Unfortunately this makes the function handling rather complex, but at the same time very powerful. Available scenarios: \strong{(1) - \code{object} is BIN-file or link to a BIN-file} Finally it does not matter how the information of the BIN/BINX file is provided. The function supports \strong{(a)} either a path to a file or directory or a \code{list} of file names or paths or \strong{(b)} a \linkS4class{Risoe.BINfileData} object or a list of these objects. The latter one can be produced by using the function \link{read_BIN2R}, but this function is called automatically if only a filename and/or a path is provided. In both cases it will become the data that can be used for the analysis. \verb{[XLS_file = NULL]} If no XLS file (or data frame with the same format) is provided the function runs an automatic process that consists of the following steps: \enumerate{ \item Select all valid aliquots using the function \link{verify_SingleGrainData} \item Calculate Lx/Tx values using the function \link{calc_OSLLxTxRatio} \item Calculate De values using the function \link{plot_GrowthCurve} } These processed data are subsequently used for the Bayesian analysis \verb{[XLS_file != NULL]} If an XLS-file is provided or a \code{data.frame} providing similar information the pre-processing consists of the following steps: \enumerate{ \item Calculate Lx/Tx values using the function \link{calc_OSLLxTxRatio} \item Calculate De values using the function \link{plot_GrowthCurve} } Means, the XLS file should contain a selection of the BIN-file names and the aliquots selected for the further analysis. This allows a manual selection of input data, as the automatic selection by \link{verify_SingleGrainData} might not be totally sufficient. \strong{(2) - \code{object} \verb{RLum.Results object}} If an \linkS4class{RLum.Results} object is provided as input and(!) 
this object was previously created by the function \code{analyse_baSAR()} itself, the pre-processing part is skipped and the function starts directly the Bayesian analysis. This option is very powerful as it allows to change parameters for the Bayesian analysis without the need to repeat the data pre-processing. If furthermore the argument \code{aliquot_range} is set, aliquots can be manually excluded based on previous runs. \strong{\code{method_control}} These are arguments that can be passed directly to the Bayesian calculation core, supported arguments are: \tabular{lll}{ \strong{Parameter} \tab \strong{Type} \tab \strong{Description}\cr \code{lower_centralD} \tab \link{numeric} \tab sets the lower bound for the expected De range. Change it only if you know what you are doing!\cr \code{upper_centralD} \tab \link{numeric} \tab sets the upper bound for the expected De range. Change it only if you know what you are doing!\cr \code{n.chains} \tab \link{integer} \tab sets number of parallel chains for the model (default = 3) (cf. \link[rjags:jags.model]{rjags::jags.model})\cr \code{inits} \tab \link{list} \tab option to set initialisation values (cf. \link[rjags:jags.model]{rjags::jags.model}) \cr \code{thin} \tab \link{numeric} \tab thinning interval for monitoring the Bayesian process (cf. \link[rjags:jags.model]{rjags::jags.model})\cr \code{variable.names} \tab \link{character} \tab set the variables to be monitored during the MCMC run, default: \code{'central_D'}, \code{'sigma_D'}, \code{'D'}, \code{'Q'}, \code{'a'}, \code{'b'}, \code{'c'}, \code{'g'}. Note: only variables present in the model can be monitored. } \strong{User defined models}\cr The function provides the option to modify and to define own models that can be used for the Bayesian calculation. In the case the user wants to modify a model, a new model can be piped into the function via the argument \code{baSAR_model} as \code{character}. 
The model has to be provided in the JAGS dialect of the BUGS language (cf. \link[rjags:jags.model]{rjags::jags.model}) and parameter names given with the pre-defined names have to be respected, otherwise the function will break. \strong{FAQ} Q: How can I set the seed for the random number generator (RNG)? A: Use the argument \code{method_control}, e.g., for three MCMC chains (as it is the default):\preformatted{method_control = list( inits = list( list(.RNG.name = "base::Wichmann-Hill", .RNG.seed = 1), list(.RNG.name = "base::Wichmann-Hill", .RNG.seed = 2), list(.RNG.name = "base::Wichmann-Hill", .RNG.seed = 3) )) } This sets a reproducible set for every chain separately.\cr Q: How can I modify the output plots? A: You can't, but you can use the function output to create own, modified plots. Q: Can I change the boundaries for the central_D? A: Yes, we made it possible, but we DO NOT recommend it, except you know what you are doing!\cr Example: \verb{method_control = list(lower_centralD = 10))} Q: The lines in the baSAR-model appear to be in a wrong logical order?\cr A: This is correct and allowed (cf. JAGS manual) \strong{Additional arguments support via the \code{...} argument} This list summarizes the additional arguments that can be passed to the internally used functions. 
\tabular{llll}{ \strong{Supported argument} \tab \strong{Corresponding function} \tab \strong{Default} \tab \strong{Short description}\cr \code{threshold} \tab \link{verify_SingleGrainData} \tab \code{30} \tab change rejection threshold for curve selection \cr \code{sheet} \tab \link[readxl:read_excel]{readxl::read_excel} \tab \code{1} \tab select XLS-sheet for import\cr \code{col_names} \tab \link[readxl:read_excel]{readxl::read_excel} \tab \code{TRUE} \tab first row in XLS-file is header\cr \code{col_types} \tab \link[readxl:read_excel]{readxl::read_excel} \tab \code{NULL} \tab limit import to specific columns\cr \code{skip} \tab \link[readxl:read_excel]{readxl::read_excel} \tab \code{0} \tab number of rows to be skipped during import\cr \code{n.records} \tab \link{read_BIN2R} \tab \code{NULL} \tab limit records during BIN-file import\cr \code{duplicated.rm} \tab \link{read_BIN2R} \tab \code{TRUE} \tab remove duplicated records in the BIN-file\cr \code{pattern} \tab \link{read_BIN2R} \tab \code{TRUE} \tab select BIN-file by name pattern\cr \code{position} \tab \link{read_BIN2R} \tab \code{NULL} \tab limit import to a specific position\cr \code{background.count.distribution} \tab \link{calc_OSLLxTxRatio} \tab \code{"non-poisson"} \tab set assumed count distribution\cr \code{fit.weights} \tab \link{plot_GrowthCurve} \tab \code{TRUE} \tab enables / disables fit weights\cr \code{fit.bounds} \tab \link{plot_GrowthCurve} \tab \code{TRUE} \tab enables / disables fit bounds\cr \code{NumberIterations.MC} \tab \link{plot_GrowthCurve} \tab \code{100} \tab number of MC runs for error calculation\cr \code{output.plot} \tab \link{plot_GrowthCurve} \tab \code{TRUE} \tab enables / disables dose response curve plot\cr \code{output.plotExtended} \tab \link{plot_GrowthCurve} \tab \code{TRUE} \tab enables / disables extended dose response curve plot\cr } } \note{ \strong{If you provide more than one BIN-file}, it is \strong{strongly} recommended to provide a \code{list} with the same 
number of elements for the following parameters: \code{source_doserate}, \code{signal.integral}, \code{signal.integral.Tx}, \code{background.integral}, \code{background.integral.Tx}, \code{sigmab}, \code{sig0}. Example for two BIN-files: \code{source_doserate = list(c(0.04, 0.006), c(0.05, 0.006))} \strong{The function is currently limited to work with standard Risoe BIN-files only!} } \section{Function version}{ 0.1.33 } \examples{ ##(1) load package test data set data(ExampleData.BINfileData, envir = environment()) ##(2) selecting relevant curves, and limit dataset CWOSL.SAR.Data <- subset( CWOSL.SAR.Data, subset = POSITION\%in\%c(1:3) & LTYPE == "OSL") \dontrun{ ##(3) run analysis ##please not that the here selected parameters are ##choosen for performance, not for reliability results <- analyse_baSAR( object = CWOSL.SAR.Data, source_doserate = c(0.04, 0.001), signal.integral = c(1:2), background.integral = c(80:100), fit.method = "LIN", plot = FALSE, n.MCMC = 200 ) print(results) ##XLS_file template ##copy and paste this the code below in the terminal ##you can further use the function write.csv() to export the example XLS_file <- structure( list( BIN_FILE = NA_character_, DISC = NA_real_, GRAIN = NA_real_), .Names = c("BIN_FILE", "DISC", "GRAIN"), class = "data.frame", row.names = 1L ) } } \section{How to cite}{ Mercier, N., Kreutzer, S., 2020. analyse_baSAR(): Bayesian models (baSAR) applied on luminescence data. Function version 0.1.33. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Combes, B., Philippe, A., Lanos, P., Mercier, N., Tribolo, C., Guerin, G., Guibert, P., Lahaye, C., 2015. A Bayesian central equivalent dose model for optically stimulated luminescence dating. Quaternary Geochronology 28, 62-70. 
doi:10.1016/j.quageo.2015.04.001 Mercier, N., Kreutzer, S., Christophe, C., Guerin, G., Guibert, P., Lahaye, C., Lanos, P., Philippe, A., Tribolo, C., 2016. Bayesian statistics in luminescence dating: The 'baSAR'-model and its implementation in the R package 'Luminescence'. Ancient TL 34, 14-21. \strong{Further reading} Gelman, A., Carlin, J.B., Stern, H.S., Dunson, D.B., Vehtari, A., Rubin, D.B., 2013. Bayesian Data Analysis, Third Edition. CRC Press. Murray, A.S., Wintle, A.G., 2000. Luminescence dating of quartz using an improved single-aliquot regenerative-dose protocol. Radiation Measurements 32, 57-73. doi:10.1016/S1350-4487(99)00253-X Plummer, M., 2017. JAGS Version 4.3.0 user manual. https://sourceforge.net/projects/mcmc-jags/files/Manuals/4.x/jags_user_manual.pdf/download } \seealso{ \link{read_BIN2R}, \link{calc_OSLLxTxRatio}, \link{plot_GrowthCurve}, \link[readxl:read_excel]{readxl::read_excel}, \link{verify_SingleGrainData}, \link[rjags:jags.model]{rjags::jags.model}, \link[rjags:coda.samples]{rjags::coda.samples}, \link{boxplot.default} } \author{ Norbert Mercier, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) \cr Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) \cr The underlying Bayesian model based on a contribution by Combes et al., 2015. , RLum Developer Team} \keyword{datagen} Luminescence/man/calc_Kars2008.Rd0000644000176200001440000001014613604173244016130 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_Kars2008.R \name{calc_Kars2008} \alias{calc_Kars2008} \title{Apply the Kars et al. (2008) model (deprecated)} \usage{ calc_Kars2008(fit.method = "EXP", ...) } \arguments{ \item{fit.method}{\link{character} (\emph{with default}): Fit function of the dose response curve. Can either be \code{EXP} (the default) or \code{GOK}. Note that \code{EXP} (single saturating exponential) is the original function the model after Huntley (2006) and Kars et al. 
(2008) was designed to use. The use of a general-order kinetics function (\code{GOK}) is an experimental adaption of the model and should only be used with great care.} \item{...}{Parameters passed to \link{calc_Huntley2006}.} } \value{ An \linkS4class{RLum.Results} object is returned: } \description{ A function to calculate the expected sample specific fraction of saturation following Kars et al. (2008) and Huntley (2006). This function is deprecated and will eventually be removed. Please use \code{calc_Huntley2006()} instead. } \details{ This function applies the approach described in Kars et al. (2008), developed from the model of Huntley (2006) to calculate the expected sample specific fraction of saturation of a feldspar and also to calculate fading corrected age using this model. \eqn{\rho}' (\code{rhop}), the density of recombination centres, is a crucial parameter of this model and must be determined separately from a fading measurement. The function \link{analyse_FadingMeasurement} can be used to calculate the sample specific \eqn{\rho}' value. } \note{ \strong{This function is deprecated and will eventually be removed from the package.} \strong{Please use the function \code{\link[=calc_Huntley2006]{calc_Huntley2006()}} instead} \strong{(use \code{fit.method = "EXP"} to apply the model after Kars et al., 2008).} } \section{Function version}{ 0.4.0 } \examples{ ## Load example data (sample UNIL/NB123, see ?ExampleData.Fading) data("ExampleData.Fading", envir = environment()) ## (1) Set all relevant parameters # a. fading measurement data (IR50) fading_data <- ExampleData.Fading$fading.data$IR50 # b. Dose response curve data data <- ExampleData.Fading$equivalentDose.data$IR50 ## (2) Define required function parameters ddot <- c(7.00, 0.004) readerDdot <- c(0.134, 0.0067) # Analyse fading measurement and get an estimate of rho'. # Note that the RLum.Results object can be directly used for further processing. 
# The number of MC runs is reduced for this example rhop <- analyse_FadingMeasurement(fading_data, plot = TRUE, verbose = FALSE, n.MC = 10) ## (3) Apply the Kars et al. (2008) model to the data kars <- suppressWarnings( calc_Kars2008(data = data, rhop = rhop, ddot = ddot, readerDdot = readerDdot, n.MC = 25) ) } \section{How to cite}{ King, G.E., Burow, C., 2020. calc_Kars2008(): Apply the Kars et al. (2008) model (deprecated). Function version 0.4.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Kars, R.H., Wallinga, J., Cohen, K.M., 2008. A new approach towards anomalous fading correction for feldspar IRSL dating-tests on samples in field saturation. Radiation Measurements 43, 786-790. doi:10.1016/j.radmeas.2008.01.021 Huntley, D.J., 2006. An explanation of the power-law decay of luminescence. Journal of Physics: Condensed Matter 18, 1359-1365. doi:10.1088/0953-8984/18/4/020 King, G.E., Herman, F., Lambert, R., Valla, P.G., Guralnik, B., 2016. Multi-OSL-thermochronometry of feldspar. Quaternary Geochronology 33, 76-87. doi:10.1016/j.quageo.2016.01.004 \strong{Further reading} Morthekai, P., Jain, M., Cunha, P.P., Azevedo, J.M., Singhvi, A.K., 2011. An attempt to correct for the fading in million year old basaltic rocks. Geochronometria 38(3), 223-230. } \author{ Georgina E. 
King, University of Bern (Switzerland) \cr Christoph Burow, University of Cologne (Germany) , RLum Developer Team} \keyword{datagen} Luminescence/man/extract_IrradiationTimes.Rd0000644000176200001440000001416113604173244020736 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/extract_IrradiationTimes.R \name{extract_IrradiationTimes} \alias{extract_IrradiationTimes} \title{Extract Irradiation Times from an XSYG-file} \usage{ extract_IrradiationTimes( object, file.BINX, recordType = c("irradiation (NA)", "IRSL (UVVIS)", "OSL (UVVIS)", "TL (UVVIS)"), compatibility.mode = TRUE, txtProgressBar = TRUE ) } \arguments{ \item{object}{\link{character}, \linkS4class{RLum.Analysis} or \link{list} (\strong{required}): path and file name of the XSYG file or an \linkS4class{RLum.Analysis} produced by the function \link{read_XSYG2R}; alternatively a \code{list} of \linkS4class{RLum.Analysis} can be provided. \strong{Note}: If an \linkS4class{RLum.Analysis} is used, any input for the arguments \code{file.BINX} and \code{recordType} will be ignored!} \item{file.BINX}{\link{character} (\emph{optional}): path and file name of an existing BINX-file. If a file name is provided the file will be updated with the information from the XSYG file in the same folder as the original BINX-file. \strong{Note:} The XSYG and the BINX-file have to originate from the same measurement!} \item{recordType}{\link{character} (\emph{with default}): select relevant curve types from the XSYG file or \linkS4class{RLum.Analysis} object. As the XSYG-file format comprises much more information than usually needed for routine data analysis and allowed in the BINX-file format, only the relevant curves are selected by using the function \link{get_RLum}. The argument \code{recordType} works as described for this function. \strong{Note:} A wrong selection will cause a function error. 
Please change this argument only if you have reasons to do so.} \item{compatibility.mode}{\link{logical} (\emph{with default}): this option is parsed only if a BIN/BINX file is produced and it will reset all position values to a max. value of 48, cf.\link{write_R2BIN}} \item{txtProgressBar}{\link{logical} (\emph{with default}): enables \code{TRUE} or disables \code{FALSE} the progression bars during import and export} } \value{ An \linkS4class{RLum.Results} object is returned with the following structure:\preformatted{.. $irr.times (data.frame) } If a BINX-file path and name is set, the output will be additionally transferred into a new BINX-file with the function name as suffix. For the output the path of the input BINX-file itself is used. Note that this will not work if the input object is a file path to an XSYG-file, instead of a link to only one file. In this case the argument input for \code{file.BINX} is ignored. In the self call mode (input is a \code{list} of \linkS4class{RLum.Analysis} objects) a list of \linkS4class{RLum.Results} is returned. } \description{ Extracts irradiation times, dose and times since last irradiation, from a Freiberg Instruments XSYG-file. This information can be further used to update an existing BINX-file. } \details{ The function was written to compensate missing information in the BINX-file output of Freiberg Instruments lexsyg readers. As all information are available within the XSYG-file anyway, this information can be extracted and used for further analysis and/or stored in a new BINX-file, which can be further used by other software, e.g., Analyst (Geoff Duller). Typical application example: g-value estimation from fading measurements using the Analyst or any other self written script. Besides some simple data transformation steps the function applies the functions \link{read_XSYG2R}, \link{read_BIN2R}, \link{write_R2BIN} for data import and export. 
} \note{ The produced output object still contains the irradiation steps to keep the output transparent. However, for the BINX-file export these steps are removed as the BINX-file format description does not allow irradiations as separate sequence steps. \strong{BINX-file 'Time Since Irradiation' value differs from the table output?} The way the value 'Time Since Irradiation' is defined differs. In the BINX-file the 'Time Since Irradiation' is calculated as the 'Time Since Irradiation' plus the 'Irradiation Time'. The table output returns only the real 'Time Since Irradiation', i.e. time between the end of the irradiation and the next step. \strong{Negative values for \code{TIMESINCELAS.STEP}?} Yes, this is possible and no bug, as in the XSYG-file multiple curves are stored for one step. Example: TL step may comprise three curves: \itemize{ \item (a) counts vs. time, \item (b) measured temperature vs. time and \item (c) predefined temperature vs. time. } Three curves, but they are all belonging to one TL measurement step, but with regard to the time stamps this could produce negative values as the important function (\link{read_XSYG2R}) does not change the order of entries for one step towards a correct time order. } \section{Function version}{ 0.3.1 } \examples{ ## (1) - example for your own data ## ## set files and run function # # file.XSYG <- file.choose() # file.BINX <- file.choose() # # output <- extract_IrradiationTimes(file.XSYG = file.XSYG, file.BINX = file.BINX) # get_RLum(output) # ## export results additionally to a CSV.file in the same directory as the XSYG-file # write.table(x = get_RLum(output), # file = paste0(file.BINX,"_extract_IrradiationTimes.csv"), # sep = ";", # row.names = FALSE) } \section{How to cite}{ Kreutzer, S., 2020. extract_IrradiationTimes(): Extract Irradiation Times from an XSYG-file. Function version 0.3.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. 
Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Duller, G.A.T., 2015. The Analyst software package for luminescence data: overview and recent improvements. Ancient TL 33, 35-42. } \seealso{ \linkS4class{RLum.Analysis}, \linkS4class{RLum.Results}, \linkS4class{Risoe.BINfileData}, \link{read_XSYG2R}, \link{read_BIN2R}, \link{write_R2BIN} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \keyword{IO} \keyword{manip} Luminescence/man/calc_MinDose.Rd0000644000176200001440000003571113604173244016261 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_MinDose.R \name{calc_MinDose} \alias{calc_MinDose} \title{Apply the (un-)logged minimum age model (MAM) after Galbraith et al. (1999) to a given De distribution} \usage{ calc_MinDose( data, sigmab, log = TRUE, par = 3, bootstrap = FALSE, init.values, level = 0.95, log.output = FALSE, plot = TRUE, multicore = FALSE, ... ) } \arguments{ \item{data}{\linkS4class{RLum.Results} or \link{data.frame} (\strong{required}): for \link{data.frame}: two columns with De \code{(data[ ,1])} and De error \code{(data[ ,2])}.} \item{sigmab}{\link{numeric} (\strong{required}): additional spread in De values. This value represents the expected overdispersion in the data should the sample be well-bleached (Cunningham & Walling 2012, p. 100). \strong{NOTE}: For the logged model (\code{log = TRUE}) this value must be a fraction, e.g. 0.2 (= 20 \\%). If the un-logged model is used (\code{log = FALSE}), sigmab must be provided in the same absolute units of the De values (seconds or Gray). See details.} \item{log}{\link{logical} (\emph{with default}): fit the (un-)logged minimum dose model to De data.} \item{par}{\link{numeric} (\emph{with default}): apply the 3- or 4-parameter minimum age model (\code{par=3} or \code{par=4}). 
The MAM-3 is used by default.} \item{bootstrap}{\link{logical} (\emph{with default}): apply the recycled bootstrap approach of Cunningham & Wallinga (2012).} \item{init.values}{\link{numeric} (\emph{optional}): a named list with starting values for gamma, sigma, p0 and mu (e.g. \code{list(gamma=100, sigma=1.5, p0=0.1, mu=100)}). If no values are provided reasonable values are tried to be estimated from the data. \strong{NOTE} that the initial values must always be given in the absolute units. If the logged model is applied (\code{log = TRUE}), the provided \code{init.values} are automatically log transformed.} \item{level}{\link{logical} (\emph{with default}): the confidence level required (defaults to 0.95).} \item{log.output}{\link{logical} (\emph{with default}): If \code{TRUE} the console output will also show the logged values of the final parameter estimates and confidence intervals (only applicable if \code{log = TRUE}).} \item{plot}{\link{logical} (\emph{with default}): plot output (\code{TRUE}/\code{FALSE})} \item{multicore}{\link{logical} (\emph{with default}): enable parallel computation of the bootstrap by creating a multicore SNOW cluster. Depending on the number of available logical CPU cores this may drastically reduce the computation time. Note that this option is highly experimental and may not work on all machines. (\code{TRUE}/\code{FALSE})} \item{...}{(\emph{optional}) further arguments for bootstrapping (\verb{bs.M, bs.N, bs.h, sigmab.sd}). See details for their usage. Further arguments are \itemize{ \item \code{verbose} to de-/activate console output (logical), \item \code{debug} for extended console output (logical) and \item \code{cores} (integer) to manually specify the number of cores to be used when \code{multicore=TRUE}. }} } \value{ Returns a plot (\emph{optional}) and terminal output. 
In addition an \linkS4class{RLum.Results} object is returned containing the following elements: \item{.$summary}{\link{data.frame} summary of all relevant model results.} \item{.$data}{\link{data.frame} original input data} \item{args}{\link{list} used arguments} \item{call}{\link{call} the function call} \item{.$mle}{\link{mle2} object containing the maximum log likelhood functions for all parameters} \item{BIC}{\link{numeric} BIC score} \item{.$confint}{\link{data.frame} confidence intervals for all parameters} \item{.$profile}{\link{profile.mle2} the log likelihood profiles} \item{.$bootstrap}{\link{list} bootstrap results} The output should be accessed using the function \link{get_RLum} } \description{ Function to fit the (un-)logged three or four parameter minimum dose model (MAM-3/4) to De data. } \details{ \strong{Parameters} This model has four parameters: \tabular{rl}{ \code{gamma}: \tab minimum dose on the log scale \cr \code{mu}: \tab mean of the non-truncated normal distribution \cr \code{sigma}: \tab spread in ages above the minimum \cr \code{p0}: \tab proportion of grains at gamma \cr } If \code{par=3} (default) the 3-parameter minimum age model is applied, where \code{gamma=mu}. For \code{par=4} the 4-parameter model is applied instead. \strong{(Un-)logged model} In the original version of the minimum dose model, the basic data are the natural logarithms of the De estimates and relative standard errors of the De estimates. The value for \code{sigmab} must be provided as a ratio (e.g, 0.2 for 20 \\%). This model will be applied if \code{log = TRUE}. If \code{log=FALSE}, the modified un-logged model will be applied instead. This has essentially the same form as the original version. \code{gamma} and \code{sigma} are in Gy and \code{gamma} becomes the minimum true dose in the population. \strong{Note} that the un-logged model requires \code{sigmab} to be in the same absolute unit as the provided De values (seconds or Gray). 
While the original (logged) version of the minimum dose model may be appropriate for most samples (i.e. De distributions), the modified (un-logged) version is specially designed for modern-age and young samples containing negative, zero or near-zero De estimates (Arnold et al. 2009, p. 323). \strong{Initial values & boundaries} The log likelihood calculations use the \link{nlminb} function for box-constrained optimisation using PORT routines. Accordingly, initial values for the four parameters can be specified via \code{init.values}. If no values are provided for \code{init.values} reasonable starting values are estimated from the input data. If the final estimates of \emph{gamma}, \emph{mu}, \emph{sigma} and \emph{p0} are totally off target, consider providing custom starting values via \code{init.values}. In contrast to previous versions of this function the boundaries for the individual model parameters are no longer required to be explicitly specified. If you want to override the default boundary values use the arguments \code{gamma.lower}, \code{gamma.upper}, \code{sigma.lower}, \code{sigma.upper}, \code{p0.lower}, \code{p0.upper}, \code{mu.lower} and \code{mu.upper}. \strong{Bootstrap} When \code{bootstrap=TRUE} the function applies the bootstrapping method as described in Cunningham & Wallinga (2012). By default, the minimum age model produces 1000 first level and 3000 second level bootstrap replicates (actually, the number of second level bootstrap replicates is three times the number of first level replicates unless specified otherwise). The uncertainty on sigmab is 0.04 by default. These values can be changed by using the arguments \code{bs.M} (first level replicates), \code{bs.N} (second level replicates) and \code{sigmab.sd} (error on sigmab). With \code{bs.h} the bandwidth of the kernel density estimate can be specified. 
By default, \code{h} is calculated as \deqn{h = (2*\sigma_{DE})/\sqrt{n}} \strong{Multicore support} This function supports parallel computing and can be activated by \code{multicore=TRUE}. By default, the number of available logical CPU cores is determined automatically, but can be changed with \code{cores}. The multicore support is only available when \code{bootstrap=TRUE} and spawns \code{n} R instances for each core to get MAM estimates for each of the N and M bootstrap replicates. Note that this option is highly experimental and may or may not work for your machine. Also the performance gain increases for larger number of bootstrap replicates. Also note that with each additional core and hence R instance and depending on the number of bootstrap replicates the memory usage can significantly increase. Make sure that memory is always available, otherwise there will be a massive performance hit. \strong{Likelihood profiles} The likelihood profiles are generated and plotted by the \code{bbmle} package. The profile likelihood plots look different to ordinary profile likelihood as "\verb{[...]} the plot method for likelihood profiles displays the square root of the deviance difference (twice the difference in negative log-likelihood from the best fit), so it will be V-shaped for cases where the quadratic approximation works well \verb{[...]}." (Bolker 2016). For more details on the profile likelihood calculations and plots please see the vignettes of the \code{bbmle} package (also available here: \url{https://CRAN.R-project.org/package=bbmle}). } \note{ The default starting values for \emph{gamma}, \emph{mu}, \emph{sigma} and \emph{p0} may only be appropriate for some De data sets and may need to be changed for other data. This is especially true when the un-logged version is applied. \cr Also note that all R warning messages are suppressed when running this function. 
If the results seem odd consider re-running the model with \code{debug=TRUE} which provides extended console output and forwards all internal warning messages. } \section{Function version}{ 0.4.4 } \examples{ ## Load example data data(ExampleData.DeValues, envir = environment()) # (1) Apply the minimum age model with minimum required parameters. # By default, this will apply the un-logged 3-parameter MAM. calc_MinDose(data = ExampleData.DeValues$CA1, sigmab = 0.1) \dontrun{ # (2) Re-run the model, but save results to a variable and turn # plotting of the log-likelihood profiles off. mam <- calc_MinDose(data = ExampleData.DeValues$CA1, sigmab = 0.1, plot = FALSE) # Show structure of the RLum.Results object mam # Show summary table that contains the most relevant results res <- get_RLum(mam, "summary") res # Plot the log likelihood profiles retroactively, because before # we set plot = FALSE plot_RLum(mam) # Plot the dose distribution in an abanico plot and draw a line # at the minimum dose estimate plot_AbanicoPlot(data = ExampleData.DeValues$CA1, main = "3-parameter Minimum Age Model", line = mam,polygon.col = "none", hist = TRUE, rug = TRUE, summary = c("n", "mean", "mean.weighted", "median", "in.ci"), centrality = res$de, line.col = "red", grid.col = "none", line.label = paste0(round(res$de, 1), "\U00B1", round(res$de_err, 1), " Gy"), bw = 0.1, ylim = c(-25, 18), summary.pos = "topleft", mtext = bquote("Parameters: " ~ sigma[b] == .(get_RLum(mam, "args")$sigmab) ~ ", " ~ gamma == .(round(log(res$de), 1)) ~ ", " ~ sigma == .(round(res$sig, 1)) ~ ", " ~ rho == .(round(res$p0, 2)))) # (3) Run the minimum age model with bootstrap # NOTE: Bootstrapping is computationally intensive # (3.1) run the minimum age model with default values for bootstrapping calc_MinDose(data = ExampleData.DeValues$CA1, sigmab = 0.15, bootstrap = TRUE) # (3.2) Bootstrap control parameters mam <- calc_MinDose(data = ExampleData.DeValues$CA1, sigmab = 0.15, bootstrap = TRUE, bs.M = 300, bs.N = 
500, bs.h = 4, sigmab.sd = 0.06, plot = FALSE) # Plot the results plot_RLum(mam) # save bootstrap results in a separate variable bs <- get_RLum(mam, "bootstrap") # show structure of the bootstrap results str(bs, max.level = 2, give.attr = FALSE) # print summary of minimum dose and likelihood pairs summary(bs$pairs$gamma) # Show polynomial fits of the bootstrap pairs bs$poly.fits$poly.three # Plot various statistics of the fit using the generic plot() function par(mfcol=c(2,2)) plot(bs$poly.fits$poly.three, ask = FALSE) # Show the fitted values of the polynomials summary(bs$poly.fits$poly.three$fitted.values) } } \section{How to cite}{ Burow, C., 2020. calc_MinDose(): Apply the (un-)logged minimum age model (MAM) after Galbraith et al. (1999) to a given De distribution. Function version 0.4.4. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Arnold, L.J., Roberts, R.G., Galbraith, R.F. & DeLong, S.B., 2009. A revised burial dose estimation procedure for optical dating of young and modern-age sediments. Quaternary Geochronology 4, 306-325. Galbraith, R.F. & Laslett, G.M., 1993. Statistical models for mixed fission track ages. Nuclear Tracks Radiation Measurements 4, 459-470. Galbraith, R.F., Roberts, R.G., Laslett, G.M., Yoshida, H. & Olley, J.M., 1999. Optical dating of single grains of quartz from Jinmium rock shelter, northern Australia. Part I: experimental design and statistical models. Archaeometry 41, 339-364. Galbraith, R.F., 2005. Statistics for Fission Track Analysis, Chapman & Hall/CRC, Boca Raton. Galbraith, R.F. & Roberts, R.G., 2012. Statistical aspects of equivalent dose and error calculation and display in OSL dating: An overview and some recommendations. Quaternary Geochronology 11, 1-27. Olley, J.M., Roberts, R.G., Yoshida, H., Bowler, J.M., 2006. 
Single-grain optical dating of grave-infill associated with human burials at Lake Mungo, Australia. Quaternary Science Reviews 25, 2469-2474. \strong{Further reading} Arnold, L.J. & Roberts, R.G., 2009. Stochastic modelling of multi-grain equivalent dose (De) distributions: Implications for OSL dating of sediment mixtures. Quaternary Geochronology 4, 204-230. Bolker, B., 2016. Maximum likelihood estimation analysis with the bbmle package. In: Bolker, B., R Development Core Team, 2016. bbmle: Tools for General Maximum Likelihood Estimation. R package version 1.0.18. \url{https://CRAN.R-project.org/package=bbmle} Bailey, R.M. & Arnold, L.J., 2006. Statistical modelling of single grain quartz De distributions and an assessment of procedures for estimating burial dose. Quaternary Science Reviews 25, 2475-2502. Cunningham, A.C. & Wallinga, J., 2012. Realizing the potential of fluvial archives using robust OSL chronologies. Quaternary Geochronology 12, 98-106. Rodnight, H., Duller, G.A.T., Wintle, A.G. & Tooth, S., 2006. Assessing the reproducibility and accuracy of optical dating of fluvial deposits. Quaternary Geochronology 1, 109-120. Rodnight, H., 2008. How many equivalent dose values are needed to obtain a reproducible distribution?. Ancient TL 26, 3-10. } \seealso{ \link{calc_CentralDose}, \link{calc_CommonDose}, \link{calc_FiniteMixture}, \link{calc_FuchsLang2001}, \link{calc_MaxDose} } \author{ Christoph Burow, University of Cologne (Germany) \cr Based on a rewritten S script of Rex Galbraith, 2010 \cr The bootstrap approach is based on a rewritten MATLAB script of Alastair Cunningham. \cr Alastair Cunningham is thanked for his help in implementing and cross-checking the code. 
, RLum Developer Team} Luminescence/man/plot_RLum.Data.Curve.Rd0000644000176200001440000000417413604173245017611 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_RLum.Data.Curve.R \name{plot_RLum.Data.Curve} \alias{plot_RLum.Data.Curve} \title{Plot function for an RLum.Data.Curve S4 class object} \usage{ plot_RLum.Data.Curve( object, par.local = TRUE, norm = FALSE, smooth = FALSE, ... ) } \arguments{ \item{object}{\linkS4class{RLum.Data.Curve} (\strong{required}): S4 object of class \code{RLum.Data.Curve}} \item{par.local}{\link{logical} (\emph{with default}): use local graphical parameters for plotting, e.g. the plot is shown in one column and one row. If \code{par.local = FALSE}, global parameters are inherited.} \item{norm}{\link{logical} (\emph{with default}): allows curve normalisation to the highest count value} \item{smooth}{\link{logical} (\emph{with default}): provides an automatic curve smoothing based on \link[zoo:rollmean]{zoo::rollmean}} \item{...}{further arguments and graphical parameters that will be passed to the \code{plot} function} } \value{ Returns a plot. } \description{ The function provides a standardised plot output for curve data of an RLum.Data.Curve S4 class object } \details{ Only single curve data can be plotted with this function. Arguments according to \link{plot}. } \note{ Not all arguments of \link{plot} will be passed! } \section{Function version}{ 0.2.3 } \examples{ ##plot curve data #load Example data data(ExampleData.CW_OSL_Curve, envir = environment()) #transform data.frame to RLum.Data.Curve object temp <- as(ExampleData.CW_OSL_Curve, "RLum.Data.Curve") #plot RLum.Data.Curve object plot_RLum.Data.Curve(temp) } \seealso{ \link{plot}, \link{plot_RLum} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. plot_RLum.Data.Curve(): Plot function for an RLum.Data.Curve S4 class object. 
Function version 0.2.3. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{aplot} Luminescence/man/calc_FuchsLang2001.Rd0000644000176200001440000000716213604173244017077 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_FuchsLang2001.R \name{calc_FuchsLang2001} \alias{calc_FuchsLang2001} \title{Apply the model after Fuchs & Lang (2001) to a given De distribution.} \usage{ calc_FuchsLang2001(data, cvThreshold = 5, startDeValue = 1, plot = TRUE, ...) } \arguments{ \item{data}{\linkS4class{RLum.Results} or \link{data.frame} (\strong{required}): for \link{data.frame}: two columns with De \code{(data[,1])} and De error \code{(values[,2])}} \item{cvThreshold}{\link{numeric} (\emph{with default}): coefficient of variation in percent, as threshold for the method, e.g. \code{cvThreshold = 3}. See details .} \item{startDeValue}{\link{numeric} (\emph{with default}): number of the first aliquot that is used for the calculations} \item{plot}{\link{logical} (\emph{with default}): plot output \code{TRUE}/\code{FALSE}} \item{...}{further arguments and graphical parameters passed to \link{plot}} } \value{ Returns a plot (\emph{optional}) and terminal output. In addition an \linkS4class{RLum.Results} object is returned containing the following elements: \item{summary}{\link{data.frame} summary of all relevant model results.} \item{data}{\link{data.frame} original input data} \item{args}{\link{list} used arguments} \item{call}{\link{call} the function call} \item{usedDeValues}{\link{data.frame} containing the used values for the calculation} } \description{ This function applies the method according to Fuchs & Lang (2001) for heterogeneously bleached samples with a given coefficient of variation threshold. 
} \details{ \strong{Used values} If the coefficient of variation (\code{c[v]}) of the first two values is larger than the threshold \code{c[v_threshold]}, the first value is skipped. Use the \code{startDeValue} argument to define a start value for calculation (e.g. 2nd or 3rd value). \strong{Basic steps of the approach} \enumerate{ \item Estimate natural relative variation of the sample using a dose recovery test \item Sort the input values ascendingly \item Calculate a running mean, starting with the lowermost two values and add values iteratively. \item Stop if the calculated \code{c[v]} exceeds the specified \code{cvThreshold} } } \note{ Please consider the requirements and the constraints of this method (see Fuchs & Lang, 2001) } \section{Function version}{ 0.4.1 } \examples{ ## load example data data(ExampleData.DeValues, envir = environment()) ## calculate De according to Fuchs & Lang (2001) temp<- calc_FuchsLang2001(ExampleData.DeValues$BT998, cvThreshold = 5) } \section{How to cite}{ Kreutzer, S., Burow, C., 2020. calc_FuchsLang2001(): Apply the model after Fuchs & Lang (2001) to a given De distribution. Function version 0.4.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Fuchs, M. & Lang, A., 2001. OSL dating of coarse-grain fluvial quartz using single-aliquot protocols on sediments from NE Peloponnese, Greece. In: Quaternary Science Reviews 20, 783-787. Fuchs, M. & Wagner, G.A., 2003. Recognition of insufficient bleaching by small aliquots of quartz for reconstructing soil erosion in Greece. Quaternary Science Reviews 22, 1161-1167. 
} \seealso{ \link{plot}, \link{calc_MinDose}, \link{calc_FiniteMixture}, \link{calc_CentralDose}, \link{calc_CommonDose}, \linkS4class{RLum.Results} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) \cr Christoph Burow, University of Cologne (Germany) , RLum Developer Team} \keyword{dplot} Luminescence/man/analyse_SAR.CWOSL.Rd0000644000176200001440000002366213604173243016771 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/analyse_SAR.CWOSL.R \name{analyse_SAR.CWOSL} \alias{analyse_SAR.CWOSL} \title{Analyse SAR CW-OSL measurements} \usage{ analyse_SAR.CWOSL( object, signal.integral.min, signal.integral.max, background.integral.min, background.integral.max, rejection.criteria = NULL, dose.points = NULL, mtext.outer, plot = TRUE, plot.single = FALSE, onlyLxTxTable = FALSE, ... ) } \arguments{ \item{object}{\linkS4class{RLum.Analysis} (\strong{required}): input object containing data for analysis, alternatively a \link{list} of \linkS4class{RLum.Analysis} objects can be provided.} \item{signal.integral.min}{\link{integer} (\strong{required}): lower bound of the signal integral. Can be a \link{list} of \link{integer}s, if \code{object} is of type \link{list}. If the input is vector (e.g., \code{c(1,2)}) the 2nd value will be interpreted as the minimum signal integral for the Tx curve.} \item{signal.integral.max}{\link{integer} (\strong{required}): upper bound of the signal integral. Can be a \link{list} of \link{integer}s, if \code{object} is of type \link{list}. If the input is vector (e.g., \code{c(1,2)}) the 2nd value will be interpreted as the maximum signal integral for the Tx curve.} \item{background.integral.min}{\link{integer} (\strong{required}): lower bound of the background integral. Can be a \link{list} of \link{integer}s, if \code{object} is of type \link{list}. 
If the input is vector (e.g., \code{c(1,2)}) the 2nd value will be interpreted as the minimum background integral for the Tx curve.} \item{background.integral.max}{\link{integer} (\strong{required}): upper bound of the background integral. Can be a \link{list} of \link{integer}s, if \code{object} is of type \link{list}. If the input is vector (e.g., \code{c(1,2)}) the 2nd value will be interpreted as the maximum background integral for the Tx curve.} \item{rejection.criteria}{\link{list} (\emph{with default}): provide a named list and set rejection criteria in \strong{percentage} for further calculation. Can be a \link{list} in a \link{list}, if \code{object} is of type \link{list} Allowed arguments are \code{recycling.ratio}, \code{recuperation.rate}, \code{palaeodose.error}, \code{testdose.error} and \code{exceed.max.regpoint = TRUE/FALSE}. Example: \code{rejection.criteria = list(recycling.ratio = 10)}. Per default all numerical values are set to 10, \code{exceed.max.regpoint = TRUE}. Every criterion can be set to \code{NA}. In this case the values are calculated, but not considered, i.e. the RC.Status always becomes \code{'OK'}} \item{dose.points}{\link{numeric} (\emph{optional}): a numeric vector containing the dose point values. Using this argument overwrites dose point values in the signal curves. Can be a \link{list} of \link{numeric} vectors, if \code{object} is of type \link{list}} \item{mtext.outer}{\link{character} (\emph{optional}): option to provide an outer margin mtext. Can be a \link{list} of \link{character}s, if \code{object} is of type \link{list}} \item{plot}{\link{logical} (\emph{with default}): enables or disables plot output.} \item{plot.single}{\link{logical} (\emph{with default}) or \link{numeric} (\emph{optional}): single plot output (\code{TRUE/FALSE}) to allow for plotting the results in single plot windows. If a numeric vector is provided the plots can be selected individually, i.e. 
\code{plot.single = c(1,2,3,4)} will plot the TL and Lx, Tx curves but not the legend (5) or the growth curve (6), (7) and (8) belong to rejection criteria plots. Requires \code{plot = TRUE}.} \item{onlyLxTxTable}{\link{logical} (with default): If \code{TRUE} the dose response curve fitting and plotting is skipped. This allows to get hands on the Lx/Tx table for large datasets without the need for a curve fitting.} \item{...}{further arguments that will be passed to the function \link{plot_GrowthCurve} or \link{calc_OSLLxTxRatio} (supported: \code{background.count.distribution}, \code{sigmab}, \code{sig0}). \strong{Please note} that if you consider to use the early light subtraction method you should provide your own \code{sigmab} value!} } \value{ A plot (\emph{optional}) and an \linkS4class{RLum.Results} object is returned containing the following elements: \item{data}{\link{data.frame} containing De-values, De-error and further parameters} \item{LnLxTnTx.values}{\link{data.frame} of all calculated Lx/Tx values including signal, background counts and the dose points} \item{rejection.criteria}{\link{data.frame} with values that might by used as rejection criteria. NA is produced if no R0 dose point exists.} \item{Formula}{\link{formula} formula that have been used for the growth curve fitting } The output should be accessed using the function \link{get_RLum}. } \description{ The function performs a SAR CW-OSL analysis on an \linkS4class{RLum.Analysis} object including growth curve fitting. } \details{ The function performs an analysis for a standard SAR protocol measurements introduced by Murray and Wintle (2000) with CW-OSL curves. For the calculation of the Lx/Tx value the function \link{calc_OSLLxTxRatio} is used. For \strong{changing the way the Lx/Tx error is calculated} use the argument \code{background.count.distribution} and \code{sigmab}, which will be passed to the function \link{calc_OSLLxTxRatio}. 
\strong{Argument \code{object} is of type \code{list}} If the argument \code{object} is of type \link{list} containing \strong{only} \linkS4class{RLum.Analysis} objects, the function re-calls itself as often as elements are in the list. This is useful if an entire measurement is to be analysed without writing separate for-loops. To gain in full control of the parameters (e.g., \code{dose.points}) for every aliquot (corresponding to one \linkS4class{RLum.Analysis} object in the list), in this case the arguments can be provided as \link{list}. This \code{list} should be of similar length as the \code{list} provided with the argument \code{object}, otherwise the function will create an own list of the requested length. Function output will be just one single \linkS4class{RLum.Results} object. Please be careful when using this option. It may allow a fast and efficient data analysis, but the function may also break with an unclear error message, due to wrong input data. \strong{Working with IRSL data} The function was originally designed to work just for 'OSL' curves, following the principles of the SAR protocol. An IRSL measurement protocol may follow this procedure, e.g., post-IR IRSL protocol (Thomsen et al., 2008). Therefore this function has been enhanced to work with IRSL data, however, the function is only capable of analysing curves that follow the SAR protocol structure, i.e., to analyse a post-IR IRSL protocol, curve data have to be pre-selected by the user to fit the standards of the SAR protocol, i.e., Lx,Tx,Lx,Tx and so on. Example: Imagine the measurement contains pIRIR50 and pIRIR225 IRSL curves. Only one curve type can be analysed at the same time: The pIRIR50 curves or the pIRIR225 curves. \strong{Supported rejection criteria} \verb{[recycling.ratio]}: calculated for every repeated regeneration dose point. 
\verb{[recuperation.rate]}: recuperation rate calculated by comparing the Lx/Tx values of the zero regeneration point with the Ln/Tn value (the Lx/Tx ratio of the natural signal). For methodological background see Aitken and Smith (1988). \verb{[testdose.error]}: set the allowed error for the testdose, which per default should not exceed 10\\%. The testdose error is calculated as Tx_net.error/Tx_net. \verb{[palaeodose.error]}: set the allowed error for the De value, which per default should not exceed 10\\%. } \note{ This function must not be mixed up with the function \link{Analyse_SAR.OSLdata}, which works with \linkS4class{Risoe.BINfileData} objects. \strong{The function currently does support only 'OSL', 'IRSL' and 'POSL' data!} } \section{Function version}{ 0.8.8 } \examples{ ##load data ##ExampleData.BINfileData contains two BINfileData objects ##CWOSL.SAR.Data and TL.SAR.Data data(ExampleData.BINfileData, envir = environment()) ##transform the values from the first position in a RLum.Analysis object object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) ##perform SAR analysis and set rejection criteria results <- analyse_SAR.CWOSL( object = object, signal.integral.min = 1, signal.integral.max = 2, background.integral.min = 900, background.integral.max = 1000, log = "x", fit.method = "EXP", rejection.criteria = list( recycling.ratio = 10, recuperation.rate = 10, testdose.error = 10, palaeodose.error = 10, exceed.max.regpoint = TRUE) ) ##show De results get_RLum(results) ##show LnTnLxTx table get_RLum(results, data.object = "LnLxTnTx.table") } \section{How to cite}{ Kreutzer, S., 2020. analyse_SAR.CWOSL(): Analyse SAR CW-OSL measurements. Function version 0.8.8. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Aitken, M.J. and Smith, B.W., 1988. 
Optical dating: recuperation after bleaching. Quaternary Science Reviews 7, 387-393. Duller, G., 2003. Distinguishing quartz and feldspar in single grain luminescence measurements. Radiation Measurements, 37 (2), 161-165. Murray, A.S. and Wintle, A.G., 2000. Luminescence dating of quartz using an improved single-aliquot regenerative-dose protocol. Radiation Measurements 32, 57-73. Thomsen, K.J., Murray, A.S., Jain, M., Boetter-Jensen, L., 2008. Laboratory fading rates of various luminescence signals from feldspar-rich sediment extracts. Radiation Measurements 43, 1474-1486. doi:10.1016/j.radmeas.2008.06.002 } \seealso{ \link{calc_OSLLxTxRatio}, \link{plot_GrowthCurve}, \linkS4class{RLum.Analysis}, \linkS4class{RLum.Results}, \link{get_RLum} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS-Université Bordeaux Montaigne (France) , RLum Developer Team} \keyword{datagen} \keyword{plot} Luminescence/man/fit_OSLLifeTimes.Rd0000644000176200001440000002063713604173244017043 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/fit_OSLLifeTimes.R \name{fit_OSLLifeTimes} \alias{fit_OSLLifeTimes} \title{Fitting and Deconvolution of OSL Lifetime Components} \usage{ fit_OSLLifeTimes( object, tp = 0, signal_range = NULL, n.components = NULL, method_control = list(), plot = TRUE, plot_simple = FALSE, verbose = TRUE, ... ) } \arguments{ \item{object}{\linkS4class{RLum.Data.Curve}, \linkS4class{RLum.Analysis}, \link{data.frame} or \link{matrix} \strong{(required)}: Input object containing the data to be analysed. All objects can be provided also as list for an automated processing. Please note: \code{NA} values are automatically removed and the dataset should comprise at least 5 data points.} \item{tp}{\link{numeric} (\emph{with default}): option to account for the stimulation pulse width. For off-time measurements the default value is 0. \code{tp} has the same unit as the measurement data, e.g., µs. 
Please set this parameter carefully, if at all, otherwise you may heavily bias your fit results.} \item{signal_range}{\link{numeric} (\emph{optional}): allows to set a channel range, by default all channels are used, e.g. \code{signal_range = c(2,100)} considers only channels 2 to 100 and \code{signal_range = c(2)} considers only channels from channel 2 onwards.} \item{n.components}{\link{numeric} (\emph{optional}): Fix the number of components. If set the algorithm will try to fit the number of predefined components. If nothing is set, the algorithm will try to find the best number of components.} \item{method_control}{\link{list} (\emph{optional}): Named list to allow a finer control of the fitting process. See details for allowed options.} \item{plot}{\link{logical} (\emph{with default}): Enable/disable plot output} \item{plot_simple}{\link{logical} (\emph{with default}): Enable/disable reduced plot output. If \code{TRUE}, no residual plot is shown, however, plot output can be combined using the standard R layout options, such as \code{par(mfrow = c(2,2))}.} \item{verbose}{\link{logical} (\emph{with default}): Enable/disable terminal feedback} \item{...}{parameters passed to \link{plot.default} to control the plot output, supported are: \code{main}, \code{xlab}, \code{ylab}, \code{log}, \code{xlim}, \code{ylim}, \code{col}, \code{lty}, \code{legend.pos}, \code{legend.text}. 
If the input object is of type \linkS4class{RLum.Analysis} these arguments can be provided as a \link{list}.} } \value{ -----------------------------------\cr \verb{[ NUMERICAL OUTPUT ]}\cr -----------------------------------\cr \strong{\code{RLum.Results}}-object \strong{slot:} \strong{\verb{@data}} \tabular{lll}{ \strong{Element} \tab \strong{Type} \tab \strong{Description}\cr \verb{$data} \tab \code{matrix} \tab the final fit matrix \cr \verb{$start_matrix} \tab \code{matrix} \tab the start matrix used for the fitting \cr \verb{$total_counts} \tab \code{integer} \tab Photon count sum \cr \verb{$fit} \tab \code{nls} \tab the fit object returned by \link[minpack.lm:nls.lm]{minpack.lm::nls.lm} \cr } \strong{slot:} \strong{\verb{@info}} The original function call ------------------------\cr \verb{[ TERMINAL OUTPUT ]}\cr ------------------------\cr Terminal output is only shown if the argument \code{verbose = TRUE}. \emph{(1) Start parameter and component adaptation}\cr Trace of the parameter adaptation process \emph{(2) Fitting results (sorted by ascending tau)}\cr The fitting results sorted by ascending tau value. Please note that if you access the \code{nls} fitting object, the values are not sorted. \emph{(3) Further information}\cr \itemize{ \item The photon count sum \item Durbin-Watson residual statistic to assess whether the residuals are correlated, ideally the residuals should not be correlated at all. Rough measures are: \cr D = 0: the residuals are systematically correlated \cr D = 2: the residuals are randomly distributed \cr D = 4: the residuals are systematically anticorrelated\cr } You should be suspicious if D differs largely from 2. ------------------------\cr \verb{[ PLOT OUTPUT ]}\cr ------------------------\cr A plot showing the original data and the fit so far possible. The lower plot shows the residuals of the fit. 
} \description{ Fitting and Deconvolution of OSL Lifetime Components } \details{ The function intends to provide easy access to pulsed optically stimulated luminescence (POSL) data, in order to determine signal lifetimes. The fitting is currently optimised to work with the off-time flank of POSL measurements only. For the signal deconvolution, a differential evolution optimisation is combined with nonlinear least-square fitting following the approach by Bluszcz & Adamiec (2006). \strong{Component deconvolution algorithm} The component deconvolution consists of two steps: (1) Adaptation phase In the adaptation phase the function tries to figure out the optimal and statistically justified number of signal components following roughly the approach suggested by Bluszcz & Adamiec (2006). In contrast to their work, for the optimisation by differential evolution here the package 'DEoptim' is used. The function to be optimized has the form: \deqn{\chi^2 = \sum(w * (n_i/c - \sum(A_i * exp(-x/(tau_i + t_p))))^2)} with \eqn{w = 1} for unweighted regression analysis (\code{method_control = list(weights = FALSE)}) or \eqn{w = c^2/n_i} for weighted regression analysis. The default value is \code{TRUE}. \deqn{F = (\Delta\chi^2 / 2) / (\chi^2/(N - 2*m - 2))} (2) Final fitting \strong{\code{method_control}} \tabular{lll}{ \strong{Parameter} \tab \strong{Type} \tab \strong{Description}\cr \code{p} \tab \link{numeric} \tab controls the probability for the F statistic reference values. For a significance level of 5 \% a value of 0.95 (the default) should be added, for 1 \%, a value of 0.99 is sufficient: 1 > p > 0 (cf. \link[stats:FDist]{stats::FDist})\cr \code{seed} \tab \link{numeric} \tab set the seed for the random number generator, provide a value here to get reproducible results \cr \code{DEoptim.trace} \tab \link{logical} \tab enables/disables the tracing of the differential evolution (cf. 
\link[DEoptim:DEoptim.control]{DEoptim::DEoptim.control}) \cr \code{DEoptim.itermax} \tab \link{logical} \tab controls the number of the allowed generations (cf. \link[DEoptim:DEoptim.control]{DEoptim::DEoptim.control}) \cr \code{weights} \tab \link{logical} \tab enables/disables the weighting for the start parameter estimation and fitting (see equations above). The default value is \code{TRUE} \cr \code{nlsLM.trace} \tab \link{logical} \tab enables/disables trace mode for the nls fitting (\link[minpack.lm:nlsLM]{minpack.lm::nlsLM}), can be used to identify convergence problems, default is \code{FALSE} \cr \code{nlsLM.upper} \tab \link{logical} \tab enables/disables upper parameter boundary, default is \code{TRUE} \cr \code{nlsLM.lower} \tab \link{logical} \tab enables/disables lower parameter boundary, default is \code{TRUE} } } \section{Function version}{ 0.1.5 } \examples{ ##load example data data(ExampleData.TR_OSL, envir = environment()) ##fit lifetimes (short run) fit_OSLLifeTimes( object = ExampleData.TR_OSL, n.components = 1) ##long example \dontrun{ fit_OSLLifeTimes( object = ExampleData.TR_OSL) } } \section{How to cite}{ Kreutzer, S., Schmidt, C., 2020. fit_OSLLifeTimes(): Fitting and Deconvolution of OSL Lifetime Components. Function version 0.1.5. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Bluszcz, A., Adamiec, G., 2006. Application of differential evolution to fitting OSL decay curves. Radiation Measurements 41, 886-891. doi:10.1016/j.radmeas.2006.05.016\cr Durbin, J., Watson, G.S., 1950. Testing for Serial Correlation in Least Squares Regression: I. Biometrika 37, 409-21. doi:10.2307/2332391 \strong{Further reading} Hughes, I., Hase, T., 2010. Measurements and Their Uncertainties. Oxford University Press. Storn, R., Price, K., 1997. 
Differential Evolution – A Simple and Efficient Heuristic for Global Optimization over Continuous Spaces. Journal of Global Optimization 11, 341–359. } \seealso{ \link[minpack.lm:nls.lm]{minpack.lm::nls.lm}, \link[DEoptim:DEoptim]{DEoptim::DEoptim} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS-Université Bordeaux Montaigne (France), Christoph Schmidt, University of Bayreuth (Germany) , RLum Developer Team} Luminescence/man/get_Quote.Rd0000644000176200001440000000241313604173244015666 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/get_Quote.R \name{get_Quote} \alias{get_Quote} \title{Function to return essential quotes} \usage{ get_Quote(ID, separated = FALSE) } \arguments{ \item{ID}{\link{character} (\emph{optional}): quote ID to be returned.} \item{separated}{\link{logical} (\emph{with default}): return result in separated form.} } \value{ Returns a character with quote and respective (false) author. } \description{ This function returns one of the collected essential quotes in the growing library. If called without any parameters, a random quote is returned. } \section{Function version}{ 0.1.4 } \examples{ ## ask for an arbitrary quote get_Quote() } \author{ Michael Dietze, GFZ Potsdam (Germany), Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France), Dirk Mittelstraß, TU Dresden (Germany) , RLum Developer Team} \section{How to cite}{ Dietze, M., Kreutzer, S., 2020. get_Quote(): Function to return essential quotes. Function version 0.1.4. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } Luminescence/man/plot_NRt.Rd0000644000176200001440000001061513604173245015477 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_NRt.R \name{plot_NRt} \alias{plot_NRt} \title{Visualise natural/regenerated signal ratios} \usage{ plot_NRt( data, log = FALSE, smooth = c("none", "spline", "rmean"), k = 3, legend = TRUE, legend.pos = "topright", ... ) } \arguments{ \item{data}{\link{list}, \link{data.frame}, \link{matrix} or \linkS4class{RLum.Analysis} (\strong{required}): X,Y data of measured values (time and counts). See details on individual data structure.} \item{log}{\link{character} (\emph{optional}): logarithmic axes (\code{c("x", "y", "xy")}).} \item{smooth}{\link{character} (\emph{optional}): apply data smoothing. Use \code{"rmean"} to calculate the rolling mean where \code{k} determines the width of the rolling window (see \link{rollmean}). \code{"spline"} applies a smoothing spline to each curve (see \link{smooth.spline})} \item{k}{\link{integer} (\emph{with default}): integer width of the rolling window.} \item{legend}{\link{logical} (\emph{with default}): show or hide the plot legend.} \item{legend.pos}{\link{character} (\emph{with default}): keyword specifying the position of the legend (see \link{legend}).} \item{...}{further parameters passed to \link{plot} (also see \link{par}).} } \value{ Returns a plot and \linkS4class{RLum.Analysis} object. } \description{ This function creates a Natural/Regenerated signal vs. time (NR(t)) plot as shown in Steffen et al. 2009 } \details{ This function accepts the individual curve data in many different formats. If \code{data} is a \code{list}, each element of the list must contain a two column \code{data.frame} or \code{matrix} containing the XY data of the curves (time and counts). Alternatively, the elements can be objects of class \linkS4class{RLum.Data.Curve}. 
Input values can also be provided as a \code{data.frame} or \code{matrix} where the first column contains the time values and each following column contains the counts of each curve. } \examples{ ## load example data data("ExampleData.BINfileData", envir = environment()) ## EXAMPLE 1 ## convert Risoe.BINfileData object to RLum.Analysis object data <- Risoe.BINfileData2RLum.Analysis(object = CWOSL.SAR.Data, pos = 8, ltype = "OSL") ## extract all OSL curves allCurves <- get_RLum(data) ## keep only the natural and regenerated signal curves pos <- seq(1, 9, 2) curves <- allCurves[pos] ## plot a standard NR(t) plot plot_NRt(curves) ## re-plot with rolling mean data smoothing plot_NRt(curves, smooth = "rmean", k = 10) ## re-plot with a logarithmic x-axis plot_NRt(curves, log = "x", smooth = "rmean", k = 5) ## re-plot with custom axes ranges plot_NRt(curves, smooth = "rmean", k = 5, xlim = c(0.1, 5), ylim = c(0.4, 1.6), legend.pos = "bottomleft") ## re-plot with smoothing spline on log scale plot_NRt(curves, smooth = "spline", log = "x", legend.pos = "top") ## EXAMPLE 2 # you may also use this function to check whether all # TD curves follow the same shape (making it a TnTx(t) plot). posTD <- seq(2, 14, 2) curves <- allCurves[posTD] plot_NRt(curves, main = "TnTx(t) Plot", smooth = "rmean", k = 20, ylab = "TD natural / TD regenerated", xlim = c(0, 20), legend = FALSE) ## EXAMPLE 3 # extract data from all positions data <- lapply(1:24, FUN = function(pos) { Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos = pos, ltype = "OSL") }) # get individual curve data from each aliquot aliquot <- lapply(data, get_RLum) # set graphical parameters par(mfrow = c(2, 2)) # create NR(t) plots for all aliquots for (i in 1:length(aliquot)) { plot_NRt(aliquot[[i]][pos], main = paste0("Aliquot #", i), smooth = "rmean", k = 20, xlim = c(0, 10), cex = 0.6, legend.pos = "bottomleft") } # reset graphical parameters par(mfrow = c(1, 1)) } \section{How to cite}{ Burow, C., 2020. 
plot_NRt(): Visualise natural/regenerated signal ratios. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Steffen, D., Preusser, F., Schlunegger, F., 2009. OSL quartz underestimation due to unstable signal components. Quaternary Geochronology, 4, 353-362. } \seealso{ \link{plot} } \author{ Christoph Burow, University of Cologne (Germany) , RLum Developer Team} Luminescence/man/calc_MaxDose.Rd0000644000176200001440000001323613604173244016261 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_MaxDose.R \name{calc_MaxDose} \alias{calc_MaxDose} \title{Apply the maximum age model to a given De distribution} \usage{ calc_MaxDose( data, sigmab, log = TRUE, par = 3, bootstrap = FALSE, init.values, plot = TRUE, ... ) } \arguments{ \item{data}{\linkS4class{RLum.Results} or \link{data.frame} (\strong{required}): for \link{data.frame}: two columns with De \code{(data[ ,1])} and De error \code{(data[ ,2])}.} \item{sigmab}{\link{numeric} (\strong{required}): additional spread in De values. This value represents the expected overdispersion in the data should the sample be well-bleached (Cunningham & Walling 2012, p. 100). \strong{NOTE}: For the logged model (\code{log = TRUE}) this value must be a fraction, e.g. 0.2 (= 20 \\%). If the un-logged model is used (\code{log = FALSE}), sigmab must be provided in the same absolute units of the De values (seconds or Gray). 
See details (\link{calc_MinDose}).} \item{log}{\link{logical} (\emph{with default}): fit the (un-)logged three parameter minimum dose model to De data} \item{par}{\link{numeric} (\emph{with default}): apply the 3- or 4-parameter minimum age model (\code{par=3} or \code{par=4}).} \item{bootstrap}{\link{logical} (\emph{with default}): apply the recycled bootstrap approach of Cunningham & Wallinga (2012).} \item{init.values}{\link{numeric} (\emph{with default}): starting values for gamma, sigma, p0 and mu. Custom values need to be provided in a vector of length three in the form of \code{c(gamma, sigma, p0)}.} \item{plot}{\link{logical} (\emph{with default}): plot output (\code{TRUE}/\code{FALSE})} \item{...}{further arguments for bootstrapping (\verb{bs.M, bs.N, bs.h, sigmab.sd}). See details for their usage.} } \value{ Please see \link{calc_MinDose}. } \description{ Function to fit the maximum age model to De data. This is a wrapper function that calls calc_MinDose() and applies a similar approach as described in Olley et al. (2006). } \details{ \strong{Data transformation} To estimate the maximum dose population and its standard error, the three parameter minimum age model of Galbraith et al. (1999) is adapted. The measured De values are transformed as follows: \enumerate{ \item convert De values to natural logs \item multiply the logged data to create a mirror image of the De distribution \item shift De values along x-axis by the smallest x-value found to obtain only positive values \item combine in quadrature the measurement error associated with each De value with a relative error specified by sigmab \item apply the MAM to these data } When all calculations are done the results are then converted as follows \enumerate{ \item subtract the x-offset \item multiply the natural logs by -1 \item take the exponent to obtain the maximum dose estimate in Gy } \strong{Further documentation} Please see \link{calc_MinDose}. 
} \section{Function version}{ 0.3.1 } \examples{ ## load example data data(ExampleData.DeValues, envir = environment()) # apply the maximum dose model calc_MaxDose(ExampleData.DeValues$CA1, sigmab = 0.2, par = 3) } \section{How to cite}{ Burow, C., 2020. calc_MaxDose(): Apply the maximum age model to a given De distribution. Function version 0.3.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Arnold, L.J., Roberts, R.G., Galbraith, R.F. & DeLong, S.B., 2009. A revised burial dose estimation procedure for optical dating of young and modern-age sediments. Quaternary Geochronology 4, 306-325. Galbraith, R.F. & Laslett, G.M., 1993. Statistical models for mixed fission track ages. Nuclear Tracks Radiation Measurements 4, 459-470. Galbraith, R.F., Roberts, R.G., Laslett, G.M., Yoshida, H. & Olley, J.M., 1999. Optical dating of single grains of quartz from Jinmium rock shelter, northern Australia. Part I: experimental design and statistical models. Archaeometry 41, 339-364. Galbraith, R.F., 2005. Statistics for Fission Track Analysis, Chapman & Hall/CRC, Boca Raton. Galbraith, R.F. & Roberts, R.G., 2012. Statistical aspects of equivalent dose and error calculation and display in OSL dating: An overview and some recommendations. Quaternary Geochronology 11, 1-27. Olley, J.M., Roberts, R.G., Yoshida, H., Bowler, J.M., 2006. Single-grain optical dating of grave-infill associated with human burials at Lake Mungo, Australia. Quaternary Science Reviews 25, 2469-2474 \strong{Further reading} Arnold, L.J. & Roberts, R.G., 2009. Stochastic modelling of multi-grain equivalent dose (De) distributions: Implications for OSL dating of sediment mixtures. Quaternary Geochronology 4, 204-230. Bailey, R.M. & Arnold, L.J., 2006. 
Statistical modelling of single grain quartz De distributions and an assessment of procedures for estimating burial dose. Quaternary Science Reviews 25, 2475-2502. Cunningham, A.C. & Wallinga, J., 2012. Realizing the potential of fluvial archives using robust OSL chronologies. Quaternary Geochronology 12, 98-106. Rodnight, H., Duller, G.A.T., Wintle, A.G. & Tooth, S., 2006. Assessing the reproducibility and accuracy of optical dating of fluvial deposits. Quaternary Geochronology 1, 109-120. Rodnight, H., 2008. How many equivalent dose values are needed to obtain a reproducible distribution?. Ancient TL 26, 3-10. } \seealso{ \link{calc_CentralDose}, \link{calc_CommonDose}, \link{calc_FiniteMixture}, \link{calc_FuchsLang2001}, \link{calc_MinDose} } \author{ Christoph Burow, University of Cologne (Germany) \cr Based on a rewritten S script of Rex Galbraith, 2010 , RLum Developer Team} Luminescence/man/template_DRAC.Rd0000644000176200001440000000671513604173245016350 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/template_DRAC.R \name{template_DRAC} \alias{template_DRAC} \title{Create a DRAC input data template (v1.2)} \usage{ template_DRAC(nrow = 1, preset = NULL, notification = TRUE) } \arguments{ \item{nrow}{\link{integer} (\emph{with default}): specifies the number of rows of the template (i.e., the number of data sets you want to submit).} \item{preset}{\link{character} (\emph{optional}): By default, all values of the template are set to \code{NA}, which means that the user needs to fill in \strong{all} data first before submitting to DRAC using \code{use_DRAC()}. To reduce the number of values that need to be provided, \code{preset} can be used to create a template with at least a minimum of reasonable preset values. 
\code{preset} can be one of the following: \itemize{ \item \code{quartz_coarse} \item \code{quartz_fine} \item \code{feldspar_coarse} \item \code{polymineral_fine} \item \code{DRAC-example_quartz} \item \code{DRAC-example_feldspar} \item \code{DRAC-example_polymineral} } Note that the last three options can be used to produce a template with values directly taken from the official DRAC input .csv file.} \item{notification}{\link{logical} (\emph{with default}): show or hide the notification} } \value{ A list. } \description{ This function returns a DRAC input template (v1.2) to be used in conjunction with the use_DRAC() function } \examples{ # create a new DRAC input input input <- template_DRAC(preset = "DRAC-example_quartz") # show content of the input print(input) print(input$`Project ID`) print(input[[4]]) ## Example: DRAC Quartz example # note that you only have to assign new values where they # are different to the default values input$`Project ID` <- "DRAC-Example" input$`Sample ID` <- "Quartz" input$`Conversion factors` <- "AdamiecAitken1998" input$`External U (ppm)` <- 3.4 input$`errExternal U (ppm)` <- 0.51 input$`External Th (ppm)` <- 14.47 input$`errExternal Th (ppm)` <- 1.69 input$`External K (\%)` <- 1.2 input$`errExternal K (\%)` <- 0.14 input$`Calculate external Rb from K conc?` <- "N" input$`Calculate internal Rb from K conc?` <- "N" input$`Scale gammadoserate at shallow depths?` <- "N" input$`Grain size min (microns)` <- 90 input$`Grain size max (microns)` <- 125 input$`Water content ((wet weight - dry weight)/dry weight) \%` <- 5 input$`errWater content \%` <- 2 input$`Depth (m)` <- 2.2 input$`errDepth (m)` <- 0.22 input$`Overburden density (g cm-3)` <- 1.8 input$`errOverburden density (g cm-3)` <- 0.1 input$`Latitude (decimal degrees)` <- 30.0000 input$`Longitude (decimal degrees)` <- 70.0000 input$`Altitude (m)` <- 150 input$`De (Gy)` <- 20 input$`errDe (Gy)` <- 0.2 # use DRAC \dontrun{ output <- use_DRAC(input) } } \section{How to cite}{ Burow, 
C., Kreutzer, S., 2020. template_DRAC(): Create a DRAC input data template (v1.2). In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Durcan, J.A., King, G.E., Duller, G.A.T., 2015. DRAC: Dose Rate and Age Calculator for trapped charge dating. Quaternary Geochronology 28, 54-61. doi:10.1016/j.quageo.2015.03.012 } \seealso{ \link{as.data.frame}, \link{list} } \author{ Christoph Burow, University of Cologne (Germany), Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France) , RLum Developer Team} Luminescence/man/structure_RLum.Rd0000644000176200001440000000370113604173245016733 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/structure_RLum.R \name{structure_RLum} \alias{structure_RLum} \alias{structure_RLum,list-method} \title{General structure function for RLum S4 class objects} \usage{ structure_RLum(object, ...) \S4method{structure_RLum}{list}(object, ...) } \arguments{ \item{object}{\linkS4class{RLum} (\strong{required}): S4 object of class \code{RLum}} \item{...}{further arguments that one might want to pass to the specific structure method} } \value{ Returns a \link{data.frame} with structure of the object. } \description{ Function calls object-specific get functions for RLum S4 class objects. } \details{ The function provides a generalised access point for specific \linkS4class{RLum} objects.\cr Depending on the input object, the corresponding structure function will be selected. Allowed arguments can be found in the documentations of the corresponding \linkS4class{RLum} class. 
} \section{Functions}{ \itemize{ \item \code{structure_RLum,list-method}: Returns a list of \linkS4class{RLum} objects that had been passed to \link{structure_RLum} }} \section{Function version}{ 0.2.0 } \examples{ ##load example data data(ExampleData.XSYG, envir = environment()) ##show structure structure_RLum(OSL.SARMeasurement$Sequence.Object) } \seealso{ \linkS4class{RLum.Data.Curve}, \linkS4class{RLum.Data.Image}, \linkS4class{RLum.Data.Spectrum}, \linkS4class{RLum.Analysis}, \linkS4class{RLum.Results} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. structure_RLum(): General structure function for RLum S4 class objects. Function version 0.2.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{utilities} Luminescence/man/apply_EfficiencyCorrection.Rd0000644000176200001440000000500413604173244021232 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/apply_EfficiencyCorrection.R \name{apply_EfficiencyCorrection} \alias{apply_EfficiencyCorrection} \title{Function to apply spectral efficiency correction to RLum.Data.Spectrum S4 class objects} \usage{ apply_EfficiencyCorrection(object, spectral.efficiency) } \arguments{ \item{object}{\linkS4class{RLum.Data.Spectrum} or \linkS4class{RLum.Analysis} (\strong{required}): S4 object of class \code{RLum.Data.Spectrum}, \code{RLum.Analysis}or a \link{list} of such objects. Other objects in the list are skipped.} \item{spectral.efficiency}{\link{data.frame} (\strong{required}): Data set containing wavelengths (x-column) and relative spectral response values (y-column) (values between 0 and 1). 
The provided data will be used to correct all spectra if \code{object} is a \link{list}} } \value{ Returns same object as provided as input } \description{ The function allows spectral efficiency corrections for RLum.Data.Spectrum S4 class objects } \details{ The efficiency correction is based on a spectral response dataset provided by the user. Usually the data set for the quantum efficiency is of lower resolution and values are interpolated for the required spectral resolution using the function \link[stats:approxfun]{stats::approx} If the energy calibration differs for both data sets \code{NA} values are produced that will be removed from the matrix. } \note{ Please note that the spectral efficiency data from the camera alone may not sufficiently correct for spectral efficiency of the entire optical system (e.g., spectrometer, camera ...). } \section{Function version}{ 0.2.0 } \examples{ ##(1) - use with your own data (uncomment for usage) ## spectral.efficiency <- read.csv("your data") ## ## your.spectrum <- apply_EfficiencyCorrection(your.spectrum, ) } \seealso{ \linkS4class{RLum.Data.Spectrum}, \linkS4class{RLum.Analysis} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS-Université Bordeaux Montaigne (France)\cr Johannes Friedrich, University of Bayreuth (Germany) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., Friedrich, J., 2020. apply_EfficiencyCorrection(): Function to apply spectral efficiency correction to RLum.Data.Spectrum S4 class objects. Function version 0.2.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \keyword{manip} Luminescence/man/tune_Data.Rd0000644000176200001440000000337313604173245015645 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tune_Data.R \name{tune_Data} \alias{tune_Data} \title{Tune data for experimental purpose} \usage{ tune_Data(data, decrease.error = 0, increase.data = 0) } \arguments{ \item{data}{\link{data.frame} (\strong{required}): input values, structure: data (\code{values[,1]}) and data error (\code{values [,2]}) are required} \item{decrease.error}{\link{numeric}: factor by which the error is decreased, ranges between 0 and 1.} \item{increase.data}{\link{numeric}: factor by which the sample size is increased, ranges between 0 and inf.} } \value{ Returns a \link{data.frame} with tuned values. } \description{ The error can be reduced and sample size increased for specific purpose. } \note{ You should not use this function to improve your poor data set! } \section{Function version}{ 0.5.0 } \examples{ ## load example data set data(ExampleData.DeValues, envir = environment()) x <- ExampleData.DeValues$CA1 ## plot original data plot_AbanicoPlot(data = x, summary = c("n", "mean")) ## decrease error by 10 \% plot_AbanicoPlot(data = tune_Data(x, decrease.error = 0.1), summary = c("n", "mean")) ## increase sample size by 200 \% #plot_AbanicoPlot(data = tune_Data(x, increase.data = 2) , # summary = c("n", "mean")) } \author{ Michael Dietze, GFZ Potsdam (Germany) , RLum Developer Team} \section{How to cite}{ Dietze, M., 2020. tune_Data(): Tune data for experimental purpose. Function version 0.5.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \keyword{manip} Luminescence/man/replicate_RLum.Rd0000644000176200001440000000223313604173245016642 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/replicate_RLum.R \name{replicate_RLum} \alias{replicate_RLum} \title{General replication function for RLum S4 class objects} \usage{ replicate_RLum(object, times = NULL) } \arguments{ \item{object}{\linkS4class{RLum} (\strong{required}): an \linkS4class{RLum} object} \item{times}{\link{integer} (\emph{optional}): number for times each element is repeated element} } \value{ Returns a \link{list} of the object to be repeated } \description{ Function replicates RLum S4 class objects and returns a list for this objects } \section{Function version}{ 0.1.0 } \seealso{ \linkS4class{RLum} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. replicate_RLum(): General replication function for RLum S4 class objects. Function version 0.1.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{utilities} Luminescence/man/smooth_RLum.Rd0000644000176200001440000000422013604173245016201 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/smooth_RLum.R \name{smooth_RLum} \alias{smooth_RLum} \alias{smooth_RLum,list-method} \title{Smoothing of data} \usage{ smooth_RLum(object, ...) \S4method{smooth_RLum}{list}(object, ...) 
} \arguments{ \item{object}{\linkS4class{RLum} (\strong{required}): S4 object of class \code{RLum}} \item{...}{further arguments passed to the specific class method} } \value{ An object of the same type as the input object is provided } \description{ Function calls the object-specific smooth functions for provided RLum S4-class objects. } \details{ The function provides a generalised access point for specific \linkS4class{RLum} objects.\cr Depending on the input object, the corresponding function will be selected. Allowed arguments can be found in the documentations of the corresponding \linkS4class{RLum} class. The smoothing is based on an internal function called \code{.smoothing}. } \section{Functions}{ \itemize{ \item \code{smooth_RLum,list-method}: Returns a list of \linkS4class{RLum} objects that had been passed to \link{smooth_RLum} }} \note{ Currently only \code{RLum} objects of class \code{RLum.Data.Curve} and \code{RLum.Analysis} (with curve data) are supported! } \section{Function version}{ 0.1.0 } \examples{ ##load example data data(ExampleData.CW_OSL_Curve, envir = environment()) ##create RLum.Data.Curve object from this example curve <- set_RLum( class = "RLum.Data.Curve", recordType = "OSL", data = as.matrix(ExampleData.CW_OSL_Curve) ) ##plot data without and with smoothing plot_RLum(curve) plot_RLum(smooth_RLum(curve)) } \seealso{ \linkS4class{RLum.Data.Curve}, \linkS4class{RLum.Analysis} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. smooth_RLum(): Smoothing of data. Function version 0.1.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \keyword{utilities} Luminescence/man/install_DevelopmentVersion.Rd0000644000176200001440000000336413604173243021315 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install_DevelopmentVersion.R \name{install_DevelopmentVersion} \alias{install_DevelopmentVersion} \title{Attempts to install the development version of the 'Luminescence' package} \usage{ install_DevelopmentVersion(force_install = FALSE) } \arguments{ \item{force_install}{\link{logical} (\emph{optional}): If \code{FALSE} (the default) the function produces and prints the required code to the console for the user to run manually afterwards. When \code{TRUE} and all requirements are fulfilled (see details) this function attempts to install the package itself.} } \value{ This function requires user input at the command prompt to choose the desired development branch to be installed. The required R code to install the package is then printed to the console. } \description{ This function is a convenient method for installing the development version of the R package 'Luminescence' directly from GitHub. } \details{ This function uses \link[Luminescence:GitHub-API]{Luminescence::github_branches} to check which development branches of the R package 'Luminescence' are currently available on GitHub. The user is then prompted to choose one of the branches to be installed. It further checks whether the R package 'devtools' is currently installed and available on the system. Finally, it prints R code to the console that the user can copy and paste to the R console in order to install the desired development version of the package. If \code{force_install=TRUE} the functions checks if 'devtools' is available and then attempts to install the chosen development branch via \link[devtools:remote-reexports]{devtools::remote-reexports}. 
} \examples{ \dontrun{ install_DevelopmentVersion() } } Luminescence/man/plot_FilterCombinations.Rd0000644000176200001440000001420113604173245020562 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_FilterCombinations.R \name{plot_FilterCombinations} \alias{plot_FilterCombinations} \title{Plot filter combinations along with the (optional) net transmission window} \usage{ plot_FilterCombinations( filters, wavelength_range = 200:1000, show_net_transmission = TRUE, interactive = FALSE, plot = TRUE, ... ) } \arguments{ \item{filters}{\link{list} (\strong{required}): a named list of filter data for each filter to be shown. The filter data itself should be either provided as \link{data.frame} or \link{matrix}. (for more options s. Details)} \item{wavelength_range}{\link{numeric} (\emph{with default}): wavelength range used for the interpolation} \item{show_net_transmission}{\link{logical} (\emph{with default}): show net transmission window as polygon.} \item{interactive}{\link{logical} (\emph{with default}): enable/disable interactive plot} \item{plot}{\link{logical} (\emph{with default}): enables or disables the plot output} \item{...}{further arguments that can be passed to control the plot output. Suppored are \code{main}, \code{xlab}, \code{ylab}, \code{xlim}, \code{ylim}, \code{type}, \code{lty}, \code{lwd}. For non common plotting parameters see the details section.} } \value{ Returns an S4 object of type \linkS4class{RLum.Results}. 
\strong{@data} \tabular{lll}{ \strong{\code{Object}} \tab \strong{\code{Type}} \strong{\code{Description}} \cr \code{net_transmission_window} \tab \code{matrix} \tab the resulting net transmission window \cr \code{OD_total} \tab \code{matrix} \tab the total optical density\cr \code{filter_matrix} \tab \code{matrix} \tab the filter matrix used for plotting } \strong{@info} \tabular{lll}{ \strong{Object} \tab \strong{Type} \strong{Description} \cr \code{call} \tab \link{call} \tab the original function call } } \description{ The function allows to plot transmission windows for different filters. Missing data for specific wavelenghts are automatically interpolated for the given filter data using the function \link{approx}. With that a standardised output is reached and a net transmission window can be shown. } \details{ \strong{Calculations} \strong{Net transmission window} The net transmission window of two filters is approximated by \deqn{T_{final} = T_{1} * T_{2}} \strong{Optical density} \deqn{OD = -log(T)} \strong{Total optical density} \deqn{OD_{total} = OD_{1} + OD_{2}} Please consider using own calculations for more precise values. \strong{How to provide input data?} \emph{CASE 1} The function expects that all filter values are either of type \code{matrix} or \code{data.frame} with two columns. The first columens contains the wavelength, the second the relative transmission (but not in percentage, i.e. the maximum transmission can be only become 1). In this case only the transmission window is show as provided. Changes in filter thickness and relection factor are not considered. \emph{CASE 2} The filter data itself are provided as list element containing a \code{matrix} or \code{data.frame} and additional information on the thickness of the filter, e.g., \code{list(filter1 = list(filter_matrix, d = 2))}. 
The given filter data are always considered as standard input and the filter thickness value is taken into account by \deqn{Transmission = Transmission^(d)} with d given in the same dimension as the original filter data. \emph{CASE 3} Same as CASE 2 but additionally a reflection factor P is provided, e.g., \code{list(filter1 = list(filter_matrix, d = 2, P = 0.9))}. The final transmission becomes: \deqn{Transmission = Transmission^(d) * P} \strong{Advanced plotting parameters} The following further non-common plotting parameters can be passed to the function: \tabular{lll}{ \strong{\code{Argument}} \tab \strong{\code{Datatype}} \tab \strong{\code{Description}}\cr \code{legend} \tab \code{logical} \tab enable/disable legend \cr \code{legend.pos} \tab \code{character} \tab change legend position (\link[graphics:legend]{graphics::legend}) \cr \code{legend.text} \tab \code{character} \tab same as the argument \code{legend} in (\link[graphics:legend]{graphics::legend}) \cr \code{net_transmission.col} \tab \code{col} \tab colour of net transmission window polygon \cr \code{net_transmission.col_lines} \tab \code{col} \tab colour of net transmission window polygon lines \cr \code{net_transmission.density} \tab \code{numeric} \tab specify line density in the transmission polygon \cr \code{grid} \tab \code{list} \tab full list of arguments that can be passd to the function \link[graphics:grid]{graphics::grid} } For further modifications standard additional R plot functions are recommend, e.g., the legend can be fully customised by disabling the standard legend and use the function \link[graphics:legend]{graphics::legend} instead. 
} \section{Function version}{ 0.3.1 } \examples{ ## (For legal reasons no real filter data are provided) ## Create filter sets filter1 <- density(rnorm(100, mean = 450, sd = 20)) filter1 <- matrix(c(filter1$x, filter1$y/max(filter1$y)), ncol = 2) filter2 <- matrix(c(200:799,rep(c(0,0.8,0),each = 200)), ncol = 2) ## Example 1 (standard) plot_FilterCombinations(filters = list(filter1, filter2)) ## Example 2 (with d and P value and name for filter 2) results <- plot_FilterCombinations( filters = list(filter_1 = filter1, Rectangle = list(filter2, d = 2, P = 0.6))) results ## Example 3 show optical density plot(results$OD_total) \dontrun{ ##Example 4 ##show the filters using the interactive mode plot_FilterCombinations(filters = list(filter1, filter2), interactive = TRUE) } } \seealso{ \linkS4class{RLum.Results}, \link{approx} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France), RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. plot_FilterCombinations(): Plot filter combinations along with the (optional) net transmission window. Function version 0.3.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \keyword{aplot} \keyword{datagen} Luminescence/man/figures/0000755000176200001440000000000013540753757015123 5ustar liggesusersLuminescence/man/figures/README-Screenshot_AddIn.png0000644000176200001440000003037313540753757021746 0ustar liggesusersPNG  IHDRbN2V iCCPICC ProfileHTSY{/zW5t`#$B !QQEDQp,`aq **%=s}}7a@ |=1q @: sb曦SZ yN rGNG: eE!Z ʧ;=S0ÿOD# %@yFPtQ6sx|#Pva'8( ==cQ6L'4,W3L'rߑ&}6:(B0tG.5#@YqOs/r"ϸY氼fY>,ܳ<13baR}~ZpT?)Dw,'|=YY̭慒0iIBi颹جw#jITS:W4/ >+F,CtB"@2>D  1`/" @?z<ĉ&<39B7YpGoQ"g-`X[0u'g~wѯ2p(BܹKS}EV%¬ԱX@@  ) oBNbrFIG;Y :PVT`?G1N2zm!` 9:9dtixE :k;90\]~twreָ>uqt{n~Ǹjv/׫ȫ[;һקgwo/o]:ͬg@  xh( l = fǡ,-]\YEX^Xg8-|E%# #%QQKƣKcƬˋmEǒ6K Yf,{ӖY!x<6>:PV5LNe{w_r8eDIII\gvHkry+'&/eoxjHjmdZtZS:!=>_ʿ+0 33wd EhU.;@KVUǕQ+ggrs6>=3bɋ 53:Ffw*j[ {!xGu?u~|bF_۾|{4>9)` YVAZw@}5If3O4 'a @e:gept#DIV3Zyb?NNSWɯbО9է)5PWW_%G qiTXtXML:com.adobe.xmp 271 98 ]$NIDATx] |Lm"KE$A!4hQ>$hM)}xO=KKiZKKӧ<6(% KIH"̤{L&3I$i"|>3s9[sӤPQOG4"='p8y#yT 6^#΃@ΣZJwpwՂWpjyp6bg* m9pw'cf RqfB%^3Bdz8@G,:5XZm(`gct2GOL]<G"PM瑇] c좏0[5.Sg*},*5odR27!.oЉI+qPdrmߗH7.q@#`{AUB۶mWDcmOxjqyͩ%]0J${x?Zt ٿnZeA`܋+i @tU}QWp)w檴,87n܀um5Ǘq3Mwϝ/h1[MB RXԐKv2ij10Wf $ uJ<Į;{X >hP)X[s@S[e'ףKGܭImƬ*532a@{vP-a >= oMjG룿j-+`V9{?5K>;<6~1Gsr⭶jcxrueʻ|UOխKւ]v8r%KmG#P&MX+E+vD![ttrǧ1D"26vcvd+XN2s+!U@ Fbe!GUdXȽiLڰ2CZBe4R<#i*84ĜGAuW$<9 cyR%4.$Aܸ r8`>Ar Wp|cxG#аw%ĤBrߘ3̼Ĥˍn1G#@X{Zܸb+s8ۣŗRGh/·#hDTs=F7#0vh)|(`#nu6mGҵ{a%wj=y+JJtl66ArUI=ZVnUVoQPnq4hT{0`זE3ږ3XWXAK@f 9hM6!Ewis'b!N^L@֭\sqhp)oުha![d2[T"z2׿GqϞǟ4gxӸHS[HfhWe,0-MC(xOa)@1GjWȌ`$G7Ggy=mĬق2L#V$&`xhR|d O=bƫqt\u A3ހ7SU!GΝpG*cT9GIv~|^G׌D$h>ȨDj".B͢ Ԕ `]8 i|@EmSܿ1uNܲǐYԠ}+Lf5ʅ(XŃK;ۥDfw{ӗ{y, sxn;#b 1^y(#ŝ`o3\݅4AW[bkLG_nwCuy2!G OwV%`}ә=чΞK(U+_L|ꎜ]u Iƾ GmHBFM*I-_Gxm+l3rH DE%"j`v9 Fu,GQO|h:?UAd*M2ZRR %dSFC5p LeuR+bzï'X7\It;\#qFۨYfm~4h'Bwhz} .rځTc"ѣ/|}C'G0%N"͊ɒ˻_`[eZGcW#mVVVȹ#DZ1߸֞G8x% _ ; QF7B~mf+$hf*M?xwj-ӷC7r!Jaӊ(ڃY2 0 r`(-(E< e`/-^5OIX6i(bκ}Z|'c_2S _:{(Z| W:歌@&1s!bNcWVCߵ6[9.9".*KB4[1i{VEJ#^,w ;@+&+A]UYnjJ #B0dRұVy o#.?A[10Yq0 <~݃]r=ODضMJ8xEc"5[SZ8qVcִ{ ;k(I 
bve,[ֆf"2iSbtG)dN4߹?ŋ 6?+Pq<҉ 7&F0.{F3z#%m<⿂ѥb‰ ?&^CYL:Fkv!=hlƻ5^)8k$ [г29X4zM+<| R4چ]mL/ǐ~0z8tmږѸr\BmC{40~=L/Ƿ4DWBt{"0? IEfUQt9?Pyҽ# @?QWsAf>7g;ؽ \ˆACMV!!ruVc3޶^Zy4MAVne+ [\1(U8C 6?t菷׋|h/,rB簙OG%o܈߮$lDΊagl W)pO`jq.- NacЏAZ0 ?8 @U X>xt-au_W<=:tc;"C7k7UY G3udOXX.L5 [&aHg͎,ޟF O#}Tn# ]  U)ʴ.tI̝P꫇ Q"@ut~v&س‡ (W,05lI5&eqm(1m{zOc ׆ݤ?|+O"5B䜧"N5 )&W@iblwm*~aܑˣ!)u9W1t6#p@Xl0sbsvtƹbEk<;8,I웲0>#C1ѵmX'&W*<= 8}mݽӒ\7lqhJw d6kzum.TRй<džs9#0v^p &h N(9g|(zv kݵ92"+^L`HL!LwאBxB§g#‚ch(w$A=S[_h$Bj`N@J5]^q-ȡSt|s~) k>N݅COmlY$PWj,R YDFh8܂تmuY5KcΫziEYp[!_ɐ[+n8 떝ѣ4Ȩ.s!ӉݚE#h6-Uuk,`.fe8t rWp.޼)HLQ@'~J,\^ …,*lJ؎ ()O2<:Bc3#yxt5`eOmQyg2)PRzhD;=M]3\ه|FRd&c/L^ɹ(Q"+w)"q4.Oy\l,'_/]˷ Qr/]:#.ij)Ϗx 4|>{gUl?|&Bݰs`ܬq_2Rxy܏Y {dXC} ;q47~zwtNUϒSv |1tp0<֮oE:#ok^n1qi(^'D Je3gȒ#.;9?Ezjŏ߾.K1l3ml]P(4gߪ}+Eg6}д'^#>x1噾YyUhEymrc|DfN{PLTϼ  4)F'_IQx+$ٱlڛY"Jk5f'<)_-:`*>US׏~[}#dZZ9%Sz忓1N2u oI\Ny$A!X]uau ZLUg6O4KͰNe:+p-hkIWq#ݖ6AOJL[jT 1Hi;; ;[qIHO Bbݧս~'Bxdjg3yDDŽ#)= qc)FLBÈ\6_A(DB8yړ~--HHq3"yg=:~ gHAۥ=xI9h?uтCS},Q;8' aSijFpC;W"vQ| ZalLs^j ?vYH_ Z|5WE d*>{ %LMg;uԓ/iZ@9= |]8y)h@G  {\Tl7M-!)W2 $V48 s B$#K"ޣQ)rC%V=weڭ "oLjxGtE7 񛮦Be3i&B3C~, 5 ^DL3KfV gH"HCZj ,P^&Ljt LT]W3a5lHA9KYҔ%3(y)IRj\=,aqR:9\7k0 Kז@]!f[0k7=#cO** i'^`VpSIL<%\ļz^slRB6NluK~6prYja)꨸=@X*@nn NЖ 5O͢=Tyk\c*iTl .9JgTɞ&0B`\?#̲vr}8S0|hK}xX XԂ AHo:yC:v K_F܏'؝UzD*פ'.dbF7s_'[ds5k[>do_gxR;!G>L8s}t'/Ǖq3'>}uNƊqy4P/Ww^1M7˼%6t\`HgmuFcpen@xvb0.]H.;[/J~:#!A\J~9sMTg>0B"RȴM%CdŘ+5SHѤr*z9s.YK(K)kkڛoK-bZmC;|sEÏ0H#2hOgƙfef_~EYuշ۔Ҏ;ˮ~Nk5e,Ϭqp1/3[]^Sl4Ed%%g9e |:-w?3ۼQw3J9Խ29o_dm(&H]pW|b^tnZ3׋6s'Z|rr=˦d;C`8pG=얆8&WnV̡b\edVIrwB[[iHg hWr9Dc3sH] ^ɕpeC* f&+]/D:G!!z@anWN_2uLRI046EփF#oP .=eW$o<;99rpmJnFgM:%MNƚKO|JkSi6`nkaȩ9 nQpˉpSB_UjĴe6f0s㣮c# ^d8a8K>|}ub | K ܡ2#Ҡt?fク=f #ET4!1 npb5Eb.!;av91w;VV4𪂜P,=[4=0(?r"ae!R {*X݅^٤fS@_zCEA͔3EpCJ{rXKTT7cgkޟCl9r-n3wPmqnh,+ڭSgTDSk@ϙaBiL^;m:9:)OBGubLat`vw W/' :|KN0ewKo%|[ <ձGuqA K!7 &HHZ):s#p & 8 İwm>` PwFGS7&qZ/iN> ޏ:1t=+ C[v%ymU5If%X\# p*2eVFH~*N|_ߟ2AL!ؿ$̛EGjpBR_u㩥, *B!s)gJ-ŕ Z!V͒rc#f Hq~ʾNBt;/MY6,SOn heDkvM=u#[hLک_mKĖ+͘^X= ^Gĩ=`.F>HЊævf$*Jik 
%Pԋ+rlWx8&sG-Ρ#i"@cp $9 JWju$E88(O57'/x:.!V#/瘯N4jL S!"NDTf~Q+*9:Z}[(c\*TV 6ʾj=:{Y|M#!{8DD w|Zw-K(ASW;iI:mjGz$7OPVzHb§mF$D ~7J7V!`" nY--BLh*4GQA\:B."]A6Œ?Y jR].zz=#G2W5ə3RVJx;:)iNF -,|sFЯ_1&`.c(C\cw"E4>Al!iުp] 3ES{R4;5& K!)! rl :zɁ:bz[Bp ]!"G* ;I5|sYۀdVbIVcFJ1ZE+Z5yK|?㡲a$X)1C6owhϡhMʏ'06>Tyۂe ["n~xLPo?+{S6È<@68@jmnY yZ*Veu|<:jn F\հW$4 /c*Ɵ1Q:z-^: 7Yb)4V2]ETN8 ZZom] Ze CU_ϋѭ j9s Z`Lp#ŬlsԚ-L0kwS s#?Y1߬}oG~/ͷzuH_Q&!$`" @-c۴ŵZi) -\ap̓ wH $BE7TT3%x7گ0bvEVi^t4@T u1h@!Rm9RHQ( ' Ic"< zMDboA;TV z-8;%$:ڲ%Ŕ.*I ~]BWA^1|k5߭R[iq6~ # fQkQs|uwG32ZR%k)E;dTi`<+F^ۤ+zS?lxt FբU?u 8Cށ]Oei"h%<Z2"E$zJM;Ȁ4?;Khϫ憯YMΕ@ĂFGmPVph" s`-dWP7SK0-LɠjIر,ԟgҚ,5JT]\#>vta̲QH* kґ$Dz^p@Lf5zZ<;zFjjt)';ٸ@d3}]XO ǜ0{MiǬv=:,N <P]|:_&]ZEo*=[ӵtAgWp_LZςk#nϕyD}njġRZ4t :708}M9Y7ɚILMjflw9IZkZDߏ/7̷6EP_| f:di2Vw(D3rP;ʈka\Q,i1$mB w뚴`obKGD pHYs.#.#x?vtIME 4#P7IDATh՚iL\UC°#ek, EP\h -TM5mc&ZĈc1MiV.1X6!X HyeQR}<:b{9sw9{3)@(D#8a uۀld2$B "+E*֬^uur2:6!yo:S֯(j""]]]S=4uRgl{O£%ڽ+$Zd ņbe[[8FǨ9˹zb_A,Si #cSv f }tg%HLtss"o}@{.C.zuaVʚ%^1PT8>>O>%Q5X9~໥X0)Ѷ<@zZw-9eg)ْ]9EG$ @'sr $$(@@bb" h_2ajRj eT|7d'fyΪdZtɀ_ױg'705V~).}W^-{pQv$p'"`)Sqmf}LyK~jϷ(3iINzv,,,)v̾`\i[ݚ1cpNk/Ʀ pBD;JpP Q7/vblPnjɴ7J TOX\b6>nQO߸''(x~:?_-4z$ZT?ُgE/(]VHK[W'ø%vY2 johh l'Eꅝh4(>-=1f&i :9=5yN2?&s*)~rEg{yNSF3yiDG:h,S܏f9ܓcXFv5k@pIV[DSg TN7HXn566Ave+z G2G^Ѓ)!Ӝ@_akiEIENDB`Luminescence/man/figures/README-Package_DependencyGraph.png0000644000176200001440000104341713540753757023251 0ustar liggesusersPNG  IHDR[iCCPkCGColorSpaceGenericRGB8U]hU>sg#$Sl4t? % V46nI6"dΘ83OEP|1Ŀ (>/ % (>P苦;3ie|{g蹪X-2s=+WQ+]L6O w[C{_F qb Uvz?Zb1@/zcs>~if,ӈUSjF 1_Mjbuݠpamhmçϙ>a\+5%QKFkm}ۖ?ޚD\!~6,-7SثŜvķ5Z;[rmS5{yDyH}r9|-ăFAJjI.[/]mK 7KRDrYQO-Q||6 (0 MXd(@h2_f<:”_δ*d>e\c?~,7?& ك^2Iq2"y@g|U\ @IDATx MUkHD2 BҬQ5yͥDiBi%74Pi֠H(%JTywOw9>{9{Z߭XY!/FA@@@@ NR_t @@@@'@?    q-@+C@@@ ş@@@@ ׏!       @\ C@@@@JA )f㏣:ڮu,Y? 
>V\^ݭ^zϼA@H#A@̜9Zn5EY %ƍ?;c۷   @0y%#A@@@RVR>V   $9Y2@J#te~|ڪU]v>޲gֲeK;?B3gM0f͚ewcoѢilSP.6>M>ݦLbͳʖ-XJs=uz6zAڵW_u_s袋l} ggϛ,[jժvI'T%Nq@O@R@@ +$BQ|7}z}Թ;{u<YPa *67xӨQk]#z^ݺuwm7o۶m%\>#xС>'ʔ){rK^٤IL {{Ե`)۷ow3yyLyM32*_|iVfeV{=l,?USO=؊:o@Xѽ۶mkO<6ol{CT'?ة1fk.j&\d== /0/ϑMY6@@ `7 $ lƍϮ-=S@+wLʕ+ѤIꫯ▔3&Xn?hy?C/W^yƠg}6*1k?(^\ns=gtP_ewuWތ5~1s.3sLgK%A&%vs9dž իy%:qD5VKBT_A)-, wvGsvwۓO>邖Z^-|0u>  @| 0:  Bo<Aђ??}Q/v /_>?(\^)U\AC .!x~P塁WP'8W?xuqի#x~1Ʀ%*5@ZXߪ(  P8~E~$=n'|x≑;-J`K."H ^x}+Vt37~^,[Ç駟Ygrnl#F5\G^lÆ CJ7Yo7%W2Aٻk^zS㈺?p^"kgU=T'^}XkN9݌>Ȕ_)>  @\ a\=: @r D. F'T޺WNg}TR1S|NE*^ZYb·jժoݫ,A: veԎ򓜇w[ܹsxGǠnhBUp^c8" kbu.vHS#{XTs 3*51@X  !CNC(]/!jJ*Q\r;6Y=?F7o*FCdyF?~$I.4+6`Svئ\y1U*yYVm^ k<99@@XXBX,@ 0IUȥeAckpP_3 E& @Fbd92RJޫW/n Qִ/.s@E}A#4v,jOQ+dzCN  @ 0@@ 2 ` s@Uff.9'q屳6m.5{/.P;-j֕Tb mҥ6l~7˖-݇G}b{>ϱcQ~UXa'Ûq@B n @(HYHk׮ݩyv:b* )|^+/?شi\Yf_]+~szR9d={f͚o/ֵkםCGp*YfX"|kJjժxN=T3nq'<  @ Pyi@c=:wE}և1ct,h"*G… >iW_}[wy繝O(au]g't>n9s愯oԨ]q' '76m l޼y(G>N8o&Lpu;nآoiÇ%xOl / 7E@Move裏5h j:-uk߾a[o /)o4SHꆬ\M=zW^y8wLy|Iҿٳgc $ig=]kO?&ꪫg1]"g)9z䬫#8=Xݺu{%~_lχ75RfM~GviwaJ>yd{----bQNUm  @PXi@˭{;uZ38c:~nݺ_D͈̠ovN:RSS,\p 4(Yo4K9D.6xu[߾}C&Mn=wqEqdtSK.6?{ZBX%7# @ *xSZD@ 4SH.Zj Ĵn^z%ݻwxI{ь3L:u4|%&K循YNK nEVTI;Q: Λ7ϔ̼|{-KTL7--Gl{\re7^-M$3gΌk<'_G@| *(nrUX1QRhKڲe[fLe˖֯_v\tݦDZZzVzFG)Ӓq9% iwBy(Q.i:     $xE@@@KXqX    @ @+@@@@ .`cS    @W@@@@ N!    ^@@@@RV\>:     $xE@@@KXqX    @ @+@@@@ .`cS    @W@@@@ N!    ^@@@@RV\>:     $xE@@@KXqX    @ @+@@@@ .`cS    @W@@@@ N!    ^@@@@RV\>:     $xE@@@KXqX    @ @+@@@@ .`cS    @W@@@@ N!    ^@@@@RV\>:     $xE@@@KXqX    @ @+@@@@ .`cS    @W@@@@ N!    ^@@@@RV\>:     $xE@@@KXqX    @ @+@@@@ .`cS    @W@@@@ N!    ^@@@@RV\>:     $xE@@@KXqX    @ @+@@@@ .`cS    @W@@@@ N!    
^@@@@RV\>:  @̞=c͛7yM   K0#  LM4vΝk%  @ JP@@ȭ@ٲeeG@H`)77C@@ LZfmAm۶e;kA^lbٍ';5@(=   ̙c>^[-_~a3gv6|V=Vrp7 @s=zjHuz]_~=~ݻZje/y睶>/b}駶b 9r;'O_4lvv+?~5mխ_ 2RRvۻV ҥKi1[6m4wRJquQ@WV>;z  }vg_AӧO#8ڴic't͚5:t` ԪU:c\f:us|Mԩ5?|PM2֮]k-rA5kָӆ 좋.rRJVzuן^x  ]JA=z/:wlUTq-GEu?pK?f̘aSN5S}.UeP@W O%Sc>?z4aKOOϢE`]vqYֿR@@_O#;]P _װaCSj {g?>fv7xl[J7ߘ+T>c 뮻 d?;׸qc2۰a/vV_p-X;䓭QFf=/w JFs SO]wݕTIrMA@?44Q`IEVIYRr?*odu?[X^=?0jvíun<ل{WP;0}m.]z!u]֯_}~W:TG8AZ{|CH\Xy6ARPK厒ʛe$jMgH@ G`ƍi&c3& `͋:__:0(qVfשpii5\cO< {QA=bAiĈ6n8Qw+(m`VpN3T+ya^@DVqKTRtr$^~e;묳x x;H~c@@R[n.rOi`{mnǿLyi&t%yW(rfEqU_I#QN,%Xo۶Y'KA6 _ݒARx-]R/K;j}B*𺿖!DK$J;C#o{@`V>4  @ %vM5p@Q_|1NA$%mW?5jлv!CD]9gW^?vg5I үZWWK Mf+V~k`rsϵ=HYVI;"Ξ=~mS2w휨DMn#dx뭷lٲe6,ƃ   غu\u>--f͚Shҥ?46n/6wܝm…6}t[jUp8ُ޺ulͮiӦp-[_m_|]6|7   ~ҧL0Vj'OvZ5kifk֬;ԩcjM6_ٶm[k}֠A;蠃v/G}d￿vGX*UT祗^rg][ճ:ʆ XTUiٲ*U=z؆ y!   P SK2_~@u ,.RЪnݺnrQv-DoĈ.b ӌN:E]toֵkWX͞=͖7nM2?ڽkGvm-[/^l7piU ؠAlԨQ6|   @ *i$X',cb뮱P'tM7nv{ꩧ\H3J.O87*r[ x?~-9묳쯿ʦMfymˮn \ MP3?pkܸwunM6 _@@@ K }Vh"HKMnn~S ͜e沨-Z[+VN9pJu:#?G A[.]\KyL1(Z˜U_nnfO4}]xY]9@HRϟM{R>3IGȰ@QV<>T$[꼯sw//liZZ?Xژ{W̑ȃ [jԨ{4+K=ϳ%KXzfbEϲ+^~e۷jҤ;"qٵy@H|7b:df' @ +ޟ+407_yrˍ 9S2 N夽˗Tmٲen9ԲA}-;ԬYS9釖*Oc}iv-Tbx  O+}B]:*9"q!@+.HB:0K1o j|mܸ1ڵk]^+-Sѫ .\0\Go4{JE;fUR\_iii*aÆ.ϖjwuھ}{.o@@ ƚ[o^et q'@+ WPږribf`ifB͙3N?tӮJҧOOSN1P .;sM۶m@b:DJ cƌ^{͔dT>;&@@\ ?Y:p|2  _AGڵkv R@ͦ ,ԩz\W| b)Ỏ+_lFrfs=ה]|:t3ΰ :vYY9@H龎f_Ͳvg  P!?W7^mf׻`NQݷ0ӸqckԨM4V\infS}'*W<_PE?*O[z[Rϴ<1m4k߾s2M! 
@Nz^ns,uB eMNۡ y`¼q q PJl{QR%W^JlJ31/s  $nyg'JFO@`oR=N  @ 7ii@ !ȋ 4(R%\Zi9qu.]wΌ>yf]B>oG  P GիkK1t["$$} +>VZe 2eʸ/J;mٲuPI؟}Y;sK/4GƢ1j"^ 7Khȹ  @,!C-bҌ%>6g;묳lƍ}KC@ D }3fR- =A@ /5%R[yK/WP;v-Xcblٴi}W6|KKK=mw8/ܹswy`ҥ駟fŖ[] u\;*wex_Qmƌa}̰hf_m_|]6:D@{/lSZ̅ @q *.yxkm-P`}?3yoT{qK>`kڴժU|pi۶]{־}{kРi9bڵ_7o;Cm˟~i4i4[*(+W޴$QwA%h]v9c iEVU{fxE@>웹@ BYz 3L>Bgfz:V7Y-t=V?]x hSO=ՔF.c9&ҥKۑGi>ZQYPD^EEZQ{oS~,휨[qEo}I'E\aV~}S;ɥ'x @[%o/] Pb`G@cܿ$r@}-Բu]lsyʕ=%YWC}N*/[^¥FT5+Klҫk&VliذUy\AR?S1Y*Vys^~e۷jҤKZZ@oϕob!??&@o[@1 h^Y% :(75_3TJ3+˗/ߩʲeR=ԞX=.T7^T+hGBYvm7P fbu]vgϑuy 'o;B{o@ HHO@B ^)sUW͛nrcSHK*(~dLEէM꘮Q"w岊,ZTڡ0Yf.ꫯF][oE}VyRwI֭ŵ}{$)@@@\] y`V^\|Ǧ* 8-Y}_]p'+x;|R@KIٿ_m5K~S`HIصP Un󟒨k ੧jӧOwJAz]%n̮k&*fJخd jSAN8e9/@@hO?3<4 E  @h>!P( VKE8{SN-rJiɝf;)4++6Tv_,Ғ>ͶRw>ٳz뭦k֬v{Zڨ]C q)OW\\:t8 wL2n\2- B(!?MIoJ$ Nnf/_>ᆥ45SRJ.;-lԨM4V\钵WV-Z L/E}ӮutFhիW{*pעe۷w:wf@JYBtGeS 3Κ;!+dR('J*VS W>dyO.픘兜D@bci؞7FX$p@@'ZJ0 @| 0+KpK|P:ury2M.̪t\?35jT)رc+ZjF~h_|mݻwwH@(Vfηk?9  Hcn իgzݒ.R{G]̮?iV>"oݺ))NԮJXOA@p˖!y{|?&zX, =zޑc=>#콠׬uօw zy[n}kC@MW,tz7 fߞ@VVDT@`j 0שS'@7ntয়~jV ꍂGf۶mVbEkڴ+W]R:ns̱ܱo7ov~gs…6}''R@'=7l珑@ QV!tr ԬYnf{ |`vs9v۶mk}s=תV*G^eU`s=]N,&N讟={l„ ԩSݲ-ZouTQjYbʕM}waZ~h&Xp#8:5  P0.y{,t  P W@y f=zzȶo^}'m?_&ڵ=#SN.] '`.O)4vWtM+rs]>C2Eoa qٔ)SLA,  PPiz뭷Z޽]^]lذL8p٧Ξ-i>l쫞 ? $qOgȲxkɦVƧ9s d\#GQ)X_ߌ3㎋f7uN>d3f*otmhkY:e ӱ#<?x}u߿mڴƏo{キβˮ*So PTtQV?Z}wγ#G6P]?N9芺P}*@AXcyw/܆^( ]cAgAY:ur9X=.RȢ5kA ~T"—)S&XN?̝;B!N3T{yEnajFڵk;k֬qx?h /ӵ@$ x~nLW-tiSP@WXdW B!Q'_yu-闠,{GTA`J#o4(˪``l-=ُ/Y抭SzuwH3( @Nl׬~!,y`6L(hިQ#h}V? 
H XehB%vrEI֕]IֵDO9+Y솨FZbE e˖Twǚ5k9 @x7VZn |.X *`Py_r*Oߵ^І UGsMehRNA ?=¬ic ~ ĵ~/ew;ӮzJ[S>}s=&?eq]wYEAI%wyO9=Ђ?-N8wQ lf`}dy.}͚5Ɵ~ɾ+]9/m#ްgbIaw@"@s^ˁYP\ߘ (;M>PXU@kSQВvڹY΃?˧:*9 -, ,&@ B_"C ovߴSU^.yk_?5h j{?jԨNz@,kVN(Qɓs9zxjժxA8Q@pmUfϞmZj81$T^smΜ9nYomiVE<̨ZK/crL|+ XXж%P!#$@ަg$(/X@38袋L)beE ^^\@IhGHJKKc]ovS[nq3tbJ йvTGWԯ_f͚K}5ݣ_~.QNjWQ- 6m_7~u5o<[n)fE-Zd7oŋ+WtV GmK,3giC k}  %I{/)If  .@+ O br>~.ɱk/ڒ^9m~ƌn'%(^vS4HK.z'\Pkvs9rKt3@ea7KS1hƦ)EevuєKO Xuꩧ]L?z!ח~ڂZ>`=vwe&M*\]S|C #y{= Iܳ4Pr%}/vJ]v](M:u v,'7jEh.p.9p}ܹ֦MkٲL׮]mw;]M,.Zfܼys r)\ivԂ L3l~ʹjPԩUX1/2Ts_˗/wK5{@IV0ok:) $3Iτ/YQ4A+zʞy:I[oe˖-K0 S@KҌ$QP=*T0 fw-[|ꫭnݺQT+ ^7RJ8xU^e%Ĺ =m,, 1"I&@+()<z\t9[4J;jժvAn,_$˥lڴ) J.Py`Ӄjn{*3fu?]bo̙f[hfgiu]gEִiSYye4+C 'Pj6@+Ѳfo=s7߸fQ@bwS\?4Nre߿i7?s=ꍂ lkٴ֭s(VrRK?sL˙5 SEjaFg!@ x~Mod ]p'S@H4Xo n]n,"/z%+s1}6m4YIv7|sNrgT96ȀЬYUVUPO `)f-\- VٳgZ~j)Зv3˪hE6l~رp n粬O@|3ڑOfV"wvڹVϏCF\3*#<6І \C9슬{}v#' ` a.<E._y|NZ.юX:J%;A =쳦x%}skR`֘pE#^砮[E-,Yb)/x +_okfSeV4K,+NuY.@/Q@xUp x|U?f͚069+?3:uތ@($XK'+`^bopQ]vΝkZuWZ֒%K܎`gn+7u;︼Yʝ>?>C@|xCp+\3|7 $D{b\3rSDP3.7)ZzҬ&* WF%զM`K˝i jyfzEǎ|tZZ!SO=rh/RbwR"nݺdAg"gV){B nɤUY`{ 7eK&KN9ҧLaϘU(osG-! P*xSZD ,pGesO<]DJxe{E>rSSS5G}K~|wG}کPNO~Lou7(ɺ***U2 Ȣk:+5+sf\LBSn-QK9唪 #kN9ڵ v)Ov8Sh6@ X`;LK7o~w|nJߐ@("V6\}|wC`F;3kzx㍮0YٌRyۻM}gm'Ӱ  @ i/Nns[kPl[OVn,f}gL D%PM$\~*jժQM\~n L[JޞQя gծNҥ3]i9dp^34K3cnvnl߾Q(  7w5"Q ?B{YCgG@Cr`CK+dڹ/ev:L"wis=Z?o/ѣG1>F,xzdʁ"kʕF)x+ p@T17SN}=j  Gar"S)iBPA'|:]3 piU˖-])RL$@XnU[(نxȹeY,u:M8 @ 'Bȁ]{뮻9+ _{W1d#GO>ĪU@ i4jv,L!1r-/ϴ}~#P@H4v!L'FG@9,=dC+ˊD@ TQ,_aQ`/R'le3>Q@\Xq *޼{]|E P}G?gR_G*ZO  &@+E@ +iZ~Н}gyV<zRg_{K_Rߜd!?@D O# PLڕ4tici<4J;1+}(&OhN5B.@]V?A @1 ,-?PL=ZFH `8T43PCuᇅ?@D O# PFYY=?ڼ%K'{mnٺu;KYKy>EHnv!L|zʲNBYX`AB#)}o/Y}ZSPC ,Y:@q쫔~[fD@P` +ƃ@*U\7zݡ Prh&@ BM[ybڵ ܷ2ލ+B{iqTjI11@V >@2 b?0ēj]89(.ҚYxi0펻̻gYn~Bw %H`{md)n3T@ KSRRA%t (:P-tz7yּ>7%PCW"wXE;Ņ@mJ*.@(h(! PRnlVK}K@Y7&13X@#@6Qa'l $z\t@p6m['CXB   @JG@@(^AZK'=π*Rj;XBX;  P @()1۸T< UXBX<@(4XFK Zn  X%93J@T ? 
>"f`(E?P X%Y3R@L th[ y5o"X~-/=h@@ MV0 %[Z)R-!tl"/7C@(<XgK h[v y¼5kB3THÁ  @J@W7 kZy躨X~!{ёs={ǚ7onsX⍌#$d} @8}GZvm( "q&-ФImlܹy[dt,GI@IV2=MƂ @ YX6'wwA ʖ-ktPT3:U b c1;P[^B(b`TmmI{ $dx@8H+*Z= ˢl47֭[_.]بQl֬Y35yd?.ZjYVvpo;vwy{ڴiܱ.;#vY]{mַo_=z;{}뭷w֢E w>(<@HX'  ,r|{m,}#mP#,,!,|hn{챇)X3ϸ 7|c-[ /{1Xzlʕֿ֩S'{v7nlW^yvmٲ;mҥ._~i 0 f|3~x7uE]wep;oSN7|~a9s5mt%K@D (O~" n_+{rXσ-g aCs]vڷoo)))nJکV\rըQ~:clѢEa4]Yʕ+>B!ZBS`L2v嗛>ݫQ͙3}ի{շn*Ufg;*VheEA@ `% $@Jne6oBۉVx@t jآc+V/ J0KK-%_oݺ]}Vn]w^*t\u饗fu $@+}E@ RzpGlbZ^ {}צ\ZOlTK/E  @+}D@ ,?4o36f`} U@ڶm[> Ю[.|L"~6{l]&N;æM\*~2iҤΆcW\q;]+K;*QRޠ:1Wo b~@&:ف-7CJ3TȃÁ&=cv lȐ!uP)SR?EP3.bd]O;4رUͳŋۇ~hZʵ{}ݮ]%eW"w%~A}~\Ν;:wys96h K.ֵkWS͘1#G_`A @ȥ@k K I,Kyl\uE)S-Į$s31D@ʕ+Y\UTL0zl2V.]zk5kK4hvKܩ@HX " &?Kklߖhʔ)!zVZXh4  P,!,zs @P~;y#FK ޔ@@b U@,:T-͋H~] &R*$I#  @q *'@(n}̖,5o*[֬RE*! '@ xOXfXZ(ɫ۾jZvU9  D@HfPJv_5`ZpEIk  " ba  xii֤Yg)YXi4YVjV" $3m@E c֢{al ˟e$q/8OZB@QV1sk@!:> ,YRU5۲ռk 9A@(>Xgϝ@@7 f?.2oK^mG;*OA@SVqso@ ?׬~ ]PD; $~xt@d*e)}n2~y/3OI E#S.q@LV=0 @2 zoV;| g`-Of2ƖDwe'u5o$CA@``#  @J[o4{O_X+W#PަM=?B'o @ Xϋ"  {ծc}U%y'ʢ^g ]~iݔ;! @aU@J@L~kބyr(kNժf+gD.,26ۻNXdF $DzZ@"B{Y=YXZF.%OM+fYe=- Bĥi@ț@lYK7s͛49oW)$y"H:ܬt) ]/  @2d  @f5tf`噎 FoBNPEsS @ JF@( ]vn05'!+_楧zB6m[wy@[Vb??z @R @ +/EK-pʼ\5@0?y{F:B7@@ `%ӣ $@\9Kz/gZ[Ss?ZR!Ǖ@~WqT  @ }  )m`iJ}Q6m2[ӎ~ʛyό4BK ?0o_+5%'ȡ@gfS_lJrx@(J5 $@|yKrt8>޶m.8iǜ`ŌSK'۹G(Bo2fН[!$Ks@J@̪TwRRhi;rrNo"u;E) a=R獂  0["*  @<B}n2W] ׯ+Y^ZϙYFYB!,uw{  ϬVM+n @ 0+=FE?o?n'x-X >L8: [fMu;1sL;cǎU| /^5?! лA;CPScQoVEGUC ls=svE]g_V͞=c͛7yuX}Ѭ. #@?  9P^ڦLb۷o]w*Wl~ !e}oϟ&.Igy˿ uդJ5j԰ڵkx9ٗUe˖u3SU[ļwYk핇2! @3"0x ;$K꺭[hWpm̛H9U0~["T(VPϋM?󀏢zoޤ4iҋ"Pz"U.ETRD& w\ !۷o%77ܺu+\KGy.o:8kiB@!āOf/HNBfO#o޼رc˗1zhuAݶyfݷtؿ?ʖ-IM*f{մiS9[޽{]:E׮]ԩSc,[L(={ H OpUfvx?rA ɾXwoʔ)>|x0w(S ֭c޼yaɎB@" ,! @<5kի5eXS&l39.^ۇ+WbȐ!Əq2f̨0(α *OvᢧV aeO|a.OQeK\oqU@ݺu+fϞiҤёT@]ҥ?/\?n>*TpWAhѬ\,H"HI)ϖ-n_B@! @?.J$qKPNHuII<="Bܹ~L$ˁ 'N'ޑ_EcpXSvq3⯄itIQj#GO Omۦ Hp8IsŤI0j(]=+>cհ ܹ!1{Xdkm,b}+6lXw`_~~_=z8tPZbEH * B@"`зaMjX! ./hG;b%GFCU t7ǯ`3iY)Ŋ h6C-\8ʉ#wjO*W0@ml%G}m㟿(`'W>,/r NB@!āwU$B (@oe F/V/Kv[M㗳0ZGauݕ8er6ɣB@"Zׯ$B@! 
Aɰ6ge^N@QZ &byh+N+yN;kNtkvl ! WwV%B !^Lx {P}ñwbEceZ?pBl޼Yuq&Mh X "BK KoLK! Bw ( R@]B@/!{ʻ^ B@! Hhjշ 0oS !  Q!B@!M5v otkd.B@!$GoL[! B; g֮Ѽ q^yMY ! 2B p ~ ʅ/[lg,B@!āwH';taQ! {j,ȕk)B@!kākwL+PFڴ0Z 5q },[o'^ \{ ! wa2]! @ P~Ca S@!kB *>DžS0{ g(SB@!$7Z! 0ʗݥs,Z$㽖[$B@& ,y2u! @ 0%lP얭`Xd(PB;V0c ԫW-[P$bD0p0Ɂ%&B@.q`.OM! ⁀p|Ȟ vFP[UN0 ;w`QO)lGO}-؟O$u yrؓfQb`A ! ''r! FpYbUKB2jXn7oUbΜ9(X $I*Uիک}y4m~ ,@͚5Ѿ}{ضٷo:vׯ_̙37xk E@Բ0Xay*&B@>q`>SQ! ♀QE8V,\ ubG<@. .b۶mAa7nD6m)o޼شiKaαEӬY3̞=Ǐ;؞$͵5k8}4ҥK? hZ=0Z4ÕʒB@x B! 0^- yԇU&¸'h#@ eʔȚ5+XqzJ}͛d t韘̷E^>5iҤ`#Gॗ^BܹQHڵ [s/g2y@-^ A=6B@!7$+nʨB@!LeN'U @,N)d;~ \7FWn#F`ԩ:%bNKL:k\0;ǑG ~r]! qIR8uj4a(#w\`\|gΜC0x`]+/^i"W\0aG9"լYӎ+nXn]\~Cuɓ'_~KdQwL(rzCu;-`֮0dB@! ⑀A8x\J! @P!!k50'~Mx\,ԩS-{n\x2dc 7oB޽:Q(r\YhR$<#ȗW! qL@R / K kPU1h 1 j^q-߆=q :cchC! q`EtB@ `N z 쳰Ն,,WbsUĉ㥗^ s<&;7n|Omݺ5كw`Av1!鳀 P(u;` CLȵB +|! @02fc*JU:>"3V6 Ĝ9mۆ:S[|8-#J4{l%Jpºu0l0̘1ôi\8©W^xw6mZt]StXl6mgyWG}NC Av;Ue mhѢ?[l]9gϞX`j֬öm}Slقcǎwڵ['N@-^ { h& υQ2 *$ &B@#FTL! @@6lT|_^ˀ<*I$QRk޼ySNVZmrܹs+UgϞUxr{ѣGi&tE2EZj)r$)rnsΩ}{~o:~_T޼yU \ɓGsMK4A%KjժeC$ TTȄ*ۄ\W! @xtʥBjqXUkAݼ]7|a=S8r~ׅ xbOΝ#]| )z(bR7|mݺUGnqdT4it4$xQlXL;)ia87]ڴi9sj*r^eNҤI)f]ӘdCE/y & F*a^),B@!  ˝l ! @09e{/캔CGJҥvRq:O?Dُ7\DB֧r:'iN3f ϏM6"y;wDQ`AϚ5k@T8 HB?ɱ%BFH\y_`o F~6YB@_#})AB@[C}v۰Uz]vu 488O,OϜ9Yf@ZT ,jԨ?ZȑXŒɑY-.vNS{OsdUH9.^iↀڲ8{F7.FSdT! Bā@6qDO>`=ch֬N߫_>6l؀~Mkaq! l\RJwYˊyjs|xeڥK-|,K;ܸ!;ݜq$ ߿EC/ǟ T|OAY@ǁȤ 8B@& z! P6^UjU,p;$D8FPCG>+VΎ'Bu C"((TO1Nj;w.rʅի빳-[o#G)SquӋ/_ԩSOt>L :Aجũ={3>c@?8|ahE\̹@!gǒ= ǚ0JWMV` W_PVYD! ,a2]! h爒W]{饗U9CEحއ_1;ܝ8NOعχm,&pN7egY<_}?أ?"画 -6LQ4:q@]=3ΣAin7`V1K ؜>>Ԟ}Ia4i3i !   ,N| pڒqU&Mp 05 ?hr, X ȝ vf7nIv.3tRa3^%Ͽ"qVt DIn!V鸫Y6vnԩ\qC (&B@$q`%{KCǎcȜ93x Vի}]MV)C_5j֬G|f6m ]t(RbNÔ)S0|psA}hزe ;wyGvZ 8Æ C޽ÇGN[jѢgl߇9.;AxY8֭6]jXx:mQ4^ E؟8J/t]״ vmoivɩ;Xz-=!dW 8 e fVB@! 
X~s+e!B;rZt#QD>2@IDATv *US,N]vc-$cǎEihݺ5:t_~E;^{5L:U/x…xጾbylJҩQ+W,YR,`sܻw/ioN,7;ܸRX`PT *UR͂F܁ "VijS5}g Ib-n~FWCZ?"<(V¬Zk+h}GcaΝ*ՉطgDb ـYgLs/L](B H_^YsQR'Ŏ'bܹ3\|YW"={hJ& ,XN)(B i@ƑPΈ-*WN3iTNc#G8w#k8mꫯ\Qa>qD8+gΜZSB 8zar7c ԫWOW{Y±R5Ru`9i\];!¨YjX%"@&AEH2#C+zq=xs3̣A}\ C:, ogoO=5˿(@(W>pdB@!˕E !!,^;v܉ӣ`5k4CsG?7nz9O<ҥ /^S9S}\N)̑#RL)UVux#{4VvXrt]lQ ?H̝RܬU\ae 7f2p̟ǥ30  ]gXgnM|XFU.۲ jn׮Ynm?$J#"vo$XDՑcTE 9i8WB@@" @۲V!؉'N ٴO-Nd]=zp‘uP屓'OWOvƦ%\<_[)R(c9GH f8#$]uYs`T !2/_ܠMP7oFF:1{-rqp7""Ni]Aw+۷h‹ !  O@X dB pjCp0Ȭ󏐎յkWnؠA0FiZ˹kpaխlz7:aH0gO:ua~i1vbUyp\< s`պԁТ }X@ϧ({6W^LZ} ,4iRԯ__seAڴi2f̈;w$=;!xnϽŋc^\"9X3ϝ;ٟrv8}tpcz~׮]t>o՝WXn7oS?̙R$I*UI14A(٤IS`Օ\۷/y…X++6PFfJGKzmbqK

UTٳgWT@UG} 5H1We# ؗ/+k`1 F"TJҶֵ<zǠv3YfwVS-*UJ:ablΔQU56͑`~>lLzw,[ ǹ0~ eP,vLx4l( /jVPH}#mu9ѷ7Uȉ5 $#}񡾢 2x@wy#N)C@XseB@DA8Gr8 X,?9)S&yNv. !CP(.\ bڴiX@h`Ԯ5`q=ux"`PIG#\F:_!=/ qC̈Xrc*:~.?.Lge+?I #r n LT! @ :Q 1.kM!٣:M{,W2)J@: Eb" +!3n)I+gN&=*N ҥvZQ@P5KԫWO;Ǝ;̚5K_bŊ`vN0a,YSbÆ x'u$I>siwvpylTR;Μ9hN3%@7G8~˿:꓏FPLq z-h!|Img]\9`-ル) ! RﱬPG ~QPSoox=7b K^wY͊a,~Qʒ%KL>R8r b*YT-az1SxH0rfc ^?LEՓIag*<7@4p_ #o}է ȫ9cGΧ#B@!ua.! @l0ҥA c&NUP$̏?Yظi1JJk֬qp߈OBH4z7Z'n܌W|䐣۴.=G7`x6ѳ`PP;NR:iI`4k U! CQ쇋% ! :5Cj=Ru+{J^$9e/@ڵ~h8MSʖ-vaҥ(TMnȨ4~'`gu!-J¸c U6#,R5OC˛rJsz & X9^ul6{, x FD̷o^! 8],l64S(ŰV=e_:AqlHg-[)@Uzꅾ}j)pUɣN k $aժUZR%:s~&;9P%KSsX7nܴq/Ʊ{LNTBpr^d"Wpy&Ev3:j.#ǽN|i"#Ͷp2%! B8iȶB 8|?s du9'QcH:׭[͛jժ3g ,$$]J\9 +ϟ:tkM9~@ѧO)Su:T]i5p@ 6 {<|txʕө܁2_bŰ`ӧG̙]":v~gsᔿѣGk]g۶GѾ}{S~l&EGBmaxI7U^~e!EP/!:f5]zC~8n0:%q]e73gmJ dzxoL\f)B зbB@!%ߨU0i ewW"'"gJ4""r5`E9ΝcxE)ER"M4t; s>cE$ִiS5i$y"k׹sg>ם5k}YjzrʊեKT}.bo`I! 3KYqF"N-4( )On}?1;^R^;%UE˚5+rʅƍ#UT:tXdI}wE.]@$pԒ:tHWcs"E￑-[6pW_}7xuĉu7l۶M_Qa<7k,\>:\yEt@+D2Oq\]gcs+ f̘svGX,dQ0ڵg==ǝ"{&L  `~**I(JK˝;.@Ϋ0UMx;9&/\๴ Jzm$}З]! 8^JH }6x%!#`eK7űyN㴽7@&cڴi#hͮ6veɒEoٲ9rN-gvl9.6vJ[Rp?$݉S?\}?7믃teB4]s? Y8b鯿roriy]v>j(-ƕ I֭[7ڵ 5 {ŜVvmͲlٲx14:8DZr)?&o"%ac] ۏX,ȯ!C] CWȁ-&-ZКv=z<9ދt(~댊R#,E@[ܾ'Z y)0-sJ٭[QGqdKB@<q`=,* Flp|9S(P[z6N+Ƨ#zիӧO<b 1vs*i U.;Q^xBJxꩧMղeK0bu*Zx187ߩS'BaÆi$ѣG[T)jhȑY d޽8qNyܳgǍvrPEs9.y:͑@8ݍXl>@d A{ڕIoUkEvjs`Q9n|\f2gw`Aw$_dUOEVS_i!Cy0Ɂ%& s۷/ JAF&ޮ];0NuÇ+$:8u5kkF>*kc__d=<9&qVPB@Uʖ]16W[cQԓI'o>Eb+R E:Dgi:isDN(]po/RmڴI;vL?H* !߂{)VWAPFeϞ]!9v^gȬ(OωF{v=7kt_>J;xQ_+N}0׵<a#ôGw>O?ۆ tEad#f0'UH1D;+WTK)m6HQʽ#MC俣N[`/\QUTqӗCn߾]-JO¿TDQ}R K! D`R*0[WN/T{!#b#WE?8 #X#XOj͝j&GdsZ4ŕ-Gc+W,p:S^.Rywz^1O{6aG>9!R}Nm&ZK$N wv! ?oͭB/F|^Y)fyz~QDQ-֘qʎ$k&!!^EN Uti"G7?Hn@utUB#h\TBF=BteTp] 6)uWoX3gam]vM1=ݣ\H:;+&rh?zǑEC<i<)cj\%ĉW=zzP=t钢ThUF = J?{⪛l|+?^ab3TMH{[C?Xfˣ˼aZq2 & an$RpqĈ޾CNxHv3UHzX[MTptY_m]maӇ`3 Av.'Cl(G)R$̈TLBP 3fԎ&gG4R4HɹIAwR]tqIJsmɒ%3k,aPԩqoŎ;v9]SfΜrovQU2y?@~#wEQͲ-D@RSB_ժ hQ02fխԯ&22±wgh#u T9{t^qqmۢW^`cvpE6r6{|daſ AR1W5r ԎxZ~BfH\6w GDn 0jjf6ѩI:{cu>g81qĺ{?G(]=UsEAt@o:"8,z\ƍSx,)׺uk]5ɩt 3e^7W-t(\u1lrp*=_ ! 
@MM!gظ][a}jXXsj 0`ͦ8qGa/aS3gl^qqa1cǎȑ#%(@WWGA驧r]:\nw}#w[Z5!A!UqG3^BYGTJ|ЬNNg~:W~{{(xNm(m0Ť$XY P^Y"?/ؙŕbNx(ZhB@$ q`% wB ^ o8FPʖVVP'9x(̞q'@-^ +wAXMx:'XȸbŊw+Y%)=۷o;}4XAH,^3Z/&| na5hLH`8AgR`|ɹZʽY@HgcԫǺUt\(<ȁQqI1vݻW SL6>cS-G5QM8p P s w(R;ޮ]aZo߾Kx\N!  (a! ?(¯8vFf%QXo5H-Hr8Qupz5wh5ŻM ?9q G]%K4iDW#dPw͛ՍXA3ׯ( +R?Z,ZN#@Ltz/T/!@]h߾?!d[!EāE7C"kFp̜Ga7aX V7G:=Kc&hwV^9rV^ N霮qUÇA@ɺ+iV$6 B9ؑfyTsG! )q`Ŕ'#,fvd( Uj!+.߮r٭K{Q`}V VT)xQptG5D8j#X#܁ePHh\9IOrE#hd:\R>5swt;W@aquv~ ! ;)B@a(ԑEi{7u߄,+pC>%vkB+)GJY5{70zhl޼ov8č5CxuKaܩЁ']kP۶9'>vYtP|\ηn> ̾;98y人eJ"fVO]H>Cz! @|V|Җk ! |A68{16v vr=q`3VRܽ ȟv^sQw~<ȏbiBx&nQETtjsnpLFmi)B&fAPt;Ok]`mv{'&"V9i`V)r\! 8\R!k o7#a`ΟMXr> JǷoԁ!cF/.WVz^y,Yh"]lƌ(W.sx0YFx95kV>|\1V,P_G`)7!w`жCuYpw;@(5HGQH*$o(C͹jZ۵'B3uesa.f˷=wV! HpJ[ B@*Moև>2[kX@u?uӪџ!, _ǒvAUNua-m(]'(//Ζ-SWZt)͋&M`ǎ.l*Uƍ#?=C?z'kBᜋTQ{Gf݋ pQމ"!b ߄:NwU_-/>[f*0ڵ9%?a/㓏arI1!]WX:!rH! 8\]!tgu+>#~=ºԿ`vrzpQX|1|'h@QX'  iiWFJJGYCEKyw^۽!߯<~=[ app.ƐǏzUSFE&}NZԖ-[UDTΝSR&Mk+IDj&oS{Lխ[WݹsGM>][Z5uisz嗕iСCqi޻OY V*cT*dr ?wXkz~c͞Ŵt*'70us|ԡ|!_/RJ],L!g[:1! N>zL4nIqE+7G6=sNe_vV*8uz=5}G.,v%&W_}(bJQ0,KV*uԮOgʔ)dǓmV\,6mm׮]t(誫VVr׮][Ν;A2}n/_cqWX:g۶mK.6oڰV <}~ң5d zB̧߹+,fMz֌Y򕵓>p061coÕ=xIѩ~ T7|B@#`zP! |'7sf{ QK>d뺂mB{uڼ֋`^ЯFW<g۔+F9nڴ J s9S9qsH,ȑCp9Ylp8Bw壟:?Z):MF@lo(PERjϰJCH>̎icSB851ٛi}qsťP$`OTye+ytӯ?~Mkh%0HJL! |Q5qB@-[AϓUP'V;9d)?Ѹ! Mt=IO+W"dcR׮]ATzܸq1_nEBٳQupRqI|yKwK~&Mqçz xR<6X¹@PPWѼ) HB">cO AbE\Ո$H~?~DzLN=G|ڱvn@0j<ՍՆF׎0g.݃^5tB@$ -B@E!#]:?~V^]G>8Q8~B+CKEV}/ózÇ3fĉ9cҦM oQQ~Μ$Okgaww۱c邇 {X#TLpʔ){/W7z|j3*X8u:rgg䰺7IEm5p3V @ 3ՅђM&}{길c:U!@SǢyQ>L /X(wF#1jxԘ)! @V±+ ! pRF"҄SݪB+|Ϝcߕ6e-*d BYSY!d_&*%Qi93}ϝcιn˽|>sfw}f9~yO Q~溱"ķj9kY.fttӑ@& H6ݥo߾\- 8gpOVysM/ xNрр@0V 2Ύ,Xӓ%OߊoH{]gYq?@u@=!D>ms#ҥKTӦM8"F8E+. m<hj ・+y=H#850ub%Z+WDe}'Q35@iQC,>D<9/(D(-[s?fFFF uk3R $nzrA?PݴUJY! 
*|82%J˶c}M7i{j~+/{F>."Ҳjo7#G^z)BГ5]o58G{Ī_@?TOJ#XU>)9/իlT8^0g[h*QB|ӟq=Fj*(a#w mvv/{C,gvѮ=ԺN6 1000|0V;fDFFFB*Tp32#u=}U.$b $cR\)`4`4`4`44`wL2//\veՇd#G a͚5i&ٰaN2Eԩ>,ZH^}Uyk+V˵^+=\.=+WYfi.4??h 6w\=fW_}nցU#<"xbT}I5pr=h.2] e[+$[bJ1,ϟ/ 6^z>=B0M+S˗,6%"Vbfn^Բ gU*jPՉ"ǾM>7hf ,j|t$F-"ݺum?~d͚U|fp | /+R.\(%K9^UAy?d$ː2χc%oi[T.%{MH#WwHߴwLррр@Ԁ2i5200H8t萴kNO\; 7 5k֔[oU֭['WgD .]A+VX C!Z`8p&[I&i #V?ˈ#t{xذa 逸~Iƍ塇7 q;7o.o0Y~CM_ʁ>Km[E+!?l}Kr`̐`쮿ziӦl޼Y6 v552mRx\l+L%,B ՂER%4{7uRLAxzYF>" lZeHӮ>0` `U\Y{(4;jժ{iB)tI5ku&%΅=< ظ)([$ n40^xIl.7ׁ׍ x&eޠ 9v1H ׯ_?ؙ4)Kk$%, qϦ* 8|d֡q    @<c&hhhhjM"xE!ki+&,;Z?-X@'Z(,ۢE Vu֕iӦIťRJ2f}?Nz]++N+V'Ќwa?ℛfDDuaml2mݵuVWC%<.w B[,Gm*3gNOw$t8DحhLyHHr@M@XM4zivL@v.%K6C@ !lDŹ_C խYQd?xlXLK awWZ[ժUK&O#W>s駟jWK.D>믥s.=Dt!Y Ͼ [~ѡzdCVrKwT|fU|3Arٵ [̀Wd    u3Ōw+]tE5\hhw}n[$mZ:PxFȢKĉ50Ep,1ɓ'g jp\-Z_Qx;ZØ7F c$g|&>%&.\R3 SQJ(A?Z0*?p)t: fX/\qBkl"kkcCh%΂,Q9!BI`h/ʍp ]|tza\F]pIڷWf(ya ̺VE?$}RlYݻ]d_x!t "?-:lZӕ^zB@ϙ./_{4oҙԕTYMÇSj0mD$\|y &Tvmi۶;AX^瘫W.Y-9x)PPZU([*ձ]%n=j,o s8g/R7Qw )6wZk>(|FК}y#0OP.i!##EZ|_ ۙTMoȯDDdX#FFi7|S8۷;6iQ|\w~4dp@95/7FR]u8qBd=q&5G!/6<,=,4t: ).7}p\ >,CT ӿ+k-tRrnm۪TCr{_9T*`YuK$ Hl!FjnKaBࢩpգFh@r@[8tgY:+\gtZ Iw}1cFkT;S WO<??,lT7ݬٳ+{z] 5N1| UC grly r(gӭ"0Q8@®`(znx \X w^ VzqݩrUq=RsŸ$*weK%k?w R 1<\u:7#ҹu2$`/^Vvlug$4`,p'1000H} *Lzp! т^a^<wP+W "b̙3EEJK]Zk e{DٴGQƖBG=;,֣GMf϶vk&yn"^wbķ*rZ.Y)V<;V~rѵ#'$9rvqvSZ;vS k-7MՔ*UJhqȺuV{tU[mwCnҥK _j"Zъd^wW|J׾guET}fDK^tO4#p|\_[<Ҋ veM7 ڢMª 0בʀ뮖~K믑{?}+{wof?gez\Tocr&t:t{5҅k@kݻw벱~D~m-%K _{Ӣ-:ikB:Z=Z9;J!+(5I-hF)sdUDbЅtqH-~. 
Oೈ֥r>H5K+ot[haT95`ܛ   8 ' '^vs#rC)N\9ac>Gss.;+ǏנwXEzKO"2$KLt>ܝ#x>NQKtኔ[$ Kٌp_7 0WgbT[;H/ *swO fkm/H0ev˒0yoJ-\&Q⹩]N|#$:x`}JE2"pF!X Xy˻5|0웍kN^vo(1npaRr[d  ,55HaF !H[IVeeM<ץ\4sab"CN/yq޽:H'L#p A#fϞ+|gD  y.MwIFK'itfP >7|FEJkkJ^pDŨgZI!C CL?0rFstMvlԘܱy׬L3( @%-h7U{ɛz/Yekw^7N?t:]X)VNo`> C8L71^;Ӫ} |&p̮ !@3AX``)]*a] yϓG͎ }M@pmеHM 띸饗ٔBHWG+F=m R;v,"?i|p|TRE!R(-34մp׮SǴ&Q sn_`X'㽖bϹZ8x}z nO1Hx?&%3&|grl>?CǶϤsV*C{P|.㣌~f{XY5z}T|cyH뇵ⳜI>K(Oz +SŴvGuw(||< ihhhhh M5 7' y8Y+QBYN+zy%h-GرCk|y}1:xiZ3NM*P d}rتXUݦTlr>گO#[€_ fת!˙Gg(b xNcű P;w;WAgc@QG Q/* C~)* H;ޖߐ*.[FTX]qLYd:r\tw$ǝp\GPg5xt#i㆐yi޼dB4i5093"33hdmyůR/)L7r#2wF~u^v8c7k޳QX.?9*>Ōb|r}b^A-~UmlC`~*lZ5R%JDL-bDkBK*wRx~V76PAX[{VAC= #)kjK,P'@p(@] y !uxbm7"k}N%\ 9ϱ7~+Yq:sj{      @D'tE beM0A`^,1ML~(+9bTᅰ \ cǫ:˨Kxjė=oX?N{?Wح_L޳%-T ]246 xKslAs쑏+X7\닔 ?g>9x=LruOF>k]>XBUt%JmC|[F2 ϯррр@:р` @HV4Q ˫99V`>" ZmКa >_]&~>Β+;^Vi쑸ɵ^}e] 'Џ,T}'8^ |fi}Nbl>X(< kI@ n@WƽH뵚@4>mQt ]1BRaN6iD l$VV(KsbЊ6V`ZK/E%-\~&*~r .i<#g~gw\nB|rСb]GZ; ]k )ҲMSw@,@IbGE n TW+BG5p!ׇSO=g}J׭ࣣۨ=igR|/WVζ F"zaXϝ+i`jY\_?ܶ:shXly. :ޠt#Գ#r'n"AkZ$NhR3QTKGY+kt,F }vw%`n> |⋁7\ah<_֝8M\%+aыnujMkPu);[,XP뻃q۴i&ZgkVҙ1M|u(N2Եݼys~Z.D =t3mr,Õ `ee:k4`4`4`4p>j@G4zhX(Ct.LS Ǟ;?C/D+|Z+N{z61euT[.!wv*o#&6m! 5PPN@ܼg{c}GADkr0FҚ;wּ4xtYwjtd+P T4j;$-5ڌ\4lSfF(W\hfhÇ+$ߠ [gǍ0H*kN,Z хU\q…5#8;vpq.PB>V%uܕVSv{6'dJs}?F]W6,=Zb>AM#" >=n@htFbTUO TH"꥗^`{Ϙ)H0V?EFFFFF! 
8!v{&I`󥙮pC%I_%7C+ĄTm/$\ C_ebZV*)vWqxPP,yS9{)Z (R*\u5й0'w< ԎeSmVNrBd>K-'Nj`RM!`8p Ԩ8f}}'5n*қ;љӞ<- k ği|ue'|R>j .PG>}tZK!Bţd-!VuDkNt{siE ˮݡ#+{C^X_@3qG7u^:QD[Rt6b4 X݈̄ррррр@P>Fb%#vUjڳWl^w-‘G vPZ1LQ/Y)n["M +& ^z:3/%>Eh% ZTIJRȑOE-~SDjT㏉r:D)p0pgDED) Sj<+.ɟ|.eC?8*,-xM*kZuĪ[G|WE4wE=ݍ$ 5 Ѐz8 A~߼NRmg4#@mq>Z,{Y/HR[#Gu&uP}+L|z>n!jSb5l 2P}'X 1n:)W Kx<KVBWpZ$gΜdg V0ċFFj KGA"qX"VZ\#Vץ8,N1ٶt/2d૨ ݻ7"cǎi@`]0ُ`X_,źfDgm n:"o.iB 4o*q\Hv>"M?u}pr'7Ӣ<6o-0) (PA,ɃANV*V"km(qxxpDiڿMn#  6i:^&vD6` ,D= {Miϊ3`r{KnO߲!ӀWS&νzuXmߜ#bgn V=P=Cc 4Ϫ ^Wv~&I^LI4׀J\?ррррр}kKL}__ΩV|*v ",N/խf>$ bž={ꫯfrLW%KJϞ=I&_I2U/BV[D#rd|p+ @:XY|NJuu5B^0A89t萀#f>orS=/:X_5߿_xFO875Kw ?Wyݻ.١JGpX]')ꝝbBAmD<$v XE"ŋ'b#VJJĪÓ=ݵW i&ӦUkło,C6CHBSٱS"-|qMcPV2e$ Yd;!_DZ.JyQX)XF~s+Y]02 Nррр@k̥!&Ђ3v8LhTrĿVq}QGe˖-"k̙ч" ݢ~g5}]1,"*H&:ZP`N%VFW׉o7 |(wydݺu `y-Bnc&—{zBo2-A &@-@v@+3z'Eo+Ki}_䮀d9R w&@77Uh\q:wLZ3ωsbOf7#ПnJߘ141cT P-[EAXR.3b5I3tE-{ "y ;JZNdQxKl<p-&pT@|%C`@-nJJTEfhрΟsmFj4`4`4`4pK8ۊz{X, H8]-xlK?{X ](%`'5eٚjҥPYuu H?X%Jr ~X 0!僵 92I3 N-_\a-[L`|#,mD||P͑S ! u%K/FX_A8wApA&f9qN@2~5D-K|`S %& q` lӅ\jڶ#z5@֩N K8>#v3 o\#V-Xc68@ZlkӨQ4H?xYyA3ivEi7q2ZVd4, .*VD.{N}M']yҠA7K:DdN z <,eL_"*λX#rphX;Z7\_ֳA$U-*ZƯ.}*QQQ&ɚ@ \ ܂Vbe G8=/ ]Fi r#Ke{GK.m?KMZ/7ׄTB|mE04E|[nE"xujWTINW݅;ڇ3 TZ`*lXT.v[np7Zk}\j f1oB։h,]V0ye_@rۗh9M#Gjb!Q;#kv['/W\B`[HfUX:XTij6pE!%C43-XF; $b7'l>v\̣+"_`KqחE''[4dk- hp WA\."ivDx0b4`4`4`4`4`4B ?]jQ?tew䈲?e*~u7*_R % :|pՙTEmA1]xrTU~gqiP 'EUƌ3belRF%Q /]vn'*P dϫ3RZ<;%˩%E[T۶m@*Iwa:ȔѣGuί@ *`_Y*~j͈*UXDb8ET%y4h!v!tMo+{lTr8uyMTζJ^:G*KlN8G^?V?O*6|QR[=_|Ae/k(+_O }#صG0@iX`рррр@J5;Z%OulXWK92J8 f/!9.>O'Sw]NӲ.Ja/ȁew"aSO?ko;T[D'e] IzuV6"VG% cp:C,־b(N7y*I)) ,Wt#o$˲V uHꍄ$gcnVt-Z&|wxYrcz0VVJ߼^lۍn.E\_s:´|r|:a~RG_ǮWtzbu#GRK5a;LkU F2ՖUqVw\`QDA@!w@>2ҀU -,\+)VKE\ 6Oʆx1llfWԢ "\wDm+*Con~X{M@j WkvTn4`4`4`4`4q4໷HŹ=@z_DMW)~, Ԣ%/rǒ%KH~|=H\EκtekR/L뵋Q "TPeə{P)Hc '2EqBBWJ7)UߺQ#D=5M}¥pAM8*yeĈa% 䣣! 
]p@7z-)W> Buua(h(Nqf''S5kTVM!w+F$Mot[L Y?˻X_}Ν;SX<['֭iuosf{/pU(Ey ={/C u_SUT1|%D؊U(uب"WoGGKsCTqvQ<2_=]TRqnqDq3vYPNil  v55    Ж166͡{mE:ÒҧOMTN.޽{vF&{c 9yZQ OԞ}b5'o"H[ toQbNg|mIc8"Nxc$k="jbNe_F_j޽rէ\ˬŋ'֝D1Bɉ‐|(gkD˝#Gh 0_;%/Q6@LCԩSNdRSi!o ڼ~͝㦥՚"!:l ʅ?5ܸf7;ͥVb.5Nڎ;45.{@Sc物ByXe|r=,sŋ寿kH>Jz+^8ɾS !rm Z&xό (r(~) m;D~%ܹ4Go{ ;@IDATiO˩-V5,xE돯㬶4D4\a_]smJW+;o޼pr W T,OAPp^n bn6#ңwhx0t v3Y4"/6Ԝzb*x~rrvV72r~iO|W9G)gek){f!.,ӌ{'ݩ3s|r8bTG^~ ֮).o 0Xbik4`4`4`4&`(pjbŎo҄x|5iTꍄF k]XQ/_-8SO<::/_R: Q ldK5"޳u\/2.~yɑ#H5k֔-ZH9J3 ?('ۛ2']Z7ej}{eUɓ'˨Qlٲڍ Ҩ :t^WkDZ^KoB..ZgRb("޳7RX|RE=2pUpPDn=h1T)=3`qrK7H! 8pZ}]\}]kI Z #`fyE]udY)!_|wgE]$pm(Ekf6Ф-,@E@ fڊ%D?A$uHg<+ceO2mӦM $4z)\BnQz*έM 2 XNrEm٦Ig*&8.yc*UZf`{{.q.)__3\K)'&8sP&8VRo>nts ` E8͹k?m?Źzc731PDW}mh|/Vf&]u~>gɛZ/}rg *|?_Ƿ%y`/#֔<5" #FFFFFF  bb7O?qBhA@_FL%%5IZ һvJ4;_`)ZOGMxg~_E l U' gn~Ea=YX< `ڶ]u=C'#y+o&lu&~YL_*W_5 'Lv옐o.9?mC?Py2F,jF=1Ns%o=f;4[-s®rM(J)@e6o?HIk :^މD59+V}Qp+Քx 4;^%]'L 5j6& QtC)+D&vȗp<+K.UX.5 p~nhpTԏM].1~y1&\?-DŽ^z)Vn&H}r@n:>'oC=emݦy0L!g~u9pN_GCUF kSU*ئ~V9{)'xXt{0c߻AzM^Dk .j=9|y>DQʂ‡)%%? 
\r#خC'L~4b4`4`4`4`4ı$NxA,裏*p 7íKpGH6Nٸqp=p[ӓ @ TA">qX+W.ս{w q\SxR2?'ѰpSo߮rAXgp2yݾqJrMiX$K.DhC _Vu8?A`*%%4bo/kKOM\Px=NxxEe*MQ/נ =5]P^8fwxNs+#^|M >4,<;^@]xY 'qpᴴpWJӤܦ̘1CEIӗS~{g?@H#8"iB@-[5l.+]6%J(X,ڵk1cƨ+X I _NժUK${sA w le˖Eg U/;g 5Qy QA|@rl~LgiPzXU{Q4냩Ӏ='RE׹5XJZJ?KO.%h ;ǤI g >Ӽ~ΝUzxXg ~Lp`/X7?Cьhhh Ch_/Z/)p0/Es_һ8{Be,'Roh}P' ,ڴi !~$`ĉWHh^\MN7A*A=B+ 8MҠVt N )ƍ.Ӯ];ŔԒ\K@+Q>:5A3Q8Y s^yzu9kߕrѵ7@oo "#r=PyOnF2}5(Q[<6t#:O1JWL_ߘQb/Z"Y dkM˓'di.nn,OOQō6jT"J(: dt,tT:8}WX0k KO>98v8Er8$cy)ZMFzif _Ph%g H9buGh%ׯ֖y܊fcq<(\^=7e f:cƋ(k-XHO8 +X "kȵ`:7$i9t&@hkGX-ŝXn~;hsR"n@~@`e޽;Hh.{\y XOl:2џ*` Б̢`$Ci:#QfϞn cfߠx'D<+կӤԁ[Z".1"őP+pT${:p>ZC( fI\ __3$="V/TXk48>V*خ )u b&5F9[O'J6 CKU>ٻ/:[~*5rDdJ WmCmn^ΛEojLGRaUQF+ͧʾD緟?WϹJ`IM6詣c*GcNO?UK`* ʽxRpLňb6eĵ-.^&Z965zzBz[YtX w +!?LwaApV/ \RPeX b 7HձcGE)-% `ISwUu_UpҊn%@@^ u\$YkGͰ09 aYF 8كާ +U18P_RUlYu饗*+/Vw_rNǛ8U{/Z'{%kȒ*8hemX1zTź~{VoU6oKlDz&9]ӭ#眓JRrF$I JP D1!P$EL QQ1?~L,{{C-z:myji戝C`cw}w/Ayͷ Ahe!V 0Cٙtq׮Ku-(ֺ`Gݮ˗ĉU"Pb#|#SbfC+jP DMwD qXoHR%EjfO*Idd U_<!:B=4gmԥ$K3%>_Cśt$/e_Y. K"ܡE5{7}*q7U%C&)d%4\|e'.Isn/p[牙sۡsk;Mvh,ygJM Qvmk̬͆& tܒڙRV 9Zwɸ'k`&JRYdaS?<{SU &Nhj,Uca31CD V;!"C+VT="2:|cpl{ Ё!fC? 
%Q U&/BLe ƌ"@0JLC]b%yBSN&vAz*2C[o{uN)EdIϑ9r}A62e"F뮍dUЃ+%W A<ԡ2$ 0wGC=%K.վ[N $hElb'I@W\!k֬3#$wlrBi]k׮Ў=:ZW(tOa[ SO>B) hwHYg%gI9AԱ b fo6U5I;I2@'gM:WUƢ8S>DOH2x-$wH74+߇ƹ懺K0+̝KļRmc'Wxݟ2Y|YNgIc*R eˍ3|UE0cYQ]Pښ熿\;aL6qfl}a5c`Ș;Ph00TKH9ӀfבPоJ*iP(ݹb+ՒNL:hsBD*Vf$nXhѢɏL 2!Q>ܸe/EiDSHar)Q>8̳̊szO$Ιg)t|fW5] 16{rr+z(9ԖO} )õ ̡oyǎof{Iv fW`3j1 2x_:N{$`}<scb&|ZWMPxwF@{(@I7w-1{̕Y|:'e„dT UHym*Ŏc}sFgf)} et3SzIfd&@18́ xdSinVGyWU ugNǬ_b֬m0 "2i$KF!`x+Hd|hWM/5'Ԑde sM*cݣ@G/D}$ILXd0@J~ KC'+,sCK`\v2 .TCh!:[[l͛1TX\{>: /qwְL>s);SdqgoǏPR'n-2z =,m-y5/#"oHݥKI meK#q;!I͐PqG鍛(%IsZJ+c.ЈabЮ|}Mψwݍ|uͼ"K$ >8YplLdzPgj`W^U*Ay Zw!% !M>]eWHBϓŷߟz@?OsyxKB*=T}իW}'5s4Q(Y %M%+7`HIQØaI!޼g6GQ7:T$-[fZw/IZ\?Xw-H(oB[KU+X0@6m$O-\(rY۾K*'<%\W{s ϒЍ׉KD"?%B=jH]$j.mШ`Ʉ E nc,՗/p+˅^,͛7ЛB؟XY  'o.yC >E׫Gʦ!(ӮKVV<\/YLwBbK5̞n={j)|EDI!#" V1MQSHӗ g p1xRYBjr¼; B#Kt|F@ 13{"PIj?%%y b`yj7e_$PL8T߱fK);DڍI㏓}]kR,Dܫy{geA&O~Qs$#/Zo%$'$԰u@{ Wh+y;a\ `Q ~`$eC뮻K">|ys_E֬$ M%U2b٣P+aܸqs[rAGH.)/!K.*RP^9IFU$O?P蛬a3Lߔ*%Ox^u! cRz%Bb|,0OsI!lɍl]sΕ6mI;- őѷ(7BWdqg̒)vܡhyl=<%ܛA=%63YZ5O3(xpTLZTzM<%[TQAȲJ+$Ǐ?|,әeirbR,4Iby_f<[rԩ-ˉwh 5k"!yf" i6M:iB$u1^)AB" -}5+rDۼCoOBP\dq4KШ q18 \x'E} إ@zX9N[RHK8lqz塇U*pq,:Ð@{\/8-owY+[wKInt+WJ~tb`!堔DV{2'd رC,kޭp$-)WY{5y;Nu %m'~> j#Λ?x&w}ؽ{w=xT~s?';XEvTk6p>53edG4B ͛3WM*0v'`_!] hVߓ -^ #NJ `{k  ͻa.֕afڝGUdZɏ`JT6k*2<V,̪"[]V_xQ`)aO>C#/o9sȐ!CZ|ɗ^ڵK"Z+-sqx7o:^.I >W/5QCB tO"#UW$Tz K/kǜe2 @t@I3ᏨqتU+ӣGj'ܙ!.ܺ;TQbkFa_}P0_νf~7h:mob8~Q<ݏcC߸iwܯ;2s4Έ ea'F'h6lؠU1`Jud'P9o!# ӲeK+ͦ ,[L3Ru͛7r=1 EZՀ1g~gPVr0?3ś:(j>0â3Zͮtnµpf|WOkS޹z ,vg蜙QFiB ߎ@hvHnӦTi3BC͞vUܹq^Owmt}cfEh9̾!x|U=95O6FuBd$eC憎0Օ m g֘I+z5ARgB951ἅ }s^iׁoƹjq!K"H a dRl)u:={G{I2֮節E`Z,ia!=iV+"k ӑ#G[n%j,`~]D@%wG@a#\NQ+ޗ| ru<]A3Nih;ӍOwE%lna^\Kgl= Mډ{g! 
cz~W$%[܇@\{>WP{b;׸o4.mUM9aɘd dqy1EONYq׮;tn0qn9EL}ic<sآmN79SGLdjWgE&\~x-R;{>8UEM%9O 9=I&Z2fG-PPd&,vs 'cن _rfTd7r\uL56_X;<`˶ dϜ6 F(%39p2iӾ}{P1T2X|Kα qb̏D\ޠ}g~7[" FMȘ Ehu땵qh2TI-眧`m`E8nj-#9^xcwi޷eq2\˶ dx` e@.rMXO'޶Ѝm|lqNTFyf{3wgM@atwq r]38:u3 Rׁ \d;3Ȅ)SɄ̍h7K^p>!N Ȥˌӡ ,?=\s< }m>q.uV!xrrC\4޸>)@r>Ehs mŖpYة̩|) <m; q/4 Wv<`N80|>"U ə{\]->.C'ǭ!@Q!&[{+V0l v%G=.^o52o3>=<\tN8&2*[zzzzhAdvU*Pzɓ'2eh D G|*MpFO#+C&$g$+'.Æ 3Șm%ᄣx7IJ|H /`>3 `pXH!`.1"vax&5T}"qa.n~֯*f?^._5n_J'6E  *?]6W``ٸ+Vi'+n7on8 8m4 2E`⸷i&$2N xLtNzZV}Fv'C rR VBR=׺ aB\<+KwE.]1չ~2aj+A /]0 :-5| Ji+sB8j: FǍUA<{C{d| c_SOVf&=P6'MȦd-^L?~Y0^wߖ#L]4$iҥ ;b;(/,/\_ΐܨQzk=E1ȾNlT'2ڪ]pT1/1xq//>sVvzzzzSO=" A,7ڬKӦM͒%Kb'Ti\aIk։6O6m۶;Iyr9rZ')a #C"̱+#ͯоCPBʢaYja]{zA.>5wYr^ӤI7woI Ɩx/;AW=\mkkL ނ?އKǥAKzqb (h,xrkw=2H@?¸n1c'}odh#C ZKdImޏר{~Ա&h} 0ҷ׭}R.sNfZ A\H7 [q>wD %XیCk5  ze3Zklr3sV?xOI%͂~ kl=g⥩*5+غjv<&,|IP/!J\:UXdYI., -.s"Rև *ӥG\ ۙl f(l-{X8<Ӕ#HC YE!ONǖ/_:ud3-R/֏luZ_!9L9>.O@R_|7@IDATTL8=]|[l2m. Jqy*sEptH7wy)Sv=dX_@@-gڴ`rI浲;C Wcx6`9հ9Yd$v>e7N =(0T:x,> {kBp4022 [s.wkw_ 0"PHU~YzWVUuV`0н^ Y{{̥Su+F09_9+ p0 l{b6'j y h\!s,(mm{ fNJ[@Td_`b댹8!}ױ^vSz~}#VwBe9]ic  , '6g*p _4E/'e4}wZ/ OF2DrεG"z| s7ƿ5 `>٭0p~.6߶]ZXIPレ<71/g90us7M ӾD&.X)`MAL[Qg^1!ezp@3|Ke>CpxcHXt 5, =Az-ec_ccd%^' C!l>@T;nƹ*2;-_em^d;n{N@],MD 9 -~vB!?j+ωQobs8ܧ1 Í[趂:\S %/tMxdJN |[Ǹ /*5|TD~(l\_(`f@EN}t2y Kkv/K[=s=r@g@,?Es+ b\/2_1!d1[(t4!E<3 Zlyn拖mUXl u'v֬Y桇K5eeR͘1 _s-ǿX,r~vu0)."3|f4*_02_$2&7;50D}9 `'m=Gµ~ꩧO1>#D0ϛ7 -t<lgϞ=j<ôÒAù|CX@)(큅(!ӆ?|ʗ^ǧ2F%]-}gY<&nEtD"Li{λ[;j;وjhcy.Xlڄ;Oi`ZUُ6rDqױ^p,zrh }3oD>.l:".ﳉ'Ual]0JWfE++>5llh YfR2hoE_<VG:BZXXX 4w\/_>}{MFBNEqhϯ?Wo\rau~ LxQQi6:%2_97q @%JS0Iݶm[S{_.[͎ٓ\C@%IpZɄziN|tL`eLO NlfEZ e yAB/ j!%D}ק=1d̍)8~c,2 y8ώ5iTVG57Q'3̾罔8{g֬YcƵhizj.ٸ5 ]%(LV{9,O՗hd"7{AS We%{Ͻqs z:0ϠV]N;/a/(B82|y2o޼hsiX Kgx#UjUG|Cͫx#̙3PfcOVIu3Yֿ֬ؾYXXXmxwUĜ?8 M6dww ,0b6lзߥ?mڴѰ մ(Wd-vMߦhʴdty- H'nĖ쁠NhZQL*\fd"Kj#cX?2_,j7s8m tr2Q}'@?ao#eJ j0}3}t bEW7U0W0< :w12ݻwLf^Pr0ɴsFIMS}q"#0Qqx:B֠oN=?a}`b)F6'5LEg0tUGf< A5 55,d@%HgNg b2GfQ, ͲN6as L 3 3AZV~u^; (u4:e KVYGJU5"O3g"UM>!G0a<cܛX2p!hdDo 
AKߞy}NP7އ5kLy<|.0FPűzǏ@|eeITڃɄ賮`HegDrء:끣Q̜yCLf9R^TXQ| \a[&l΢;.oNvp qah|dϟ-6mfsL ~._IJ;S@u|*b?Y6%K\DŽwڥ/"ip鲦&e+G1a!qqeݻŌ/7" :`dŘ1 Yϖu_W~`o :dR-h}5?,bǛ3uzus׭\bbr.}h=?:  C̵bhLlfɜ6mYj ?f&Pns9`X1;qFAϭScjvibEhVT 6,6͚^5k) @ǙxcFBg Al24$yACx(!t/>f$sb ;i(߅4`'G74pHh`7-y_r)z?#vզ-}$Sj/|ԭ[77a] ^R05iRbF/drsz=m!笚8"A]'[z Vf=eYXXXg<5j{f '_*R{--gX/Ł?tXPc`!xoDDsb CR&ekm۶8@'\`@;Rw6rЧX?5l$/팖7 Zk7܋W 77n iݹs^'O "sٲeO_B XL A7:w}.ӪU+eEw`ŝȄ2ah91yBYCfcFc8Y!(f/qN~l&{4A\EA =_dc1i(- w~Γ`+c,uȤ,IGb-p9l)N^q-(A;1nO3|(,~&#K6!5a@+)"ie .'-"T7O,Y2:F|J'kCmZ02IG}dbUۨ Ÿ4Cy-Y> 9joB8?>9+ [ 6G`IS 4 n{6{/,Jkɬac=| Y5;"} r7T_KxPΝ37 vH0AZṽyg=nwKk $뤚3tٔpyC4?Ȯ"s7T^ׯw~bx{ 'V MguߣۙZbdǑA@~L$4YXB_Tgoz jqVm9`[@a9p G!׉/mI$%l|E"n_,;<]p%4|$$5n?jL_z^\9zeÇIVT \E֭[À`iW:HXK8x<Տ<&lzM]B L^~,!/ɲBUӏҌu78E֡^:u$!HCUJKO/"SzM{`-7ImQĻ^w$soDI~>0P̟BP4Y-nW1c. Z*~,Yw;~^zE̪b~9y]1Qh ըsfi$͹ABG4k"k[oz^!ނEftN.uۛvmtpO͜-2ez=o_}f`ĻuъL s~5?ghd`yHLw?>?|qK&poŬ`7ZQ̿fH:`zz{uULj!ޙ52$),橛izh !<گ{Wӊ3E- ;@PDǂKf |:.f2g 7ޔ`i3ӻo-XʅTq")@Ō>pw&a/@b~w_DL{ʔ7Ut2;HwauR ,X?n>&Bxt~x_Kt<}+~9m>Ȑq\i2&Fe0ۘYQTxYO  9 @M3er۝MIXE@u(L4mNR (Kg) 7e;Y WX̖֬YcJH<`} ^x@qggʠ7N}}D."b={unw +kG ]#Ъc̘/O.d lB8R֑zzzzByL۶muپȶ>h0gw]NӺfPʉRh;QM̟Ȅ +N|ւDmSv혺3'ji]5}F;t9Uj>V-Ktwtǟ8b#x ZD], ӑU _fu8A+Azz|d/,@$/Rʄ!HluzB97roA /‚2SGW`Ig$(\PGfs2g<_cMNq24%}rO+cNnLhL0KjBde}2d@ BUc XD][/,$cR&kutrGfᘆ׻C܅;_Ʒ g5nb#Cv-kԄZxe! ٷoyF32Xt8T)8~#̬— L! E||̿C۩H=@z`|Z8V=`cqYXXX- xPB A Dh 1|<0l{F#<`֖{}bO@^TDS>Ppa0eyۜ!! *z`4Fh%׌륪Ι37fpzJTb wh|-LN^}"27IFNp0='^'E S՟=/Kׄ>81$#%v_qȢPDm5;52Y'&S9Ⱦu8FVd`PݱDu9N77lf]UYKS@xm7^-Z_@?SUr7/uqX~I$;&rtoT|u0QX1 cd0tPvAhE 3uuq|΄Icag'*k'X+'N~}i-=4l0f5ۄODÕ$wp u&a؀u2 Sp|B1c5s%7o"ﯼi̙3 CBi~,\4b:AtXZzz?`E3[M|3Ť|6[=h=pzXGE]wx`׮],DUp3}ј5lvչdr Y҉3? 
e9]/aP]09f6x# ,~2_,mP}1'˔KEǢKfЫmSf{Y+sWԳ|̺u A4ۙxi^3`8pi`x9|ou !.>ii@pzy|ev꿕+Wj:<±8;غA0'3g;z>A2gtH}w?CcX˙=%U9G fbLߦ5߈k0J+$ 8x 3o(@2ߘp>[־}{3f̘e*;h S~}է"{ٚg` edh9=Ό<O=v `WzzzzjJă35uaŒ0|)=65E`m9S 猳DZf0̡Άa/X *Pٹs'֍PI$dD3a[MsU w/Sqpk?5k)Y]Z5~cfXB1m+XA>so`Tca&?!ȪKHPmw2Qɹ͏7le_6L"2q,?-zSleAƠ~EůçUQE k5.;S0^&]|nghLUBo)K0G]xLTgXXK=rFTP_@M>圫e*FZuG,u^9o8 Sre'.XZOQT"ї_~f!֭!iٲ=={TFؠ9MEV;(}Q #qR) sMP{ꩧ̴ryq'Մݟ,VFF,,F{~&5jz!ǎx7E\t>zW fME`d̀d=! 0v7W^P|_B7ܛn >Eԃv<=fa,| fr:,?e]g]齌3p !w~фKKe.U#+g7I ާí#`1ђY-gqK+x˗7Bܧ6E'#2ygi)]ѽaؚ w C']D`ԡb!Buashz˝;V]0fsnZW~Ef{u%@޶@3F^XTk~P#$Ӌd\=v>ɚPGR7g:*WZ+jE!zNRTk;] 53]^iH ut#@vxS>q.omԇy?'{ ayYŷѶ^hzsh wn-PmFqWNU FL]`(5,_vNƋ#4ԲE)a`ml:ܺ>e=;=_D"o6a!5(iRIO0SN1SL1w~.E,(%n;2D냘eP.by'ߙX;wiР;b+3ct7 {bMvϛp}Α֬Y3>mŵVb3v}4+a>Xܫt+Gʨ> v}6l ͝) { [C&\cD8D'b c'lB8CD__p 0j,euN&cNQR5(5|cp&f\ۻѸ`܅[}_U?k[kְm@g쥗U}9%U5ymnÞh=p,yXմc8`> eM2_Td<1|/bիWs&6qDXAn\}?l>F#ꪫ2M41+FB~tțO:Qxlڵ歷ϛoi@kz~BY\'8&j0:zX31:_q\zfǎJ,d^Q죮ͅPҤ9}d[g ۊV j_BevET=:`{X*BXkCvӹ 'pVpP}6nܸTڡL8\C'`"߽uW]˖7 ;"=@.k a~)Qƹp2BlK/Cw,Yc{8{6m27p뮻Yge7`۰aC^C?WSP!ST) Uy)gGL-_,0a@1Ν# ؜z9ۖ"BDAg2:URJkXd:k"Go߾ip?CX?֓I8oVz`}=#Ĵf0©Oeb=Q.?! ET<ּ)1xqio$ffB?kh1,_-|6 rOJ0r<-B??2#-[2z1w d"p<B<Ι{X]asUꊡ_κ^:wJwd:6iC2+j;48gu~",ҹjvjr(n2yp)h/ʸK*Cy$k@/:} 0=1m˞o={5&*iݻ\=\l>5on>3q=nBQ|QF-AkV?@*0cfė pHiz#yiz6m֭>Ȱ򁕬.)',3˺իg`La= BQW2>өquGB( %2uF-#PP?gh"f}PU x:T9zPycykYu hϰ86=`cQYXXXw5:Ny(, Z e_5¤}I PS}$zWd<>sԏ=Z9~+2t2I+qD % `0@i.'()mۮl) * i&R{ 4,17!] 2DF Eɵߦ_ 1?͸ӮS C6=tGҜF" !&Q.vyG]9yo;睯הLFf޽{8p$}YWl" #GW^YH'["tܓzM".]  
s=;|lo߾=ZWVDuk5|;2Ŋ3{+YC}Fmδ;ZۑZXXXTPD$~hvc̩)MiFPXfG1/&"hI{\Toy $UD "pRb}>\'iŋgLcI͚5'ӧrʙN:jh@m?|E"eb`E`BݕV7/P|Y̳5\e  03~GO?Uݰw}׼~v"?/c6oT>^΂}r!JJqo0T(HX8#Gaw[*1gVֽ/cUN6 G#D' É{W(cl4@Ȋt^fjVjVK2Z5-˕7ݒdtm۶âƝwݗ(5h~`7euiȐyH=Zi` d(W[ni50ٚ@Nzʕ+ˍEiA'1jk9;QjQZoxXlGi=`=`=`=pPvs[;vubyaBIa~ya~@U04H' bajժ=#Cx0+vCO]pas\ٲfc63Vo%`:ٯP̈́k׋,̗( kq/|W @`ZᏬ= >hH[!虓V8>xJ a5 /d3$0H˿ah3Mf e}aPZ8\g ~zgР\q+S 4HYO S|C&f=`=p{ 4֬2@lYI J> @?Jj!gϖ.=ŪT֕+e!05}A|V{\3 -"wp %b%''K2e"RGȈ@D?FqN\-G%%^.UZt@0Kf HӦM%eҬ %""f՚qUu27ByG%jUϿ)^ǻI[թH"Z7 )аE*h܅r{{" ?>NNI"Jͳ%tхZ6TVUc \SQq$?@lLn>l;GBujg򬴋Br`R~Sy`z(  KޏEpwJR\rGe?ɕ'^}EϛO6%e!gDvn8 nG< 5o.jݹGUd{„ J{]j|~T8l{ VHU.~7R16H%Xˤ]6)cWǛ4v̡ӎGp/0ɡV9۶m CGdO?< hm}$W˶_({*NUȕ[ʀww̤Q Z5%6+'Ȉ#L@9VgYAB]WIj›;_̂%t@IuF+FPo"8:\wiKۢ3c%L\)#PtMnZAfM%RfZM6_.-[0$Su7oVP(]B6~7oi.K 9Zoc+rOp.+W.xDA+n -li$3;Bc ۿv&24 ?&4D*-^ؼ" 7Pv联9"?(K7=1&!UR*κN a`[ê0)a|+73ZAa}P01]{މv6nns[3Nny>Q4` `}| DA_!YZV .U$ %=ejXWm,f3 3/}h #7~ )эލd KTW}<'1Q9`.[ >\3RGͿd ܹsͯЎ@P$jq?4s;D)1/#5"fСfڵWEw^Ѷ|Eeʛ_q-_Cݫmn4h>ɓ'kƵ˗ktxkzy=~ϛhe鬨5B2ӺW5`o==4y߿2+]{!}B s$x߾}d1uY2*C>43ό&b s ;a~`:T39]3Z2!EtL2]b[XYĿ|oR@IDATkGvpTQqiOO=Wżxρ!,Lqws I'Ti}+ĭPマ)&LYlod= to| ?zTvry6me;޶*V\^ERIʥ=ȮK\/kVɵ{ I>$c=iR Eޙ#t=|"_-6k*LVԬ#125~+ `zI3Z%vmC@xPq R-yw#”6xXGڷ@K&WsЦBI_F :t\R "|`2X@;GÏw.~Q #!>gv@]-'{4xX^wR׹77(ǯ^)Ig.";=2x`.,YD4k.U/-]7-{IƍomBor8jF=0z瞉m7 UJHs 'ʭ*2+ GVK(z9iOd$կwpnm#˔.\LJWtg+d{rg+c2bGDܜLvH<=B/JϨSAgx'㜎-)f-1Oy7d2q}d|y}aV L02CxΚMψykwHB23,S'ooy ~H櫮*oL< Ϸye [zz ͚QLYfj֓H9~~~4p/_޼z ^;d%[--HY+IE!Z,!"e%ɾDB%JJޙ~s{g+?o̙3y|<;tnRD&LÌc3!LZgg!Am'$s+q䦸/@g_G-?rМx ps/d+Gy::N hИ1G8s^z?|P@L2w;fjG:oaWg 0NgFlQJz2iX(Ա U O71iOw ]9`iР957҅aKz߇[>AG;"\WjwNWC$m^,P30ӤI=xjذ߿i);ɦ;005ۚ8XjBdZkxcz|ǜi~SiCA~'vYGE6ԪUKI6_ ܊M唿5-m4 }Μ _@ SZ( =;m/V!2&e3#56$"6.ȢK:5GwX4p3p3ڵ(zu+Vq3^ # c:l0}7bْJf YL:Mٲe ߯LK.5{3d2X)m53di 4 0dqgy-o fgqDD`rap%5׽s"#jqdiL` ֠7NDLs~I66xK5DT o,#Z5'q?{~/Ftk{ =bkWpk\ -@2|55`D;~k,F.kٲc3f!q뭷F\oBB1vd{&c-ZlSpxEhA'*5͚d O858?'Me>/X540`n+XH#<ѕМ&UM8   tYxpرc&+r:Bd;&ڷ++ s22 & 'P?scim^[9;  9zhƬot*M4DfZF (o޼n\֬YSmX֜fʔɀg3`(A1Yo8^S-UjF*39uz3[ۗԬ4YmD9CNm 
b4DƟ&1+L?"b:f4/Q.te>v;8yO8akժUN:6V=7 l|5%M`3~wpc㏉\Œy^P5 #F#^Jel;qpO'?T73~Ϙȑzo@ƚ6X3g)pc-|!`mm;Yo>h~tx|]!TQ,p;J{wD𣁺˧8s3{g [?|Y:QK\s'MRz~g?K|7ߵk+c2vtkq-Zk_6AFĀ ɗ3ɤիgnfg\D&T8~#P>ЏkLs U⻚+`yt * X6G?nݺPs6D ~gF>80OT8&FsMeu  9pQ3Jgt<ftqKlM*<};d Eɕ3ߛԴKBE\˝"Xe3k$E>+%PL2㤹(6߀8+SsXun(<]!J^y!J}0e-k.ڐ66XI442hs" UߐG:MNB4Xv tСq/s}@!T.b$ns^q<\Sۈ$W_ bc{֨˗!Y^s2kP_#m/p<1yBzZTo ]?f, e tR7c&Su/pK+~,>uFux"BOX!Rʲ|nHþF%MГ'Og?+>ŃȅbZ`]A>3B[CslF$,O 8non6` 5P"R>UV6h 'Q0#*K:O3a7o*cSɱ˕w]D~bп!⹷x Y>M6LMH!}>„T5ׂA6a8V*7Ŭ|` DsSv-?^F].ޫ֭[_HSdɄQ8y 4]`{ OxHfCnP^KK)Ze5kWKP) z!I 1|P1>+7XD۹}K/ 2Pb(YP/m` 6NvUj};wRû䈘)?^|w(g*93g~^樴^3gpñ+x>F*sXc ;`8v`;.M]Cmeډ\Y]۱+W,^fkyihdyFD}g'!>/L@4Y>ydSFD[a7xXsvZoC5I \DžM\ k:&nk\ \p4A&"`S8\1.:n7@愨/v30K9~ĺ3[`i҉#6"ְIY`mLW [+xY3JO?»Po+n1Mb\D[n/zԔTֽ>Pܻ[) 5yux|x}]?D  %X)A[@' PY*M,j_eΝzxD/JsV"#!vy'/f(U3&pSUsi6&ub] culo '9so<$>$ W72\'ݏa2hүzMy`~>_z=q6@|51W'fB @{0 1.P@Ot/:+b=F/A=+?_bg{ sMQKZe4FyO<!H|FP< 0ꏓ >zY9)V>'5k,a`&j7'< 6mT,R? gOsDISu)Y{9 y j'\2XI}ԓ+b~Q<ѣGu@7ϗaO`߮/Zk!Aݬމoo;(w@ēfA?=stNC~`m@&'XQs@k< f9>* ۠I̙Ӥo[5. r;v֭[W+Lyyx&Ts=Hի9g`F< nKM\ k:&nk\ \,'8dKM36i RUL(Zw8w7F_k۶0u)1u`5Oznd9 ٶb|,-W Zڽ{F#3$:5{leXϿYg˱rCNR򑇥Pp. .xڷ&b|lV (RsW^  1R=ldĹӳ)nxg\\T?!ˌ3d;O-q%{II%KŮPLAO"[F<O>l/XzK!f[=z8N7n8eDAX\bqV#/\%+\0)mW/̟ve0񉌏 2_Ǻd1 BSmwf˷bo '@"\=#&~ ᵓ ct;F$E:!~"Gw!Z&Y7gȝN,m|3"D$@hmnx'dq%I 8)ć ~'w~d1v-&,ժ>R %أL 8~Lk /!FxN[U#MN <.kԩb|B@&O,29&r,"ڵ@*ZkE.0أF/1Od p"W*A4]r6Yk8bZv W?A@X$DBC 3r ":*ye#&o[yxi*Gw~d $ H_~ǎ$e'`KdlƑO)]K,$dh#5 `\UM'3$̛/GI \#CFH-C٪;V rAfY*)Ģ(8[+ f؏rxʕK:wȑV[*JnSGH9M̩$ bu~R U4^Nf >!GVKa{F.p$Xo ًݲHtfhB|5ȝq&I I$Ns\J}cm`ZX)  F#s%Yn-yʐUu|;˟Ow/@wR.ݏc*U_.]M8>2^th1 rzp:<5*0tc"Y<ͻW9*<qW^19RvP"m];r(XI].n`6flBu /JUW@?O]ݣ7Exy-AaV:ʿ5XJ7o"ɵk,8Zk% hlmڴhO\4^t0!4u&b׌FQm`_Hm  T mNhTN}f= c3d!qi007U85ǰy9iyLv_(|zCxDLJ%la|e*M`hzw". 6}gϷt̖]p}! 
X5j]zŊT>\y4r"wݺ3a3ѯK=4*#ΐ]L^_1cL6R~VF6"yZǘ͌?U`DS;BKFEZ=j^`y Pn[!g6c"!x\@H1xnwD@oF9OScv Z[i.gTDap~H `g8I7|w߯fwkF,=e ([*vp]*6I5JR|YSGn+hujdZ5g;ZGAC.޷mۦ~aޛo |w…L#*nM0W86c_a `JiHqFd%+eԱ{ˈRs븛Z &\ p-p[୷1,t@Kä&'# ONI?NdL1H6ʮa^|jE`Wm77'4Mx|l8sk\hl@N*S>"|<ϙ3g1=ڣi9c8\̩ 4MϨ6'hǵλ.1 hY@K"q\Z-tiì) '+V{g F$@g]-oi*`M`_>>wx9~ќM(j+4t,Z5mWX:3#cxP.hgkTH'چ;1v{h~:*cAQ)- 3DdԷ> O$E+O>5VWL{??e MA>|8˗fǷMmyX˟@Zu]p G4I.4,D\0; `\t ^Z'{*DX; ŋC+tM jʿyC c>3`]gۮ\ [ܡZk 8~;%PUp.L$b%d2Gfrg29&СPI\:i6<$"!T U 4rhWǎ`O4EVLLk ,3oDo86*& ,8 A"2>5c`7iǾDm6Vov8ǑwBiRԩSu?42Q.f=oĈ4$#Ȭ 5ʾ"X`_krbm[X16,l:Uwbyv[t{֝/K0yKbMsܶ cL%׽d>YF(XLFvÍFik=̏^35USZh'AdMXAr8~ l'2@ÓlO.ΝO;`YEGi10ȸMbROGb lǏu t~R}#Yv} 9oڼ{.o%P: VsV@YAġ˹sy7SZ2|w-Oo˺ݻ>-3MxyCj׮m ~lB]AV&D!|=?ᶿ^o_aIT+KU7y+WV=nг f޼'ќdoO).dWO5rjh;f Ow>o]LĒ/kMPrj][bqȚ8Q eEg" +:Ssc}0#0 Y%p~x&?@yx9bOfI.:e˖)A-.AW6M .wb8: Ɣ3E6ÏL+Xʪ>|hKSxR|ƢYHl_Q3C_ѱa_ X٬g*f^}&!]=~ {o͝'% &`sD#|?8>7G'YϞ=faÆ3M\ Ķ `ŶZkאLb _C fXN7DYV3>264Xb%U!")U29'w?.Oo }j`"sW]2ddlyh ǃ؎t 덦 =/^ZP~~5ؽ uWu /hD~~ӂ bmZ|/-枲B{ j.{9Va4oJ&K~dYNZjeJBjS%ɠ m J  5KMo!]`{7:NZi ͪZY=Թޕuy/%)XJee^LTw:1clȖ3 &h$jh8'o1^$ BN_@cd]@VAOHIHGd>d9Z&L֭ʖ]Rk5ǻݱ`}Zx_Z@c-d'9ܞX+u({ewkIG7`_ ] >D` vXVرXyM\ p-Z0: SV-iٲ-pM3wA2]V9hSI ^xKu90BϏI8|pcM6˪U4<%4"#. 4jPn)^Z9D,h+f -TzSۤZ/ޛiN ˌr32'lK#wl2 pE + oem%0̉BIPx|N(w=g[N]-c9 B`U 0(=@"`7'w"E{BCRɜmn3VA[6@hM r"PU,TJqk{+g~p5?C\ {,U\ p-Zڵ@eʬ\ɲjȿeS,iB.,23"Px2Q2`z {2tBh"D)Y@] &qwJIARyc?Zy3=CbfO`oɧT4޹˵gGaJC3:*C(8_ytx&2a# ,|ES~/RDoCGOYipj ڭ; P µ˯}%ծ= p 7Ejj*?76eҭ^ZcL"/KN4G4rx&d>=sevb`}w~w\lʃVq_$~iʬixz̎ibV9uJ[T#dqz,?M o%mp}X& @4V}^d3Wb/]&*qŀz DV Rp!4[7PcXK,yn /uڴ,`ű &[ WP(YU3Y{>"y""YUU425mڈ9J̎S 5_s\ \pXq{Zkװ,^]J1A25j R㮻S W^dQ_^n:Bݯ@ L5W˕Õ+ b&<|mğ@.B`J%Y\-{U||)L&vn$o4ր7DFy:ύVfS.rvc(g$wȑ#]bUd&;nOGT*ynR,{_yYvˁG%\xod~kݎ_*WGƤJ!o!M[[nE݃@$?G/1E<͚!s,U7Cov"V5;0̣z &/U Uxhчz11Ҙ<^uj;n^!|@`s:M5~:9ps&O@s@b6{.U`z"l$* 5]m=“:9avtƅmNP ]{1r(JtD?5Mj ݻł+|7p-Z,X`4\ ȰU4_euw`-Zwp}rÄ0@(L sU Fi{Zwk}KYG7KGurU^mpݛ"^,#WQ"Ƌ,a̩kݺuRD-q\{o w@Wyխ. 
,U=#윪k{1  RxȻ idc>G읛ˉYXR7*X0 1cA<< 'b:{Ybc޹TM`yoQ~\Wsx{eO% !6R%Ttgrtr@493y:a˯y1},R\-f͐O=@ 'Gڔ2xP6ž+[ \d`A۱=]~)Uj!5*]xw xnD䆫}#b:=(Nݧ˖QWLv+MB3{W^]lLUUC `ը'O 8O+"]!P-P.IRzmv>9ŏеA?6bF_/fR 1 0nֽ*7AL7M.O{xk9WxMB&n.T{|W7mݵkXpXp-Z̶g#bYUP7ɀIb>H\N'M QBV`w iۺp;#\=}tѣ솻PJwޑByom,1piRx͚Hea-obqWHP< .jUU"S"',`H?FKV/-eW>b먀5z ^ɵk\_qNp-Ze7-+ULe Ѹ;&"m&L*]!b:oT%n  pfLFcGҤj5sUfyjdٶmGCh[?[ԩS啶m% rZO~s_B@׮+>Gw1˥4~S/_,yyeb,p*>O-RP9B&:Tb֭0#]ݨ`߰aEb#FH ` bYX\ʔVN4`AЖQC:T0>G{ID#>sŋ+ dH ߗlƏ=ZU9ܯ?uRsB7QjlQ_:L뻾$u'?z FP{٢)C_%OEg3-Hs[ٲٲ.9MX{"}@IDAT.8^k+ fh 0ߩxٰ): Qƍ%f[ˇZHj߾k ̙3GJ(!}.\X{ӰaC ϒ]+0@oKvLDB Z{d$lW9>[H!Fgz'i`" ,9*AS~J<2DwA$"=`Ep  EyDٳG>3f0*ա Ҟpo301*YȷS |xo,xc֬çOKO SJ1c5zTfYD)g͚Ȫ6|qʥU bUmf% 5KR$WQ@>} /#`vS Cw'@Obkώ*Vxm@"?APA{D4So cלp~^O^Cn{D!"/xaׯ5cnr-T ^$EΔ)*. =8lEf;ixzSgd?S+ϣ!GM\ \v7p-Zk mAo@ی?yKrO1ր׍q/Pz>FyW&Oȿ1O&xx–]̙VmLЄpP˯k2`فCΩIM6<ng_2v^n[ǘi}oF7^|TPh4̐!66_lYsU t9϶5{nȩ?~cole|F7kz&Ia~Cm`ߔO+ Sg4]ƚ9 sec,TGe+)i? ?~jRNmfj6Ze,<<0֤e"[򘱷55+d/7k^H\^Gc`YQ͊+̎;"ᎽzM6Bc/ gq\Tq؝moc)͌FM+}f:jUҌ?pluVp'5 0F2`ـAl*톩csϝ;@̀9hʔ)̃G5%3UT1͘aKyf͂)>n{}gc>ji2}V4ִJOޫYBm-`cy,,Y|}e'O@gxcoۮmP x9}#{zeZg ɵk\ -`f X s5#L̎J^'f-ǻ5K%EKA׾zOo|.H@R5L 5qٽsN?_8TL6۵L&A }ϟ*pB` y36HphŋCuXvݻ_Ҽd 8"Ow=lt˕WZ)Sy݅z ASK `7B5I[{G;%vxzo3 Uxs!A#\wuTS[5:Y0 ֨덵9c?T?{5X-722O?׮]熂Rev~J{gjEK'F4|M[@j)YK. )Rļڧ~M^7\7\W t7JuvxX[frvI~ӲeK&Mw^ps8v}t5r쓿 ƚXS+]̩n3 Xx|kӹW2LY <'Nǭ~*nݞhY$ٿK|cj$"¥ʼƋT"v'Ğ9*ܑ~K#$ozn{ E_}QUwn^}ˉ.f֮:B:tP p_|y,H#zNݺu.ψv\l;k04 "nɄ TUǰKOK'W.1jmǯp/TXfC+d3/)kvyӓTNB*9\aRE 0@vH/,7un6?1:{|@|;Uounv}*vsy|6%n6#MәSeOՁy>M4[ ڠ`a_ƼbDdQ~wP+I/ow#ii`?hpIg@((>R.OZϓ;]{}eʔ)V]R|kOc<`ȋLSq8o5y[{. 
D!p.ωO^ \2}=;J<)SJ?uA"r>t<Ȋ==Lk\ meY.κ]](C.cy/v#`2it!K4SAfNr2y|!biba2pYLu0eӥ3ӧOW7-Zc+Sdzb B'uTTɌ7δmɓG]Ml,c?@x](Vٸqڀ,-l2uy8w-6AϯY9{{ :n g`_A8tu9d|9-5+u##.F~ҽ8a;uU ۾.;ޡ;3 nx z;} QÇ7o)+";cL,ŇvX_~[RABC&ht֒ZﲀrU藓wHY|N6~5Nq\ F,^#7k\ Ķֲcys eo/t"|HSkʴsȌ>g\ELTN C{{/dn{Wi]'t.ٳg7Pdyu8v ɟr(jpe֊({rMqԍw#K!%1q)S2Cқa_ڙL$QK-5T[0tOѢEĚؘt[^& L 1>B(Ɵ&cL]Ce2vYt~8xd lu) )9V9r0`gD:lZ7><#W > ]9&&lhG~.yn29y$e0Rh[69tvkצ8s&\ p-p-gCfEG p~;t ˊrUkyύ7j3HY駋чEf+}n toM({/*^/ۈ9Z{G5gΈ+ֺ(w8Inrk tɓ&+_#7K}щn_~4h@m&7^] Ce6͔C2q Ym\ =ۊYD |wõaOΜ[\ls7/+g|tܾCLjo,\\@Z:Ii knkW.uo\ 8G˪IC8)~$3"-)ZD|f !h5ؿIBOwzs]̩eOuA@:@' õu^ZO,'+W)&}n=t;BXQ&=wG6_5uN-AkiƆ r;XV! [n1uGc8rL>s@2%O!{kn W b[pzo+{ qƽQ}2FĂk }u /+{^U]_F_GofA1lb=X5@b]wظNk>ƨ!Oԫ#bVVY={Sx ^#>!˲7čٵ INbVwT(y RՃ2j պ"p\FeRJ%.:XpT=1k,lv֭9(֭vqRss-7Y@o1F.т>9 KWO$>|Z[do˨ҎN `Cb6 ]H`6u&zHYs~ՙMLǜlyt!Em86arcf #\0/^C;+m\X7z EARh>m~$57/l>:G_L8y~@N:t6[aDΓS,{.fժs~R˃vSA5 _Y3g?dCAxW7Ng =gϽMR-ZX`vl hs;>|hl7kPD+s p-pU-XWn\ ,0zh\ XLNUM !k$x9d…b!ϔԌq4,(hg#QQȻ8ȟrWww XZn]k{y_jmtGI0IeV_adJ&2fdtOJAwr, N?ab,6q#g8dkgXav:/h+6 4nΘ&hE&;P]8 }"hM{/۫W^^n" `獾},)kޓDFFF'N~RՎ]b%n,YNAr1A|>T)2N=zN]|e:foFL67,5c1 :;[xOyAj~ۄt] pp-ZX`ʔ):>Nj۶i-ZDPL gȐ!Z믃Dt QI>I$2V AAbNzHF C@D\D t6 H\hY ]I(x L ON Ĭ#`(7v-M r,jfDu<3Fd?}nMי&5w5mzN/"48Nf!XC +DG;$I6/' T\ӧAD;iәTqƌ9f&Wr)\6l dIF ԝ>9lId:*BJ7'Nd_\_7~0E]%] WRԸ"q6}9U@հvɵ?iZgȎy{ ꎽiS#:up +!K$l!NY IIU%J)$%ED"UB #mJeCO>vXX/XL>V\`W5Y"e.0Yv:>sM^!?#!zoZ. LXݿ'O*WVG_`b6q >cAM(EWq;+ /@2 ڢ ]L:UΝjbJjڇ #tI'AXrU+:=\b yJ(<`<A"`۶i :)ԃլ]=ӑBYm٪֓t3J+XwٛɺVV ՀՀՀ@p\ ³ dɒi hZܹuD4ĖIs =ŋD2j5PBrLfڎ>;N^İTQ̆wa/E%%7ŗٱ# O{zY1,iGH^t=ܣƍE>1^:^ƈ+g>\BJRB\$HgZksI?[J#GnlסLoێB' _-Ѿj3g+IƎ+ `-XIl,R$ T8LK`E-㽰@܊UHr|{-;Ļ!-ބ:ORnr '@h[;|.'F7xC6m*ȗ_~)D*WAwIjMYFdfnz `I,bۊqr-`|ۿƥUJŕ-\U ÷N]D}` J60IvDp1Q-NF+AHZ*GDL|ɓ[CJ#;șG%ݩ?J\ȫ<8RXUҭXZ`?߉,Xvi8(zy{Lj&Urb?7Y=*f5;%3ݥok_ Iݣ `gU?p- wՑO7*T=Ŷ(=Xʅu#<۵,ֽ@B7%m;6m4.Fk0BP pzK-^y5MrVVG?+VVVV. 
Z.L"߾}{,El˖-+ vM| O=\~!Ԫ!7,>t/6Ԭ!XɎl-f^=M5Dd>IfT"`UV|6]rVIL21Vu=>V .%]q`bv5H;te*8͛J NmTLlT#$mڷKc1Ud 1<7[uһyikK T87SO={nm3Y>AZ-𜛹u1D/q73z85T>;Q4i@]dj <6Fa"C-/jj_ `/ՀՀ@Kvu%N:BW utL\B@ZiҭN4t N $uG"pUfMɓ'^|~:jjf~=k%kA1Ļ&K6)U2}C$9NsR9L#\p ;`SbsÅ̹ ~Y'GܰҖrG@nXp {x3>qjגU\p@{䪵..x ~ 8t[8Ub3y%櫯nq%$k.ud}V ]~ Z-U'\ZD׊#~ZA?*Um,KpP=N0v oDx}8Hvn0ho 7:^|?p&UUowA W,cŁ[*\c19:tkY!6iҤ,١)Jbjm5<|N`a`#B^e}DV}cJtk7Jwb1qӋ[|n2x?1wF=oE+:ddn㓃5˝e/țO֖H4|9$:Z˸VƘ>~]obR#6;#\Ҁʜ'剺q^!A~'m87^Cs|:wtk?=_P8FVq_ŬG@ TJdL@Gq4^{yK̦ȑS98'jpK5f't?n |Rpnyori[Lȩ8J)> u^#pr1⥔/owg݁#Nw&K` @U{o2_!rl?Xq_õeG`A>|55bkS[(Yn5H|2'0Ҿ6Mb5`5׀؞ՀՀ!sM Rhp[uaEk% [$x<|4eqgCT ?7 .(kW_~`eJYꟅI`UɁ&_XȄѷ;iʉM޼y)$>IJFX_u`Tj(/fqig?S0xaD _f1Ɣh~u!tbozҭ4:.q'>{Jde{rdmgFa?[2n~xpm{gMl@9-Y˯dy @('{NiֱjGɊՀՀՀՀk`Μ9ҵkW(s:)(!b{hF $rSeRH]zE4P4sQ?)SĭЇA;"L ɵ|UfZ" DjTV{ϭɛ /|Co۶M*Tz+L3pB"{J4t)ag۴HprBU~Pz;yՀtߒ*h9Ti/|rV.\O?%g|%'Kn@_\zjd N}:\kEVē)98(YEBK[T"KE3<6 t=h8qI}߹a~pty: O,'%A X dRJE  q)sOgA07u@Wpa yKO`D6j "". ~RAKNP jJwghe5`5`5`59 o^^}U%h9rw}J?dȐi$Bl ȭdR!îHa  w+p; . .3-1$md}qe`ARcGX #Hhw Nx iH9Hq.B䫯D^ʗ//0_,X!d"JP,Kc!`"pSv>}Q3-,9@ghуhZ}4"}D0Ew}%9v~*9V)93$8! (S|ɹtY[dzx|bØȞA&~([l)]gLUӻzzpkM(WUIɳc]:B1}(Nge%/'J2FΡɃ _ vw0̳M ϣt%'-^N(Q 1|s|Y+ i]؄o̯pDW+^쵋''pvGGNd7` t:xx2rN-z3c ={ݻu Qt錆nbج61ܸW!g}q΄4״E/2VlZHt ޴:w'dD|履LBe/2!LڈͰȄ{=u0 v/ d¼aÆgàkuL$iYZ`%SNt'@ %8i #M$UqZV]t7͘px SL1Wa7M$N8hxKRYmQuĬX)ΠZ*j9HOM_O+Ҧl$D$nXxU |'H`LɱM|Zk#]MbO?k)VF1c|oL L~*5Pq(׶RF\"G>(nS#o.Ojo8oKg\5O?OةyP`Ax.:,|qVo:*r^] i֬4-k-/F%t*fV)UqYmǖok{x4~N|F?,n< p~t[a>s *өSJ_MlҶy5"ٖdrM7Iǎ̟5jm>lz:X X X X X  *U\܉;ވ%MU ,khA+Fo)-Y:m;Pq7]оS:.b8(M$lÒG@Nn̋/h_5itK_6|HhF+wjakY=3$S۴KH>~8p±xo6ܡu d_BS1-fa}InuHEMz }5 L 0`@Dr2K.OS?`@[5HĬYL-01YrD~ŗhHiRwy76l0 64|77n4-[뮻Μh"s뭷s9ǀ E-X`^mK,1%K4W\qo7 й2KYAJvYBZ=JCog>{Њ@vi9ğ-{Zf%­}Q|<ߌ^Ӽ<(/uUJQWOedй#cs) 7Υ,[ 6WSz FuŤ 3PhqB :[@8'3O-eXО#ωZPNp |=ޭ^'FGeUV鐶nj .l6mڤ'5B(G|@A S\9]L0L<ٌ3&ziz>. 
vB wܬHzS?a+>i;yZ53~ф\i#)KLSTTd E ˗2X/amݖa\ +^?ɧ^uFΣid܉b} +/) DI$ >7H;g孌4e/*(@&h}s 5k:r9t 56ʅ cBxXN.ύXS' |+fJZ.a5R?X<): i)?c'NcZKq[oevjgϞ/Z] ]?|6x"biF}p)-ki[c3`aM UN,ʰa[j nOr؂g<#myr9ϵmOJuG>u8gEo2~GY?.@_0M[,aUE&mبˢ_A#/K&p0ʟq͙+fh+ap(!|ԦiӢFܪz?ʕoBB_l.>_I`Jٷ]ک|)[~͚5`,\P ZCq\f|ŕc]͛'#/'ƉweZN~MdE+6c"UJ,U=\XTN]Br+D’DA&Dt'{z= CG,t ]E'OP,p3>E`Q!eʀ RJ{I4:uhRxw:q^#7?wE\O#h[..Syf='f׮Qo9Yۉ7\wג=X]W{4rr.wHѲJzi N :7ĹD\c$F|5m,DD]s+}EdgK>tiʰ|J=O.Nr˰^>P!i ^_?#12B麟Rk|Ȭc^9b`5`5`5`5pɼT.8lYߩ"rvv*׶Ⱦ}^t@B&&0k2+J?g+hjZ?Æ _~Y$Rti_K?3R2īLKE:_q?Z-_+3ΐtg?@RVph̙3Qz2q߲4,uv/"oasBr[m{ΝS@߷v-4O/\YA1"?; D̓x:u-׭m9е8T,%G[ѣ䢋.R}sh`j9 q,vaQ .2)ZTo&^ǩNe6NNɘ {\>)xEr;s@gOq6}%Օ=n7z/> ,YqBkqxJp sT$d_Ki[pV?wd۔cA> Q|2jTf8} Dn˘%KŻ5#8Ŭ@}|ݬåV̄0>=x]8mMp鈁FI K qގ-ݏ3Kv;qv"PL :|a|I?`^Dydp%ꖿ!eJX %oMc ` WٞՀՀ+۵`|<#0]:0A'HZfdIkWW2@ˣ7X>QF: *cǎW_Z"ԦM9r<9VD]kבK[VP3SuP J |Uuj,_q ĭZ_~B.c yw 0P)ť @g/~)<u$c̖ňxx{X:,FF=E=v*!g` )~Wk+iҌ \8.1By˥&M6r&WoKvFjZ '"R[bX[!Gl7ڽަDqM0⊋ZaQxv_I UVVx?>@=#`bK`EpEzC#oH͊9Ft6H*AXYȪ6&DgH``?qp;Oڵay[L&e|A-K`W|2di\yqѢzi;,֔p-ݻw Hr_/!#y5W5@<tȔ_I6ro/WetG[>hNꫯj-}w/% Ƿ̊&daW^ҠA+)mjjjjj 5@N]G(I +FqDz UK!opA|SiЍ=')<9|pr$oHTZ£n(8,|XXJͺJ6\r 5hBtCS:-l|7onDAFIb>1eE>;ӄwG0*/]Pd}9<ݷV*@(mJNFL5EɄT==a'# Q8P za%ïVl[ncOݎ5JtSO=Y|Fd$(8h -($ngdo1JK,FH?T2"0"#1/Ot#cADb5p(4(+SNv[V-  f۶mfȐ!?:2[=L?Dg;"^{M9m1N^Z >/s>?o@;٢mvB^BL1# :Tu B5&/^?eϔ/_?T)˱hwV\i*T`J(avX_orZȍ/ 4踉y|>Z8$5ܡKBU<ѰCOXHEtA *UHY})oVPWb; 1C݌" 8(_#VM9fZY$`|*,XoR tNnEL"}C`!46AK􁫀 4XKt@U/颫aS!8EF9Vj:M7(tψahpj%J]#7Iα K$_Xc?kt7׃ or!*$̾c=D";%ם2eF|:S5{aGQF$=I qҸqc,|a> 뒠 dԩSI'\@"{=I~2۵KB>C4Yb۰V4*ysO$\|3HB0،G0`G|%[!vlyi`eDQ?S9cƙ폐/S>$>`m|FݴiSf.׼[͚5Ӵv0XSOoB=95 06B ho*c.J\$ŨO~؊  P0>XfxW%(A,+V٭>7KzBk׮Њ?_L:M(G.Zlyi`M`'-BTsK//'AUt4Ȳ!6)-CɖFҒ`6b5`5`5`5`5]P.9bvyHD:D %϶v r]\٦L4d@zi3DN-w|F>k^Ey]r"z~z5 ,$8Qo-$%T?L53gcYƵY:6fDH ?d$=K"a6qQo"?nĩ܌R91c1Ί)']’7ޔ_ uF0#QNNp\J.'~8K^: ^ ,yc7q%#r"fUy xcnsE+T<+U~Fq@b}@B"wp2#DZ. 
_'@w%Hr *UZ~D6oŬ7%0 % x!~=J57Ǥ{0D \wV;봿}yA䤤7#%/am:+4tE<2ωsa&ބ2s h?"'c?oqϪ"f D^| ,W<dTw$ܦ=XmL0EYrv7^#ha'H/r ԭ[WFc]l0*~{Wn9ܒ/|>֡i _-Haqix˃R'LwtlĞK[NunU=e|EBXO 0?el ߇h4C}G"b(\#4j, %}2Q }–=k+U6?Xa%w[# 'ܭ@|A2"\*qx#&G+#lzD ,UR#&gv_ f~YfrV^0Up~[ ]Ҕxh%^"'a3+0'|"ƍ&u.`W/yHAWXIFj%ޝ! kΜ9 ШQ# }ȥ^?s{꫈Yn[$O`3 eRnY1o %T 6/WOJ*%k璋8V3E(K$֭V=I[n%O *|8q'NS"l_?rN?M| T0j"'p >[J|_"(#t˫qcX9 &z -k' ~Y3ggVwpQ-jvhAWp4?Dތil|WXIn4OR-e{*(eՀSn\dν<]w2)MF`+_Pڶm+pyH>F _-@9<%7mA@6$Հ G3Il$u( >VW@D τgoO7\I,tK ֈs.D$ed"?RLGfF\~f/"Z]-3oڋEsW-$v%=[ :/¹vŅiFeLH~eB O2Xx"=oFvN?''_asu ~[2ѱLZ.׆y?1I=/>?Vc2*?\$+t#ckK]D<B&M)VHx d\|~ΝPg;\{ |X˖-S-=:juWsuץ)rB *ӰaC[90"%Ѷf/^|{ejjjjjj~BhTw :Sjؼ,jdu# ͊iM>}4[2paH?-*. Fҋ]7{vVbiЅrU0[ Ɓ\Oq…. ^qQ`ե A-Xi P/Y&G >4^!d 58ڗgcD{c`Scs!zOՎluޖtLժU3f'Y Vu&&GOcR_VLҨQ#ywn%E{; <\[0Rhw 53GP&2C%h/ϭTǝSs I뾑3OસPQ/l8~~fK.s!xqbgϞ筷2I |wAw}/ݒ[VZʧ~Dq:/Rq[^gƝ4x,oy?'@a *o\Е*TH}amUz|h .4h z{" ]|c ;Ӳe;B'OlAzV{.3rK~v{5mf"TLxo'ino'ZxMH djjjjj ;5pվ>}xq X$%Y6;>H";ף'Xbf='nR^X `lW3u8_#-M-yJ4iR:,2E] cHMDcZHvinG%]tzn;( B5 XaIj+,?`ڵeo%g;A}lU]V 8[2 ovF"^V_pRv\%n%qJ9$eIN2k%~Z 7@~Jԝ'iqz.=(k/  BGfQ=}V^yaP et#-|xQqRgⱟ)_t5dX*V.L?Scϗ={Wsui@E w͍VV<-h^T<Q5c5m)aR84I+UA-ETI? .A$J*-}x_~&LS{u q/_^ju!&NV[;V$"6r~1R&#y^{2x`A@ ɹߥీkN~*+t =έ&˝%n!w4s@ nx[rl*GGӠ>ױϚ]oDۭP_)vk5`5`5,lcG:c?}lDm;%C ri0|.p}pd?眦Ϝ9'k^"pCIS[B Iܖ׾ȉ*(C~&=VvD%]` X1ra 72eܦK|v+S;d䚷e_{(M`$ЁV ƒmC'rťʷ~+ []VUeV1gRbAyJ&xG7RMQ@̗D@|`Q|V~͢,х#Gx1D~ jk.t8pnZ}YkSpݗ7=_{klK_غI+Ns0D"sH²VZ6cVV* ՀՀՀ@vk ZK#ۚΕ+W d;eHpbqɫ ._"9?t5|rA($QCQղ.V>3cv'UbŀĔB "{~ɥAL E-dh5k9C.(BQ.Pf4)_^BsZ9:t &\*բQ?<.=(Q0!n 4~}Voٛs,99kՒ5J.X#C_4,~GjԾJ' +> ]z4R cyǤ<ꞌz^?x(RRl@1Kߐ/Kpm$Y= Xq<-? ojRH iΣK(Ӈ0z_ o7>xK wdUpE 4r.y͕W_}Uv%(IW^2~xW"I@w/iq_$^,!<-ZT|!G[Gp{ǞD.Oo p$-Za% ,@:fH3AlI\z8K]57Nj ,NSܿxq@rdfkL6!TWM6ʕ++E ϓ>׎; >)IW?΅)_x(_h_r뤧|ay֩-p0 ؤ"r&{rbdLo/wfU[jjXրojjjoiq%|oc"V %@7):FMŽzC*! 
/6iX_A;?&$ӱ@B"x։OF\>w"p`Ekɯe; L'6}3)"$N_K$g e v/} Pb`NS]E|K!J*EE={.洲򫑽w,Hz` ؛7W~k|Ӎn(Svi4Ǖ`W"5kox o" h'xgq8&@\-e1G( $Q!r"l3O.ZdF(O!!Pu}ltD.S9 bh[(HH.D:ᬜ}3 e+*WV"6ޫ'̞Y>ÈĺtsRqΫ+Rlqʗ>Z){.g'[IpjatˬCX ъ|ɦ9v*V{(n&t}Yv#r5p ?o?gd K70 f\j귕٭2ϾH[ngNmi0m9b5`5`5@Jd5`5`5p0Xdg'hBm&"Z€-[m( ggVAt ̝;WM&zFLu0lyL+ 낄 yhG⢏Cct0Z-08Vweb11: y[A$>xr",&x#Ѥח /N-;Pܲrљ5" lqk$Wz\B GЈ=YX'6_J׺{A,%/"1 Vb!9ci[m}.eIN,$RFZ5$pnU Zo*YfjGޕ `7Q" S`f` $1zhᢛX ./pUݲMr%ݚ6` eOARG%h[Q\>k3ȼETw "mkd!F p;W>GQDVRww|)wu}q^CٴI4X-_FciסLoxܐnA!CgŔ~Fc{ xZ/hM$y`M(6WV7pL2E[59փa̅t/̣$"+F.Q&Ӻ2sL2|Y,}伢%]TAVFZ8ɗy&Vc7,!5i^L8x&8PXEŷ^d{D/CCx'=69}oT) @a|>ʲ0=]~ɥDR_Z7&VMG/(Y_$Zޯ烎qɲ>-V.ض4ڀ{a/^V9^z@݌;3-׏mf'Dre%q:w<(-g5`5 Z& 0J8w}c):BVcpc`-i~t-X)Y&p'|2ZѽzAƫ 5@H`~Q,?e`-eXV۸a)Hd;Ռb[#%ɩ,4ߝ#,e7M(߉&TN:Մ9,tZ*WфBLZ&TGp&t[p+Lv&ܮ wj]{57 LW6ʋ~fxpè|>2Ƹ>fǟ0dNnܙ}aqlW_3KHtwo>dOό}ad0cd7H ͿNzj,%`6uŽF;=?ݺu:T,\ xV^=Dd@֬HuyrpwzOhsLv"ѣlƒ2'~*9L3"D{0.3} kV\;t\ 7[ts;i~f\C2B#qdPii!: ,> |n+_ϲ}xV9R_? 2yW+}@&W6Z:O#Gӻwo~i+mbP=XXF>L*ݻwרwpyӹ/ T`2adBw3fOfAwoۧI{Cgki_W}9yQowh6WEf"ہH;M>Na,`0UUX6ӷ[7B({ o"S_~9+L(OA . _FO5iE.O, bl^7Zʀ?4p2=#wy64(h-Z]V>3\-6%8.^NHضQj={f`u< M8 ƝSsXV8De 1A DH/\Ud~Ǹ=hsMƀR<1x}[?cKig;Ak=ód`1gykĈZXbV3@m>X:kE1xH_܄<< :sTRFʡhgj``ǰ֏| W1{iAr&6jjj C %]KH裏#1 %"??<6/>t)8Q7"ED0=](t ]=+tC"f:Ja48icYKݕҠ__-5@sAߌVM|2E\q,aE@9pr0~;khP/F>s/H y8=޻L5L b].x.ln~0L;#1!b/jX:5by {}+SF1RF2pF)_(^w "­wKbm VEN׭)U2p'n|p[ Q}]_Do>]>e+ |4#aA.ȟW%oD"+a-8W]qs\R4Jh)4HՕOϏ%X Ȓϒa֮lqj ܛxΌ&IWxkk-i|>RyX횮mɄ ;X dŒ3J&eO/T}J^p?}UoۊTm8$mRE17 daqJL"0n z-+BB>3<&_X"I%6۴#fE<4}:BY!9cDys)Nضm[0%c]jI?c ߿0J!}΀ςۡkÑ>Iݟ0Y}n5`5`52l,k`ѢEOW(DJ-JNX`![/Jcq >T/y>-gVƝ 4[vIgTYH0=HEaՒτ*UU+De3]T-y/dgp>\9Ãosڐքk׮4M+j nezy% 0tM&tsky wbB'D,hsYY5O㎟`ܥoV;Khz{uh;DUҷ~= / -3ה25K:i9o>ݨehE )s|uo? 1gMͪh#5l;XPEt:ل\ٱ*0 iAzpXp\i8t<Æ SSq WVq̧!ebV钆h&WkffE7:Z"+"i1\eBD/-NIzTisk}BݢyO%-,gEh)1jQsob:,Nk֦;\ji=GKqDLD{gMb6jjj5~VI\$!`_ApC@IDATs8Z$ޅ0E~MwXy__Aw=H ۣX< FfX&Ņ&~P՚I]wjrǸ,RpKMTyAV;j{?zhr*½$&]ϩMxˤ[@Pse#b%;|rBw3ϰP3MhƝ^c=x=(ܲ! {L7$J .ck}ef A4 |P)v;#+ىx_.5'N0e@n~_zR~"W(Ul8^I5ԱeaDp *q FВ Sųf2bN0nIkӫW/b&&:M~bիW,_ܴiF]Cae`Y;ymۦ}"b]ɹD.4! 
:gnt#/2-G9|`\|D+|K~YnF>pU?"RŖ= 4Ŗ9Ԥ!b;L 7Ѻ7WipnX X 4`'c5`5p57ɸb-qP !g ޹Xd}9|w3둧|@'O6d@BrڢL2n`zBsH;vadu[H#,ɶ$6VޥZLvD IvANn`Ye~; sDR{%9/:pIteAxN4ϙo ؄Br1eW'3}5jncfʌ CgJnw}_!Bd͒dk!Q*) RX*JE(I%[Y#T=˽3 >^Ud<>Lӓ`T~PI'/Wrq͓WJKgZaI* FbދW\Rޗ_NLnXʖ[-n68:od͡v`ТE4/tY{dq55ypUA/>' f>"Ɨ=2WjvSLBՠfay7һh45qh c*9R xa,{b@`ڃ"Y(f" cg:^5m'G'#!~Êt 6\{9͈@j5p@PĜ DЊȜrǶ]!yE8OdEg>dH\r%asE鹝R tkDj> : 3d-:+qM`ev33o%P{1`[4#PÏjajfyy=.̒Vc8A,p={޷i=맠l - LMR]aZ؈zz (@`Ԇxy8,|鶙 (wQ1E  sD]!mkTb1!_*i%( :_( P> ä״y[箮> ar5`v2%/s|Fږ1pT^c7V>D'jb7.c+5?8~l? o5<@٨@w=$eRK"q.&?1}/7eͱOn:n[Һ3;!cOrZ/3w1/=qVc-h+بYXGRe>dVd:K / f @&4UD@kgC歔 &#|pY4NCj ,BPV9gs\8fucmdKlhAMfy8:A@r~ÑaR9rcԴ"ϰReu" CTcUD[TQs.~9> -C;')Ͱ8+_>XJ^ףgJMmU׷J5Ju |.1LaÆEm }^Yw+W ?;cVGd:_^+z`O>-Nvݞmofc( Oqx_R4^yf._.P6j)*[]#1$ Pq;Gl0i_)j )x;?}%5 `lohqǧK\6'b2 nݺtj[F%HkCB ȧdt(C  Tš𫴘C\$@8nZBq x;g .xk 2qGh8w5G600|%cxxxxxyFob!e14@Fb)_j.Qk~YK|Vٲ5Sp,"xVHJbU*6+8'pOե+$ڭډUd@m"E-r_+gnEx,qIL^"=[EƎCiڗm T۰;uX]8KOu> DzmH[E>* w+7=&>+Ƶ,jL׾XVIr`Ҩ 5q==kV}JD.j)qHZf9~\nlܸQ:v(C|FTڵk'3fd&W_}5ɞ~ (^ziS6@gK-W\q@,^/<?`5>rXݺ {9 S.nE|'VΜu8*Nnb=K^RrBr\;+Ӭ%Kř0Ig_y|yj\6ĺ:p3000HS3=20000HˤvPMYҡ䶧ߌ{.HcH6 @>0d,XT_?b CNǘ,xsPd#};t C&n!/2`$-LFMo@hYF^bu t-Px3_O%o@&ȨW*ܿp]U<#zg#-% >f߽GC/^\͝;7ݒlc2𧭺{[.Șڕ_]Rni5}ywي^ky{ǐ_UhqY\û_eLjX |ses300H&0%m DDo؜ eІ„!֗-PBӜ9'+:aǻTbP0j\QI1\aw3LW2 /n iUjɝǰ( s"ƐQLM{z::A @!DCrk@d\҂K9ܒHB,OyC!`c`^d1q n p";rZMK`NI Q$iE,'y/ ?* {T K#0s:I|iNp:Ȩ}l\6=&4uxw[|)ݖ{YsX yL b|.^ckk4f/sOK2&Y[ aݕM׽@Y3^4/fS5&Сʕ+VXnJ6o< FiٲehbRj| PL9dv?]N]X̷ gK\LZi"$c3gG}^Od 1~4d0,C`2,] St?,LԽ-cF`{ԇ Fת fZ8۶y8cXd"Kʆ඗3s Yiav{I&65roJYV)E\1K*9:y'D^8\29xG"v!O8ExRA>[٫*{,e p?$ٟ@xJ{Juow:E/^~teׯ6l(5:-ZH ' m1k偸G/ 7Ð`管mu+} %y߼ d{ `fL.q#;Nq$nmt֘b.b5kW_V䷝rdP>AXXUʍ lEbu;T% gQo%jP B|nt ISBQlۮ>vX-Y^.h)WJߨQcSu ?n(=/"(7T|v&P _Ws|#- o⻶axW&O, ,E]$)z{ujtmnM\L$י,ZӬWC -M~PH]r.@,lʖ-]5:UӨE">0փYNU4Z@\q17@x½5X)lTy@كT_UΖ]ˑC1tcƌQ*T,oOD]WjʕbwE4aK,U{x=lxȗ :\bftRaIGIk*b a 9&5K?Biߠ' X;F!nMeGAjNxxx6ѽu4ƌjfÌz> @)@D:9K A$~!+je1"-,NPEt(42egŲ B7"h=@|G5u.#P?M'a|7=e783ہX7ܹSϱ̮f̘jԨ 7h %y U?(㮄`}L/2*])_ԡ\o[@Ñ:0@*ᷨsM¢00w;.SYfUeʔQۢu j1oݺu <6#y 
@ˆC'g{hLOh*@nU-^XKcryx+Cr gy@ٟU[|cdɭ-qF!3q?#Gf殚{ 0300008=n#[R<5dZu r%nHD(B*b(* *P qLư1͛*R_c! ꓱ87\]C|o.V[ -KCUԆ<B|73,nקiμoŹ(`[j@a#s <$?~6^'rՕ^\NJjR2X…X74Ch]^QXoߨʚS+*|/4o͡C 2w!q]*嗅7#%^ȿ 9e!VuӥuR^=oz?h(Vubۑߦ32ou mo-*~z1_B (dI]T|= qÆH-KH%9%î+^zmt/5p8]^c]"׶ _RFu@VZ<+ ${nt/ͫ`P֦M~0ZXdNOU_~p8ls7{W Ze+6\|8*ؾ}+VK)uD|X^# ],N{E0$xZt̩?}ڴ:1ۆ̞M73:S51(NPk`Ye#~#UFۘ=LN i_Lj 6Ly}.eiA$`D:S:N6"6qby/_53J'ˁߧiVȯw(jhQMV_˪H?7vGqe  :߈aB7Kqn "'?~"]v/" y+3NCɆ\SG}}sJ߾}{Ҷm[A|ᇡ%ws='#Gu]!!CH^"_V|4)1}&{W^!q~gL{Op_DWھ=D>lF.lz{T\J\L^0%W86ԍ~ XZCˀ߃Bߣڵ)D>%j$q@&AkЦgNʲի^Çːm[d4† Z (e.|Zg!#uVr9y=c$ 1,URScqrNzZR?7uc3!曷"YhUJb$#<tke\y,RM*XmlEjʭmď)j05s8bڵBO?$UTx a5xqF&`OkXX/癈᫯>@'L7bV2Ԍ<@-&NH5*-J j7*#SڐӮ ׷Hg(ȖWt.:5((#!j$w23E*(M3m{&9TQjԄ~ٔZ(0ISdvkx<8 wң)Iw;W1;тO?Z6oj֬왳T 7/(0SWudb[{K|l u-! 8P饗}jԨ^SoC=5jap٘u!иZQ^nk}e^dorEQ Q,j"r)w*xMZꫯVmDQc*ط7+.eW딳}f{:N¢hmBmAٸj%2+k 5;`Ūu"[|6+DmoS:^Z:cpO9oQ/u췇i4gV}QOZWiqEU-*_sW~|Fe&^ @~aV^fsFzo '~5h>$Bp2'x"wu>]6~x={~Xp fxxx =`D3ڣ=!N)H|%ŝKx^[ޫ'wߧ*6n_3i~K*{4Z$BՐ/ KκGaʞ0)U+Fӱ'^ #dɡ'Wq?v옪Tʒ%Zزu8J8~ @m%ӧ @N[^zQr~ݬ P4~B<Є 3{G!M!O_AKW(p}ȤHs8'Uo>/v%XtIu]VTVC|Z`oe&ʁ09-&O]p4@W Yc*g(P[lOgpMgk\|\v7fd4CŊիfb.)~+G'4>kų ԢyQA w:p|^bY}1ĢS2}$+Taҁy );uQ|2E&}D⬟7s7n /68yEgd {d͚5駺Yh_=|gyF/^<ݬdx300008KZL>GCZ$y/Lyq&ŗE-Y&V[|ƈ*G(֍9h ͒װ-F|o&>%mn5ťiGOtnT{ho> ?ę=GfŮx(h YZB[@|,Ξ @"u7j!Δ z?$ dift,C* sN8b% R4C+a.VrI6EXBq;_ƍ孷*|6X0|!KHcƾ`I*UGo ]g;XRIݺu#{BG3F]>~I/[l+ٳ~>dmCw400HM[̙#<N`;h= f"BxBr"ʩ=̜%΀WžU;w'=@lں^ >vIܞzߦ'uE`'sE !8mVP}d;Ӿ 2a5,L¤̼HcGl|-50h=S6:Y<7{}4B,i` ?rBb2¬k ii]ve2`@eKWTIoHŒMn}JԜX"/UbB c1 '|Bhal_7ķyTز+uv=ߛ?SK:5m]Z@J@7;oWP@:e.wo:?,ח'|R4hD߭O dZʧV'gHN}5~@J&x(0gTXG@e f]\FKNC}ɢ~^'rq$-Pi1!>A)ipu3ryYp$xɥb_QY / c~N;CĿwĭY.ޏJ >NѠ58ǘGsq&D;9mXc{}+ 30080֙ixxx x9(0 t1%='G8jIrQvE%.^FL< &kA<7}ZbU]!OFﱞkD, hȖY2$LB8wutM*dc>v}c+G&?z}Ͽ(6&NDN8ȪU gۜ!J4=f Ed9ē]d۪Z9 eA]ۤ'=&J?7}Fp~RZ5!k<Ϗ׮/%[8V`7t,'g~-ޡN$K (I0$4"RdD?G(Hۻu8 L 'n?OE %vxQ  ˾WC*4.OzNNɝH,"}y]-{A>o  84k Ƥ.VY i-v U@ō1j( X]~%.N~W"'U_|X~,f7|#M4޽{ku饗 4g~g\ib29n2M}NbW.vtl YPj<X~7imtQ{9|Y``1 !{R*Q/ 1S*^[?"EN2~%M< l],*>Ѧy;+W.^L<9H`jܹRj^サ͛cOc. 
&8oֈv3<zWx泣(+(N_5FU Pٹ3gs#ك k{<uy?Z2v| u=W䩊t.BFW9rD /GHF~+4(~qQk4"Ueyq+T7a[*UfxZ ){b+<>/_ZV=Z#pz BܧXGce^Ww i]dT[@]硇6KO=T:Tl?/ձvj LN }STqjݳYCSS_XVUKeUOrMO"ζt$,/ʋ>Ά #-[TeT֭u=0N|WBǫs/$iL=,^~j#>ZG-'{]R *ؼ0eV9Q'y @P)Kq{j觬YV.hE#M7+{9z4Îy56j*FHN߭q8*f9 t+![ *;.xۣ&5B]Vn {R]{ `p?8&xQϘ@F{/RA%cfZْYdyx!6`_|iuQY!KE}=GfmRnCT*B/6Krf+gLn)/C(^t:<5"{jVU*ٕ7,ruҾ Һ!rc$J?s79U|74`UYzšŊiH`n^z8Ɠ oKKg@|x IJ|&MmNS3k87ܬuCde䁖T8~្:X,IM0倗vlk>A !IרC \Km%".bm.{Xk.JF[$e`){Xq"vQ#K>Bz,?} !s( @tg/K;lkr]wib=xkժ]!z?QLcm 1P~ i%ozY灀&. +}f K"vw^ٲe;fa_ M^ʖ-u~+h5ȦwDľux"o޼ Q+W*0,GO e8c>$ȉ߫_b5mF[NsclL$e:otΜ9*w?/Y⸁{}E=Uk{ Yt i7xk::{w}Zc @=`4T400$ae˨Qă`}'ԥp'-ZN;/Ô( b6l' MCO Y, a'PqA3M5%EMA}N^ r8(ɽgƸ?&~FopM|-N]n&~.6a&vD(`u(gh7A') ?`1QCߥ> F&Vom'op$Q/ZưCߐ {~'`5Ĩ|A$#O׈ mrF:_}6Z>ZJ*ƯYcbזY3ToI# p=#RBM(\b tB%=r8O<-j4 T'+p@Vyڵ~EBq,yIAk d "8m "mUT drUpο*V:[_kmZE? n`;Ap\QBիW:uab|Aq{GP7oƐ5S-Z;vh`c Ekڴ,}6HLR>qpXK~o jP_^ RKM/HEHn,c"ɄTr܀xXq^I,l.ɺl.^p?V{xxxt=kt0MDDx!HzMNX8XNXhdV)Z3'Ծi֬,h[`ōeg:T mM7ݤ!KZپM{o@`LAkILmL+EHu3dm޷bC +蚹TD&EcV 6 e{0S7Qww*n$ ^eϘ"Y u=_/>/6-5x4Z%ȡq_ tʖZ*%ɴкߠ6ȧ;-5/֊r,v:oKH5şbnGr3qI0f5p^VI"ۧZ5rĪ0Xetb묖Æo>dݤAZW#Fz^d[y3q5׀9q "[fNJhzEX=mgJ bC{m[2u.K;_LDgƗ4tsE$`:A&Ov,Y2 M="̄`΂^q𡌆ڈxR3C@Z,Vǃkwx=ՙ|6+{"WX1nKr]fk^e}gG4md&3c M礅`S,̮ѿQkE WÇh$ f'xBrCV8cH v`KBrF#^°7m W^K4;C,X*1߷_ko"{QG}f@rI&a) YD Z+ΘO]9r|qaÆIFzGRoED=\Ϲ&l(;nXtƱU(JVM V{1vOWBԔu4~?|2hΒ8ya-N̨y'4LOvĉu 5\8'wعs*UʻIK-,]!3-=`hp'f'әs)ax'8rFϚ-ҸqAhG9-ۊ5v 1g1[GEM!-&&}X0jDopX,Zz} ZrGY8kټ%۰rnر0!f!7`aL 7d&2DV\#E5ZRmr9}8S` H\>R%8 }.3/T#HyHψMZgD j80SgH+*B$U B0g@ˎLwY@36o^. ]d2 ,4 dGzI @"qO%/_Zc5G>ϦF2>@IDAT fڦP-&X xğnSNՌvi9XzC}i"ꤌ>P)PZbxۂ @ɭYBf.ux2c6oA={Ʌe/:y|л;uʒe:\r*GfܢO։0B`zd(!orlrPQ'ebRt$; OV}hPŶA)o9PXȾm#vK@ܒj &Sg22iL?uuHm+),;ï)IqqR]lNe_ M۴v"^f[Y##u;\a-91$5+z!~,X෩[O8)Rd>d[L]dn \l, H%gNa: 􍖟RU#ti*}%MD~Q,8Bd$aeec-;"3 HrJ59iV.@‡}]|~|ܿG1OzLh=t63ԍp"|'jC=Ug iPImJ0oS^DV{I?Z@d Ee{`}M? 
]m|&bfsI K⹲(9tԒ{D1BptY2wZyᇵ^!TYA'5QE@K/B_dɒKpZ4 3gTfN]-׻ᆱ3F]10ـU 7ޒ¼זc7@o2u߭`i2t4IE~HT󲪢2 Gv&wwQSoUXu^+KuL@fƌC qsc VL ޫ˸ n,B \ L!렂^ގ7 ipU]v~J ᇺ]2h(he˖)dBrQFjȐ!~T@+TS"\/(5#9|Xʖ׫j)g/Lۯ-T,x~WoȚ[*}!:TH.4i̝-Z$meU3'OuN8|טGz(A{;BNRbWĒdQGįN`ekb[5WX9x68K't>8u6\qjlQjg9jjO]+h] {Ҵαl쉍ї~7? T re >t9A5vX\ 2*@Rܧ!NEjP?ljSY[UJ>:UW]=0} +=IRxaƟbiVo3+"s ٥F wBKMAKE9`JH¸ 488Nx.۷O,KI:NnR63aBB[h F'=Bjaq@X*P-f š8.{`DqCF!1y{T,'vr7,93 h/Vކ4bQ3%]^GrnD;#Ǟ) x^׭['N^7 ) dv[GWHuC(uJbyP/KG(!w?\ &jE8X{:Tz hnKj$}*>\Oit-5}*Q߆XJ˙ṣ^j/D۴i#`i0?_0b *O޺%0u*eMJͱd TQ:ϻ7 ^yx I ΐoר^q0¬,{6tDL XQc!bƮ(usAf:Tc"TcXV$7yrui9٬`VqH2T \{}OZV:Cdv5Ye|>uuMv.1Q` 77^5xŝ!*Km,'WtyZcI-^?kF즦L 'dc䤛,^,'d{av6Jb!⫁j6Ujփٲ溵NwʦZ5nn9p{Ht#sGDVڒ M*ZI)oÿ-{AߟRB(UFdɵo_+\R)3x<%@ Q5YqgA4݇~@>λFɅyRG Aɫτ,'>KQxV)-ޒn[m#O A=4.2[*-׺|JԮ] _'?Wb5Po*n+V8.ƨ" 5#n4P6|Cp $ÊUrFv|hh)ӲpO:X>)4008ȸ_s.I<0gYjlZ]vՂdy3°hc= @KƏY@Lc,'@2C$BŐ#[L1'xWAmdS"#P7Xd@#5!+aJi(DZ}%2Cu=)FBoG8y,,Ej]-v˶(/?#CV?\zI1mCB-zז k.2.Fu| J/_0.RIAmm.`vgfH0‹'ah/kdQ4m[ lQ.7tt:_?F٧$PKR;';1Sɕuk&&Ȓ̝gN6ŵN1c 6l`N%3{gite">x0Jiu)譥po!C‘]v/ٳ-1[(Ug[x~0Bl7:uԺfY 1NU ! E` Tr87PGX( z,Qc u\(M{ ʣ>*,FV'qFͼ5ƨEC t h񩧞QFi/f cd50M=믿W-tvFSg!{W'kPKbuʣ ,t>+@6i呙n 0#ȗ;kP1ch0YzI_M6Wr5MVe'+/!S DI&jÑCaÆ 2{lJsg;O\GJ+N]pիWaZ&cz clAA)NiM6ՠ ]Tm& `Ű vŲ"AӧO ג-vt{+i?>GfUnWid`yۊ^˞W ^ (:,w==h'%/|\) n*w]i#/(*?%aIKROz5:2WG#2T dתUʀW^BY+ }LP#%gRbLj9Z.VCGNVCrQo- >Xp!XWM,5Pg ‡Z? $%Vb,ًRk*F;/| Ow\/թ@=/ pH۴9@=`>ƌ"jk()zEOb(kh fjQ-y"?,az#E +GHI_zE-[?k&(i?8p}]Jk"~zѺrZ&/%gRIs@o؛rW.<?d0I v-bկ;rRk?[6Ͼ A?X:vtZ9{8rJˮ=W(,yK^-9/Heҕ>9'%߱oqt np C`l3 ^y;||I Lb6؂15xXzcߴ~ @'v`Z!<0p7Z*< O"^S*:O&9U][9vor ^ Rqɏ7[׬j%Bj#Vf9@+Z~*!Sxxx x02Ʌ00000xpDv 0D"ԨQyA*nȆYˣG4/ư/h` 06xw}5I|7KSw1fBF{S m!u옅к$*!{a.]H)֙!܀Җ#wmꦉ]C L AhN!vA׬҃=uu=|UL8~aV5kE^P=jBW7t ``E,Ɨ\* ?ű#;Ɂ?哽k$wpH"%%5DKt뎕E֕ "|>_h6vc-iYTU̓C>L?Yاx%"W E?GBNɾ:QvrE9yx J@ܫdt&Y2|fWɿ>:uE\p3ZZ9`dI\^aUi7f>C~f˯Sz?0sLON a߽ޫCj4M. Z`E_|bi&X8i4W3|bICҸ]\c &I,0? \ O/iuX{ETUǴ&BUP2Gx  %H;_\fhY(~Xu_X]Eu+j\XKGlK]`lBȩL˨Xm&,ZB ?}V8 {(. 
J+SW=9c.ڍ1;?7+/ 8& b֛#J`)lp{HI)JWև ΁Gǝ:ly_10xs~UB?0~|DbHдnt ;$Ǟ~B*):j_[P9%2o<~\rD tinnB ?;u(N frt`(pG],A]*'F>ϲ攁x,^Sx_{(RGgZ1o(h^A,g=cKsVpM0 !00QVHhc|zmxt0!z_Aկc\@*AʴAK&Bv=XHEtq Oʮoק3ȯy<lߤ-}8=4-fGx!El/">d3 9O3QM6;wWL4E!WK*`FH*MK>R_Tl[ymeixZٙj8QbA\"WʒK5h?apcv<aZNvp=z_7 &z=(!ˉ3&䰲Bɓb O!<_p:t^8[ċ)OR_,b`jB ZO} i|g`e$Ĩc֮G5:S cɹs-sX (5y@'v3NHM.,ȝXGUO]~ԿjXncC<^ :~SH+ q|U*Kė EbWaDH4J Q{ ިGD1Z<[։Q aajV͜%0Go"cld>P죕]iC5L!#`DI%,%B:!cvDra'WSŊ"[s*%C2Ϣ]M尪M=!I+D'0S!t"(gWFM0?EZZI1U->7p=z.Ep=z?ၥK X7o WXQ5kv ǿZԉZCʞ1 o/ Q`:#K6:UdG trDܙ O? LX--FTaOee(&ya 0fֳ!ρb k$z? LZ%vMVي:ʳ 1:jԗg?ǜ}x;,dIynQ?FU|ǬMD ?֍j޼y@/CA,aIf^éaw_NVwOO"|pJ<Ґ%wlЋ ͒%dD퍘(U7X2l3 bϠpE40$)hbxVWK'd͠5(-..+3C\z_L\p=z@{1a&¯J͛'=^Q{ʞ337-\b[=YQ{~q-W!]s=z\y_ݫr=z[5kHnݤS'O>DE߭4fE|饗Zj7 izժM,7+m&dփP7Ľ:o˄.r`?hsЀ3b! xD_-ׇYj@*P' Pbky9k&,k0 z`~j j DY~5'n6"3ʠsgSҩuk \S&'۱ɠF9ٰ}I8&U#9t~S`#QLܐ[=\eV\`ܹs50 h^4M?ND,СJΜ9PBrqUf&]tYE; =>]+es[aE)2wQ&mz;?f|Gs4iR!33F'_4jH:v3z[OZ\eFUoA90/dl1C+C k |ƒBMta4vV-b {va6+Z{ oZ' ;ڶ]'`Sk!\<\s=zcǎ)15j[{ 3z7zOL2]wS`n󋗨pUK"^kPn|m^u,ruk5mFR$I9Acs{u7Ij(yn:R]s1y*p̚=71gUdZ)ߗ?["Z8gՊIՋF~lF*oʛ(6j5ZTCSgըfM}.BPՎ;/lٲ$trkaO{Npz-͙7o#Vm?k֬.e]<nݕ%J2"@=hթÇ'2I5.*QO%NHj3uY ]QIU{ȤVR2Wv1ĺ>8:7->kgϞ*O&4iz*=O<۷O!E>F̝ MBϋ5U=߽SqŸڳQzUTjbʚ13poKU_՚kF P}T\ͺ\p=pye`_v\p=z@B<@2fUTllٲi1kG?OL%QZRlV݄gD1<D+V,Ѝl%qU({A hApIm.'ӧlkgGF֬b}J"cd, e+UT1d.]Vhxɘ.-C@F09A(C7"M# At]|-]r.A.X%)@(zL_A̸XS_K<^@UXЩh"]? AGM.عsT^]7khh+Sfgf9{._ b%ui!m/EN,dOcoMSAcrWX&sg43g~hi ,I`.>`ZaZzOjzt +\Q3 S}|ɺ.߉`Ni{kcMb M, lu򇄴c6zu[gd8 $nڊpBˌU&M|Idf1e312eJHn\MHJ.ԋq2曚}v]rJiѢ }QI"֙a+7iDkPXfr{4!3]v:a0+NH|X$ʒKAU֩) V)پm@4cd]paj5gL)p/YW(D,baQ9>h<+BYXD\"?Og6ֽϙEfAO|駟j,uA5[zHÔɲi=E0ϔ>ݞgpH/Jzή& 9Cz[ %,zhFzGO UL2{nx65fn`!  bF/'39E`;m~A69>fL+c0ybg˪*[!.eix]x\$fdFY$c)iJԦlDf/ Z~?2 aÆI.]q~S|I6llkw3d Qzi#V"_9 g8@3nLzP̪PE>EOM%bfaPx1QC9M~I-A{`jľ 㠓\qwp=z( M㧟~Z^~e 6; #pG#0Eg!`\lزe,\P/(IԐUݓ  #X{(&7j sv{t%A~E|mde?د|pm۶ikz>{ͺ)ϰF&!~4|}nQrtX @g|}Ug"-_5u:BUk`C;غA<;j+`!;oj04cw1\WVe+{`PwYjF1dYY=a4#0%.h<^|yݵd {_\;k BYH{fHǰG L0". 
X:CF9{aP0h1>wes>wYDaD#@@c[iK!Q}[J@(|JX0ʙYzb`\ܹspQmW3V:qL=q9 Ahm]42G:DYC4~=]7Rc,wʼXȏ OnsαɄ@IDAT[^!j1[^[rn/-ٔqi \ Y6$  )Jp/fAY=4'XdRїHe6G7|eD=F ɓGGy %q5`pg?2 "E @W 8!1W-RvS,"8N\`E p;ot}r·LZARWoq#m{ږ|0 dɢQ 9#+,;O-d"EP6x>2cժUzi0cӦM0q]Y!4 y62T qΒ9c3A F.I YX>yn"4e^yB7ZvI˲'%ņ]п*dIy>KE))ѐ)-x|oW#S" kR:AMBRk,eE @P h>J>yX_I5wh Ȩ`?${W$h[A"|u,X䝖*ZXؘ1cCd㳼 qy=OFdPKC=rVyd>W:c;vwcd_UI\."dd=S~ߕ(!k 0 :+KҾW=Vf}N@cg/VZwp9Q>d`B6I,߿OUSdl :TL)_ xxMDe1 BzMH}&1b6i ~9d :ǹ}L}i-(7wʗ~ (փ$7Q"DJy% X |$ Blc53d>]t~3x5Q?C}y]dwfr6.sw\N{]p=z=F`ȼfVŋkI&dE1 Z^gzzN%D]N:i6sR2M=rN֔Y"1ð9rŐ s70N!ÌE* \g!_L{_ƓOI,FrIz}> ` rw C8F.܆Y:-Otz޽r mb.q(:;NUy= W p PwԤ(1#WN] A2<^(VB:/q+vv jFAXW AxKb3kDm"kS [ZJ3]s-3~y2N&>=w,JeA?S/׋g /Q'9Pzb& {[9&N&ٰaCrB^Q5 " `p=zN `w\\(jm(Zg<\eT9 Ѹcܧ9v4wXycXe*y,NwuPe@ɺ,57 MM`:  …A3f9+jC-05=B1Ҝk9\f{ YVp".Oü.HuUb{C6 }68FXd].@`cϚ)G3hq[ P(FP'fndGjj22(*M#Mľ2,jྣ 0~-3`PJ%f.b/5# >1`|#o~4K^t^SDaL`sx_9H pP=-1-r^,$Bi f R>Kz>KNC@rjsڋ0o0΂w$9B{B;dɃR`.dL%r?$&(g%Ibpnsmj,HS.9Xc&  #cs2ڕJl'j'W T=``/KЋ @ TOc&5jf/-xWQ]Ōz? [K5G}M#E_Zb~E.aaߢ% ԪQUl1/WWqqECӃ $KM2&p4ٮ2{,]ểZ># /uw?W\$:\=`~8Ekqf3Z'!RU 1"(Å"Ťi~Pid7Ǣ|ͣedtÕN81r9 0.% Oe΁0gDmYJ5!C|)EWE9RJ_B!EO>ZNpl.h#bP!^(< |Ld "7eH垟-wQS\1q!E`z^d_r,"@CL`h#(pf*cl`œ'OHV@ߏ?f]-((ϐ5C^XO5/._ U*bBQ ,WFKjkՖsT!e=3[ F`Qlmb!B }^Sk֊CK8s%bH'%EGhKk\['C̚Zw{O>#QSeط owXLłw2Ã.'|/)^1;Dj)idf* ${_eb 1(nE. r2&-82s!KKʂ?mC̅}/D=l}ɒz6Y|_,@> 2S17yFsgbu)*:Epk6G3s[1:utaOEf, IG 7I/=W3%>!PE 8y]ֽ{w>%#I]O1K 2c-߫i9.\p=p{Pz@xUd^:k|.PKbuq8%}&Kj { eV?ӪQ~ӖZ1`8I]6X!N20>_"\*,^u2y2dSy^z0Mu ~?eTW|}$Q;Ƞ|Z2SVTSt[ FpSe@,-\RD(Թu׎E>?tn`XO@\G>O!Io8>Cgݷx簿qs5c^3W>OR47k.Myx15r>k e;-=OY`[3{[RMt ')obj/9bIߕFSyLS hX4Y8P?*TPrT{?͘jRaqN3t N\(xhX"y3vZ],]"dgIb擷#ʚg {@pک{*.xT`s*qI}]f5i<mMϭk@F(k6.SC7 uIS+V]+B m3Km Xz?t?HUX"$.>C߮\c=3]s=z !cŽT!/K) :c"Iի:m@kt\>gv=b!tʒKD}0IBgp"RO%&7BBQ#C&eG9zWmbOaqd²1qNeZcX(EF?fR{H)2l,j,yk(h'9!dh9Uj6 3:7;/^CHO6c? ٱckr vUViX8?DM{dQf 3;Ya}XjěbelGENk&I GW1anL)vBWՠu,\gkb]$?$_]A5濤AT,N=֣G(u'kO!1}o#tNeD ԂuIIɗ%G#Cϱ{XebX)̬J3D?|`![ -YdMjdjqp4' !.ό)b"|X~Ye:"S* 1߅FBY]ʔ]jж4ϡ\Hw>/vjbeÐ`^QoZ_ 4T#k2c! 
g8CwR YAp!p=z{1WO|ZW af>"';w J_T#6RdRcku3Hff=t0tҁp OAesDyQڌU%rA=9Y v"@/^& 2d}mI[͛Wo`o{tD:Cy =+pc 1S?tQsu-9Z^sbe5m)E_j{D}G,* D*әR4[FYز,;P(zd~h!{a],Yr=6 tQjОN АٳꫯM t£gx̋e'J}jjmP߭6[l}`MX s1$AnP#O^?AM(Ky6%y(K?3,^g߄ KIieoJ&"wvh+C:<51JJL{i3X dYDywh9Bs쁅_HtvFj*1!ԚKۇ_<&|f6xJ<[y|Ͷψb~2GG*Kh {ATQlGѸGwbEzm_~b^bh;ՀZ>/|nieXcpF#&2Yz;cwcw=zW[*hwKhR\m*cF٥S\ծ:Iv/X˝ȘhP Cѩ:_|fqq 1->Sx:AW?*#׬`236m%C/SdEd]ɮݲՃt{|b$ʅ3J>){v`|a>\t߳ u?TժʽkWJJO>uQ2g͋Yx-dȸ!{!c+0pV)쉐+3B 0.M_iHcAd?YUk|V̹~iNH_Ja-|Wےֹr!SF$8hQ]LVT3@=9|s'5c!ԣb}B.cи&ǹwy6 R*>Ѐ:r|lq#GٌrdkIƔ^Y5[Mo4n-oIw|MgMk!=,oǰ夲e :߷k$̘I&"I&}LF& O?9Gvc|&d\>7|6o\\dcQ{08;sz;.u'-w\p=p` 6Zx,㑇Z!#X7|` rste2bx#˅3 ._\bŊZl  TȬ啳EjGv Hd2^;d \j=ߞ F&b+hq^ ,CޱK.QΧN* V!*@MmaI ɰMLl(FB& _hZ*j@/'%$X!uRy֛l Sl<+5_ys'hpD}֪{by P4*x>.?sz,l!0;F9E@N˜1~i#L#ĬQVtd6[{xQsYxnQKM*6GOՕ+#Fh6k|CMlgAºz\xWkr=z\2<0 5XQq|2W3Ӓ3, *3\w#5D#g~… 4(\ʨO6laC:V;V\sj Z:AIRп_~'{ u{au 8P༹5w.CG^t2y5B4"&+=hBǫ@,7$eݳ08r(룏c f` Y}Q2k![ ɒ% Ӥ.ZdKd@GkUÝ˟!0ɮ^IFM2Ir>9Cʓ@_ lD1 1'#N& &ZV v}`\ kU*1G Χ8PuIڛRh3n%/}n% `j +|P3 ēfb2tZc|x)J01{{ZZ&E@I@.hp9VJ {然w-jzZA#bV̑n)ӕ!jDC"ܯٷ@9>㻯qJϖr[IKWIm3L(9J?!AځԔ#wr5FWoVznH0Ҍk ݬ¦ FC4y\<֭ 4`-\ ˸xB Y.Bո ?]k;7>}@ !f6ls (? '8WDFFJu5̸g>N C>?L9-%2bƘ+dH|qLvJOv^v Z!B2 #3Tp2KfC稤tI?{Wz]C*J&yOecvkWU+Qt4~̰I| gQ%ɔJPF6?|r-޿ʌҥ 5}J/{=pRjA75gϴ[g1"#6k- ]`njJ%_a dFrl%Fb=ldk.X[DD z7{ֽ6\ ha) ׳5LϢOQ!yr2.A֯_*`!{Y…:|r@~~)T*ÔƳO߲aCO`Q! ?O4nh8e`F8:-CЅbhPEAXK(C'0F WKuxa*A:ЇSiٽ`H!!EQ5u \/ X1{&2}-1\ P%VBW c_LЦM6l+-y4){BNS3؄O+%KevK~1ÃZ茋d1c~!=GIf"_'C\rdanVJbԯ+&,YbK>] L(S/KL\UFN|WC9rȊ j@s׃Bݶ#2w"hƀbܢ9Kv/M|Wk1J [xw.F"F*~6AΞ%j)(934!P;hLѹsg np=z?k?]p=zw,tc|sŮos9̜z' 7l5qZ12=wno x2A3do?ty'Gv1Zͳqx6Ĝ5MBzvp~ɔ dud.P=~ -s m22L`'3wN oAVV=  m̛ E]ҥKkDW?8$-*|<خtD}5v9Q똱-QkFLD֬_wOZE]Q`/no<_0)G d1[ܹ-*~?-I) F9z B`v-X@vޭ=FzJ$y{e)=TwIʿyI ~IR_(͢O/22sGZ15!h=fU( 缋Zj׿I'PA`#X2 0>'1A}I&+ NM 69P~_Z LA K1-6y>'v]R3bĪTMaoO \ωf (-ZDiacw[zu\zNa喚6mZ`gfULRdUX$)J P8^hQ}O<ޓrnXTPQ֭̏[DU>}4 :#ٹs. 
#T`y2,uH}qth Yt{$9vo k{[:4ڟC ~U5[WxԐ,e[` eʔlm`m8>Y$V5 R *V iŲ^z󎤏oY4Mp&@iDM윦;uꔮ7aH!.#8XZ /˅Kt2(z)Š#_#1zל&h?cP $O@"^S ylj +@:&bճב+}ɮ\/@ Xݬo/_ێ1$Pg 5nHV}Xk,U5T PU`qO4p-+Fܱ-WE}zzX +oيEls? c׾@J\(o2P)p`}ZS?n1k8͒wP$y=˔qsM@MkVt=z7_ͯs/xɓ'댃UQBQWdd3-e:Po9Ff,k׮ZUV(a9Pe 3Ѩ°/jҼNS۵kEPlYi ]d8 `M#@ecDioH dٲe4g}oEI"ʈ9j?}PP:؜œC8@ ˨$e 2BiH#vąa1<ѱX.]׻2{7o#:u>6sLioaU|:WHC$%4:L2!"̜ ӟڲIJ"[TnCjf1gL!n<!,*S{уU.O%߾C^8PIj{B'#]:^eG͠8B:g3/1&J~x7RiW!VO3]zgq7K_v=&aR7LeY# {֮(}Zjzwn9ab'dz˺tL<Ӊ̧2Zܿfe$px~^qo"sdJ_$Vb!$ڞ;O՚rp=z `ݑ{XSX0(Q"pɢ?6 TV-G(KnPyѲn:ɞ=ԫuk ͛s"?z"#ߊѧ؏䉹V 3Rc˧=_|2)|z>pq7_zAMg*DwGʞGqJFA8&V8mss.#,\8ohɓGҧOA28 hym *$ hfQ :b5i teؾfGų3oYQjѪ b{_dyM3q  (~}W=>\c<Xw̭rzecVlJC$ӭ.'.\O+$ f:sXÓ;b3"Hr}4X[Ⱥ<n|둒XDŽ' \ !WAoLwC1W-[ȳb8p9=z`˘>' ywGE8P5N0cF4ǜCo7<-8ckdϮk υ~pzY0X1_S nA_KfdwXFUUREd :ưW]A&>B(CA< X;a|m tx4(* wٰQ̑ÐrB/bWycX衛=cg}h~$LL΅8 HV ;͠3:^SbW%jF,bPEavE^啪i2uގUȬ% 4tJhƻD2ܶ+Tٱ `/m5wfk5sdi3B?_!k3fKm^e ~Ֆ[z.u;wL\p=5 LVmX`1n!@%ԚZbdΜYJ߶m['R 5y rꉟ O3~.\T)>ϧqM "q/˸Ѻ|߽M51J|YVۇ~(ƍ0nذanݤ@(:x_\9STVM/p3fgdD_*)SfHՆ Ђ0sxΝdӬ jqld .{}R27t"8Oy>mzn2eֽ`>Fn2dS__Z4<\Q*:t 7VcId j`o' c 4M_C!z&p`'L0 {`|PzgZ1{tLzvAAy(xOGȬ8&!;df2pQ%dHZ&V]1n@l>ڏ2пҖXގ GιFn\ܑ,|B_Ћ:tz735pHrZ 7Bҵ0.yxK?uj W p=zx\p=zfTFRQTTi,o`]kX3c } `&v_~:#aR)dV PǤnљXEBt 0g;yU^Ukzgaޛޟ-X)CYgoڬld3B236*91JG5dM~1*_*wfGrdܳ^ }7/lo{C,lb`~Ç+qШR'NTF}QHtyv=Ϛ5k>:܄ ͖\N-ZT?3ygVZ !|f @IDAT\6 PYѠخ}0c?=&BX\h޷lUy|:)nf5 {~ "gq~Gg'(uphuʛ!z)ա gZ3fY1soʛn][ GV߆xOY]~\k 3?bnxSݧ|}_wDYPޕPw\)JEB˖-NdHRDRIBPBJ{sfqUd{fofC /^l8Ͻ`vu0\7|=p\yD&LwN@ jL9vh}DD^),='Gҵ'|]uDh%.&|eSL*6tv6ALbq̴}-2י&0@˩LK 0*x/'`!KzΪ&p{ USog V(,DSaF#/MnH|eH4HCEӟԭ[ LA1n#:QA78 Xټz< "zR5u ~`ok6s0j3sV~ 9p+hDd~/>gYbv4mK\rraEٽ}e zU+j)t,h>Ն.#m.gOw#Խw亢Cɧl 7P+}=T˸rW43 y&t`ǪB'|_  J\$/& 8ժrT1Ռ*Vy@hmV*S)1͉uȓY*EΒ̙ 1%l RʘY3yzH叴ڢܬY4iw4Qs Ԍ]inMzGKRTN/-\3m[L@XXHZj4b8֧?ɪ4uw(AyH*DKi$ӠzH>+Ddi;)ZTƌgtCϱ,"72T/L}|D|Cg(;@AɇN0Q{fUun.|k3gGIBd+6I3`gʕ䗇FYgˋςRG+dNb 4q йH(JR,e&NP1q/@5xz|Uq_[+gs"__icJ,E9Oݨa]Ϸk#Q\By2ۇO&;we+IjMq}̺ԯ{?]G%<;_,((O 1jf%RZb"PC̚ "yI; /\0׆y|b/+Ji0)tV y;*/ Ʃ.8nwJ!y#-JޠA(H摲{ZWl?*^9HjU|_$ u֭[\JoDG*8\lYAwx\, j7{AZMcgSw ʝ 
1Z|h*w̔'u*tYuq1޼w]F!~_e K;^Aʌ{|=|8~d1h%D#Z}!  (XJa% YA~! <+Qs`v뭍b]Rԫyci։d?Y8а!@,Dh<.߁N1o5bֽ.ǟ_ YF/~!Fp2`My.\Xڴi6S@ePÇԩSx "Jȯ< +W֔CjDۇc ʐ+ j2j(a=Z4TzgCٳK9)gP(={iag@B̩-Y4RTmd!NKֻ!зsY{dڊ;q"|Cni8R mux!"z0:Tp+뚶ba}kRTLr y Ux{>|O%$釯%O$I0?VkZCDUv F}edӲet?1U޵ U"@al3D ϝ;w*` ɠF(K.ƌ ҧlrĄt 9Md&xE}x8kNJ`6xE1K6W4mԻ7.0UEt%OWA1_@~>in hSg50gF8}؋g6]%peOyTӟ;Ū|FS k*imTwtI钑=ry,d*2-o#K {ɛo) 9i+׮]$mbDrjs%߽'rF~yx{[=/v_1- l=FԠO۶8gmb|={1JK +fA09>K]媎sqk.l 2eAČ?~ֽ+ &MŒt) u H[0M4L~_;W^M41HeS5RJp/E޽fxٳgkwyk<-Gf̘1/^…z$Iw]a$a.zS'oвop k"Eٓh'ʇI@ ħZ_ժU t׿j zgNz+ |^C쟙~<\5Og3gc|n:t=moĉ geSN-Сٽ{*6nXx}PUiIN2St҄\> obdU|owf 7Yz%%6{.*p)PPegٺuk\RQ{BÆ,|6~?Ǽs&NNռx ->-Tw/ M3ERf\~}D{ǂ.[]b,Q28JPĸr`Ly2禢3o 5lj;Ujnǯ{zo|={'$[Y 61NރIBv 2\tE&^x3"x -B7nQ)`YB\<t3goC2tU SٲMKpЛʉv"Z 8"]we T %?~4Ng. j#`)߱x} F4پ}At/GctF jzo:h7z/a\L S y&#qss9ƙ`<n2AS b? ]܄t7=Yθ9Ӟ^)LoE :K$mX;3xUpgd<,b`0 u> Msk" +ΌgLnYvBHKw1GBDg1':ݿ߄ܢOW[dfr]>ߥ̶={@5G|={1 b[nT d!Aed dqg"#:m/N59<)4*q#r G@ {oU!JNS[AjSi˔R_b'M`guM]r iZRILNTT\9]nf[O pr i(EŀD/>J%a?AwחiI˗"EENLM%5ҪB9ޑiCTs%jĹeK^dk0koq[ɗW^?زט} ,IoOyXW]!IoWaK%SbC®[G,3M+'3+REG>S_<~L%&w\ qt_g[zIr܇ۃoIil >B5F:O2\-zYOg⻸zmYr!QEDR '!MٓZ I`#@:?Q8,p{ʌyT0W-uS?(@2Ӥ%o|`z={cTqLU_fJͮv1 D)p Ω2jwYIRJNZI,8ȵn%:SNJ-G K'߱*{ӦMS~3Rd*s6l  ֮s=կbbT-.U/,K+&0yQlJ\j֬N L'nWU.=g=2>Nm 9DzRӔI5ʖ-+:KןI`bϘ*&$7T=ku xp Kh)q( |Dg;UG~tzt%J[1AlnwYTөp;qXHn| ~hD*]JkۧFeAok "zvKYg'T౉!e\6ƄApq8!)/\k$E03"$u DgdQT05v(D2 `AX0G_{@TWQyj! K>we9S|=X}={H}iw@O dґ*X61HOͶ4H۵l)橙&EHz'⋵ʫ#&][,s9G(͚5[ y*{AzkRWʗcԊDuU&;ȥ'3%0|4$Q0Y$.Vbsb+~7U~Q XT-]^E3ތ4͉jHL~߾}yf pSFڋnbN@v ^A4&q. 
sir5FU12X{d8Uk|vrHbp K:8k'iEab#?@?+u$컆FΥ+| Rc'Juy; TFv-}w& `223ұU؁bH1֨-~1Q !M}~ZB5֬7/tMq !iF;iF "}={<={1Gb#E?id2+WDd-NH{+)ѩyjraYt}Rwߕ?-4daMӨ4s<[5aa4 .44cy7(ϯ`SᘂȔG*N#hG*6WΝS0ѽ.U8@budD81B͜yPH+ i!zb?E;#$Z:)|x7Jykx&ZŝhttPo2O?N#qv0շ}{0 *(UL3Kc$,߻:"93ȎD+l|q9߉j[n!e;0yB8*k3b]ZVI┩/Ix#!d.Ϝ9bIA9sցTߕäǐ}ٲezYI9MN3^Pep?ʢ*T/$Y<5vXU *dֻ LϞ=Ͷm: G/02W]uY|yDXGrx[+Yfܧk}" xy ˜uhe\OR>L͌3$c~7]t1H3> @*3τ}ȗH7y5Z6_T$9q<}$oذOu?ɛ:6!Bh36M޽#]!A; Z)Q4'j~m&K܃9x VO:U1r 4&2+:$XpDf,ԧػE0*vSd$'{Ϣntm ePFZj6Kc/t``LhWK >g)= x>}wEt(pi%B7|07|=px}$V\РTMV]ζf[(rۗ-hT ̊ᤚi*Ѩwo(xIqhEĿp-5j@fWUpA',/&9]ʕWЇ82FmBPR1/49zmp@R+ՠ`۸ "]*soڴ)RvYg6.U1|ޑff2Ĝ 3ꫯ6̓>hPz* i -" gy 5kr?eT0t9IA,T,d %k5?6 #RTGčS Y w, l2g8\eH]&Il]Uc{t7շ QyԪ2TЋ6QyƔ>c§EK.մ9~_aUt禂!WrU@|^/5t{-=cڏ  .b)Ժ>7y3뮻95ŏf~ žohɉկ l|S,Nnh\ղQy~ Рעs8'Y8>[fۻbu$v-\G^b_VɫvS3DP|.TGӧ{b.5kK'h"!o6Éd; T +_*RNxSJӦM1?SeDj rW_C'eb)baܾ} 0@{1#Ȯ!BȅC~$9rJk6 k1!Cd)b7:zo[?|}Ot^CwqqysyfNy~,j"X*J6`&9xzysG1N@+Y<\X9A_&!O>'ٕ*X\,\f*pw&ē zu wcqoUOC_DdĂyzFj17~P2 [b Pǀ۹}B;4oA=[N;998&Ϙ!u?ڵkGT U1X$Ǘ"uĺ#@7N,X'̛iI@/TG2/o{T);qC%G9 qAʼng"XHxdHgD*5ĩXX! IB/IgIWg׮]X`%@"\"E0hI9y\sJI੾z 9i,qw+͟J~x,Fij+W*:"yF{p)WH`H ]%o}SiٹK{F rg*V@qƾW6OBNy).##QX< &tZBq R4ay f0 `鮤pU]Ak+ź61S;b:ӳk`1k^lEqn$M?_VTdN$Ÿ;wԫWOT PGReR\9TPQF pIy?~t Z3 &qIѢ4K)2ѣGatg|{' 9.(n%{ Y|0 C}Q&s\.:H$U3*q+Qseq Pz{ON~/|={DsR @6N/HI}ޙr8sg= +Yj <ޞ)O'bʃ=xE|:`-A QrI͑~&^2o>^Eb?0D>!Yoԩن}\Lpq #IswO^>Tȷ6E!l*їCZ$j~{h~DD^Ea}lѢEl89 }DvaE/)n_8KS3snǏ{1>) h +qI]S@ 7c{4Nb,ZJHYsVO^pr輪fab<>DEӧ+HЊTčqD褭J*ύ!@0NJ~$l@Oy EJ~HM$4T 6)a]f~cq|={xW"KNF0bs^YquO@0AZE<5q#2{|."4"7缯 e *oB:/,QHNp'L{>P EVqC%O? > .LQ Kiu ;DT佋gV? KIIwݫj2*=Inh-A D)WP!) 2}!KU^Iw / Q %8P/;~q }oUJ}gg82ʘZ|= HDV֌#f*@6~b5*[ڳ0!`2yt31gԮ)V֏WAA2[Q=6E$LbdĞ7KKIsG|yo6:'R|׼"?X:) CʲeˤI&->'>>UAeq2UVb!GNY8GfOQa(Q qܨh+y`W@^8GBժzO*oƈ FZٌ\8W'?2#ۍ}r JCb*PXT݉żU#2s<. 
R}`VΜ9~~}u|>C3\5b QdT[:\BNW`=_C'__{ĩD~(5ɱN T !Bvׯ F?Њ~6DCj~yu AzH7|S VMfFTQFf>1>ѾKԖEߍ &34Zdɒ)`d!?^!R]VA4pi4/wNCq.(JmUc4#'Ǭ{@<7|=xx<;/Gdb9\/DɒsaBJPp1Igs 3F$*/$|q/q va(˟$־`IXƙ)gR?[!JY*>@ei 0\׍˅<9L:`2RT87\;5TH 5mfP‹R;)xz|^x Z&qɥuN=E.uӪzH~>Կm{\Z6?4|Y gs&Y8XxXrRHnkVEx, %h|IrObΝqZH>|")G bɩD&̙cExgV|ˁu 75 dk#_dIݯA$!o4<.2cO*"MBe*w{Bw0T;(>~n);Uoٷxu|d{m p=w^{qZgG炥J-D2[1=rz*;+WE&6 bа&xWqm85ԶY=WmLw*;v9Aʊ"I3HMrvXƚ9rP}y z(%?|>WD╱c}f{gy=$}';I | 'iD>fG?`T5.WBu>L],W\zT\Y:t耡NNdٔ)S ЯϔL=CTgso)5dSIO$z= ,8V‚!% K{NJ* 0r`:@IDATHG0iPLiTр%눼aҥovcvڥXP3j[E+NteG&F >|8nJ\jBn1΂E4],yiXFt2ϧhX;(,̙dd?aΒHdd[ol0AFwk֋w=xм|p*U5AX{ϫ>O ШRg+maS\h6tin&+ m۶5 V"yǀ;ʹ>y18HDO[_"ę4p *^&9tn+ta'Wwæu?sw f~1}0ޘR=K/T^]oN4}x b>:!_$u95bL1\\ӧOmu܆ʧn1_~9zX 0`A]d&/ڬYt^ #uB05|dҗDlu׊=cZ47^v}=uuȔ]kXHs=(nnUG4R'=w}Ӎi1ϼT{H"p8*3Dk^{5L]!qEJ~]&9"n6qn]d" 5q1#6OM 1sAxiLa*q3>q'Ar% }1[nʾ/fd¥H Neҥ$q"&8xR2*y>7SVOEU0<gR+Li Vb&<*bլ)mpb>*yLaPf)ީH]s{B Di U-φM l?{Үl7if[ I`Zqo*fcP|A-pVW̫ZMC W9eݲӸiyrNj"]:g$lGC䣌E#*7=2E-鰈.sʴiӤB 8b"ԅUH̴3'ЯhxɅ\5irRVVM_ }UٷW=ߦGZ/"RQ)Bqq/W/4:[n+V E.޻L9Z#">} HJ]={rĽHcV.bOBSfVW_O_;]fW$yL if;-8s-NfSJ_)\HG{WҘ@hceU(צּ7$Yf\=vbn!V‡륱D3&40( {IπoŽk8lF5f13gp}o@z|yi+`L 9mf95x!%y}~);HGqg??9_V?9I1$ȅ2r< ?n9rA}.0Ne.Y€>e뉻qxS ]^GlUE39Cn5Dd '%`8k۱Xom+zfb;@ W<4 fCm} dUN2_eˤ_: `@PA (Rzf͚w.4jH7%kOdJɍ{G0^yA8<1A1r ˷{^n@y*pJXp/0Kd!كayX ,:B9by$Y$sٳGMĊSZ74􌀾o^^.ff D&&GtvZT55JLi!-[_5)Z;j[%gK5Kz07f~Acw jn< `w ʀ 0{,j>4FFWkQT&r"HOniW:7G #`=n8Kb>xCx'w#IO004s|iӮaڠ1>is+~"~wgUmv]K|EQq,pfΓKr(:]qmGʃFKBx~hjzr+/XxyxҾbHbiߣ=, ={Q9F۶: Tb-镘WV',:j]̓AZ~|~EU*2HvOG ?WW5`ԝh36iu A2 XhMSS/]wdftqGܧ`uH]|@Ha2*J`Lʡ2դ  XNNqrΉd Fs%O0}2)p!. " 2Ǻhªv0<4ݞRt4Ν;uO%xeT4 +:> _>[^/DjBJdŸa+bݸQrOC@jnvQr@NW潗Ni֭uF3;CprWmo~pa8'htOW%JϞ={*¨.*vd`dΜ9mXi~z%2ҧFMb&! 
o H݊mi$+ra֤ k;W|Z!=U~]G$ʠu1HW,#vWoP~VѼwaXyo' D|.IIчCz%`^_J`ڔv膝FWj1"q":1chG[HEXW>Zіqg(B4w(1O4Eoo-#^ ϗb\&VBZ/&Qk:/.w" M^8Gzu9*ӇjVw>ɍ X/`*\b:TH`b?u{*]c^Cx= i]pA2h"M <3ҦM9rϴ,n(QB%w.Lɢ<(;q ;#F 7qXLr0Ma_J3kW o "Exqr 32|7y)ۗQC`T yENF-C'fNUP'\Xݦaw5F ShRՃ,hX^;{RU;\oB`:4,F|ABx82C(ѣ(>ۙ7oLoL#{w/?h?X{F/I @Br3ʆzJڦ}eƍzZn-^\*ٲ|` ~^srs 2$we2ןoS&%!_HH'gߐ9S,xŨA{7RO"|=p|y t`ݖӍݾXW^ԍ%u$.G7;& <;?[.iVK[S9*1_m "?V7EF@ %o4PM{mH`+iyAZ_P0s+6B?IC|`gLҕw&9r:HSA4f`RX-e xowfMN[J[$׶G@$;U2o )pJwapW#فO^ #qGrhMa ^f' z'C)TAw)="~&辽ra|$[fLtmFm/~Vl|AÕȰ;d[br)\hJA(>C{0L ׼h6/x&"rW5mT9OUjU]'%qLRn 4H֬YX䶪X1ҭ[7!ƈh#H۰aCr?yٵaA~=gGcyBn GFc $ FD>a 43nXK >CigqFdA$ " Xnz-#R"վ4*G$Zti/4W={Z$s!,0}X$&6m"۴?O"urQ~Gx8 -,C?sucG lϘ*v&I%X $^YV?ȣŅ){t[F)"8 .B4ek)P;4mQS iMYQ$rڝw)&MRמ3S5ŽX[@r<tI)w1f}{(ȅ(?E@i„ fțDc Se˖i4!mŴ޽{s3S)wt,/*|\kז 0Dl@ŀPS|EX΍~Z:rAu Li,by(?P!"-m€,YH UmSOt 7| ŪVUjbqzz_$ysԿ | Uܰbءkp{Q fp >ey|>ՁcKOx%Il-Rb ծWƢ}J{BꈋY Mh?;B f#B$/1Ad \>)o{)&BdŷlÏTuʺ(8վcdY.6 ?H?j9QNÜϊx]5»HFnoZ O,)2s yX8Ujh[%aAzGWuJUVXl\#m۶Ѩ ?Vɏy֬Yuu*AXlltXFz\裏qHBxTBrq15Cڟ WE "RfuK{J1H7Ͼ5[Wx+83)l1=T:5Rp5p!/?zebBeKDּŇt)Y!U ?D5={~rŞv[x*ȢM$5jUĺ QAH0^ሬ .yUU}'_#[IL;g"6nb?q,kG-1 hb]~^nHP)UL-HHNRv~ȍC2rdK~#DŽ8urBd"&6^ɏ{ӠԻk^M XR|dA{7CNͯYj[4hڵҥK-ԤIO>FtYt 6LU7n,\scz#o^9Kξ*;vlf1+(~F6=C:}<}d73GܕZj%l ,#]%ҒU\"U(B`\"ER։i%q q D;4BV`#ZCu%pLɯ/=+̺FWg&5:wR2"g5Ⱥ)qvH0M'>R*_ՕP (M]N̙2A Z6iJ, 'Iп>UrJ)LQ,~QreZFǮ0JjT8Ff0cqi\y>ῊS73E櫯)\\kJ`=<OЌ9Qם#A~xFn*{ʔ=q,ЌrJ_Yr0]B۷˗o!*UR'V{ɑTyoٲA.'r5j\R͚+y4,p4&]0z!@:ҥK+y6hСC`1U-\;p0W+HM~?Ua=><`vR,1"9ї2ߋGXPnf-^; jvUBjyf!w|qo).{7|4X;=#kku:*@X2ɨK-DB=6||x &Z&G:+Ĺ8>"UuSWfdmS<nLU*Jt:b ^|npV$n$QZ;5z*m lժBՈ/D*^^*vn⾊ p=) lK9s>䟩@]WV u'L9]:BtnAGҌfdBJguj /$` ۾M*^vWԥ4Ghd2dٳg󕷇UQHӦMuI#\9 UV-L"'k#Zvx7EGA5J# *$-ZbD"% Bbϟ%rSq.:h<Gu  T#]pb1)Y $0 |/¿nSP{X5L6+BfuSG |K"el+To1ԓf  *G>GWRˬ\-CWyB ,Mwv%zNÿH:_|HߦQp8Ό\ U$fpݒ4cEihs~F'eYW2NOU4)N9/ST\YZv97S1!itKfg~[ǚ8~$gګr$F={)r*ymX;iE0رC.U\9]"Qez i3gA1 f4Hy{rK13P7xu\[ݒ(d$ab dɒzL/rO)GVFW`&DSWqPF,ًeܽWbeĝ=WqrW_)9\Es fs^B.oqׁs.=Uukmat-?{'EtkvO@A * AIE$D$A JPQD $ QIF Oݝڛݽ|L穝zo XG/ϱd&ps\ j듿q;Jp I[-0qb9.MX6Dyz}}v/';A08ljkoȎjjjj 
Gߩ D؃H/G 4W_(x`a!rN 9H y(/Y7hX}0S(s'1?p<'In)Żq{]O־@p9c 7%իF5>M^~Վ?oX%qb=<,iDQƭm۶I1}Сq*h(ziꫯj8mx-v<,lzbf#qB7GzѸLYT6EpB!ժf-t`j?Q̄[..yzȓY]&H991Št,׷zj{0n%âmRߥ}L36 Nnc$FC ~5qGƳ8~Gj 착z6;OSGUFh~85,$V[bjjjjjЀ&$*e>ow 96r wB_3*}TT/d=)T ;Մ;]`1mmhBzUׄhBfTĦ@Un¸o"2`kbX8Ɲ8Yŕ1̝;W={h^6mLM߾}e Lx`S*&>?M]<3X tBJPFj"S&-E&汓isLm7!NS븓n+> T+_q׮O&h&tH XC_67Atkr\,ۦ`լkU!tNv7k̔͗Ut-3|p_ ꢓ"tL,]T-_\7Tޓ|!N$i%EggyZ|v_:"-SOE/ c|wwz7 4@'4s rӞjFFDiKBxowH3`&Yt$x3Mڟ{Y 8Z t1M98|&|ֹ0$vPIxqo݄\jBce˖wPXA9gb5Pְmjjj`?w'O<.'>$m]Mo̊<ͷW7`;3zu0m۶@8~K/"zUTI VǙUVi-i=zÆX4xp~ȥ GwS8b]6izMع ezBw]b̓BgPո0%"F+.f͑q%^qzm})&T_t|w"Τra,̝4PA pz>Qh0sB/,>;^6U`wUMVgz!* |>u-/W.Y~ '`fϞ*zehe]cXɫ˞ӶܹOg~ ʷpQzI2x>g/aX⇚Bo >Y WU?bB%0j4ϰ7d+ 0$P7**N(yw c,qUDpBj;^ޔ X:tyڵk37|k̀ԻFXȑ#8OpzQ[5O5(u~_=GqM2ao818"?̷6HMƣp^Dkg̍rJXIdh1!,4˰/ȡ)VG}2~d\_`@ Ν,K>鈙~< \CEJW_S Q7D\ȚFV4x ȠF6AW3%| d*ߊdRs2]L!AY}7o,06 <ҪULץ`SfΜ)s̑#F0dѢEZo^L"~ Y&H(9j ~lub5`5`5`5iM/S HE<8K4~J*F)?USƩd@ $MY{=-"cǎqUƍ'L:UA DI~W @u\`D̖HrjCqI+K& λŭ\]WfLW$8qRk'<Lvqp?8}{V߄F1x|NeG\ ĭ_FBEARfޥ[l ~[;kͰݼdAdlzf 3d-FkJ/=7zxoV0Sp 1KEA̪W2 8._}Ur|~cMKsI9z7't2&nnPԨ.77ӻX?,mՀ/9Hp/|qbb6}ڴVFh Y=(`B%+Rx M$pLQ0O>tIFR3#ʠ<쳥 XL0q. 
(A`Tv|'g׀5`^Հ+ӧOׅ=)9]]J d PpXiZ/б j  Z̄4p,NIEZjڏ_`06ׯx\0,Glj׮7zDe޼yBqHFn>̈;jLdb9qT < Md}6Bz8^J5j#+.VlR ܁;3S;A/Vwh?z I&zkc  *yL) 誰( @'z<~Yh\(l UB#3mN.{1m\`68.)iL 4zv*,](N ˰8 OIw9 fꆔ;vleWo)]:37Э8WMz^Mo@>v*GhRVŇq&zW!Z^YVkՀՀ~jݯ/^Qi'Ӌ/F״؝+z?7z}􉞗Q%0Fk( A~jҤ4#Ǖfly` @jDd ˑ K!\9Ͷ^5@J'}8J` -j8퓞fQC}u}yptzO/.T-7B)l+@Kc嗡{.]tB `ghDX}%-wgZ9ZL~a&Žq|Ht`oC'ļ\+ W)b>ɌeSC`{Lw'm;z뭷?zRcĉAe9mO.oafb8.=o׮ѳo9 {MV{bhD5^b/Jw?{⨣}*ŋGf!wqąxXd٧יڹ?4C )f0O?.&$h\8'rfuH}0z_}L[T:ɸ )oFc%R ⥍λOeNfg/]4 2ߺ,_< mNP 4x+#*Veh4۷olٵk^,hzV, cƍ:,x}D33`y[#G 2jrYWh+]'m]߄9߄6]6m6޷ߦEO42D&0jH]X%<6u0b4'~w~aS+W`q`=Ryuj&Tq.V}]HKY̎>$`g%c "2r QCymJIB9j.ti<ƸI9j(, Ռ5eIG}>;cWSUf-fsCsig&@ch}e˖O,QƝuSmwc͠9N,_̌hbƍT7#9/UWq-d'4tUz=^]A 9j.} D l۶m]uW㾴"[ ;q:Sm'[s!.pBYJo2ɥ|sgQXͅarx̩h ׼  M]>:p㽾!d(b2`-ȮlhugB0sӨq9(J&5A3-Y &Լ ѫIt}IN9 cヒɯ/7k'7#+?mQ]r.gO?Mohs Խ{w ,!B=bwX1` 4kعsg ?y7L]nݺ۷k7|p\siժ lٲ7dm8[3Tc enky^v 4/4xw}tq'O1טpsMFBX_tƉ0(_Y'|=u{8e}2.w~ |;nxoB9HGzиPzDOK@IDATW2=Io~ -A?zbeGCްICn˨?V(3ϦO4ԝ<=р{/-*5LV^!G"|i,rx+/Ų?iƜeTf 6BC;+~ozl2LWiwc++6޶.c̪bV?"GXd)=0I&Msf> OdF<'!0' |ş$5 _ Y93h/qtrTreE_xlPO)@{~Ǚ7Rk}[ N/棏ŭ Vz!X-Km%o:'L6Rs[$pT1͛7}FBȞH,b6mY+ ,Y$K$f;w:OW27kqY*,H&= iո!z ,K <.Ie[#%Nq*T(PA0V:/Ns՗ٰQh62vm^YV!p@LVC8{ĻchY4Aul.  R !sY ~xEO{7w,Nb}O<(>TM2n.N~34T`rR pD۷E[ X X 88Ô%]022^\YX}xEIViƩ2pg;4xՆ4)4:=fqaI[QE`"vcb[-#=|q1L׉" o5WÕ۴&Ω`so"?%ϟN3>$v`pz PUǟd\6{g`ыq`qOիJ~EMBW @d7:hA=i+&a|!sE QJ_l]ivRm{7TĴj'9rNEb4o&YbuL Uět>-c;l ifNX)p϶subV~cq/!fL&snt5l*d;ή8ժj)'gE?}3%d Xkye[Ky+bs8bDjI^" HgSЀ5`e m5`5`5`5po%g|g5ǖ8ɔA=x\KܽK GXOqŁSm;{HLjwiOq,nZS)ݾě0IvsW5Wx} ||e}OmoT؁׉ t6?0۶nPzz^-Wh@l+,N/<# c GCwT${D/I}S 6y9Vqa]1Ǩpjgn ཨ=bAz<`@qJK WSu;fx^Đ͇#ѣeѢE2~x5'6K>}@uSh˖-"owR+w\{p(x=Mz%PKݺuZ77lPgXN,O?~b>#nz^5@ᓙ /^lx0:ysͬx<|nŏg|82}qn̝s}ǒ!Py c dC b XvѬmkbmjjjj`i T[owq GBHR;{=[V>_Koog^Ӯoco9!(Hʪ$BRl?K7DNXʨX=C5S[ \j7cfF5ҝrH (M$u:^` ˰ly`;f 6 }:0iC/1DCr ͑rOx<-^_b- `O,R$S8x&OO"`p4B^@fx!/0 u. 
ո1i䩂XO|P~<#Cͤ&~IZX4)a'Hx!51$PY:ۻ+]v̞=[4i"?6J߾}r5dM)j$ G}tl{\54%6Zc7r*/!622߿dl9?19ӛgWIYC-/Bń{˄w]qesrh\N]2p 4O]flZ I02O?H[o^V^-J/ó_~Y pBzeG>UW]=zfeW֭[3z\.]Zαu OCj ЪlX'9x $0?zJg`puҦACy5kԐFip/b^\ k"FkpN37HZ*vT`Jf\o<6l-i5q4J9^{5iٲpdj_={ߜM}a9΁Dܜ tf<35Ti pSN@uJl9U?Sq?TpަX zmVkP~y \e5j$N +C]u]vح4Oc mh}g֯v]ڽ 3_2+^(Bx_K4L{OȄa6O=poLԼ{Zi4{Px4@r2BqP,, c;qrqb']ri܈.:(*@C xb⋠ü;0.|q0\I&NcM㖯WhZ,Pj]oe:V\mڴ)#Hg_MjߋR'\8 E(9(0NZmf:u-sm.H%2<\+W.zq5Y(}&mQƜ~15]̌`vYA:rAfʜ~Fz2;}!/F}h6mMqcYf{  /˛K&~?e>S46io/ZH]gi8b^ȑ#yg g4l es@q3>f3fŝwi ʕ7T5n|c;wމ:]w ^ 62YgȮO`UjUM7nq]J߃1S;wV:p ,aGiLY׾oҥKt%ce7XkNR4` X;#(x߀ŋ${q'F=}RYͪX=YT#G+W_}]fR(t^reSvmCc~b̿u_F?g _>I+zFay.RhИ dy>} XX#6 yFU̅ϻ(w׬kvz:uZAӽL/=#I>4 0Cj*}>i}l^|uK%ΝЀ`߬Ta̗-[hӸl޼zE63V>w&#'7eh .kLN9"c!:uu3'OEh•np'n f.qQWw0YȮȹŲGpK*ېi١CM_)]ޯܨsL&kժFKw <tMߦASP1׶ ?\FD9 X4:SQ̕)ih􎕌~̌~ov +5,#TR7Hr@c#<$& 㛵b5`5`5`5`5/58L?nS)d8;f;p=D(%\,J8x%V6E"X@lnL!f"1+_c16 8OySerza8=[5{%+zp'"l^ L_$=R  PD)a'_AAK1G`1gI!9ׂ^Dڵ"P0yV`h![@ ($!oU3w}:E`61 Iϖc!`E0@-QV; KĒR@Υ]Eֿ&%VLː^DG <pG>Qw_R 8HZioN*3&RTCĴ3F|9a 5{0Ǽ} PsknZ# a{WJ2w lk&Y (w&y+мt%<`@`^7+I&LGB\J YOJ?ҿed2{+7>?! 
10+mٹsc )q!oh6L;l PB=bS4J AN-rsFs|\ӧO1Z6l)'5a%+4=̧р5`m+VVVVyԀ>嫏=Z9FzޮUC77~+wuR]B"O?t{/<,DvR 7j~,<ؑX X X X U p1DV(ͺ^i̓ Xps \t_]UpNZkapB'Ҟ:_: NTb"*ݒӷvI`lQ$nnEf3?FkVDz@3a:M腢/`JIv㐦>ViB fWKӏ*Ðj92lqǕ6ؗV?EhNรvK9(rʯ]/jOz\; kJ6Eۊ+'_KJ'd+ℓ(R`Ƕ,1$pvO̳gƏwq Ȋ/ BjERe6"8& \^$Age'*[23: l Zf&7`|zX4T% 0;հGO0X.O,krpAڞ\ 1l0~hS/9zR&Ŋp/P=NO2 I9P;˭U_u⌽I0nZʨJwu{4iM+KayC)ʜ?3Oo&.$'^ =OwV9qc s.z*Y_Xs;-=J}sG G(0T1| 'i}.-Cb i34ebр ワ®}jhNȳ-Q/4,sYOՉs>X>O.(W{464HשSG|8v>q =*ʊI ;B >4`Q#ޝq[isB}wz X,<V2w8{*~:LϽz`97˩@60/;3; R!D`\׬YSnFş# %{1Bo eѣ}ͣ'yX2 |e6 ($` ^4B2GJdoer$g#Q]G13sBDi&`$@nu"w'%yHVB(3qv{?zӜ,CקMszrL̳R4PljvDVVVVEA^QAp+VV~r T^`/9+EOvCR܄Gޔ~!Rw׵, = ^^2X%&,5-t5z%]xX&VB ‘5ckӦA貞{:K1ۦj%{0>=xx44ƝPbvs,okcI %$ a j`kk@FiڴG& 6 5:@zd!r =2aP/$zJ4 as53͋JvQj4u7ox߂p a#?S|Y^L|7Hdiƍw#܁zے(Hxʥw RլY3AԀPLfFheOQ`i}zi#̂QF?cT~'QO:$,VpFg!-Vo߮S8YbEt,bRWp?;gZ%n8 A]((o0 ?Goݫ~VteBNqjahj!4`ʍ|In:{ABnB5a JhFC6G;7`%-I"CCM[DiwfR2"xGvCiTc84C/,xOÓY7FxY !þ0,2~ܸO YCCw7N1x+WE `OK4'<#: > 4!SC;C ۴£BӰ̀ñR+};+b bz!tg Ȃ <in ?F-Ncw- .NelHj@q9f Lmaub^&X@շoߤm"`do$eHeHON!UiV3.&ZoL$f'HI)Viji4CIBB2]r%B&`ƌr2er$N5"Uy\yH,!YC"kB 3uFW^= ˨~F3 Q\$ ;}0:p >j|Vx4ЖVxcGb5`5`5`5`5? 
^BiPj6l?>OVw8" Z9/޵C̽_ ϓE/~ ]j~JYT\2QͽO*N1X)t='ޅ]%b tp/ܽYP`("8>\]KSψ3f.!4@ʗ_~)Ex` ”b2=nҤ KYV3-h3I='N0T8mׯvmZ+RY=cֺH-c%pMyAdM,eJI5`=@[jjjj k ( {4̺B>N`F65\gX8OW+za!ƫ+zKxLQE- XxEU){,j9,q Nb3v;wI-^l- LzVx<Һukz\/vB!p|=~M1?\/TهgiiV`u"tPp~d屇Ź;|2hF;~xj͛7 ۃP޾ }Q.{Nݨ  BسӬ-c5o ;$sHG,_ LzrC\3?Ff|?ySG5^i$2e,X@ lߊ@hD3ڑX X X X HڸPv&(VBxi`_Kr+SUK7ޗc*=`B?rt\ޟF7˳'k7g$Z0^{bu`%NG"_?C'T5su7kGY5@s= 4G)am۶4|+A4a] i/_e:t`nݪej9ʴfJ'Sԙx'N4؄ß"!.j2y߻x`aQcCly{'4/vZt YjK`hHa34Qiff*Vh`ˬig5' HKK… $ L^z% F{;ް!Zo `8\ĝplvVyՀ3mjjjhl,5b"ļ.Vo͹WQc$|jsq"5Cj̜@7W;5#w G׽4/R^ \ƭHăLo3fQW$06 NFttAbk$7]yFWIo *(-7.Ҹ`v8 n["ʵԬU-V5X(*/_^=x^O'ӈ;ԩ0㥗n?_OӠA=eX4̋;˶#BY>l.SO/bH$ zqQfg?>{MJI ,dz Z  &0I.]b<&wqԯ__@4Mq}E4Ċ@Aj!W]u>Wdr'gڭ [|%ا!a?:R+`]5 \_Yj80¸pͩ 2D ] ։]jR<vP~0|s(ΑG{ sd}dDꗬ^}/b~D1B* >h6m̛7O gLJrư!s [n0611V>s+ubugK4.ZH^!6y_k;i޻4-Uzb[|>.>DOa;{e _zZ%^1;HaGQ¼x`Dk; 7y(FT?I1YC!1ir5ר'ƻaT:_, = cFbG KU'5(N1  =M|Qxl8]/V7p |W(sV=[j + XVVVVVVq)< }1]k^'ƣ$iH09sܖmE~YЮW9ص >8's;zx/,)Gފ#̛~usvGqO |tK&ӗ/)>0l 4ɓ'낔æawa<-[_` k`:b(pDe˖rE 0sdqbO ]g? e.c*zpg7H R 8tԨs<͚5crꙇdam Pj`j # v߈ܕغu4,1TT RgX3]&G+6GuXٰXH= ],X,$.hڐ! ߱P~$֦I&~i޼9FlnL#o0"_4_>y|Us=צl}7N:Iӧ+Y Ba !P>-C32pkRRR B :]-Ae!|הλ< B/Ee d{;^=FX#]txm@~l]Yfێ<:3t,  l믿 шEcdxâݐ%E퐽i]vƗr& c(zj  'ly/QVcȌ~7|>/0.l( nVC=E`5>0:,Rq`ɀĐ#4˖-SvNlOh҅+hd~W^gX7MsV'V<р0'4,%E+p>7LgA;jP7VjJeq~dV` }qtlr:FAr*'(asAi;S+T4 17|&᭕tՍKO4px۶Gff;A -n];,Z7bqJNW6 fkm Kp;}g sc8aF!L^ z{!+.C"V "OE13ӀTgT3DB 2n8 b(!AD1n7rRS2 m`ȅt1hx/_f$ "aH; (nVrixe]!,J~x5/w-Dk9 <^pb0d4C]$x>I-8h 6NF@IDAT-#I.'TB0.~<6;^+hO $'?k/y ω/c QiݜcM XVU[ X X dƛMx?D%pfcuaBw[C2N^1(7b^_I\qՕ3tmS*0t(!WW)Xvr:-8z]V}':hXwʿz$ܹsbd>KNs CޠAhBlǎ:y'W1c+VI#)Mc A8T{6x=4 dJ{UX@٨epG)f,f a#]0yU,,;\re!W.]*TF/#/u-%fJ^^90B_[#rJm.-۠ՀՀՀ@j VuN v$nb֬|g'0@®v`]qy =uq,r1kZyEF@ʾ} t2/S$//>b lwdצM$@ WJ J4\HLK7BfBM^x!Zd0/b%9眣M _w_)gϞ.إK'- ,Z'}饗ƥǞ$a^EڵkF06CtӲ a_|!ʲha,@#gN(. 
b94LV&;i"~-Ncz "s{;3^ΜskeĢ^wN jjJ!ڢ7}Dkީٱ3`c&w r)F?LķELGy]FkǥґwJ\۾}#h cR)i & DLWTM4Q^{T!e}eآ&TEFY^k"Wf ˗/7!GGp=&v* R:VY#ͥEY f޷׍<-n^oh 4߂NK$@$E ;R.L8G+JSk".t zl)^DR%ҞIucq}H5+1oM6s@|ٜ/iq w;Pı/sv^H3$N"q6mٲTTB*%|vۈ3A*IES U޽Z^jgRePGh)ycaMEx8Rb9dm}J_mTlG/$Eלw0hh,}XqðiӦJ*sI:yT@#TV|I5I) &GVsǔׅ >ܴꖛ6mR>y]Jk9>='=kYwNڵk&:ڼyTL|ͤJ8s]KG^GoGϚ&GUC q٣Tw_PܜK9aL }6jj^ڥIU` @+Z,E$@! \W2\9Dxj)~oxБARPx俊xɓ@`N"'Z-$pH/|rN20I1굵頤[0[q\o6cBƲ.YKl\3m 22Th,5U='NM$kΜ9 OGű#[πG={l̔FՈ#|)!+-tk^Ĥ1E&N q|_1uA|8ti]zqƾ @flf_!S2Dɏ<@פҥ].:tqJ*|Zv'|\roGm^|yaÆRu]6bTOn'G; c;tKD~rv$@$ 2h$@$@鄀S1ACtu0ᤅQ`oJJ- u0{Q)iwڦ"{k۵gSe L`ϙ G 걥o5M^`.YuNUZ5/h΄7tvgRXZ۪灇EOud?YG\G @2ep 7=,=>}::gȻA-#c:W  5 Zľ$^n >J5,K" ukGYo㉧K;q]q۹tt4! R2)\y9r$t$?6QdFN*ʿ)X_qZjNIu!tG[hp}WЎ1|\S,@]M[ը1PZʩ+Zq^p~? {BC!N(EExNՠZ k#b畻f[;< $Ⱥ:x ωoay5u8YnWݧwmjgi5+ >,-N>>H*S9PDu$Or)SSU9Ϥb&1q5o"#K<ʘRK.Qc8qdZIEDN:ΉCV252GR1cFST.s.@G/Zt5<9jzx xC% HSt`)~NN$@%Er59%сIDHJbZ`uSu(-zZsN>iN4ges]yn-K1tY*Y}nÞm;:U' @0BD$@$xR?<߶g|v떰tI`ո!@Ÿ#sGMϿqmѢ?z oRPÃ=t[ YۺJ5/6C1y " ޺}y z/:v&}L.]TԎUjUs'P:]T*:zH曦c͚5ͱsR ZcMH?5}t#.U }F1u ZbYOS:rϴӑMJ޵3ˈK;) &2en :{1%bR0O'  HF$@$];x8Ž)eJ ҼGc73s;_~br&q`YbYW;wĒsUR -+qk=AR&f&g_ؾ/ōS~wqjʏvH%x1@T tu@1%(|bcc}(>q[2TCiƅ_B$YH>e˖5t YSVњ[J:J˴ǙT*RyV,S!SrbbZBS%MUgPL9HH.A)ɌTc3 G)x_n z)2lvT1geTߞb ֏)تt^H L}pB:{Y3d-S&B%KJ<`=X% 9vVwasUxy HBGX(tW1jnwW4o@I7~|`;S`A+ȺCRĥ@ռy0{l/?ZP"۷o믛I&CӆS_AiMP{>\ *l TSs  $Ё& p&W_R_uwI WKv^Uhn<-)nך"y AٹP{^M\ qͫj0XfI&p7X{.-ZPS-ѳfYrat@WP`q8$0||e iLAѸK%C5kt.b]ŮYMJ*ڹX˹&B-Ԭg v^O% %@d !vo;uQ*]oZؕ+p%Ne? 
`"..7éP/0ۺw\x>,[DL+V V2+o ִɓ'͹;v*gQvmSM/k3|w/fΙ^{ E1?˗/7!S;K*a(%c_JiU⵱T?!xxn ,) ѕO8=p8_|F)S⢕n6#/BO?g%tjT(=zA&jw1ODD_bku !/wxqb ~C8LQ6& :ǒ# @hax;> o޻cByv|0 J}ΌY>X8s%T`u{̀j)<mr *_q,iac3hd4o!D+b>ȕr>ׯI;yj`($JBj8`iݜ9s 5GDǎ踩Tԩ)TWʦ͝;W)S:޽oL(Uʕ+̥f08b2\PHZTR-8qBiI$@@ HH HJ0aQp7]~rv_htiq㗥t~}?~3n8RFKFDE(5#+L[?NK(/ M a OCZ|s.h$ #[ftH/ZHz$f 6$4VpaʦӺ!'iG;'Tf VPoLz3{ f+sf)4l5`0O4  {y8 @2/vDcxj/<3GJv9Lga称=+8,X`*%8E#,wZo݉gڶ\ٔG3\>@*ae^KLr@%`)ځBr uډ5n"歠4:# Gx8x@$@iC@moz,|% 0K-]4DjԨQعs'ʖ-hV=Oh7Y ?| ҍORE ݕJ%v" E(=>xɥ庥tO3P3gqw8( K,IH -[QxTZV|%<ҕ"'H]~@-QX*Qo<}80vqJ]R!oT111~r^Qοؒ;kVX-E"@dNүs \.KJӫ/7%N0tuXi3%ٖHRAT@c \ΆV o>V|{#`u'PF כ6mj6#)&Nfajܑwߏ\rͬ1c@0C#2K,qMj5kvZheL4 | `ڌ͕xTƤ {)wl ܙ{rT^' H!F` @J țlg2Ė*kR Ͷ@x!s^}ƍ'{ ΝÌ3Lٳg'DFoJ;Wzw^8uM:wJ`ҒE@4TXBIU+R8I5 8u+ΧS3@9VK$@$[ˍ 5uy -V N'?o%UPh-]E)V`A|8x0 3jK7o<F\˗/GnRfM|GxbE84(}yPe>e)5:F& -.mȗWW"몫҄-Z[PB)+fxt% 0_|Ȗ-Tc V=x]U jmZº ^钀+ӧ 2ǭz8|:"`l-Ůʕٸк2:a0.HXQp `R&O"po^ߚ`9N:Ks9*wJKCƄsϟׯɓe;5` oMv StIw@JI@.J;Ovy1 `uo,guO)x>(>\u+! N I.o8O7]pW~g sve.2e *K*xYu[]ͣw *Uرc?[cݻ׫W߁sN4irN:hݺ5N:eC٧OSQoN ՗*U ۶i{mR]@;w.DY3]OyǎfM%RfJߩU &e-h[PǏC4UKkXy8p 2>_v۲_ don f*ʑNzp,JЂ$@$@)%!؞HH Uԛ| Gha#jPI{nCHMfѣGs=+VUV[1|1n8UV8lق믿]t1)m7|s[?B]/6݋ڵ 7܀!CSo{<~Z*كʒDiB ƉvN|W{8:wl'mK.mƹ뮻'O#/κb߾}ؾ}ԩSG ><l{쨠kR9CG4GH5 mHa{E4I X:m~I=f 0>[-)DK ݺe $@$. $VDM @v87p褜9`7;̛:d{ڵLWH\U9שS'6 2׵^{bdɂڵkk"tĹ5YM81JRӧOA$J̍޽畘dI˘b 6m.\hC=djǎ3DPG13gl,:t9nTiKFٳVH;& JSޞ^]74H SPWDERdQd$X:yEwB}ں&x>\-MG8#Fz HXQs+ `P:xoM ;>Xm'pCIP=vD]"-ZaÆ'D1vz[$F%Cʍk2e2MdK%)FHIkZz5N8aN}ǐȷ~Ϙ1r?K?ũ)߰Zxu7%.* : . K;ڷ=Gy}C;G?1`b K{_pzϋc @#@V0 @J ( 8ԀJa52| vj).YsjwU._<^/^x}/&mD9IRǿ yLā%s&;wn%YիW^M68i&^:y$&bn}m;)on:[R: EMb> X0RJpe `@),OIb{"-38Ԑv 8DHžXa@ "t䐷k*֐u_9#@WR9etC<OjUxqv7o^tÆ )ߣ8lUX1%5we̘-Z0"*/Bm۶q-<.̊tSzys'ntHd OϐIrHR0fX?I@0xF ~;Gnf΂պES͡6$HH yJ'"HGSjxXUgx,U믿LTD7lxVk*:D]L޿ H:._2#&"& Λ784hKkذp3f )%&ZUbu%Y%㋸k~%KB%RKEd^*'?댎29}Ձ/vÞ9v3-S#1eKâvGR !!fnXHd邀'k[P*f&:oj8uAHH.MK3b  tB@ito+dZkvx{;[D?"t.ѣG]p„ F]Gg/IH&q$uhS-Z\:t(2d`DE}fƍʕ+9s`ƍ_wuS֯_o*^I{Ӓj(UO:bbF?K%JKCYV|ˇugqOEtoW/Q\fqIUB1Yw UWo?3`7n违>WoM~vAG72P!ަoNqH H!@ k;>NzPT$H?;! 
dP4U՜yIBWv$Ih*19*~2ѩ,Cl;DCK* ɓ=}uEDIU zA;|uPܼye'gTz9OxPo`o BHu!/Fg:Xw&M|w1Ev\ :.p $@iB@ gEA J)`vӁJEqҰ&S89 {lu zioi^;tW'B:7'R[7NP[l1]x@We˰BPE/S`CGT#l$".4  _ʋU ($fWkNxK lvm9r]AΫ(ڒҕ8}^Pz0Asl :BӐ ϊ9vFʑ#|ƕ%uX ᣀ[n6i0=&Z7fu i+V"LpVKkDZ,]|PKe .t`EnH l޼?|*t )Q> ƎR7:+>%(Ko_ jv[;>40gZ`簻w1cSFJAxvH V HH"X{rwhgѺu&^rwukIM 7a@J#G"Ng2U*{@:CFeնU@`鏀C&Jy/1MIϨasp^~%=m{% #!v n l-\RM]O:I@: oٳY}f곭-9s'鋀~m#֫# پ[t`H[T#S:wLG;& `VXr$ dXlll'5{}L`ln _U{֭LSgo*X:{aZF`a)GJՁW+y؊HH Ёv "%0oXmx>XD ijv.a.Mi$jV]U1pPOR+}J>/ے @ ЁHH `ā5`[he˖AЁB́nF׳}XEu$ԌYl+ðXA$@L@`#'ّe'" YfE>}ci 5 ߶m(PΝoi{(1P)tӷo_̜9w_Us?3-Z;w62|(klݺ5佗h_f=w[x@$2 HO< 2СCȕ+yHDQd*O>Fm]jx6U@*F NG>T/gJ$@阀]:yw+FSTEFG%RK6sԁD7lз_y3ePIt5C 嚤#ܴ~Q`k׮K/ krnݺ:uy%5wqDwhBxHLXAIy#?G9/oGyMi+a( tK@}jnp}X9ySoMU=IH sL`!&)#&z!Sرcԇ~h1)mϤ4C?EA*7%K.Vy%m,Y'N@!\;wM7_$@aD0\ D#CMoy>p@ŋc֭8q"i+EW| @ X^ KGwڷoիW'Cϼf3fiӦybL.3f̈{Dy~ իW/~X_znV/^#՛} 0 `QHH SNaȑԩy'YRPXI8~׮] $a#?Ahv6˖@Ƞ5h$@$@K1{TE>|0$oΜ9OL|DK:M)2O"Pn94 ٳgM|qWHlܸTnHܹsc̘1FWVZkmڴ1_~ hʗ4 HkJ;I% oD#"G[Eєs4H)gpjՅVgN 3G9=R=' 3% ѲJo۶-$Yf>ݪYZ]On4$I%cK[f„ |I8:$2Ms5 t`MH.$ VÆ ûkHJUBC#pVҩ&m0i#)YViq@ⰫUuOHH %K)q&UZDKڟfn} 5CEѣf~e˖E:WDI$H0hdKLhWB&"~z&ZX"RKԠ/ 0!Z  7SmcժUXby'zʄ \|%zOꯠoGY9s^G\VSC͞{k ݕ6$@$@%-[6#>h dʔɌ-S@;h{N4uvmx|L믿DIhެ)&ΦcI%kȐ!&_~_*UD͝;\7ĨQ8OtH%jKߛeʚ3g.$@$r 9rNH$@$*{K<//<~tMoA-k[K;oi$@$@A@J"DD+pOQ'Oxq   tFtvù] vάp< -6x aeg*`^=JEGXoj#Z>{ HHHH  &]M$@$R;34o+KΟɜ#պEj 34['&  HWJW% %*~N˶~5 ͪ7 ΰ*ZŢd줮ᅲ5v믿o~hٲ$|AS/ޅDΜ9ӧGEƍ'ҊHHM@x$@$@!'L Mlx֭%Kא WdLtg(!sN4izB:uкuk:unݺuh޼9j֬YfxȔ)jԨGܹsFJެm>|:t(^̛7q뭷;/틙3g|d Æ 9N'"z]zu|gf_sE?2N!i i믿RJغu+j,Z?xL$@$`B`HH  oPUKaer6 ua ˖bw'RժUg̙ArʨPvڅݻw7|z ׯǤI0p@o|xȑuBKrPhQg̘1ջwo12j(tٳgO34\p!_d?`?0ֹskxsNp4w&O"E\r={6DNL,Y*~asHHCXQIHB@3N."y%hC+3aJ@S+V4+Y8<|.B7o^,X8gώ~>/6HK,'*馛Lĕm &cKtW…\*%svq G8ġ&c+zȝ;\L/ZsԎ~$O@"c7o6_ 9|%oKk7 e3 @ZpFKXVr ,)zOG|NOv۶m>KAvគ7;vDGIT?k֬hJSf?lڴ)D{1ovsM:7 qZ 4ȤJjwݦʕ+͞%L/4)t۰a~wL28.] Y[V()[4H-qLl ЩS')S}[lh.] @j ^ڤ̊8E}^{N׼~+ `^/q&6D)j8Ł,د/25s$@$ J4v! 
H;Π!qΫ`/[q+LԻs`5zf~ Hz8$ ?JDE]M6@Ɓ%Z&0qA AQ'ow\,bXȢ0Ԯ]<`*ùQa@ HJ踹ьժUO?y]K4'1s$vM^%bTR׸8}ř&֟HHObK  4&}y z&)OfΈנCk`=ҖXboO?9r@۶m}$I ɣI_f͚jh%"".%D>#$j5嗤=zKTٲe?~6.vUW[n1?ͣ5qy&z=/_>F_upIA@MS׭((QВTגrT%u~=rHw(_gY<  H5:RIHBI%ª[y6OayVB|7Y4k,t'N8$5IT:qZժUhԯ_\4e˖ٳ&I%JMΝ3:TPnI*i~(Ѥ3gI哴>GLzKҭ>%bGơC_\#…UI铢';ejL^#F0$F$@$| >c@$@$p={C%~]sf}9W$D@GO=G'bӑbXjrkg[˖-!Zܹo %b%bիWǍ7h %$EQ_b5vXO>1"@2HJTrN+T;wĴïJ[P!:իllܸѬСɆ bƱ;:)_Gi~HH 0,3G!  gz-X͛#XcDo㽹r݆ > $.H|uѼf |xtƌI\j"-,F󞸶$ 6ũ* 8q :8EұtX *V @` a$@$@ Ug&OioMhlOO'MH[ fDLWb}מyu6  4!@V`$@$@PѺ=ڵ־ĚF9 gPAX㪺EP +zkZLL kl[  tA)6s$@$NpZ93aU#y%ԝ4θ`7i7+$ ($C"mf*mVRM4W3 - @T+n'7C$@I83ymddn$%[$_" C    )7'# HH=j"XݟgЄM"X-Yıt^E]IHHH#.p $@$N 8 @-^ Wwx *e*ǎ!XW^U{fHHHHH PNƹHHH%NqΫ~ylst^7$@$@$@$@$JJ%8v# H=?p= r /~o ӞC} ca]{m"    ȸO\% D pk< *Wjv`u@UԼr    $ϹIH Pv=avQIX}7i_e{HHHHH JڜH1py,y5zx:ԁPVjWϭ @@ƱHHH 19x 7 vgk!Á `w{>*M @8+@$@QL@= o'ž8vvQ[PsjmE>1    5:BM @:"Μm'MݦUT>p=E>9    5:BM @:"6쩓`h;W?5X*pgT#    P`P|$@$_~ RQk }tg称.IHHHH  %mE$@$o+zYT"    $`䜛HHz 9ؽ{Dv    ; [ Dޞ3?2|)ε @{*.HH  Ӏ_~p\D$@$@$@$@QAXQq   bb{p]Vdؽ#-9IHHHH ]V$ @0־9& KH%KPdI9s&JwRgH _^X b8; D9:s{$@@NisBlxS}+G`j{#{v$$    Vs  ɓ'H"(W\' \AL*' O Wrv#    HF`%ې @ؼy3:wU*R'sx0Y t^}HHHHXs/ tJD޽?`Ȑ!xgp k{k.C>@fPbEر*UW_-[  DbŰj*sbРAZ*s\nիW{(^8J*qtݻÝwމM-ZAؽ{=WٮծuD%    H%@V9H#pM7ǐK0aqLٶ;w{wa߾}ؾ};VXa^&M–-[hkFqQTP [n͛77-qBskȑ#͜ #<ǏhѢ=z4v1dʔ<6mqvIH2'G~XH#    >:Ϙ3 @Xeʒ% .o̙3+^S M65m>} o޼J& n=]pu\ D7:rw$@@!Zl a?tW_?q)ouY 8дu!w9YF hrMC+\Yf/ƍj". ԩ\raӦMFufI*ڵ 64)hm F$@$@$@$@$Z.B;%g# ׋_";}4ڴi ^0\RsID?c¾)9vpS7yդI_I5y     BIH RᅦDVbՔWz#T-2e *VKiӦȚ5+֬YÇTR3g7nlgώ3f%w֭K/Ν;cǎ\߅"ΰիWYf޽;.6ٹsq@ uA֭q)w8x6&M¦Mpj k׮ח$@$@$@$@$@H @P7j/Vd']U1=r:}4lق믿]t1y~w}פ݉#JM-Z@ő;wD|hذ!rʅ*1Z %KԮ],Qx;Gɒ%#OO۱cq]>IHHHH'@V\xH’# `]}5l$u0fkbŊ(*=FFr$w>#    8O,H"ҺON/d1QBv#mV,37]P8x 4իW7zYR?O>1 ᔘ%64hڊ+|>Sȑm۶5F*(~f,=     xX0 @xP11pV6S2C 09sbxo"9r 0WǍgZ_D]h_|֭[g+:S-O? e˖ᣏ>{hƒ?0f͚eG#>c Ⱥ2gd[Y=܃'"wܦ/ $NzHH (4hb5]Yґ<*? 
َ[Iv Iȧrʙ7pCwߡPBZa&49㏛(mF+4KCJ4     `* u>`Ƿ.+o^x6mG-'"YWeLtĉTKIbR< gg"; u4ՀڤWFs^K B7*^=UO?}5ja *U HHHHH 0HB@i%5aO{ ͂2O8z!L:՗B(Xu ȒK.QFAő%b4     #@ '\ qycaϞ^]!     tIXs$@N@:K®^-ܗ @Ё4HRG@ oGg X+n "     (!@VHnH :QmSY*uwtl      *]IH 6ť ^y%<W˱HHHHH"X|v !Xw lp7B$@$@$@$@$@K%$@$pm=y'e$@$@$@$@$@$]ݐ DgT8O5gX9sF\     >:Ϙ3 @c}([ʞ=vV~N7gBw!] S Ó P11p7s^uh {4:pws^y<'    &F`.& M@7 VB7x& p-Ӱoðnw'$@$@$@$@$@EȺ_\- @Pg©]jf؃+vyU'O{W ̙ŗ( FFb     P :u ;` SN. f:]u^v      "@Vd/H B~#Fn~>-BqTon=4     &ˆ}\< @8PǏP qسg`'>xCn̎ ?~HHHHH"#"q$@L@:WV5kSG[>w @#n\0 )g/%Ϛt^>9W[,ٶ<ΫBw"pEgibwJ=&Dc    Dt`E]I‚3gǟʮwnDdڡgEO i "Y&lC ZG m A2$D']*$Uuﬕ{=ovqgwD]liį#a/_^ޮ @>% SӥX"М<%c'\6 ﭹ)Vm1mbK @ @ t}~,@hN0'hgFdm#\ ?|Ɲ6:Z:Z~"GEGѴ @cYvK^hc#;>C>Z-:^):lxw-.ڏ|UYDֶ @-`w۾='@R.`>>'@{%Kuؑ3sNt@ۂś"52m" @̕J !עuȰُF'PϺY]bUjF8 @`./ xr^48.\ԞϏ#;+}=ѥ> @eV/O @1hyDuSI@sĈoXg @j/  #Ј 7G:9_ёs; @zK@[%@2oF-"Lm YUϓ"/yw @Z ێZ ÏD롇G 2>MbaFMz} @ Ow@'W>6r] *37e4'^жJp @lxD6tH^{,+"64}rO @@ xuG]hw#oG3 \A @@`S<//Z;~ا#6B @Z#-=TJy%D;yR @=(`Vb&/^r{E=3v  @XJ!@@K{ĢD1 @ V`6  @'7,V_fd"@ @b*Vr @@! F+ @  U~;Z?k[ŀUD5 @ ЃAl] -[cƌ:+ox!)/~E @`)c=Ϗ?|zKWP o *rA^{Fv*X @i6|>}z Cɯ.bs8  @&j֐g^zi^oY7pmb׎O>9b',~Q #~&>GxU)Q @*3 !пRtWv&)J;ӿ+4/Ys܏".+ PyJ!@ @X<'@.\o}3&^} ?yoE㟾Zk~H @@X]r6$+5\"G |" @5%`֚tx_"jyv^d[m; @I , @G'E~-)q @>si$@s܏"ZZOߪt#@ @z͉ P;|Ȏ?.  @`u @@@s¡1l @貀d. 
@ s~ȯFN] @B/t@bEUeޗ @~sjD @ @Z xVi0 @ @ 7FD @Vi0 @ @ 7FD @Vi0 @ @ 7FD @Vi0 @ @ 7FD @Vi0 @ @ 7FD@?1cF~qYguˈ_~lM v׏߿7NfϞ; @ j8D@];Dkk~^E%|RSO-%"~^   @6Si g.,gxOViҹ7pC~o|#f1w83ϏÇ?'P;W:th\s5{zzh{^q).ML',^88#Fć?/cƌ)2a„=ztq+VySN-j=: @z 簌G/~1}kcر4iR㏏/}K馛?O<_׿u:AJu>jo> ?qF .򢆓N:)nvm?):ꫯ.BEʭ*nU6uOR˪;UW]UlԞB;u]+m.(a駟.6kq;7Fm?O=[i/- @`wn~"l?^U/¢_ӞX)I@x0A:#㡇nث=tj6tͰa.J!R_.Vs+=n8eȐ!czkt^mV~o&L+ǐ(cl>Ւ6lO?~qZ=y;_5 @5hilLC~#M#ziURD}AWU\UU)ZdI\6׿"T:uj'?)ͻ "J&Ls)K_⊘6mZ S?)JtG 7mOWo?L~\W^yeq9R҆dMk)JI+&F)Jw' @j#61SE#OOoQgik6^{->tÇGZ6kOԶn[|aiU:g=Xsʱ ƠA+}bZT  @kX)+r @ @;I~$zWш  @` P#/<.]q{ahxB @@ERL_LogoLuminescence/man/extdata.Rd0000644000176200001440000000452013604173243015364 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \name{extdata} \alias{extdata} \title{Collection of External Data} \description{ Description and listing of data provided in the folder \code{data/extdata} } \details{ The \strong{R} package \code{Luminescence} includes a number of raw data files, which are mostly used in the example sections of appropriate functions. They are also used internally for testing corresponding functions using the \code{testthat} package (see files in \verb{tests/testthat/}) to ensure their operational reliability. 
\strong{Accessibility} If the \strong{R} package \code{Luminescence} is installed correctly the preferred way to access and use these data from within \strong{R} is as follows: \code{system.file("extdata/", package = "Luminescence")} \strong{Individual file descriptions} \emph{>>Daybreak_TestFile.DAT/.txt<<} \strong{Type:} raw measurement data \cr \strong{Device:} Daybreak OSL/TL reader\cr \strong{Measurement date:} unknown\cr \strong{Location:} unknown\cr \strong{Provided by:} unknown\cr \strong{Related R function(s):} \code{read_Daybreak2R()}\cr \strong{Reference:} unknown \emph{>>DorNie_0016.psl<<} \strong{Type:} raw measurement data \cr \strong{Device:} SUERC portable OSL reader \cr \strong{Measurement date:} 19/05/2016 \cr \strong{Location:} Dormagen-Nievenheim, Germany \cr \strong{Provided by:} Christoph Burow (University of Cologne) \cr \strong{Related R function(s):} \code{read_PSL2R()} \cr \strong{Reference:} unpublished \cr \strong{Additional information:} Sample measured at an archaeological site near \cr Dormagen-Nievenheim (Germany) during a practical course on Luminesence dating in 2016. \cr \emph{>>QNL84_2_bleached.txt}, \emph{QNL84_2_unbleached.txt<<} \strong{Type:} Test data for exponential fits \cr \strong{Reference:} Berger, G.W., Huntley, D.J., 1989. Test data for exponential fits. Ancient TL 7, 43-46. \cr \emph{>>STRB87_1_bleached.txt}, \emph{STRB87_1_unbleached.txt<<} \strong{Type:} Test data for exponential fits \cr \strong{Reference:} Berger, G.W., Huntley, D.J., 1989. Test data for exponential fits. Ancient TL 7, 43-46. 
\emph{>>XYSG_file.xysg} \strong{Type:} XSYG-file stump \cr **Info: ** XSYG-file with some basic curves to test functions \cr \strong{Reference:} no reference available } \keyword{datasets} Luminescence/man/plot_DRCSummary.Rd0000644000176200001440000000712613604173245016765 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_DRCSummary.R \name{plot_DRCSummary} \alias{plot_DRCSummary} \title{Create a Dose-Response Curve Summary Plot} \usage{ plot_DRCSummary( object, source_dose_rate = NULL, sel_curves = NULL, show_dose_points = FALSE, show_natural = FALSE, n = 51L, ... ) } \arguments{ \item{object}{\linkS4class{RLum.Results} object (\strong{required}): input object created by the function \link{analyse_SAR.CWOSL}. The input object can be provided as \link{list}.} \item{source_dose_rate}{\link{numeric} (\emph{optional}): allows to modify the axis and show values in Gy, instead seconds. Only a single numerical values is allowed.} \item{sel_curves}{\link{numeric} (optional): id of the curves to be plotting in its occuring order. A sequence can be provided for selecting, e.g., only every 2nd curve from the input object} \item{show_dose_points}{\link{logical} (with default): enable or disable plot of dose points in the graph} \item{show_natural}{\link{logical} (with default): enable or disable the plot of the natural Lx/Tx values} \item{n}{\link{integer} (with default): the number of x-values used to evaluate one curve object. 
Large numbers slow down the plotting process and are usually not needed} \item{...}{Further arguments and graphical parameters to be passed.} } \value{ An \linkS4class{RLum.Results} object is returned: Slot: \strong{@data}\cr \tabular{lll}{ \strong{OBJECT} \tab \strong{TYPE} \tab \strong{COMMENT}\cr \code{results} \tab \link{data.frame} \tab with dose and LxTx values \cr \code{data} \tab \linkS4class{RLum.Results} \tab original input data \cr } Slot: \strong{@info}\cr \tabular{lll}{ \strong{OBJECT} \tab \strong{TYPE} \tab \strong{COMMENT} \cr \code{call} \tab \code{call} \tab the original function call \cr \code{args} \tab \code{list} \tab arguments of the original function call \cr } \emph{Note: If the input object is a \link{list} a list of \linkS4class{RLum.Results} objects is returned.} } \description{ While analysing OSL SAR or pIRIR data the view on the data is limited usually to one dose-response curve (DRC) at the time for one aliquot. This function overcomes this limitation by plotting all DRC from an \linkS4class{RLum.Results} object created by the function \link{analyse_SAR.CWOSL} in one single plot. } \details{ If you want plot your DRC on an energy scale (dose in Gy), you can either use the option \code{source_dose_rate} provided below or your can SAR analysis with the dose points in Gy (better axis scaling). 
} \section{Function version}{ 0.2.1 } \examples{ #load data example data data(ExampleData.BINfileData, envir = environment()) #transform the values from the first position in a RLum.Analysis object object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) results <- analyse_SAR.CWOSL( object = object, signal.integral.min = 1, signal.integral.max = 2, background.integral.min = 900, background.integral.max = 1000, plot = FALSE ) ##plot only DRC plot_DRCSummary(results) } \seealso{ \linkS4class{RLum.Results}, \link{analyse_SAR.CWOSL} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France) \cr Christoph Burow, University of Cologne , RLum Developer Team} \section{How to cite}{ Kreutzer, S., Burow, C., 2020. plot_DRCSummary(): Create a Dose-Response Curve Summary Plot. Function version 0.2.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } Luminescence/man/convert_PSL2CSV.Rd0000644000176200001440000000336113604173244016571 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/convert_PSL2CSV.R \name{convert_PSL2CSV} \alias{convert_PSL2CSV} \title{Export PSL-file(s) to CSV-files} \usage{ convert_PSL2CSV(file, ...) 
} \arguments{ \item{file}{\link{character} (\strong{required}): name of the PSL-file to be converted to CSV-files} \item{...}{further arguments that will be passed to the function \link{read_PSL2R} and \link{write_RLum2CSV}} } \value{ The function returns either a CSV-file (or many of them) or for the option \code{export = FALSE} a list comprising objects of type \link{data.frame} and \link{matrix} } \description{ This function is a wrapper function around the functions \link{read_PSL2R} and \link{write_RLum2CSV} and it imports an PSL-file (SUERC portable OSL reader file format) and directly exports its content to CSV-files. If nothing is set for the argument \code{path} (\link{write_RLum2CSV}) the input folder will become the output folder. } \section{Function version}{ 0.1.0 } \examples{ \dontrun{ ##select your BIN-file file <- file.choose() ##convert convert_PSL2CSV(file) } } \seealso{ \linkS4class{RLum.Analysis}, \linkS4class{RLum.Data}, \linkS4class{RLum.Results}, \link[utils:write.table]{utils::write.table}, \link{write_RLum2CSV}, \link{read_PSL2R} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. convert_PSL2CSV(): Export PSL-file(s) to CSV-files. Function version 0.1.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \keyword{IO} Luminescence/man/ExampleData.SurfaceExposure.Rd0000644000176200001440000000767613604173243021260 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \name{ExampleData.SurfaceExposure} \alias{ExampleData.SurfaceExposure} \title{Example OSL surface exposure dating data} \format{A \link{list} with 4 elements: \tabular{ll}{ \strong{Element} \tab \strong{Content} \cr \verb{$sample_1} \tab A \link{data.frame} with 3 columns (depth, intensity, error) \cr \verb{$sample_2} \tab A \link{data.frame} with 3 columns (depth, intensity, error) \cr \verb{$set_1} \tab A \link{list} of 4 \link{data.frame}s, each representing a sample with different ages \cr \verb{$set_2} \tab A \link{list} of 5 \link{data.frame}s, each representing a sample with different ages \cr }} \source{ See examples for the code used to create the data sets. } \description{ A set of synthetic OSL surface exposure dating data to demonstrate the \link{fit_SurfaceExposure} functionality. See examples to reproduce the data interactively. 
} \details{ \strong{\verb{$sample_1}} \tabular{ccc}{ \strong{mu} \tab \strong{sigmaphi} \tab \strong{age} \cr 0.9 \tab 5e-10 \tab 10000 \cr } \strong{\verb{$sample_2}} \tabular{ccccc}{ \strong{mu} \tab \strong{sigmaphi} \tab \strong{age} \tab \strong{Dose rate} \tab \strong{D0} \cr 0.9 \tab 5e-10 \tab 10000 \tab 2.5 \tab 40 \cr } \strong{\verb{$set_1}} \tabular{ccc}{ \strong{mu} \tab \strong{sigmaphi} \tab \strong{ages} \cr 0.9 \tab 5e-10 \tab 1e3, 1e4, 1e5, 1e6 \cr } \strong{\verb{$set_2}} \tabular{ccccc}{ \strong{mu} \tab \strong{sigmaphi} \tab \strong{ages} \tab \strong{Dose rate} \tab \strong{D0} \cr 0.9 \tab 5e-10 \tab 1e2, 1e3, 1e4, 1e5, 1e6 \tab 1.0 \tab 40 \cr } } \examples{ ## ExampleData.SurfaceExposure$sample_1 sigmaphi <- 5e-10 age <- 10000 mu <- 0.9 x <- seq(0, 10, 0.1) fun <- exp(-sigmaphi * age * 365.25*24*3600 * exp(-mu * x)) set.seed(666) synth_1 <- data.frame(depth = x, intensity = jitter(fun, 1, 0.1), error = runif(length(x), 0.01, 0.2)) ## VALIDATE sample_1 fit_SurfaceExposure(synth_1, mu = mu, sigmaphi = sigmaphi) ## ExampleData.SurfaceExposure$sample_2 sigmaphi <- 5e-10 age <- 10000 mu <- 0.9 x <- seq(0, 10, 0.1) Ddot <- 2.5 / 1000 / 365.25 / 24 / 60 / 60 # 2.5 Gy/ka in Seconds D0 <- 40 fun <- (sigmaphi * exp(-mu * x) * exp(-(age * 365.25*24*3600) * (sigmaphi * exp(-mu * x) + Ddot/D0)) + Ddot/D0) / (sigmaphi * exp(-mu * x) + Ddot/D0) set.seed(666) synth_2 <- data.frame(depth = x, intensity = jitter(fun, 1, 0.1), error = runif(length(x), 0.01, 0.2)) ## VALIDATE sample_2 fit_SurfaceExposure(synth_2, mu = mu, sigmaphi = sigmaphi, Ddot = 2.5, D0 = D0) ## ExampleData.SurfaceExposure$set_1 sigmaphi <- 5e-10 mu <- 0.9 x <- seq(0, 15, 0.2) age <- c(1e3, 1e4, 1e5, 1e6) set.seed(666) synth_3 <- vector("list", length = length(age)) for (i in 1:length(age)) { fun <- exp(-sigmaphi * age[i] * 365.25*24*3600 * exp(-mu * x)) synth_3[[i]] <- data.frame(depth = x, intensity = jitter(fun, 1, 0.05)) } ## VALIDATE set_1 fit_SurfaceExposure(synth_3, age = age, 
sigmaphi = sigmaphi) ## ExampleData.SurfaceExposure$set_2 sigmaphi <- 5e-10 mu <- 0.9 x <- seq(0, 15, 0.2) age <- c(1e2, 1e3, 1e4, 1e5, 1e6) Ddot <- 1.0 / 1000 / 365.25 / 24 / 60 / 60 # 1.0 Gy/ka in Seconds D0 <- 40 set.seed(666) synth_4 <- vector("list", length = length(age)) for (i in 1:length(age)) { fun <- (sigmaphi * exp(-mu * x) * exp(-(age[i] * 365.25*24*3600) * (sigmaphi * exp(-mu * x) + Ddot/D0)) + Ddot/D0) / (sigmaphi * exp(-mu * x) + Ddot/D0) synth_4[[i]] <- data.frame(depth = x, intensity = jitter(fun, 1, 0.05)) } ## VALIDATE set_2 fit_SurfaceExposure(synth_4, age = age, sigmaphi = sigmaphi, D0 = D0, Ddot = 1.0) \dontrun{ ExampleData.SurfaceExposure <- list( sample_1 = synth_1, sample_2 = synth_2, set_1 = synth_3, set_2 = synth_4 ) } } \references{ Unpublished synthetic data } \keyword{datasets} Luminescence/man/as.Rd0000644000176200001440000000341713604173243014341 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/RLum.Analysis-class.R, % R/RLum.Data.Curve-class.R, R/RLum.Data.Image-class.R, % R/RLum.Data.Spectrum-class.R, R/RLum.Results-class.R \name{as} \alias{as} \title{as() - RLum-object coercion} \arguments{ \item{from}{\linkS4class{RLum}, \link{list}, \link{data.frame}, \link{matrix} (\strong{required}): object to be coerced from} \item{to}{\link{character} (\strong{required}): class name to be coerced to} } \description{ for \verb{[RLum.Analysis-class]} for \verb{[RLum.Data.Curve-class]} for \verb{[RLum.Data.Image-class]} for \verb{[RLum.Data.Spectrum-class]} for \verb{[RLum.Results-class]} } \details{ \strong{\linkS4class{RLum.Analysis}} \tabular{ll}{ \strong{from} \tab \strong{to}\cr \code{list} \tab \code{list}\cr } Given that the \link{list} consists of \linkS4class{RLum.Analysis} objects. 
\strong{\linkS4class{RLum.Data.Curve}} \tabular{ll}{ \strong{from} \tab \strong{to}\cr \code{list} \tab \code{list} \cr \code{data.frame} \tab \code{data.frame}\cr \code{matrix} \tab \code{matrix} } \strong{\linkS4class{RLum.Data.Image}} \tabular{ll}{ \strong{from} \tab \strong{to}\cr \code{data.frame} \tab \code{data.frame}\cr \code{matrix} \tab \code{matrix} } \strong{\linkS4class{RLum.Data.Spectrum}} \tabular{ll}{ \strong{from} \tab \strong{to}\cr \code{data.frame} \tab \code{data.frame}\cr \code{matrix} \tab \code{matrix} } \strong{\linkS4class{RLum.Results}} \tabular{ll}{ \strong{from} \tab \strong{to}\cr \code{list} \tab \code{list}\cr } Given that the \link{list} consists of \linkS4class{RLum.Results} objects. } \note{ Due to the complex structure of the \code{RLum} objects themselves, a coercion to standard R data structures will always be lossy! } \seealso{ \link[methods:as]{methods::as} } Luminescence/man/read_XSYG2R.Rd0000644000176200001440000001530513604173245015730 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/read_XSYG2R.R \name{read_XSYG2R} \alias{read_XSYG2R} \title{Import XSYG files to R} \usage{ read_XSYG2R( file, recalculate.TL.curves = TRUE, fastForward = FALSE, import = TRUE, pattern = ".xsyg", verbose = TRUE, txtProgressBar = TRUE ) } \arguments{ \item{file}{\link{character} or \link{list} (\strong{required}): path and file name of the XSYG file. If input is a \code{list} it should comprise only \code{character}s representing each valid path and xsyg-file names. Alternatively the input character can be just a directory (path), in this case the function tries to detect and import all xsyg files found in the directory.} \item{recalculate.TL.curves}{\link{logical} (\emph{with default}): if set to \code{TRUE}, TL curves are returned as temperature against count values (see details for more information) Note: The option overwrites the time vs. count TL curve. 
Select \code{FALSE} to import the raw data delivered by the lexsyg. Works for TL curves and spectra.} \item{fastForward}{\link{logical} (\emph{with default}): if \code{TRUE} for a more efficient data processing only a list of \code{RLum.Analysis} objects is returned.} \item{import}{\link{logical} (\emph{with default}): if set to \code{FALSE}, only the XSYG file structure is shown.} \item{pattern}{\link{regex} (\emph{with default}): optional regular expression if \code{file} is a link to a folder, to select just specific XSYG-files} \item{verbose}{\link{logical} (\emph{with default}): enable or disable verbose mode. If verbose is \code{FALSE} the \code{txtProgressBar} is also switched off} \item{txtProgressBar}{\link{logical} (\emph{with default}): enables \code{TRUE} or disables \code{FALSE} the progression bar during import} } \value{ \strong{Using the option \code{import = FALSE}} A list consisting of two elements is shown: \itemize{ \item \link{data.frame} with information on file. \item \link{data.frame} with information on the sequences stored in the XSYG file. } \strong{Using the option \code{import = TRUE} (default)} A list is provided, the list elements contain: \item{Sequence.Header}{\link{data.frame} with information on the sequence.} \item{Sequence.Object}{\linkS4class{RLum.Analysis} containing the curves.} } \description{ Imports XSYG files produced by a Freiberg Instrument lexsyg reader into R. } \details{ \strong{How does the import function work?} The function uses the \link{xml} package to parse the file structure. Each sequence is subsequently translated into an \linkS4class{RLum.Analysis} object. \strong{General structure XSYG format}\preformatted{ x0 , y0 ; x1 , y1 ; x2 , y2 ; x3 , y3 } So far, each XSYG file can only contain one \verb{}, but multiple sequences. Each record may comprise several curves. \strong{TL curve recalculation} On the FI lexsyg device TL curves are recorded as time against count values. 
Temperature values are monitored on the heating plate and stored in a separate curve (time vs. temperature). If the option \code{recalculate.TL.curves = TRUE} is chosen, the time values for each TL curve are replaced by temperature values. Practically, this means combining two matrices (Time vs. Counts and Time vs. Temperature) with different row numbers by their time values. Three cases are considered: \enumerate{ \item HE: Heating element \item PMT: Photomultiplier tube \item Interpolation is done using the function \link{approx} } CASE (1): \code{nrow(matrix(PMT))} > \code{nrow(matrix(HE))} Missing temperature values from the heating element are calculated using time values from the PMT measurement. CASE (2): \code{nrow(matrix(PMT))} < \code{nrow(matrix(HE))} Missing count values from the PMT are calculated using time values from the heating element measurement. CASE (3): \code{nrow(matrix(PMT))} == \code{nrow(matrix(HE))} A new matrix is produced using temperature values from the heating element and count values from the PMT. \strong{Note:} Please note that due to the recalculation of the temperature values based on values delivered by the heating element, it may happen that mutiple count values exists for each temperature value and temperature values may also decrease during heating, not only increase. \strong{Advanced file import} To allow for a more efficient usage of the function, instead of single path to a file just a directory can be passed as input. In this particular case the function tries to extract all XSYG-files found in the directory and import them all. Using this option internally the function constructs as list of the XSYG-files found in the directory. Please note no recursive detection is supported as this may lead to endless loops. } \note{ This function is a beta version as the XSYG file format is not yet fully specified. 
Thus, further file operations (merge, export, write) should be done using the functions provided with the package \link{xml}. \strong{So far, no image data import is provided!} \cr Corresponding values in the XSXG file are skipped. } \section{Function version}{ 0.6.8 } \examples{ ##(1) import XSYG file to R (uncomment for usage) #FILE <- file.choose() #temp <- read_XSYG2R(FILE) ##(2) additional examples for pure XML import using the package XML ## (uncomment for usage) ##import entire XML file #FILE <- file.choose() #temp <- XML::xmlRoot(XML::xmlTreeParse(FILE)) ##search for specific subnodes with curves containing 'OSL' #getNodeSet(temp, "//Sample/Sequence/Record[@recordType = 'OSL']/Curve") ##(2) How to extract single curves ... after import data(ExampleData.XSYG, envir = environment()) ##grep one OSL curves and plot the first curve OSLcurve <- get_RLum(OSL.SARMeasurement$Sequence.Object, recordType="OSL")[[1]] ##(3) How to see the structure of an object? structure_RLum(OSL.SARMeasurement$Sequence.Object) } \section{How to cite}{ Kreutzer, S., 2020. read_XSYG2R(): Import XSYG files to R. Function version 0.6.8. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Grehl, S., Kreutzer, S., Hoehne, M., 2013. Documentation of the XSYG file format. Unpublished Technical Note. 
Freiberg, Germany \strong{Further reading} XML: \url{http://en.wikipedia.org/wiki/XML} } \seealso{ \link{xml}, \linkS4class{RLum.Analysis}, \linkS4class{RLum.Data.Curve}, \link{approx} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} \keyword{IO} Luminescence/man/calc_OSLLxTxRatio.Rd0000644000176200001440000001554313604173244017200 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_OSLLxTxRatio.R \name{calc_OSLLxTxRatio} \alias{calc_OSLLxTxRatio} \title{Calculate Lx/Tx ratio for CW-OSL curves} \usage{ calc_OSLLxTxRatio( Lx.data, Tx.data = NULL, signal.integral, signal.integral.Tx = NULL, background.integral, background.integral.Tx = NULL, background.count.distribution = "non-poisson", use_previousBG = FALSE, sigmab = NULL, sig0 = 0, digits = NULL ) } \arguments{ \item{Lx.data}{\linkS4class{RLum.Data.Curve} or \link{data.frame} (\strong{required}): requires a CW-OSL shine down curve (x = time, y = counts)} \item{Tx.data}{\linkS4class{RLum.Data.Curve} or \link{data.frame} (\emph{optional}): requires a CW-OSL shine down curve (x = time, y = counts). If no input is given the Tx.data will be treated as \code{NA} and no Lx/Tx ratio is calculated.} \item{signal.integral}{\link{vector} (\strong{required}): vector with the limits for the signal integral.} \item{signal.integral.Tx}{\link{vector} (\emph{optional}): vector with the limits for the signal integral for the Tx curve. If nothing is provided the value from \code{signal.integral} is used.} \item{background.integral}{\link{vector} (\strong{required}): vector with the bounds for the background integral.} \item{background.integral.Tx}{\link{vector} (\emph{optional}): vector with the limits for the background integral for the Tx curve. 
If nothing is provided the value from \code{background.integral} is used.} \item{background.count.distribution}{\link{character} (\emph{with default}): sets the count distribution assumed for the error calculation. Possible arguments \code{poisson} or \code{non-poisson}. See details for further information} \item{use_previousBG}{\link{logical} (\emph{with default}): If set to \code{TRUE} the background of the Lx-signal is subtracted also from the Tx-signal. Please note that in this case separate signal integral limits for the Tx signal are not allowed and will be reset.} \item{sigmab}{\link{numeric} (\emph{optional}): option to set a manual value for the overdispersion (for LnTx and TnTx), used for the Lx/Tx error calculation. The value should be provided as absolute squared count values, e.g. \code{sigmab = c(300,300)}. \strong{Note:} If only one value is provided this value is taken for both (LnTx and TnTx) signals.} \item{sig0}{\link{numeric} (\emph{with default}): allow adding an extra component of error to the final Lx/Tx error value (e.g., instrumental error, see details).} \item{digits}{\link{integer} (\emph{with default}): round numbers to the specified digits. If digits is set to \code{NULL} nothing is rounded.} } \value{ Returns an S4 object of type \linkS4class{RLum.Results}. Slot \code{data} contains a \link{list} with the following structure: \strong{@data}\preformatted{$LxTx.table (data.frame) .. $ LnLx .. $ LnLx.BG .. $ TnTx .. $ TnTx.BG .. $ Net_LnLx .. $ Net_LnLx.Error .. $ Net_TnTx.Error .. $ LxTx .. $ LxTx.Error $ calc.parameters (list) .. $ sigmab.LnTx .. $ sigmab.TnTx .. $ k } \strong{@info}\preformatted{$ call (original function call) } } \description{ Calculate Lx/Tx ratios from a given set of CW-OSL curves assuming late light background subtraction. } \details{ The integrity of the chosen values for the signal and background integral is checked by the function; the signal integral limits have to be lower than the background integral limits. 
If a \link{vector} is given as input instead of a \link{data.frame}, an artificial \link{data.frame} is produced. The error calculation is done according to Galbraith (2002). \strong{Please note:} In cases where the calculation results in \code{NaN} values (for example due to zero-signal, and therefore a division of 0 by 0), these \code{NaN} values are replaced by 0. \strong{sigmab} The default value of \code{sigmab} is calculated assuming the background is constant and \strong{would not} applicable when the background varies as, e.g., as observed for the early light substraction method. \strong{sig0} This argument allows to add an extra component of error to the final Lx/Tx error value. The input will be treated as factor that is multiplied with the already calculated LxTx and the result is add up by: \deqn{se(LxTx) = \sqrt(se(LxTx)^2 + (LxTx * sig0)^2)} \strong{background.count.distribution} This argument allows selecting the distribution assumption that is used for the error calculation. According to Galbraith (2002, 2014) the background counts may be overdispersed (i.e. do not follow a poisson distribution, which is assumed for the photomultiplier counts). In that case (might be the normal case) it has to be accounted for the overdispersion by estimating \eqn{\sigma^2} (i.e. the overdispersion value). Therefore the relative standard error is calculated as: \itemize{ \item \code{poisson} \deqn{rse(\mu_{S}) \approx \sqrt(Y_{0} + Y_{1}/k^2)/Y_{0} - Y_{1}/k} \item \code{non-poisson} \deqn{rse(\mu_{S}) \approx \sqrt(Y_{0} + Y_{1}/k^2 + \sigma^2(1+1/k))/Y_{0} - Y_{1}/k} } \strong{Please note} that when using the early background subtraction method in combination with the 'non-poisson' distribution argument, the corresponding Lx/Tx error may considerably increase due to a high sigmab value. Please check whether this is valid for your data set and if necessary consider to provide an own sigmab value using the corresponding argument \code{sigmab}. 
} \note{ The results of this function have been cross-checked with the Analyst (vers. 3.24b). Access to the results object via \link{get_RLum}. \strong{Caution:} If you are using early light subtraction (EBG), please either provide your own \code{sigmab} value or use \code{background.count.distribution = "poisson"}. } \section{Function version}{ 0.7.0 } \examples{ ##load data data(ExampleData.LxTxOSLData, envir = environment()) ##calculate Lx/Tx ratio results <- calc_OSLLxTxRatio(Lx.data, Tx.data, signal.integral = c(1:2), background.integral = c(85:100)) ##get results object get_RLum(results) } \section{How to cite}{ Kreutzer, S., 2020. calc_OSLLxTxRatio(): Calculate Lx/Tx ratio for CW-OSL curves. Function version 0.7.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Duller, G., 2018. Analyst v4.57 - User Manual. \url{http://users.aber.ac.uk/ggd/}\cr Galbraith, R.F., 2002. A note on the variance of a background-corrected OSL count. Ancient TL, 20 (2), 49-51. Galbraith, R.F., 2014. A further note on the variance of a background-corrected OSL count. Ancient TL, 31 (2), 1-3. 
} \seealso{ \linkS4class{RLum.Data.Curve}, \link{Analyse_SAR.OSLdata}, \link{plot_GrowthCurve}, \link{analyse_SAR.CWOSL} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \keyword{datagen} Luminescence/man/analyse_Al2O3C_Measurement.Rd0000644000176200001440000001473213604173243020744 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/analyse_Al2O3C_Measurement.R \name{analyse_Al2O3C_Measurement} \alias{analyse_Al2O3C_Measurement} \title{Al2O3:C Passive Dosimeter Measurement Analysis} \usage{ analyse_Al2O3C_Measurement( object, signal_integral = NULL, dose_points = c(0, 4), recordType = c("OSL (UVVIS)", "TL (UVVIS)"), calculate_TL_dose = FALSE, irradiation_time_correction = NULL, cross_talk_correction = NULL, travel_dosimeter = NULL, test_parameters = NULL, verbose = TRUE, plot = TRUE, ... ) } \arguments{ \item{object}{\linkS4class{RLum.Analysis} \strong{(required)}: measurement input} \item{signal_integral}{\link{numeric} (\emph{optional}): signal integral, used for the signal and the background. Example: \code{c(1:10)} for the first 10 channels. If nothing is provided the full range is used} \item{dose_points}{\link{numeric} (\emph{with default}): vector with dose points, if dose points are repeated, only the general pattern needs to be provided. Default values follow the suggestions made by Kreutzer et al., 2018} \item{recordType}{\link{character} (\emph{with default}): input curve selection, which is passed to function \link{get_RLum}. To deactivate the automatic selection set the argument to \code{NULL}} \item{calculate_TL_dose}{\link{logical} (\emph{with default}): Enables/disables experimental dose estimation based on the TL curves. 
Taken is the ratio of the peak sums of each curve +/- 5 channels.} \item{irradiation_time_correction}{\link{numeric} or \linkS4class{RLum.Results} (\emph{optional}): information on the used irradiation time correction obtained by other experiments. If a \code{numeric} is provided it has to be of length two: mean, standard error} \item{cross_talk_correction}{\link{numeric} or \linkS4class{RLum.Results} (\emph{optional}): information on the used cross-talk correction obtained by other experiments. If a \code{numeric} vector is provided it has to be of length three: mean, 2.5 \% quantile, 97.5 \% quantile.} \item{travel_dosimeter}{\link{numeric} (\emph{optional}): specify the position of the travel dosimeter (so far measured at the same time). The dose of the travel dosimeter will be subtracted from all other values.} \item{test_parameters}{\link{list} (\emph{with default}): set test parameters. Supported parameters are: \code{TL_peak_shift} All input: \link{numeric} values, \code{NA} and \code{NULL} (s. 
Details)} \item{verbose}{\link{logical} (\emph{with default}): enable/disable verbose mode} \item{plot}{\link{logical} (\emph{with default}): enable/disable plot output, if \code{object} is of type \link{list}, a \link{numeric} vector can be provided to limit the plot output to certain aliquots} \item{...}{further arguments that can be passed to the plot output, supported are \code{norm}, \code{main}, \code{mtext}, \code{title} (for self-call mode to specify, e.g., sample names)} } \value{ Function returns results numerically and graphically: -----------------------------------\cr \verb{[ NUMERICAL OUTPUT ]}\cr -----------------------------------\cr \strong{\code{RLum.Results}}-object \strong{slot:} \strong{\verb{@data}} \tabular{lll}{ \strong{Element} \tab \strong{Type} \tab \strong{Description}\cr \verb{$data} \tab \code{data.frame} \tab the estimated equivalent dose \cr \verb{$data_table} \tab \code{data.frame} \tab full dose and signal table \cr \code{test_parameters} \tab \code{data.frame} \tab results with test paramaters \cr \code{data_TDcorrected} \tab \code{data.frame} \tab travel dosimeter corrected results (only if TD was provided)\cr } \emph{Note: If correction the irradiation time and the cross-talk correction method is used, the De values in the table \code{data} table are already corrected, i.e. if you want to get an uncorrected value, you can use the column \code{CT_CORRECTION} remove the correction} \strong{slot:} \strong{\verb{@info}} The original function call ------------------------\cr \verb{[ PLOT OUTPUT ]}\cr ------------------------\cr \itemize{ \item OSL and TL curves, combined on two plots. } } \description{ The function provides the analysis routines for measurements on a FI lexsyg SMART reader using Al2O3:C chips according to Kreutzer et al., 2018 } \details{ \strong{Working with a travel dosimeter} The function allows to define particular aliquots as travel dosimeters. 
For example: \code{travel_dosimeter = c(1,3,5)} sets aliquots 1, 3 and 5 as travel dosimeters. The dose values of these dosimeters are combined and automatically subtracted from the obtained dose values of the other dosimeters. \strong{Calculate TL dose} The argument \code{calculate_TL_dose} provides the possibility to experimentally calculate a TL-dose, i.e. an apparent dose value derived from the TL curve ratio. However, it should be noted that this value is only a fallback in case something went wrong during the measurement of the optical stimulation. The TL derived dose value is corrected for cross-talk and for the irradiation time, but not considered if a travel dosimeter is defined. \strong{Test parameters} \code{TL_peak_shift} \link{numeric} (default: \code{15}): Checks whether the TL peak shift is bigger than 15 K, indicating a problem with the thermal contact of the chip. \code{stimulation_power} \link{numeric} (default: \code{0.05}): So far available, information on the delivered optical stimulation is compared. Compared is the information from the first curve with all others. If the ratio differs more from unity than defined by the threshold, a warning is returned. } \section{Function version}{ 0.2.5 } \examples{ ##load data data(ExampleData.Al2O3C, envir = environment()) ##run analysis analyse_Al2O3C_Measurement(data_CrossTalk) } \section{How to cite}{ Kreutzer, S., 2020. analyse_Al2O3C_Measurement(): Al2O3:C Passive Dosimeter Measurement Analysis. Function version 0.2.5. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Kreutzer, S., Martin, L., Guérin, G., Tribolo, C., Selva, P., Mercier, N., 2018. Environmental Dose Rate Determination Using a Passive Dosimeter: Techniques and Workflow for alpha-Al2O3:C Chips. Geochronometria 45, 56-67. 
\doi{10.1515/geochr-2015-0086} } \seealso{ \link{analyse_Al2O3C_ITC} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} \keyword{datagen} Luminescence/man/analyse_SAR.TL.Rd0000644000176200001440000001170613604173243016415 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/analyse_SAR.TL.R \name{analyse_SAR.TL} \alias{analyse_SAR.TL} \title{Analyse SAR TL measurements} \usage{ analyse_SAR.TL( object, object.background, signal.integral.min, signal.integral.max, integral_input = "channel", sequence.structure = c("PREHEAT", "SIGNAL", "BACKGROUND"), rejection.criteria = list(recycling.ratio = 10, recuperation.rate = 10), dose.points, log = "", ... ) } \arguments{ \item{object}{\linkS4class{RLum.Analysis} or a \link{list} of such objects (\strong{required}) : input object containing data for analysis} \item{object.background}{currently not used} \item{signal.integral.min}{\link{integer} (\strong{required}): requires the channel number for the lower signal integral bound (e.g. \code{signal.integral.min = 100})} \item{signal.integral.max}{\link{integer} (\strong{required}): requires the channel number for the upper signal integral bound (e.g. \code{signal.integral.max = 200})} \item{integral_input}{\link{character} (\emph{with default}): defines the input for the the arguments \code{signal.integral.min} and \code{signal.integral.max}. These limits can be either provided \code{'channel'} number (the default) or \code{'temperature'}. If \code{'temperature'} is chosen the best matching channel is selected.} \item{sequence.structure}{\link{vector} \link{character} (\emph{with default}): specifies the general sequence structure. Three steps are allowed (\code{"PREHEAT"}, \code{"SIGNAL"}, \code{"BACKGROUND"}), in addition a parameter \code{"EXCLUDE"}. This allows excluding TL curves which are not relevant for the protocol analysis. 
(\strong{Note:} None TL are removed by default)} \item{rejection.criteria}{\link{list} (\emph{with default}): list containing rejection criteria in percentage for the calculation.} \item{dose.points}{\link{numeric} (\emph{optional}): option set dose points manually} \item{log}{\link{character} (\emph{with default}): a character string which contains "x" if the x axis is to be logarithmic, "y" if the y axis is to be logarithmic and "xy" or "yx" if both axes are to be logarithmic. See \link{plot.default}).} \item{...}{further arguments that will be passed to the function \link{plot_GrowthCurve}} } \value{ A plot (\emph{optional}) and an \linkS4class{RLum.Results} object is returned containing the following elements: \item{De.values}{\link{data.frame} containing De-values and further parameters} \item{LnLxTnTx.values}{\link{data.frame} of all calculated Lx/Tx values including signal, background counts and the dose points.} \item{rejection.criteria}{\link{data.frame} with values that might by used as rejection criteria. NA is produced if no R0 dose point exists.} \strong{note:} the output should be accessed using the function \link{get_RLum} } \description{ The function performs a SAR TL analysis on a \linkS4class{RLum.Analysis} object including growth curve fitting. } \details{ This function performs a SAR TL analysis on a set of curves. The SAR procedure in general is given by Murray and Wintle (2000). For the calculation of the Lx/Tx value the function \link{calc_TLLxTxRatio} is used. \strong{Provided rejection criteria} \verb{[recyling.ratio]}: calculated for every repeated regeneration dose point. \verb{[recuperation.rate]}: recuperation rate calculated by comparing the Lx/Tx values of the zero regeneration point with the Ln/Tn value (the Lx/Tx ratio of the natural signal). For methodological background see Aitken and Smith (1988) } \note{ \strong{THIS IS A BETA VERSION} None TL curves will be removed from the input object without further warning. 
} \section{Function version}{ 0.3.0 } \examples{ ##load data data(ExampleData.BINfileData, envir = environment()) ##transform the values from the first position in a RLum.Analysis object object <- Risoe.BINfileData2RLum.Analysis(TL.SAR.Data, pos=3) ##perform analysis analyse_SAR.TL( object = object, signal.integral.min = 210, signal.integral.max = 220, fit.method = "EXP OR LIN", sequence.structure = c("SIGNAL", "BACKGROUND")) } \section{How to cite}{ Kreutzer, S., 2020. analyse_SAR.TL(): Analyse SAR TL measurements. Function version 0.3.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Aitken, M.J. and Smith, B.W., 1988. Optical dating: recuperation after bleaching. Quaternary Science Reviews 7, 387-393. Murray, A.S. and Wintle, A.G., 2000. Luminescence dating of quartz using an improved single-aliquot regenerative-dose protocol. Radiation Measurements 32, 57-73. } \seealso{ \link{calc_TLLxTxRatio}, \link{plot_GrowthCurve}, \linkS4class{RLum.Analysis}, \linkS4class{RLum.Results}, \link{get_RLum} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CRNS-Universite Bordeaux Montaigne (France) , RLum Developer Team} \keyword{datagen} \keyword{plot} Luminescence/man/analyse_Al2O3C_ITC.Rd0000644000176200001440000001140613604173243017071 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/analyse_Al2O3C_ITC.R \name{analyse_Al2O3C_ITC} \alias{analyse_Al2O3C_ITC} \title{Al2O3 Irradiation Time Correction Analysis} \usage{ analyse_Al2O3C_ITC( object, signal_integral = NULL, dose_points = c(2, 4, 8, 12, 16), recordType = c("OSL (UVVIS)"), method_control = NULL, verbose = TRUE, plot = TRUE, ... ) } \arguments{ \item{object}{\linkS4class{RLum.Analysis} or \link{list} \strong{(required)}: results obtained from the measurement. 
Alternatively a list of 'RLum.Analysis' objects can be provided to allow an automatic analysis.} \item{signal_integral}{\link{numeric} (\emph{optional}): signal integral, used for the signal and the background. If nothing is provided the full range is used. Argument can be provided as \link{list}.} \item{dose_points}{\link{numeric} (\emph{with default}): vector with dose points, if dose points are repeated, only the general pattern needs to be provided. Default values follow the suggestions made by Kreutzer et al., 2018. Argument can be provided as \link{list}.} \item{recordType}{\link{character} (\emph{with default}): input curve selection, which is passed to function \link{get_RLum}. To deactivate the automatic selection set the argument to \code{NULL}} \item{method_control}{\link{list} (\emph{optional}): optional parameters to control the calculation. See details for further explanations} \item{verbose}{\link{logical} (\emph{with default}): enable/disable verbose mode} \item{plot}{\link{logical} (\emph{with default}): enable/disable plot output} \item{...}{further arguments that can be passed to the plot output} } \value{ Function returns results numerically and graphically: -----------------------------------\cr \verb{[ NUMERICAL OUTPUT ]}\cr -----------------------------------\cr \strong{\code{RLum.Results}}-object \strong{slot:} \strong{\verb{@data}} \tabular{lll}{ \strong{Element} \tab \strong{Type} \tab \strong{Description}\cr \verb{$data} \tab \code{data.frame} \tab correction value and error \cr \verb{$table} \tab \code{data.frame} \tab table used for plotting \cr \verb{$table_mean} \tab \code{data.frame} \tab table used for fitting \cr \verb{$fit} \tab \code{lm} or \code{nls} \tab the fitting as returned by the function \link{plot_GrowthCurve} } \strong{slot:} \strong{\verb{@info}} The original function call ------------------------\cr \verb{[ PLOT OUTPUT ]}\cr ------------------------\cr \itemize{ \item A dose response curve with the marked correction 
values } } \description{ The function provides a very particular analysis to correct the irradiation time while irradiating Al2O3:C chips in a luminescence reader. } \details{ Background: Due to their high dose sensitivity Al2O3:C chips are usually irradiated for only a very short duration or under the closed beta-source within a luminescence reader. However, due to its high dose sensitivity, during the movement towards the beta-source, the pellet already receives a non-negligible dose. Based on measurements following a protocol suggested by Kreutzer et al., 2018, a dose response curve is constructed and the intersection (absolute value) with the time axis is taken as real irradiation time. \strong{\code{method_control}} To keep the generic argument list as clear as possible, arguments to allow a deeper control of the method are all preset with meaningful default parameters and can be handled using the argument \code{method_control} only, e.g., \code{method_control = list(fit.method = "LIN")}. Supported arguments are: \tabular{lll}{ \strong{ARGUMENT} \tab \strong{FUNCTION} \tab \strong{DESCRIPTION}\cr \code{mode} \tab \code{plot_GrowthCurve} \tab as in \link{plot_GrowthCurve}; sets the mode used for fitting\cr \code{fit.method} \tab \code{plot_GrowthCurve} \tab as in \link{plot_GrowthCurve}; sets the function applied for fitting\cr } } \section{Function version}{ 0.1.1 } \examples{ ##load data data(ExampleData.Al2O3C, envir = environment()) ##run analysis analyse_Al2O3C_ITC(data_ITC) } \section{How to cite}{ Kreutzer, S., 2020. analyse_Al2O3C_ITC(): Al2O3 Irradiation Time Correction Analysis. Function version 0.1.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Kreutzer, S., Martin, L., Guérin, G., Tribolo, C., Selva, P., Mercier, N., 2018. 
Environmental Dose Rate Determination Using a Passive Dosimeter: Techniques and Workflow for alpha-Al2O3:C Chips. Geochronometria 45, 56-67. doi: 10.1515/geochr-2015-0086 } \seealso{ \link{plot_GrowthCurve} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France) , RLum Developer Team} \keyword{datagen} Luminescence/man/CW2pLMi.Rd0000644000176200001440000001223713604173244015114 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/CW2pLMi.R \name{CW2pLMi} \alias{CW2pLMi} \title{Transform a CW-OSL curve into a pLM-OSL curve via interpolation under linear modulation conditions} \usage{ CW2pLMi(values, P) } \arguments{ \item{values}{\linkS4class{RLum.Data.Curve} or \link{data.frame} (\strong{required}): \linkS4class{RLum.Data.Curve} or \code{data.frame} with measured curve data of type stimulation time (t) (\code{values[,1]}) and measured counts (cts) (\code{values[,2]})} \item{P}{\link{vector} (\emph{optional}): stimulation time in seconds. If no value is given the optimal value is estimated automatically (see details). Greater values of P produce more points in the rising tail of the curve.} } \value{ The function returns the same data type as the input data type with the transformed curve values. \strong{\code{RLum.Data.Curve}} \tabular{rl}{ \verb{$CW2pLMi.x.t} \tab: transformed time values \cr \verb{$CW2pLMi.method} \tab: used method for the production of the new data points } } \description{ Transforms a conventionally measured continuous-wave (CW) OSL-curve into a pseudo linearly modulated (pLM) curve under linear modulation conditions using the interpolation procedure described by Bos & Wallinga (2012). } \details{ The complete procedure of the transformation is given in Bos & Wallinga (2012). 
The input \code{data.frame} consists of two columns: time (t) and count values (CW(t)) \strong{Nomenclature} \itemize{ \item P = stimulation time (s) \item 1/P = stimulation rate (1/s) } \strong{Internal transformation steps} (1) log(CW-OSL) values (2) Calculate t' which is the transformed time: \deqn{t' = 1/2*1/P*t^2} (3) Interpolate CW(t'), i.e. use the log(CW(t)) to obtain the count values for the transformed time (t'). Values beyond \code{min(t)} and \code{max(t)} produce \code{NA} values. (4) Select all values for t' < \code{min(t)}, i.e. values beyond the time resolution of t. Select the first two values of the transformed data set which contain no \code{NA} values and use these values for a linear fit using \link{lm}. (5) Extrapolate values for t' < \code{min(t)} based on the previously obtained fit parameters. (6) Transform values using \deqn{pLM(t) = t/P*CW(t')} (7) Combine values and truncate all values for t' > \code{max(t)} \strong{NOTE:} The number of values for t' < \code{min(t)} depends on the stimulation period (P) and therefore on the stimulation rate 1/P. To avoid the production of too many artificial data at the raising tail of the determined pLM curves it is recommended to use the automatic estimation routine for \code{P}, i.e. provide no own value for \code{P}. } \note{ According to Bos & Wallinga (2012) the number of extrapolated points should be limited to avoid artificial intensity data. If \code{P} is provided manually and more than two points are extrapolated, a warning message is returned. } \section{Function version}{ 0.3.1 } \examples{ ##(1) ##load CW-OSL curve data data(ExampleData.CW_OSL_Curve, envir = environment()) ##transform values values.transformed <- CW2pLMi(ExampleData.CW_OSL_Curve) ##plot plot(values.transformed$x, values.transformed$y.t, log = "x") ##(2) - produce Fig. 
4 from Bos & Wallinga (2012) ##load data data(ExampleData.CW_OSL_Curve, envir = environment()) values <- CW_Curve.BosWallinga2012 ##open plot area plot(NA, NA, xlim = c(0.001,10), ylim = c(0,8000), ylab = "pseudo OSL (cts/0.01 s)", xlab = "t [s]", log = "x", main = "Fig. 4 - Bos & Wallinga (2012)") values.t <- CW2pLMi(values, P = 1/20) lines(values[1:length(values.t[,1]),1],CW2pLMi(values, P = 1/20)[,2], col = "red", lwd = 1.3) text(0.03,4500,"LM", col = "red", cex = .8) values.t <- CW2pHMi(values, delta = 40) lines(values[1:length(values.t[,1]),1],CW2pHMi(values, delta = 40)[,2], col = "black", lwd = 1.3) text(0.005,3000,"HM", cex =.8) values.t <- CW2pPMi(values, P = 1/10) lines(values[1:length(values.t[,1]),1], CW2pPMi(values, P = 1/10)[,2], col = "blue", lwd = 1.3) text(0.5,6500,"PM", col = "blue", cex = .8) } \section{How to cite}{ Kreutzer, S., 2020. CW2pLMi(): Transform a CW-OSL curve into a pLM-OSL curve via interpolation under linear modulation conditions. Function version 0.3.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Bos, A.J.J. & Wallinga, J., 2012. How to visualize quartz OSL signal components. Radiation Measurements, 47, 752-758. \strong{Further Reading} Bulur, E., 1996. An Alternative Technique For Optically Stimulated Luminescence (OSL) Experiment. Radiation Measurements, 26, 701-709. Bulur, E., 2000. A simple transformation for converting CW-OSL curves to LM-OSL curves. Radiation Measurements, 32, 141-145. } \seealso{ \link{CW2pLM}, \link{CW2pHMi}, \link{CW2pPMi}, \link{fit_LMCurve}, \linkS4class{RLum.Data.Curve} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne Based on comments and suggestions from:\cr Adrie J.J. 
Bos, Delft University of Technology, The Netherlands , RLum Developer Team} \keyword{manip} Luminescence/man/ExampleData.CW_OSL_Curve.Rd0000644000176200001440000000305113604173243020306 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \docType{data} \name{ExampleData.CW_OSL_Curve} \alias{ExampleData.CW_OSL_Curve} \alias{CW_Curve.BosWallinga2012} \title{Example CW-OSL curve data for the package Luminescence} \format{Data frame with 1000 observations on the following 2 variables: \describe{ \item{list("x")}{a numeric vector, time} \item{list("y")}{a numeric vector, counts} }} \source{ \strong{ExampleData.CW_OSL_Curve} \tabular{ll}{ Lab: \tab Luminescence Laboratory Bayreuth\cr Lab-Code: \tab BT607\cr Location: \tab Saxony/Germany\cr Material: \tab Middle grain quartz measured on aluminum cups on a Risoe TL/OSL DA-15 reader.\cr Reference: \tab unpublished data } \strong{CW_Curve.BosWallinga2012} \tabular{ll}{ Lab: \tab Netherlands Centre for Luminescence Dating (NCL)\cr Lab-Code: \tab NCL-2108077\cr Location: \tab Guadalentin Basin, Spain\cr Material: \tab Coarse grain quartz\cr Reference: \tab Bos & Wallinga (2012) and Baartman et al. (2011) } } \description{ \code{data.frame} containing CW-OSL curve data (time, counts) } \examples{ data(ExampleData.CW_OSL_Curve, envir = environment()) plot(ExampleData.CW_OSL_Curve) } \references{ Baartman, J.E.M., Veldkamp, A., Schoorl, J.M., Wallinga, J., Cammeraat, L.H., 2011. Unravelling Late Pleistocene and Holocene landscape dynamics: The Upper Guadalentin Basin, SE Spain. Geomorphology, 125, 172-185. Bos, A.J.J. & Wallinga, J., 2012. How to visualize quartz OSL signal components. Radiation Measurements, 47, 752-758. 
} \keyword{datasets} Luminescence/man/calc_FastRatio.Rd0000644000176200001440000001166113604173244016615 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_FastRatio.R \name{calc_FastRatio} \alias{calc_FastRatio} \title{Calculate the Fast Ratio for CW-OSL curves} \usage{ calc_FastRatio( object, stimulation.power = 30.6, wavelength = 470, sigmaF = 2.6e-17, sigmaM = 4.28e-18, Ch_L1 = 1, Ch_L2 = NULL, Ch_L3 = NULL, x = 1, x2 = 0.1, dead.channels = c(0, 0), fitCW.sigma = FALSE, fitCW.curve = FALSE, plot = TRUE, ... ) } \arguments{ \item{object}{\linkS4class{RLum.Analysis}, \linkS4class{RLum.Data.Curve} or \link{data.frame} (\strong{required}): x, y data of measured values (time and counts).} \item{stimulation.power}{\link{numeric} (\emph{with default}): Stimulation power in mW/cm^2} \item{wavelength}{\link{numeric} (\emph{with default}): Stimulation wavelength in nm} \item{sigmaF}{\link{numeric} (\emph{with default}): Photoionisation cross-section (cm^2) of the fast component. Default value after Durcan & Duller (2011).} \item{sigmaM}{\link{numeric} (\emph{with default}): Photoionisation cross-section (cm^2) of the medium component. Default value after Durcan & Duller (2011).} \item{Ch_L1}{\link{numeric} (\emph{with default}): An integer specifying the channel for L1.} \item{Ch_L2}{\link{numeric} (\emph{optional}): An integer specifying the channel for L2.} \item{Ch_L3}{\link{numeric} (\emph{optional}): A vector of length 2 with integer values specifying the start and end channels for L3 (e.g., \code{c(40, 50)}).} \item{x}{\link{numeric} (\emph{with default}): \\% of signal remaining from the fast component. Used to define the location of L2 and L3 (start).} \item{x2}{\link{numeric} (\emph{with default}): \\% of signal remaining from the medium component. Used to define the location of L3 (end).} \item{dead.channels}{\link{numeric} (\emph{with default}): Vector of length 2 in the form of \code{c(x, y)}. 
Channels that do not contain OSL data, i.e. at the start or end of measurement.} \item{fitCW.sigma}{\link{logical} (\emph{optional}): fit CW-OSL curve using \link{fit_CWCurve} to calculate \code{sigmaF} and \code{sigmaM} (\strong{experimental}).} \item{fitCW.curve}{\link{logical} (\emph{optional}): fit CW-OSL curve using \link{fit_CWCurve} and derive the counts of L2 and L3 from the fitted OSL curve (\strong{experimental}).} \item{plot}{\link{logical} (\emph{with default}): plot output (\code{TRUE}/\code{FALSE})} \item{...}{available options: \code{verbose} (\link{logical}). Further arguments passed to \link{fit_CWCurve}.} } \value{ Returns a plot (\emph{optional}) and an S4 object of type \linkS4class{RLum.Results}. The slot \code{data} contains a \link{list} with the following elements: \item{summary}{\link{data.frame} summary of all relevant results} \item{data}{the original input data} \item{fit}{\linkS4class{RLum.Results} object if either \code{fitCW.sigma} or \code{fitCW.curve} is \code{TRUE}} \item{args}{\link{list} of used arguments} \item{call}{\verb{[call]} the function call} } \description{ Function to calculate the fast ratio of quartz CW-OSL single grain or single aliquot curves after Durcan & Duller (2011). } \details{ This function follows the equations of Durcan & Duller (2011). The energy required to reduce the fast and medium quartz OSL components to \code{x} and \code{x2} \\% respectively using eq. 3 to determine channels L2 and L3 (start and end). The fast ratio is then calculated from: \eqn{(L1-L3)/(L2-L3)}. } \section{Function version}{ 0.1.1 } \examples{ # load example CW-OSL curve data("ExampleData.CW_OSL_Curve") # calculate the fast ratio w/o further adjustments res <- calc_FastRatio(ExampleData.CW_OSL_Curve) # show the summary table get_RLum(res) } \section{How to cite}{ King, G.E., Durcan, J., Burow, C., 2020. calc_FastRatio(): Calculate the Fast Ratio for CW-OSL curves. Function version 0.1.1. 
In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Durcan, J.A. & Duller, G.A.T., 2011. The fast ratio: A rapid measure for testing the dominance of the fast component in the initial OSL signal from quartz. Radiation Measurements 46, 1065-1072. Madsen, A.T., Duller, G.A.T., Donnelly, J.P., Roberts, H.M. & Wintle, A.G., 2009. A chronology of hurricane landfalls at Little Sippewissett Marsh, Massachusetts, USA, using optical dating. Geomorphology 109, 36-45. \strong{Further reading} Steffen, D., Preusser, F. & Schlunegger, 2009. OSL quartz age underestimation due to unstable signal components. Quaternary Geochronology 4, 353-362. } \seealso{ \link{fit_CWCurve}, \link{get_RLum}, \linkS4class{RLum.Analysis}, \linkS4class{RLum.Results}, \linkS4class{RLum.Data.Curve} } \author{ Georgina E. King, University of Bern (Switzerland) \cr Julie A. Durcan, University of Oxford (United Kingdom) \cr Christoph Burow, University of Cologne (Germany) , RLum Developer Team} Luminescence/man/fit_CWCurve.Rd0000644000176200001440000002011213604173244016106 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/fit_CWCurve.R \name{fit_CWCurve} \alias{fit_CWCurve} \title{Nonlinear Least Squares Fit for CW-OSL curves -beta version-} \usage{ fit_CWCurve( values, n.components.max, fit.failure_threshold = 5, fit.method = "port", fit.trace = FALSE, fit.calcError = FALSE, LED.power = 36, LED.wavelength = 470, cex.global = 0.6, sample_code = "Default", output.path, output.terminal = TRUE, output.terminalAdvanced = TRUE, plot = TRUE, ... ) } \arguments{ \item{values}{\linkS4class{RLum.Data.Curve} or \link{data.frame} (\strong{required}): x, y data of measured values (time and counts). 
See examples.} \item{n.components.max}{\link{vector} (\emph{optional}): maximum number of components that are to be used for fitting. The upper limit is 7.} \item{fit.failure_threshold}{\link{vector} (\emph{with default}): limits the failed fitting attempts.} \item{fit.method}{\link{character} (\emph{with default}): select fit method, allowed values: \code{'port'} and \code{'LM'}. \code{'port'} uses the 'port' routine using the function \link{nls} \code{'LM'} utilises the function \code{nlsLM} from the package \code{minpack.lm} and with that the Levenberg-Marquardt algorithm.} \item{fit.trace}{\link{logical} (\emph{with default}): traces the fitting process on the terminal.} \item{fit.calcError}{\link{logical} (\emph{with default}): calculate 1-sigma error range of components using \link{confint}} \item{LED.power}{\link{numeric} (\emph{with default}): LED power (max.) used for intensity ramping in mW/cm^2. \strong{Note:} The value is used for the calculation of the absolute photoionisation cross section.} \item{LED.wavelength}{\link{numeric} (\emph{with default}): LED wavelength used for stimulation in nm. \strong{Note:} The value is used for the calculation of the absolute photoionisation cross section.} \item{cex.global}{\link{numeric} (\emph{with default}): global scaling factor.} \item{sample_code}{\link{character} (\emph{optional}): sample code used for the plot and the optional output table (mtext).} \item{output.path}{\link{character} (\emph{optional}): output path for table output containing the results of the fit. The file name is set automatically. If the file already exists in the directory, the values are appended.} \item{output.terminal}{\link{logical} (\emph{with default}): terminal output with fitting results.} \item{output.terminalAdvanced}{\link{logical} (\emph{with default}): enhanced terminal output. Requires \code{output.terminal = TRUE}. 
If \code{output.terminal = FALSE} no advanced output is possible.} \item{plot}{\link{logical} (\emph{with default}): returns a plot of the fitted curves.} \item{...}{further arguments and graphical parameters passed to \link{plot}.} } \value{ \strong{plot (\emph{optional})} the fitted CW-OSL curves are returned as plot. \strong{table (\emph{optional})} an output table (*.csv) with parameters of the fitted components is provided if the \code{output.path} is set. \strong{RLum.Results} Beside the plot and table output options, an \linkS4class{RLum.Results} object is returned. \code{fit}: an \code{nls} object (\verb{$fit}) for which generic R functions are provided, e.g. \link{summary}, \link{confint}, \link{profile}. For more details, see \link{nls}. \code{output.table}: a \link{data.frame} containing the summarised parameters including the error \code{component.contribution.matrix}: \link{matrix} containing the values for the component to sum contribution plot (\verb{$component.contribution.matrix}). Matrix structure:\cr Column 1 and 2: time and \code{rev(time)} values \cr Additional columns are used for the components, two for each component, containing I0 and n0. The last columns \code{cont.} provide information on the relative component contribution for each time interval including the row sum for this values. \strong{object} beside the plot and table output options, an \linkS4class{RLum.Results} object is returned. \code{fit}: an \code{nls} object (\verb{$fit}) for which generic R functions are provided, e.g. \link{summary}, \link{confint}, \link{profile}. For more details, see \link{nls}. 
\code{output.table}: a \link{data.frame} containing the summarised parameters including the error\cr \code{component.contribution.matrix}: \link{matrix} containing the values for the component to sum contribution plot (\verb{$component.contribution.matrix}).\cr Matrix structure:\cr Column 1 and 2: time and \code{rev(time)} values\cr Additional columns are used for the components, two for each component, containing I0 and n0. The last columns \code{cont.} provide information on the relative component contribution for each time interval including the row sum for these values. } \description{ The function determines the weighted least-squares estimates of the component parameters of a CW-OSL signal for a given maximum number of components and returns various component parameters. The fitting procedure uses the \link{nls} function with the \code{port} algorithm. } \details{ \strong{Fitting function} The function for the CW-OSL fitting has the general form: \deqn{y = I0_{1}*\lambda_{1}*exp(-\lambda_1*x) + ,\ldots, + I0_{i}*\lambda_{i}*exp(-\lambda_i*x) } where \eqn{0 < i < 8} and \eqn{\lambda} is the decay constant \cr and \eqn{I0} the initial number of trapped electrons. \emph{(for the used equation cf. Boetter-Jensen et al., 2003, Eq. 2.31)} \strong{Start values} Start values are estimated automatically by fitting a linear function to the logarithmized input data set. Currently, there is no option to manually provide start parameters. \strong{Goodness of fit} The goodness of the fit is given as pseudoR^2 value (pseudo coefficient of determination). According to Lave (1970), the value is calculated as: \deqn{pseudoR^2 = 1 - RSS/TSS} where \eqn{RSS = Residual~Sum~of~Squares} \cr and \eqn{TSS = Total~Sum~of~Squares} \strong{Error of fitted component parameters} The 1-sigma error for the components is calculated using the function \link{confint}. Due to considerable calculation time, this option is deactivated by default. 
In addition, the error for the components can be estimated by using internal R functions like \link{summary}. See the \link{nls} help page for more information. \emph{For details on the nonlinear regression in R, see Ritz & Streibig (2008).} } \note{ \strong{Beta version - This function has not been properly tested yet and} \strong{should therefore not be used for publication purposes!} The pseudo-R^2 may not be the best parameter to describe the goodness of the fit. The trade off between the \code{n.components} and the pseudo-R^2 value is currently not considered. The function \strong{does not} ensure that the fitting procedure has reached a global minimum rather than a local minimum! } \section{Function version}{ 0.5.2 } \examples{ ##load data data(ExampleData.CW_OSL_Curve, envir = environment()) ##fit data fit <- fit_CWCurve(values = ExampleData.CW_OSL_Curve, main = "CW Curve Fit", n.components.max = 4, log = "x") } \section{How to cite}{ Kreutzer, S., 2020. fit_CWCurve(): Nonlinear Least Squares Fit for CW-OSL curves -beta version-. Function version 0.5.2. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Boetter-Jensen, L., McKeever, S.W.S., Wintle, A.G., 2003. Optically Stimulated Luminescence Dosimetry. Elsevier Science B.V. Lave, C.A.T., 1970. The Demand for Urban Mass Transportation. The Review of Economics and Statistics, 52 (3), 320-323. Ritz, C. & Streibig, J.C., 2008. Nonlinear Regression with R. In: R. Gentleman, K. Hornik, G. Parmigiani, eds., Springer, p. 150. 
} \seealso{ \link{fit_LMCurve}, \link{plot},\link{nls}, \linkS4class{RLum.Data.Curve}, \linkS4class{RLum.Results}, \link{get_RLum}, \link[minpack.lm:nlsLM]{minpack.lm::nlsLM} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \keyword{dplot} \keyword{models} Luminescence/man/calc_CosmicDoseRate.Rd0000644000176200001440000002362613604173244017571 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_CosmicDoseRate.R \name{calc_CosmicDoseRate} \alias{calc_CosmicDoseRate} \title{Calculate the cosmic dose rate} \usage{ calc_CosmicDoseRate( depth, density, latitude, longitude, altitude, corr.fieldChanges = FALSE, est.age = NA, half.depth = FALSE, error = 10, ... ) } \arguments{ \item{depth}{\link{numeric} (\strong{required}): depth of overburden (m). For more than one absorber use \cr \code{c(depth_1, depth_2, ..., depth_n)}} \item{density}{\link{numeric} (\strong{required}): average overburden density (g/cm^3). For more than one absorber use \cr \code{c(density_1, density_2, ..., density_n)}} \item{latitude}{\link{numeric} (\strong{required}): latitude (decimal degree), N positive} \item{longitude}{\link{numeric} (\strong{required}): longitude (decimal degree), E positive} \item{altitude}{\link{numeric} (\strong{required}): altitude (m above sea-level)} \item{corr.fieldChanges}{\link{logical} (\emph{with default}): correct for geomagnetic field changes after Prescott & Hutton (1994). Apply only when justified by the data.} \item{est.age}{\link{numeric} (\emph{with default}): estimated age range (ka) for geomagnetic field change correction (0-80 ka allowed)} \item{half.depth}{\link{logical} (\emph{with default}): How to overcome with varying overburden thickness. If \code{TRUE} only half the depth is used for calculation. Apply only when justified, i.e. 
when a constant sedimentation rate can safely be assumed.} \item{error}{\link{numeric} (\emph{with default}): general error (percentage) to be implemented on corrected cosmic dose rate estimate} \item{...}{further arguments (\code{verbose} to disable/enable console output).} } \value{ Returns a terminal output. In addition an \linkS4class{RLum.Results}-object is returned containing the following element: \item{summary}{\link{data.frame} summary of all relevant calculation results.} \item{args}{\link{list} used arguments} \item{call}{\link{call} the function call} The output should be accessed using the function \link{get_RLum} } \description{ This function calculates the cosmic dose rate taking into account the soft- and hard-component of the cosmic ray flux and allows corrections for geomagnetic latitude, altitude above sea-level and geomagnetic field changes. } \details{ This function calculates the total cosmic dose rate considering both the soft- and hard-component of the cosmic ray flux. \strong{Internal calculation steps} (1) Calculate total depth of all absorber in hg/cm^2 (1 hg/cm^2 = 100 g/cm^2) \deqn{absorber = depth_1*density_1 + depth_2*density_2 + ... + depth_n*density_n} (2) If \code{half.depth = TRUE} \deqn{absorber = absorber/2} (3) Calculate cosmic dose rate at sea-level and 55 deg. latitude a) If absorber is > 167 g/cm^2 (only hard-component; Allkofer et al. 1975): apply equation given by Prescott & Hutton (1994) (c.f. Barbouti & Rastin 1983) \deqn{D0 = C/(((absorber+d)^\alpha+a)*(absober+H))*exp(-B*absorber)} b) If absorber is < 167 g/cm^2 (soft- and hard-component): derive D0 from Fig. 1 in Prescott & Hutton (1988). (4) Calculate geomagnetic latitude (Prescott & Stephan 1982, Prescott & Hutton 1994) \deqn{\lambda = arcsin(0.203*cos(latitude)*cos(longitude-291)+0.979* sin(latitude))} (5) Apply correction for geomagnetic latitude and altitude above sea-level. Values for F, J and H were read from Fig. 
3 shown in Prescott & Stephan (1982) and fitted with 3-degree polynomials for lambda < 35 degree and a linear fit for lambda > 35 degree. \deqn{Dc = D0*(F+J*exp((altitude/1000)/H))} (6) Optional: Apply correction for geomagnetic field changes in the last 0-80 ka (Prescott & Hutton 1994). Correction and altitude factors are given in Table 1 and Fig. 1 in Prescott & Hutton (1994). Values for altitude factor were fitted with a 2-degree polynomial. The altitude factor is operated on the decimal part of the correction factor. \deqn{Dc' = Dc*correctionFactor} \strong{Usage of \code{depth} and \code{density}} (1) If only one value for depth and density is provided, the cosmic dose rate is calculated for exactly one sample and one absorber as overburden (i.e. \code{depth*density}). (2) In some cases it might be useful to calculate the cosmic dose rate for a sample that is overlain by more than one absorber, e.g. in a profile with soil layers of different thickness and a distinct difference in density. This can be calculated by providing a matching number of values for \code{depth} and \code{density} (e.g. \verb{depth = c(1, 2), density = c(1.7, 2.4)}) (3) Another possibility is to calculate the cosmic dose rate for more than one sample of the same profile. This is done by providing more than one values for \code{depth} and only one for \code{density}. For example, \code{depth = c(1, 2, 3)} and \code{density = 1.7} will calculate the cosmic dose rate for three samples in 1, 2 and 3 m depth in a sediment of density 1.7 g/cm^3. } \note{ Despite its universal use the equation to calculate the cosmic dose rate provided by Prescott & Hutton (1994) is falsely stated to be valid from the surface to 10^4 hg/cm^2 of standard rock. The original expression by Barbouti & Rastin (1983) only considers the muon flux (i.e. hard-component) and is by their own definition only valid for depths between 10-10^4 hg/cm^2. Thus, for near-surface samples (i.e. 
for depths < 167 g/cm^2) the equation of Prescott & Hutton (1994) underestimates the total cosmic dose rate, as it neglects the influence of the soft-component of the cosmic ray flux. For samples at zero depth and at sea-level the underestimation can be as large as ~0.1 Gy/ka. In a previous article, Prescott & Hutton (1988) give another approximation of Barbouti & Rastins equation in the form of \deqn{D = 0.21*exp(-0.070*absorber+0.0005*absorber^2)} which is valid for depths between 150-5000 g/cm^2. For shallower depths (< 150 g/cm^2) they provided a graph (Fig. 1) from which the dose rate can be read. As a result, this function employs the equation of Prescott & Hutton (1994) only for depths > 167 g/cm^2, i.e. only for the hard-component of the cosmic ray flux. Cosmic dose rate values for depths < 167 g/cm^2 were obtained from the "AGE" programm (Gruen 2009) and fitted with a 6-degree polynomial curve (and hence reproduces the graph shown in Prescott & Hutton 1988). However, these values assume an average overburden density of 2 g/cm^3. It is currently not possible to obtain more precise cosmic dose rate values for near-surface samples as there is no equation known to the author of this function at the time of writing. 
} \section{Function version}{ 0.5.2 } \examples{ ##(1) calculate cosmic dose rate (one absorber) calc_CosmicDoseRate(depth = 2.78, density = 1.7, latitude = 38.06451, longitude = 1.49646, altitude = 364, error = 10) ##(2a) calculate cosmic dose rate (two absorber) calc_CosmicDoseRate(depth = c(5.0, 2.78), density = c(2.65, 1.7), latitude = 38.06451, longitude = 1.49646, altitude = 364, error = 10) ##(2b) calculate cosmic dose rate (two absorber) and ##correct for geomagnetic field changes calc_CosmicDoseRate(depth = c(5.0, 2.78), density = c(2.65, 1.7), latitude = 12.04332, longitude = 4.43243, altitude = 364, corr.fieldChanges = TRUE, est.age = 67, error = 15) ##(3) calculate cosmic dose rate and export results to .csv file #calculate cosmic dose rate and save to variable results<- calc_CosmicDoseRate(depth = 2.78, density = 1.7, latitude = 38.06451, longitude = 1.49646, altitude = 364, error = 10) # the results can be accessed by get_RLum(results, "summary") #export results to .csv file - uncomment for usage #write.csv(results, file = "c:/users/public/results.csv") ##(4) calculate cosmic dose rate for 6 samples from the same profile ## and save to .csv file #calculate cosmic dose rate and save to variable results<- calc_CosmicDoseRate(depth = c(0.1, 0.5 , 2.1, 2.7, 4.2, 6.3), density = 1.7, latitude = 38.06451, longitude = 1.49646, altitude = 364, error = 10) #export results to .csv file - uncomment for usage #write.csv(results, file = "c:/users/public/results_profile.csv") } \section{How to cite}{ Burow, C., 2020. calc_CosmicDoseRate(): Calculate the cosmic dose rate. Function version 0.5.2. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Allkofer, O.C., Carstensen, K., Dau, W.D., Jokisch, H., 1975. Letter to the editor. 
The absolute cosmic ray flux at sea level. Journal of Physics G: Nuclear and Particle Physics 1, L51-L52. Barbouti, A.I., Rastin, B.C., 1983. A study of the absolute intensity of muons at sea level and under various thicknesses of absorber. Journal of Physics G: Nuclear and Particle Physics 9, 1577-1595. Crookes, J.N., Rastin, B.C., 1972. An investigation of the absolute intensity of muons at sea-level. Nuclear Physics B 39, 493-508. Gruen, R., 2009. The "AGE" program for the calculation of luminescence age estimates. Ancient TL 27, 45-46. Prescott, J.R., Hutton, J.T., 1988. Cosmic ray and gamma ray dosimetry for TL and ESR. Nuclear Tracks and Radiation Measurements 14, 223-227. Prescott, J.R., Hutton, J.T., 1994. Cosmic ray contributions to dose rates for luminescence and ESR dating: large depths and long-term time variations. Radiation Measurements 23, 497-500. Prescott, J.R., Stephan, L.G., 1982. The contribution of cosmic radiation to the environmental dose for thermoluminescence dating. Latitude, altitude and depth dependences. PACT 6, 17-25. } \seealso{ \link{BaseDataSet.CosmicDoseRate} } \author{ Christoph Burow, University of Cologne (Germany) , RLum Developer Team} Luminescence/man/convert_BIN2CSV.Rd0000644000176200001440000000360713604173244016546 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/convert_BIN2CSV.R \name{convert_BIN2CSV} \alias{convert_BIN2CSV} \title{Export Risoe BIN-file(s) to CSV-files} \usage{ convert_BIN2CSV(file, ...) 
} \arguments{ \item{file}{\link{character} (\strong{required}): name of the BIN-file to be converted to CSV-files} \item{...}{further arguments that will be passed to the function \link{read_BIN2R} and \link{write_RLum2CSV}} } \value{ The function returns either a CSV-file (or many of them) or for the option \code{export == FALSE} a list comprising objects of type \link{data.frame} and \link{matrix} } \description{ This function is a wrapper function around the functions \link{read_BIN2R} and \link{write_RLum2CSV} and it imports a Risoe BIN-file and directly exports its content to CSV-files. If nothing is set for the argument \code{path} (\link{write_RLum2CSV}) the input folder will become the output folder. } \section{Function version}{ 0.1.0 } \examples{ ##transform Risoe.BINfileData values to a list data(ExampleData.BINfileData, envir = environment()) convert_BIN2CSV(subset(CWOSL.SAR.Data, POSITION == 1), export = FALSE) \dontrun{ ##select your BIN-file file <- file.choose() ##convert convert_BIN2CSV(file) } } \seealso{ \linkS4class{RLum.Analysis}, \linkS4class{RLum.Data}, \linkS4class{RLum.Results}, \link[utils:write.table]{utils::write.table}, \link{write_RLum2CSV}, \link{read_BIN2R} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. convert_BIN2CSV(): Export Risoe BIN-file(s) to CSV-files. Function version 0.1.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \keyword{IO} Luminescence/man/PSL2Risoe.BINfileData.Rd0000644000176200001440000000372313604173245017523 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/PSL2Risoe.BINfileData.R \name{PSL2Risoe.BINfileData} \alias{PSL2Risoe.BINfileData} \title{Convert portable OSL data to an Risoe.BINfileData object} \usage{ PSL2Risoe.BINfileData(object, ...) } \arguments{ \item{object}{\linkS4class{RLum.Analysis} (\strong{required}): \code{RLum.Analysis} object produced by \link{read_PSL2R}} \item{...}{currently not used.} } \value{ Returns an S4 \linkS4class{Risoe.BINfileData} object that can be used to write a BIN file using \link{write_R2BIN}. } \description{ Converts an \code{RLum.Analysis} object produced by the function \code{read_PSL2R()} to an \code{Risoe.BINfileData} object \strong{(BETA)}. } \details{ This function converts an \linkS4class{RLum.Analysis} object that was produced by the \link{read_PSL2R} function to an \linkS4class{Risoe.BINfileData}. The \code{Risoe.BINfileData} can be used to write a Risoe BIN file via \link{write_R2BIN}. } \section{Function version}{ 0.0.1 } \examples{ # (1) load and plot example data set data("ExampleData.portableOSL", envir = environment()) plot_RLum(ExampleData.portableOSL) # (2) merge all RLum.Analysis objects into one merged <- merge_RLum(ExampleData.portableOSL) merged # (3) convert to RisoeBINfile object bin <- PSL2Risoe.BINfileData(merged) bin # (4) write Risoe BIN file \dontrun{ write_R2BIN(bin, "~/portableOSL.binx") } } \seealso{ \linkS4class{RLum.Analysis}, \linkS4class{RLum.Data.Curve}, \linkS4class{Risoe.BINfileData} } \author{ Christoph Burow, University of Cologne (Germany) , RLum Developer Team} \section{How to cite}{ Burow, C., 2020. PSL2Risoe.BINfileData(): Convert portable OSL data to an Risoe.BINfileData object. Function version 0.0.1. 
In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{IO} Luminescence/man/calc_SourceDoseRate.Rd0000644000176200001440000001371513604173244017612 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_SourceDoseRate.R \name{calc_SourceDoseRate} \alias{calc_SourceDoseRate} \title{Calculation of the source dose rate via the date of measurement} \usage{ calc_SourceDoseRate( measurement.date = Sys.Date(), calib.date, calib.dose.rate, calib.error, source.type = "Sr-90", dose.rate.unit = "Gy/s", predict = NULL ) } \arguments{ \item{measurement.date}{\link{character} or \link{Date} (with default): Date of measurement in "YYYY-MM-DD". If no value is provided, the date will be set to today. The argument can be provided as vector.} \item{calib.date}{\link{character} or \link{Date} (\strong{required}): date of source calibration in "YYYY-MM-DD"} \item{calib.dose.rate}{\link{numeric} (\strong{required}): dose rate at date of calibration in Gy/s or Gy/min} \item{calib.error}{\link{numeric} (\strong{required}): error of dose rate at date of calibration Gy/s or Gy/min} \item{source.type}{\link{character} (\emph{with default}): specify irradiation source (\code{Sr-90}, \code{Co-60}, \code{Cs-137}, \code{Am-214}), see details for further information} \item{dose.rate.unit}{\link{character} (\emph{with default}): specify dose rate unit for input (\code{Gy/min} or \code{Gy/s}), the output is given in Gy/s as valid for the function \link{Second2Gray}} \item{predict}{\link{integer} (\emph{with default}): option allowing to predict the dose rate of the source over time in days set by the provided value.
Starting date is the value set with \code{measurement.date}, e.g., \code{calc_SourceDoseRate(..., predict = 100)} calculates the source dose rate for the next 100 days.} } \value{ Returns an S4 object of type \linkS4class{RLum.Results}. Slot \code{data} contains a \link{list} with the following structure:\preformatted{$ dose.rate (data.frame) .. $ dose.rate .. $ dose.rate.error .. $ date (corresponding measurement date) $ parameters (list) .. $ source.type .. $ halflife .. $ dose.rate.unit $ call (the original function call) } The output should be accessed using the function \link{get_RLum}.\cr A plot method of the output is provided via \link{plot_RLum} } \description{ Calculating the dose rate of the irradiation source via the date of measurement based on: source calibration date, source dose rate, dose rate error. The function returns a data.frame that provides the input argument dose_rate for the function \link{Second2Gray}. } \details{ Calculation of the source dose rate based on the time elapsed since the last calibration of the irradiation source. Decay parameters assume a Sr-90 beta source. 
\deqn{dose.rate = D0 * exp(-log(2) / T.1/2 * t)} \cr with: D0 <- calibration dose rate T.1/2 <- half-life of the source nuclide (here in days) t <- time since source calibration (in days) log(2) / T.1/2 equals the decay constant lambda Information on the date of measurements may be taken from the data's original .BIN file (using e.g., \code{BINfile <- read_BIN2R()} and the slot \code{BINfile@METADATA$DATE}) \strong{Allowed source types and related values} \tabular{rllll}{ \strong{#} \tab \strong{Source type} \tab \strong{T.1/2} \tab \strong{Reference} \cr \verb{[1]} \tab Sr-90 \tab 28.90 y \tab NNDC, Brookhaven National Laboratory \cr \verb{[2]}\tab Am-214 \tab 432.6 y \tab NNDC, Brookhaven National Laboratory \cr \verb{[3]} \tab Co-60 \tab 5.274 y \tab NNDC, Brookhaven National Laboratory \cr \verb{[4]} \tab Cs-137 \tab 30.08 y \tab NNDC, Brookhaven National Laboratory} } \note{ Please be careful when using the option \code{predict}, especially when a multiple set for \code{measurement.date} and \code{calib.date} is provided. For the source dose rate prediction the function takes the last value \code{measurement.date} and predicts from that the source dose rate for the number of days requested, meaning the (multiple) original input will be replaced. However, the function does not change entries for the calibration dates, but mixes them up. Therefore, it is not recommended to use this option when multiple calibration dates (\code{calib.date}) are provided.
} \section{Function version}{ 0.3.2 } \examples{ ##(1) Simple function usage ##Basic calculation of the dose rate for a specific date dose.rate <- calc_SourceDoseRate(measurement.date = "2012-01-27", calib.date = "2014-12-19", calib.dose.rate = 0.0438, calib.error = 0.0019) ##show results get_RLum(dose.rate) ##(2) Usage in combination with another function (e.g., Second2Gray() ) ## load example data data(ExampleData.DeValues, envir = environment()) ## use the calculated variable dose.rate as input argument ## to convert De(s) to De(Gy) Second2Gray(ExampleData.DeValues$BT998, dose.rate) ##(3) source rate prediction and plotting dose.rate <- calc_SourceDoseRate(measurement.date = "2012-01-27", calib.date = "2014-12-19", calib.dose.rate = 0.0438, calib.error = 0.0019, predict = 1000) plot_RLum(dose.rate) ##(4) export output to a LaTeX table (example using the package 'xtable') \dontrun{ xtable::xtable(get_RLum(dose.rate)) } } \section{How to cite}{ Fuchs, M.C., Kreutzer, S., 2020. calc_SourceDoseRate(): Calculation of the source dose rate via the date of measurement. Function version 0.3.2. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ NNDC, Brookhaven National Laboratory \url{http://www.nndc.bnl.gov/} } \seealso{ \link{Second2Gray}, \link{get_RLum}, \link{plot_RLum} } \author{ Margret C. 
Fuchs, HZDR, Helmholtz-Institute Freiberg for Resource Technology (Germany) \cr Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} \keyword{manip} Luminescence/man/Risoe.BINfileData-class.Rd0000644000176200001440000003111013604173245020154 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Risoe.BINfileData-class.R \docType{class} \name{Risoe.BINfileData-class} \alias{Risoe.BINfileData-class} \alias{show,Risoe.BINfileData-method} \alias{set_Risoe.BINfileData,ANY-method} \alias{get_Risoe.BINfileData,Risoe.BINfileData-method} \title{Class \code{"Risoe.BINfileData"}} \usage{ \S4method{show}{Risoe.BINfileData}(object) \S4method{set_Risoe.BINfileData}{ANY}( METADATA = data.frame(), DATA = list(), .RESERVED = list() ) \S4method{get_Risoe.BINfileData}{Risoe.BINfileData}(object, ...) } \arguments{ \item{object}{an object of class \linkS4class{Risoe.BINfileData}} \item{METADATA}{Object of class "data.frame" containing the meta information for each curve.} \item{DATA}{Object of class "list" containing numeric vector with count data.} \item{.RESERVED}{Object of class "list" containing list of undocumented raw values for internal use only.} \item{...}{other arguments that might be passed} } \description{ S4 class object for luminescence data in R. The object is produced as output of the function \link{read_BIN2R}. } \section{Methods (by generic)}{ \itemize{ \item \code{show}: Show structure of RLum and Risoe.BINfile class objects \item \code{set_Risoe.BINfileData}: The Risoe.BINfileData is normally produced as output of the function read_BIN2R. This construction method is intended for internal usage only. \item \code{get_Risoe.BINfileData}: Formal get-method for Risoe.BINfileData object. It does not allow accessing the object directly, it is just showing a terminal message. 
}} \section{Slots}{ \describe{ \item{\code{METADATA}}{Object of class "data.frame" containing the meta information for each curve.} \item{\code{DATA}}{Object of class "list" containing numeric vector with count data.} \item{\code{.RESERVED}}{Object of class "list" containing list of undocumented raw values for internal use only.} }} \note{ \strong{Internal METADATA - object structure} This structure is compatible with BIN/BINX-files version 03-08, however, it does not follow (in its sequential arrangement) the manual provided by the manufacturer, but its own structure accounting for the different versions. \tabular{rllll}{ \strong{#} \tab \strong{Name} \tab \strong{Data Type} \tab \strong{V} \tab \strong{Description} \cr \verb{[,1]} \tab ID \tab \code{numeric} \tab RLum \tab Unique record ID (same ID as in slot \code{DATA})\cr \verb{[,2]} \tab SEL \tab \code{logic} \tab RLum \tab Record selection, not part official BIN-format, triggered by TAG\cr \verb{[,3]} \tab VERSION \tab \code{raw} \tab 03-08 \tab BIN-file version number \cr \verb{[,4]} \tab LENGTH \tab \code{integer} \tab 03-08 \tab Length of this record\cr \verb{[,5]} \tab PREVIOUS \tab \code{integer} \tab 03-08 \tab Length of previous record\cr \verb{[,6]} \tab NPOINTS \tab \code{integer} \tab 03-08 \tab Number of data points in the record\cr \verb{[,7]} \tab RECTYPE \tab \code{integer} \tab 08 \tab Record type \cr \verb{[,8]} \tab RUN \tab \code{integer} \tab 03-08 \tab Run number\cr \verb{[,9]} \tab SET \tab \code{integer} \tab 03-08 \tab Set number\cr \verb{[,10]} \tab POSITION \tab \code{integer} \tab 03-08 \tab Position number\cr \verb{[,11]} \tab GRAIN \tab \code{integer} \tab 03-04 \tab Grain number\cr \verb{[,12]} \tab GRAINNUMBER \tab \code{integer} \tab 05-08 \tab Grain number\cr \verb{[,13]} \tab CURVENO \tab \code{integer} \tab 05-08 \tab Curve number\cr \verb{[,14]} \tab XCOORD \tab \code{integer} \tab 03-08 \tab X position of a single grain\cr \verb{[,15]} \tab YCOORD \tab \code{integer} \tab
03-08 \tab Y position of a single grain\cr \verb{[,16]} \tab SAMPLE \tab \code{factor} \tab 03-08 \tab Sample name\cr \verb{[,17]} \tab COMMENT \tab \code{factor} \tab 03-08 \tab Comment name\cr \verb{[,18]} \tab SYSTEMID \tab \code{integer} \tab 03-08 \tab Risoe system id\cr \verb{[,19]} \tab FNAME \tab \code{factor} \tab 05-08 \tab File name (\emph{.bin/}.binx)\cr \verb{[,20]} \tab USER \tab \code{factor} \tab 03-08 \tab User name\cr \verb{[,21]} \tab TIME \tab \code{character} \tab 03-08 \tab Data collection time (hh-mm-ss)\cr \verb{[,22]} \tab DATE \tab \code{factor} \tab 03-08 \tab Data collection date (ddmmyy)\cr \verb{[,23]} \tab DTYPE \tab \code{character} \tab 03-08 \tab Data type\cr \verb{[,24]} \tab BL_TIME \tab \code{numeric} \tab 03-08 \tab Bleaching time\cr \verb{[,25]} \tab BL_UNIT \tab \code{integer} \tab 03-08 \tab Bleaching unit (mJ, J, secs, mins, hrs)\cr \verb{[,26]} \tab NORM1 \tab \code{numeric} \tab 03-08 \tab Normalisation factor (1)\cr \verb{[,27]} \tab NORM2 \tab \code{numeric} \tab 03-08 \tab Normalisation factor (2)\cr \verb{[,28]} \tab NORM3 \tab \code{numeric} \tab 03-08 \tab Normalisation factor (3)\cr \verb{[,29]} \tab BG \tab \code{numeric} \tab 03-08 \tab Background level\cr \verb{[,30]} \tab SHIFT \tab \code{integer} \tab 03-08 \tab Number of channels to shift data\cr \verb{[,31]} \tab TAG \tab \code{integer} \tab 03-08 \tab Tag, triggers SEL\cr \verb{[,32]} \tab LTYPE \tab \code{character} \tab 03-08 \tab Luminescence type\cr \verb{[,33]} \tab LIGHTSOURCE \tab \code{character} \tab 03-08 \tab Light source\cr \verb{[,34]} \tab LPOWER \tab \code{numeric} \tab 03-08 \tab Optical stimulation power\cr \verb{[,35]} \tab LIGHTPOWER \tab \code{numeric} \tab 05-08 \tab Optical stimulation power\cr \verb{[,36]} \tab LOW \tab \code{numeric} \tab 03-08 \tab Low (temperature, time, wavelength)\cr \verb{[,37]} \tab HIGH \tab \code{numeric} \tab 03-08 \tab High (temperature, time, wavelength)\cr \verb{[,38]} \tab RATE \tab \code{numeric} \tab
03-08 \tab Rate (heating rate, scan rate)\cr \verb{[,39]} \tab TEMPERATURE \tab \code{integer} \tab 03-08 \tab Sample temperature\cr \verb{[,40]} \tab MEASTEMP \tab \code{integer} \tab 05-08 \tab Measured temperature\cr \verb{[,41]} \tab AN_TEMP \tab \code{numeric} \tab 03-08 \tab Annealing temperature\cr \verb{[,42]} \tab AN_TIME \tab \code{numeric} \tab 03-08 \tab Annealing time\cr \verb{[,43]} \tab TOLDELAY \tab \code{integer} \tab 03-08 \tab TOL 'delay' channels\cr \verb{[,44]} \tab TOLON \tab \code{integer} \tab 03-08 \tab TOL 'on' channels\cr \verb{[,45]} \tab TOLOFF \tab \code{integer} \tab 03-08 \tab TOL 'off' channels\cr \verb{[,46]} \tab IRR_TIME \tab \code{numeric} \tab 03-08 \tab Irradiation time\cr \verb{[,47]} \tab IRR_TYPE \tab \code{integer} \tab 03-08 \tab Irradiation type (alpha, beta or gamma)\cr \verb{[,48]} \tab IRR_UNIT \tab \code{integer} \tab 03-04 \tab Irradiation unit (Gy, Rads, secs, mins, hrs)\cr \verb{[,49]} \tab IRR_DOSERATE \tab \code{numeric} \tab 05-08 \tab Irradiation dose rate (Gy/s)\cr \verb{[,50]} \tab IRR_DOSERATEERR \tab \code{numeric} \tab 06-08 \tab Irradiation dose rate error (Gy/s)\cr \verb{[,51]} \tab TIMESINCEIRR \tab \code{integer} \tab 05-08 \tab Time since irradiation (s)\cr \verb{[,52]} \tab TIMETICK \tab \code{numeric} \tab 05-08 \tab Time tick for pulsing (s)\cr \verb{[,53]} \tab ONTIME \tab \code{integer} \tab 05-08 \tab On-time for pulsing (in time ticks)\cr \verb{[,54]} \tab OFFTIME \tab \code{integer} \tab 03 \tab Off-time for pulsed stimulation (in s) \cr \verb{[,55]} \tab STIMPERIOD \tab \code{integer} \tab 05-08 \tab Stimulation period (on+off in time ticks)\cr \verb{[,56]} \tab GATE_ENABLED \tab \code{raw} \tab 05-08 \tab PMT signal gating enabled\cr \verb{[,57]} \tab ENABLE_FLAGS \tab \code{raw} \tab 05-08 \tab PMT signal gating enabled\cr \verb{[,58]} \tab GATE_START \tab \code{integer} \tab 05-08 \tab Start gating (in time ticks)\cr \verb{[,59]} \tab GATE_STOP \tab \code{integer} \tab 05-08 \tab Stop
gating (in time ticks), 'Gateend' for version 04, here only GATE_STOP is used\cr \verb{[,60]} \tab PTENABLED \tab \code{raw} \tab 05-08 \tab Photon time enabled\cr \verb{[,61]} \tab DTENABLED \tab \code{raw} \tab 05-08 \tab PMT dead time correction enabled\cr \verb{[,62]} \tab DEADTIME \tab \code{numeric} \tab 05-08 \tab PMT dead time (s)\cr \verb{[,63]} \tab MAXLPOWER \tab \code{numeric} \tab 05-08 \tab Stimulation power to 100 percent (mW/cm^2)\cr \verb{[,64]} \tab XRF_ACQTIME \tab \code{numeric} \tab 05-08 \tab XRF acquisition time (s)\cr \verb{[,65]} \tab XRF_HV \tab \code{numeric} \tab 05-08 \tab XRF X-ray high voltage (V)\cr \verb{[,66]} \tab XRF_CURR \tab \code{integer} \tab 05-08 \tab XRF X-ray current (uA)\cr \verb{[,67]} \tab XRF_DEADTIMEF \tab \code{numeric} \tab 05-08 \tab XRF dead time fraction\cr \verb{[,68]} \tab DETECTOR_ID \tab \code{raw} \tab 07-08 \tab Detector ID\cr \verb{[,69]} \tab LOWERFILTER_ID \tab \code{integer} \tab 07-08 \tab Lower filter ID in reader\cr \verb{[,70]} \tab UPPERFILTER_ID \tab \code{integer} \tab 07-08 \tab Upper filter ID in reader\cr \verb{[,71]} \tab ENOISEFACTOR \tab \code{numeric} \tab 07-08 \tab Excess noise filter, usage unknown \cr \verb{[,72]} \tab MARKPOS_X1 \tab \code{numeric} \tab 08 \tab Coordinates marker position 1 \cr \verb{[,73]} \tab MARKPOS_Y1 \tab \code{numeric} \tab 08 \tab Coordinates marker position 1 \cr \verb{[,74]} \tab MARKPOS_X2 \tab \code{numeric} \tab 08 \tab Coordinates marker position 2 \cr \verb{[,75]} \tab MARKPOS_Y2 \tab \code{numeric} \tab 08 \tab Coordinates marker position 2 \cr \verb{[,76]} \tab MARKPOS_X3 \tab \code{numeric} \tab 08 \tab Coordinates marker position 3 \cr \verb{[,77]} \tab MARKPOS_Y3 \tab \code{numeric} \tab 08 \tab Coordinates marker position 3 \cr \verb{[,78]} \tab EXTR_START \tab \code{numeric} \tab 08 \tab usage unknown \cr \verb{[,79]} \tab EXTR_END \tab \code{numeric} \tab 08 \tab usage unknown\cr \verb{[,80]} \tab SEQUENCE \tab \code{character} \tab 03-04 \tab
Sequence name } V = BIN-file version (RLum means that it does not depend on a specific BIN version) Note that the \code{Risoe.BINfileData} object combines all values from different versions from the BIN-file, reserved bits are skipped, however, the function \link{write_R2BIN} resets arbitrary reserved bits. Invalid values for a specific version are set to \code{NA}. Furthermore, the internal R data types do not necessarily match the required data types for the BIN-file data import! Data types are converted during data import.\cr \strong{LTYPE} values \tabular{rll}{ \verb{[,0]} \tab TL \tab: Thermoluminescence \cr \verb{[,1]} \tab OSL \tab: Optically stimulated luminescence \cr \verb{[,2]} \tab IRSL \tab: Infrared stimulated luminescence \cr \verb{[,3]} \tab M-IR \tab: Infrared monochromator scan\cr \verb{[,4]} \tab M-VIS \tab: Visible monochromator scan\cr \verb{[,5]} \tab TOL \tab: Thermo-optical luminescence \cr \verb{[,6]} \tab TRPOSL \tab: Time Resolved Pulsed OSL\cr \verb{[,7]} \tab RIR \tab: Ramped IRSL\cr \verb{[,8]} \tab RBR \tab: Ramped (Blue) LEDs\cr \verb{[,9]} \tab USER \tab: User defined\cr \verb{[,10]} \tab POSL \tab: Pulsed OSL \cr \verb{[,11]} \tab SGOSL \tab: Single Grain OSL\cr \verb{[,12]} \tab RL \tab: Radio Luminescence \cr \verb{[,13]} \tab XRF \tab: X-ray Fluorescence } \strong{DTYPE} values \tabular{rll}{ \verb{[,0]} \tab 0 \tab Natural \cr \verb{[,1]} \tab 1 \tab N+dose \cr \verb{[,2]} \tab 2 \tab Bleach \cr \verb{[,3]} \tab 3 \tab Bleach+dose \cr \verb{[,4]} \tab 4 \tab Natural (Bleach) \cr \verb{[,5]} \tab 5 \tab N+dose (Bleach) \cr \verb{[,6]} \tab 6 \tab Dose \cr \verb{[,7]} \tab 7 \tab Background } \strong{LIGHTSOURCE} values \tabular{rll}{ \verb{[,0]} \tab 0 \tab None \cr \verb{[,1]} \tab 1 \tab Lamp \cr \verb{[,2]} \tab 2 \tab IR diodes/IR Laser \cr \verb{[,3]} \tab 3 \tab Calibration LED \cr \verb{[,4]} \tab 4 \tab Blue Diodes \cr \verb{[,5]} \tab 5 \tab White light \cr \verb{[,6]} \tab 6 \tab Green laser (single grain) \cr
\verb{[,7]} \tab 7 \tab IR laser (single grain) } (information on the BIN/BINX file format are kindly provided by Risoe, DTU Nutech) } \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("Risoe.BINfileData", ...)}. } \section{Function version}{ 0.4.0 } \examples{ showClass("Risoe.BINfileData") } \section{How to cite}{ Kreutzer, S., 2020. Risoe.BINfileData-class(): Class 'Risoe.BINfileData'. Function version 0.4.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Risoe DTU, 2013. The Sequence Editor User Manual - Feb 2013 and Risoe DTU, 2016. The Sequence Editor User Manual - February 2016 \url{http://www.nutech.dtu.dk/} } \seealso{ \link{plot_Risoe.BINfileData}, \link{read_BIN2R}, \link{write_R2BIN}, \link{merge_Risoe.BINfileData}, \link{Risoe.BINfileData2RLum.Analysis} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, Université Bordeaux Montaigne (France)\cr based on information provided by Torben Lapp and Karsten Bracht Nielsen (Risø DTU, Denmark) , RLum Developer Team} \keyword{classes} \keyword{internal} Luminescence/man/CW2pHMi.Rd0000644000176200001440000001540113604173244015104 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/CW2pHMi.R \name{CW2pHMi} \alias{CW2pHMi} \title{Transform a CW-OSL curve into a pHM-OSL curve via interpolation under hyperbolic modulation conditions} \usage{ CW2pHMi(values, delta) } \arguments{ \item{values}{\linkS4class{RLum.Data.Curve} or \link{data.frame} (\strong{required}): \linkS4class{RLum.Data.Curve} or \link{data.frame} with measured curve data of type stimulation time (t) (\code{values[,1]}) and measured counts (cts) (\code{values[,2]}).} \item{delta}{\link{vector} (\emph{optional}): stimulation rate parameter, if no value is given, the
optimal value is estimated automatically (see details). Smaller values of delta produce more points in the rising tail of the curve.} } \value{ The function returns the same data type as the input data type with the transformed curve values. \strong{\code{RLum.Data.Curve}} \tabular{ll}{ \verb{$CW2pHMi.x.t} \tab: transformed time values \cr \verb{$CW2pHMi.method} \tab: used method for the production of the new data points } \strong{\code{data.frame}} \tabular{ll}{ \verb{$x} \tab: time\cr \verb{$y.t} \tab: transformed count values\cr \verb{$x.t} \tab: transformed time values \cr \verb{$method} \tab: used method for the production of the new data points } } \description{ This function transforms a conventionally measured continuous-wave (CW) OSL-curve to a pseudo hyperbolic modulated (pHM) curve under hyperbolic modulation conditions using the interpolation procedure described by Bos & Wallinga (2012). } \details{ The complete procedure of the transformation is described in Bos & Wallinga (2012). The input \code{data.frame} consists of two columns: time (t) and count values (CW(t)) \strong{Internal transformation steps} (1) log(CW-OSL) values (2) Calculate t' which is the transformed time: \deqn{t' = t-(1/\delta)*log(1+\delta*t)} (3) Interpolate CW(t'), i.e. use the log(CW(t)) to obtain the count values for the transformed time (t'). Values beyond \code{min(t)} and \code{max(t)} produce \code{NA} values. (4) Select all values for t' < \code{min(t)}, i.e. values beyond the time resolution of t. Select the first two values of the transformed data set which contain no \code{NA} values and use these values for a linear fit using \link{lm}. (5) Extrapolate values for t' < \code{min(t)} based on the previously obtained fit parameters. 
(6) Transform values using \deqn{pHM(t) = (\delta*t/(1+\delta*t))*c*CW(t')} \deqn{c = (1+\delta*P)/\delta*P} \deqn{P = length(stimulation~period)} (7) Combine all values and truncate all values for t' > \code{max(t)} \strong{NOTE:} The number of values for t' < \code{min(t)} depends on the stimulation rate parameter \code{delta}. To avoid the production of too many artificial data at the rising tail of the determined pHM curve, it is recommended to use the automatic estimation routine for \code{delta}, i.e. provide no value for \code{delta}. } \note{ According to Bos & Wallinga (2012), the number of extrapolated points should be limited to avoid artificial intensity data. If \code{delta} is provided manually and more than two points are extrapolated, a warning message is returned. The function \link{approx} may produce some \code{Inf} and \code{NaN} data. The function tries to manually interpolate these values by calculating the \code{mean} using the adjacent channels. If two invalid values occur in succession, the values are removed and no further interpolation is attempted. In every case a warning message is shown.
} \section{Function version}{ 0.2.2 } \examples{ ##(1) - simple transformation ##load CW-OSL curve data data(ExampleData.CW_OSL_Curve, envir = environment()) ##transform values values.transformed<-CW2pHMi(ExampleData.CW_OSL_Curve) ##plot plot(values.transformed$x, values.transformed$y.t, log = "x") ##(2) - load CW-OSL curve from BIN-file and plot transformed values ##load BINfile #BINfileData<-readBIN2R("[path to BIN-file]") data(ExampleData.BINfileData, envir = environment()) ##grep first CW-OSL curve from ALQ 1 curve.ID<-CWOSL.SAR.Data@METADATA[CWOSL.SAR.Data@METADATA[,"LTYPE"]=="OSL" & CWOSL.SAR.Data@METADATA[,"POSITION"]==1 ,"ID"] curve.HIGH<-CWOSL.SAR.Data@METADATA[CWOSL.SAR.Data@METADATA[,"ID"]==curve.ID[1] ,"HIGH"] curve.NPOINTS<-CWOSL.SAR.Data@METADATA[CWOSL.SAR.Data@METADATA[,"ID"]==curve.ID[1] ,"NPOINTS"] ##combine curve to data set curve<-data.frame(x = seq(curve.HIGH/curve.NPOINTS,curve.HIGH, by = curve.HIGH/curve.NPOINTS), y=unlist(CWOSL.SAR.Data@DATA[curve.ID[1]])) ##transform values curve.transformed <- CW2pHMi(curve) ##plot curve plot(curve.transformed$x, curve.transformed$y.t, log = "x") ##(3) - produce Fig. 4 from Bos & Wallinga (2012) ##load data data(ExampleData.CW_OSL_Curve, envir = environment()) values <- CW_Curve.BosWallinga2012 ##open plot area plot(NA, NA, xlim=c(0.001,10), ylim=c(0,8000), ylab="pseudo OSL (cts/0.01 s)", xlab="t [s]", log="x", main="Fig. 4 - Bos & Wallinga (2012)") values.t<-CW2pLMi(values, P=1/20) lines(values[1:length(values.t[,1]),1],CW2pLMi(values, P=1/20)[,2], col="red" ,lwd=1.3) text(0.03,4500,"LM", col="red" ,cex=.8) values.t<-CW2pHMi(values, delta=40) lines(values[1:length(values.t[,1]),1],CW2pHMi(values, delta=40)[,2], col="black", lwd=1.3) text(0.005,3000,"HM", cex=.8) values.t<-CW2pPMi(values, P=1/10) lines(values[1:length(values.t[,1]),1],CW2pPMi(values, P=1/10)[,2], col="blue", lwd=1.3) text(0.5,6500,"PM", col="blue" ,cex=.8) } \section{How to cite}{ Kreutzer, S., 2020. 
CW2pHMi(): Transform a CW-OSL curve into a pHM-OSL curve via interpolation under hyperbolic modulation conditions. Function version 0.2.2. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Bos, A.J.J. & Wallinga, J., 2012. How to visualize quartz OSL signal components. Radiation Measurements, 47, 752-758.\cr \strong{Further Reading} Bulur, E., 1996. An Alternative Technique For Optically Stimulated Luminescence (OSL) Experiment. Radiation Measurements, 26, 701-709. Bulur, E., 2000. A simple transformation for converting CW-OSL curves to LM-OSL curves. Radiation Measurements, 32, 141-145. } \seealso{ \link{CW2pLM}, \link{CW2pLMi}, \link{CW2pPMi}, \link{fit_LMCurve}, \link{lm}, \linkS4class{RLum.Data.Curve} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr Based on comments and suggestions from:\cr Adrie J.J. Bos, Delft University of Technology, The Netherlands , RLum Developer Team} \keyword{manip} Luminescence/man/plot_Risoe.BINfileData.Rd0000644000176200001440000001051013604173245020110 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_Risoe.BINfileData.R \name{plot_Risoe.BINfileData} \alias{plot_Risoe.BINfileData} \title{Plot single luminescence curves from a BIN file object} \usage{ plot_Risoe.BINfileData( BINfileData, position, run, set, sorter = "POSITION", ltype = c("IRSL", "OSL", "TL", "RIR", "RBR", "RL"), curve.transformation, dose_rate, temp.lab, cex.global = 1, ... ) } \arguments{ \item{BINfileData}{\linkS4class{Risoe.BINfileData} (\strong{required}): requires an S4 object returned by the \link{read_BIN2R} function.} \item{position}{\link{vector} (\emph{optional}): option to limit the plotted curves by position (e.g. 
\code{position = 1}, \code{position = c(1,3,5)}).} \item{run}{\link{vector} (\emph{optional}): option to limit the plotted curves by run (e.g., \code{run = 1}, \code{run = c(1,3,5)}).} \item{set}{\link{vector} (\emph{optional}): option to limit the plotted curves by set (e.g., \code{set = 1}, \code{set = c(1,3,5)}).} \item{sorter}{\link{character} (\emph{with default}): the plot output can be ordered by "POSITION","SET" or "RUN". POSITION, SET and RUN are options defined in the Risoe Sequence Editor.} \item{ltype}{\link{character} (\emph{with default}): option to limit the plotted curves by the type of luminescence stimulation. Allowed values: \code{"IRSL"}, \code{"OSL"},\code{"TL"}, \code{"RIR"}, \code{"RBR"} (corresponds to LM-OSL), \code{"RL"}. All types of curves are plotted by default.} \item{curve.transformation}{\link{character} (\emph{optional}): allows transforming CW-OSL and CW-IRSL curves to pseudo-LM curves via transformation functions. Allowed values are: \code{CW2pLM}, \code{CW2pLMi}, \code{CW2pHMi} and \code{CW2pPMi}. See details.} \item{dose_rate}{\link{numeric} (\emph{optional}): dose rate of the irradiation source at the measurement date. If set, the given irradiation dose will be shown in Gy. See details.} \item{temp.lab}{\link{character} (\emph{optional}): option to allow for different temperature units. If no value is set deg. C is chosen.} \item{cex.global}{\link{numeric} (\emph{with default}): global scaling factor.} \item{...}{further undocumented plot arguments.} } \value{ Returns a plot. } \description{ Plots single luminescence curves from an object returned by the \link{read_BIN2R} function. } \details{ \strong{Nomenclature} See \linkS4class{Risoe.BINfileData} \strong{curve.transformation} This argument allows transforming continuous wave (CW) curves to pseudo (linear) modulated curves. For the transformation, the functions of the package are used. Currently, it is not possible to pass further arguments to the transformation functions.
The argument works only for \code{ltype} \code{OSL} and \code{IRSL}. \strong{Irradiation time} Plotting the irradiation time (s) or the given dose (Gy) requires that the variable \code{IRR_TIME} has been set within the BIN-file. This is normally done by using the 'Run Info' option within the Sequence Editor or by editing in R. } \note{ The function has been successfully tested for the Sequence Editor file output version 3 and 4. } \section{Function version}{ 0.4.1 } \examples{ ##load data data(ExampleData.BINfileData, envir = environment()) ##plot all curves from the first position to the desktop #pdf(file = "~/Desktop/CurveOutput.pdf", paper = "a4", height = 11, onefile = TRUE) ##example - load from *.bin file #BINfile<- file.choose() #BINfileData<-read_BIN2R(BINfile) #par(mfrow = c(4,3), oma = c(0.5,1,0.5,1)) #plot_Risoe.BINfileData(CWOSL.SAR.Data,position = 1) #mtext(side = 4, BINfile, outer = TRUE, col = "blue", cex = .7) #dev.off() } \section{How to cite}{ Kreutzer, S., Dietze, M., 2020. plot_Risoe.BINfileData(): Plot single luminescence curves from a BIN file object. Function version 0.4.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Duller, G., 2007. Analyst. pp. 1-45. 
} \seealso{ \linkS4class{Risoe.BINfileData},\link{read_BIN2R}, \link{CW2pLM}, \link{CW2pLMi}, \link{CW2pPMi}, \link{CW2pHMi} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr Michael Dietze, GFZ Potsdam (Germany) , RLum Developer Team} \keyword{dplot} Luminescence/man/calc_AliquotSize.Rd0000644000176200001440000001523013604173244017166 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_AliquotSize.R \name{calc_AliquotSize} \alias{calc_AliquotSize} \title{Estimate the amount of grains on an aliquot} \usage{ calc_AliquotSize( grain.size, sample.diameter, packing.density = 0.65, MC = TRUE, grains.counted, plot = TRUE, ... ) } \arguments{ \item{grain.size}{\link{numeric} (\strong{required}): mean grain size (microns) or a range of grain sizes from which the mean grain size is computed (e.g. \code{c(100,200)}).} \item{sample.diameter}{\link{numeric} (\strong{required}): diameter (mm) of the targeted area on the sample carrier.} \item{packing.density}{\link{numeric} (\emph{with default}): empirical value for mean packing density. \cr If \code{packing.density = "Inf"} a hexagonal structure on an infinite plane with a packing density of \eqn{0.906\ldots} is assumed.} \item{MC}{\link{logical} (\emph{optional}): if \code{TRUE} the function performs a monte carlo simulation for estimating the amount of grains on the sample carrier and assumes random errors in grain size distribution and packing density. Requires a vector with min and max grain size for \code{grain.size}. For more information see details.} \item{grains.counted}{\link{numeric} (\emph{optional}): grains counted on a sample carrier. If a non-zero positive integer is provided this function will calculate the packing density of the aliquot. If more than one value is provided the mean packing density and its standard deviation is calculated. 
Note that this overrides \code{packing.density}.} \item{plot}{\link{logical} (\emph{with default}): plot output (\code{TRUE}/\code{FALSE})} \item{...}{further arguments to pass (\verb{main, xlab, MC.iter}).} } \value{ Returns a terminal output. In addition an \linkS4class{RLum.Results} object is returned containing the following element: \item{.$summary}{\link{data.frame} summary of all relevant calculation results.} \item{.$args}{\link{list} used arguments} \item{.$call}{\link{call} the function call} \item{.$MC}{\link{list} results of the Monte Carlo simulation} The output should be accessed using the function \link{get_RLum}. } \description{ Estimate the number of grains on an aliquot. Alternatively, the packing density of an aliquot is computed. } \details{ This function can be used to either estimate the number of grains on an aliquot or to compute the packing density depending on the the arguments provided. The following function is used to estimate the number of grains \code{n}: \deqn{n = (\pi*x^2)/(\pi*y^2)*d} where \code{x} is the radius of the aliquot size (microns), \code{y} is the mean radius of the mineral grains (mm) and \code{d} is the packing density (value between 0 and 1). \strong{Packing density} The default value for \code{packing.density} is 0.65, which is the mean of empirical values determined by Heer et al. (2012) and unpublished data from the Cologne luminescence laboratory. If \code{packing.density = "Inf"} a maximum density of \eqn{\pi/\sqrt12 = 0.9068\ldots} is used. However, note that this value is not appropriate as the standard preparation procedure of aliquots resembles a PECC (\emph{"Packing Equal Circles in a Circle"}) problem where the maximum packing density is asymptotic to about 0.87. \strong{Monte Carlo simulation} The number of grains on an aliquot can be estimated by Monte Carlo simulation when setting \code{MC = TRUE}. 
Each of the parameters necessary to calculate \code{n} (\code{x}, \code{y}, \code{d}) is assumed to be normally distributed with means \eqn{\mu_x, \mu_y, \mu_d} and standard deviations \eqn{\sigma_x, \sigma_y, \sigma_d}. For the mean grain size random samples are taken first from \eqn{N(\mu_y, \sigma_y)}, where \eqn{\mu_y = mean.grain.size} and \eqn{\sigma_y = (max.grain.size-min.grain.size)/4} so that 95\\% of all grains are within the provided grain size range. This effectively takes into account that after sieving the sample there is still a small chance of having grains smaller or larger than the used mesh sizes. For each random sample the mean grain size is calculated, from which random subsamples are drawn for the Monte Carlo simulation. The packing density is assumed to be normally distributed with an empirically determined \eqn{\mu = 0.65} (or provided value) and \eqn{\sigma = 0.18}. The normal distribution is truncated at \code{d = 0.87} as this is approximately the maximum packing density that can be achieved in a PECC problem. The sample diameter has \eqn{\mu = sample.diameter} and \eqn{\sigma = 0.2} to take into account variations in sample disc preparation (i.e. applying silicon spray to the disc). A lower truncation point at \code{x = 0.5} is used, which assumes that aliquots with smaller sample diameters of 0.5 mm are discarded. Likewise, the normal distribution is truncated at 9.8 mm, which is the diameter of the sample disc. For each random sample drawn from the normal distributions the amount of grains on the aliquot is calculated. By default, \code{10^5} iterations are used, but can be reduced/increased with \code{MC.iter} (see \code{...}). The results are visualised in a bar- and boxplot together with a statistical summary. 
} \section{Function version}{ 0.31 } \examples{ ## Estimate the amount of grains on a small aliquot calc_AliquotSize(grain.size = c(100,150), sample.diameter = 1, MC.iter = 100) ## Calculate the mean packing density of large aliquots calc_AliquotSize(grain.size = c(100,200), sample.diameter = 8, grains.counted = c(2525,2312,2880), MC.iter = 100) } \section{How to cite}{ Burow, C., 2020. calc_AliquotSize(): Estimate the amount of grains on an aliquot. Function version 0.31. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Duller, G.A.T., 2008. Single-grain optical dating of Quaternary sediments: why aliquot size matters in luminescence dating. Boreas 37, 589-612. Heer, A.J., Adamiec, G., Moska, P., 2012. How many grains are there on a single aliquot?. Ancient TL 30, 9-16. \strong{Further reading} Chang, H.-C., Wang, L.-C., 2010. A simple proof of Thue's Theorem on Circle Packing. \url{http://arxiv.org/pdf/1009.4322v1.pdf}, 2013-09-13. Graham, R.L., Lubachevsky, B.D., Nurmela, K.J., Oestergard, P.R.J., 1998. Dense packings of congruent circles in a circle. Discrete Mathematics 181, 139-154. Huang, W., Ye, T., 2011. Global optimization method for finding dense packings of equal circles in a circle. European Journal of Operational Research 210, 474-481. 
} \author{ Christoph Burow, University of Cologne (Germany) , RLum Developer Team} Luminescence/man/calc_Lamothe2003.Rd0000644000176200001440000001213713604173244016616 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_Lamothe2003.R \name{calc_Lamothe2003} \alias{calc_Lamothe2003} \title{Apply fading correction after Lamothe et al., 2003} \usage{ calc_Lamothe2003( object, dose_rate.envir, dose_rate.source, g_value, tc = NULL, tc.g_value = tc, verbose = TRUE, plot = TRUE, ... ) } \arguments{ \item{object}{\linkS4class{RLum.Results} \link{data.frame} (\strong{required}): Input data for applying the fading correction. Alow are (1) \link{data.frame} with three columns (dose, LxTx, LxTx error), (2) \linkS4class{RLum.Results} object created by the function \link{analyse_SAR.CWOSL} or \link{analyse_pIRIRSequence}} \item{dose_rate.envir}{\link{numeric} vector of length 2 (\strong{required}): Environmental dose rate in mGy/a} \item{dose_rate.source}{\link{numeric} vector of length 2 (\strong{required}): Irradiation source dose rate in Gy/s} \item{g_value}{\link{numeric} vector of length 2 (\strong{required}): g_value in \\%/decade \emph{recalculated at the moment} the equivalent dose was calculated, i.e. tc is either similar for the g-value measurement \strong{and} the De measurement or needs be to recalculated (cf. \link{calc_FadingCorr}). Inserting a normalised g-value, e.g., normalised to 2-days , will lead to wrong results} \item{tc}{\link{numeric} (optional): time in seconds between irradiation and the prompt measurement used in the De estimation (cf. Huntley & Lamothe 2001). If set to \code{NULL} it is assumed that tc is similar for the equivalent dose estimation and the g-value estimation} \item{tc.g_value}{\link{numeric} (with default): the time in seconds between irradiation and the prompt measurement used for estimating the g-value. 
If the g-value was normalised to, e.g., 2 days, this time in seconds (i.e., 172800) should be given here along with the time used for the De estimation. If nothing is provided the time is set to tc, which is the usual case for g-values obtained using the SAR method and g-values that have not been normalised to 2 days. Note: If this value is not \code{NULL} the function expects a \link{numeric} value for \code{tc}.} \item{verbose}{\link{logical} (with default): Enables/disables terminal verbose mode} \item{plot}{\link{logical} (with default): Enables/disables plot output} \item{...}{further arguments passed to the function \link{plot_GrowthCurve}} } \value{ The function returns a graphical output produced by the function \link{plot_GrowthCurve} and an \linkS4class{RLum.Results}. -----------------------------------\cr \verb{[ NUMERICAL OUTPUT ]}\cr -----------------------------------\cr \strong{\code{RLum.Results}}-object \strong{slot:} \strong{\verb{@data}} \tabular{lll}{ \strong{Element} \tab \strong{Type} \tab \strong{Description}\cr \verb{$data} \tab \code{data.frame} \tab the fading corrected values \cr \verb{$fit} \tab \code{nls} \tab the object returned by the dose response curve fitting \cr } \strong{slot:} \strong{\verb{@info}} The original function call } \description{ This function applies the fading correction for the prediction of long-term fading as suggested by Lamothe et al., 2003. The function basically adjusts the Ln/Tn values and fits a new dose-response curve using the function \link{plot_GrowthCurve}. 
} \section{Function version}{ 0.1.0 } \examples{ ##load data ##ExampleData.BINfileData contains two BINfileData objects ##CWOSL.SAR.Data and TL.SAR.Data data(ExampleData.BINfileData, envir = environment()) ##transform the values from the first position in a RLum.Analysis object object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) ##perform SAR analysis and set rejection criteria results <- analyse_SAR.CWOSL( object = object, signal.integral.min = 1, signal.integral.max = 2, background.integral.min = 900, background.integral.max = 1000, verbose = FALSE, plot = FALSE, onlyLxTxTable = TRUE ) ##run fading correction results_corr <- calc_Lamothe2003( object = results, dose_rate.envir = c(1.676 , 0.180), dose_rate.source = c(0.184, 0.003), g_value = c(2.36, 0.6), plot = TRUE, fit.method = "EXP") } \section{How to cite}{ Kreutzer, S., Mercier, N., 2020. calc_Lamothe2003(): Apply fading correction after Lamothe et al., 2003. Function version 0.1.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Huntley, D.J., Lamothe, M., 2001. Ubiquity of anomalous fading in K-feldspars and the measurement and correction for it in optical dating. Canadian Journal of Earth Sciences 38, 1093-1106. Lamothe, M., Auclair, M., Hamzaoui, C., Huot, S., 2003. Towards a prediction of long-term anomalous fadingof feldspar IRSL. Radiation Measurements 37, 493-498. 
} \seealso{ \link{plot_GrowthCurve}, \link{calc_FadingCorr}, \link{analyse_SAR.CWOSL}, \link{analyse_pIRIRSequence} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France), Norbert Mercier, IRAMAT-CRP2A, Université Bordeaux Montaigne (France) , RLum Developer Team} \keyword{datagen} Luminescence/man/RLum.Data.Curve-class.Rd0000644000176200001440000001576313604173245017664 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/RLum.Data.Curve-class.R \docType{class} \name{RLum.Data.Curve-class} \alias{RLum.Data.Curve-class} \alias{show,RLum.Data.Curve-method} \alias{set_RLum,RLum.Data.Curve-method} \alias{get_RLum,RLum.Data.Curve-method} \alias{length_RLum,RLum.Data.Curve-method} \alias{names_RLum,RLum.Data.Curve-method} \alias{bin_RLum.Data,RLum.Data.Curve-method} \alias{smooth_RLum,RLum.Data.Curve-method} \title{Class \code{"RLum.Data.Curve"}} \usage{ \S4method{show}{RLum.Data.Curve}(object) \S4method{set_RLum}{RLum.Data.Curve}( class, originator, .uid, .pid, recordType = NA_character_, curveType = NA_character_, data = matrix(0, ncol = 2), info = list() ) \S4method{get_RLum}{RLum.Data.Curve}(object, info.object = NULL) \S4method{length_RLum}{RLum.Data.Curve}(object) \S4method{names_RLum}{RLum.Data.Curve}(object) \S4method{bin_RLum.Data}{RLum.Data.Curve}(object, bin_size = 2) \S4method{smooth_RLum}{RLum.Data.Curve}(object, k = NULL, fill = NA, align = "right", method = "mean") } \arguments{ \item{object}{\code{\link{get_RLum}}, \code{\link{length_RLum}}, \code{\link{names_RLum}} (\strong{required}): an object of class \linkS4class{RLum.Data.Curve}} \item{class}{\code{\link{set_RLum}}; \link{character} (\strong{required}): name of the \code{RLum} class to create} \item{originator}{\code{\link{set_RLum}}; \link{character} (\emph{automatic}): contains the name of the calling function (the function that produces this object); can be set manually.} \item{.uid}{\code{\link{set_RLum}}; \link{character} 
(\emph{automatic}): sets an unique ID for this object using the internal C++ function \code{create_UID}.} \item{.pid}{\code{\link{set_RLum}}; \link{character} (\emph{with default}): option to provide a parent id for nesting at will.} \item{recordType}{\code{\link{set_RLum}}; \link{character} (\emph{optional}): record type (e.g., "OSL")} \item{curveType}{\code{\link{set_RLum}}; \link{character} (\emph{optional}): curve type (e.g., "predefined" or "measured")} \item{data}{\code{\link{set_RLum}}; \link{matrix} (\strong{required}): raw curve data. If \code{data} itself is a \code{RLum.Data.Curve}-object this can be used to re-construct the object (s. details), i.e. modified parameters except \code{.uid}, \code{.pid} and \code{originator}. The rest will be subject to copy and paste unless provided.} \item{info}{\code{\link{set_RLum}}; \link{list} (\emph{optional}): info elements} \item{info.object}{\code{\link{get_RLum}} \link{character} (\emph{optional}): name of the wanted info element} \item{bin_size}{\link{integer} (\emph{with default}): set number of channels used for each bin, e.g. \code{bin_size = 2} means that two channels are binned.} \item{k}{\code{\link{smooth_RLum}}; \link{integer} (\emph{with default}): window for the rolling mean; must be odd for rollmedian. 
If nothing is set k is set automatically} \item{fill}{\code{\link{smooth_RLum}}; \link{numeric} (\emph{with default}): a vector defining the left and the right hand data} \item{align}{\code{\link{smooth_RLum}}; \link{character} (\emph{with default}): specifying whether the index of the result should be left- or right-aligned or centered (default) compared to the rolling window of observations, allowed \code{"right"}, \code{"center"} and \code{"left"}} \item{method}{\code{\link{smooth_RLum}}; \link{character} (\emph{with default}): defines which method should be applied for the smoothing: \code{"mean"} or \code{"median"}} } \value{ \strong{\code{set_RLum}} Returns an \linkS4class{RLum.Data.Curve} object. \strong{\code{get_RLum}} \enumerate{ \item A \link{matrix} with the curve values or \item only the info object if \code{info.object} was set. } \strong{\code{length_RLum}} Number of channels in the curve (row number of the matrix) **`names_RLum`** Names of the info elements (slot `info`) \strong{\code{bin_RLum.Data}} Same object as input, after applying the binning. \strong{\code{smooth_RLum}} Same object as input, after smoothing } \description{ Class for representing luminescence curve data. } \section{Methods (by generic)}{ \itemize{ \item \code{show}: Show structure of \code{RLum.Data.Curve} object \item \code{set_RLum}: Construction method for RLum.Data.Curve object. The slot info is optional and predefined as empty list by default. \item \code{get_RLum}: Accessor method for RLum.Data.Curve object. The argument info.object is optional to directly access the info elements. If no info element name is provided, the raw curve data (matrix) will be returned. 
\item \code{length_RLum}: Returns the length of the curve object, which is the maximum value of the time/temperature of the curve (corresponding to the stimulation length) \item \code{names_RLum}: Returns the names of the info elements coming along with this curve object \item \code{bin_RLum.Data}: Allows binning of specific objects \item \code{smooth_RLum}: Smoothing of RLum.Data.Curve objects using the function \link[zoo:rollmean]{zoo::rollmean} or \link[zoo:rollmean]{zoo::rollmedian}. In particular the internal function \code{.smoothing} is used. }} \section{Slots}{ \describe{ \item{\code{recordType}}{Object of class "character" containing the type of the curve (e.g. "TL" or "OSL")} \item{\code{curveType}}{Object of class "character" containing curve type, allowed values are measured or predefined} \item{\code{data}}{Object of class \link{matrix} containing curve x and y data. 'data' can also be of type \code{RLum.Data.Curve} to change object values without deconstructing the object. For example:\preformatted{set_RLum(class = 'RLum.Data.Curve', data = Your.RLum.Data.Curve, recordType = 'never seen before') } would just change the recordType. For missing arguments the value is taken from the input object in 'data' (which is already an RLum.Data.Curve object in this example)} }} \note{ The class should only contain data for a single curve. For additional elements the slot \code{info} can be used (e.g. providing an additional heating ramp curve). Objects from the class \code{RLum.Data.Curve} are produced by other functions (partly within \linkS4class{RLum.Analysis} objects), namely: \link{Risoe.BINfileData2RLum.Analysis}, \link{read_XSYG2R} } \section{Create objects from this Class}{ Objects can be created by calls of the form \code{set_RLum(class = "RLum.Data.Curve", ...)}. 
} \section{Class version}{ 0.5.1 } \examples{ showClass("RLum.Data.Curve") ##set empty curve object set_RLum(class = "RLum.Data.Curve") } \seealso{ \linkS4class{RLum}, \linkS4class{RLum.Data}, \link{plot_RLum}, \link{merge_RLum} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. RLum.Data.Curve-class(): Class 'RLum.Data.Curve'. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{classes} \keyword{internal} Luminescence/man/convert_Activity2Concentration.Rd0000644000176200001440000000651013604173244022101 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/convert_Activity2Concentration.R \name{convert_Activity2Concentration} \alias{convert_Activity2Concentration} \title{Convert Nuclide Activities to Concentrations and Vice Versa} \usage{ convert_Activity2Concentration(data, input_unit = "Bq/kg", verbose = TRUE) } \arguments{ \item{data}{\link{data.frame} \strong{(required)}: provide dose rate data (activity or concentration) in three columns. The first column indicates the nuclides, the 2nd column measured value and in the 3rd column its error value. Allowed nuclide data are \code{'U-238'}, \code{'Th-232'} and \code{'K-40'}. 
See examples for an example.} \item{input_unit}{\link{character} (\emph{with default}): specify unit of input data given in the dose rate data frame, choose between 'Bq/kg' and 'ppm/\%' the default is 'Bq/kg'} \item{verbose}{\link{logical} (\emph{with default}): enable or disable verbose mode} } \description{ The function performs the conversion of the specific activities into concentrations and vice versa for the nuclides U-238, Th-232 and K-40 to harmonise the measurement unit with the required data input unit of potential analytical tools for, e.g. dose rate calculation or related functions such as \link{use_DRAC}. } \details{ The conversion from nuclide activity of a sample to nuclide concentration is performed using conversion factors that are based on the mass-related specific activity of the respective nuclides. The factors can be calculated using the equation: \deqn{ A = avogadronumber * N.freq / N.mol.mass * ln(2) / N.half.life } \deqn{ f = A / 10^6 } where: \itemize{ \item \code{A} - specific activity of the nuclide \item \code{N.freq} - natural frequency of the isotop \item \code{N.mol.mass} molare mass \item \code{n.half.life} half-life of the nuclide } example for U238: \itemize{ \item \eqn{avogadronumber = 6.02214199*10^23} \item \eqn{uran.half.life = 1.41*10^17} (in s) \item \eqn{uran.mol.mass = 0.23802891} (in kg/mol) \item \eqn{uran.freq = 0.992745} (in mol) \item \eqn{A.U = avogadronumber * uran.freq / uran.mol.mass * ln(2) / uran.half.life} (specific activity in Bq/kg) \item \eqn{f.U = A.kg / 10^6} } } \section{Function version}{ 0.1.0 } \examples{ ##construct data.frame data <- data.frame( NUCLIDES = c("U-238", "Th-232", "K-40"), VALUE = c(40,80,100), VALUE_ERROR = c(4,8,10), stringsAsFactors = FALSE) ##perform analysis convert_Activity2Concentration(data) } \section{How to cite}{ Fuchs, M.C., 2020. convert_Activity2Concentration(): Convert Nuclide Activities to Concentrations and Vice Versa. Function version 0.1.0. 
In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Debertin, K., Helmer, R.G., 1988. Gamma- and X-ray Spectrometry with Semiconductor Detectors, Elsevier Science Publishers, p.283 Wiechen, A., Ruehle, H., Vogl, K., 2013. Bestimmung der massebezogenen Aktivitaet von Radionukliden. AEQUIVAL/MASSAKT, ISSN 1865-8725, \url{https://www.bmu.de/fileadmin/Daten_BMU/Download_PDF/Strahlenschutz/aequival-massakt_v2013-07_bf.pdf} } \author{ Margret C. Fuchs, Helmholtz-Institut Freiberg for Resource Technology (Germany) , RLum Developer Team} \keyword{IO} Luminescence/man/calc_WodaFuchs2008.Rd0000644000176200001440000000474213604173244017120 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_WodaFuchs2008.R \name{calc_WodaFuchs2008} \alias{calc_WodaFuchs2008} \title{Obtain the equivalent dose using the approach by Woda and Fuchs 2008} \usage{ calc_WodaFuchs2008(data, breaks = NULL, plot = TRUE, ...) } \arguments{ \item{data}{\link{data.frame} or \linkS4class{RLum.Results} object (\strong{required}): for \link{data.frame}: two columns: De (\code{values[,1]}) and De error (\code{values[,2]}). For plotting multiple data sets, these must be provided as \code{list} (e.g. \code{list(dataset1, dataset2)}).} \item{breaks}{\link{numeric}: Either number or locations of breaks. See \verb{[hist]} for details. If missing, the number of breaks will be estimated based on the bin width (as function of median error).} \item{plot}{\link{logical} (\emph{with default}): enable plot output.} \item{...}{Further plot arguments passed to the function.} } \description{ The function generates a histogram-like reorganisation of the data, to assess counts per bin. 
The log-transformed counts per bin are used to calculate the second derivative of the data (i.e., the curvature of the curve) and to find the central value of the bin hosting the distribution maximum. A normal distribution model is fitted to the counts per bin data to estimate the dose distribution parameters. The uncertainty of the model is estimated based on all input equivalent doses smaller that of the modelled central value. } \section{Function version}{ 0.2.0 } \examples{ ## read example data set data(ExampleData.DeValues, envir = environment()) results <- calc_WodaFuchs2008( data = ExampleData.DeValues$CA1, xlab = expression(paste(D[e], " [Gy]")) ) } \section{How to cite}{ Kreutzer, S., Dietze, M., 2020. calc_WodaFuchs2008(): Obtain the equivalent dose using the approach by Woda and Fuchs 2008. Function version 0.2.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Woda, C., Fuchs, M., 2008. On the applicability of the leading edge method to obtain equivalent doses in OSL dating and dosimetry. Radiation Measurements 43, 26-37. 
} \seealso{ \link{calc_FuchsLang2001}, \link{calc_CentralDose} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France),\cr Michael Dietze, GFZ Potsdam (Germany) , RLum Developer Team} Luminescence/man/merge_RLum.Data.Curve.Rd0000644000176200001440000001014413604173244017723 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/merge_RLum.Data.Curve.R \name{merge_RLum.Data.Curve} \alias{merge_RLum.Data.Curve} \title{Merge function for RLum.Data.Curve S4 class objects} \usage{ merge_RLum.Data.Curve(object, merge.method = "mean", method.info) } \arguments{ \item{object}{\link{list} of \linkS4class{RLum.Data.Curve} (\strong{required}): list of S4 objects of class \code{RLum.Curve}.} \item{merge.method}{\link{character} (\strong{required}): method for combining of the objects, e.g. \code{'mean'}, \code{'sum'}, see details for further information and allowed methods. Note: Elements in slot info will be taken from the first curve in the list.} \item{method.info}{\link{numeric} (\emph{optional}): allows to specify how info elements of the input objects are combined, e.g. \code{1} means that just the elements from the first object are kept, \code{2} keeps only the info elements from the 2 object etc. If nothing is provided all elements are combined.} } \value{ Returns an \linkS4class{RLum.Data.Curve} object. } \description{ Function allows merging of RLum.Data.Curve objects in different ways } \details{ This function simply allowing to merge \linkS4class{RLum.Data.Curve} objects without touching the objects itself. Merging is always applied on the 2nd colum of the data matrix of the object. \strong{Supported merge operations are \linkS4class{RLum.Data.Curve}} \code{"sum"} All count values will be summed up using the function \link{rowSums}. \code{"mean"} The mean over the count values is calculated using the function \link{rowMeans}. 
\code{"median"} The median over the count values is calculated using the function \link[matrixStats:rowMedians]{matrixStats::rowMedians}. \code{"sd"} The standard deviation over the count values is calculated using the function \link[matrixStats:rowSds]{matrixStats::rowSds}. \code{"var"} The variance over the count values is calculated using the function \link[matrixStats:rowVars]{matrixStats::rowVars}. \code{"min"} The min values from the count values are chosen using the function \link[matrixStats:rowRanges]{matrixStats::rowMins}. \code{"max"} The max values from the count values are chosen using the function \link[matrixStats:rowRanges]{matrixStats::rowMaxs}. \code{"append"} Appends count values of all curves to one combined data curve. The channel width is automatically re-calculated, but requires a constant channel width of the original data. \code{"-"} The row sums of the last objects are subtracted from the first object. \code{"*"} The row sums of the last objects are multiplied with the first object. \code{"/"} Values of the first object are divided by row sums of the last objects. } \note{ The information from the slot \code{recordType} is taken from the first \linkS4class{RLum.Data.Curve} object in the input list. The slot 'curveType' is filled with the name \code{merged}. 
} \section{S3-generic support}{ This function is fully operational via S3-generics: \code{+}, \code{-}, \code{/}, \code{*}, \code{merge} } \section{Function version}{ 0.2.0 } \examples{ ##load example data data(ExampleData.XSYG, envir = environment()) ##grep first and 3d TL curves TL.curves <- get_RLum(OSL.SARMeasurement$Sequence.Object, recordType = "TL (UVVIS)") TL.curve.1 <- TL.curves[[1]] TL.curve.3 <- TL.curves[[3]] ##plot single curves plot_RLum(TL.curve.1) plot_RLum(TL.curve.3) ##subtract the 1st curve from the 2nd and plot TL.curve.merged <- merge_RLum.Data.Curve(list(TL.curve.3, TL.curve.1), merge.method = "/") plot_RLum(TL.curve.merged) } \seealso{ \link{merge_RLum}, \linkS4class{RLum.Data.Curve} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. merge_RLum.Data.Curve(): Merge function for RLum.Data.Curve S4 class objects. Function version 0.2.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{internal} \keyword{utilities} Luminescence/man/plot_RLum.Data.Spectrum.Rd0000644000176200001440000002152013604173245020321 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_RLum.Data.Spectrum.R \name{plot_RLum.Data.Spectrum} \alias{plot_RLum.Data.Spectrum} \title{Plot function for an RLum.Data.Spectrum S4 class object} \usage{ plot_RLum.Data.Spectrum( object, par.local = TRUE, plot.type = "contour", optical.wavelength.colours = TRUE, bg.spectrum = NULL, bg.channels = NULL, bin.rows = 1, bin.cols = 1, norm = NULL, rug = TRUE, limit_counts = NULL, xaxis.energy = FALSE, legend.text, ... 
) } \arguments{ \item{object}{\linkS4class{RLum.Data.Spectrum} or \link{matrix} (\strong{required}): S4 object of class \code{RLum.Data.Spectrum} or a \code{matrix} containing count values of the spectrum.\cr Please note that in case of a matrix rownames and colnames are set automatically if not provided.} \item{par.local}{\link{logical} (\emph{with default}): use local graphical parameters for plotting, e.g. the plot is shown in one column and one row. If \code{par.local = FALSE} global parameters are inherited.} \item{plot.type}{\link{character} (\emph{with default}): plot type, for 3D-plot use \code{persp}, or \code{interactive}, for a 2D-plot \code{contour}, \code{single} or \code{multiple.lines} (along the time or temperature axis) or \code{transect} (along the wavelength axis) \cr} \item{optical.wavelength.colours}{\link{logical} (\emph{with default}): use optical wavelength colour palette. Note: For this, the spectrum range is limited: \code{c(350,750)}. Own colours can be set with the argument \code{col}. If you provide already binned spectra, the colour assignment is likely to be wrong, since the colour gradients are calculated using the bin number.} \item{bg.spectrum}{\linkS4class{RLum.Data.Spectrum} or \link{matrix} (\emph{optional}): Spectrum used for the background subtraction. By definition, the background spectrum should have been measured with the same setting as the signal spectrum. If a spectrum is provided, the argument \code{bg.channels} works only on the provided background spectrum.} \item{bg.channels}{\link{vector} (\emph{optional}): defines channel for background subtraction If a vector is provided the mean of the channels is used for subtraction. If a spectrum is provided via \code{bg.spectrum}, this argument only works on the \code{bg.spectrum}. \strong{Note:} Background subtraction is applied prior to channel binning} \item{bin.rows}{\link{integer} (\emph{with default}): allow summing-up wavelength channels (horizontal binning), e.g. 
\code{bin.rows = 2} two channels are summed up. Binning is applied after the background subtraction.} \item{bin.cols}{\link{integer} (\emph{with default}): allow summing-up channel counts (vertical binning) for plotting, e.g. \code{bin.cols = 2} two channels are summed up. Binning is applied after the background subtraction.} \item{norm}{\link{character} (\emph{optional}): Normalise data to the maximum (\code{norm = "max"}) or minimum (\code{norm = "min"}) count values. The normalisation is applied after the binning.} \item{rug}{\link{logical} (\emph{with default}): enables or disables colour rug. Currently only implemented for plot type \code{multiple.lines} and \code{single}} \item{limit_counts}{\link{numeric} (\emph{optional}): value to limit all count values to this value, i.e. all count values above this threshold will be replaced by this threshold. This is helpful especially in case of TL-spectra.} \item{xaxis.energy}{\link{logical} (\emph{with default}): enables or disables energy instead of wavelength axis. For the conversion the function \link{convert_Wavelength2Energy} is used. \strong{Note:} This option means not only simply redrawing the axis, instead the spectrum in terms of intensity is recalculated, see details.} \item{legend.text}{\link{character} (\emph{with default}): possibility to provide own legend text. This argument is only considered for plot types providing a legend, e.g. \code{plot.type="transect"}} \item{...}{further arguments and graphical parameters that will be passed to the \code{plot} function.} } \value{ Returns a plot. } \description{ The function provides a standardised plot output for spectrum data of an RLum.Data.Spectrum S4 class object } \details{ \strong{Matrix structure} \cr (cf.
\linkS4class{RLum.Data.Spectrum}) \itemize{ \item \code{rows} (x-values): wavelengths/channels (xlim, xlab) \item \code{columns} (y-values): time/temperature (ylim, ylab) \item \code{cells} (z-values): count values (zlim, zlab) } \emph{Note: This nomenclature is valid for all plot types of this function!} \strong{Nomenclature for value limiting} \itemize{ \item \code{xlim}: Limits values along the wavelength axis \item \code{ylim}: Limits values along the time/temperature axis \item \code{zlim}: Limits values along the count value axis } \strong{Details on the plot functions} Spectrum is visualised as 3D or 2D plot. Both plot types are based on internal R plot functions. \strong{\code{plot.type = "persp"}} Arguments that will be passed to \link{persp}: \itemize{ \item \code{shade}: default is \code{0.4} \item \code{phi}: default is \code{15} \item \code{theta}: default is \code{-30} \item \code{expand}: default is \code{1} \item \code{ticktype}: default is \code{detailed}, \code{r}: default is \code{10} } \strong{Note:} Further parameters can be adjusted via \code{par}. For example to set the background transparent and reduce the thickness of the lines use: \code{par(bg = NA, lwd = 0.7)} previous the function call. \strong{\code{plot.type = "single"}} Per frame a single curve is returned. Frames are time or temperature steps. \strong{\code{plot.type = "multiple.lines"}} All frames plotted in one frame. \strong{\code{plot.type = "transect"}} Depending on the selected wavelength/channel range a transect over the time/temperature (y-axis) will be plotted along the wavelength/channels (x-axis). If the range contains more than one channel, values (z-values) are summed up. To select a transect use the \code{xlim} argument, e.g. \code{xlim = c(300,310)} plot along the summed up count values of channel 300 to 310. 
\strong{Further arguments that will be passed (depending on the plot type)} \code{xlab}, \code{ylab}, \code{zlab}, \code{xlim}, \code{ylim}, \code{zlim}, \code{main}, \code{mtext}, \code{pch}, \code{type} ("single", "multiple.lines", "interactive"), \code{col}, \code{border}, \code{box} \code{lwd}, \code{bty}, \code{showscale} ("interactive") } \note{ Not all additional arguments (\code{...}) will be passed similarly! } \section{Function version}{ 0.6.2 } \examples{ ##load example data data(ExampleData.XSYG, envir = environment()) ##(1)plot simple spectrum (2D) - contour plot_RLum.Data.Spectrum(TL.Spectrum, plot.type="contour", xlim = c(310,750), ylim = c(0,300), bin.rows=10, bin.cols = 1) ##(2) plot spectrum (3D) plot_RLum.Data.Spectrum( TL.Spectrum, plot.type="persp", xlim = c(310,750), ylim = c(0,100), bin.rows=10, bin.cols = 1) ##(3) plot spectrum on energy axis ##please note the background subtraction plot_RLum.Data.Spectrum(TL.Spectrum, plot.type="persp", ylim = c(0,200), bin.rows=10, bg.channels = 10, bin.cols = 1, xaxis.energy = TRUE) ##(4) plot multiple lines (2D) - multiple.lines (with ylim) plot_RLum.Data.Spectrum( TL.Spectrum, plot.type="multiple.lines", xlim = c(310,750), ylim = c(0,100), bin.rows=10, bin.cols = 1) \dontrun{ ##(4) interactive plot using the package plotly ("surface") plot_RLum.Data.Spectrum(TL.Spectrum, plot.type="interactive", xlim = c(310,750), ylim = c(0,300), bin.rows=10, bin.cols = 1) ##(5) interactive plot using the package plotly ("contour") plot_RLum.Data.Spectrum(TL.Spectrum, plot.type="interactive", xlim = c(310,750), ylim = c(0,300), bin.rows=10, bin.cols = 1, type = "contour", showscale = TRUE) ##(6) interactive plot using the package plotly ("heatmap") plot_RLum.Data.Spectrum(TL.Spectrum, plot.type="interactive", xlim = c(310,750), ylim = c(0,300), bin.rows=10, bin.cols = 1, type = "heatmap", showscale = TRUE) ##(7) alternative using the package fields fields::image.plot(get_RLum(TL.Spectrum)) 
contour(get_RLum(TL.Spectrum), add = TRUE) } } \seealso{ \linkS4class{RLum.Data.Spectrum}, \link{convert_Wavelength2Energy}, \link{plot}, \link{plot_RLum}, \link{persp}, \link[plotly:plot_ly]{plotly::plot_ly}, \link{contour} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. plot_RLum.Data.Spectrum(): Plot function for an RLum.Data.Spectrum S4 class object. Function version 0.6.2. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{aplot} Luminescence/man/plot_Histogram.Rd0000644000176200001440000001260513604173245016732 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_Histogram.R \name{plot_Histogram} \alias{plot_Histogram} \title{Plot a histogram with separate error plot} \usage{ plot_Histogram( data, na.rm = TRUE, mtext, cex.global, se, rug, normal_curve, summary, summary.pos, colour, interactive = FALSE, ... ) } \arguments{ \item{data}{\link{data.frame} or \linkS4class{RLum.Results} object (\strong{required}): for \code{data.frame}: two columns: De (\code{data[,1]}) and De error (\code{data[,2]})} \item{na.rm}{\link{logical} (\emph{with default}): excludes \code{NA} values from the data set prior to any further operations.} \item{mtext}{\link{character} (\emph{optional}): further sample information (\link{mtext}).} \item{cex.global}{\link{numeric} (\emph{with default}): global scaling factor.} \item{se}{\link{logical} (\emph{optional}): plots standard error points over the histogram, default is \code{FALSE}.} \item{rug}{\link{logical} (\emph{optional}): adds rugs to the histogram, default is \code{TRUE}.} \item{normal_curve}{\link{logical} (\emph{with default}): adds a normal curve to the histogram. 
Mean and sd are calculated from the input data. See the details section for more.} \item{summary}{\link{character} (\emph{optional}): add statistic measures of centrality and dispersion to the plot. Can be one or more of several keywords. See details for available keywords.} \item{summary.pos}{\link{numeric} or \link{character} (\emph{with default}): optional position coordinates or keyword (e.g. \code{"topright"}) for the statistical summary. Alternatively, the keyword \code{"sub"} may be specified to place the summary below the plot header. However, this latter option is only possible if \code{mtext} is not used. In case of coordinate specification, y-coordinate refers to the right y-axis.} \item{colour}{\link{numeric} or \link{character} (\emph{with default}): optional vector of length 4 which specifies the colours of the following plot items in exactly this order: histogram bars, rug lines, normal distribution curve and standard error points (e.g., \code{c("grey", "black", "red", "grey")}).} \item{interactive}{\link{logical} (\emph{with default}): create an interactive histogram plot (requires the 'plotly' package)} \item{...}{further arguments and graphical parameters passed to \link{plot} or \link{hist}. If y-axis labels are provided, these must be specified as a vector of length 2 since the plot features two axes (e.g. \code{ylab = c("axis label 1", "axis label 2")}). Y-axes limits (\code{ylim}) must be provided as vector of length four, with the first two elements specifying the left axes limits and the latter two elements giving the right axis limits.} } \description{ Function plots a predefined histogram with an accompanying error plot as suggested by Rex Galbraith at the UK LED in Oxford 2010. } \details{ If the normal curve is added, the y-axis in the histogram will show the probability density. A statistic summary, i.e.
a collection of statistic measures of centrality and dispersion (and further measures) can be added by specifying one or more of the following keywords: \itemize{ \item \code{"n"} (number of samples), \item \code{"mean"} (mean De value), \item \code{"mean.weighted"} (error-weighted mean), \item \code{"median"} (median of the De values), \item \code{"sdrel"} (relative standard deviation in percent), \item \code{"sdrel.weighted"} (error-weighted relative standard deviation in percent), \item \code{"sdabs"} (absolute standard deviation), \item \code{"sdabs.weighted"} (error-weighted absolute standard deviation), \item \code{"serel"} (relative standard error), \item \code{"serel.weighted"} (error-weighted relative standard error), \item \code{"seabs"} (absolute standard error), \item \code{"seabs.weighted"} (error-weighted absolute standard error), \item \code{"kurtosis"} (kurtosis) and \item \code{"skewness"} (skewness). } } \note{ The input data is not restricted to a special type. } \section{Function version}{ 0.4.4 } \examples{ ## load data data(ExampleData.DeValues, envir = environment()) ExampleData.DeValues <- Second2Gray(ExampleData.DeValues$BT998, dose.rate = c(0.0438,0.0019)) ## plot histogram the easiest way plot_Histogram(ExampleData.DeValues) ## plot histogram with some more modifications plot_Histogram(ExampleData.DeValues, rug = TRUE, normal_curve = TRUE, cex.global = 0.9, pch = 2, colour = c("grey", "black", "blue", "green"), summary = c("n", "mean", "sdrel"), summary.pos = "topleft", main = "Histogram of De-values", mtext = "Example data set", ylab = c(expression(paste(D[e], " distribution")), "Standard error"), xlim = c(100, 250), ylim = c(0, 0.1, 5, 20)) } \seealso{ \link{hist}, \link{plot} } \author{ Michael Dietze, GFZ Potsdam (Germany)\cr Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Dietze, M., Kreutzer, S., 2020. plot_Histogram(): Plot a histogram with separate error plot. 
Function version 0.4.4. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } Luminescence/man/merge_RLum.Results.Rd0000644000176200001440000000225713604173244017436 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/merge_RLum.Results.R \name{merge_RLum.Results} \alias{merge_RLum.Results} \title{Merge function for RLum.Results S4-class objects} \usage{ merge_RLum.Results(objects) } \arguments{ \item{objects}{\link{list} (\strong{required}): a list of \linkS4class{RLum.Results} objects} } \description{ Function merges objects of class \linkS4class{RLum.Results}. The slots in the objects are combined depending on the object type, e.g., for \link{data.frame} and \link{matrix} rows are appended. } \note{ The originator is taken from the first element and not reset to \code{merge_RLum} } \section{Function version}{ 0.2.0 } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. merge_RLum.Results(): Merge function for RLum.Results S4-class objects. Function version 0.2.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{internal} Luminescence/man/get_rightAnswer.Rd0000644000176200001440000000160113604173244017064 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/get_rightAnswer.R \name{get_rightAnswer} \alias{get_rightAnswer} \title{Function to get the right answer} \usage{ get_rightAnswer(...) 
} \arguments{ \item{...}{you can pass an infinite number of further arguments} } \value{ Returns the right answer } \description{ This function returns just the right answer } \section{Function version}{ 0.1.0 } \examples{ ## you really want to know? get_rightAnswer() } \author{ inspired by R.G. , RLum Developer Team} \section{How to cite}{ NA, NA, , , 2020. get_rightAnswer(): Function to get the right answer. Function version 0.1.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } Luminescence/man/fit_LMCurve.Rd0000644000176200001440000002454213604173244016120 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/fit_LMCurve.R \name{fit_LMCurve} \alias{fit_LMCurve} \title{Nonlinear Least Squares Fit for LM-OSL curves} \usage{ fit_LMCurve( values, values.bg, n.components = 3, start_values, input.dataType = "LM", fit.method = "port", sample_code = "", sample_ID = "", LED.power = 36, LED.wavelength = 470, fit.trace = FALSE, fit.advanced = FALSE, fit.calcError = FALSE, bg.subtraction = "polynomial", verbose = TRUE, plot = TRUE, plot.BG = FALSE, ... ) } \arguments{ \item{values}{\linkS4class{RLum.Data.Curve} or \link{data.frame} (\strong{required}): x,y data of measured values (time and counts). See examples.} \item{values.bg}{\linkS4class{RLum.Data.Curve} or \link{data.frame} (\emph{optional}): x,y data of measured values (time and counts) for background subtraction.} \item{n.components}{\link{integer} (\emph{with default}): fixed number of components that are to be recognised during fitting (min = 1, max = 7).} \item{start_values}{\link{data.frame} (\emph{optional}): start parameters for lm and xm data for the fit. 
If no start values are given, an automatic start value estimation is attempted (see details).} \item{input.dataType}{\link{character} (\emph{with default}): alter the plot output depending on the input data: "LM" or "pLM" (pseudo-LM). See: \link{CW2pLM}} \item{fit.method}{\link{character} (\emph{with default}): select fit method, allowed values: \code{'port'} and \code{'LM'}. \code{'port'} uses the 'port' routine using the function \link{nls}. \code{'LM'} utilises the function \code{nlsLM} from the package \code{minpack.lm} and with that the Levenberg-Marquardt algorithm.} \item{sample_code}{\link{character} (\emph{optional}): sample code used for the plot and the optional output table (mtext).} \item{sample_ID}{\link{character} (\emph{optional}): additional identifier used as column header for the table output.} \item{LED.power}{\link{numeric} (\emph{with default}): LED power (max.) used for intensity ramping in mW/cm^2. \strong{Note:} This value is used for the calculation of the absolute photoionisation cross section.} \item{LED.wavelength}{\link{numeric} (\emph{with default}): LED wavelength in nm used for stimulation. \strong{Note:} This value is used for the calculation of the absolute photoionisation cross section.} \item{fit.trace}{\link{logical} (\emph{with default}): traces the fitting process on the terminal.} \item{fit.advanced}{\link{logical} (\emph{with default}): enables advanced fitting attempt for automatic start parameter recognition. Works only if no start parameters are provided. \strong{Note:} It may take a while and it is not compatible with \code{fit.method = "LM"}.} \item{fit.calcError}{\link{logical} (\emph{with default}): calculate 1-sigma error range of components using \link{confint}.} \item{bg.subtraction}{\link{character} (\emph{with default}): specifies method for background subtraction (\code{polynomial}, \code{linear}, \code{channel}, see Details).
\strong{Note:} requires input for \code{values.bg}.} \item{verbose}{\link{logical} (\emph{with default}): terminal output with fitting results.} \item{plot}{\link{logical} (\emph{with default}): returns a plot of the fitted curves.} \item{plot.BG}{\link{logical} (\emph{with default}): returns a plot of the background values with the fit used for the background subtraction.} \item{...}{Further arguments that may be passed to the plot output, e.g. \code{xlab}, \code{xlab}, \code{main}, \code{log}.} } \value{ Various types of plots are returned. For details see above. Furthermore an \code{RLum.Results} object is returned with the following structure: \strong{\verb{@data:}} \code{.. $data} : \link{data.frame} with fitting results\cr \code{.. $fit} : nls (\link{nls} object)\cr \code{.. $component.contribution.matrix} : \link{list} component distribution matrix \strong{\verb{info:}} \code{.. $call} : \link{call} the original function call Matrix structure for the distribution matrix: Column 1 and 2: time and \code{rev(time)} values\cr Additional columns are used for the components, two for each component, containing I0 and n0. The last columns \code{cont.} provide information on the relative component contribution for each time interval including the row sum for this values. } \description{ The function determines weighted nonlinear least-squares estimates of the component parameters of an LM-OSL curve (Bulur 1996) for a given number of components and returns various component parameters. The fitting procedure uses the function \link{nls} with the \code{port} algorithm. } \details{ \strong{Fitting function} The function for the fitting has the general form: \deqn{y = (exp(0.5)*Im_1*x/xm_1)*exp(-x^2/(2*xm_1^2)) + ,\ldots, + exp(0.5)*Im_i*x/xm_i)*exp(-x^2/(2*xm_i^2))} where \eqn{1 < i < 8} This function and the equations for the conversion to b (detrapping probability) and n0 (proportional to initially trapped charge) have been taken from Kitis et al. 
(2008): \deqn{xm_i=\sqrt{max(t)/b_i}} \deqn{Im_i=exp(-0.5)n0/xm_i} \strong{Background subtraction} Three methods for background subtraction are provided for a given background signal (\code{values.bg}). \itemize{ \item \code{polynomial}: default method. A polynomial function is fitted using \link{glm} and the resulting function is used for background subtraction: \deqn{y = a*x^4 + b*x^3 + c*x^2 + d*x + e} \item \code{linear}: a linear function is fitted using \link{glm} and the resulting function is used for background subtraction: \deqn{y = a*x + b} \item \code{channel}: the measured background signal is subtracted channelwise from the measured signal. } \strong{Start values} The choice of the initial parameters for the \code{nls}-fitting is a crucial point and the fitting procedure may mainly fail due to ill chosen start parameters. Here, three options are provided: \strong{(a)} If no start values (\code{start_values}) are provided by the user, a cheap guess is made by using the detrapping values found by Jain et al. (2003) for quartz for a maximum of 7 components. Based on these values, the pseudo start parameters xm and Im are recalculated for the given data set. In all cases, the fitting starts with the ultra-fast component and (depending on \code{n.components}) steps through the following values. If no fit could be achieved, an error plot (for \code{plot = TRUE}) with the pseudo curve (based on the pseudo start parameters) is provided. This may give the opportunity to identify appropriate start parameters visually. \strong{(b)} If start values are provided, the function works like a simple \link{nls} fitting approach. \strong{(c)} If no start parameters are provided and the option \code{fit.advanced = TRUE} is chosen, an advanced start parameter estimation is applied using a stochastic attempt. Therefore, the recalculated start parameters \strong{(a)} are used to construct a normal distribution.
The start parameters are then sampled randomly from this distribution. A maximum of 100 attempts will be made. \strong{Note:} This process may be time consuming. \strong{Goodness of fit} The goodness of the fit is given by a pseudoR^2 value (pseudo coefficient of determination). According to Lave (1970), the value is calculated as: \deqn{pseudoR^2 = 1 - RSS/TSS} where \eqn{RSS = Residual~Sum~of~Squares} and \eqn{TSS = Total~Sum~of~Squares} \strong{Error of fitted component parameters} The 1-sigma error for the components is calculated using the function \link{confint}. Due to considerable calculation time, this option is deactivated by default. In addition, the error for the components can be estimated by using internal R functions like \link{summary}. See the \link{nls} help page for more information. \emph{For more details on the nonlinear regression in R, see Ritz & Streibig (2008).} } \note{ The pseudo-R^2 may not be the best parameter to describe the goodness of the fit. The trade off between the \code{n.components} and the pseudo-R^2 value currently remains unconsidered. The function \strong{does not} ensure that the fitting procedure has reached a global minimum rather than a local minimum! In any case of doubt, the use of manual start values is highly recommended.
} \section{Function version}{ 0.3.2 } \examples{ ##(1) fit LM data without background subtraction data(ExampleData.FittingLM, envir = environment()) fit_LMCurve(values = values.curve, n.components = 3, log = "x") ##(2) fit LM data with background subtraction and export as JPEG ## -alter file path for your preferred system ##jpeg(file = "~/Desktop/Fit_Output\\%03d.jpg", quality = 100, ## height = 3000, width = 3000, res = 300) data(ExampleData.FittingLM, envir = environment()) fit_LMCurve(values = values.curve, values.bg = values.curveBG, n.components = 2, log = "x", plot.BG = TRUE) ##dev.off() ##(3) fit LM data with manual start parameters data(ExampleData.FittingLM, envir = environment()) fit_LMCurve(values = values.curve, values.bg = values.curveBG, n.components = 3, log = "x", start_values = data.frame(Im = c(170,25,400), xm = c(56,200,1500))) } \section{How to cite}{ Kreutzer, S., 2020. fit_LMCurve(): Nonlinear Least Squares Fit for LM-OSL curves. Function version 0.3.2. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Bulur, E., 1996. An Alternative Technique For Optically Stimulated Luminescence (OSL) Experiment. Radiation Measurements, 26, 5, 701-709. Jain, M., Murray, A.S., Boetter-Jensen, L., 2003. Characterisation of blue-light stimulated luminescence components in different quartz samples: implications for dose measurement. Radiation Measurements, 37 (4-5), 441-449. Kitis, G. & Pagonis, V., 2008. Computerized curve deconvolution analysis for LM-OSL. Radiation Measurements, 43, 737-741. Lave, C.A.T., 1970. The Demand for Urban Mass Transportation. The Review of Economics and Statistics, 52 (3), 320-323. Ritz, C. & Streibig, J.C., 2008. Nonlinear Regression with R. R. Gentleman, K. Hornik, & G. Parmigiani, eds., Springer, p. 150. 
} \seealso{ \link{fit_CWCurve}, \link{plot}, \link{nls}, \link[minpack.lm:nlsLM]{minpack.lm::nlsLM}, \link{get_RLum} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \keyword{dplot} \keyword{models} Luminescence/man/calc_FadingCorr.Rd0000644000176200001440000001662013604173244016737 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_FadingCorr.R \name{calc_FadingCorr} \alias{calc_FadingCorr} \title{Apply a fading correction according to Huntley & Lamothe (2001) for a given g-value and a given tc} \usage{ calc_FadingCorr( age.faded, g_value, tc = NULL, tc.g_value = tc, n.MC = 10000, seed = NULL, interval = c(0.01, 500), txtProgressBar = TRUE, verbose = TRUE ) } \arguments{ \item{age.faded}{\link{numeric} \link{vector} (\strong{required}): uncorrected age with error in ka (see example)} \item{g_value}{\link{vector} (\strong{required}): g-value and error obtained from separate fading measurements (see example). Alternatively an \linkS4class{RLum.Results} object can be provided produced by the function \link{analyse_FadingMeasurement}, in this case tc is set automatically} \item{tc}{\link{numeric} (\strong{required}): time in seconds between irradiation and the prompt measurement (cf. Huntley & Lamothe 2001). Argument will be ignored if \code{g_value} was an \linkS4class{RLum.Results} object} \item{tc.g_value}{\link{numeric} (\emph{with default}): the time in seconds between irradiation and the prompt measurement used for estimating the g-value. If the g-value was normalised to, e.g., 2 days, this time in seconds (i.e., 172800) should be given here. If nothing is provided the time is set to tc, which is usual case for g-values obtained using the SAR method and g-values that had been not normalised to 2 days.} \item{n.MC}{\link{integer} (\emph{with default}): number of Monte Carlo simulation runs for error estimation. 
If \code{n.MC = 'auto'} is used the function tries to find a 'stable' error for the age. \strong{Note:} This may take a while!} \item{seed}{\link{integer} (\emph{optional}): sets the seed for the random number generator in R using \link{set.seed}} \item{interval}{\link{numeric} (\emph{with default}): a vector containing the end-points (age interval) of the interval to be searched for the root in 'ka'. This argument is passed to the function \link[stats:uniroot]{stats::uniroot} used for solving the equation.} \item{txtProgressBar}{\link{logical} (\emph{with default}): enables or disables \link{txtProgressBar}} \item{verbose}{\link{logical} (\emph{with default}): enables or disables terminal output} } \value{ Returns an S4 object of type \linkS4class{RLum.Results}.\cr Slot: \strong{\verb{@data}}\cr \tabular{lll}{ \strong{Object} \tab \strong{Type} \tab \strong{Comment} \cr \code{age.corr} \tab \link{data.frame} \tab Corrected age \cr \code{age.corr.MC} \tab \link{numeric} \tab MC simulation results with all possible ages from that simulation \cr } Slot: \strong{\verb{@info}}\cr \tabular{lll}{ \strong{Object} \tab \strong{Type} \tab \strong{Comment} \cr \code{info} \tab \link{character} \tab the original function call } } \description{ This function solves the equation used for correcting the fading affected age including the error for a given g-value according to Huntley & Lamothe (2001). } \details{ As the g-value slightly depends on the time between irradiation and the prompt measurement, i.e. tc, a tc value always needs to be provided.
If the g-value was normalised to a distinct time or evaluated with a different tc value (e.g., external irradiation), also the tc value for the g-value needs to be provided (argument \code{tc.g_value}) and then the g-value is recalculated to tc of the measurement used for estimating the age applying the following equation: \deqn{\kappa_{tc} = \kappa_{tc.g} / (1 - \kappa_{tc.g} * log(tc/tc.g))} where \deqn{\kappa_{tc.g} = g / 100 / log(10)} with \code{log} the natural logarithm. The error of the fading-corrected age is determined using a Monte Carlo simulation approach. Solving of the equation is realised using \link{uniroot}. Large values for \code{n.MC} will significantly increase the computation time.\cr \strong{\code{n.MC = 'auto'}} The error estimation based on a stochastic process, i.e. for a small number of MC runs the calculated error varies considerably every time the function is called, even with the same input values. The argument option \code{n.MC = 'auto'} tries to find a stable value for the standard error, i.e. the standard deviation of values calculated during the MC runs (\code{age.corr.MC}), within a given precision (2 digits) by increasing the number of MC runs stepwise and calculating the corresponding error. If the determined error does not differ from the 9 values calculated previously within a precision of (here) 3 digits the calculation is stopped as it is assumed that the error is stable. Please note that (a) the duration depends on the input values as well as on the provided computation resources and it may take a while, (b) the length (size) of the output vector \code{age.corr.MC}, where all the single values produced during the MC runs are stored, equals the number of MC runs (here termed observations). To avoid an endless loop the calculation is stopped if the number of observations exceeds 10^7. This limitation can be overwritten by setting the number of MC runs manually, e.g. \code{n.MC = 10000001}.
Note: For this case the function is not checking whether the calculated error is stable.\cr \strong{\code{seed}} This option allows to recreate previously calculated results by setting the seed for the R random number generator (see \link{set.seed} for details). This option should not be mixed up with the option \strong{\code{n.MC = 'auto'}}. The results may appear similar, but they are not comparable!\cr \strong{FAQ}\cr Q: Which tc value is expected?\cr A: tc is the time in seconds between irradiation and the prompt measurement applied during your De measurement. However, this tc might differ from the tc used for estimating the g-value. In the case of an SAR measurement tc should be similar, however, if it differs, you have to provide this tc value (the one used for estimating the g-value) using the argument \code{tc.g_value}.\cr } \note{ Special thanks to Sebastien Huot for his support and clarification via e-mail. } \section{Function version}{ 0.4.2 } \examples{ ##run the examples given in the appendix of Huntley and Lamothe, 2001 ##(1) faded age: 100 a results <- calc_FadingCorr( age.faded = c(0.1,0), g_value = c(5.0, 1.0), tc = 2592000, tc.g_value = 172800, n.MC = 100) ##(2) faded age: 1 ka results <- calc_FadingCorr( age.faded = c(1,0), g_value = c(5.0, 1.0), tc = 2592000, tc.g_value = 172800, n.MC = 100) ##(3) faded age: 10.0 ka results <- calc_FadingCorr( age.faded = c(10,0), g_value = c(5.0, 1.0), tc = 2592000, tc.g_value = 172800, n.MC = 100) ##access the last output get_RLum(results) } \section{How to cite}{ Kreutzer, S., 2020. calc_FadingCorr(): Apply a fading correction according to Huntley & Lamothe (2001) for a given g-value and a given tc. Function version 0.4.2. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Huntley, D.J., Lamothe, M., 2001. 
Ubiquity of anomalous fading in K-feldspars and the measurement and correction for it in optical dating. Canadian Journal of Earth Sciences, 38, 1093-1106. } \seealso{ \linkS4class{RLum.Results}, \link{get_RLum}, \link{uniroot} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \keyword{datagen} Luminescence/man/calc_CommonDose.Rd0000644000176200001440000001063513604173244016764 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_CommonDose.R \name{calc_CommonDose} \alias{calc_CommonDose} \title{Apply the (un-)logged common age model after Galbraith et al. (1999) to a given De distribution} \usage{ calc_CommonDose(data, sigmab, log = TRUE, ...) } \arguments{ \item{data}{\linkS4class{RLum.Results} or \link{data.frame} (\strong{required}): for \link{data.frame}: two columns with De \code{(data[,1])} and De error \code{(data[,2])}} \item{sigmab}{\link{numeric} (\emph{with default}): additional spread in De values. This value represents the expected overdispersion in the data should the sample be well-bleached (Cunningham & Walling 2012, p. 100). \strong{NOTE}: For the logged model (\code{log = TRUE}) this value must be a fraction, e.g. 0.2 (= 20 \\%). If the un-logged model is used (\code{log = FALSE}), sigmab must be provided in the same absolute units of the De values (seconds or Gray).} \item{log}{\link{logical} (\emph{with default}): fit the (un-)logged central age model to De data} \item{...}{currently not used.} } \value{ Returns a terminal output. 
In addition an \linkS4class{RLum.Results} object is returned containing the following element: \item{.$summary}{\link{data.frame} summary of all relevant model results.} \item{.$data}{\link{data.frame} original input data} \item{.$args}{\link{list} used arguments} \item{.$call}{\link{call} the function call} The output should be accessed using the function \link{get_RLum} } \description{ Function to calculate the common dose of a De distribution. } \details{ \strong{(Un-)logged model} When \code{log = TRUE} this function calculates the weighted mean of logarithmic De values. Each of the estimates is weighted by the inverse square of its relative standard error. The weighted mean is then transformed back to the dose scale (Galbraith & Roberts 2012, p. 14). The log transformation is not applicable if the De estimates are close to zero or negative. In this case the un-logged model can be applied instead (\code{log = FALSE}). The weighted mean is then calculated using the un-logged estimates of De and their absolute standard error (Galbraith & Roberts 2012, p. 14). } \section{Function version}{ 0.1.1 } \examples{ ## load example data data(ExampleData.DeValues, envir = environment()) ## apply the common dose model calc_CommonDose(ExampleData.DeValues$CA1) } \section{How to cite}{ Burow, C., 2020. calc_CommonDose(): Apply the (un-)logged common age model after Galbraith et al. (1999) to a given De distribution. Function version 0.1.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Galbraith, R.F. & Laslett, G.M., 1993. Statistical models for mixed fission track ages. Nuclear Tracks Radiation Measurements 4, 459-470. Galbraith, R.F., Roberts, R.G., Laslett, G.M., Yoshida, H. & Olley, J.M., 1999. 
Optical dating of single grains of quartz from Jinmium rock shelter, northern Australia. Part I: experimental design and statistical models. Archaeometry 41, 339-364. Galbraith, R.F. & Roberts, R.G., 2012. Statistical aspects of equivalent dose and error calculation and display in OSL dating: An overview and some recommendations. Quaternary Geochronology 11, 1-27. \strong{Further reading} Arnold, L.J. & Roberts, R.G., 2009. Stochastic modelling of multi-grain equivalent dose (De) distributions: Implications for OSL dating of sediment mixtures. Quaternary Geochronology 4, 204-230. Bailey, R.M. & Arnold, L.J., 2006. Statistical modelling of single grain quartz De distributions and an assessment of procedures for estimating burial dose. Quaternary Science Reviews 25, 2475-2502. Cunningham, A.C. & Wallinga, J., 2012. Realizing the potential of fluvial archives using robust OSL chronologies. Quaternary Geochronology 12, 98-106. Rodnight, H., Duller, G.A.T., Wintle, A.G. & Tooth, S., 2006. Assessing the reproducibility and accuracy of optical dating of fluvial deposits. Quaternary Geochronology, 1 109-120. Rodnight, H., 2008. How many equivalent dose values are needed to obtain a reproducible distribution?. Ancient TL 26, 3-10. } \seealso{ \link{calc_CentralDose}, \link{calc_FiniteMixture}, \link{calc_FuchsLang2001}, \link{calc_MinDose} } \author{ Christoph Burow, University of Cologne (Germany) , RLum Developer Team} Luminescence/man/analyse_FadingMeasurement.Rd0000644000176200001440000001654413604173243021055 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/analyse_FadingMeasurement.R \name{analyse_FadingMeasurement} \alias{analyse_FadingMeasurement} \title{Analyse fading measurements and returns the fading rate per decade (g-value)} \usage{ analyse_FadingMeasurement( object, structure = c("Lx", "Tx"), signal.integral, background.integral, t_star = "half", n.MC = 100, verbose = TRUE, plot = TRUE, plot.single = FALSE, ... 
) } \arguments{ \item{object}{\linkS4class{RLum.Analysis} (\strong{required}): input object with the measurement data. Alternatively, a \link{list} containing \linkS4class{RLum.Analysis} objects or a \link{data.frame} with three columns (x = LxTx, y = LxTx error, z = time since irradiation) can be provided. Can also be a wide table, i.e. a \link{data.frame} with a number of columns divisible by 3 and where each triplet has the aforementioned column structure. \strong{Please note: The input object should solely consist of the curves needed for the data analysis, i.e. only IRSL curves representing Lx (and Tx)} If data from multiple aliquots are provided please \strong{see the details below} with regard to Lx/Tx normalisation. \strong{The function assumes that all your measurements are related to one (comparable) sample. If you want to treat independent samples, you have to use this function in a loop.}} \item{structure}{\link{character} (\emph{with default}): sets the structure of the measurement data. Allowed are \code{'Lx'} or \code{c('Lx','Tx')}. Other input is ignored} \item{signal.integral}{\link{vector} (\strong{required}): vector with the limits for the signal integral. Not required if a \code{data.frame} with LxTx values is provided.} \item{background.integral}{\link{vector} (\strong{required}): vector with the bounds for the background integral. Not required if a \code{data.frame} with LxTx values is provided.} \item{t_star}{\link{character} (\emph{with default}): method for calculating the time elapsed since irradiation. Options are: \code{'half'}, which is \eqn{t_star := t_1 + (t_2 - t_1)/2} (Auclair et al., 2003) and \code{'end'}, which takes the time between irradiation and the measurement step.
Default is \code{'half'}} \item{n.MC}{\link{integer} (\emph{with default}): number for Monte Carlo runs for the error estimation} \item{verbose}{\link{logical} (\emph{with default}): enables/disables verbose mode} \item{plot}{\link{logical} (\emph{with default}): enables/disables plot output} \item{plot.single}{\link{logical} (\emph{with default}): enables/disables single plot mode, i.e. one plot window per plot. Alternatively a vector specifying the plot to be drawn, e.g., \code{plot.single = c(3,4)} draws only the last two plots} \item{...}{(\emph{optional}) further arguments that can be passed to internally used functions (see details)} } \value{ An \linkS4class{RLum.Results} object is returned: Slot: \strong{@data} \tabular{lll}{ \strong{OBJECT} \tab \strong{TYPE} \tab \strong{COMMENT}\cr \code{fading_results} \tab \code{data.frame} \tab results of the fading measurement in a table \cr \code{fit} \tab \code{lm} \tab object returned by the used linear fitting function \link[stats:lm]{stats::lm}\cr \code{rho_prime} \tab \code{data.frame} \tab results of rho' estimation after Kars et al. 2008 \cr \code{LxTx_table} \tab \code{data.frame} \tab Lx/Tx table, if curve data had been provided \cr \code{irr.times} \tab \code{integer} \tab vector with the irradiation times in seconds \cr } Slot: \strong{@info} \tabular{lll}{ \strong{OBJECT} \tab \code{TYPE} \tab \code{COMMENT}\cr \code{call} \tab \code{call} \tab the original function call\cr } } \description{ The function analyses fading measurements and returns a fading rate including an error estimation. The function is not limited to standard fading measurements, as can be seen, e.g., Huntley and Lamothe 2001. Additionally, the density of recombination centres (rho') is estimated after Kars et al. 2008. } \details{ All provided output corresponds to the \eqn{tc} value obtained by this analysis. Additionally in the output object the g-value normalised to 2-days is provided.
The output of this function can be passed to the function \link{calc_FadingCorr}. \strong{Fitting and error estimation} For the fitting the function \link[stats:lm]{stats::lm} is used without applying weights. For the error estimation all input values, except tc, as the precision can be considered as sufficiently high with regard to the underlying problem, are sampled assuming a normal distribution for each value with the value as the mean and the provided uncertainty as standard deviation. \strong{Density of recombination centres} The density of recombination centres, expressed by the dimensionless variable rho', is estimated by fitting equation 5 in Kars et al. 2008 to the data. For the fitting the function \link[stats:nls]{stats::nls} is used without applying weights. For the error estimation the same procedure as for the g-value is applied (see above). \strong{Multiple aliquots & Lx/Tx normalisation} Be aware that this function will always normalise all Lx/Tx values by the Lx/Tx value of the prompt measurement of the first aliquot. This implicitly assumes that there are no systematic inter-aliquot variations in Lx/Tx values. If deemed necessary to normalise the Lx/Tx values of each aliquot by its individual prompt measurement please do so \strong{before} running \link{analyse_FadingMeasurement} and provide the already normalised values for \code{object} instead.
} \section{Function version}{ 0.1.11 } \examples{ ## load example data (sample UNIL/NB123, see ?ExampleData.Fading) data("ExampleData.Fading", envir = environment()) ##(1) get fading measurement data (here a three column data.frame) fading_data <- ExampleData.Fading$fading.data$IR50 ##(2) run analysis g_value <- analyse_FadingMeasurement( fading_data, plot = TRUE, verbose = TRUE, n.MC = 10) ##(3) this can be further used in the function ## to correct the age according to Huntley & Lamothe, 2001 results <- calc_FadingCorr( age.faded = c(100,2), g_value = g_value, n.MC = 10) } \section{How to cite}{ Kreutzer, S., Burow, C., 2020. analyse_FadingMeasurement(): Analyse fading measurements and returns the fading rate per decade (g-value). Function version 0.1.11. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Auclair, M., Lamothe, M., Huot, S., 2003. Measurement of anomalous fading for feldspar IRSL using SAR. Radiation Measurements 37, 487-492. doi:10.1016/S1350-4487(03)00018-0 Huntley, D.J., Lamothe, M., 2001. Ubiquity of anomalous fading in K-feldspars and the measurement and correction for it in optical dating. Canadian Journal of Earth Sciences 38, 1093-1106. doi:10.1139/cjes-38-7-1093 Kars, R.H., Wallinga, J., Cohen, K.M., 2008. A new approach towards anomalous fading correction for feldspar IRSL dating-tests on samples in field saturation. Radiation Measurements 43, 786-790.
doi:10.1016/j.radmeas.2008.01.021 } \seealso{ \link{calc_OSLLxTxRatio}, \link{read_BIN2R}, \link{read_XSYG2R}, \link{extract_IrradiationTimes} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) \cr Christoph Burow, University of Cologne (Germany) , RLum Developer Team} \keyword{datagen} Luminescence/man/calc_Huntley2006.Rd0000644000176200001440000003176313604173244016666 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_Huntley2006.R \name{calc_Huntley2006} \alias{calc_Huntley2006} \title{Apply the Huntley (2006) model} \usage{ calc_Huntley2006( data, LnTn = NULL, rhop, ddot, readerDdot, normalise = TRUE, fit.method = c("EXP", "GOK")[1], lower.bounds = c(-Inf, -Inf, -Inf), summary = TRUE, plot = TRUE, ... ) } \arguments{ \item{data}{\link{data.frame} (\strong{required}): A \code{data.frame} with one of the following structures: \itemize{ \item A \strong{three column} data frame with numeric values on a) dose (s), b) LxTx and and c) LxTx error. \item If a \strong{two column} data frame is provided it is automatically assumed that errors on LxTx are missing. A third column will be attached with an arbitrary 5 \\% error on the provided LxTx values. \item Can also be a \strong{wide table}, i.e. a \link{data.frame} with a number of colums divisible by 3 and where each triplet has the aforementioned column structure. }\preformatted{ (optional) | dose (s)| LxTx | LxTx error | | [ ,1] | [ ,2]| [ ,3] | |---------|------|------------| [1, ]| 0 | LnTn | LnTn error | (optional, see arg 'LnTn') [2, ]| R1 | L1T1 | L1T1 error | ... | ... | ... | ... | [x, ]| Rx | LxTx | LxTx error | } \strong{NOTE:} The function assumes the first row of the function to be the \code{Ln/Tn}-value. 
If you want to provide more than one \code{Ln/Tn}-value consider using the argument \code{LnTn}.} \item{LnTn}{\link{data.frame} (\strong{optional}): This argument should \strong{only} be used to provide more than one \code{Ln/Tn}-value. It assumes a two column data frame with the following structure:\preformatted{ | LnTn | LnTn error | | [ ,1] | [ ,2] | |--------|--------------| [1, ]| LnTn_1 | LnTn_1 error | [2, ]| LnTn_2 | LnTn_2 error | ... | ... | ... | [x, ]| LnTn_x | LnTn_x error | } The function will calculate a \strong{mean} \code{Ln/Tn}-value and uses either the standard deviation or the highest individual error, whichever is larger. If another mean value (e.g. a weighted mean or median) or error is preferred, this value must be calculated beforehand and used in the first row in the data frame for argument \code{data}. \strong{NOTE:} If you provide \code{LnTn}-values with this argument the data frame for the \code{data}-argument \strong{must not} contain any \code{LnTn}-values!} \item{rhop}{\link{numeric} (\strong{required}): The density of recombination centres (\eqn{\rho}') and its error (see Huntley 2006), given as numeric vector of length two. Note that \eqn{\rho}' must \strong{not} be provided as the common logarithm. Example: \code{rhop = c(2.92e-06, 4.93e-07)}.} \item{ddot}{\link{numeric} (\strong{required}): Environmental dose rate and its error, given as a numeric vector of length two. Expected unit: Gy/ka. Example: \code{ddot = c(3.7, 0.4)}.} \item{readerDdot}{\link{numeric} (\strong{required}): Dose rate of the irradiation source of the OSL reader and its error, given as a numeric vector of length two. Expected unit: Gy/s. Example: \code{readerDdot = c(0.08, 0.01)}.} \item{normalise}{\link{logical} (\emph{with default}): If \code{TRUE} (the default) all measured and computed LxTx values are normalised by the pre-exponential factor A (see details).} \item{fit.method}{\link{character} (\emph{with default}): Fit function of the dose response curve. 
Can either be \code{EXP} (the default) or \code{GOK}. Note that \code{EXP} (single saturating exponential) is the original function the model after Huntley (2006) and Kars et al. (2008) was designed to use. The use of a general-order kinetics function (\code{GOK}) is an experimental adaption of the model and should be used with great care.} \item{lower.bounds}{\link{numeric} (\emph{with default}): Only applicable for \code{fit.method = 'GOK'}. A vector of length 3 that contains the lower bound values for fitting the general-order kinetics function using \link[minpack.lm:nlsLM]{minpack.lm::nlsLM}. In most cases, the default values (c(\verb{-Inf, -Inf, -Inf})) are appropriate for finding a best fit, but sometimes it may be useful to restrict the lower bounds to e.g. c(\verb{0, 0, 0}). The values of the vector are for parameters \code{a}, \code{D0} and \code{c} in that particular order (see details in \link[Luminescence:plot_GrowthCurve]{Luminescence::plot_GrowthCurve}).} \item{summary}{\link{logical} (\emph{with default}): If \code{TRUE} (the default) various parameters provided by the user and calculated by the model are added as text on the right-hand side of the plot.} \item{plot}{\link{logical} (\emph{with default}): enables/disables plot output.} \item{...}{Further parameters: \itemize{ \item \code{verbose} \link{logical}: Show or hide console output \item \code{n.MC} \link{numeric}: Number of Monte Carlo iterations (default = \code{100000}). \strong{Note} that it is generally advised to have a large number of Monte Carlo iterations for the results to converge. Decreasing the number of iterations will often result in unstable estimates. } All other arguments are passed to \link{plot} and \link{plot_GrowthCurve}.} } \value{ An \linkS4class{RLum.Results} object is returned: Slot: \strong{@data}\cr \tabular{lll}{ \strong{OBJECT} \tab \strong{TYPE} \tab \strong{COMMENT}\cr \code{results} \tab \link{data.frame} \tab results of the of Kars et al. 
2008 model \cr \code{data} \tab \link{data.frame} \tab original input data \cr \code{Ln} \tab \link{numeric} \tab Ln and its error \cr \code{LxTx_tables} \tab \code{list} \tab A \code{list} of \code{data.frames} containing data on dose, LxTx and LxTx error for each of the dose response curves. Note that these \strong{do not} contain the natural Ln signal, which is provided separately. \cr \code{fits} \tab \code{list} \tab A \code{list} of \code{nls} objects produced by \link[minpack.lm:nlsLM]{minpack.lm::nlsLM} when fitting the dose response curves \cr } Slot: \strong{@info}\cr \tabular{lll}{ \strong{OBJECT} \tab \strong{TYPE} \tab \strong{COMMENT} \cr \code{call} \tab \code{call} \tab the original function call \cr \code{args} \tab \code{list} \tab arguments of the original function call \cr } } \description{ A function to calculate the expected sample specific fraction of saturation based on the model of Huntley (2006) using the approach as implemented in Kars et al. (2008) or Guralnik et al. (2015). } \details{ This function applies the approach described in Kars et al. (2008) or Guralnik et al. (2015), which are both developed from the model of Huntley (2006) to calculate the expected sample specific fraction of saturation of a feldspar and also to calculate fading corrected age using this model. \eqn{\rho}' (\code{rhop}), the density of recombination centres, is a crucial parameter of this model and must be determined separately from a fading measurement. The function \link{analyse_FadingMeasurement} can be used to calculate the sample specific \eqn{\rho}' value. \strong{Kars et al. (2008) - Single saturating exponential} To apply the approach after Kars et al. (2008) use \code{fit.method = "EXP"}. Firstly, the unfaded D0 value is determined through applying equation 5 of Kars et al. 
(2008) to the measured LxTx data as a function of irradiation time, and fitting the data with a single saturating exponential of the form: \deqn{LxTx(t*) = A x \phi(t*) x (1 - exp(-(t* / D0)))} where \deqn{\phi(t*) = exp(-\rho' x ln(1.8 x s_tilde x t*)^3)} after King et al. (2016) where \code{A} is a pre-exponential factor, \verb{t*} (s) is the irradiation time, starting at the mid-point of irradiation (Auclair et al. 2003) and \code{s_tilde} (3x10^15 s^-1) is the athermal frequency factor after Huntley (2006). \cr Using fit parameters \code{A} and \code{D0}, the function then computes a natural dose response curve using the environmental dose rate, \code{D_dot} (Gy/s) and equations \verb{[1]} and \verb{[2]}. Computed LxTx values are then fitted using the \link{plot_GrowthCurve} function and the laboratory measured LnTn can then be interpolated onto this curve to determine the fading corrected De value, from which the fading corrected age is calculated. \strong{Guralnik et al. (2015) - General-order kinetics} To apply the approach after Guralnik et al. (2015) use \code{fit.method = "GOK"}. The approach of Guralnik et al. (2015) is very similar to that of Kars et al. (2008), but instead of using a single saturating exponential the model fits a general-order kinetics function of the form: \deqn{LxTx(t*) = A x \phi(t*) x (1-(1+(1/D0) x t* x c)^(-1/c))} where \code{A}, \eqn{\phi}, \verb{t*} and \code{D0} are the same as above and \code{c} is a dimensionless kinetic order modifier (cf. equation 10 in Guralnik et al., 2015). \strong{Level of saturation} The \code{calc_Huntley2006} function also calculates the level of saturation (n/N) and the field saturation (i.e. athermal steady state, (n/N)_SS) value for the sample under investigation using the sample specific \eqn{\rho}', unfaded \code{D0} and \code{D_dot} values, following the approach of Kars et al. (2008). 
\strong{Uncertainties} Uncertainties are reported at 1 sigma and are assumed to be normally distributed and are estimated using monte-carlo resamples (\code{n.MC = 1000}) of \eqn{\rho}' and LxTx during dose response curve fitting, and of \eqn{\rho}' in the derivation of (n/N) and (n/N)_SS. *\emph{Age calculated from 2\\emph{D0 of the simulated natural DRC}} In addition to the age calculated from the equivalent dose derived from \code{Ln/Tn} projected on the simulated natural dose response curve (DRC), this function also calculates an age from twice the characteristic saturation dose (\code{D0}) of the simulated natural DRC. This can be a useful information for (over)saturated samples (ie. no intersect of \code{Ln/Tn} on the natural DRC) to obtain at least a "minimum age" estimate of the sample. In the console output this value is denoted by \emph{"Age @2D0 (ka):"}. } \note{ \strong{This function has BETA status and should not be used for publication work!} } \section{Function version}{ 0.4.1 } \examples{ ## Load example data (sample UNIL/NB123, see ?ExampleData.Fading) data("ExampleData.Fading", envir = environment()) ## (1) Set all relevant parameters # a. fading measurement data (IR50) fading_data <- ExampleData.Fading$fading.data$IR50 # b. Dose response curve data data <- ExampleData.Fading$equivalentDose.data$IR50 ## (2) Define required function parameters ddot <- c(7.00, 0.004) readerDdot <- c(0.134, 0.0067) # Analyse fading measurement and get an estimate of rho'. # Note that the RLum.Results object can be directly used for further processing. # The number of MC runs is reduced for this example rhop <- analyse_FadingMeasurement(fading_data, plot = TRUE, verbose = FALSE, n.MC = 10) ## (3) Apply the Kars et al. (2008) model to the data kars <- calc_Huntley2006(data = data, rhop = rhop, ddot = ddot, readerDdot = readerDdot, n.MC = 25) \dontrun{ # You can also provide LnTn values separately via the 'LnTn' argument. 
# Note, however, that the data frame for 'data' must then NOT contain # a LnTn value. See argument descriptions! LnTn <- data.frame(LnTn = c(1.84833, 2.24833), LnTn.error = c(0.17, 0.22)) LxTx <- data[2:nrow(data), ] kars <- calc_Huntley2006(data = LxTx, LnTn = LnTn, rhop = rhop, ddot = ddot, readerDdot = readerDdot, n.MC = 25) } } \section{How to cite}{ King, G.E., Burow, C., 2020. calc_Huntley2006(): Apply the Huntley (2006) model. Function version 0.4.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Kars, R.H., Wallinga, J., Cohen, K.M., 2008. A new approach towards anomalous fading correction for feldspar IRSL dating-tests on samples in field saturation. Radiation Measurements 43, 786-790. doi:10.1016/j.radmeas.2008.01.021 Guralnik, B., Li, B., Jain, M., Chen, R., Paris, R.B., Murray, A.S., Li, S.-H., Pagonis, P., Herman, F., 2015. Radiation-induced growth and isothermal decay of infrared-stimulated luminescence from feldspar. Radiation Measurements 81, 224-231. Huntley, D.J., 2006. An explanation of the power-law decay of luminescence. Journal of Physics: Condensed Matter 18, 1359-1365. doi:10.1088/0953-8984/18/4/020 King, G.E., Herman, F., Lambert, R., Valla, P.G., Guralnik, B., 2016. Multi-OSL-thermochronometry of feldspar. Quaternary Geochronology 33, 76-87. doi:10.1016/j.quageo.2016.01.004 \strong{Further reading} Morthekai, P., Jain, M., Cunha, P.P., Azevedo, J.M., Singhvi, A.K., 2011. An attempt to correct for the fading in million year old basaltic rocks. Geochronometria 38(3), 223-230. } \author{ Georgina E. 
King, University of Bern (Switzerland) \cr Christoph Burow, University of Cologne (Germany) , RLum Developer Team} \keyword{datagen} Luminescence/man/merge_Risoe.BINfileData.Rd0000644000176200001440000000667513604173244020251 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/merge_Risoe.BINfileData.R \name{merge_Risoe.BINfileData} \alias{merge_Risoe.BINfileData} \title{Merge Risoe.BINfileData objects or Risoe BIN-files} \usage{ merge_Risoe.BINfileData( input.objects, output.file, keep.position.number = FALSE, position.number.append.gap = 0 ) } \arguments{ \item{input.objects}{\link{character} with \linkS4class{Risoe.BINfileData} objects (\strong{required}): Character vector with path and files names (e.g. \code{input.objects = c("path/file1.bin", "path/file2.bin")} or \linkS4class{Risoe.BINfileData} objects (e.g. \code{input.objects = c(object1, object2)}). Alternatively a \code{list} is supported.} \item{output.file}{\link{character} (\emph{optional}): File output path and name. If no value is given, a \linkS4class{Risoe.BINfileData} is returned instead of a file.} \item{keep.position.number}{\link{logical} (\emph{with default}): Allows keeping the original position numbers of the input objects. Otherwise the position numbers are recalculated.} \item{position.number.append.gap}{\link{integer} (\emph{with default}): Set the position number gap between merged BIN-file sets, if the option \code{keep.position.number = FALSE} is used. See details for further information.} } \value{ Returns a \code{file} or a \linkS4class{Risoe.BINfileData} object. } \description{ Function allows merging Risoe BIN/BINX files or Risoe.BINfileData objects. } \details{ The function allows merging different measurements to one file or one object. The record IDs are recalculated for the new object. Other values are kept for each object. The number of input objects is not limited. 
\code{position.number.append.gap} option If the option \code{keep.position.number = FALSE} is used, the position numbers of the new data set are recalculated by adding the highest position number of the previous data set to the each position number of the next data set. For example: The highest position number is 48, then this number will be added to all other position numbers of the next data set (e.g. 1 + 48 = 49) However, there might be cases where an additional addend (summand) is needed before the next position starts. Example: \itemize{ \item Position number set (A): \verb{1,3,5,7} \item Position number set (B): \verb{1,3,5,7} } With no additional summand the new position numbers would be: \verb{1,3,5,7,8,9,10,11}. That might be unwanted. Using the argument \code{position.number.append.gap = 1} it will become: \verb{1,3,5,7,9,11,13,15,17}. } \note{ The validity of the output objects is not further checked. } \section{Function version}{ 0.2.7 } \examples{ ##merge two objects data(ExampleData.BINfileData, envir = environment()) object1 <- CWOSL.SAR.Data object2 <- CWOSL.SAR.Data object.new <- merge_Risoe.BINfileData(c(object1, object2)) } \section{How to cite}{ Kreutzer, S., 2020. merge_Risoe.BINfileData(): Merge Risoe.BINfileData objects or Risoe BIN-files. Function version 0.2.7. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Duller, G., 2007. Analyst. 
} \seealso{ \linkS4class{Risoe.BINfileData}, \link{read_BIN2R}, \link{write_R2BIN} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \keyword{IO} \keyword{manip} Luminescence/man/RLum.Results-class.Rd0000644000176200001440000001210413604173245017353 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/RLum.Results-class.R \docType{class} \name{RLum.Results-class} \alias{RLum.Results-class} \alias{show,RLum.Results-method} \alias{set_RLum,RLum.Results-method} \alias{get_RLum,RLum.Results-method} \alias{length_RLum,RLum.Results-method} \alias{names_RLum,RLum.Results-method} \title{Class \code{"RLum.Results"}} \usage{ \S4method{show}{RLum.Results}(object) \S4method{set_RLum}{RLum.Results}(class, originator, .uid, .pid, data = list(), info = list()) \S4method{get_RLum}{RLum.Results}(object, data.object, info.object = NULL, drop = TRUE) \S4method{length_RLum}{RLum.Results}(object) \S4method{names_RLum}{RLum.Results}(object) } \arguments{ \item{object}{\code{\link{get_RLum}}; \linkS4class{RLum.Results} (\strong{required}): an object of class \linkS4class{RLum.Results} to be evaluated} \item{class}{\code{\link{set_RLum}}; \link{character} \strong{(required)}: name of the \code{RLum} class to create} \item{originator}{\code{\link{set_RLum}}; \link{character} (\emph{automatic}): contains the name of the calling function (the function that produces this object); can be set manually.} \item{.uid}{\code{\link{set_RLum}}; \link{character} (\emph{automatic}): sets an unique ID for this object using the internal C++ function \code{create_UID}.} \item{.pid}{\code{\link{set_RLum}}; \link{character} (\emph{with default}): option to provide a parent id for nesting at will.} \item{data}{\code{\link{set_RLum}}; \link{list} (\emph{optional}): a list containing the data to be stored in the object} \item{info}{\code{\link{set_RLum}}; \link{list} (\emph{optional}): a list containing 
additional info data for the object} \item{data.object}{\code{\link{get_RLum}}; \link{character} or \link{numeric}: name or index of the data slot to be returned} \item{info.object}{\code{\link{get_RLum}}; \link{character} (\emph{optional}): name of the wanted info element} \item{drop}{\code{\link{get_RLum}}; \link{logical} (\emph{with default}): coerce to the next possible layer (which are data objects, \code{drop = FALSE} keeps the original \code{RLum.Results}} } \value{ \strong{\code{set_RLum}}: Returns an object from the class \linkS4class{RLum.Results} \strong{\code{get_RLum}}: Returns: \enumerate{ \item Data object from the specified slot \item \link{list} of data objects from the slots if 'data.object' is vector or \item an \linkS4class{RLum.Results} for \code{drop = FALSE}. } \strong{\code{length_RLum}} Returns the number of data elements in the \code{RLum.Results} object. \strong{\code{names_RLum}} Returns the names of the data elements in the object. } \description{ Object class contains results data from functions (e.g., \link{analyse_SAR.CWOSL}). } \section{Methods (by generic)}{ \itemize{ \item \code{show}: Show structure of \code{RLum.Results} object \item \code{set_RLum}: Construction method for an RLum.Results object. \item \code{get_RLum}: Accessor method for RLum.Results object. The argument data.object allows directly accessing objects delivered within the slot data. The default return object depends on the object originator (e.g., \code{fit_LMCurve}). If nothing is specified always the first \code{data.object} will be returned. Note: Detailed specification should be made in combination with the originator slot in the receiving function if results are pipped. 
\item \code{length_RLum}: Returns the length of the object, i.e., number of stored data.objects \item \code{names_RLum}: Returns the names data.objects }} \section{Slots}{ \describe{ \item{\code{data}}{Object of class "list" containing output data} }} \note{ The class is intended to store results from functions to be used by other functions. The data in the object should always be accessed by the method \code{get_RLum}. } \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("RLum.Results", ...)}. } \section{Class version}{ 0.5.2 } \examples{ showClass("RLum.Results") ##create an empty object from this class set_RLum(class = "RLum.Results") ##use another function to show how it works ##Basic calculation of the dose rate for a specific date dose.rate <- calc_SourceDoseRate( measurement.date = "2012-01-27", calib.date = "2014-12-19", calib.dose.rate = 0.0438, calib.error = 0.0019) ##show object dose.rate ##get results get_RLum(dose.rate) ##get parameters used for the calcualtion from the same object get_RLum(dose.rate, data.object = "parameters") ##alternatively objects can be accessed using S3 generics, such as dose.rate$parameters } \seealso{ \linkS4class{RLum}, \link{plot_RLum}, \link{merge_RLum} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. RLum.Results-class(): Class 'RLum.Results'. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \keyword{classes} \keyword{internal} \keyword{methods} Luminescence/man/read_BIN2R.Rd0000644000176200001440000001411013604173245015537 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/read_BIN2R.R \name{read_BIN2R} \alias{read_BIN2R} \title{Import Risø BIN/BINX-files into R} \usage{ read_BIN2R( file, show.raw.values = FALSE, position = NULL, n.records = NULL, zero_data.rm = TRUE, duplicated.rm = FALSE, fastForward = FALSE, show.record.number = FALSE, txtProgressBar = TRUE, forced.VersionNumber = NULL, ignore.RECTYPE = FALSE, pattern = NULL, verbose = TRUE, ... ) } \arguments{ \item{file}{\link{character} or \link{list} (\strong{required}): path and file name of the BIN/BINX file (URLs are supported). If input is a \code{list} it should comprise only \code{character}s representing each valid path and BIN/BINX-file names. Alternatively the input character can be just a directory (path), in this case the the function tries to detect and import all BIN/BINX files found in the directory.} \item{show.raw.values}{\link{logical} (\emph{with default}): shows raw values from BIN file for \code{LTYPE}, \code{DTYPE} and \code{LIGHTSOURCE} without translation in characters. Can be provided as \code{list} if \code{file} is a \code{list}.} \item{position}{\link{numeric} (\emph{optional}): imports only the selected position. Note: the import performance will not benefit by any selection made here. Can be provided as \code{list} if \code{file} is a \code{list}.} \item{n.records}{\link{raw} (\emph{optional}): limits the number of imported records. Can be used in combination with \code{show.record.number} for debugging purposes, e.g. corrupt BIN-files. Can be provided as \code{list} if \code{file} is a \code{list}.} \item{zero_data.rm}{\link{logical} (\emph{with default}): remove erroneous data with no count values. 
As such data are usually not needed for the subsequent data analysis they will be removed by default. Can be provided as \code{list} if \code{file} is a \code{list}.} \item{duplicated.rm}{\link{logical} (\emph{with default}): remove duplicated entries if \code{TRUE}. This may happen due to an erroneously produced BIN/BINX-file. This option compares only predecessor and successor. Can be provided as \code{list} if \code{file} is a \code{list}.} \item{fastForward}{\link{logical} (\emph{with default}): if \code{TRUE} for a more efficient data processing only a list of \code{RLum.Analysis} objects is returned instead of a \linkS4class{Risoe.BINfileData} object. Can be provided as \code{list} if \code{file} is a \code{list}.} \item{show.record.number}{\link{logical} (\emph{with default}): shows record number of the imported record, for debugging usage only. Can be provided as \code{list} if \code{file} is a \code{list}.} \item{txtProgressBar}{\link{logical} (\emph{with default}): enables or disables \link{txtProgressBar}.} \item{forced.VersionNumber}{\link{integer} (\emph{optional}): allows to cheat the version number check in the function by own values for cases where the BIN-file version is not supported. Can be provided as \code{list} if \code{file} is a \code{list}. \strong{Note:} The usage is at own risk, only supported BIN-file versions have been tested.} \item{ignore.RECTYPE}{\link{logical} (\emph{with default}): this argument allows to ignore values in the byte 'RECTYPE' (BIN-file version 08), in case they are not documented or faultily set. In this case the corrupted records are skipped.} \item{pattern}{\link{character} (\emph{optional}): argument that is used if only a path is provided. 
The argument will then be passed to the function \link{list.files} used internally to construct a \code{list} of wanted files} \item{verbose}{\link{logical} (\emph{with default}): enables or disables verbose mode} \item{...}{further arguments that will be passed to the function \link{Risoe.BINfileData2RLum.Analysis}. Please note that any matching argument automatically sets \code{fastForward = TRUE}} } \value{ Returns an S4 \linkS4class{Risoe.BINfileData} object containing two slots: \item{METADATA}{A \link{data.frame} containing all variables stored in the bin-file.} \item{DATA}{A \link{list} containing a numeric \link{vector} of the measured data. The ID corresponds to the record ID in METADATA.} If \code{fastForward = TRUE} a list of \linkS4class{RLum.Analysis} objects is returned. The internal coercing is done using the function \link{Risoe.BINfileData2RLum.Analysis} } \description{ Import a *.bin or a *.binx file produced by a Risoe DA15 and DA20 TL/OSL reader into R. } \details{ The binary data file is parsed byte by byte following the data structure published in the Appendices of the Analyst manual p. 42. For the general BIN/BINX-file structure, the reader is referred to the Risoe website: \url{http://www.nutech.dtu.dk/} } \note{ The function works for BIN/BINX-format versions 03, 04, 05, 06, 07 and 08. The version number depends on the used Sequence Editor. \strong{ROI data sets introduced with BIN-file version 8 are not supported and skipped during import.} } \section{Function version}{ 0.16.2 } \examples{ ##(1) import Risoe BIN-file to R (uncomment for usage) #FILE <- file.choose() #temp <- read_BIN2R(FILE) #temp } \section{How to cite}{ Kreutzer, S., Fuchs, M.C., 2020. read_BIN2R(): Import Risø BIN/BINX-files into R. Function version 0.16.2. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \references{ DTU Nutech, 2016. The Sequence Editor, Users Manual, February, 2016. \url{http://www.nutech.dtu.dk/english/products-and-services/radiation-instruments/tl_osl_reader/manuals} } \seealso{ \link{write_R2BIN}, \linkS4class{Risoe.BINfileData}, \link[base:readBin]{base::readBin}, \link{merge_Risoe.BINfileData}, \linkS4class{RLum.Analysis} \link[utils:txtProgressBar]{utils::txtProgressBar}, \link{list.files} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France)\cr Margret C. Fuchs, HZDR Freiberg, (Germany) \cr based on information provided by Torben Lapp and Karsten Bracht Nielsen (Risø DTU, Denmark) , RLum Developer Team} \keyword{IO} Luminescence/man/ExampleData.TR_OSL.Rd0000644000176200001440000000162113604173243017157 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \name{ExampleData.TR_OSL} \alias{ExampleData.TR_OSL} \title{Example TR-OSL data} \format{One \linkS4class{RLum.Data.Curve} dataset imported using the function \link{read_XSYG2R} \describe{ \code{ExampleData.TR_OSL}: A single \linkS4class{RLum.Data.Curve} object with the TR-OSL data }} \description{ Single TR-OSL curve obtained by Schmidt et al. (under review) for quartz sample BT729 (origin: Trebgast Valley, Germany, quartz, 90-200 µm, unpublished data). } \examples{ ##(1) curves data(ExampleData.TR_OSL, envir = environment()) plot_RLum(ExampleData.TR_OSL) } \references{ Schmidt, C., Simmank, O., Kreutzer, S., under review. Time-Resolved Optically Stimulated Luminescence of Quartz in the Nanosecond Time Domain. 
Journal of Luminescence, 1-90 } \seealso{ \link{fit_OSLLifeTimes} } \keyword{datasets} Luminescence/man/plot_AbanicoPlot.Rd0000644000176200001440000004360213604173244017170 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_AbanicoPlot.R \name{plot_AbanicoPlot} \alias{plot_AbanicoPlot} \title{Function to create an Abanico Plot.} \usage{ plot_AbanicoPlot( data, na.rm = TRUE, log.z = TRUE, z.0 = "mean.weighted", dispersion = "qr", plot.ratio = 0.75, rotate = FALSE, mtext, summary, summary.pos, summary.method = "MCM", legend, legend.pos, stats, rug = FALSE, kde = TRUE, hist = FALSE, dots = FALSE, boxplot = FALSE, y.axis = TRUE, error.bars = FALSE, bar, bar.col, polygon.col, line, line.col, line.lty, line.label, grid.col, frame = 1, bw = "SJ", output = TRUE, interactive = FALSE, ... ) } \arguments{ \item{data}{\link{data.frame} or \linkS4class{RLum.Results} object (\strong{required}): for \code{data.frame} two columns: De (\code{data[,1]}) and De error (\code{data[,2]}). To plot several data sets in one plot the data sets must be provided as \code{list}, e.g. \code{list(data.1, data.2)}.} \item{na.rm}{\link{logical} (\emph{with default}): exclude NA values from the data set prior to any further operations.} \item{log.z}{\link{logical} (\emph{with default}): Option to display the z-axis in logarithmic scale. Default is \code{TRUE}.} \item{z.0}{\link{character} or \link{numeric}: User-defined central value, used for centering of data. One out of \code{"mean"}, \code{"mean.weighted"} and \code{"median"} or a numeric value (not its logarithm). Default is \code{"mean.weighted"}.} \item{dispersion}{\link{character} (\emph{with default}): measure of dispersion, used for drawing the scatter polygon. One out of \itemize{ \item \code{"qr"} (quartile range), \item \code{"pnn"} (symmetric percentile range with nn the lower percentile, e.g. 
\item \code{"p05"} depicting the range between 5 and 95 \%), \item \code{"sd"} (standard deviation) and \item \code{"2sd"} (2 standard deviations), } The default is \code{"qr"}. Note that \code{"sd"} and \code{"2sd"} are only meaningful in combination with \code{"z.0 = 'mean'"} because the unweighted mean is used to center the polygon.} \item{plot.ratio}{\link{numeric}: Relative space, given to the radial versus the cartesian plot part, default is \code{0.75}.} \item{rotate}{\link{logical}: Option to turn the plot by 90 degrees.} \item{mtext}{\link{character}: additional text below the plot title.} \item{summary}{\link{character} (\emph{optional}): add statistic measures of centrality and dispersion to the plot. Can be one or more of several keywords. See details for available keywords. Results differ depending on the log-option for the z-scale (see details).} \item{summary.pos}{\link{numeric} or \link{character} (\emph{with default}): optional position coordinates or keyword (e.g. \code{"topright"}) for the statistical summary. Alternatively, the keyword \code{"sub"} may be specified to place the summary below the plot header. However, this latter option in only possible if \code{mtext} is not used.} \item{summary.method}{\link{character} (\emph{with default}): keyword indicating the method used to calculate the statistic summary. One out of \itemize{ \item \code{"unweighted"}, \item \code{"weighted"} and \item \code{"MCM"}. } See \link{calc_Statistics} for details.} \item{legend}{\link{character} vector (\emph{optional}): legend content to be added to the plot.} \item{legend.pos}{\link{numeric} or \link{character} (\emph{with default}): optional position coordinates or keyword (e.g. \code{"topright"}) for the legend to be plotted.} \item{stats}{\link{character}: additional labels of statistically important values in the plot. One or more out of the following: \itemize{ \item \code{"min"}, \item \code{"max"}, \item \code{"median"}. 
}} \item{rug}{\link{logical}: Option to add a rug to the KDE part, to indicate the location of individual values.} \item{kde}{\link{logical}: Option to add a KDE plot to the dispersion part, default is \code{TRUE}.} \item{hist}{\link{logical}: Option to add a histogram to the dispersion part. Only meaningful when not more than one data set is plotted.} \item{dots}{\link{logical}: Option to add a dot plot to the dispersion part. If number of dots exceeds space in the dispersion part, a square indicates this.} \item{boxplot}{\link{logical}: Option to add a boxplot to the dispersion part, default is \code{FALSE}.} \item{y.axis}{\link{logical}: Option to hide y-axis labels. Useful for data with small scatter.} \item{error.bars}{\link{logical}: Option to show De-errors as error bars on De-points. Useful in combination with \verb{y.axis = FALSE, bar.col = "none"}.} \item{bar}{\link{numeric} (\emph{with default}): option to add one or more dispersion bars (i.e., bar showing the 2-sigma range) centered at the defined values. By default a bar is drawn according to \code{"z.0"}. To omit the bar set \code{"bar = FALSE"}.} \item{bar.col}{\link{character} or \link{numeric} (\emph{with default}): colour of the dispersion bar. Default is \code{"grey60"}.} \item{polygon.col}{\link{character} or \link{numeric} (\emph{with default}): colour of the polygon showing the data scatter. Sometimes this polygon may be omitted for clarity. To disable it use \code{FALSE} or \code{polygon = FALSE}. Default is \code{"grey80"}.} \item{line}{\link{numeric}: numeric values of the additional lines to be added.} \item{line.col}{\link{character} or \link{numeric}: colour of the additional lines.} \item{line.lty}{\link{integer}: line type of additional lines} \item{line.label}{\link{character}: labels for the additional lines.} \item{grid.col}{\link{character} or \link{numeric} (\emph{with default}): colour of the grid lines (originating at \verb{[0,0]} and strechting to the z-scale). 
To disable grid lines use \code{FALSE}. Default is \code{"grey"}.} \item{frame}{\link{numeric} (\emph{with default}): option to modify the plot frame type. Can be one out of \itemize{ \item \code{0} (no frame), \item \code{1} (frame originates at 0,0 and runs along min/max isochrons), \item \code{2} (frame embraces the 2-sigma bar), \item \code{3} (frame embraces the entire plot as a rectangle). } Default is \code{1}.} \item{bw}{\link{character} (\emph{with default}): bin-width for KDE, choose a numeric value for manual setting.} \item{output}{\link{logical}: Optional output of numerical plot parameters. These can be useful to reproduce similar plots. Default is \code{TRUE}.} \item{interactive}{\link{logical} (\emph{with default}): create an interactive abanico plot (requires the 'plotly' package)} \item{...}{Further plot arguments to pass. \code{xlab} must be a vector of length 2, specifying the upper and lower x-axes labels.} } \value{ returns a plot object and, optionally, a list with plot calculus data. } \description{ A plot is produced which allows comprehensive presentation of data precision and its dispersion around a central value as well as illustration of a kernel density estimate, histogram and/or dot plot of the dose values. } \details{ The Abanico Plot is a combination of the classic Radial Plot (\code{plot_RadialPlot}) and a kernel density estimate plot (e.g \code{plot_KDE}). It allows straightforward visualisation of data precision, error scatter around a user-defined central value and the combined distribution of the values, on the actual scale of the measured data (e.g. seconds, equivalent dose, years). The principle of the plot is shown in Galbraith & Green (1990). The function authors are thankful for the thoughtprovocing figure in this article. 
The semi circle (z-axis) of the classic Radial Plot is bent to a straight line here, which actually is the basis for combining this polar (radial) part of the plot with any other cartesian visualisation method (KDE, histogram, PDF and so on). Note that the plot allows displaying two measures of distribution. One is the 2-sigma bar, which illustrates the spread in value errors, and the other is the polygon, which stretches over both parts of the Abanico Plot (polar and cartesian) and illustrates the actual spread in the values themselves. Since the 2-sigma-bar is a polygon, it can be (and is) filled with shaded lines. To change density (lines per inch, default is 15) and angle (default is 45 degrees) of the shading lines, specify these parameters. See \code{?polygon()} for further help. The Abanico Plot supports other than the weighted mean as measure of centrality. When it is obvious that the data is not (log-)normally distributed, the mean (weighted or not) cannot be a valid measure of centrality and hence central dose. Accordingly, the median and the weighted median can be chosen as well to represent a proper measure of centrality (e.g. \code{centrality = "median.weighted"}). Also user-defined numeric values (e.g. from the central age model) can be used if this appears appropriate. The proportion of the polar part and the cartesian part of the Abanico Plot can be modified for display reasons (\code{plot.ratio = 0.75}). By default, the polar part spreads over 75 \\% and leaves 25 \\% for the part that shows the KDE graph. A statistic summary, i.e. 
a collection of statistic measures of centrality and dispersion (and further measures) can be added by specifying one or more of the following keywords: \itemize{ \item \code{"n"} (number of samples) \item \code{"mean"} (mean De value) \item \code{"median"} (median of the De values) \item \code{"sd.rel"} (relative standard deviation in percent) \item \code{"sd.abs"} (absolute standard deviation) \item \code{"se.rel"} (relative standard error) \item \code{"se.abs"} (absolute standard error) \item \code{"in.2s"} (percent of samples in 2-sigma range) \item \code{"kurtosis"} (kurtosis) \item \code{"skewness"} (skewness) } \strong{Note} that the input data for the statistic summary is sent to the function \code{calc_Statistics()} depending on the log-option for the z-scale. If \code{"log.z = TRUE"}, the summary is based on the logarithms of the input data. If \code{"log.z = FALSE"} the linearly scaled data is used. \strong{Note} as well, that \code{"calc_Statistics()"} calculates these statistic measures in three different ways: \code{unweighted}, \code{weighted} and \code{MCM-based} (i.e., based on Monte Carlo Methods). By default, the MCM-based version is used. If you wish to use another method, indicate this with the appropriate keyword using the argument \code{summary.method}. The optional parameter \code{layout} allows to modify the entire plot more sophisticated. Each element of the plot can be addressed and its properties can be defined. This includes font type, size and decoration, colours and sizes of all plot items. To infer the definition of a specific layout style cf. \code{get_Layout()} or type eg. for the layout type \code{"journal"} \code{get_Layout("journal")}. A layout type can be modified by the user by assigning new values to the list object. It is possible for the z-scale to specify where ticks are to be drawn by using the parameter \code{at}, e.g. \code{at = seq(80, 200, 20)}, cf. function documentation of \code{axis}. 
Specifying tick positions manually overrides a \code{zlim}-definition. } \section{Function version}{ 0.1.11 } \examples{ ## load example data and recalculate to Gray data(ExampleData.DeValues, envir = environment()) ExampleData.DeValues <- ExampleData.DeValues$CA1 ## plot the example data straightforward plot_AbanicoPlot(data = ExampleData.DeValues) ## now with linear z-scale plot_AbanicoPlot(data = ExampleData.DeValues, log.z = FALSE) ## now with output of the plot parameters plot1 <- plot_AbanicoPlot(data = ExampleData.DeValues, output = TRUE) str(plot1) plot1$zlim ## now with adjusted z-scale limits plot_AbanicoPlot(data = ExampleData.DeValues, zlim = c(10, 200)) ## now with adjusted x-scale limits plot_AbanicoPlot(data = ExampleData.DeValues, xlim = c(0, 20)) ## now with rug to indicate individual values in KDE part plot_AbanicoPlot(data = ExampleData.DeValues, rug = TRUE) ## now with a smaller bandwidth for the KDE plot plot_AbanicoPlot(data = ExampleData.DeValues, bw = 0.04) ## now with a histogram instead of the KDE plot plot_AbanicoPlot(data = ExampleData.DeValues, hist = TRUE, kde = FALSE) ## now with a KDE plot and histogram with manual number of bins plot_AbanicoPlot(data = ExampleData.DeValues, hist = TRUE, breaks = 20) ## now with a KDE plot and a dot plot plot_AbanicoPlot(data = ExampleData.DeValues, dots = TRUE) ## now with user-defined plot ratio plot_AbanicoPlot(data = ExampleData.DeValues, plot.ratio = 0.5) ## now with user-defined central value plot_AbanicoPlot(data = ExampleData.DeValues, z.0 = 70) ## now with median as central value plot_AbanicoPlot(data = ExampleData.DeValues, z.0 = "median") ## now with the 17-83 percentile range as definition of scatter plot_AbanicoPlot(data = ExampleData.DeValues, z.0 = "median", dispersion = "p17") ## now with user-defined green line for minimum age model CAM <- calc_CentralDose(ExampleData.DeValues, plot = FALSE) plot_AbanicoPlot(data = ExampleData.DeValues, line = CAM, line.col = "darkgreen", line.label 
= "CAM") ## now create plot with legend, colour, different points and smaller scale plot_AbanicoPlot(data = ExampleData.DeValues, legend = "Sample 1", col = "tomato4", bar.col = "peachpuff", pch = "R", cex = 0.8) ## now without 2-sigma bar, polygon, grid lines and central value line plot_AbanicoPlot(data = ExampleData.DeValues, bar.col = FALSE, polygon.col = FALSE, grid.col = FALSE, y.axis = FALSE, lwd = 0) ## now with direct display of De errors, without 2-sigma bar plot_AbanicoPlot(data = ExampleData.DeValues, bar.col = FALSE, ylab = "", y.axis = FALSE, error.bars = TRUE) ## now with user-defined axes labels plot_AbanicoPlot(data = ExampleData.DeValues, xlab = c("Data error (\%)", "Data precision"), ylab = "Scatter", zlab = "Equivalent dose [Gy]") ## now with minimum, maximum and median value indicated plot_AbanicoPlot(data = ExampleData.DeValues, stats = c("min", "max", "median")) ## now with a brief statistical summary as subheader plot_AbanicoPlot(data = ExampleData.DeValues, summary = c("n", "in.2s")) ## now with another statistical summary plot_AbanicoPlot(data = ExampleData.DeValues, summary = c("mean.weighted", "median"), summary.pos = "topleft") ## now a plot with two 2-sigma bars for one data set plot_AbanicoPlot(data = ExampleData.DeValues, bar = c(30, 100)) ## now the data set is split into sub-groups, one is manipulated data.1 <- ExampleData.DeValues[1:30,] data.2 <- ExampleData.DeValues[31:62,] * 1.3 ## now a common dataset is created from the two subgroups data.3 <- list(data.1, data.2) ## now the two data sets are plotted in one plot plot_AbanicoPlot(data = data.3) ## now with some graphical modification plot_AbanicoPlot(data = data.3, z.0 = "median", col = c("steelblue4", "orange4"), bar.col = c("steelblue3", "orange3"), polygon.col = c("steelblue1", "orange1"), pch = c(2, 6), angle = c(30, 50), summary = c("n", "in.2s", "median")) ## create Abanico plot with predefined layout definition plot_AbanicoPlot(data = ExampleData.DeValues, layout = 
"journal") ## now with predefined layout definition and further modifications plot_AbanicoPlot(data = data.3, z.0 = "median", layout = "journal", col = c("steelblue4", "orange4"), bar.col = adjustcolor(c("steelblue3", "orange3"), alpha.f = 0.5), polygon.col = c("steelblue3", "orange3")) ## for further information on layout definitions see documentation ## of function get_Layout() ## now with manually added plot content ## create empty plot with numeric output AP <- plot_AbanicoPlot(data = ExampleData.DeValues, pch = NA, output = TRUE) ## identify data in 2 sigma range in_2sigma <- AP$data[[1]]$data.in.2s ## restore function-internal plot parameters par(AP$par) ## add points inside 2-sigma range points(x = AP$data[[1]]$precision[in_2sigma], y = AP$data[[1]]$std.estimate.plot[in_2sigma], pch = 16) ## add points outside 2-sigma range points(x = AP$data[[1]]$precision[!in_2sigma], y = AP$data[[1]]$std.estimate.plot[!in_2sigma], pch = 1) } \section{How to cite}{ Dietze, M., Kreutzer, S., 2020. plot_AbanicoPlot(): Function to create an Abanico Plot.. Function version 0.1.11. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Galbraith, R. & Green, P., 1990. Estimating the component ages in a finite mixture. International Journal of Radiation Applications and Instrumentation. Part D. Nuclear Tracks and Radiation Measurements, 17 (3), 197-206. Dietze, M., Kreutzer, S., Burow, C., Fuchs, M.C., Fischer, M., Schmidt, C., 2015. The abanico plot: visualising chronometric data with individual standard errors. Quaternary Geochronology. 
doi:10.1016/j.quageo.2015.09.003 } \seealso{ \link{plot_RadialPlot}, \link{plot_KDE}, \link{plot_Histogram} } \author{ Michael Dietze, GFZ Potsdam (Germany)\cr Sebastian Kreutzer, RAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr Inspired by a plot introduced by Galbraith & Green (1990) , RLum Developer Team} Luminescence/man/read_SPE2R.Rd0000644000176200001440000001005113604173245015556 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/read_SPE2R.R \name{read_SPE2R} \alias{read_SPE2R} \title{Import Princeton Intruments (TM) SPE-file into R} \usage{ read_SPE2R( file, output.object = "RLum.Data.Image", frame.range, txtProgressBar = TRUE, verbose = TRUE ) } \arguments{ \item{file}{\link{character} (\strong{required}): spe-file name (including path), e.g. \itemize{ \item \verb{[WIN]}: \code{read_SPE2R("C:/Desktop/test.spe")} \item \verb{[MAC/LINUX]}: \code{readSPER("/User/test/Desktop/test.spe")}. Additionally internet connections are supported. }} \item{output.object}{\link{character} (\emph{with default}): set \code{RLum} output object. Allowed types are \code{"RLum.Data.Spectrum"}, \code{"RLum.Data.Image"} or \code{"matrix"}} \item{frame.range}{\link{vector} (\emph{optional}): limit frame range, e.g. select first 100 frames by \code{frame.range = c(1,100)}} \item{txtProgressBar}{\link{logical} (\emph{with default}): enables or disables \link{txtProgressBar}.} \item{verbose}{\link{logical} (\emph{with default}): enables or disables verbose mode} } \value{ Depending on the chosen option the functions returns three different type of objects: \code{output.object} \code{RLum.Data.Spectrum} An object of type \linkS4class{RLum.Data.Spectrum} is returned. Row sums are used to integrate all counts over one channel. \code{RLum.Data.Image} An object of type \linkS4class{RLum.Data.Image} is returned. Due to performace reasons the import is aborted for files containing more than 100 frames. 
This limitation can be overwritten manually by using the argument \code{frame.range}. \code{matrix} Returns a matrix of the form: Rows = Channels, columns = Frames. For the transformation the function \link{get_RLum} is used, meaning that the same results can be obtained by using the function \link{get_RLum} on an \code{RLum.Data.Spectrum} or \code{RLum.Data.Image} object. } \description{ Function imports Princeton Instruments (TM) SPE-files into R environment and provides \code{RLum} objects as output. } \details{ Function provides an import routine for the Princeton Instruments SPE format. Import functionality is based on the file format description provided by Princeton Instruments and a MatLab script written by Carl Hall (s. references). } \note{ \strong{The function does not test whether the input data are spectra or pictures for spatial resolved analysis!} The function has been successfully tested for SPE format versions 2.x. \emph{Currently not all information provided by the SPE format are supported.} } \section{Function version}{ 0.1.2 } \examples{ ## to run examples uncomment lines and run the code ##(1) Import data as RLum.Data.Spectrum object #file <- file.choose() #temp <- read_SPE2R(file) #temp ##(2) Import data as RLum.Data.Image object #file <- file.choose() #temp <- read_SPE2R(file, output.object = "RLum.Data.Image") #temp ##(3) Import data as matrix object #file <- file.choose() #temp <- read_SPE2R(file, output.object = "matrix") #temp ##(4) Export raw data to csv, if temp is a RLum.Data.Spectrum object # write.table(x = get_RLum(temp), # file = "[your path and filename]", # sep = ";", row.names = FALSE) } \section{How to cite}{ Kreutzer, S., 2020. read_SPE2R(): Import Princeton Instruments (TM) SPE-file into R. Function version 0.1.2. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \references{ Princeton Instruments, 2014. Princeton Instruments SPE 3.0 File Format Specification, Version 1.A (for document URL please use an internet search machine) Hall, C., 2012: readSPE.m. \url{http://www.mathworks.com/matlabcentral/fileexchange/35940-readspe/content/readSPE.m} } \seealso{ \link{readBin}, \linkS4class{RLum.Data.Spectrum}, \link[raster:raster]{raster::raster} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France) , RLum Developer Team} \keyword{IO} Luminescence/man/calc_HomogeneityTest.Rd0000644000176200001440000000431313604173244020044 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calc_HomogeneityTest.R \name{calc_HomogeneityTest} \alias{calc_HomogeneityTest} \title{Apply a simple homogeneity test after Galbraith (2003)} \usage{ calc_HomogeneityTest(data, log = TRUE, ...) } \arguments{ \item{data}{\linkS4class{RLum.Results} or \link{data.frame} (\strong{required}): for \link{data.frame}: two columns with De \code{(data[,1])} and De error \code{(values[,2])}} \item{log}{\link{logical} (\emph{with default}): perform the homogeneity test with (un-)logged data} \item{...}{further arguments (for internal compatibility only).} } \value{ Returns a terminal output. In addition an \linkS4class{RLum.Results}-object is returned containing the following elements: \item{summary}{\link{data.frame} summary of all relevant model results.} \item{data}{\link{data.frame} original input data} \item{args}{\link{list} used arguments} \item{call}{\link{call} the function call} The output should be accessed using the function \link{get_RLum} } \description{ A simple homogeneity test for De estimates } \details{ For details see Galbraith (2003). 
} \section{Function version}{ 0.3.0 } \examples{ ## load example data data(ExampleData.DeValues, envir = environment()) ## apply the homogeneity test calc_HomogeneityTest(ExampleData.DeValues$BT998) ## using the data presented by Galbraith (2003) df <- data.frame( x = c(30.1, 53.8, 54.3, 29.0, 47.6, 44.2, 43.1), y = c(4.8, 7.1, 6.8, 4.3, 5.2, 5.9, 3.0)) calc_HomogeneityTest(df) } \section{How to cite}{ Burow, C., Kreutzer, S., 2020. calc_HomogeneityTest(): Apply a simple homogeneity test after Galbraith (2003). Function version 0.3.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Galbraith, R.F., 2003. A simple homogeneity test for estimates of dose obtained using OSL. Ancient TL 21, 75-77. } \seealso{ \link{pchisq} } \author{ Christoph Burow, University of Cologne (Germany), Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France) , RLum Developer Team} Luminescence/man/convert_XSYG2CSV.Rd0000644000176200001440000000356713604173244016735 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/convert_XSYG2CSV.R \name{convert_XSYG2CSV} \alias{convert_XSYG2CSV} \title{Export XSYG-file(s) to CSV-files} \usage{ convert_XSYG2CSV(file, ...) 
} \arguments{ \item{file}{\link{character} (\strong{required}): name of the XSYG-file to be converted to CSV-files} \item{...}{further arguments that will be passed to the function \link{read_XSYG2R} and \link{write_RLum2CSV}} } \value{ The function returns either a CSV-file (or many of them) or for the option \code{export = FALSE} a list comprising objects of type \link{data.frame} and \link{matrix} } \description{ This function is a wrapper function around the functions \link{read_XSYG2R} and \link{write_RLum2CSV} and it imports an XSYG-file and directly exports its content to CSV-files. If nothing is set for the argument \code{path} (\link{write_RLum2CSV}) the input folder will become the output folder. } \section{Function version}{ 0.1.0 } \examples{ ##transform XSYG-file values to a list data(ExampleData.XSYG, envir = environment()) convert_XSYG2CSV(OSL.SARMeasurement$Sequence.Object[1:10], export = FALSE) \dontrun{ ##select your BIN-file file <- file.choose() ##convert convert_XSYG2CSV(file) } } \seealso{ \linkS4class{RLum.Analysis}, \linkS4class{RLum.Data}, \linkS4class{RLum.Results}, \link[utils:write.table]{utils::write.table}, \link{write_RLum2CSV}, \link{read_XSYG2R} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. convert_XSYG2CSV(): Export XSYG-file(s) to CSV-files. Function version 0.1.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. 
https://CRAN.R-project.org/package=Luminescence } \keyword{IO} Luminescence/man/plot_ViolinPlot.Rd0000644000176200001440000001026313604173245017072 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_ViolinPlot.R \name{plot_ViolinPlot} \alias{plot_ViolinPlot} \title{Create a violin plot} \usage{ plot_ViolinPlot( data, boxplot = TRUE, rug = TRUE, summary = NULL, summary.pos = "sub", na.rm = TRUE, ... ) } \arguments{ \item{data}{\link{numeric} or \linkS4class{RLum.Results} (\strong{required}): input data for plotting. Alternatively a \link{data.frame} or a \link{matrix} can be provided, but only the first column will be considered by the function} \item{boxplot}{\link{logical} (\emph{with default}): enable or disable boxplot} \item{rug}{\link{logical} (\emph{with default}): enable or disable rug} \item{summary}{\link{character} (\emph{optional}): add statistic measures of centrality and dispersion to the plot. Can be one or more of several keywords. See details for available keywords.} \item{summary.pos}{\link{numeric} or \link{character} (\emph{with default}): optional position keywords (cf., \link{legend}) for the statistical summary. Alternatively, the keyword \code{"sub"} may be specified to place the summary below the plot header. However, this latter option is only possible if \code{mtext} is not used.} \item{na.rm}{\link{logical} (\emph{with default}): exclude NA values from the data set prior to any further operations.} \item{...}{further arguments and graphical parameters passed to \link{plot.default}, \link[stats:density]{stats::density} and \link{boxplot}. See details for further information} } \description{ Draws a kernel density plot in combination with a boxplot in its middle. The shape of the violin is constructed using a mirrored density curve. This plot is especially designed for cases where the individual errors are zero or too small to be visualised. 
The idea for this plot is based on the 'volcano plot' in the ggplot2 package by Hadley Wickham and Winston Chang. The general idea for the Violin Plot seems to be introduced by Hintze and Nelson (1998). } \details{ The function is passing several arguments to the function \link{plot}, \link[stats:density]{stats::density}, \link[graphics:boxplot]{graphics::boxplot}: Supported arguments are: \code{xlim}, \code{main}, \code{xlab}, \code{ylab}, \code{col.violin}, \code{col.boxplot}, \code{mtext}, \code{cex}, \code{mtext} \strong{\verb{Valid summary keywords}} \code{'n'}, \code{'mean'}, \code{'median'}, \code{'sd.abs'}, \code{'sd.rel'}, \code{'se.abs'}, \code{'se.rel'}. \code{'skewness'}, \code{'kurtosis'} } \note{ Although the code for this function was developed independently and just the idea for the plot was based on the 'ggplot2' package plot type 'volcano', it should be mentioned that, beyond this, two other R packages exist providing a possibility to produce this kind of plot, namely: 'vioplot' and 'violinmplot' (see References for details). } \section{Function version}{ 0.1.4 } \examples{ ## read example data set data(ExampleData.DeValues, envir = environment()) ExampleData.DeValues <- Second2Gray(ExampleData.DeValues$BT998, c(0.0438,0.0019)) ## create plot straightforward plot_ViolinPlot(data = ExampleData.DeValues) } \section{How to cite}{ Kreutzer, S., 2020. plot_ViolinPlot(): Create a violin plot. Function version 0.1.4. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Daniel Adler (2005). vioplot: A violin plot is a combination of a box plot and a kernel density plot. R package version 0.2 http://CRAN.R-project.org/package=vioplot Hintze, J.L., Nelson, R.D., 1998. A Box Plot-Density Trace Synergism. The American Statistician 52, 181-184. Raphael W. 
Majeed (2012). violinmplot: Combination of violin plot with mean and standard deviation. R package version 0.2.1. http://CRAN.R-project.org/package=violinmplot Wickham. H (2009). ggplot2: elegant graphics for data analysis. Springer New York. } \seealso{ \link[stats:density]{stats::density}, \link{plot}, \link{boxplot}, \link{rug}, \link{calc_Statistics} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} Luminescence/man/Luminescence-package.Rd0000644000176200001440000001206513604173243017740 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \docType{package} \name{Luminescence-package} \alias{Luminescence-package} \alias{Luminescence} \title{Comprehensive Luminescence Dating Data Analysis} \description{ \if{html}{ \figure{RL_Logo.png}{options: width="50px" alt="r-luminescence.org"}\cr \emph{R Luminescence Developer Team} } } \details{ A collection of various R functions for the purpose of Luminescence dating data analysis. This includes, amongst others, data import, export, application of age models, curve deconvolution, sequence analysis and plotting of equivalent dose distributions. \strong{Full list of authors and contributors} (alphabetic order) \tabular{ll}{ Martin Autzen \tab \emph{DTU NUTECH Center for Nuclear Technologies} \cr Christoph Burow \tab \emph{University of Cologne, Germany}* \cr Claire Christophe \tab \emph{IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne, France} \cr Michael Dietze \tab \emph{GFZ Helmholtz Centre Potsdam, Germany} \cr Julie Durcan \tab \emph{University of Oxford, United Kingdom} \cr Pierre Guibert \tab \emph{IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne, France} \cr Manfred Fischer\tab \emph{University of Bayreuth, Germany} \cr Margret C. 
Fuchs \tab \emph{Helmholtz-Zentrum Dresden-Rossendorf, Helmholtz-Institute Freiberg for Resource Technology, Freiberg, Germany} \cr Johannes Friedrich \tab \emph{Chair of Geomorphology, University of Bayreuth, Germany} \cr Guillaume Guérin \tab \emph{IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne, France} \cr Georgina E. King \tab \emph{University of Lausanne, Switzerland} \cr Sebastian Kreutzer \tab *Department of Geography & Earth Sciences, Aberystwyth University, United Kingdom * \cr Norbert Mercier \tab \emph{IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne, France} \cr Svenja Riedesel \tab \emph{Aberystwyth University, United Kingdom} \cr Christoph Schmidt \tab \emph{Chair of Geomorophology, University of Bayreuth, Germany} \cr Rachel K. Smedley \tab \emph{Liverpool University, United Kingdom} \cr Anne Philippe \tab \emph{Universite de Nantes and ANJA INRIA, Rennes, France} \cr Antoine Zink \tab \emph{C2RMF, Palais du Louvre, Paris, France} } \strong{Supervisor of the initial version in 2012} Markus Fuchs, Justus-Liebig-University Giessen, Germany \strong{Support contact} \email{developers@r-luminescence.org} We may further encourage the usage of our support forum. For this please visit our project website (link below). 
\strong{Bug reporting} \itemize{ \item \email{developers@r-luminescence.org} or \item \url{https://github.com/R-Lum/Luminescence/issues} } \strong{Project website} \itemize{ \item \url{http://www.r-luminescence.org} } \strong{Project source code repository} \itemize{ \item \url{https://github.com/R-Lum/Luminescence} } \strong{Related package projects} \itemize{ \item \url{https://cran.r-project.org/package=RLumShiny} \item \url{https://cran.r-project.org/package=RLumModel} } \strong{Package maintainer} Sebastian Kreutzer, Department of Geography & Earth Sciences, Aberystwyth University, United Kingdom,\cr \email{sebastian.kreutzer@u-bordeaux-montaigne.fr} \strong{Funding} Cooperation and personal exchange between the developers is gratefully funded by the DFG (SCHM 3051/3-1) in the framework of the program "Scientific Networks". Project title: "RLum.Network: Ein Wissenschaftsnetzwerk zur Analyse von Lumineszenzdaten mit R" (2014-2018) Between 2014--2019, the work of Sebastian Kreutzer as maintainer of the package was supported by LabEx LaScArBxSK (ANR - n. ANR-10-LABX-52). } \references{ Dietze, M., Kreutzer, S., Fuchs, M.C., Burow, C., Fischer, M., Schmidt, C., 2013. A practical guide to the R package Luminescence. Ancient TL, 31 (1), 11-18. Dietze, M., Kreutzer, S., Burow, C., Fuchs, M.C., Fischer, M., Schmidt, C., 2016. The abanico plot: visualising chronometric data with individual standard errors. Quaternary Geochronology 31, 1-7. https://doi.org/10.1016/j.quageo.2015.09.003 Fuchs, M.C., Kreutzer, S., Burow, C., Dietze, M., Fischer, M., Schmidt, C., Fuchs, M., 2015. Data processing in luminescence dating analysis: An exemplary workflow using the R package 'Luminescence'. Quaternary International, 362,8-13. https://doi.org/10.1016/j.quaint.2014.06.034 Kreutzer, S., Schmidt, C., Fuchs, M.C., Dietze, M., Fischer, M., Fuchs, M., 2012. Introducing an R package for luminescence dating analysis. Ancient TL, 30 (1), 1-8. 
Mercier, N., Kreutzer, S., Christophe, C., Guérin, G., Guibert, P., Lahaye, C., Lanos, P., Philippe, A., Tribolo, C., 2016. Bayesian statistics in luminescence dating: The ’baSAR’-model and its implementation in the R package ’Luminescence’. Ancient TL 34 (2), 14-21. Smedley, R.K., 2015. A new R function for the Internal External Uncertainty (IEU) model. Ancient TL, 33 (1), 16-21. King, E.G., Burow, C., Roberts, H., Pearce, N.J.G., 2018. Age determination using feldspar: evaluating fading-correction model performance. Radiation Measurements 119, 58-73. https://doi.org/10.1016/j.radmeas.2018.07.013 } \keyword{package} Luminescence/man/names_RLum.Rd0000644000176200001440000000315713604173244016002 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/names_RLum.R \name{names_RLum} \alias{names_RLum} \alias{names_RLum,list-method} \title{S4-names function for RLum S4 class objects} \usage{ names_RLum(object) \S4method{names_RLum}{list}(object) } \arguments{ \item{object}{\linkS4class{RLum} (\strong{required}): S4 object of class \code{RLum}} } \value{ Returns a \link{character} } \description{ Function calls object-specific names functions for RLum S4 class objects. } \details{ The function provides a generalised access point for specific \linkS4class{RLum} objects.\cr Depending on the input object, the corresponding 'names' function will be selected. Allowed arguments can be found in the documentations of the corresponding \linkS4class{RLum} class. 
} \section{Functions}{ \itemize{ \item \code{names_RLum,list-method}: Returns a list of \linkS4class{RLum} objects that had been passed to \link{names_RLum} }} \section{Function version}{ 0.1.0 } \seealso{ \linkS4class{RLum.Data.Curve}, \linkS4class{RLum.Data.Image}, \linkS4class{RLum.Data.Spectrum}, \linkS4class{RLum.Analysis}, \linkS4class{RLum.Results} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. names_RLum(): S4-names function for RLum S4 class objects. Function version 0.1.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{utilities} Luminescence/man/ExampleData.Al2O3C.Rd0000644000176200001440000000336613604173243017050 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \name{ExampleData.Al2O3C} \alias{ExampleData.Al2O3C} \alias{data_CrossTalk} \alias{data_ITC} \title{Example Al2O3:C Measurement Data} \format{Two datasets comprising \linkS4class{RLum.Analysis} data imported using the function \link{read_XSYG2R} \describe{ \code{data_ITC}: Measurement data to determine the irradiation time correction, the data can be analysed with the function \link{analyse_Al2O3C_ITC} \code{data_CrossTalk}: Measurement data obtained while estimating the irradiation cross-talk of the reader used for the experiments. The data can be analysed either with the function \link{analyse_Al2O3C_CrossTalk} or \link{analyse_Al2O3C_Measurement} }} \description{ Measurement data obtained from measuring Al2O3:C chips at the IRAMAT-CRP2A, Université Bordeaux Montainge in 2017 on a Freiberg Instruments lexsyg SMART reader. 
The example data is used in particular to allow testing of the functions developed in framework of the work by Kreutzer et al., 2018. } \note{ From both datasets unneeded curves have been removed and the number of aliquots have been reduced to a required minimum to keep the file size small, but still being able to run the corresponding functions. } \examples{ ##(1) curves data(ExampleData.Al2O3C, envir = environment()) plot_RLum(data_ITC[1:2]) } \references{ Kreutzer, S., Martin, L., Guérin, G., Tribolo, C., Selva, P., Mercier, N., 2018. Environmental Dose Rate Determination Using a Passive Dosimeter: Techniques and Workflow for alpha-Al2O3:C Chips. Geochronometria 45, 56–67. \doi{10.1515/geochr-2015-0086} } \seealso{ \link{analyse_Al2O3C_ITC}, \link{analyse_Al2O3C_CrossTalk}, \link{analyse_Al2O3C_Measurement} } \keyword{datasets} Luminescence/man/ExampleData.RLum.Data.Image.Rd0000644000176200001440000000203013604173243020660 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Luminescence-package.R \name{ExampleData.RLum.Data.Image} \alias{ExampleData.RLum.Data.Image} \title{Example data as \linkS4class{RLum.Data.Image} objects} \format{Object of class \linkS4class{RLum.Data.Image}} \source{ \strong{ExampleData.RLum.Data.Image} These data were kindly provided by Regina DeWitt. \tabular{ll}{ Lab.: \tab Department of Physics, East-Carolina University, NC, USA\cr Lab-Code: \tab - \cr Location: \tab - \cr Material: \tab - \cr Reference: \tab - \cr } Image data is a measurement of fluorescent ceiling lights with a cooled Princeton Instruments (TM) camera fitted on Risoe DA-20 TL/OSL reader. } \description{ Measurement of Princeton Instruments camera imported with the function \link{read_SPE2R} to R to produce an \linkS4class{RLum.Data.Image} object. 
} \section{Version}{ 0.1 } \examples{ ##load data data(ExampleData.RLum.Data.Image, envir = environment()) ##plot data plot_RLum(ExampleData.RLum.Data.Image) } \keyword{datasets} Luminescence/man/CW2pPMi.Rd0000644000176200001440000001271013604173244015114 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/CW2pPMi.R \name{CW2pPMi} \alias{CW2pPMi} \title{Transform a CW-OSL curve into a pPM-OSL curve via interpolation under parabolic modulation conditions} \usage{ CW2pPMi(values, P) } \arguments{ \item{values}{\linkS4class{RLum.Data.Curve} or \link{data.frame} (\strong{required}): \linkS4class{RLum.Data.Curve} or \code{data.frame} with measured curve data of type stimulation time (t) (\code{values[,1]}) and measured counts (cts) (\code{values[,2]})} \item{P}{\link{vector} (\emph{optional}): stimulation period in seconds. If no value is given, the optimal value is estimated automatically (see details). Greater values of P produce more points in the rising tail of the curve.} } \value{ The function returns the same data type as the input data type with the transformed curve values. \code{RLum.Data.Curve} \tabular{rl}{ \verb{$CW2pPMi.x.t} \tab: transformed time values \cr \verb{$CW2pPMi.method} \tab: used method for the production of the new data points } \code{data.frame} \tabular{rl}{ \verb{$x} \tab: time\cr \verb{$y.t} \tab: transformed count values\cr \verb{$x.t} \tab: transformed time values \cr \verb{$method} \tab: used method for the production of the new data points } } \description{ Transforms a conventionally measured continuous-wave (CW) OSL-curve into a pseudo parabolic modulated (pPM) curve under parabolic modulation conditions using the interpolation procedure described by Bos & Wallinga (2012). } \details{ The complete procedure of the transformation is given in Bos & Wallinga (2012). 
The input \code{data.frame} consists of two columns: time (t) and count values (CW(t)) \strong{Nomenclature} \itemize{ \item P = stimulation time (s) \item 1/P = stimulation rate (1/s) } \strong{Internal transformation steps} (1) log(CW-OSL) values (2) Calculate t' which is the transformed time: \deqn{t' = (1/3)*(1/P^2)t^3} (3) Interpolate CW(t'), i.e. use the log(CW(t)) to obtain the count values for the transformed time (t'). Values beyond \code{min(t)} and \code{max(t)} produce \code{NA} values. (4) Select all values for t' < \code{min(t)}, i.e. values beyond the time resolution of t. Select the first two values of the transformed data set which contain no \code{NA} values and use these values for a linear fit using \link{lm}. (5) Extrapolate values for t' < \code{min(t)} based on the previously obtained fit parameters. The extrapolation is limited to two values. Other values at the beginning of the transformed curve are set to 0. (6) Transform values using \deqn{pLM(t) = t^2/P^2*CW(t')} (7) Combine all values and truncate all values for t' > \code{max(t)} \strong{NOTE:} The number of values for t' < \code{min(t)} depends on the stimulation period \code{P}. To avoid the production of too many artificial data at the rising tail of the determined pPM curve, it is recommended to use the automatic estimation routine for \code{P}, i.e. provide no value for \code{P}. } \note{ According to Bos & Wallinga (2012), the number of extrapolated points should be limited to avoid artificial intensity data. If \code{P} is provided manually, not more than two points are extrapolated. } \section{Function version}{ 0.2.1 } \examples{ ##(1) ##load CW-OSL curve data data(ExampleData.CW_OSL_Curve, envir = environment()) ##transform values values.transformed <- CW2pPMi(ExampleData.CW_OSL_Curve) ##plot plot(values.transformed$x,values.transformed$y.t, log = "x") ##(2) - produce Fig. 
4 from Bos & Wallinga (2012) ##load data data(ExampleData.CW_OSL_Curve, envir = environment()) values <- CW_Curve.BosWallinga2012 ##open plot area plot(NA, NA, xlim = c(0.001,10), ylim = c(0,8000), ylab = "pseudo OSL (cts/0.01 s)", xlab = "t [s]", log = "x", main = "Fig. 4 - Bos & Wallinga (2012)") values.t <- CW2pLMi(values, P = 1/20) lines(values[1:length(values.t[,1]),1],CW2pLMi(values, P = 1/20)[,2], col = "red",lwd = 1.3) text(0.03,4500,"LM", col = "red", cex = .8) values.t <- CW2pHMi(values, delta = 40) lines(values[1:length(values.t[,1]),1], CW2pHMi(values, delta = 40)[,2], col = "black", lwd = 1.3) text(0.005,3000,"HM", cex = .8) values.t <- CW2pPMi(values, P = 1/10) lines(values[1:length(values.t[,1]),1], CW2pPMi(values, P = 1/10)[,2], col = "blue", lwd = 1.3) text(0.5,6500,"PM", col = "blue", cex = .8) } \section{How to cite}{ Kreutzer, S., 2020. CW2pPMi(): Transform a CW-OSL curve into a pPM-OSL curve via interpolation under parabolic modulation conditions. Function version 0.2.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Bos, A.J.J. & Wallinga, J., 2012. How to visualize quartz OSL signal components. Radiation Measurements, 47, 752-758. \strong{Further Reading} Bulur, E., 1996. An Alternative Technique For Optically Stimulated Luminescence (OSL) Experiment. Radiation Measurements, 26, 701-709. Bulur, E., 2000. A simple transformation for converting CW-OSL curves to LM-OSL curves. Radiation Measurements, 32, 141-145. } \seealso{ \link{CW2pLM}, \link{CW2pLMi}, \link{CW2pHMi}, \link{fit_LMCurve}, \linkS4class{RLum.Data.Curve} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) Based on comments and suggestions from:\cr Adrie J.J. 
Bos, Delft University of Technology, The Netherlands , RLum Developer Team} \keyword{manip} Luminescence/man/CW2pLM.Rd0000644000176200001440000000572113604173244014743 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/CW2pLM.R \name{CW2pLM} \alias{CW2pLM} \title{Transform a CW-OSL curve into a pLM-OSL curve} \usage{ CW2pLM(values) } \arguments{ \item{values}{\linkS4class{RLum.Data.Curve} or \link{data.frame} (\strong{required}): \code{RLum.Data.Curve} data object. Alternatively, a \code{data.frame} of the measured curve data of type stimulation time (t) (\code{values[,1]}) and measured counts (cts) (\code{values[,2]}) can be provided.} } \value{ The function returns the same data type as the input data type with the transformed curve values (\link{data.frame} or \linkS4class{RLum.Data.Curve}). } \description{ Transforms a conventionally measured continuous-wave (CW) curve into a pseudo linearly modulated (pLM) curve using the equations given in Bulur (2000). } \details{ According to Bulur (2000) the curve data are transformed by introducing two new parameters P (stimulation period) and u (transformed time): \deqn{P=2*max(t)} \deqn{u=\sqrt{(2*t*P)}} The new count values are then calculated by \deqn{ctsNEW = cts(u/P)} and the returned \code{data.frame} is produced by: \code{data.frame(u,ctsNEW)} The output of the function can be further used for LM-OSL fitting. } \note{ The transformation is recommended for curves recorded with a channel resolution of at least 0.05 s/channel. 
} \section{Function version}{ 0.4.1 } \examples{ ##read curve from CWOSL.SAR.Data transform curve and plot values data(ExampleData.BINfileData, envir = environment()) ##read id for the 1st OSL curve id.OSL <- CWOSL.SAR.Data@METADATA[CWOSL.SAR.Data@METADATA[,"LTYPE"] == "OSL","ID"] ##produce x and y (time and count data for the data set) x<-seq(CWOSL.SAR.Data@METADATA[id.OSL[1],"HIGH"]/CWOSL.SAR.Data@METADATA[id.OSL[1],"NPOINTS"], CWOSL.SAR.Data@METADATA[id.OSL[1],"HIGH"], by = CWOSL.SAR.Data@METADATA[id.OSL[1],"HIGH"]/CWOSL.SAR.Data@METADATA[id.OSL[1],"NPOINTS"]) y <- unlist(CWOSL.SAR.Data@DATA[id.OSL[1]]) values <- data.frame(x,y) ##transform values values.transformed <- CW2pLM(values) ##plot plot(values.transformed) } \section{How to cite}{ Kreutzer, S., 2020. CW2pLM(): Transform a CW-OSL curve into a pLM-OSL curve. Function version 0.4.1. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Bulur, E., 2000. A simple transformation for converting CW-OSL curves to LM-OSL curves. Radiation Measurements, 32, 141-145. \strong{Further Reading} Bulur, E., 1996. An Alternative Technique For Optically Stimulated Luminescence (OSL) Experiment. Radiation Measurements, 26, 701-709. 
} \seealso{ \link{CW2pHMi}, \link{CW2pLMi}, \link{CW2pPMi}, \link{fit_LMCurve}, \link{lm}, \linkS4class{RLum.Data.Curve} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \keyword{manip} Luminescence/man/merge_RLum.Analysis.Rd0000644000176200001440000000453313604173244017557 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/merge_RLum.Analysis.R \name{merge_RLum.Analysis} \alias{merge_RLum.Analysis} \title{Merge function for RLum.Analysis S4 class objects} \usage{ merge_RLum.Analysis(objects) } \arguments{ \item{objects}{\link{list} of \linkS4class{RLum.Analysis} (\strong{required}): list of S4 objects of class \code{RLum.Analysis}. Furthermore other objects of class \linkS4class{RLum} can be added, see details.} } \value{ Return an \linkS4class{RLum.Analysis} object. } \description{ Function allows merging of RLum.Analysis objects and adding of allowed objects to an RLum.Analysis. } \details{ This function simply allowing to merge \linkS4class{RLum.Analysis} objects. Additionally other \linkS4class{RLum} objects can be added to an existing \linkS4class{RLum.Analysis} object. Supported objects to be added are: \linkS4class{RLum.Data.Curve}, \linkS4class{RLum.Data.Spectrum} and \linkS4class{RLum.Data.Image}. The order in the new \linkS4class{RLum.Analysis} object is the object order provided with the input list. } \note{ The information for the slot 'protocol' is taken from the first \linkS4class{RLum.Analysis} object in the input list. Therefore at least one object of type \linkS4class{RLum.Analysis} has to be provided. 
} \section{Function version}{ 0.2.0 } \examples{ ##merge different RLum objects from the example data data(ExampleData.RLum.Analysis, envir = environment()) data(ExampleData.BINfileData, envir = environment()) object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) curve <- get_RLum(object)[[2]] temp.merged <- merge_RLum.Analysis(list(curve, IRSAR.RF.Data, IRSAR.RF.Data)) } \seealso{ \link{merge_RLum}, \linkS4class{RLum.Analysis}, \linkS4class{RLum.Data.Curve}, \linkS4class{RLum.Data.Spectrum}, \linkS4class{RLum.Data.Image}, \linkS4class{RLum} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) , RLum Developer Team} \section{How to cite}{ Kreutzer, S., 2020. merge_RLum.Analysis(): Merge function for RLum.Analysis S4 class objects. Function version 0.2.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \keyword{internal} \keyword{utilities} Luminescence/man/Second2Gray.Rd0000644000176200001440000001013713604173245016055 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Second2Gray.R \name{Second2Gray} \alias{Second2Gray} \title{Converting equivalent dose values from seconds (s) to gray (Gy)} \usage{ Second2Gray(data, dose.rate, error.propagation = "omit") } \arguments{ \item{data}{\link{data.frame} (\strong{required}): input values, structure: data (\code{values[,1]}) and data error (\code{values [,2]}) are required} \item{dose.rate}{\linkS4class{RLum.Results}, \link{data.frame} or \link{numeric} (\strong{required}): \code{RLum.Results} needs to be orginated from the function \link{calc_SourceDoseRate}, for \code{vector} dose rate in Gy/s and dose rate error in Gy/s} \item{error.propagation}{\link{character} (\emph{with default}): error propagation method used for error calculation 
(\code{omit}, \code{gaussian} or \code{absolute}), see details for further information} } \value{ Returns a \link{data.frame} with converted values. } \description{ Conversion of absorbed radiation dose in seconds (s) to the SI unit gray (Gy) including error propagation. Normally used for equivalent dose data. } \details{ Calculation of De values from seconds (s) to gray (Gy) \deqn{De [Gy] = De [s] * Dose Rate [Gy/s])} Provided calculation error propagation methods for error calculation (with 'se' as the standard error and 'DR' of the dose rate of the beta-source): \strong{(1) \code{omit}} (default) \deqn{se(De) [Gy] = se(De) [s] * DR [Gy/s]} In this case the standard error of the dose rate of the beta-source is treated as systematic (i.e. non-random), it error propagation is omitted. However, the error must be considered during calculation of the final age. (cf. Aitken, 1985, pp. 242). This approach can be seen as method (2) (gaussian) for the case the (random) standard error of the beta-source calibration is 0. Which particular method is requested depends on the situation and cannot be prescriptive. \strong{(2) \code{gaussian}} error propagation \deqn{se(De) [Gy] = \sqrt((DR [Gy/s] * se(De) [s])^2 + (De [s] * se(DR) [Gy/s])^2)} Applicable under the assumption that errors of De and se are uncorrelated. \strong{(3) \code{absolute}} error propagation \deqn{se(De) [Gy]= abs(DR [Gy/s] * se(De) [s]) + abs(De [s] * se(DR) [Gy/s])} Applicable under the assumption that errors of De and se are not uncorrelated. } \note{ If no or a wrong error propagation method is given, the execution of the function is stopped. 
Furthermore, if a \code{data.frame} is provided for the dose rate values is has to be of the same length as the data frame provided with the argument \code{data} } \section{Function version}{ 0.6.0 } \examples{ ##(A) for known source dose rate at date of measurement ## - load De data from the example data help file data(ExampleData.DeValues, envir = environment()) ## - convert De(s) to De(Gy) Second2Gray(ExampleData.DeValues$BT998, c(0.0438,0.0019)) ##(B) for source dose rate calibration data ## - calculate source dose rate first dose.rate <- calc_SourceDoseRate(measurement.date = "2012-01-27", calib.date = "2014-12-19", calib.dose.rate = 0.0438, calib.error = 0.0019) # read example data data(ExampleData.DeValues, envir = environment()) # apply dose.rate to convert De(s) to De(Gy) Second2Gray(ExampleData.DeValues$BT998, dose.rate) } \section{How to cite}{ Kreutzer, S., Dietze, M., Fuchs, M.C., 2020. Second2Gray(): Converting equivalent dose values from seconds (s) to gray (Gy). Function version 0.6.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Aitken, M.J., 1985. Thermoluminescence dating. Academic Press. } \seealso{ \link{calc_SourceDoseRate} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr Michael Dietze, GFZ Potsdam (Germany)\cr Margret C. 
Fuchs, HZDR, Helmholtz-Institute Freiberg for Resource Technology (Germany) , RLum Developer Team} \keyword{manip} Luminescence/man/plot_DRTResults.Rd0000644000176200001440000001640613604173245017013 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot_DRTResults.R \name{plot_DRTResults} \alias{plot_DRTResults} \title{Visualise dose recovery test results} \usage{ plot_DRTResults( values, given.dose = NULL, error.range = 10, preheat, boxplot = FALSE, mtext, summary, summary.pos, legend, legend.pos, par.local = TRUE, na.rm = FALSE, ... ) } \arguments{ \item{values}{\linkS4class{RLum.Results} or \link{data.frame} (\strong{required}): input values containing at least De and De error. To plot more than one data set in one figure, a \code{list} of the individual data sets must be provided (e.g. \code{list(dataset.1, dataset.2)}).} \item{given.dose}{\link{numeric} (\emph{optional}): given dose used for the dose recovery test to normalise data. If only one given dose is provided this given dose is valid for all input data sets (i.e., \code{values} is a list). Otherwise a given dose for each input data set has to be provided (e.g., \code{given.dose = c(100,200)}). If \code{given.dose} is \code{NULL} the values are plotted without normalisation (might be useful for preheat plateau tests). \strong{Note:} Unit has to be the same as from the input values (e.g., Seconds or Gray).} \item{error.range}{\link{numeric}: symmetric error range in percent will be shown as dashed lines in the plot. Set \code{error.range} to 0 to avoid plotting of error ranges.} \item{preheat}{\link{numeric}: optional vector of preheat temperatures to be used for grouping the De values. If specified, the temperatures are assigned to the x-axis.} \item{boxplot}{\link{logical}: optionally plot values, that are grouped by preheat temperature as boxplots. 
Only possible when \code{preheat} vector is specified.} \item{mtext}{\link{character}: additional text below the plot title.} \item{summary}{\link{character} (\emph{optional}): adds numerical output to the plot. Can be one or more out of: \itemize{ \item \code{"n"} (number of samples), \item \code{"mean"} (mean De value), \item \code{"weighted$mean"} (error-weighted mean), \item \code{"median"} (median of the De values), \item \code{"sd.rel"} (relative standard deviation in percent), \item \code{"sd.abs"} (absolute standard deviation), \item \code{"se.rel"} (relative standard error) and \item \code{"se.abs"} (absolute standard error) } and all other measures returned by the function \link{calc_Statistics}.} \item{summary.pos}{\link{numeric} or \link{character} (\emph{with default}): optional position coordinates or keyword (e.g. \code{"topright"}) for the statistical summary. Alternatively, the keyword \code{"sub"} may be specified to place the summary below the plot header. However, this latter option is only possible if \code{mtext} is not used.} \item{legend}{\link{character} vector (\emph{optional}): legend content to be added to the plot.} \item{legend.pos}{\link{numeric} or \link{character} (\emph{with default}): optional position coordinates or keyword (e.g. \code{"topright"}) for the legend to be plotted.} \item{par.local}{\link{logical} (\emph{with default}): use local graphical parameters for plotting, e.g. the plot is shown in one column and one row. If \code{par.local = FALSE}, global parameters are inherited, i.e. parameters provided via \code{par()} work} \item{na.rm}{\link{logical}: indicating whether \code{NA} values are removed before plotting from the input data set} \item{...}{further arguments and graphical parameters passed to \link{plot}, supported are: \code{xlab}, \code{ylab}, \code{xlim}, \code{ylim}, \code{main}, \code{cex}, \code{las} and \code{pch}} } \value{ A plot is returned. 
} \description{ The function provides a standardised plot output for dose recovery test measurements. } \details{ Procedure to test the accuracy of a measurement protocol to reliably determine the dose of a specific sample. Here, the natural signal is erased and a known laboratory dose administered which is treated as unknown. Then the De measurement is carried out and the degree of congruence between administered and recovered dose is a measure of the protocol's accuracy for this sample.\cr In the plot the normalised De is shown on the y-axis, i.e. obtained De/Given Dose. } \note{ Further data and plot arguments can be added by using the appropriate R commands. } \section{Function version}{ 0.1.13 } \examples{ ## read example data set and misapply them for this plot type data(ExampleData.DeValues, envir = environment()) ## plot values plot_DRTResults(values = ExampleData.DeValues$BT998[7:11,], given.dose = 2800, mtext = "Example data") ## plot values with legend plot_DRTResults(values = ExampleData.DeValues$BT998[7:11,], given.dose = 2800, legend = "Test data set") ## create and plot two subsets with randomised values x.1 <- ExampleData.DeValues$BT998[7:11,] x.2 <- ExampleData.DeValues$BT998[7:11,] * c(runif(5, 0.9, 1.1), 1) plot_DRTResults(values = list(x.1, x.2), given.dose = 2800) ## some more user-defined plot parameters plot_DRTResults(values = list(x.1, x.2), given.dose = 2800, pch = c(2, 5), col = c("orange", "blue"), xlim = c(0, 8), ylim = c(0.85, 1.15), xlab = "Sample aliquot") ## plot the data with user-defined statistical measures as legend plot_DRTResults(values = list(x.1, x.2), given.dose = 2800, summary = c("n", "mean.weighted", "sd")) ## plot the data with user-defined statistical measures as sub-header plot_DRTResults(values = list(x.1, x.2), given.dose = 2800, summary = c("n", "mean.weighted", "sd"), summary.pos = "sub") ## plot the data grouped by preheat temperatures plot_DRTResults(values = ExampleData.DeValues$BT998[7:11,], given.dose = 2800, 
preheat = c(200, 200, 200, 240, 240)) ## read example data set and misapply them for this plot type data(ExampleData.DeValues, envir = environment()) ## plot values plot_DRTResults(values = ExampleData.DeValues$BT998[7:11,], given.dose = 2800, mtext = "Example data") ## plot two data sets grouped by preheat temperatures plot_DRTResults(values = list(x.1, x.2), given.dose = 2800, preheat = c(200, 200, 200, 240, 240)) ## plot the data grouped by preheat temperatures as boxplots plot_DRTResults(values = ExampleData.DeValues$BT998[7:11,], given.dose = 2800, preheat = c(200, 200, 200, 240, 240), boxplot = TRUE) } \section{How to cite}{ Kreutzer, S., Dietze, M., 2020. plot_DRTResults(): Visualise dose recovery test results. Function version 0.1.13. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Wintle, A.G., Murray, A.S., 2006. A review of quartz optically stimulated luminescence characteristics and their relevance in single-aliquot regeneration dating protocols. Radiation Measurements, 41, 369-391. } \seealso{ \link{plot} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060 - Université Bordeaux Montaigne (France)\cr Michael Dietze, GFZ Potsdam (Germany) , RLum Developer Team} \keyword{dplot} Luminescence/man/fit_ThermalQuenching.Rd0000644000176200001440000001254713604173244020043 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/fit_ThermalQuenching.R \name{fit_ThermalQuenching} \alias{fit_ThermalQuenching} \title{Fitting Thermal Quenching Data} \usage{ fit_ThermalQuenching( data, start_param = list(), method_control = list(), n.MC = 100, verbose = TRUE, plot = TRUE, ... ) } \arguments{ \item{data}{\link{data.frame} (\strong{required}): input data with three columns, the first column contains temperature values in deg. 
C, columns 2 and 3 the dependent values with their errors} \item{start_param}{\link{list} (optional): option to provide own start parameters for the fitting, see details} \item{method_control}{\link{list} (optional): further options to fine tune the fitting, see details for further information} \item{n.MC}{\link{numeric} (\emph{with default}): number of Monte Carlo runs for the error estimation. If \code{n.MC} is \code{NULL} or \verb{<=1}, the error estimation is skipped} \item{verbose}{\link{logical} (\emph{with default}): enables/disables terminal output} \item{plot}{\link{logical} (\emph{with default}): enables/disables plot output} \item{...}{further arguments that can be passed to control the plotting, supported are \code{main}, \code{pch}, \code{col_fit}, \code{col_points}, \code{lty}, \code{lwd}, \code{xlab}, \code{ylab}, \code{xlim}, \code{ylim}, \code{xaxt}} } \value{ The function returns numerical output and an (\emph{optional}) plot. -----------------------------------\cr \verb{[ NUMERICAL OUTPUT ]}\cr -----------------------------------\cr \strong{\code{RLum.Results}}-object \strong{slot:} \strong{\verb{@data}} \verb{[.. $data : data.frame]}\cr A table with all fitting parameters and the number of Monte Carlo runs used for the error estimation. \verb{[.. $fit : nls object]} \cr The nls \link[stats:nls]{stats::nls} object returned by the function \link[minpack.lm:nlsLM]{minpack.lm::nlsLM}. This object can be further passed to other functions supporting an nls object (cf. details section in \link[stats:nls]{stats::nls}) \strong{slot:} \strong{\verb{@info}} \verb{[.. $call : call]}\cr The original function call. -----------------------------------\cr \verb{[ GRAPHICAL OUTPUT ]}\cr -----------------------------------\cr Plotted are temperature against the signal and their uncertainties. The fit is shown as dashed-line (can be modified). Please note that for the fitting the absolute temperature values are used but are re-calculated to deg. C for the plot. 
} \description{ Applying a nls-fitting to thermal quenching data. } \details{ \strong{Used equation}\cr The equation used for the fitting is \deqn{y = (A / (1 + C * (exp(-W / (k * x))))) + c} \emph{W} is the energy depth in eV and \emph{C} is a dimensionless constant. \emph{A} and \emph{c} are used to adjust the curve for the given signal. \emph{k} is the Boltzmann constant in eV/K and \emph{x} is the absolute temperature in K. \strong{Error estimation}\cr The error estimation is done by varying the input parameters using the given uncertainties in a Monte Carlo simulation. Errors are assumed to follow a normal distribution. \strong{start_param} \cr The function allows the injection of own start parameters via the argument \code{start_param}. The parameters need to be provided as a named list. The names are the parameters to be optimised. Examples: \code{start_param = list(A = 1, C = 1e+5, W = 0.5, c = 0)} \strong{method_control} \cr The following arguments can be provided via \code{method_control}. Please note that arguments provided via \code{method_control} are not further tested, i.e., if the function crashes your input was probably wrong. \tabular{lll}{ \strong{ARGUMENT} \tab \strong{TYPE} \tab \strong{DESCRIPTION}\cr \code{upper} \tab named \link{vector} \tab sets upper fitting boundaries, if provided boundaries for all arguments are required, e.g., \code{c(A = 0, C = 0, W = 0, c = 0)} \cr \code{lower} \tab named \link{vector} \tab sets lower fitting boundaries (see \code{upper} for details) \cr \code{trace} \tab \link{logical} \tab enables/disables progression trace for \link[minpack.lm:nlsLM]{minpack.lm::nlsLM}\cr \code{weights} \tab \link{numeric} \tab option to provide own weights for the fitting, the length of this vector needs to be equal to the number of rows of the input \code{data.frame}. If set to \code{NULL} no weights are applied. The weights are defined by the third column of the input \code{data.frame}. 
} } \section{Function version}{ 0.1.0 } \examples{ ##create short example dataset data <- data.frame( T = c(25, 40, 50, 60, 70, 80, 90, 100, 110), V = c(0.06, 0.058, 0.052, 0.051, 0.041, 0.034, 0.035, 0.033, 0.032), V_X = c(0.012, 0.009, 0.008, 0.008, 0.007, 0.006, 0.005, 0.005, 0.004)) ##fit fit_ThermalQuenching( data = data, n.MC = NULL) } \section{How to cite}{ Kreutzer, S., 2020. fit_ThermalQuenching(): Fitting Thermal Quenching Data. Function version 0.1.0. In: Kreutzer, S., Burow, C., Dietze, M., Fuchs, M.C., Schmidt, C., Fischer, M., Friedrich, J., 2020. Luminescence: Comprehensive Luminescence Dating Data Analysis. R package version 0.9.7. https://CRAN.R-project.org/package=Luminescence } \references{ Wintle, A.G., 1975. Thermal Quenching of Thermoluminescence in Quartz. Geophys. J. R. astr. Soc. 41, 107–113. } \seealso{ \link[minpack.lm:nlsLM]{minpack.lm::nlsLM} } \author{ Sebastian Kreutzer, IRAMAT-CRP2A, UMR5060, CNRS - Université Bordeaux Montaigne (Frange) , RLum Developer Team} Luminescence/DESCRIPTION0000644000176200001440000001610213605457601014401 0ustar liggesusersPackage: Luminescence Type: Package Title: Comprehensive Luminescence Dating Data Analysis Version: 0.9.7 Date: 2020-01-04 Author: Sebastian Kreutzer [aut, trl, cre, dtc] (), Christoph Burow [aut, trl, dtc] (), Michael Dietze [aut] (), Margret C. Fuchs [aut], Christoph Schmidt [aut] (), Manfred Fischer [aut, trl], Johannes Friedrich [aut] (), Norbert Mercier [ctb] (), Rachel K. Smedley [ctb] (), Claire Christophe [ctb], Antoine Zink [ctb] (), Julie Durcan [ctb] (), Georgina E. 
King [ctb, dtc] (), Anne Philippe [ctb], Guillaume Guerin [ctb] (), Svenja Riedesel [ctb] (), Martin Autzen [ctb], Pierre Guibert [ctb] (), Markus Fuchs [ths] Authors@R: c( person("Sebastian", "Kreutzer", role = c("aut", "trl", "cre", "dtc"), email = "sebastian.kreutzer@u-bordeaux-montaigne.fr", comment = c(ORCID = "0000-0002-0734-2199")), person("Christoph", "Burow", role = c("aut", "trl", "dtc"), comment = c(ORCID = "0000-0002-5023-4046")), person("Michael", "Dietze", role = c("aut"), comment = c(ORCID = "0000-0001-6063-1726")), person("Margret C.", "Fuchs", role = c("aut")), person("Christoph", "Schmidt", role = c("aut"), comment = c(ORCID = "0000-0002-2309-3209")), person("Manfred", "Fischer", role = c("aut", "trl")), person("Johannes", "Friedrich", role = c("aut"), comment = c(ORCID = "0000-0002-0805-9547")), person("Norbert", "Mercier", role = c("ctb"), comment = c(ORCID = "0000-0002-6375-9108")), person("Rachel K.", "Smedley", role = c("ctb"), comment = c(ORCID = "0000-0001-7773-5193")), person("Claire", "Christophe", role = c("ctb")), person("Antoine", "Zink", rol = c("ctb"), comment = c(ORCID = "0000-0001-7146-1101")), person("Julie", "Durcan", role = c("ctb"), comment = c(ORCID = "0000-0001-8724-8022")), person("Georgina E.", "King", role = c("ctb", "dtc"), comment = c(ORCID = "0000-0003-1059-8192")), person("Anne", "Philippe", role = c("ctb")), person("Guillaume", "Guerin", role = c("ctb"), comment = c(ORCID = "0000-0001-6298-5579")), person("Svenja", "Riedesel", role = c("ctb"), comment = c(ORCID = "0000-0003-2936-8776")), person("Martin", "Autzen", role = c("ctb")), person("Pierre", "Guibert", role = c("ctb"), comment = c(ORCID = "0000-0001-8969-8684")), person("Markus", "Fuchs", role = c("ths"))) Maintainer: Sebastian Kreutzer Description: A collection of various R functions for the purpose of Luminescence dating data analysis. 
This includes, amongst others, data import, export, application of age models, curve deconvolution, sequence analysis and plotting of equivalent dose distributions. Contact: Package Developers License: GPL-3 BugReports: https://github.com/R-Lum/Luminescence/issues Depends: R (>= 3.4.0), utils, magrittr (>= 1.5) LinkingTo: Rcpp (>= 1.0.0), RcppArmadillo (>= 0.9.300.0.0) Imports: bbmle (>= 1.0.20), data.table (>= 1.12.0), DEoptim (>= 2.2-4), httr (>= 1.4.0), matrixStats (>= 0.54.0), methods, minpack.lm (>= 1.2), plotrix (>= 3.7), raster (>= 2.8-0), readxl (>= 1.3.0), shape (>= 1.4.3), parallel, XML (>= 3.98-1.9), zoo (>= 1.8) Suggests: RLumShiny (>= 0.2.2), plotly (>= 4.9.0), rmarkdown (>= 1.12), rstudioapi (>= 0.7), rjags (>= 4-8), coda (>= 0.19-1), pander (>= 0.6.1), testthat (>= 2.0.0), devtools (>= 2.0.0), R.rsp (>= 0.43.0) VignetteBuilder: R.rsp URL: https://CRAN.R-project.org/package=Luminescence Encoding: UTF-8 Collate: 'Analyse_SAR.OSLdata.R' 'CW2pHMi.R' 'CW2pLM.R' 'CW2pLMi.R' 'CW2pPMi.R' 'Luminescence-package.R' 'PSL2Risoe.BINfileData.R' 'RcppExports.R' 'replicate_RLum.R' 'RLum-class.R' 'smooth_RLum.R' 'names_RLum.R' 'structure_RLum.R' 'length_RLum.R' 'set_RLum.R' 'get_RLum.R' 'RLum.Analysis-class.R' 'RLum.Data-class.R' 'bin_RLum.Data.R' 'RLum.Data.Curve-class.R' 'RLum.Data.Image-class.R' 'RLum.Data.Spectrum-class.R' 'RLum.Results-class.R' 'set_Risoe.BINfileData.R' 'get_Risoe.BINfileData.R' 'Risoe.BINfileData-class.R' 'Risoe.BINfileData2RLum.Analysis.R' 'Risoe.BINfileData2RLum.Data.Curve.R' 'Second2Gray.R' 'addins_RLum.R' 'analyse_Al2O3C_CrossTalk.R' 'analyse_Al2O3C_ITC.R' 'analyse_Al2O3C_Measurement.R' 'analyse_FadingMeasurement.R' 'analyse_IRSAR.RF.R' 'analyse_SAR.CWOSL.R' 'analyse_SAR.TL.R' 'analyse_baSAR.R' 'analyse_pIRIRSequence.R' 'analyse_portableOSL.R' 'app_RLum.R' 'apply_CosmicRayRemoval.R' 'apply_EfficiencyCorrection.R' 'calc_AliquotSize.R' 'calc_AverageDose.R' 'calc_CentralDose.R' 'calc_CommonDose.R' 'calc_CosmicDoseRate.R' 'calc_FadingCorr.R' 
'calc_FastRatio.R' 'calc_FiniteMixture.R' 'calc_FuchsLang2001.R' 'calc_HomogeneityTest.R' 'calc_Huntley2006.R' 'calc_IEU.R' 'calc_Kars2008.R' 'calc_Lamothe2003.R' 'calc_MaxDose.R' 'calc_MinDose.R' 'calc_OSLLxTxRatio.R' 'calc_SourceDoseRate.R' 'calc_Statistics.R' 'calc_TLLxTxRatio.R' 'calc_ThermalLifetime.R' 'calc_WodaFuchs2008.R' 'calc_gSGC.R' 'convert_Activity2Concentration.R' 'convert_BIN2CSV.R' 'convert_Daybreak2CSV.R' 'convert_PSL2CSV.R' 'convert_RLum2Risoe.BINfileData.R' 'convert_Wavelength2Energy.R' 'convert_XSYG2CSV.R' 'extract_IrradiationTimes.R' 'fit_CWCurve.R' 'fit_LMCurve.R' 'fit_OSLLifeTimes.R' 'fit_SurfaceExposure.R' 'fit_ThermalQuenching.R' 'get_Layout.R' 'get_Quote.R' 'get_rightAnswer.R' 'github.R' 'install_DevelopmentVersion.R' 'internal_as.latex.table.R' 'internals_RLum.R' 'merge_RLum.Analysis.R' 'merge_RLum.Data.Curve.R' 'merge_RLum.R' 'merge_RLum.Results.R' 'merge_Risoe.BINfileData.R' 'methods_DRAC.R' 'methods_RLum.R' 'plot_AbanicoPlot.R' 'plot_DRCSummary.R' 'plot_DRTResults.R' 'plot_DetPlot.R' 'plot_FilterCombinations.R' 'plot_GrowthCurve.R' 'plot_Histogram.R' 'plot_KDE.R' 'plot_NRt.R' 'plot_RLum.Analysis.R' 'plot_RLum.Data.Curve.R' 'plot_RLum.Data.Image.R' 'plot_RLum.Data.Spectrum.R' 'plot_RLum.R' 'plot_RLum.Results.R' 'plot_RadialPlot.R' 'plot_Risoe.BINfileData.R' 'plot_ViolinPlot.R' 'read_BIN2R.R' 'read_Daybreak2R.R' 'read_PSL2R.R' 'read_SPE2R.R' 'read_XSYG2R.R' 'report_RLum.R' 'scale_GammaDose.R' 'template_DRAC.R' 'tune_Data.R' 'use_DRAC.R' 'utils_DRAC.R' 'verify_SingleGrainData.R' 'write_R2BIN.R' 'write_RLum2CSV.R' 'zzz.R' RoxygenNote: 7.0.2 NeedsCompilation: yes Packaged: 2020-01-04 20:34:45 UTC; kreutzer Repository: CRAN Date/Publication: 2020-01-08 23:02:25 UTC Luminescence/build/0000755000176200001440000000000013604173345013771 5ustar liggesusersLuminescence/build/vignette.rds0000644000176200001440000000053413604173345016332 0ustar liggesusersSAO0.0HLIML1제 a8x#y~*|g#Neq\hNpѱllhvRQbGD1]ۣwp B{k.A/r~==|C{|>ocp:}]2> }#Kj,aI?3p=]WJn܇\}֫ 
(ԈCf=Qfj"lΉ "}}}PLuminescence/build/partial.rdb0000644000176200001440000006616113604173344016127 0ustar liggesusersi{W&Q ˒QAZEQLed:I]ꮭ:uʙgzy5S_ft ,5ثkfeplV=f,>(g:dI/K(Of>{Q^?zM n ԾTo+,٥鸙י!w:PuᡑڻzOL+Wf'=dkV04^n /2ӵU1ݼwflT2~K}3١t/:V mVc~JE?Ovk'?w8Cs)7'%pͰd|lV<ó *,!SFl17 ;&wDlslCt2{ ?b;!T\Xڛ~3ykC^`dljuO(Yw d|Q2?!M9\0خ;o^f7Ax6f7Axԟ& L35/Z^G iQsO(&0Tg,F'،ߐpFu NkY^Y &!'o,R!Icu69=b3*E,o&Ȓ 3m.u({QZs-WZ8 yT̕ʋatxj|^،YP+lVQD.>\8QX|Gb'/F@Oi7}MG;ҡiST;;1{ - < FWۅf dhfC.<pg:jqviloA *TBQG\uzadFЊũk /hc rwxל"&mQ!8QQqZ)7J*qWX' K7l ^ {{$WR~mX}^OE M9 ,@.('j'0 /h#3}')b2gL*LT&R6]d-48?pm82dklnf<;4-N8y\9Ak y?IU|d82J%{y~ޘ ⭠ݑ~7$d`2Kv5$esvP1kk?(1=?MY-1mAԡUޘrKO%TzwpG6ۥJb~@u֢G1Zb5鲢^Dw!V~Vk$* FzY Oʸ˚U/Lx꿸k?e:HŚ'Jn`Muߩ*Pb,4}Nƃܪ]E뵖p? #G4Fk3s[ϡGV)qpd.BW5ܨFI~ENT%7/I1rXvBdo0miwUK@whPsq~r^ԾfS确G WFCʨqBZe)B +)6k3~x0w풝bsf镑bO ۏe+0a'v&׫ ۮɲg GC{b21|2~Z3?_ NjzԳNUЁ(C~2`#)6rO鍛i&Hh] zw`77gVg~T+ySHl+v轅m%{{R## }ۍߺW<ΌOdgcxH(*l1s~2—4(lqg4;\Fe<GY\(+z^dwӶ{wۓU.5RŨ jo#$QZsvЬ,ʳDH,*/ۛsK[s nV٥#Kl]To=L[m\1xuxGci?Fqr QTmgZ2Kn =+p16Ѫ@I$[ c: PwH mQJ.hx"⑬ʔnn %B<|D؊]1k]HYQZG캁!l:09(dZGށ|Gúss19d5CӶܒks8*]cצ\ w|_ػpM_W0_:nͳJ9\-nΠfφ/e VZWvWȹE7la]Re3 p6X8ھVh>x6g]h&<2X[y]glzE&^o#1+k{I{ռ?=%oFoXg&w-+SlHgYgCF؉-]a}lw}&Ū2[?/,Tw\ϱ7SL2攚tSj,oJsJpeu6MJ!ElQM Qsޮ9+mWMǽ&ږg lfc-U &_71 /hxTYE{%RJiM\,|ͫt9*k-J _5랽AbnbڲR؛콛h^h╅Oj͕}ц k9]o5j-#Rޜg,%-wӞpƾ-V7TP7&;JhްnF믝ܔk,onVX*eegnCЌͮ\4P1~7 DG+1lX'*;o &ZQw1$^ˢ4bÝ@Dߍ~_QLdEp2In&TdͰt3npL ;w+&='Y)7r} n^$hh'\w s7iاys.z57o~7߄oBI.QȣҸ%#8X=&3G+2ݥoz($F8yL6 DvqPcH &߱fQW Z Mw<%~D,-gW|KfBJ!krx>caě`+~ |xpH?S:rxYςph*>H1arNK}:ҭbs3H%4ie{=cX&Oۼ\ Rp1o-{Y3dc%=7W&%n9qnC#{EW[=Οz.7_ gmU/Ǿ $^|:&}*TH]Xx$hhZW0LgSotq%ȗo%R!JTGX\JOd7Tm&R!MfJG7p`#G4.x~ot&x$MwMĞO'~G1[ŗ0Ebn@E~EK ($NIܯL cۂ[2Tg!Q&hŷiն蘼1Op8?.Ӛ-`a1S)܀^4K>ө_I5n:iWd}Q(/GAcĎ0FYI4Aub3TQx2#3U^KGH+: O#Hv"˪th x$t$hNARo"jIIm}L#^Ƚ.$2:x4c'R{%x @8뫛*~8Y}cTBP27|`U5rJȵܚmZ}Aԙrذ]4bsU/,HN 'dH0:w+/A28fW+HU5פ&{UR@7.m;k6 Y}X2;$;OE> lDjȿXvZn>?[mi'Fؾ Y*҉񮘛H{S׹-Y뱁~/Y;=|2þ%ZyPSi-UEwWaJ1@ܠPRqíBpjӐoI}C<F!oATpXv&$ca#( dY#Rwx ,xB=͟ +KF$!Fׁ#  d(dgpǓކ|[Wf޳qa01&-eK146hb>޶ݯNz^.OY4-\]L7ZJ>q_(< RVͨ}bNm$_hsx 
n!vFDؚVy4 ?;$7jGÎhɵ0.%56=HOбIY2--h{Gf]ބ,Xw4} ,bY`_)Zbc2\7gL ed\:B R{" VOA>rNOC>l;i6oʯ%Aa6>P/>&;[\>=q4konW0hK:zu`?ַ<2p/b$2`8Yjy,ˆ%MC~4߷:B݆؀?~}WɅsd=e_AJ(SD!畉<*ASF{,*cF1v4؋tBޠnAb`d<SOի)f0A&iܦKM= Fl_saL_|L,cVFX+l9ʲuVQV6JQ)NC],d*U,| 'H'i7 օ'Jj:g!Kh (8V, I\2c09Vh 8~mI#ǐ?d?nd <>#pLnqԒx䘁zḂmA1R!X0ڿeda=՛E4 =%ȁ[ھӸc_`"d OB>)+E xbD֘d7 h}Dz7!/<]ItR0 K ːY璮͖ ajI崆SQ`՛f"!l*Qtk^,̌еlr:d `bX C|?!vADؚHq0$=S*YRbt-nú[Rw {>l}#m[RuoݭĐ;<Y8hZ,WtĨ3Iy`z4;xlUk^pD?oeև琥B. AZAYtu_CZ_?Ubzs/55z.@ yEڬBs/&ǐt$2G iow lҬQυuY%l'0p/}7%e^sK)8 ,u(X<95‘'CEI$ 0 9NY4 *? ;>H9d %p>2YZX+א/u Ydw3WCtW KB&)W!nCH׀!g77x"l@>z 4M$,IGcfi/oJx"5?Xbfz9UL O4C>ܺeysH=hJ\=h$}/c)ed)SCYrӫ9`C C//]k$cT“ aEh&F-: q'HFGѾ3wb_^|"8ģ==~:3fO0KCN7 Erx2pX )EqWIkd FdylB.^iQY2io )g泘5WIڕA0Jd0 ڮև>"V2eDXikOipۡs|ΗmeSK24/F0+~,:gIo7Ei0rOdV2юϐ!bN2#*g _h}GN/Blio%4 k{Yi3M]+JpNjv 0NJJ B9E+m] DXpR\uljn3;4101NNO>y!;/Ii{;GeUd+ɻS 8YjBLĜ)% k[E'8n?BD,nۺ 0#Q0It^)Xt؝hvRw_Um\NpM۟LDq.k٢hs^ j ŝJ#ԓO5DAڑhh\LvB s9=Ƽ{sd js~AJO w/H%%+X%+XpK.?ǡZݩJH["ģ$, .?}MX,_s^5*yܝL\+vpɋCV zݬ'l { Ðhʀ=`X $/ J'_Os*EG1WQD[d1%㮹etnn qbc %r:?b%s][fs3yFcjUYf١ /ZU>v̚.WQJ`-4-#4cm%`иig"Hj=U:V"l"Tk拟 r`UiS,x 1'__nFW3Oi@D$=PRS0jVSyV&M;3Qn=_s58PCvspu6ɁPj7.P~('~$%ȗ4)cn+sMgd؁ Ցp~SĿX>,ֹzPdaLqf~]K"ԸOɣCTGg2AJ.h,5Դ>0?M WWbܢ]+TM6/F^Kȍ+0It -4ؒoE# ?ͮM+a^V9AgO<{eQ_ ,d֐4hPO&&/# *Yc[-ߢ~lǾB^iMb#Qqt(a\nW@p*TGw‘w [K}MSb~rPm~@8C<9^9Uxb[oaKM6s Vfn  ?!MȜvCƘ$:,RwYl,Gesf2F{JxMeH\LnG!:+5먧_D̿pJ LSʂEky60>cJ.= ? l,26R~dĔb4W"s h,sL~ mOrAB>9w%<Y_[O| t1A..m*aM%ug oЦ$p6O D6d}6nYZcǶU%&ǿڳzZՅ9-+*^!j4lZ`Z,Y4JEkzUL~lZ4.@^PNX hܒpKpM=z)cki]ʶZi5Sȟ*n Tp [\ -ڥBʏV B-*  alihuo`z'M4[Z p9$⑬Mϑ_;W|_KK. 
K5 T4qC?=%<;5q۔ 7ׅ +M@j݄=رtbJGG1[lh30F;$'å,8`DW- ЮDMW*a?9X,)oGzzZ@S#(z/c.\YkKMռQz!nϿAV,n߅TVw5>~ ~O+u0JwwBo# "T㢠+ oyWa4=!KM` )M>ݱ-N``mQb?1\ze^E6-0FQQc؄/޷L)6jK"}0/rEvcSQ=)ӵ2|ϛ9t,hVg~8Km=˪FlrooYh&3U.Kr !?ZK {LGM "lSG&EeΕwK -gش/?üc-%;,zsf镑bO ۏ"&ף?D5ɲ'\ 0Zn){ g8 gw6gl+K:ЖߠT-}Qe8ASB>DM;_t2r, ȍ~%H&|z;7gVg8n)\ŚU*<Ƴ;::-,{ Jv t~?pz?=020YR;H_qqӹDv6&瀷!M#qpS|;i3WuleQ zth'>r%fGmnŽ޻I*wӒmظpv1jJ-WxэͷqbNF֜]Z%q=ʳDУ,*/ҒkçۼUv[d࿴c*VۣݹbR!AnYv+~QΜdf m~؊Co+p샘ґ-$41]4ށ dI?J phB:8#YS)@x45ܧ'a"\@xИJ'f]xa3єz07k% (!nsi'p/Zxs%~6g.KMeg&OSLO9ǞV`㟱{S0*rd׍3q 05atN,UaQ@d}g:v挚WGOB>@Oqvܦb٬牰x*ڊ1wB>~cˀ&ѿd'ݮyb[q~kg+,٥_f^gܑ@uFWkj?[f3Y\]x( fFË­BΔJ&~@:2O&rY+C1CV߿I9o RC mq[Km6URt ;8ֈ2JY3w w^E3E s]f% pnԶ= &G KuڃS m>P 9M؅LwvXfp}Ԣ_4HʶD~ cs_NCnNg8na}w JazFY2dF%`(|&! cRw Y=J3Hu{qv0Y.>?3C*-lT#s= ]^aq"~fE38 yV9QL$TéB֎2&v$tNŶv]AȃofHsoJ4iv6iu;ngkGߞ֎ ڑp[[;"0ժml..mlsj#CnSkǐ;@DS{;Z<"u1-%Wșmo-{B-N@P&2դ^Z=t)伿ޠi!b[K;#0ԏq4.#"lg'ϣ];o]G:VoOGln~6zD`kkKw5t5&lcwSpZ$r ִzʂiĮxQ-hD AfoHq%ȗ#=Z:ʮ% Vׁu!j1Fe hp2/ 2YJMgvys89h g^"tVrvz/~ K4 y^nVu; kz9ѨT@Y!8n!-lݎm"ȓ+&GI 6r'b*=a $FcW[Ƒk{H}C<}yOMt2&W([ג6G')Z`p$`6,JP, ug lGA@~Lḓyk)XF75&CVwlD =IH.dYӕ´2μq;Ԉ}C5zU}G?R6nCԯp2*kKrKhs{Ð=HnGmr%^ItJF{ /X:a~TJ$d[er 8y=fރ|OC-GL'!O*/p V:Azܥ/|bIa!> s1cې3@nQZ={@\͗j3e]t"x8 Y}C h,Ql 4Fx'3?_f!Ŭ)\FhA#!m]~`kVӽlp0a|_k =qja(uE4fi "ȌVvC:)`;nFd/[yahλ ^l`|Fi6HY9rg  F=4F lDK 3fՁ "_o @Yms8p;oo ϩ:pԼX @.o@-MDL7XowklN'ΉvYeȗ[I&uqGW _QP/u^r,Ep2- *bD*sȟ+?bU fpy݆S|LN\0b,U]y{xco4Dýp:XwsYxMc9ak½鴨w$p'p}aމB7j_9]47 Jlѯ GӀD3CoZϦ'>wJx*Q?*n?GY&]53;4,>m9ss:ȒEӠvzs`rQmvr_4Ё(䶭ș*_gpK&!v{k߁;ϒܱK?|k_2#?^Wש왫^ڮ/!ub<{; ;d`IQ0;\t. 4zYިlLR)\AI4(/QEu kL>R}x]H~qrd>@# Y7o?qBl}L #s|<1*֮g|a[{)DxOpG[cF}ɤ>mSH$x4~ދl;P(?C<*F\AY4UVp|l|8AxI+ ^"N/B^d' f}E} \M~ߝoCl5+Z(>d>^ͩav%&'Pu'`U3O@u-(D+ ~A58!]#ӳMN hb]c[ 4 o!T[0L"'8#iDg,h? 
ܵ=pC<:ɡs kh\d2C<*ɹԓnB.L!͈+x4{\މQ}j x49gg:} N!ȇ4bWIPmcQQ$GG!Kn7I1 riɅSm&3D{:edU=OZSSjryؗP$+Kvߌސș4p )X!A!9ģ|e~5D.b[Lj\&Ѵ.YgVzI9E_z!oY5`?x%1 xCo(g™MҨl]MOպOsߺyh*'1iݲ+2pC ޞc9g\%'!w܄k.R!M)0RC+֒3ː5l[J;U`/d!88;ėp8 wLjW\&sׁ ֐ү֐o;jn}-jCg@pwu˦@%X~}alGw!UaYx2'7{X %R|N7U3(<_j6.N9Me;2*:ܙ~MؖStW> h;,ƑԺW ;Xփb L?{~``y 6+ŔDrlb['RZԧJ#sG1W>PU۳v%v5+)6wy8Ix`rz-2Y[Z$ 6Yd>κ-qx&r>Hnw >=yv/ٔH8YnWV||y{,2dEImE1%1QweN  Ydfr*6?}-ŮQc#+'&uW!O? ;0M8m~-_2חc4  XpU2l 7Yq4MzA2.g^.b4^Et8EtѠumJ\x.d)ir=SẈ:Pal `h&/BGxcX[KːŋUE, d}5W!ׁžJqM766rMj6'Z3_xtc nMrUb2Ǘ{kLH.<2hsA& 93/k%P'7xv묬 -29Gޡ Vj;[2HzCp!;NԯtV ހ,5T\ l1|YWwC9轩bfgv26+pSy|,f|/yǨ4>pc?/Yn0DNlN_A:v㮠;},kɻ`-MnioP𥸏 tGG}Y_0h~pamɠ1%i.t~:xDEк_D[ГR1SH['o%jpMW;B=0{B \[Nm}U'~+cF>97N ; YM\ 7^tj_KJfe+·{Rk[S;[ao/M~$cJ_G:5:EAwzz%py؛ڐiM؆Xn*ٳYlD!woCtG'v~+^ wB޹ 9;ɮG79sh|C!Qwon H arv2oBwz2 'Y)oN.5>rП$ |Y}V>؞9Mmn LD^~;b,A)N>at|N^'6S!0j({H W pekڪx e;Im#BH>Aq?j%y9 h_,;vRhT^~xAp}#Է+ -/]6Mϟtp"ܥ>J0:~]uZ1?vKtN.%S? (ru ;=p& &?"cgNqV?tG]5V ?GHh;YO?@8b~_.(:R䅯\cYk C>LqNQ<s&l* g2V0_p ,B>(èֽ]{%vZB<$i;yt~4U/`ن VSt0sOr ǥhWȂcհ>Whp1oE+ߘJa.qR7QYK4NqGl;1kkIXtL;!}s!bCӬqr6ͦ*l{5^I _oSlƗED`._,uke+_LGLte4XER׍OME2,H,mNLULf";8:6iYcF9dP&Nį/WoVNNoP#lq3GFDV7VoAQҘ9bi*q6uhŊ,ES # B8¬2m>pt(Y/kw͊6}f_Ѝeq"֏YK]64b#7n ՗vDȹ9s<ȘSHUy5Txg%wtt4[X 6/ ndFoL޾?82y\Ltn2;3>Ih}‡7eT[;S\LXߑa],}e[f5K>N[3 Awz=[aw w6za5x$WxͷXmk5YO* ەqꝠVk Kcyґ%6ȡ / fGljc4պ;W ^jqE8$;ܶ-FR_Tk3g-% ?lơ7ەJ8A_DY=߇zq%Z}׍I ޳M:FY ޞii6m,ڎΚhPߕSKFpˤ {e8]sY w߂ߙrhfH)cн\lP uCh L !K?u1 49ܭFگcҸ ӿC{ Emd"9֗eX SӬAdҟHLz)dƨL ukfp-RU< Sk]Mvc'|fuY-鿬[7R/߉j؊;')wX|{6, 9Љ7!:*%P~HKF6aqB}'_Eg gQ/F^_ҐKY mpܱM͓ {+NyW>=~ c H4p6ph 5^LxTn``|;g!݆CCB@eqI fo@V\ބ|S>;dsC<$X}|A+5RwR`!MQr0*-PgP-xޥJ}Q̕szQ^Leg&OSi :x&g;Өj>$ r*%8=~C?@В'R),Ua!Z1|B[5h;1)> d?šfmB'o]6+c"G?Dm sm/c1wB>~cd] d Km6 3|&!G8#G6_Hv"!Dc^#]=u~צ2UzexfdRo'3k3CHuC#+^w{ab-W3W.NoW[CHIzkuhocS19++A>D~W\aOJ&45;[Gt M.Lp^1HhwfY͘LZ  ͘y(70M0ȬF"8@Y*jݓ3Jl`k, YC:{D9ģJw&˖B0W5clU棣.B Q?S#<ѐ啚0}oHR?!ICv)jQ̖b}֐lyt^tp7`|zNM >*TpbL@5`lO =x$}j*`C<.tОБ퇗NYn1`'V+P 9`d0!Giiem#`?~mb% ƈ1fB !oY' K5MͲlx%<BmI@~nH',6u̗51qb 
yQQ:s4NT09lK~g$~(h2:fAx+'4OM-˅T-73*L܁<6Ro" 7954&ʦ.ۥZB4J+ƚ< F[ 7xk[@ppnj,֪U#},?U⧃/KnZ0 !-w,,H퇐?lOr4 ;@[a_QwC]qѦ#A,7&\!uG Svp^|A[r0`.h]Ch\-ƨKdwM?_"* R\cDE?OESɭ^K|+T>\ɥgE'Ov`õ&;tg}3Jվд]cH3Z߮éAεA#9ģ)s,hֻ߇dNShLo;6]5{93Wx \/EU +:l*(^l>;(Oe<&װy;}$'B5_:JTGG1W>0XyKFLv÷̌@X!?M|3%}h@`EHۊulE[>KnB~~@/i~#e[O)#ȏYI~.|? KFA 9WfP ,A.* *u0QȎz5DLh; ˷6М}@nN}|H Y=|>(cyY-6:v\.RtpmpeYѸ MHPC gk%.6j%f-5槹M/?N8oH8M ^!Tji-r:Gi`RLfjdA8(B.eޱ1\E3բthm? H*n]֢a4QfYbjF{ v[~/˚F'p",Md hCPTxn.rfIV$j0W0UU(G:bٴh{ۮkgŢQE'8 [,ڑб Kỡ:Bn(j\h-|3-c+YX0e/Y%޸l3:5F e%<~ae@zZ4+#J r{)4{ȣ*M޺+Y"%!b]꽨zs'ǯ穑0d9~;&% 7!6n> %2槽~;&meAC]-650H?lS¸KВhd'l(aGM!c+P)%Qq/r7ί]!I:O:,i~noPJM A@Nm2iѮ!+"KVνʣs_pBmsPO$?=.N9 V-%(a/k*pv-w>L ;ۇ'^v1k{ig'8d@F;݇69]"'9ģ){{Uj6}{FicjT7jܠ b{V#ejp{ @j偾44 S :߭R20Ye3B7B`_RlyA|.^&aLr?ڥe? MatBl5[,sW{b9rWG4?28< +yNRlA7ijшDyJMnX a}7,5_bs!CW}+#h'`Ud!Nd ; tGe=CabRtSǫ,/e0ԠCOδVj'e j2ϲD=5BmBwo] HRG;X U,,%XhOCta^Yp!< 2#k"õ90!<2_9)'+~6@iB}@?B}W*La#!DaxmzbZ]Zq$[-YЇF*ʯd* DoxOX NV)?Ⱦ/]C0k`žZy0#7!^3|zq5FBgX|?`uM ,!Kt8<kvԪx K-.N!B}UةzET_K`Dvn1k*B9K<;BH}9v- u_n8Ot&kp'dI/gVKs|t-Szct~qRU9D ʙm~\6h{GNiָR*\DFF*j~W*&|eO_~P]郹|l/ҷ,8VbQwE\yen74!6sϲa,iV;Ld}UǦ#"Ҷ7?/j2JG;Ww_%js¡P{Ar>):[\ϱ#Rۄ+lR!v+ȁ+˵]_PVC9knմ*^?c'7,K7CE5+^xHɷH+1sf9슕_?-El 6 p<6>fl /4rϲ_knޮ##T#n~zsh@xp AO :F:t͘m9HB;Ai)κ8ֽ2cAfټ`IEM'N^:M1jŠ6rB[Yׇ}r]dO~z&uW-(Y$y0D6~؊{PO{50:0QYNy{w ~}qQKf9F4oJXr@DIÈw*RDؕo o u]s*=v xWwyn:ȏt`~#2PJ߇)_PbkpdXǶMO:s{" ՟DPutC1jE7T9n,BS"EaMgO&qi:z=CGg/|D{?l%a  r@tKjXPh B:8#YcVD8;!M yMӱp"Vp5E\ɪ1wdtD.\vCf}t(JV/p QR[@ƉQ3>7}28bOglFM<α>b?g;Өrd7\%7`j|/~"Ũ&]JKWϛhMCx QcHI'_H)jVAwL Dؔ,+nzay 7?! 
h#om$;uv]\4c$SuWgf^Nf.0:3T;<42^{_/Vr53zģGl0s04^n /24HlT2ƥ#TE;>u3+'ãhu48;4A(l4fLí9f٬x/^Nesq]֌+wLuminescence/tests/0000755000176200001440000000000013604173345014034 5ustar liggesusersLuminescence/tests/testthat/0000755000176200001440000000000013605457601015675 5ustar liggesusersLuminescence/tests/testthat/test_calc_Statistics.R0000644000176200001440000000630413231137116022165 0ustar liggesuserscontext("calc_Statistics") ## load example data data(ExampleData.DeValues, envir = environment()) ## calculate statistics and show output set.seed(1) temp <- calc_Statistics(ExampleData.DeValues$BT998, n.MCM = 1000) temp_alt1 <- calc_Statistics(ExampleData.DeValues$BT998, n.MCM = 1000, digits = 2) temp_alt2 <- calc_Statistics(ExampleData.DeValues$BT998, n.MCM = 1000, digits = NULL) temp_RLum <- set_RLum(class = "RLum.Results", data = list(data = ExampleData.DeValues$BT998)) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(temp), c("list", "vector")) expect_equal(length(temp), 3) }) test_that("Test certain input scenarios", { expect_is(calc_Statistics(temp_RLum), "list") df <- ExampleData.DeValues$BT998 df[,2] <- 0 expect_warning(calc_Statistics(df)) df <- ExampleData.DeValues$BT998 expect_silent(calc_Statistics(df, weight.calc = "reciprocal")) }) test_that("check error messages", { testthat::skip_on_cran() df <- ExampleData.DeValues$BT998 expect_error(calc_Statistics(data = matrix(0,2)), regexp = "[calc_Statistics()] Input data is neither of type 'data.frame' nor 'RLum.Results'", fixed = TRUE) expect_error(calc_Statistics(data = df, weight.calc = "test")) }) test_that("check weighted values from output", { testthat::skip_on_cran() expect_equal(temp$weighted$n, 25) expect_equal(sum(unlist(temp_alt1)),24535.72) expect_equal(sum(unlist(temp_alt2)),24534.1) expect_equal(round(temp$weighted$mean, digits = 3), 2896.036) expect_equal(round(temp$weighted$median, digits = 2), 2884.46) expect_equal(round(temp$weighted$sd.abs, digits = 4), 240.2228) 
expect_equal(round(temp$weighted$sd.rel, digits = 6), 8.294885) expect_equal(round(temp$weighted$se.abs, digits = 5), 48.04457) expect_equal(round(temp$weighted$se.rel, digits = 6), 1.658977) expect_equal(round(temp$weighted$skewness, digits = 6), 1.342018) expect_equal(round(temp$weighted$kurtosis, digits = 6), 4.387913) }) test_that("check unweighted values from output", { testthat::skip_on_cran() expect_equal(temp$weighted$n, 25) expect_equal(round(temp$unweighted$mean, digits = 3), 2950.818) expect_equal(round(temp$unweighted$median, digits = 2), 2884.46) expect_equal(round(temp$unweighted$sd.abs, digits = 4), 281.6433) expect_equal(round(temp$unweighted$sd.rel, digits = 6), 9.544584) expect_equal(round(temp$unweighted$se.abs, digits = 5), 56.32866) expect_equal(round(temp$unweighted$se.rel, digits = 6), 1.908917) expect_equal(round(temp$unweighted$skewness, digits = 6), 1.342018) expect_equal(round(temp$unweighted$kurtosis, digits = 6), 4.387913) }) test_that("check MCM values from output", { expect_equal(temp$MCM$n, 25) expect_equal(round(temp$MCM$mean, digits = 3), 2950.992) expect_equal(round(temp$MCM$median, digits = 3), 2885.622) expect_equal(round(temp$MCM$sd.abs, digits = 4), 295.0737) expect_equal(round(temp$MCM$sd.rel, digits = 6), 9.999137) expect_equal(round(temp$MCM$se.abs, digits = 5), 59.01474) expect_equal(round(temp$MCM$se.rel, digits = 6), 1.999827) expect_equal(round(temp$MCM$skewness, digits = 3), 1286.082) expect_equal(round(temp$MCM$kurtosis, digits = 3), 4757.097) }) Luminescence/tests/testthat/test_calc_IEU.R0000755000176200001440000000301613232564314020462 0ustar liggesuserscontext("calc_IEU") data(ExampleData.DeValues, envir = environment()) temp <- calc_IEU(ExampleData.DeValues$CA1, a = 0.2, b = 1.9, interval = 1, verbose = FALSE, plot = FALSE) test_that("Test general behaviour", { data(ExampleData.DeValues, envir = environment()) ##standard expect_silent(calc_IEU( ExampleData.DeValues$CA1, a = 0.2, b = 1.9, interval = 1, verbose = 
FALSE, plot =FALSE )) ##enable plot expect_message(calc_IEU( ExampleData.DeValues$CA1, a = 0.2, b = 1.9, interval = 1, trace = TRUE, verbose = TRUE, plot = TRUE )) ##verbose without setting expect_message(calc_IEU( ExampleData.DeValues$CA1, a = 0.2, b = 1.9, interval = 1, plot = FALSE )) ##cause stop expect_error(calc_IEU( "ExampleData.DeValues$CA1", a = 0.2, b = 1.9, interval = 1, plot = FALSE )) ##provide RLum.Results ##cause stop expect_silent(calc_IEU( set_RLum(class = "RLum.Results", data = list(test = ExampleData.DeValues$CA1)), a = 0.2, b = 1.9, interval = 1, verbose = FALSE, plot = FALSE )) }) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(temp), c("RLum.Results", "RLum")) expect_equal(length(temp), 5) }) test_that("check values from output example", { testthat::skip_on_cran() results <- get_RLum(temp) expect_equal(results$de, 46.67) expect_equal(results$de_err, 2.55) expect_equal(results$n, 24) }) Luminescence/tests/testthat/test_RLum.Data.Spectrum.R0000644000176200001440000000310713517556614022416 0ustar liggesuserscontext("RLum.Data.Spectrum") test_that("check class", { testthat::skip_on_cran() ##set empty spectrum object and show it expect_output(show(set_RLum(class = "RLum.Data.Spectrum"))) ##check replacements object <- set_RLum(class = "RLum.Data.Spectrum") expect_s4_class(set_RLum(class = "RLum.Data.Spectrum", data = object), class = "RLum.Data.Spectrum") ##check get_RLum object <- set_RLum(class = "RLum.Data.Spectrum", data = object, info = list(a = "test")) expect_error(get_RLum(object, info.object = "est"), regexp = "Invalid element name. 
Valid names are: a") expect_error(get_RLum(object, info.object = 1L), "'info.object' has to be a character!") expect_is(get_RLum(object, info.object = "a"), "character") ##test method names expect_type(names(object), "character") ##test bin_RLum() expect_error(bin_RLum.Data(object, bin_size.col = "test"), "'bin_size.row' and 'bin_size.col' must be of class 'numeric'!") object@data <- matrix(data = rep(1:20, each = 10), ncol = 20) rownames(object@data) <- 1:10 colnames(object@data) <- 1:20 expect_s4_class(object = bin_RLum.Data(object, bin_size.row = 2), "RLum.Data.Spectrum") expect_s4_class(object = bin_RLum.Data(object, bin_size.row = 1, bin_size.col = 2), "RLum.Data.Spectrum") ##check conversions expect_s4_class(as(object = data.frame(x = 1:10), Class = "RLum.Data.Spectrum"), "RLum.Data.Spectrum") expect_is(as(set_RLum("RLum.Data.Spectrum"), "data.frame"), "data.frame") expect_s4_class(as(object = matrix(1:10,ncol = 2), Class = "RLum.Data.Spectrum"), "RLum.Data.Spectrum") }) Luminescence/tests/testthat/test_analyse_pIRIRSequence.R0000644000176200001440000000316313540751607023215 0ustar liggesuserscontext("analyse_pIRIRSequence") set.seed(1) data(ExampleData.BINfileData, envir = environment()) object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos = 1) object <- get_RLum(object, record.id = c(-29, -30)) sequence.structure <- c(1, 2, 2, 3, 4, 4) sequence.structure <- as.vector(sapply(seq(0, length(object) - 1, by = 4), function(x) { sequence.structure + x })) object <- sapply(1:length(sequence.structure), function(x) { object[[sequence.structure[x]]] }) object <- set_RLum(class = "RLum.Analysis", records = object, protocol = "pIRIR") results <- analyse_pIRIRSequence( object, signal.integral.min = 1, signal.integral.max = 2, background.integral.min = 900, background.integral.max = 1000, fit.method = "EXP", sequence.structure = c("TL", "pseudoIRSL1", "pseudoIRSL2"), main = "Pseudo pIRIR data set based on quartz OSL", plot = FALSE, plot.single = TRUE, verbose = 
FALSE ) test_that("check class and length of output", { testthat::skip_on_cran() expect_is(results, "RLum.Results") expect_equal(length(results), 4) expect_is(results$LnLxTnTx.table, "data.frame") expect_is(results$rejection.criteria, "data.frame") }) test_that("check output", { testthat::skip_on_cran() ##fix for different R versions if(R.version$major == "3" && as.numeric(R.version$minor) < 6){ expect_equal(round(sum(results$data[1:2, 1:4]), 0),7583) }else{ expect_equal(round(sum(results$data[1:2, 1:4]), 0),7584) } expect_equal(round(sum(results$rejection.criteria$Value), 2),3338.69) }) Luminescence/tests/testthat/test_plot_DRCSummary.R0000644000176200001440000000223613417222471022102 0ustar liggesuserscontext("plot_DRCSummary") test_that("Test certain input scenarios", { testthat::skip_on_cran() ##function stop expect_error(plot_DRCSummary("test"), regexp = "The input is not of class 'RLum.Results'") expect_error(plot_DRCSummary(set_RLum("RLum.Results")), regexp = "'object' was created by none supported function, cf. 
manual for allowed originators") }) test_that("Test plotting", { testthat::skip_on_cran() #load data example data data(ExampleData.BINfileData, envir = environment()) #transform the values from the first position in a RLum.Analysis object object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) results <- analyse_SAR.CWOSL( object = object, signal.integral.min = 1, signal.integral.max = 2, background.integral.min = 900, background.integral.max = 1000, plot = FALSE, verbose = FALSE ) ##simple expect_silent(plot_DRCSummary(results)) ##plus points expect_silent(plot_DRCSummary(results, show_dose_points = TRUE, show_natural = TRUE)) ##excpect warning expect_warning(plot_DRCSummary(results, show_dose_points = TRUE, show_natural = TRUE, sel_curves = 1000)) }) Luminescence/tests/testthat/test_structure_RLum.R0000644000176200001440000000057113517034741022057 0ustar liggesuserscontext("structure_RLum") test_that("Test whether the function works", { testthat::skip_on_cran() data(ExampleData.RLum.Analysis, envir = environment()) expect_silent(structure_RLum(IRSAR.RF.Data)) expect_is(structure_RLum(IRSAR.RF.Data), "data.frame") ##test a list of such elements expect_is(structure_RLum(list(IRSAR.RF.Data,IRSAR.RF.Data, "a")), "list") }) Luminescence/tests/testthat/test_read_PSL2R.R0000644000176200001440000000072313231137116020705 0ustar liggesuserscontext("read_PSL2R") test_that("Test functionality", { testthat::skip_on_cran() ## default values expect_is(read_PSL2R( file = system.file("extdata/DorNie_0016.psl", package = "Luminescence") ), "RLum.Analysis") ## custom values (all inverted) expect_is(read_PSL2R( file = system.file("extdata/DorNie_0016.psl", package = "Luminescence"), drop_bg = TRUE, as_decay_curve = FALSE, smooth = TRUE, merge = TRUE ), "RLum.Analysis") }) Luminescence/tests/testthat/test_fit_OSLLifeTimes.R0000644000176200001440000000301213571743147022161 0ustar liggesuserscontext("fit_OSLLifeTimes()") ##load example data data(ExampleData.TR_OSL, envir = 
environment()) temp_list <- list(ExampleData.TR_OSL, ExampleData.TR_OSL) temp_analysis <- set_RLum("RLum.Analysis", records = temp_list) test_that("standard check", { testthat::skip_on_cran() ##trigger errors expect_null(fit_OSLLifeTimes(object = "test")) ## Test different inputs ##simple run set.seed(1) expect_s4_class(object = fit_OSLLifeTimes( object = ExampleData.TR_OSL, plot = FALSE, n.components = 1), class = "RLum.Results") ##simple list expect_s4_class(object = fit_OSLLifeTimes( object = temp_list, plot = FALSE, n.components = 1), class = "RLum.Results") ##simple RLum.Analysis expect_s4_class(object = fit_OSLLifeTimes( object = temp_analysis, verbose = FALSE, plot = FALSE, n.components = 1), class = "RLum.Results") ##test arguments ##simple run expect_s4_class(object = fit_OSLLifeTimes( object = ExampleData.TR_OSL, method_control = list(seed = 1, weights = FALSE), plot = FALSE, n.components = 1), class = "RLum.Results") ##test options expect_s4_class(object = fit_OSLLifeTimes( object = ExampleData.TR_OSL, verbose = FALSE, plot = FALSE, n.components = 1), class = "RLum.Results") ##warning for log expect_warning(object = fit_OSLLifeTimes( object = ExampleData.TR_OSL, verbose = FALSE, plot = TRUE, log = "x", n.components = 1), regexp = "log-scale requires x-values > 0, set min xlim to 0.01!") }) Luminescence/tests/testthat/test_calc_CentralDose.R0000644000176200001440000000261013540751607022244 0ustar liggesuserscontext("calc_CentralDose") data(ExampleData.DeValues, envir = environment()) temp <- calc_CentralDose( ExampleData.DeValues$CA1, plot = FALSE, verbose = FALSE) temp_NA <- data.frame(rnorm(10)+5, rnorm(10)+5) temp_NA[1,1] <- NA test_that("errors and warnings function", { testthat::skip_on_cran() expect_error(calc_CentralDose(data = "error"), "'data' has to be of type 'data.frame' or 'RLum.Results'!") expect_error(calc_CentralDose(temp, sigmab = 10), "sigmab needs to be given as a fraction between 0 and 1") expect_s4_class(calc_CentralDose(temp_NA), 
"RLum.Results") expect_warning(calc_CentralDose(temp_NA, na.rm = TRUE)) expect_error(calc_CentralDose(data.frame()), "should have at least two columns and two rows!") }) test_that("standard and output", { testthat::skip_on_cran() expect_equal(is(temp), c("RLum.Results", "RLum")) expect_equal(length(temp), 4) ##log and trace expect_s4_class(calc_CentralDose(ExampleData.DeValues$CA1, log = FALSE, trace = TRUE), "RLum.Results") }) test_that("check summary output", { testthat::skip_on_cran() results <- get_RLum(temp) expect_equal(round(results$de, digits = 5), 65.70929) expect_equal(round(results$de_err, digits = 6), 3.053443) expect_equal(round(results$OD, digits = 5), 34.69061) expect_equal(round(results$OD_err, digits = 6), 3.458774) expect_equal(round(results$Lmax, digits = 5), 31.85046) }) Luminescence/tests/testthat/test_calc_MaxDose.R0000755000176200001440000000206513540751607021410 0ustar liggesuserscontext("calc_MaxDose") data(ExampleData.DeValues, envir = environment()) temp <- calc_MaxDose(ExampleData.DeValues$CA1, sigmab = 0.2, par = 3, plot = FALSE, verbose = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(temp), c("RLum.Results", "RLum")) expect_equal(length(temp), 9) }) test_that("check values from output example", { testthat::skip_on_cran() results <- get_RLum(temp) expect_equal(round(results$de, digits = 2), 76.58) expect_equal(round(results$de_err, digits = 2), 7.57) expect_equal(results$ci_level, 0.95) expect_equal(round(results$ci_lower, digits = 2), 69.65) expect_equal(round(results$ci_upper, digits = 2), 99.33) expect_equal(results$par, 3) expect_equal(round(results$sig, digits = 2), 1.71) expect_equal(round(results$p0, digits = 2), 0.65) expect_equal(results$mu, NA) expect_equal(round(results$Lmax, digits = 2), -19.79) expect_equal(round(results$BIC, digits = 2), 58.87) }) Luminescence/tests/testthat/test_calc_CosmicDoseRate.R0000644000176200001440000000344213231137116022677 0ustar 
liggesuserscontext("calc_CosmicDoseRate") temp <- calc_CosmicDoseRate(depth = 2.78, density = 1.7, latitude = 38.06451, longitude = 1.49646, altitude = 364, error = 10) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(temp), c("RLum.Results", "RLum")) expect_equal(length(temp), 3) }) test_that("check values from output example 1", { testthat::skip_on_cran() results <- get_RLum(temp) expect_equal(results$depth, 2.78) expect_equal(results$density, 1.7) expect_equal(results$latitude, 38.06451) expect_equal(results$longitude, 1.49646) expect_equal(results$altitude, 364) expect_equal(round(results$total_absorber.gcm2, digits = 0), 473) expect_equal(round(results$d0, digits = 3), 0.152) expect_equal(round(results$geom_lat, digits = 1), 41.1) expect_equal(round(results$dc, digits = 3), 0.161) }) test_that("check values from output example 2b", { testthat::skip_on_cran() temp <- calc_CosmicDoseRate(depth = c(5.0, 2.78), density = c(2.65, 1.7), latitude = 12.04332, longitude = 4.43243, altitude = 364, corr.fieldChanges = TRUE, est.age = 67, error = 15) results <- get_RLum(temp) expect_equal(results$depth.1, 5) expect_equal(results$depth.2, 2.78) expect_equal(results$density.1, 2.65) expect_equal(results$density.2, 1.7) expect_equal(results$latitude, 12.04332) expect_equal(results$longitude, 4.43243) expect_equal(results$altitude, 364) expect_equal(round(results$total_absorber.gcm2, digits = 0), 1798) expect_equal(round(results$d0, digits = 4), 0.0705) expect_equal(round(results$geom_lat, digits = 1), 15.1) expect_equal(round(results$dc, digits = 3), 0.072) }) Luminescence/tests/testthat/test_names_RLum.R0000644000176200001440000000054413517034761021124 0ustar liggesuserscontext("names_RLum") test_that("Test whether function works", { testthat::skip_on_cran() data(ExampleData.RLum.Analysis, envir = environment()) expect_silent(names_RLum(IRSAR.RF.Data)) expect_is(names_RLum(IRSAR.RF.Data), "character") ##test a list of such elements 
expect_is(names_RLum(list(IRSAR.RF.Data,IRSAR.RF.Data, "a")), "list") }) Luminescence/tests/testthat/test_fit_ThermalQuenching.R0000644000176200001440000000325213456400021023145 0ustar liggesuserscontext("fit_ThermalQuenching()") ##create example data data <- data.frame( T = c(25, 40, 50, 60, 70, 80, 90, 100, 110), V = c(0.06, 0.058, 0.052, 0.051, 0.041, 0.034, 0.035, 0.033, 0.032), V_X = c(0.012, 0.009, 0.008, 0.008, 0.007, 0.006, 0.005, 0.005, 0.004)) data_list <- list(data, data) data_NA <- data data_NA[1,] <- NA test_that("standard check", { testthat::skip_on_cran() ##trgger errors expect_error(fit_ThermalQuenching(data = "test")) ##simple run with error expect_error(fit_ThermalQuenching( data = data[,1:2], n.MC = NULL), regexp = "'data' is empty or has less than three columns!") ##simple run with warning expect_warning(fit_ThermalQuenching( data = cbind(data,data), n.MC = NULL), regexp = "data' has more than 3 columns, taking only the first three!") ##simple run with warning NA expect_warning(fit_ThermalQuenching( data = data_NA, n.MC = NULL), regexp = "NA values in 'data' automatically removed!") ##simple run expect_s4_class(fit_ThermalQuenching( data = data, n.MC = NULL), class = "RLum.Results") ##simple run with fitting error expect_null(fit_ThermalQuenching( data = data.frame(T = 1:10, V = 1:10, V_X = 1:10), n.MC = NULL)) # ##switch off weights expect_s4_class(fit_ThermalQuenching( data = data, method_control = list(weights = NULL), n.MC = NULL), class = "RLum.Results") ##simple list run expect_s4_class(fit_ThermalQuenching( data = data_list, n.MC = NULL), class = "RLum.Results") ##simple run without plot etc expect_s4_class(fit_ThermalQuenching( data = data, verbose = FALSE, plot = TRUE, n.MC = 10), class = "RLum.Results") }) Luminescence/tests/testthat/test_analyse_FadingMeasurement.R0000644000176200001440000000154213431376746024202 0ustar liggesuserscontext("analyse_FadingMeasurement") test_that("general test", { testthat::skip_on_cran() ## load example 
data (sample UNIL/NB123, see ?ExampleData.Fading) data("ExampleData.Fading", envir = environment()) ##(1) get fading measurement data (here a three column data.frame) fading_data <- ExampleData.Fading$fading.data$IR50 ##break function expect_error(analyse_FadingMeasurement(object = "test"), regexp = "'object' needs to be of type 'RLum.Analysis' or a 'list' of such objects!") ## run routine analysis expect_is(g_value <- analyse_FadingMeasurement( fading_data, plot = TRUE, verbose = TRUE, n.MC = 10), class = "RLum.Results") ##not plot not verbose expect_is(g_value <- analyse_FadingMeasurement( fading_data, plot = FALSE, verbose = FALSE, n.MC = 10), class = "RLum.Results") }) Luminescence/tests/testthat/test_calc_gSGC.R0000644000176200001440000000246413231137116020621 0ustar liggesuserscontext("calc_gSGC") set.seed(seed = 1) temp <- calc_gSGC(data = data.frame( LnTn = 2.361, LnTn.error = 0.087, Lr1Tr1 = 2.744, Lr1Tr1.error = 0.091, Dr1 = 34.4), plot = FALSE, verbose = FALSE ) test_that("plot and verbose and so", { expect_output(calc_gSGC(data = data.frame( LnTn = 2.361, LnTn.error = 0.087, Lr1Tr1 = 2.744, Lr1Tr1.error = 0.091, Dr1 = 34.4), plot = TRUE, verbose = TRUE )) }) test_that("test errors", { testthat::skip_on_cran() expect_error(calc_gSGC(data = NA)) expect_error(calc_gSGC(data = data.frame( LnTn = 2.361, LnTn.error = 0.087, Lr1Tr1 = 2.744, Lr1Tr1.error = 0.091, Dr1 = 34.4), gSGC.type = 1, plot = FALSE, verbose = FALSE)) expect_error(calc_gSGC(data = data.frame(a = 1, b = 1, c = 1, d = 1, e = 1, f = 1))) }) test_that("check class and length of output", { testthat::skip_on_cran() expect_is(temp, class = "RLum.Results", info = NULL, label = NULL) expect_is(temp$De, class = "data.frame", info = NULL, label = NULL) expect_is(temp$De.MC, class = "list", info = NULL, label = NULL) expect_equal(length(temp), 3) }) test_that("check values from output example", { testthat::skip_on_cran() expect_equal(round(sum(temp$De), digits = 2), 30.39) 
expect_equal(round(sum(temp$De.MC[[1]]), 0), 10848) }) Luminescence/tests/testthat/test_calc_OSLLxTxRatio.R0000755000176200001440000001673213231137116022320 0ustar liggesuserscontext("calc_OSLLxTxRatio") ##preloads data(ExampleData.LxTxOSLData, envir = environment()) temp <- calc_OSLLxTxRatio( Lx.data = Lx.data, Tx.data = Tx.data, signal.integral = c(1:2), background.integral = c(85:100)) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(temp), c("RLum.Results", "RLum")) expect_equal(length(temp), 2) }) test_that("test arguments", { testthat::skip_on_cran() ##digits expect_silent(calc_OSLLxTxRatio( Lx.data, Tx.data, signal.integral = c(1:2), background.integral = c(85:100), digits = 1)) ##sigmab expect_silent(calc_OSLLxTxRatio( Lx.data, Tx.data, signal.integral = c(1:2), background.integral = c(85:100), sigmab = c(1000,100) )) ##poisson expect_silent(calc_OSLLxTxRatio( Lx.data, Tx.data, signal.integral = c(1:2), background.integral = c(85:100), background.count.distribution = "poisson" )) }) test_that("test input", { testthat::skip_on_cran() ##RLum.Curve expect_silent(calc_OSLLxTxRatio( set_RLum(class = "RLum.Data.Curve", data = as.matrix(Lx.data)), set_RLum(class = "RLum.Data.Curve", data = as.matrix(Tx.data)), signal.integral = c(1:2), background.integral = c(70:100))) ##matrix expect_silent(calc_OSLLxTxRatio( as.matrix(Lx.data), as.matrix(Tx.data), signal.integral = c(1:2), background.integral = c(70:100))) ##numeric expect_silent(calc_OSLLxTxRatio( as.numeric(Lx.data[,2]), as.numeric(Tx.data[,2]), signal.integral = c(1:2), background.integral = c(70:100))) ##RLum.Curve expect_silent(calc_OSLLxTxRatio( set_RLum(class = "RLum.Data.Curve", data = as.matrix(Lx.data)), Tx.data = NULL, signal.integral = c(1:2), background.integral = c(70:100))) ##matrix expect_silent(calc_OSLLxTxRatio( as.matrix(Lx.data), Tx.data = NULL, signal.integral = c(1:2), background.integral = c(70:100))) ##numeric expect_silent(calc_OSLLxTxRatio( 
as.numeric(Lx.data[,2]), Tx.data = NULL, signal.integral = c(1:2), background.integral = c(70:100))) }) test_that("force function break", { testthat::skip_on_cran() expect_error(calc_OSLLxTxRatio( Lx.data[1:10,], Tx.data, signal.integral = c(1:2), background.integral = c(85:100) ), "Channel numbers of Lx and Tx data differ!") expect_error(calc_OSLLxTxRatio( "Lx.data", Tx.data, signal.integral = c(1:2), background.integral = c(85:100) ), "Data type of Lx and Tx data differs!") expect_error(calc_OSLLxTxRatio( "Lx.data", "Tx.data", signal.integral = c(1:2), background.integral = c(85:100) ), "Data type error! Required types are data.frame or numeric vector.") expect_error(calc_OSLLxTxRatio( Lx.data, Tx.data, signal.integral = c(1:2000), background.integral = c(85:100) ), "signal.integral is not valid!") expect_error(calc_OSLLxTxRatio( Lx.data, Tx.data, signal.integral = c(1:90), background.integral = c(85:100) ), "Overlapping of 'signal.integral' and 'background.integral' is not permitted!") expect_error(calc_OSLLxTxRatio( Lx.data, Tx.data, signal.integral = c(1:10), signal.integral.Tx = c(1:90), background.integral = c(85:100), background.integral.Tx = c(85:100) ), "Overlapping of 'signal.integral.Tx' and 'background.integral.Tx' is not permitted!") expect_error(calc_OSLLxTxRatio( Lx.data, Tx.data, signal.integral = c(1:20), background.integral = c(85:1000) ), "background.integral is not valid! Max: 100") expect_error(calc_OSLLxTxRatio( Lx.data, Tx.data, signal.integral = c(1:10), signal.integral.Tx = c(1:10), background.integral = c(85:100), background.integral.Tx = c(85:10000) ), "background.integral.Tx is not valid! 
Max: 100") expect_error(calc_OSLLxTxRatio( Lx.data, Tx.data, signal.integral = c(1:10), signal.integral.Tx = c(1:1000), background.integral = c(85:100), background.integral.Tx = c(85:100) ), "signal.integral.Tx is not valid!") expect_error(calc_OSLLxTxRatio( Lx.data, Tx.data, signal.integral = c(1:20), signal.integral.Tx = c(1:20), background.integral = 80:100, background.integral.Tx = NULL ), "You have to provide both: signal.integral.Tx and background.integral.Tx!") expect_error(calc_OSLLxTxRatio( Lx.data, Tx.data, signal.integral = c(1:20), background.integral = 80:100, sigmab = "test" ), "'sigmab' has to be of type numeric.") expect_error(calc_OSLLxTxRatio( Lx.data, Tx.data, signal.integral = c(1:20), background.integral = 80:100, sigmab = 1:100 ), "Maximum allowed vector length for 'sigmab' is 2.") }) test_that("create warnings", { testthat::skip_on_cran() expect_warning(calc_OSLLxTxRatio( Lx.data, Tx.data, signal.integral = c(1:20), background.integral = 80:100 ), "Number of background channels for Lx < 25; error estimation might be not reliable!") expect_warning(calc_OSLLxTxRatio( Lx.data, Tx.data, signal.integral = c(1:20), background.integral = 60:100, background.count.distribution = "hallo" ), "Unknown method for background.count.distribution. A non-poisson distribution is assumed!") expect_warning(calc_OSLLxTxRatio( Lx.data, Tx.data, signal.integral = c(1:20), signal.integral.Tx = c(2:20), background.integral = 60:100, background.integral.Tx = 40:100, use_previousBG = TRUE ), " For option use_previousBG = TRUE independent Lx and Tx integral limits are not allowed. 
Integral limits of Lx used for Tx.") }) test_that("check weird circumstances", { testthat::skip_on_cran() ##(1) - Lx curve 0 expect_type(calc_OSLLxTxRatio( data.frame(Lx.data[,1],0), Tx.data, signal.integral = c(1:2), background.integral = c(85:100) )$LxTx.table, type = "list") ##(2) - Tx curve 0 expect_type(calc_OSLLxTxRatio( Lx.data, data.frame(Tx.data[,1],0), signal.integral = c(1:2), background.integral = c(85:100) )$LxTx.table, type = "list") ##(3) - Lx and Tx curve 0 expect_type(calc_OSLLxTxRatio( data.frame(Lx.data[,1],0), data.frame(Tx.data[,1],0), signal.integral = c(1:2), background.integral = c(85:100) )$LxTx.table, type = "list") ##(4) - Lx < 0 expect_type(calc_OSLLxTxRatio( data.frame(Lx.data[,1],-1000), data.frame(Tx.data[,1],0), signal.integral = c(1:2), background.integral = c(85:100) )$LxTx.table, type = "list") ##(5) - Tx < 0 expect_type(calc_OSLLxTxRatio( Lx.data, data.frame(Lx.data[,1],-1000), signal.integral = c(1:2), background.integral = c(85:100) )$LxTx.table, type = "list") ##(6) - Lx & Tx < 0 expect_type(calc_OSLLxTxRatio( data.frame(Lx.data[,1],-1000), data.frame(Tx.data[,1],-1000), signal.integral = c(1:2), background.integral = c(85:100) )$LxTx.table, type = "list") }) test_that("check values from output example", { testthat::skip_on_cran() results <- get_RLum(temp) expect_equal(results$LnLx, 81709) expect_equal(results$LnLx.BG, 530) expect_equal(results$TnTx, 7403) expect_equal(results$TnTx.BG, 513) expect_equal(results$Net_LnLx, 81179) expect_equal(round(results$Net_LnLx.Error, digits = 4), 286.5461) expect_equal(results$Net_TnTx, 6890) expect_equal(round(results$Net_TnTx.Error, digits = 5), 88.53581) expect_equal(round(results$LxTx, digits = 5), 11.78215) expect_equal(round(results$LxTx.Error, digits = 7), 0.1570077) }) Luminescence/tests/testthat/test_calc_Huntley2008.R0000644000176200001440000000663613456417557022030 0ustar liggesuserscontext("calc_Huntley2006") set.seed(1) data("ExampleData.Fading", envir = environment()) 
fading_data <- ExampleData.Fading$fading.data$IR50 data <- ExampleData.Fading$equivalentDose.data$IR50 ddot <- c(7.00, 0.004) readerDdot <- c(0.134, 0.0067) rhop <- analyse_FadingMeasurement(fading_data, plot = FALSE, verbose = FALSE, n.MC = 10) huntley <- calc_Huntley2006( data = data, rhop = rhop, ddot = ddot, readerDdot = readerDdot, n.MC = 50, plot = FALSE, verbose = FALSE ) test_that("check class and length of output", { testthat::skip_on_cran() ##rhop expect_is(rhop, class = "RLum.Results", info = NULL, label = NULL) expect_is(rhop$fading_results, "data.frame") expect_is(rhop$fit, "lm") expect_is(rhop$rho_prime, "data.frame") ##kars expect_is(huntley, class = "RLum.Results", info = NULL, label = NULL) expect_is(huntley$results, class = "data.frame", info = NULL, label = NULL) expect_is(huntley$data, class = "data.frame", info = NULL, label = NULL) expect_is(huntley$Ln, class = "numeric", info = NULL, label = NULL) expect_is(huntley$fits, class = "list", info = NULL, label = NULL) }) test_that("check values from analyse_FadingMeasurement()", { expect_equal(round(sum(rhop$fading_results[,1:9]),0),415) expect_equal(round(sum(rhop$rho_prime),5),2e-05) expect_equal(round(sum(rhop$irr.times)), 2673108) }) test_that("check values from calc_Huntley2008()", { testthat::skip_on_cran() ##fix for different R versions if(R.version$major == "3" && as.numeric(R.version$minor) < 6){ expect_equal(round(huntley$results$Sim_Age, 1), 41.3) expect_equal(round(huntley$results$Sim_Age_2D0, 0), 164) expect_equal(round(sum(huntley$Ln),4), 0.1585) }else{ expect_equal(round(huntley$results$Sim_Age, 1), 42.3) expect_equal(round(huntley$results$Sim_Age_2D0, 0), 163) expect_equal(round(sum(huntley$Ln),4), 0.1621) } expect_equal(round(sum(huntley$data),0), 191530) expect_equal(round(sum(residuals(huntley$fits$simulated)),1), 0.3) expect_equal(round(sum(residuals(huntley$fits$measured)),4), 0.1894) expect_equal(round(sum(residuals(huntley$fits$unfaded)),4), 1.6293) ## COMPARE calc_Kars2008 
(deprecated) vs. re-named calc_Huntley2006 test_that("compare deprecated calc_Kars2008 and calc_Huntley2006", { testthat::skip_on_cran() expect_identical({ set.seed(1) calc_Huntley2006( data = data, rhop = rhop, ddot = ddot, readerDdot = readerDdot, n.MC = 50, fit.method = "EXP", plot = FALSE, verbose = FALSE)$results }, { set.seed(1) suppressWarnings( calc_Kars2008( data = data, rhop = rhop, ddot = ddot, readerDdot = readerDdot, n.MC = 50, fit.method = "EXP", plot = FALSE, verbose = FALSE)$results ) })#EndOf::expect_identical expect_identical({ set.seed(1) calc_Huntley2006( data = data, rhop = rhop, ddot = ddot, readerDdot = readerDdot, n.MC = 500, fit.method = "GOK", plot = FALSE, verbose = FALSE)$results }, { set.seed(1) suppressWarnings( calc_Kars2008( data = data, rhop = rhop, ddot = ddot, readerDdot = readerDdot, n.MC = 500, fit.method = "GOK", plot = FALSE, verbose = FALSE)$results ) })#EndOf::expect_identical }) }) Luminescence/tests/testthat/test_analyse_SARCWOSL.R0000644000176200001440000000352713540751607022040 0ustar liggesuserscontext("analyse_SAR.CWOSL") set.seed(1) data(ExampleData.BINfileData, envir = environment()) object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) ##perform SAR analysis and set rejection criteria results <- analyse_SAR.CWOSL( object = object, signal.integral.min = 1, signal.integral.max = 2, background.integral.min = 900, background.integral.max = 1000, log = "x", fit.method = "EXP", rejection.criteria = list( recycling.ratio = 10, recuperation.rate = 10, testdose.error = 10, palaeodose.error = 10, exceed.max.regpoint = TRUE), plot = FALSE, verbose = FALSE ) test_that("check class and length of output", { testthat::skip_on_cran() expect_is(results, "RLum.Results") expect_equal(length(results), 4) expect_is(results$data, "data.frame") expect_is(results$LnLxTnTx.table, "data.frame") expect_is(results$rejection.criteria, "data.frame") expect_is(results$Formula, "expression") }) test_that("check De values", { 
testthat::skip_on_cran() ##fix for different R versions if(R.version$major == "3" && as.numeric(R.version$minor) < 6){ expect_equal(object = round(sum(results$data[1:2]), digits = 0), 1717) }else{ expect_equal(object = round(sum(results$data[1:2]), digits = 0), 1716) } }) test_that("check LxTx table", { testthat::skip_on_cran() expect_equal(object = round(sum(results$LnLxTnTx.table$LxTx), digits = 5), 20.92051) expect_equal(object = round(sum(results$LnLxTnTx.table$LxTx.Error), digits = 2), 0.34) }) test_that("check rejection criteria", { testthat::skip_on_cran() ##fix for different R versions if(R.version$major == "3" && as.numeric(R.version$minor) < 6){ expect_equal(object = round(sum(results$rejection.criteria$Value), digits = 0), 1669) }else{ expect_equal(object = round(sum(results$rejection.criteria$Value), digits = 0), 1669) } }) Luminescence/tests/testthat/test_read_XSYG2R.R0000644000176200001440000000267313417217664021064 0ustar liggesuserscontext("read_XSYG2R") test_that("test import of XSYG files", { testthat::skip_on_cran() ##force error expect_type(read_XSYG2R("https://raw.githubusercontent.com/R-Lum/rxylib/master/inst/extg", fastForward = TRUE), type = "NULL") expect_type(read_XSYG2R("/Test", fastForward = TRUE), type = "NULL") ##successfull import expect_type(read_XSYG2R("https://raw.githubusercontent.com/R-Lum/rxylib/master/inst/extdata/TLSpectrum.xsyg", import = FALSE), type = "list") expect_is(read_XSYG2R("https://raw.githubusercontent.com/R-Lum/rxylib/master/inst/extdata/TLSpectrum.xsyg", fastForward = TRUE, import = FALSE), class = "data.frame") expect_silent(read_XSYG2R("https://raw.githubusercontent.com/R-Lum/rxylib/master/inst/extdata/TLSpectrum.xsyg", verbose = FALSE)) expect_silent(read_XSYG2R("https://raw.githubusercontent.com/R-Lum/rxylib/master/inst/extdata/TLSpectrum.xsyg", verbose = FALSE, recalculate.TL.curves = FALSE)) expect_silent(read_XSYG2R("https://raw.githubusercontent.com/R-Lum/rxylib/master/inst/extdata/TLSpectrum.xsyg", 
verbose = FALSE, pattern = "xsyg",recalculate.TL.curves = FALSE)) expect_type(read_XSYG2R("https://raw.githubusercontent.com/R-Lum/rxylib/master/inst/extdata/TLSpectrum.xsyg", fastForward = FALSE), type = "list") results <- expect_type(read_XSYG2R("https://raw.githubusercontent.com/R-Lum/rxylib/master/inst/extdata/TLSpectrum.xsyg", fastForward = TRUE), type = "list") expect_output(print(results)) }) Luminescence/tests/testthat/test_github.R0000644000176200001440000000254713257166230020347 0ustar liggesuserscontext("GitHub Interface") ## NOTE: # Unauthenticated requests to the GiHub APIv3 are limited to 60 requests per hour # (associated with the originating request). Exceeding the rate limit results in a # 403 Forbidden reply. Since CIs make multiple requests when testing the rate limit # is easily reached. We check whether we either get a valid response, or at least # a 403 response. test_that("Check github_commits()", { testthat::skip_on_cran() testthat::skip_on_os("mac") response <- tryCatch(github_commits(), error = function(e) return(e)) if (inherits(response, "error")) expect_output(print(response), regexp = "status code 403") else expect_is(response, "data.frame") rm(response) }) test_that("Check github_branches()", { testthat::skip_on_cran() testthat::skip_on_os("mac") response <- tryCatch(github_branches(), error = function(e) return(e)) if (inherits(response, "error")) expect_output(print(response), regexp = "status code 403") else expect_is(response, "data.frame") rm(response) }) test_that("Check github_issues()", { testthat::skip_on_cran() testthat::skip_on_os("mac") response <- tryCatch(github_issues(), error = function(e) return(e)) if (inherits(response, "error")) expect_output(print(response), regexp = "status code 403") else expect_is(response, "list") rm(response) }) Luminescence/tests/testthat/test_RisoeBINfileData-class.R0000644000176200001440000000104413231137116023214 0ustar liggesuserscontext("RisoeBINfileData Class Tests") test_that("Check 
the example and the numerical values", { testthat::skip_on_cran() ##construct empty object temp <- set_Risoe.BINfileData(METADATA = data.frame(), DATA = list(), .RESERVED = list()) ##get function and check whether we get NULL expect_null(get_Risoe.BINfileData(temp)) ##check object expect_is(temp, class = "Risoe.BINfileData") expect_output(show(temp)) ##show method data(ExampleData.BINfileData, envir = environment()) expect_output(show(CWOSL.SAR.Data)) }) Luminescence/tests/testthat/test_write_RLum2CSV.R0000644000176200001440000000222713431063630021601 0ustar liggesuserscontext("write_RLumCSV") test_that("test errors and general export function", { testthat::skip_on_cran() ##test error expect_error(write_RLum2CSV(object = "", export = FALSE), regexp = "[write_RLum2CSV()] Object needs to be a member of the object class RLum!", fixed = TRUE) ##test export data("ExampleData.portableOSL", envir = environment()) expect_is(write_RLum2CSV(ExampleData.portableOSL, export = FALSE), "list") ##test RLum.Results objects ## load example data data(ExampleData.DeValues, envir = environment()) results <- calc_CommonDose(ExampleData.DeValues$CA1) ##using option compact expect_warning(write_RLum2CSV(object = results,export = FALSE), regexp = "elements could not be converted to a CSV-structure!") ##using option compact = FALSE expect_warning(write_RLum2CSV(object = results,export = FALSE, compact = TRUE), regexp = "elements could not be converted to a CSV-structure!") ##real export expect_warning( write_RLum2CSV(object = results, path = tempdir(), compact = TRUE), regexp = "elements could not be converted to a CSV-structure!") }) Luminescence/tests/testthat/test_merge_RLumDataCurve.R0000644000176200001440000000126413231137116022706 0ustar liggesuserscontext("merge_RLum.Data.Curve") test_that("Merge tests", { testthat::skip_on_cran() ##load example data data(ExampleData.XSYG, envir = environment()) TL.curves <- get_RLum(OSL.SARMeasurement$Sequence.Object, recordType = "TL (UVVIS)") 
TL.curve.1 <- TL.curves[[1]] TL.curve.3 <- TL.curves[[3]] ##check for error expect_error(merge_RLum.Data.Curve("", merge.method = "/")) ##check various operations expect_is(TL.curve.1 + TL.curve.3, "RLum.Data.Curve") expect_is(TL.curve.1 - TL.curve.3, "RLum.Data.Curve") expect_is(TL.curve.3 / TL.curve.1, "RLum.Data.Curve") expect_warning(TL.curve.3 / TL.curve.1) expect_is(TL.curve.1 * TL.curve.3, "RLum.Data.Curve") }) Luminescence/tests/testthat/test_report_RLum.R0000644000176200001440000000057213231137116021324 0ustar liggesuserscontext("report_RLum") test_that("Test Simple RLum Report", { testthat::skip_on_cran() ### load example data data("ExampleData.DeValues") temp <- calc_CommonDose(ExampleData.DeValues$CA1) # create the standard HTML report expect_null(report_RLum(object = temp, timestamp = FALSE)) expect_null(report_RLum(object = temp, timestamp = FALSE, compact = FALSE)) }) Luminescence/tests/testthat/test_calc_FuchsLang2001.R0000755000176200001440000000136013231137116022210 0ustar liggesuserscontext("calc_FuchsLang2001") data(ExampleData.DeValues, envir = environment()) temp <- calc_FuchsLang2001(ExampleData.DeValues$BT998, cvThreshold = 5, plot = FALSE, verbose = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(temp), c("RLum.Results", "RLum")) expect_equal(length(temp), 5) }) test_that("check values from output example 1", { testthat::skip_on_cran() results <- get_RLum(temp) expect_equal(results$de, 2866.11) expect_equal(results$de_err, 157.35) expect_equal(results$de_weighted, 2846.66) expect_equal(results$de_weighted_err, 20.58) expect_equal(results$n.usedDeValues, 22) }) Luminescence/tests/testthat/test_plot_GrowthCurve.R0000644000176200001440000000744113540751607022403 0ustar liggesuserscontext("plot_GrowthCurve") set.seed(1) data(ExampleData.LxTxData, envir = environment()) temp_EXP <- plot_GrowthCurve( LxTxData, fit.method = "EXP", output.plot = FALSE, verbose = FALSE, NumberIterations.MC = 10 ) 
temp_LIN <- plot_GrowthCurve( LxTxData, fit.method = "LIN", output.plot = FALSE, verbose = FALSE, NumberIterations.MC = 10 ) temp_EXPLIN <- plot_GrowthCurve( LxTxData, fit.method = "EXP+LIN", output.plot = FALSE, verbose = FALSE, NumberIterations.MC = 10 ) temp_EXPEXP <- plot_GrowthCurve( LxTxData, fit.method = "EXP+EXP", output.plot = FALSE, verbose = FALSE, NumberIterations.MC = 10 ) temp_QDR <- plot_GrowthCurve( LxTxData, fit.method = "QDR", output.plot = FALSE, verbose = FALSE, NumberIterations.MC = 10 ) test_that("check class and length of output", { testthat::skip_on_cran() expect_is(temp_EXP, class = "RLum.Results", info = NULL, label = NULL) expect_is(temp_EXP$Fit, class = "nls") expect_is(temp_LIN, class = "RLum.Results", info = NULL, label = NULL) expect_is(temp_LIN$Fit, class = "lm") expect_is(temp_EXPLIN, class = "RLum.Results", info = NULL, label = NULL) expect_is(temp_EXPLIN$Fit, class = "nls") expect_is(temp_EXPEXP, class = "RLum.Results", info = NULL, label = NULL) expect_is(temp_EXPEXP$Fit, class = "nls") expect_is(temp_QDR, class = "RLum.Results", info = NULL, label = NULL) expect_is(temp_QDR$Fit, class = "lm") }) test_that("check values from output example", { testthat::skip_on_cran() expect_equivalent(round(temp_EXP$De[[1]], digits = 2), 1737.88) ##fix for different R versions if(R.version$major == "3" && as.numeric(R.version$minor) < 6){ expect_equal(round(sum(temp_EXP$De.MC, na.rm = TRUE), digits = 0), 17441) }else{ expect_equal(round(sum(temp_EXP$De.MC, na.rm = TRUE), digits = 0), 17562) } expect_equivalent(round(temp_LIN$De[[1]], digits = 2), 1811.33) ##fix for different R versions if(R.version$major == "3" && as.numeric(R.version$minor) < 6){ expect_equal(round(sum(temp_LIN$De.MC, na.rm = TRUE), digits = 0),18238) }else{ expect_equal(round(sum(temp_LIN$De.MC, na.rm = TRUE), digits = 0),18398) } expect_equivalent(round(temp_EXPLIN$De[[1]], digits = 2), 1791.53) ##fix for different R versions if(R.version$major == "3" && 
as.numeric(R.version$minor) < 6){ expect_equal(round(sum(temp_EXPLIN$De.MC, na.rm = TRUE), digits = 0),17474) }else{ expect_equal(round(sum(temp_EXPLIN$De.MC, na.rm = TRUE), digits = 0),18045) } expect_equivalent(round(temp_EXPEXP$De[[1]], digits = 2), 1787.15) ##fix for different R versions if(R.version$major == "3" && as.numeric(R.version$minor) < 6){ expect_equal(round(sum(temp_EXPEXP$De.MC, na.rm = TRUE), digits = 0), 7316) }else{ expect_equal(round(sum(temp_EXPEXP$De.MC, na.rm = TRUE), digits = 0), 7303) } expect_equivalent(round(temp_QDR$De[[1]], digits = 2), 1666.2) ##fix for different R versions if(R.version$major == "3" && as.numeric(R.version$minor) < 6){ expect_equal(round(sum(temp_QDR$De.MC, na.rm = TRUE), digits = 0), 14937) }else{ expect_equal(round(sum(temp_QDR$De.MC, na.rm = TRUE), digits = 0), 16476) } }) test_that("check extrapolation", { testthat::skip_on_cran() LxTxData[1,2:3] <- c(0.5, 0.001) LIN <- plot_GrowthCurve(LxTxData,mode = "extrapolation", fit.method = "LIN") EXP <- plot_GrowthCurve(LxTxData,mode = "extrapolation", fit.method = "EXP") EXPLIN <- plot_GrowthCurve(LxTxData,mode = "extrapolation", fit.method = "EXP+LIN") expect_equivalent(round(LIN$De$De,0), 165) expect_equivalent(round(EXP$De$De,0), 110) #it fails on some unix platforms for unknown reason. 
#expect_equivalent(round(EXPLIN$De$De,0), 110) }) Luminescence/tests/testthat/test_RLum.Analysis-class.R0000644000176200001440000000435113517346165022632 0ustar liggesuserscontext("RLum.Analysis-class") test_that("Check the example and the numerical values", { testthat::skip_on_cran() ##load example data data("ExampleData.RLum.Analysis") obj <- IRSAR.RF.Data ## set_RLum() ##construct empty object tmp <- set_RLum("RLum.Analysis", protocol = "testthat", records = lapply(1:20, function(x) { set_RLum("RLum.Data.Curve", recordType = "test", data = matrix(1:10, ncol = 2), info = list(el = as.character(x))) }), info = list(el = "test")) ##overwrite object expect_s4_class(set_RLum("RLum.Analysis", records = tmp), "RLum.Analysis") ## as() expect_is(as(tmp, "list"), "list") expect_is(as(list(), "RLum.Analysis"), "RLum.Analysis") ## output expect_output(print(as(list(), "RLum.Analysis")), regexp = "This is an empty object") expect_is(set_RLum("RLum.Analysis", protocol = "testthat", records = set_RLum("RLum.Analysis", protocol = "nest", records = list(matrix(1:10, ncol = 2))), info = list(el = "test")), "RLum.Analysis") ## get_RLum expect_length(get_RLum(obj, subset = (recordType == "RF")), 2) expect_null(get_RLum(obj, subset = (recordType == ""))) expect_length(get_RLum(tmp, subset = (el == "2")), 1) expect_is(get_RLum(tmp, subset = (el == "2")), "RLum.Analysis") expect_is(get_RLum(tmp, info.object = "el"), "character") expect_warning(get_RLum(tmp, info.object = "missing"), regexp = "Invalid info.object name") expect_error(get_RLum(tmp, record.id = "character"), "'record.id' has to be of type 'numeric' or 'logical'!") expect_error(get_RLum(tmp, recordType = 1L), "'recordType' has to be of type 'character'!") expect_error(get_RLum(tmp, curveType = 1L), "'curveType' has to be of type 'character'!") expect_error(get_RLum(tmp, RLum.type = 1L), "'RLum.type' has to be of type 'character'!") expect_error(get_RLum(tmp, get.index = "a"), "'get.index' has to be of type 'logical'!") 
expect_null(get_RLum(set_RLum("RLum.Analysis"), info = "test")) ##structure RLum expect_error( structure_RLum(set_RLum("RLum.Analysis", records = list(set_RLum("RLum.Data.Image")))), "Only 'RLum.Data.Curve' objects are allowed!") }) Luminescence/tests/testthat/test_calc_SourceDoseRate.R0000755000176200001440000000323713232376434022740 0ustar liggesuserscontext("calc_SourceDoseRate") temp <- calc_SourceDoseRate(measurement.date = "2012-01-27", calib.date = "2014-12-19", calib.dose.rate = 0.0438, calib.error = 0.0019) test_that("General tests", { ##simple run expect_silent(calc_SourceDoseRate( calib.date = "2014-12-19", calib.dose.rate = 0.0438, calib.error = 0.0019 )) ##simple run expect_silent(calc_SourceDoseRate( measurement.date = "2018-01-02", calib.date = "2014-12-19", calib.dose.rate = 0.0438, calib.error = 0.0019 )) ##simple run predict expect_silent(calc_SourceDoseRate( measurement.date = "2018-01-02", calib.date = "2014-12-19", calib.dose.rate = 0.0438, calib.error = 0.0019, predict = 10 )) ##Gy/min as unit expect_silent(calc_SourceDoseRate( measurement.date = "2018-01-02", calib.date = "2014-12-19", calib.dose.rate = 0.0438, calib.error = 0.0019, dose.rate.unit = "Gy/min" )) ##cause stop expect_error(calc_SourceDoseRate( measurement.date = "2018-01-02", calib.date = "2014-12-19", calib.dose.rate = 0.0438, calib.error = 0.0019, source.type = "SK" )) }) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(temp), c("RLum.Results", "RLum")) expect_equal(length(temp), 3) }) test_that("check values from output example 1", { testthat::skip_on_cran() results <- get_RLum(temp) expect_equal(round(results$dose.rate, digits = 8), 0.04695031) expect_equal(round(results$dose.rate.error, digits = 9), 0.002036657) expect_equal(results$date, as.Date("2012-01-27")) }) Luminescence/tests/testthat/test_Analyse_SAROSLdata.R0000644000176200001440000000327613245572601022376 0ustar liggesuserscontext("Test old Analyse_SAROSLdata()") 
test_that("full example test", { testthat::skip_on_cran() data(ExampleData.BINfileData, envir = environment()) output <- Analyse_SAR.OSLdata(input.data = CWOSL.SAR.Data, signal.integral = c(1:5), background.integral = c(900:1000), position = c(1:1), output.plot = FALSE) ##checks expect_is(output, "list") expect_length(output, 3) ## errors expect_error({ Analyse_SAR.OSLdata() }, regexp = "No input data given") expect_error({ Analyse_SAR.OSLdata(input.data = CWOSL.SAR.Data) }, regexp = "No signal integral is given") expect_error({ Analyse_SAR.OSLdata(input.data = CWOSL.SAR.Data, signal.integral = 1:3) }, regexp = "No background integral is given") expect_error({ Analyse_SAR.OSLdata(input.data = subset(CWOSL.SAR.Data, LTYPE == "IRSL"), signal.integral = 1:3, background.integral = 200:250) }, regexp = "No 'OSL' curves found") ## should work expect_is({ Analyse_SAR.OSLdata(input.data = CWOSL.SAR.Data, signal.integral = 1:3, background.integral = 200:250, position = 1, background.count.distribution = "non-poisson", sigmab = 0.1, output.plot = TRUE) }, "list") expect_is({ tmp <- subset(CWOSL.SAR.Data, LTYPE == "OSL" & POSITION == 1 & ID <= 457) Analyse_SAR.OSLdata(tmp, 1:3, 200:250, output.plot = TRUE, output.plot.single = TRUE) }, "list") }) Luminescence/tests/testthat/test_scale_GammaDose.R0000644000176200001440000001122613417222471022061 0ustar liggesuserscontext("fit_SurfaceExposure") data("ExampleData.ScaleGammaDose", envir = environment()) d <- ExampleData.ScaleGammaDose ## Conversion factors: Liritzisetal2013 results <- scale_GammaDose(data = d, conversion_factors = "Liritzisetal2013", fractional_gamma_dose = "Aitken1985", plot = FALSE, verbose = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(results), c("RLum.Results", "RLum")) expect_equal(length(results), 6) expect_equal(is(results$summary)[1], "data.frame") }) test_that("check values from output example", { testthat::skip_on_cran() 
expect_equal(formatC(results$summary$dose_rate_total, 4), "0.9242") expect_equal(formatC(results$summary$dose_rate_total_err, 4), "0.2131") }) ## Conversion factors: Guerinetal2011 results <- scale_GammaDose(data = d, conversion_factors = "Guerinetal2011", fractional_gamma_dose = "Aitken1985", plot = FALSE, verbose = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(results), c("RLum.Results", "RLum")) expect_equal(length(results), 6) expect_equal(is(results$summary)[1], "data.frame") }) test_that("check values from output example", { testthat::skip_on_cran() expect_equal(formatC(results$summary$dose_rate_total, 4), "0.9214") expect_equal(formatC(results$summary$dose_rate_total_err, 4), "0.2124") }) ## Conversion factors: Guerinetal2011 results <- scale_GammaDose(data = d, conversion_factors = "AdamiecAitken1998", fractional_gamma_dose = "Aitken1985", plot = FALSE, verbose = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(results), c("RLum.Results", "RLum")) expect_equal(length(results), 6) expect_equal(is(results$summary)[1], "data.frame") }) test_that("check values from output example", { testthat::skip_on_cran() expect_equal(formatC(results$summary$dose_rate_total, 4), "0.9123") expect_equal(formatC(results$summary$dose_rate_total_err, 4), "0.2097") }) ## CONSOLE & PLOT OUTPUT test_that("console & plot", { expect_output({ scale_GammaDose(d, plot = TRUE, verbose = TRUE) scale_GammaDose(d, plot = TRUE, plot_single = FALSE, verbose = TRUE) }) }) ## WARNINGS & FAILURES test_that("check input data", { testthat::skip_on_cran() expect_error( scale_GammaDose(NA, plot = FALSE, verbose = TRUE), "must be a data frame" ) expect_error( scale_GammaDose(d[ ,1:10], plot = FALSE, verbose = TRUE), "must have 12 columns" ) expect_warning({ tmp <- d colnames(tmp) <- letters[1:ncol(tmp)] scale_GammaDose(tmp, plot = FALSE, verbose = TRUE) }, "Unexpected column names" ) expect_error({ tmp <- 
d tmp$sample_offset <- NA scale_GammaDose(tmp, plot = FALSE, verbose = TRUE) }, "Only one layer must be contain a numeric value in column 'sample_offset'" ) expect_error({ tmp <- d tmp$sample_offset[5] <- "target" scale_GammaDose(tmp, plot = FALSE, verbose = TRUE) }, "Non-numeric value in the the row of the target layer." ) expect_error({ tmp <- d tmp$sample_offset[5] <- -1 scale_GammaDose(tmp, plot = FALSE, verbose = TRUE) }, "The numeric value in 'sample_offset' must be positive." ) expect_error({ tmp <- d tmp$sample_offset[5] <- 20 scale_GammaDose(tmp, plot = FALSE, verbose = TRUE) }, "Impossible! Sample offset larger than the target-layer's thickness!" ) expect_error({ scale_GammaDose(d, conversion_factors = c("a", "b"), plot = FALSE, verbose = TRUE) }, "must be an object of length 1 and of class 'character'." ) expect_error({ scale_GammaDose(d, conversion_factors = 1, plot = FALSE, verbose = TRUE) }, "must be an object of length 1 and of class 'character'." ) expect_error({ scale_GammaDose(d, conversion_factors = "HansGuenter2020", plot = FALSE, verbose = TRUE) }, "Invalid 'conversion_factors'. Valid options:" ) expect_error({ scale_GammaDose(d, fractional_gamma_dose = c("a", "b"), plot = FALSE, verbose = TRUE) }, "must be an object of length 1 and of class 'character'." ) expect_error({ scale_GammaDose(d, fractional_gamma_dose = 1, plot = FALSE, verbose = TRUE) }, "must be an object of length 1 and of class 'character'." ) expect_error({ scale_GammaDose(d, fractional_gamma_dose = "momgetthecameraiamontheinternet1995", plot = FALSE, verbose = TRUE) }, "Invalid 'fractional_gamma_dose'. 
Valid options:" ) }) Luminescence/tests/testthat/test_RLum.R0000644000176200001440000000070513231137116017727 0ustar liggesuserscontext("RLum") test_that("check class", { testthat::skip_on_cran() object <- set_RLum(class = "RLum.Data.Curve") expect_length(rep(object, 10), 10) ## data(ExampleData.DeValues, envir = environment()) temp <- calc_FuchsLang2001(ExampleData.DeValues$BT998, cvThreshold = 5, plot = FALSE) expect_output(show(temp)) ## test coercion expect_is(as(as(object = temp, Class = "list"), "RLum.Results"), "RLum.Results") }) Luminescence/tests/testthat/test_read_SPE2R.R0000644000176200001440000000073513231137116020701 0ustar liggesuserscontext("read_SPE2R") test_that("Test general functionality", { testthat::skip_on_cran() ##crash function expect_null(read_SPE2R(file = "text")) ## default values expect_is(read_SPE2R("https://github.com/R-Lum/Luminescence/blob/master/tests/testdata/SPEfile.SPE?raw=true"), "RLum.Data.Image") ##test verbose expect_is(read_SPE2R("https://github.com/R-Lum/Luminescence/blob/master/tests/testdata/SPEfile.SPE?raw=true", verbose = FALSE), "RLum.Data.Image") }) Luminescence/tests/testthat/test_extract_IrradiationTimes.R0000644000176200001440000000050613232576216024061 0ustar liggesuserscontext("extract_IrradiationTimes") test_that("Test the extraction of irradiation times", { testthat::skip_on_cran() ##general test file <- system.file("extdata/XSYG_file.xsyg", package="Luminescence") ##general test expect_is(extract_IrradiationTimes(object = file, txtProgressBar = FALSE), "RLum.Results") }) Luminescence/tests/testthat/test_convert_RLum2Risoe.BINfileData.R0000644000176200001440000000121713231137116024613 0ustar liggesuserscontext("convert_RLum2Risoe.BINfileData") test_that("test for errors", { testthat::skip_on_cran() expect_error(convert_RLum2Risoe.BINfileData(object = NA)) }) test_that("functionality", { testthat::skip_on_cran() ##load example data data(ExampleData.RLum.Analysis, envir = environment()) ##provide RLum.Analysis 
expect_is(convert_RLum2Risoe.BINfileData(IRSAR.RF.Data), "Risoe.BINfileData") ##provide RLum.Data.Curve expect_is(convert_RLum2Risoe.BINfileData(IRSAR.RF.Data@records[[1]]), "Risoe.BINfileData") ##provide list expect_is(convert_RLum2Risoe.BINfileData(list(IRSAR.RF.Data,IRSAR.RF.Data)), "Risoe.BINfileData") }) Luminescence/tests/testthat/test_convert_PSL2CSV.R0000644000176200001440000000102013232572404021677 0ustar liggesuserscontext("convert_PSL2CSV()") test_that("General test", { testthat::skip_on_cran() ##get file file <- system.file("extdata/DorNie_0016.psl", package="Luminescence") ##stop expect_error(convert_PSL2CSV()) ##the case where we have an object of type RLum expect_is(convert_PSL2CSV(read_PSL2R(file), export = FALSE), class = "list") ##export FALSE expect_is(convert_PSL2CSV(file, export = FALSE), class = "list") ##write to temp expect_silent(convert_PSL2CSV(file, export = TRUE, path = tempdir())) }) Luminescence/tests/testthat/test_PSL2RisoeBINfiledata.R0000644000176200001440000000050413231137116022612 0ustar liggesuserscontext("Test PSL2Risoe.BINfileData") test_that("simple test", { testthat::skip_on_cran() data("ExampleData.portableOSL", envir = environment()) merged <- merge_RLum(ExampleData.portableOSL) bin <- PSL2Risoe.BINfileData(merged) ##checks expect_is(bin, "Risoe.BINfileData") expect_equal(length(bin), 70) }) Luminescence/tests/testthat/test_analyse_SARTL.R0000644000176200001440000000105013420124162021440 0ustar liggesuserscontext("analyse_SAR.TL") ##Full check test_that("Test examples", { skip_on_cran() ##load data data(ExampleData.BINfileData, envir = environment()) ##transform the values from the first position in a RLum.Analysis object object <- Risoe.BINfileData2RLum.Analysis(TL.SAR.Data, pos=3) ##perform analysis expect_is( analyse_SAR.TL( object, signal.integral.min = 210, signal.integral.max = 220, fit.method = "EXP OR LIN", sequence.structure = c("SIGNAL", "BACKGROUND") ), "RLum.Results" ) }) 
Luminescence/tests/testthat/test_methods_DRAC.R0000644000176200001440000000334113242571355021314 0ustar liggesuserscontext("methods_DRAC") ##Full check test_that("methods_DRAC", { skip_on_cran() input <- template_DRAC() ## print expect_output(print(input, blueprint = TRUE)) expect_output(print(input, blueprint = FALSE)) ## as.data.frame expect_is(as.data.frame(input), "data.frame") expect_s3_class(as.data.frame(input), "DRAC.data.frame") ## [[<- expect_warning({ input <- template_DRAC() input[[1]] <- 1i }, regexp = "cannot use objects of class") expect_warning({ input <- template_DRAC() input[[1]] <- c(1, 2) }, regexp = "Input must be of length") expect_warning({ input <- template_DRAC() input[[5]] <- "1" }, regexp = "Input must be of class numeric") expect_warning({ input <- template_DRAC() input[[5]] <- "X" input[[5]] <- "abc" }, regexp = "Cannot coerce < abc > to a numeric value") expect_warning({ input <- template_DRAC(nrow = 2) input[[5]] <- c("X", 1) input[[5]] <- c("X", "abc") }, regexp = "Cannot coerce < abc > to a numeric value") expect_warning({ input <- template_DRAC() input[[5]] <- 1L input[[5]] <- "abc" }, regexp = "Input must be of class integer") expect_warning({ input <- template_DRAC() input[[13]] <- "abc" }, regexp = "Invalid option") expect_warning({ input <- template_DRAC() input[[13]] <- 1 }, regexp = "Input must be of class character") ## [<- expect_identical( object = template_DRAC(), expected = { input <- template_DRAC() input[1] <- NA_character_ input }) ## $<- expect_identical( object = template_DRAC(), expected = { input <- template_DRAC() input$`Project ID` <- NA_character_ input }) }) Luminescence/tests/testthat/test_apply_CosmicRayRemoval.R0000644000176200001440000000324613423367503023507 0ustar liggesuserscontext("apply_CosmicRayRemoval") test_that("check function", { testthat::skip_on_cran() ##load data data(ExampleData.XSYG, envir = environment()) ##crash the function expect_error(apply_CosmicRayRemoval("error"), regexp = "An object 
of class 'character' is not supported as input; please read the manual!") ##run basic tests expect_silent(apply_CosmicRayRemoval(TL.Spectrum, method = "Pych")) expect_silent(apply_CosmicRayRemoval(TL.Spectrum, method = "smooth")) expect_silent(apply_CosmicRayRemoval(TL.Spectrum, method = "smooth", MARGIN = 1)) expect_output(apply_CosmicRayRemoval(TL.Spectrum, method = "Pych", MARGIN = 2, verbose = TRUE, plot = TRUE)) expect_silent(apply_CosmicRayRemoval(TL.Spectrum, method = "Pych", method.Pych.smoothing = 2, method.Pych.threshold_factor = 2)) ##constructe objects for different tests RLum_list <- list(TL.Spectrum) RLum.Analysis <- set_RLum("RLum.Analysis", records = RLum_list) RLum.Analysis_list <- list(RLum.Analysis) RLum_list_mixed <- list(TL.Spectrum, set_RLum("RLum.Data.Curve")) RLum.Analysis_mixed <- set_RLum("RLum.Analysis", records = RLum_list_mixed ) RLum.Analysis_mixed_list <- list(RLum.Analysis_mixed) ##run tests expect_is(apply_CosmicRayRemoval(RLum_list), class = "list") expect_is(apply_CosmicRayRemoval(RLum.Analysis), class = "RLum.Analysis") expect_is(apply_CosmicRayRemoval(RLum.Analysis_list), class = "list") expect_error(apply_CosmicRayRemoval(RLum_list_mixed)) expect_is(apply_CosmicRayRemoval(RLum.Analysis_mixed), class = "RLum.Analysis") expect_is(apply_CosmicRayRemoval(RLum.Analysis_mixed_list), class = "list") }) Luminescence/tests/testthat/test_apply_EfficiencyCorrection.R0000644000176200001440000000277113475772460024377 0ustar liggesuserscontext("apply_EfficiencyCorrection") test_that("check function", { testthat::skip_on_cran() ##load data data(ExampleData.XSYG, envir = environment()) ##create efficiency data eff_data <- data.frame(WAVELENGTH = 1:1000, runif(1000)) ##break function expect_error(apply_EfficiencyCorrection(object = "ERROR"), regexp = "Input object is not of type RLum.Data.Spectrum") expect_error(apply_EfficiencyCorrection(object = TL.Spectrum, spectral.efficiency = "ERROR"), regexp = "'spectral.efficiency' is not of type 
data.frame") eff_data_false <- eff_data eff_data_false[1,2] <- 2 expect_error(apply_EfficiencyCorrection( object = TL.Spectrum, spectral.efficiency = eff_data_false), regexp = "Relative quantum efficiency values > 1 are not allowed.") ##run tests expect_s4_class(apply_EfficiencyCorrection(TL.Spectrum,spectral.efficiency = eff_data), "RLum.Data.Spectrum") ##run list test expect_warning( apply_EfficiencyCorrection(list(a = "test", TL.Spectrum), spectral.efficiency = eff_data), regexp = "Skipping character object in input list.") ##run test with RLum.Analysis objects expect_s4_class( apply_EfficiencyCorrection(set_RLum("RLum.Analysis", records = list(TL.Spectrum)), spectral.efficiency = eff_data), "RLum.Analysis") ##run test with everything combined input <- list(a = "test", TL.Spectrum,set_RLum("RLum.Analysis", records = list(TL.Spectrum))) apply_EfficiencyCorrection(input, eff_data) }) Luminescence/tests/testthat/test_read_BIN2R.R0000644000176200001440000000414013576410175020667 0ustar liggesuserscontext("read_BIN2R") test_that("test the import of various BIN-file versions", { testthat::skip_on_cran() ##test for various erros expect_error(read_BIN2R(file = ""), "[read_BIN2R()] File does not exist!", fixed = TRUE) ##this test need an internet connect ... 
test for it if(!httr::http_error("https://github.com/R-Lum/Luminescence/tree/master/tests/testdata")){ ##try to import every format by using the files on GitHub ##V3 expect_is( read_BIN2R(file = "https://github.com/R-Lum/Luminescence/raw/master/tests/testdata/BINfile_V3.bin", txtProgressBar = FALSE), class = "Risoe.BINfileData") ##V4 expect_is( read_BIN2R(file = "https://github.com/R-Lum/Luminescence/raw/master/tests/testdata/BINfile_V4.bin", txtProgressBar = FALSE), class = "Risoe.BINfileData") ##V5 expect_is( read_BIN2R(file = "https://github.com/R-Lum/Luminescence/raw/master/tests/testdata/BINfile_V5.binx", txtProgressBar = FALSE), class = "Risoe.BINfileData") ##V6 expect_is( read_BIN2R(file = "https://github.com/R-Lum/Luminescence/raw/master/tests/testdata/BINfile_V6.binx", txtProgressBar = FALSE), class = "Risoe.BINfileData") ##V6 - show method expect_output(read_BIN2R(file = "https://github.com/R-Lum/Luminescence/raw/master/tests/testdata/BINfile_V6.binx", txtProgressBar = FALSE)) ##V7 expect_is( read_BIN2R(file = "https://github.com/R-Lum/Luminescence/raw/master/tests/testdata/BINfile_V7.binx", txtProgressBar = FALSE), class = "Risoe.BINfileData") ##V8 expect_is( read_BIN2R(file = "https://github.com/R-Lum/Luminescence/raw/master/tests/testdata/BINfile_V8.binx", txtProgressBar = FALSE), class = "Risoe.BINfileData") ##test further options ##n.records and fastForward expect_is( read_BIN2R(file = "https://github.com/R-Lum/Luminescence/raw/master/tests/testdata/BINfile_V4.bin", txtProgressBar = FALSE, n.records = 1, fastForward = TRUE, verbose = FALSE), class = "list") } }) Luminescence/tests/testthat/test_calc_WodaFuchs2008.R0000644000176200001440000000103513231137116022224 0ustar liggesuserscontext("test_calc_WodaFuchs2008") test_that("Test general functionality", { testthat::skip_on_cran() ##load example data ## read example data set data(ExampleData.DeValues, envir = environment()) ##test arguments expect_is(calc_WodaFuchs2008(data = 
ExampleData.DeValues$CA1), "RLum.Results") ##test arguments expect_is(calc_WodaFuchs2008(data = ExampleData.DeValues$CA1, plot = FALSE), "RLum.Results") ##test arguments expect_is(calc_WodaFuchs2008(data = ExampleData.DeValues$CA1, breaks = 20), "RLum.Results") }) Luminescence/tests/testthat/test_zzz.R0000644000176200001440000000163613232573137017721 0ustar liggesuserscontext("zzz") test_that("Test zzz functions ... they should still work", { testthat::skip_on_cran() ##get right answer expect_equal(get_rightAnswer(), 46) expect_equal(get_rightAnswer("test"), 46) ##get quote expect_silent(get_Quote()) expect_silent(get_Quote(ID = 1)) expect_silent(get_Quote(ID = 10, separated = TRUE)) expect_silent(get_Quote(ID = 1e06)) ##tune data expect_warning(tune_Data(1:10)) expect_warning(tune_Data(data.frame(runif(n = 10, 8,12),runif(n = 10, 0.1,0.3) ), decrease.error = TRUE)) expect_warning(tune_Data(data.frame(runif(n = 10, 8,12),runif(n = 10, 0.1,0.3) ), increase.data = TRUE)) ##sTeve ## read example data set data(ExampleData.DeValues, envir = environment()) ExampleData.DeValues <- Second2Gray(ExampleData.DeValues$BT998, c(0.0438,0.0019)) ## create plot straightforward expect_silent(plot_KDE(data = ExampleData.DeValues, fun = TRUE)) }) Luminescence/tests/testthat/test_calc_CommonDose.R0000644000176200001440000000115413231137116022074 0ustar liggesuserscontext("calc_CommonDose") data(ExampleData.DeValues, envir = environment()) temp <- calc_CommonDose(ExampleData.DeValues$CA1, plot = FALSE, verbose = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(temp), c("RLum.Results", "RLum")) expect_equal(length(temp), 4) }) test_that("check values from output", { testthat::skip_on_cran() results <- get_RLum(temp) expect_equal(round(results$de, digits = 5), 62.15999) expect_equal(round(results$de_err, digits = 7), 0.7815117) expect_true(temp@data$args$log) expect_equal(temp@data$args$sigmab, 0) }) 
Luminescence/tests/testthat/test_calc_AliquotSize.R0000644000176200001440000000447013456607411022317 0ustar liggesuserscontext("calc_AliquotSize") set.seed(1) temp <- calc_AliquotSize( grain.size = c(100,150), sample.diameter = 1, MC.iter = 100, plot = FALSE, verbose = FALSE) test_that("consistency checks", { expect_error(calc_AliquotSize(grain.size = 1:3)) expect_error(calc_AliquotSize(grain.size = 100, packing.density = 2)) expect_error(calc_AliquotSize(grain.size = 100, packing.density = 1, sample.diameter = -1)) expect_error(calc_AliquotSize(grain.size = 100, sample.diameter = 9.8, MC = TRUE)) expect_output(calc_AliquotSize(grain.size = 100, packing.density = 1, sample.diameter = 9.8, grains.counted = 30, MC = TRUE), regexp = "Monte Carlo simulation is only available for estimating the amount of grains on the sample disc.") expect_is(calc_AliquotSize(grain.size = 100, packing.density = "inf", sample.diameter = 9.8, MC = FALSE), "RLum.Results") expect_is(calc_AliquotSize(grain.size = c(100, 150), grains.counted = 1000, sample.diameter = 9.8, MC = FALSE), "RLum.Results") expect_is(calc_AliquotSize(grain.size = c(100, 150), grains.counted = c(1000, 1100, 900), sample.diameter = 10, MC = FALSE), "RLum.Results") }) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(temp), c("RLum.Results", "RLum")) expect_equal(length(temp), 2) expect_is(temp$summary, "data.frame") expect_is(temp$MC, "list") }) test_that("check summary output", { testthat::skip_on_cran() result <- get_RLum(temp) expect_equal(result$grain.size, 125) expect_equal(result$sample.diameter, 1) expect_equal(result$packing.density, 0.65) expect_equal(result$n.grains, 42) expect_equal(result$grains.counted, NA) }) test_that("check MC run", { testthat::skip_on_cran() expect_equal(round(temp$MC$statistics$n), 100) expect_equal(round(temp$MC$statistics$mean), 43) expect_equal(round(temp$MC$statistics$median), 39) ##fix for different R versions if(R.version$major == "3" && 
as.numeric(R.version$minor) < 6){ expect_equal(round(temp$MC$statistics$sd.abs), 20) }else{ expect_equal(round(temp$MC$statistics$sd.abs), 19) } expect_equal(round(temp$MC$statistics$sd.rel), 45) expect_equal(round(temp$MC$statistics$se.abs), 2) expect_equal(round(temp$MC$statistics$se.rel), 5) expect_length(temp$MC$kde$x, 10000) expect_length(temp$MC$kde$y, 10000) }) Luminescence/tests/testthat/test_get_RLum.R0000644000176200001440000000272613431555751020606 0ustar liggesuserscontext("get_RLum") data(ExampleData.DeValues, envir = environment()) temp <- calc_CentralDose(ExampleData.DeValues$CA1, plot = FALSE, verbose = FALSE) temp_RLumDataCurve <- set_RLum(class = "RLum.Data.Curve") temp_RLumDataImage <- set_RLum(class = "RLum.Data.Image") temp_RLumDataSpectrum <- set_RLum(class = "RLum.Data.Spectrum") temp_RLumAnalysis <- set_RLum(class = "RLum.Analysis") temp_RLumResults <- set_RLum(class = "RLum.Results") test_that("check class and length of output", { testthat::skip_on_cran() expect_is(get_RLum(temp), class = "data.frame") expect_is(get_RLum(temp, data.object = "args"), class = "list") ##test objects expect_is(get_RLum(temp_RLumDataCurve), class = "matrix") expect_is(get_RLum(temp_RLumDataImage), class = "RasterBrick") expect_is(get_RLum(temp_RLumDataSpectrum), class = "matrix") expect_null(get_RLum(temp_RLumAnalysis)) expect_null(get_RLum(temp_RLumResults)) }) test_that("check get_RLum on a list and NULL", { testthat::skip_on_cran() object <- set_RLum(class = "RLum.Analysis", records = rep(set_RLum(class = "RLum.Data.Curve"), 10)) expect_warning(get_RLum(object, recordType = "test")) expect_is(get_RLum(NULL), "NULL") ##check class argument a <- list(set_RLum("RLum.Results"), set_RLum("RLum.Analysis", records = list(set_RLum("RLum.Data.Curve")))) expect_is(get_RLum(a, class = "test", drop = FALSE), class = "list") expect_is(get_RLum(a, class = "RLum.Results", drop = FALSE), class = "list") }) 
Luminescence/tests/testthat/test_plot_RLum.Data.Spectrum.R0000644000176200001440000000651013422162363023442 0ustar liggesuserscontext("Test Various Plot Functions") test_that("test pure success of the plotting without warning or error", { testthat::skip_on_cran() ##RLum.Data.Spectrum ------- data(ExampleData.XSYG, envir = environment()) m <- TL.Spectrum@data bg.spectrum <- set_RLum(class = "RLum.Data.Spectrum", data = TL.Spectrum@data[,15:16, drop = FALSE]) ##crash the function with wrong input expect_error(plot_RLum.Data.Spectrum(object = "test"), regexp = "Input object neither of class 'RLum.Data.Spectrum' nor 'matrix'.") ##try a matrix as input expect_message(plot_RLum.Data.Spectrum(object = m), regexp = "Input has been converted to a RLum.Data.Spectrum object using set_RLum()") ##remove rownames and column names rownames(m) <- NULL colnames(m) <- NULL expect_message(plot_RLum.Data.Spectrum(object = m), regexp = "Input has been converted to a RLum.Data.Spectrum object using set_RLum()") ##standard plot with some settings expect_silent(plot( TL.Spectrum, plot.type = "contour", main = "Test", xlab = "test", ylab = "test", mtext = "test", cex = 1.2, pch = 2, lwd = 2, bty = "n", sub = "est" )) ##persp plot expect_silent(suppressWarnings( plot_RLum.Data.Spectrum( TL.Spectrum, plot.type = "persp", xlim = c(310, 750), ylim = c(0, 100), bin.rows = 10, bin.cols = 2 ) )) ##test background subtraction expect_warning(plot_RLum.Data.Spectrum( TL.Spectrum, plot.type = "persp", xlim = c(310, 750), ylim = c(0, 300), bg.spectrum = bg.spectrum, bg.channels = 1:3, bin.rows = 10, bin.cols = 1 )) ##test energy axis expect_silent(plot_RLum.Data.Spectrum( TL.Spectrum, plot.type = "multiple.lines", xlim = c(1.4, 4), ylim = c(0, 300), bg.spectrum = bg.spectrum, bg.channels = 2, bin.cols = 1, xaxis.energy = TRUE )) ##multiple.lines expect_silent(suppressWarnings( plot_RLum.Data.Spectrum( TL.Spectrum, plot.type = "multiple.lines", xlim = c(310, 750), ylim = c(0, 100), bin.rows = 10, 
bin.cols = 1 ) )) ##interactive expect_silent(suppressWarnings( plot_RLum.Data.Spectrum( TL.Spectrum, plot.type = "interactive", xlim = c(310, 750), ylim = c(0, 300), bin.rows = 10, bin.cols = 1 ) )) ##interactive heatmap expect_silent(suppressWarnings( plot_RLum.Data.Spectrum( TL.Spectrum, plot.type = "interactive", xlim = c(310, 750), ylim = c(0, 300), bin.rows = 10, bin.cols = 1, type = "heatmap", showscale = TRUE ) )) ##interactive contour expect_silent(suppressWarnings( plot_RLum.Data.Spectrum( TL.Spectrum, plot.type = "interactive", xlim = c(310, 750), ylim = c(0, 300), bin.rows = 10, bin.cols = 1, type = "contour", showscale = TRUE ) )) ##create more error expect_error(plot( TL.Spectrum, plot.type = "contour", xlim = c(310, 750), ylim = c(0, 300), bin.cols = 0 )) }) Luminescence/tests/testthat/test_bin_RLumData.R0000644000176200001440000000116513231137116021352 0ustar liggesuserscontext("bin_RLum.Data") data(ExampleData.CW_OSL_Curve, envir = environment()) curve <- set_RLum( class = "RLum.Data.Curve", recordType = "OSL", data = as.matrix(ExampleData.CW_OSL_Curve) ) test_that("check class and length of output", { testthat::skip_on_cran() expect_is(bin_RLum.Data(curve), class = "RLum.Data.Curve", info = NULL, label = NULL) expect_length(bin_RLum.Data(curve)[,1], 500) }) test_that("check values from output example", { testthat::skip_on_cran() expect_equal(sum(bin_RLum.Data(curve)[,2]), 119200) expect_equal(sum(bin_RLum.Data(curve, bin = 5)[1,2]), 41146) }) Luminescence/tests/testthat/test_plot_Functions.R0000644000176200001440000001235113231137116022056 0ustar liggesuserscontext("Test Various Plot Functions") test_that("test pure success of the plotting without warning or error", { testthat::skip_on_cran() ##distribution plots data(ExampleData.DeValues, envir = environment()) ExampleData.DeValues <- ExampleData.DeValues$CA1 expect_silent(plot_RadialPlot(ExampleData.DeValues)) expect_silent(plot_KDE(ExampleData.DeValues)) 
expect_silent(plot_Histogram(ExampleData.DeValues)) expect_silent(plot_ViolinPlot(ExampleData.DeValues)) ##plot NRT data("ExampleData.BINfileData", envir = environment()) data <- Risoe.BINfileData2RLum.Analysis(object = CWOSL.SAR.Data, pos = 8, ltype = "OSL") allCurves <- get_RLum(data) pos <- seq(1, 9, 2) curves <- allCurves[pos] expect_silent(plot_NRt(curves)) ##filter combinations filter1 <- density(rnorm(100, mean = 450, sd = 20)) filter1 <- matrix(c(filter1$x, filter1$y/max(filter1$y)), ncol = 2) filter2 <- matrix(c(200:799,rep(c(0,0.8,0),each = 200)), ncol = 2) expect_silent(plot_FilterCombinations(filters = list(filter1, filter2))) ##plot_Det data(ExampleData.BINfileData, envir = environment()) object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) expect_is( plot_DetPlot( object, signal.integral.min = 1, signal.integral.max = 3, background.integral.min = 900, background.integral.max = 1000, n.channels = 5, ), "RLum.Results" ) ##plot DRT data(ExampleData.DeValues, envir = environment()) expect_silent(plot_DRTResults(values = ExampleData.DeValues$BT998[7:11,], given.dose = 2800, mtext = "Example data")) ##plot RisoeBINFileData data(ExampleData.BINfileData, envir = environment()) expect_silent(plot_Risoe.BINfileData(CWOSL.SAR.Data,position = 1)) ##various RLum plots ##RLum.Data.Curve data(ExampleData.CW_OSL_Curve, envir = environment()) temp <- as(ExampleData.CW_OSL_Curve, "RLum.Data.Curve") expect_silent(plot(temp)) ##RLum.Data.Image data(ExampleData.RLum.Data.Image, envir = environment()) expect_silent(plot(ExampleData.RLum.Data.Image)) ##RLum.Data.Spectrum ------- data(ExampleData.XSYG, envir = environment()) expect_silent(plot(TL.Spectrum, plot.type="contour", xlim = c(310,750), ylim = c(0,300))) expect_silent(suppressWarnings(plot_RLum.Data.Spectrum(TL.Spectrum, plot.type="persp", xlim = c(310,750), ylim = c(0,100), bin.rows=10, bin.cols = 1))) expect_silent(suppressWarnings(plot_RLum.Data.Spectrum(TL.Spectrum, plot.type="multiple.lines", xlim = 
c(310,750), ylim = c(0,100), bin.rows=10, bin.cols = 1))) expect_silent(suppressWarnings(plot_RLum.Data.Spectrum(TL.Spectrum, plot.type="interactive", xlim = c(310,750), ylim = c(0,300), bin.rows=10, bin.cols = 1))) expect_silent(suppressWarnings(plot_RLum.Data.Spectrum(TL.Spectrum, plot.type="interactive", xlim = c(310,750), ylim = c(0,300), bin.rows=10, bin.cols = 1, type = "heatmap", showscale = TRUE))) expect_silent(suppressWarnings(plot_RLum.Data.Spectrum(TL.Spectrum, plot.type="interactive", xlim = c(310,750), ylim = c(0,300), bin.rows=10, bin.cols = 1, type = "contour", showscale = TRUE))) expect_error(plot(TL.Spectrum, plot.type="contour", xlim = c(310,750), ylim = c(0,300), bin.cols = 0)) ##RLum.Analysis data(ExampleData.BINfileData, envir = environment()) temp <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) expect_silent(plot( temp, subset = list(recordType = "TL"), combine = TRUE, norm = TRUE, abline = list(v = c(110)) )) ##RLum.Results grains<- calc_AliquotSize(grain.size = c(100,150), sample.diameter = 1, plot = FALSE, MC.iter = 100) expect_silent(plot_RLum.Results(grains)) ##special plot RLum.Reuslts data(ExampleData.DeValues, envir = environment()) mam <- calc_MinDose(data = ExampleData.DeValues$CA1, sigmab = 0.2, log = TRUE, plot = FALSE) expect_silent(plot_RLum(mam)) cdm <- calc_CentralDose(ExampleData.DeValues$CA1) expect_silent(plot_RLum(cdm)) FMM <- calc_FiniteMixture(ExampleData.DeValues$CA1, sigmab = 0.2, n.components = c(2:4), pdf.weight = TRUE, dose.scale = c(0, 100)) plot_RLum(FMM) }) test_that("test for return values, if any", { testthat::skip_on_cran() data(ExampleData.DeValues, envir = environment()) output <- plot_AbanicoPlot(ExampleData.DeValues, output = TRUE) expect_is(output, "list") expect_length(output, 10) }) Luminescence/tests/testthat/test_analyse_portableOSL.R0000644000176200001440000000120013231137116022741 0ustar liggesuserscontext("analyse_portableOSL") data("ExampleData.portableOSL", envir = environment()) merged 
<- merge_RLum(ExampleData.portableOSL) results <- analyse_portableOSL( merged, signal.integral = 1:5, invert = FALSE, normalise = TRUE, plot = FALSE ) test_that("check class and length of output", { testthat::skip_on_cran() expect_is(results, "RLum.Results") expect_equal(length(results), 3) expect_is(results$summary, "data.frame") expect_is(results$data, "RLum.Analysis") }) test_that("check output", { testthat::skip_on_cran() expect_equal(round(sum(results$summary), digits = 2), 70.44) }) Luminescence/tests/testthat/test_analyse_Al2O3C_Measurement.R0000644000176200001440000000112213271035531024050 0ustar liggesuserscontext("analyse_Al2O3C_Measurement") ##Full check test_that("Full check", { skip_on_cran() ##load data data(ExampleData.Al2O3C, envir = environment()) ##00 - cause function breaks expect_error(analyse_Al2O3C_Measurement()) expect_error(analyse_Al2O3C_Measurement(object = "test")) expect_warning(analyse_Al2O3C_Measurement(object = data_CrossTalk, signal_integral = 1000)) ##run analysis expect_is(analyse_Al2O3C_Measurement(data_CrossTalk), "RLum.Results") expect_is(analyse_Al2O3C_Measurement(data_CrossTalk, calculate_TL_dose = TRUE), "RLum.Results") }) Luminescence/tests/testthat/test_replicate_RLum.R0000644000176200001440000000044713231367220021763 0ustar liggesuserscontext("replicate_RLum") test_that("Test replication of RLum-objects", { skip_on_cran() data(ExampleData.RLum.Analysis, envir = environment()) expect_silent(results <- rep(IRSAR.RF.Data[[1]], 5)) expect_silent(rep(IRSAR.RF.Data[[1]])) ##check expect_equal(length(results),5) }) Luminescence/tests/testthat/test_fit_LMCurve.R0000644000176200001440000000305213231137116021225 0ustar liggesuserscontext("fit_LWCurve") ## Test 1 with NLS data(ExampleData.FittingLM, envir = environment()) fit <- fit_LMCurve(values = values.curve, values.bg = values.curveBG, n.components = 3, log = "x", start_values = data.frame(Im = c(170,25,400), xm = c(56,200,1500)), plot = FALSE) test_that("check class and length 
of output", { expect_equal(is(fit), c("RLum.Results", "RLum")) expect_equal(length(fit), 3) }) test_that("check values from output example", { testthat::skip_on_cran() expect_equal(fit$data$n.components, 3) expect_equal(round(fit$data$Im1, digits = 0), 169) expect_equal(round(fit$data$xm1, digits = 0), 49) expect_equal(round(fit$data$b1, digits = 0), 2) expect_equal(round(fit$data$`pseudo-R^2`, digits = 0), 1) }) ## Test 2 with LM data(ExampleData.FittingLM, envir = environment()) fit <- fit_LMCurve(values = values.curve, values.bg = values.curveBG, n.components = 3, log = "x", fit.method = "LM", plot = FALSE) test_that("check class and length of output", { expect_equal(is(fit), c("RLum.Results", "RLum")) expect_equal(length(fit), 3) }) test_that("check values from output example", { testthat::skip_on_cran() expect_equal(fit$data$n.components, 3) expect_equal(round(fit$data$Im1, digits = 0), 169) expect_equal(round(fit$data$xm1, digits = 0), 49) expect_equal(round(fit$data$b1, digits = 0), 2) expect_equal(round(fit$data$`pseudo-R^2`, digits = 0), 1) }) Luminescence/tests/testthat/test_verify_SingleGrainData.R0000644000176200001440000000137713231137116023436 0ustar liggesuserscontext("Test verify_SingleGrainData") test_that("Various function test", { testthat::skip_on_cran() data(ExampleData.XSYG, envir = environment()) object <- get_RLum( OSL.SARMeasurement$Sequence.Object, recordType = "OSL (UVVIS)", drop = FALSE) ##initial output <- verify_SingleGrainData(object) ##return value expect_is(output, "RLum.Results") expect_is(output$selection_full, "data.frame") ##check options expect_silent(suppressWarnings(verify_SingleGrainData(object, plot = TRUE))) expect_silent(suppressWarnings(verify_SingleGrainData(object, threshold = 100))) expect_silent(suppressWarnings(verify_SingleGrainData(object, verbose = FALSE))) expect_silent(suppressWarnings(verify_SingleGrainData(object, cleanup = TRUE))) }) 
Luminescence/tests/testthat/test_Second2Gray.R0000644000176200001440000000136213231137116021170 0ustar liggesuserscontext("Second2Gray") data(ExampleData.DeValues, envir = environment()) results <- Second2Gray(ExampleData.DeValues$BT998, c(0.2,0.01)) results_alt1 <- Second2Gray(ExampleData.DeValues$BT998, c(0.2,0.01), error.propagation = "gaussian") results_alt2 <- Second2Gray(ExampleData.DeValues$BT998, c(0.2,0.01), error.propagation = "absolute") test_that("check class and length of output", { testthat::skip_on_cran() expect_is(results, class = "data.frame", info = NULL, label = NULL) }) test_that("check values from output example", { testthat::skip_on_cran() expect_equal(sum(results[[1]]), 14754.09) expect_equal(sum(results[[2]]), 507.692) expect_equal(sum(results_alt1[[2]]), 895.911) expect_equal(sum(results_alt2[[2]]), 1245.398) }) Luminescence/tests/testthat/test_analyse_baSAR.R0000644000176200001440000000253713231137116021521 0ustar liggesuserscontext("analyse_baSAR") ##Full check test_that("Full check of analyse_baSAR function", { skip_on_cran() set.seed(1) ##(1) load package test data set data(ExampleData.BINfileData, envir = environment()) ##(2) selecting relevant curves, and limit dataset CWOSL.SAR.Data <- subset(CWOSL.SAR.Data, subset = POSITION %in% c(1:3) & LTYPE == "OSL") ##(3) run analysis ##please not that the here selected parameters are ##choosen for performance, not for reliability results <- analyse_baSAR( object = CWOSL.SAR.Data, source_doserate = c(0.04, 0.001), signal.integral = c(1:2), background.integral = c(80:100), fit.method = "EXP", method_control = list(inits = list( list(.RNG.name = "base::Wichmann-Hill", .RNG.seed = 1), list(.RNG.name = "base::Wichmann-Hill", .RNG.seed = 2), list(.RNG.name = "base::Wichmann-Hill", .RNG.seed = 3) )), plot = TRUE, verbose = TRUE, n.MCMC = 100, txtProgressBar = TRUE ) expect_is( results, class = "RLum.Results", info = NULL, label = NULL ) expect_is(results$summary, "data.frame") expect_is(results$mcmc, 
"mcmc.list") expect_is(results$models, "list") expect_type(round(sum(results$summary[, c(6:9)]), 2),type = "double") }) Luminescence/tests/testthat/test_internals.R0000644000176200001440000000571413422625752021066 0ustar liggesuserscontext("internals") test_that("Test internals", { testthat::skip_on_cran() # .warningCatcher() --------------------------------------------------------------------------- expect_warning(Luminescence:::.warningCatcher(for(i in 1:5){warning("test")})) # .smoothing ---------------------------------------------------------------------------------- expect_silent(Luminescence:::.smoothing(runif(100), k = 5, method = "median")) expect_error(Luminescence:::.smoothing(runif(100), method = "test")) # fancy_scientific ()-------------------------------------------------------------------------- plot(seq(1e10, 1e20, length.out = 10),1:10, xaxt = "n") expect_silent(axis(1, at = axTicks(1),labels = Luminescence:::fancy_scientific(axTicks(1)))) # .create_StatisticalSummaryText() ------------------------------------------------------------ expect_silent(Luminescence:::.create_StatisticalSummaryText()) expect_is( Luminescence:::.create_StatisticalSummaryText( calc_Statistics(data.frame(1:10,1:10)), keywords = "mean"), class = "character") # .unlist_RLum() ------------------------------------------------------------------------------ expect_length(Luminescence:::.unlist_RLum(list(a = list(b = list(c = list(d = 1, e = 2))))), 2) # .matrix_binning() --------------------------------------------------------------------------- m <- matrix(data = c(rep(1:20,each = 20)), ncol = 10, nrow = 20) rownames(m) <- 1:nrow(m) colnames(m) <- 1:ncol(m) ##crash the function expect_error(Luminescence:::.matrix_binning("none matrix"), regexp = "Input is not of class 'matrix'!") ##test operation modes and arguments expect_is(Luminescence:::.matrix_binning(m, bin_size = 4, bin_col = FALSE), class = "matrix") expect_is(Luminescence:::.matrix_binning(m, bin_size = 4, bin_col = 
TRUE), class = "matrix") ##test row / column renaming options expect_is(Luminescence:::.matrix_binning(m, bin_size = 2, bin_col = FALSE, names = "groups"), class = "matrix") expect_is(Luminescence:::.matrix_binning(m, bin_size = 2, bin_col = FALSE, names = "mean"), class = "matrix") expect_is(Luminescence:::.matrix_binning(m, bin_size = 2, bin_col = FALSE, names = "sum"), class = "matrix") expect_is(Luminescence:::.matrix_binning(m, bin_size = 2, bin_col = FALSE, names = c("test1", "test2")), class = "matrix") ##clean-up rm(m) ## C++ code ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ ## # src_create_RLumDataCurve_matrix ------------------------------------------------------------- ##RLum.Data.Curve() ... test src_create_RLumDataCurve_matrix() expect_output( Luminescence:::src_create_RLumDataCurve_matrix( DATA = 1:100, VERSION = 4, NPOINTS = 100, LTYPE = "TL", LOW = 0, HIGH = 500, AN_TEMP = 0, TOLDELAY = 0, TOLON = 0, TOLOFF = 0 ) ) }) Luminescence/tests/testthat/test_analyse_IRSARRF.R0000644000176200001440000000322213231364720021674 0ustar liggesuserscontext("analyse_IRSAR.RF") test_that("check class and length of output", { testthat::skip_on_cran() set.seed(1) data(ExampleData.RLum.Analysis, envir = environment()) results_fit <- analyse_IRSAR.RF(object = IRSAR.RF.Data, plot = TRUE, method = "FIT") results_slide <- analyse_IRSAR.RF(object = IRSAR.RF.Data, plot = TRUE, method = "SLIDE", n.MC = NULL) results_slide_alt <- analyse_IRSAR.RF( object = IRSAR.RF.Data, plot = FALSE, method = "SLIDE", n.MC = 10, method.control = list(vslide_range = 'auto', trace_vslide = TRUE), txtProgressBar = FALSE ) expect_equal(is(results_fit), c("RLum.Results", "RLum")) expect_equal(length(results_fit), 5) expect_equal(length(results_slide), 5) expect_is(results_fit$fit, class = "nls", info = NULL, label = NULL) expect_is(results_slide$fit, class = "nls", info = NULL, label = NULL) expect_length(results_slide$slide, 10) 
expect_equal(results_fit$data$DE, 623.25) expect_equal(results_fit$data$DE.LOWER, 600.63) expect_equal(results_slide$data$DE, 610.17) expect_equal(round(results_slide_alt$data$DE, digits = 0), 384) }) test_that("test controlled chrash conditions", { testthat::skip_on_cran() ##the sliding range should not exceed a certrain value ... test it data(ExampleData.RLum.Analysis, envir = environment()) expect_error( analyse_IRSAR.RF( object = IRSAR.RF.Data, plot = FALSE, method = "SLIDE", n.MC = 10, method.control = list(vslide_range = c(0,1e+08)), txtProgressBar = FALSE ), regexp = "[:::src_analyse_IRSAR_SRS()] 'vslide_range' exceeded maximum size (1e+08)!", fixed = TRUE) }) Luminescence/tests/testthat/test_as_latex_table.R0000644000176200001440000000032313231137116022013 0ustar liggesuserscontext("Internal as LaTeX table") test_that("Check github_commits()", { testthat::skip_on_cran() df <- data.frame(x = "test", y = 1:10) expect_output(Luminescence:::.as.latex.table.data.frame(df)) }) Luminescence/tests/testthat/test_convert_Wavelength2Energy.R0000644000176200001440000000436413456605675024200 0ustar liggesuserscontext("convert_Wavelength2Energy") test_that("test convert functions", { testthat::skip_on_cran() # Set up test scenario ------------------------------------------------------------------------ #create artifical dataset according to Mooney et al. 
(2013) lambda <- seq(400,800,50) data <- matrix(data = rep(1, 2 * length(lambda)), ncol = 2) rownames(data) <- lambda colnames(data) <- 1:ncol(data) ##set plot function p <- function(m) { plot(x = m[, 1], y = m[, 2]) polygon(x = c(m[, 1], rev(m[, 1])), y = c(m[, 2], rep(0, nrow(m)))) for (i in 1:nrow(m)) { lines(x = rep(m[i, 1], 2), y = c(0, m[i, 2])) } } # Test ---------------------------------------------------------------------------------------- ##crash function expect_error(convert_Wavelength2Energy("test"), regexp = "Class 'character' not supported as input!") ##test all three allowed input objects expect_is(convert_Wavelength2Energy(data), class = "matrix") expect_is(convert_Wavelength2Energy(as.data.frame(data)), class = "data.frame") object <- set_RLum(class = "RLum.Data.Spectrum", data = data[,1,drop = FALSE]) expect_is(convert_Wavelength2Energy(object), class = "RLum.Data.Spectrum") ##test the list option expect_is(convert_Wavelength2Energy(list(data, as.data.frame(data), object)), class = "list") ##test order argument expect_is(convert_Wavelength2Energy(data, order = TRUE), class = "matrix") ##test special treatment of RLum.Data.Spectrum objects object@info[["curveDescripter"]] <- "energy" expect_message(convert_Wavelength2Energy(object), regexp = "Your object has already an energy scale, nothing done!") ##Code below just a cross check if wanted ##matrix # m <- cbind(as.numeric(rownames(data)), data) # par(mfrow = c(1,2)) # p(m) # p(convert_Wavelength2Energy(m)) # # ##df # df <- as.data.frame(cbind(as.numeric(rownames(data)), data)) # p(df) # p(convert_Wavelength2Energy(df)) # # ##RLum.Data.Spectrum # object <- set_RLum(class = "RLum.Data.Spectrum", data = data[,1,drop = FALSE]) # par(mfrow = c(1,2)) # plot_RLum.Data.Spectrum(object, plot.type = "single", par.local = FALSE) # plot_RLum.Data.Spectrum(convert_Wavelength2Energy(object), plot.type = "single", par.local = FALSE) }) 
Luminescence/tests/testthat/test_CW2pX.R0000644000176200001440000000516513231137116017760 0ustar liggesuserscontext("CW2X Conversion Tests") ##load data data(ExampleData.CW_OSL_Curve, envir = environment()) values <- CW_Curve.BosWallinga2012 test_that("Check the example and the numerical values", { testthat::skip_on_cran() values_pLM <- CW2pLM(values) values_pLMi <- CW2pLMi(values, P = 1/20) values_pLMi_alt <- CW2pLMi(values) values_pHMi <- CW2pHMi(values, delta = 40) values_pHMi_alt <- CW2pHMi(values) values_pHMi_alt1 <- CW2pHMi(values, delta = 2) values_pPMi <- CW2pPMi(values, P = 1/10) ##check conversion sum values expect_equal(round(sum(values_pLM), digits = 0),90089) expect_equal(round(sum(values_pLMi[,1:2]), digits = 0),197522) expect_equal(round(sum(values_pLMi_alt[,1:2]), digits = 0),197522) expect_equal(round(sum(values_pHMi[,1:2]), digits = 0),217431) expect_equal(round(sum(values_pHMi_alt[,1:2]), digits = 0),217519) expect_equal(round(sum(values_pHMi_alt1[,1:2]), digits = 0), 221083) expect_equal(round(sum(values_pPMi[,1:2]), digits = 0),196150) }) test_that("Test RLum.Types", { testthat::skip_on_cran() ##load CW-OSL curve data data(ExampleData.CW_OSL_Curve, envir = environment()) object <- set_RLum( class = "RLum.Data.Curve", data = as.matrix(ExampleData.CW_OSL_Curve), curveType = "measured", recordType = "OSL" ) ##transform values expect_is(CW2pLM(object), class = "RLum.Data.Curve") expect_is(CW2pLMi(object), class = "RLum.Data.Curve") expect_is(CW2pHMi(object), class = "RLum.Data.Curve") expect_is(CW2pPMi(object), class = "RLum.Data.Curve") ##test error handling expect_error(CW2pLMi(values, P = 0), regexp = "[CW2pLMi] P has to be > 0!", fixed = TRUE) expect_warning(CW2pLMi(values, P = 10)) expect_error(object = CW2pLM(values = matrix(0, 2))) expect_error(object = CW2pLMi(values = matrix(0, 2))) expect_error(object = CW2pHMi(values = matrix(0, 2))) expect_error(object = CW2pPMi(values = matrix(0, 2))) object@recordType <- "RF" expect_error(object = 
CW2pLM(values = object), regexp = "[CW2pLM()] recordType RF is not allowed for the transformation!", fixed = TRUE) expect_error(object = CW2pLMi(values = object), regexp = "[CW2pLMi()] recordType RF is not allowed for the transformation!", fixed = TRUE) expect_error(object = CW2pHMi(values = object), regexp = "[CW2pHMi()] recordType RF is not allowed for the transformation!", fixed = TRUE) expect_error(object = CW2pPMi(values = object), regexp = "[CW2pPMi()] recordType RF is not allowed for the transformation!", fixed = TRUE) }) Luminescence/tests/testthat/test_analyse_Al2O3C_ITC.R0000644000176200001440000000123113231137116022201 0ustar liggesuserscontext("analyse_Al2O3_ITC") ##Full check test_that("Full check", { skip_on_cran() ##check stops ##RLum-object expect_error(object = analyse_Al2O3_ITC(object = "test")) ##input curve type a <- set_RLum(class = "RLum.Data.Curve", recordType = "OSL", data = matrix(1:20, ncol = 2)) b <- set_RLum(class = "RLum.Data.Curve", recordType = "TL") object <- set_RLum(class = "RLum.Analysis", records = list(a,b)) expect_error(object = analyse_Al2O3_ITC(object)) ##check with example data ##load data data(ExampleData.Al2O3C, envir = environment()) ##run analysis expect_is(analyse_Al2O3C_ITC(data_ITC), "RLum.Results") }) Luminescence/tests/testthat/test_calc_FastRatio.R0000644000176200001440000000267013231137116021731 0ustar liggesuserscontext("calc_FastRatio") data("ExampleData.CW_OSL_Curve") temp <- calc_FastRatio(ExampleData.CW_OSL_Curve, plot = FALSE, verbose = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(temp), c("RLum.Results", "RLum")) expect_equal(length(temp), 5) }) test_that("check values from output", { testthat::skip_on_cran() results <- get_RLum(temp) expect_equal(round(results$fast.ratio, digits = 3), 405.122) expect_equal(round(results$fast.ratio.se, digits = 4), 119.7442) expect_equal(round(results$fast.ratio.rse, digits = 5), 29.55756) expect_equal(results$channels, 
1000) expect_equal(round(results$channel.width, digits = 2), 0.04) expect_equal(results$dead.channels.start, 0) expect_equal(results$dead.channels.end, 0) expect_equal(results$sigmaF, 2.6e-17) expect_equal(results$sigmaM, 4.28e-18) expect_equal(results$stimulation.power, 30.6) expect_equal(results$wavelength, 470) expect_equal(results$t_L1, 0) expect_equal(round(results$t_L2, digits = 6), 2.446413) expect_equal(round(results$t_L3_start, digits = 5), 14.86139) expect_equal(round(results$t_L3_end, digits = 5), 22.29208) expect_equal(results$Ch_L1, 1) expect_equal(results$Ch_L2, 62) expect_equal(results$Ch_L3_start, 373) expect_equal(results$Ch_L3_end, 558) expect_equal(results$Cts_L1, 11111) expect_equal(results$Cts_L2, 65) expect_equal(round(results$Cts_L3, digits = 5), 37.66667) }) Luminescence/tests/testthat/test_calc_TLLxTxRatio.R0000644000176200001440000000270013231137116022165 0ustar liggesuserscontext("calc_TLLxTxRatio") ##load package example data data(ExampleData.BINfileData, envir = environment()) ##convert Risoe.BINfileData into a curve object temp <- Risoe.BINfileData2RLum.Analysis(TL.SAR.Data, pos = 3) Lx.data.signal <- get_RLum(temp, record.id=1) Lx.data.background <- get_RLum(temp, record.id=2) Tx.data.signal <- get_RLum(temp, record.id=3) Tx.data.background <- get_RLum(temp, record.id=4) signal.integral.min <- 210 signal.integral.max <- 230 temp <- calc_TLLxTxRatio(Lx.data.signal, Lx.data.background, Tx.data.signal, Tx.data.background, signal.integral.min, signal.integral.max) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(temp), c("RLum.Results", "RLum")) expect_equal(length(temp), 1) }) test_that("check values from output", { testthat::skip_on_cran() results <- get_RLum(temp) expect_equal(length(results), 10) expect_equal(results$LnLx, 257042) expect_equal(results$LnLx.BG, 4068) expect_equal(results$TnTx, 82298) expect_equal(results$TnTx.BG, 2943) expect_equal(results$net_LnLx, 252974) 
expect_equal(round(results$net_LnLx.Error, digits = 2), 49468.92) expect_equal(results$net_TnTx, 79355) expect_equal(round(results$net_TnTx.Error,2), 21449.72) expect_equal(round(results$LxTx, digits = 6), 3.187877) expect_equal(round(results$LxTx.Error, digits = 6), 1.485073) }) Luminescence/tests/testthat/test_calc_ThermalLifetime.R0000644000176200001440000000615413604172511023113 0ustar liggesuserscontext("calc_ThermalLifetime") ##EXAMPLE 1 ##calculation for two trap-depths with similar frequency factor for different temperatures set.seed(1) temp <- calc_ThermalLifetime( E = c(1.66, 1.70), s = 1e+13, T = 10:20, output_unit = "Ma", verbose = FALSE ) ##EXAMPLE 2 ##profiling of thermal life time for E and s and their standard error temp2 <- calc_ThermalLifetime( E = c(1.600, 0.003), s = c(1e+13,1e+011), T = 20, profiling = TRUE, output_unit = "Ma", verbose = FALSE, plot = FALSE ) test_that("check class and length of output example 1", { testthat::skip_on_cran() expect_s4_class(temp, "RLum.Results") expect_equal(length(temp), 2) }) # test_that("check values from output example 1", { testthat::skip_on_cran() expect_is(temp$lifetimes, c("array", "structure", "vector")) expect_equal(dim(temp$lifetimes), c(1, 2, 11)) ##check results for 10 °C results <- lapply(1:length(10:20), function(x){ temp$lifetimes[,,x] }) expect_equal(round(results[[1]], digits = 3), c("1.66" = 1115.541, "1.7" = 5747.042)) expect_equal(round(results[[2]], digits = 4), c("1.66" = 878.0196, "1.7" = 4497.3585)) expect_equal(round(results[[3]], digits = 4), c("1.66" = 692.2329, "1.7" = 3525.4738)) expect_equal(round(results[[4]], digits = 4), c("1.66" = 546.6658, "1.7" = 2768.3216)) expect_equal(round(results[[5]], digits = 4), c("1.66" = 432.4199, "1.7" = 2177.4436)) expect_equal(round(results[[6]], digits = 4), c("1.66" = 342.6069, "1.7" = 1715.5406)) expect_equal(round(results[[7]], digits = 4), c("1.66" = 271.8854, "1.7" = 1353.8523)) expect_equal(round(results[[8]], digits = 4), c("1.66" = 
216.1065, "1.7" = 1070.1642)) expect_equal(round(results[[9]], digits = 4), c("1.66" = 172.0421, "1.7" = 847.2879)) expect_equal(round(results[[10]], digits = 4), c("1.66" = 137.1765, "1.7" = 671.9020)) expect_equal(round(results[[11]], digits = 4), c("1.66" = 109.5458, "1.7" = 533.6641)) }) test_that("check class and length of output example 2", { testthat::skip_on_cran() expect_s4_class(temp2, c("RLum.Results")) testthat::expect_equal(length(temp2), 2) }) test_that("check values from output example 2", { testthat::skip_on_cran() testthat::expect_is(temp2$lifetimes, class = c("numeric", "vector")) testthat::expect_equal(length(temp2$lifetimes), 1000) testthat::expect_equal(dim(temp2$profiling_matrix), c(1000, 4)) }) test_that("check arguments", { testthat::skip_on_cran() ##missing E and/or s expect_error(calc_ThermalLifetime()) ##profiling settings expect_warning(calc_ThermalLifetime(E = 1.4, s = 1e05, profiling_config = list(n = 10))) expect_error(calc_ThermalLifetime(E = 1.4, s = 1e05, profiling = TRUE, profiling_config = list(E.distribution = "test"))) expect_error(calc_ThermalLifetime(E = 1.4, s = 1e05, profiling = TRUE, profiling_config = list(s.distribution = "test"))) ##output expect_warning(calc_ThermalLifetime(E = 1.4, s = 1e05, output_unit = "test")) expect_output(calc_ThermalLifetime(E = 1.4, s = 1e05, verbose = TRUE)) expect_output(calc_ThermalLifetime(E = c(1.4, 0.001), s = c(1e05,1e03), plot = TRUE, profiling = TRUE)) }) Luminescence/tests/testthat/test_use_DRAC.R0000644000176200001440000000264113242571355020447 0ustar liggesuserscontext("use_DRAC") ##Full check test_that("Test DRAC", { skip_on_cran() ##use manuel example ##create template input <- template_DRAC(preset = "DRAC-example_quartz") ##test expect_s3_class(input, "DRAC.list") ##fill (which also tests the methods) input$`Project ID` <- "DRAC-Example" input$`Sample ID` <- "Quartz" input$`Conversion factors` <- "AdamiecAitken1998" input$`External U (ppm)` <- 3.4 input$`errExternal U (ppm)` <- 
0.51 input$`External Th (ppm)` <- 14.47 input$`errExternal Th (ppm)` <- 1.69 input$`External K (%)` <- 1.2 input$`errExternal K (%)` <- 0.14 input$`Calculate external Rb from K conc?` <- "Y" input$`Calculate internal Rb from K conc?` <- "Y" input$`Scale gammadoserate at shallow depths?` <- "Y" input$`Grain size min (microns)` <- 90 input$`Grain size max (microns)` <- 125 input$`Water content ((wet weight - dry weight)/dry weight) %` <- 5 input$`errWater content %` <- 2 input$`Depth (m)` <- 2.2 input$`errDepth (m)` <- 0.22 input$`Overburden density (g cm-3)` <- 1.8 input$`errOverburden density (g cm-3)` <- 0.1 input$`Latitude (decimal degrees)` <- 30.0000 input$`Longitude (decimal degrees)` <- 70.0000 input$`Altitude (m)` <- 150 input$`De (Gy)` <- 20 input$`errDe (Gy)` <- 0.2 ##run DRAC expect_s4_class(output <- use_DRAC(input), "RLum.Results") ## print method for DRAC.highlights expect_output(print(output$DRAC$highlights), regexp = "TO:GP = errAge") }) Luminescence/tests/testthat/test_plot_AbanicoPlot.R0000644000176200001440000001445113540751607022316 0ustar liggesuserscontext("plot_AbanicoPlot()") test_that("Test examples from the example page", { testthat::skip_on_cran() ## load example data and recalculate to Gray data(ExampleData.DeValues, envir = environment()) ExampleData.DeValues <- ExampleData.DeValues$CA1 ## plot the example data straightforward expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues)) ## now with linear z-scale expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, log.z = FALSE)) ## now with output of the plot parameters expect_is(plot_AbanicoPlot(data = ExampleData.DeValues, output = TRUE), "list") ## now with adjusted z-scale limits expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, zlim = c(10, 200))) ## now with adjusted x-scale limits expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, xlim = c(0, 20))) ## now with rug to indicate individual values in KDE part expect_silent(plot_AbanicoPlot(data = 
ExampleData.DeValues, rug = TRUE)) ## now with a smaller bandwidth for the KDE plot expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, bw = 0.04)) ## now with a histogram instead of the KDE plot expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, hist = TRUE, kde = FALSE)) ## now with a KDE plot and histogram with manual number of bins expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, hist = TRUE, breaks = 20)) ## now with a KDE plot and a dot plot expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, dots = TRUE)) ## now with user-defined plot ratio expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, plot.ratio = 0.5)) ## now with user-defined central value expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, z.0 = 70)) ## now with median as central value expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, z.0 = "median")) ## now with the 17-83 percentile range as definition of scatter expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, z.0 = "median", dispersion = "p17")) ## now with user-defined green line for minimum age model CAM <- calc_CentralDose(ExampleData.DeValues, plot = FALSE) expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, line = CAM, line.col = "darkgreen", line.label = "CAM")) ## now create plot with legend, colour, different points and smaller scale expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, legend = "Sample 1", col = "tomato4", bar.col = "peachpuff", pch = "R", cex = 0.8)) ## now without 2-sigma bar, polygon, grid lines and central value line expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, bar.col = FALSE, polygon.col = FALSE, grid.col = FALSE, y.axis = FALSE, lwd = 0)) ## now with direct display of De errors, without 2-sigma bar expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, bar.col = FALSE, ylab = "", y.axis = FALSE, error.bars = TRUE)) ## now with user-defined axes labels expect_silent(plot_AbanicoPlot(data = 
ExampleData.DeValues, xlab = c("Data error (%)", "Data precision"), ylab = "Scatter", zlab = "Equivalent dose [Gy]")) ## now with minimum, maximum and median value indicated expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, stats = c("min", "max", "median"))) ## now with a brief statistical summary as subheader expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, summary = c("n", "in.2s"))) ## now with another statistical summary expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, summary = c("mean.weighted", "median"), summary.pos = "topleft")) ## now a plot with two 2-sigma bars for one data set expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, bar = c(30, 100))) ## now the data set is split into sub-groups, one is manipulated data.1 <- ExampleData.DeValues[1:30,] data.2 <- ExampleData.DeValues[31:62,] * 1.3 data.3 <- list(data.1, data.2) ## now the two data sets are plotted in one plot expect_silent(plot_AbanicoPlot(data = data.3)) ## now with some graphical modification expect_silent(plot_AbanicoPlot(data = data.3, z.0 = "median", col = c("steelblue4", "orange4"), bar.col = c("steelblue3", "orange3"), polygon.col = c("steelblue1", "orange1"), pch = c(2, 6), angle = c(30, 50), summary = c("n", "in.2s", "median"))) ## create Abanico plot with predefined layout definition expect_silent(plot_AbanicoPlot(data = ExampleData.DeValues, layout = "journal")) ## now with predefined layout definition and further modifications expect_silent(plot_AbanicoPlot(data = data.3, z.0 = "median", layout = "journal", col = c("steelblue4", "orange4"), bar.col = adjustcolor(c("steelblue3", "orange3"), alpha.f = 0.5), polygon.col = c("steelblue3", "orange3"))) ## for further information on layout definitions see documentation ## of function get_Layout() ## now with manually added plot content ## create empty plot with numeric output expect_is(plot_AbanicoPlot(data = ExampleData.DeValues, pch = NA, output = TRUE), "list") }) test_that("Cause full 
function stop", { testthat::skip_on_cran() ##wrong input data expect_error(plot_AbanicoPlot(data = "Michael"), regexp = "Input data format is neither 'data.frame' nor 'RLum.Results'") }) Luminescence/tests/testthat/test_calc_Lamothe2003.R0000644000176200001440000000616613235116332021740 0ustar liggesuserscontext("calc_Kars2008") test_that("Force function to break", { testthat::skip_on_cran() ##argument check ##object expect_error(calc_Lamothe2003(), regexp = "Input for 'object' missing but required!") ##dose_rate.envir expect_error(calc_Lamothe2003(object = NULL), regexp = "Input for 'dose_rate.envir' missing but required!") expect_error(calc_Lamothe2003(object = NULL, dose_rate.envir = 1, dose_rate.source = 1, g_value = 1), regexp = "Input for 'dose_rate.envir' is not of type 'numeric' and/or of length < 2!") ##dose_rate.source expect_error(calc_Lamothe2003(object = NULL, dose_rate.envir = NULL), regexp = "Input for 'dose_rate.source' missing but required!") expect_error(calc_Lamothe2003(object = NULL, dose_rate.envir = c(1,1), dose_rate.source = 1, g_value = 1), regexp = "Input for 'dose_rate.source' is not of type 'numeric' and/or of length < 2!") ##check warnings expect_warning(calc_Lamothe2003(object = data.frame(x = c(0,10,20), y = c(1.4,0.7,2.3), z = c(0.01,0.02, 0.03)), dose_rate.envir = c(1,2,3), dose_rate.source = c(1,2,3), g_value = c(1,1))) ##g_value expect_error(calc_Lamothe2003(object = NULL, dose_rate.envir = NULL, dose_rate.source = NULL), regexp = "Input for 'g_value' missing but required!") ##object expect_error(calc_Lamothe2003(object = NULL, dose_rate.envir = c(1,2,3), dose_rate.source = c(1,2,3), g_value = NULL), regexp = "Unsupported data type for 'object'") expect_error(calc_Lamothe2003(object = set_RLum("RLum.Results"), dose_rate.envir = c(1,2,3), dose_rate.source = c(1,2,3), g_value = NULL)) ##tc expect_error(calc_Lamothe2003(object = set_RLum("RLum.Results"), dose_rate.envir = c(1,2,3), dose_rate.source = c(1,2,3), g_value = c(1,1), 
tc.g_value = 1000), regexp = "If you set 'tc.g_value' you have to provide a value for 'tc' too!") }) test_that("Test the function itself", { testthat::skip_on_cran() ##This is based on the package example ##load data ##ExampleData.BINfileData contains two BINfileData objects ##CWOSL.SAR.Data and TL.SAR.Data data(ExampleData.BINfileData, envir = environment()) ##transform the values from the first position in a RLum.Analysis object object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) ##perform SAR analysis and set rejection criteria results <- analyse_SAR.CWOSL( object = object, signal.integral.min = 1, signal.integral.max = 2, background.integral.min = 900, background.integral.max = 1000, verbose = FALSE, plot = FALSE, onlyLxTxTable = TRUE ) ##run fading correction expect_is(calc_Lamothe2003( object = results, dose_rate.envir = c(1.676 , 0.180), dose_rate.source = c(0.184, 0.003), g_value = c(2.36, 0.6), plot = TRUE, fit.method = "EXP"), class = "RLum.Results") ##run fading correction expect_is(calc_Lamothe2003( object = results, dose_rate.envir = c(1.676 , 0.180), dose_rate.source = c(0.184, 0.003), g_value = c(2.36, 0.6), tc = 1000, tc.g_value = 1200, plot = TRUE, fit.method = "EXP"), class = "RLum.Results") }) Luminescence/tests/testthat/test_calc_FiniteMixture.R0000755000176200001440000000136013231137116022627 0ustar liggesuserscontext("calc_FiniteMixture") data(ExampleData.DeValues, envir = environment()) temp <- calc_FiniteMixture( ExampleData.DeValues$CA1, sigmab = 0.2, n.components = 2, grain.probability = TRUE, verbose = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(temp), c("RLum.Results", "RLum")) expect_equal(length(temp), 10) }) test_that("check values from output example 1", { testthat::skip_on_cran() results <- get_RLum(temp) expect_equal(results$de[1], 31.5299) expect_equal(results$de[2], 72.0333) expect_equal(results$de_err[1], 3.6387) expect_equal(results$de_err[2], 2.4082) 
expect_equal(results$proportion[1], 0.1096) expect_equal(results$proportion[2], 0.8904) }) Luminescence/tests/testthat/test_calc_MinDose.R0000755000176200001440000000206713540751607021410 0ustar liggesuserscontext("calc_MinDose") data(ExampleData.DeValues, envir = environment()) temp <- calc_MinDose(data = ExampleData.DeValues$CA1, sigmab = 0.1, verbose = FALSE, plot = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(temp), c("RLum.Results", "RLum")) expect_equal(length(temp), 9) }) test_that("check values from output example", { testthat::skip_on_cran() results <- get_RLum(temp) expect_equal(round(results$de, digits = 5), 34.31834) expect_equal(round(results$de_err, digits = 6), 2.550964) expect_equal(results$ci_level, 0.95) expect_equal(round(results$ci_lower, digits = 5), 29.37526) expect_equal(round(results$ci_upper, digits = 5), 39.37503) expect_equal(results$par, 3) expect_equal(round(results$sig, digits = 2), 2.07) expect_equal(round(results$p0, digits = 8), 0.01053938) expect_equal(results$mu, NA) expect_equal(round(results$Lmax, digits = 5), -43.57969) expect_equal(round(results$BIC, digits = 4), 106.4405) }) Luminescence/tests/testthat/test_fit_SurfaceExposure.R0000644000176200001440000000717713231137116023047 0ustar liggesuserscontext("fit_SurfaceExposure") data("ExampleData.SurfaceExposure", envir = environment()) d1 <- ExampleData.SurfaceExposure$sample_1 d2 <- ExampleData.SurfaceExposure$sample_2 d3 <- ExampleData.SurfaceExposure$set_1 d4 <- ExampleData.SurfaceExposure$set_2 ## Example data 1 fit <- fit_SurfaceExposure(data = d1, sigmaphi = 5e-10, mu = 0.9, plot = FALSE, verbose = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(fit), c("RLum.Results", "RLum")) expect_equal(length(fit), 5) expect_equal(is(fit$fit), "nls") }) test_that("check values from output example", { testthat::skip_on_cran() expect_equal(round(fit$summary$age), 9893) 
expect_equal(round(fit$summary$age_error), 369) }) # Sub-test - weighted fitting fit <- fit_SurfaceExposure(data = d1, sigmaphi = 5e-10, mu = 0.9, weights = TRUE, plot = FALSE, verbose = FALSE) test_that("check values from output example", { testthat::skip_on_cran() expect_equal(round(fit$summary$age), 9624) expect_equal(round(fit$summary$age_error), 273) }) ## Example data 2 fit <- fit_SurfaceExposure(data = d2, age = 1e4, sigmaphi = 5e-10, Ddot = 2.5, D0 = 40, plot = FALSE, verbose = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(fit), c("RLum.Results", "RLum")) expect_equal(length(fit), 5) expect_equal(is(fit$fit), "nls") }) test_that("check values from output example", { testthat::skip_on_cran() expect_equal(round(fit$summary$mu, 3), 0.904) expect_equal(round(fit$summary$mu_error, 3), 0.007) }) ## Example data 3 fit <- fit_SurfaceExposure(data = d3, age = c(1e3, 1e4, 1e5, 1e6), sigmaphi = 5e-10, plot = FALSE, verbose = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(fit), c("RLum.Results", "RLum")) expect_equal(nrow(fit$summary), 4) expect_equal(length(fit), 5) expect_equal(is(fit$fit), "nls") }) test_that("check values from output example", { testthat::skip_on_cran() expect_equal(round(unique(fit$summary$mu), 3), 0.901) expect_equal(round(unique(fit$summary$mu_error), 3), 0.002) }) ## Example data 4 fit <- fit_SurfaceExposure(data = d4, age = c(1e2, 1e3, 1e4, 1e5, 1e6), sigmaphi = 5e-10, Ddot = 1.0, D0 = 40, plot = FALSE, verbose = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(fit), c("RLum.Results", "RLum")) expect_equal(nrow(fit$summary), 5) expect_equal(length(fit), 5) expect_equal(is(fit$fit), "nls") }) test_that("check values from output example", { testthat::skip_on_cran() expect_equal(round(unique(fit$summary$mu), 3), 0.899) expect_equal(round(unique(fit$summary$mu_error), 3), 0.002) }) #### WARNINGS & 
FAILURES test_that("not enough parameters provided", { testthat::skip_on_cran() expect_message( fit_SurfaceExposure(data = d1, plot = FALSE, verbose = TRUE), "Unable to fit the data" ) expect_equal( is(fit_SurfaceExposure(data = d2, plot = FALSE, verbose = FALSE)$fit), "simpleError" ) expect_warning( fit_SurfaceExposure(data = d4, age = c(1e2, 1e3, 1e4, 1e5, 1e6), sigmaphi = 5e-10, Ddot = 1.0, D0 = 40, weights = TRUE, plot = FALSE, verbose = TRUE), "is not supported when" ) expect_error( fit_SurfaceExposure(data = d4, age = 1e4, sigmaphi = 5e-10, Ddot = 1.0, D0 = 40, plot = FALSE, verbose = FALSE), "'age' must be of the same length" ) }) Luminescence/tests/testthat/test_convert_X2CSV.R0000644000176200001440000000165013231137116021455 0ustar liggesuserscontext("convert_X2CSV") test_that("test convert functions", { testthat::skip_on_cran() ##test for errors expect_error(convert_BIN2CSV(file = "", export = FALSE), regexp = "[read_BIN2R()] File does not exist!", fixed = TRUE) expect_error(convert_Daybreak2CSV(file = "", export = FALSE), regexp = "[read_Daybreak2R()] file name does not seem to exist.", fixed = TRUE) #expect_error(convert_PSL2CSV(file = "", export = FALSE)) expect_error(suppressWarnings(convert_XSYG2CSV(file = "", export = FALSE))) ##test conversion itself ##BIN2CSV data(ExampleData.BINfileData, envir = environment()) expect_is(convert_BIN2CSV(subset(CWOSL.SAR.Data, POSITION == 1), export = FALSE), "list") ##XSYG2CSV data(ExampleData.XSYG, envir = environment()) expect_is(convert_XSYG2CSV(OSL.SARMeasurement$Sequence.Object[1:10], export = FALSE), "list") }) Luminescence/tests/testthat/test_RLum.Data.Image.R0000644000176200001440000000244613517336174021640 0ustar liggesuserscontext("RLum.Data.Image") test_that("check class ", { testthat::skip_on_cran() ##load example data data(ExampleData.RLum.Data.Image, envir = environment()) ##set-method ##set empty class expect_s4_class(set_RLum(class = "RLum.Data.Image"), "RLum.Data.Image") ##overwrite only data 
expect_s4_class(set_RLum(class = "RLum.Data.Image", data = set_RLum("RLum.Data.Image")), "RLum.Data.Image") ##show-method ##show example data expect_output(show(ExampleData.RLum.Data.Image)) ##get-method expect_error(get_RLum(ExampleData.RLum.Data.Image, info.object = 1), regexp = "'info.object' has to be a character!") expect_error(get_RLum(ExampleData.RLum.Data.Image, info.object = "unknown")) expect_is(get_RLum(ExampleData.RLum.Data.Image, info.object = "NumFrames"), "integer") ##names expect_is(names_RLum(ExampleData.RLum.Data.Image), class = "character") ##conversions ##from matrix and to matrix expect_is(as(matrix(1:10, ncol = 2), "RLum.Data.Image"), "RLum.Data.Image") expect_is(as(ExampleData.RLum.Data.Image, "matrix"), "matrix") ##from data.frame and to data.frame df <- as.data.frame(as(ExampleData.RLum.Data.Image, "matrix")) expect_is(as(df, "RLum.Data.Image"), "RLum.Data.Image") expect_is(as(ExampleData.RLum.Data.Image, "data.frame"), "data.frame") }) Luminescence/tests/testthat/test_calc_AverageDose.R0000644000176200001440000000127113231137116022216 0ustar liggesuserscontext("calc_AverageDose") data(ExampleData.DeValues, envir = environment()) temp <- calc_AverageDose(ExampleData.DeValues$CA1[1:56,], sigma_m = 0.1, plot = FALSE, verbose = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(temp), c("RLum.Results", "RLum")) expect_equal(length(temp), 3) }) test_that("check summary output", { testthat::skip_on_cran() results <- get_RLum(temp) expect_equal(round(results$AVERAGE_DOSE, digits = 4), 65.3597) expect_equal(round(results$SIGMA_D, digits = 4), 0.3092) expect_equal(round(results$L_MAX, digits = 5), -19.25096) }) Luminescence/tests/testthat/test_subset_RLum.R0000644000176200001440000000210213241027635021312 0ustar liggesuserscontext("subset_RLum") # RLum.Analysis ----------------------------------------------------------- test_that("subset RLum.Analysis", { testthat::skip_on_cran() 
data("ExampleData.RLum.Analysis") temp <- IRSAR.RF.Data ## subset.RLum.Analysis() - S3 method ### empty call expect_s4_class(subset(temp), "RLum.Analysis") expect_length(subset(temp), length(temp)) expect_identical(subset(temp)[[1]], temp[[1]]) ### errors expect_error(subset(temp, LTYPE == "RF"), regexp = "Valid terms are") expect_null(subset(temp, recordType == "xx")) ### valid expect_is(subset(temp, recordType == "RF"), class = "RLum.Analysis") expect_is(subset(temp, recordType == "RF")[[1]], class = "RLum.Data.Curve") expect_length(subset(temp, recordType == "RF"), n = length(temp)) ## get_RLum(, subset = ()) expect_is(get_RLum(temp, subset = recordType == "RF"), class = "RLum.Analysis") expect_is(get_RLum(temp, subset = recordType == "RF")[[1]], class = "RLum.Data.Curve") expect_length(get_RLum(temp, subset = recordType == "RF"), n = length(temp)) }) Luminescence/tests/testthat/test_merge_RisoeBINfileData.R0000644000176200001440000000121613231137116023271 0ustar liggesuserscontext("merge_RisoeBINfileData") ##Full check test_that("Test merging", { skip_on_cran() ##expect error expect_error(merge_Risoe.BINfileData(input.objects = "data")) expect_error(merge_Risoe.BINfileData(input.objects = c("data", "data2"))) expect_error(merge_Risoe.BINfileData(input.objects = list("data", "data2")), regexp = "[merge_Risoe.BINfileData()] Input list does not contain Risoe.BINfileData objects!", fixed = TRUE) ##expect success data(ExampleData.BINfileData, envir = environment()) object1 <- CWOSL.SAR.Data object2 <- CWOSL.SAR.Data expect_is(merge_Risoe.BINfileData(c(object1, object2)), "Risoe.BINfileData") }) Luminescence/tests/testthat/test_calc_HomogeneityTest.R0000755000176200001440000000146213231137116023165 0ustar liggesuserscontext("calc_HomogeneityTest") ##use the data example given by Galbraith (2003) df <- data.frame( x = c(30.1, 53.8, 54.3, 29.0, 47.6, 44.2, 43.1), y = c(4.8, 7.1, 6.8, 4.3, 5.2, 5.9, 3.0)) temp <- calc_HomogeneityTest(df) test_that("check class and 
length of output", { testthat::skip_on_cran() expect_equal(is(temp), c("RLum.Results", "RLum")) expect_equal(length(temp), 3) }) test_that("check values from output example", { testthat::skip_on_cran() results <- get_RLum(temp) ##test the normal expect_equal(results$n, 7) expect_equal(round(results$g.value, 4), 19.2505) expect_equal(results$df, 6) expect_equal(round(results$P.value,3), 0.004) ##test the unlogged version temp <- calc_HomogeneityTest(df, log = FALSE)$summary expect_equal(round(temp$P.value,3),0.001) }) Luminescence/tests/testthat/test_analyse_Al2O3C_CrossTalk.R0000644000176200001440000000037713231137116023501 0ustar liggesuserscontext("analyse_Al2O3_CrossTalk") ##Full check test_that("Full check", { skip_on_cran() ##load data data(ExampleData.Al2O3C, envir = environment()) ##run analysis expect_is(analyse_Al2O3C_CrossTalk(data_CrossTalk), "RLum.Results") }) Luminescence/tests/testthat/test_template_DRAC.R0000644000176200001440000000361313243270653021464 0ustar liggesuserscontext("template_DRAC") ##Full check test_that("Check template creation ", { skip_on_cran() ## test output class expect_is(template_DRAC(), "DRAC.list") expect_is(template_DRAC(notification = FALSE), "DRAC.list") expect_is(template_DRAC(nrow = 10, notification = FALSE), "DRAC.list") ## test presets expect_identical(as.numeric(template_DRAC(notification = FALSE, preset = "quartz_coarse")$`a-value`), 0.035) expect_identical(as.numeric(template_DRAC(notification = FALSE, preset = "quartz_fine")$`a-value`), 0.035) expect_identical(as.numeric(template_DRAC(notification = FALSE, preset = "feldspar_coarse")$`a-value`), 0.08) expect_identical(as.numeric(template_DRAC(notification = FALSE, preset = "polymineral_fine")$`a-value`), 0.08) expect_identical(as.numeric(template_DRAC(notification = FALSE, preset = "DRAC-example_quartz")$`De (Gy)`), 20) expect_identical(as.numeric(template_DRAC(notification = FALSE, preset = "DRAC-example_feldspar")$`De (Gy)`), 15) 
expect_identical(as.numeric(template_DRAC(notification = FALSE, preset = "DRAC-example_polymineral")$`De (Gy)`), 204.47) expect_true( do.call(all.equal, as.list(template_DRAC(nrow = 2, notification = FALSE, preset = "DRAC-example_quartz")$`De (Gy)`)) ) ## misc tests expect_true(all(is.na(template_DRAC(notification = FALSE)))) expect_true(!all(is.na(template_DRAC(preset = "DRAC-example_polymineral", notification = FALSE)))) expect_equal(length(template_DRAC(notification = FALSE)), 53) expect_equal(length(template_DRAC(nrow = 10, notification = FALSE)[[1]]), 10) ## expect failure expect_error(template_DRAC(nrow = -1, notification = FALSE)) expect_error(template_DRAC(nrow = 34, notification = FALSE)) expect_error(template_DRAC(preset = "this_one_does_not_exist")) expect_error(template_DRAC(preset = c("this_one_does_not_exist", "this_one_neither"))) expect_error(template_DRAC(preset = 999)) }) Luminescence/tests/testthat/test_smooth_RLum.R0000644000176200001440000000206713517343010021322 0ustar liggesuserscontext("smooth_RLum") data(ExampleData.CW_OSL_Curve, envir = environment()) temp <- set_RLum( class = "RLum.Data.Curve", recordType = "OSL", data = as.matrix(ExampleData.CW_OSL_Curve) ) ##create RLum.Analysis object temp_analysis <- set_RLum("RLum.Analysis", records = list(temp, temp)) test_that("check class and length of output", { testthat::skip_on_cran() ##standard tests expect_is(temp, class = "RLum.Data.Curve", info = NULL, label = NULL) expect_is(smooth_RLum(temp), class = "RLum.Data.Curve", info = NULL, label = NULL) ##test on a list ##RLum list expect_is(smooth_RLum(list(temp, temp)), "list") ##normal list expect_is(smooth_RLum(list(a = 1, b = 2)), "list") ##test on an RLum.Analysis-object expect_s4_class(smooth_RLum(temp_analysis), "RLum.Analysis") }) test_that("check values from output example", { testthat::skip_on_cran() expect_equivalent(round(mean(smooth_RLum(temp, k = 5)[,2], na.rm = TRUE), 0), 100) expect_equivalent(round(mean(smooth_RLum(temp, k = 
10)[,2], na.rm = TRUE), 0), 85) }) Luminescence/tests/testthat/test_plot_RLum.Analysis.R0000644000176200001440000000214313234562731022555 0ustar liggesuserscontext("plot_RLum.Analysis") test_that("Test the basic plot functionality", { testthat::skip_on_cran() ##create dataset ##load data data(ExampleData.BINfileData, envir = environment()) ##convert values for position 1 temp <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) ##Basic plot expect_silent(plot_RLum.Analysis( temp, subset = list(recordType = "TL"), combine = TRUE, norm = TRUE, abline = list(v = c(110)) )) ##test arguments ##ylim expect_silent(plot_RLum.Analysis( temp, subset = list(recordType = "TL"), combine = FALSE, norm = TRUE, ylim = c(1,100), xlim = c(1,100), abline = list(v = c(110)) )) ##test arguments ##ylim - warning expect_warning(plot_RLum.Analysis( temp, subset = list(recordType = "TL"), combine = FALSE, norm = TRUE, ylim = c(1,200), xlim = c(1,100), abline = list(v = c(110)) )) ##test arguments ##ylim - warning expect_warning(plot_RLum.Analysis( temp, subset = list(recordType = "TL"), combine = FALSE, norm = TRUE, log = "y" )) }) Luminescence/tests/testthat/test_convert_Activity2Concentration.R0000644000176200001440000000153213231137116025214 0ustar liggesuserscontext("convert_Activity2Concentration") data <- data.frame( NUCLIDES = c("U-238", "Th-232", "K-40"), VALUE = c(40,80,100), VALUE_ERROR = c(4,8,10), stringsAsFactors = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(convert_Activity2Concentration(data)), c("RLum.Results", "RLum")) expect_equal(is(convert_Activity2Concentration(data, verbose = FALSE)), c("RLum.Results", "RLum")) expect_equal(length(convert_Activity2Concentration(data)), 1) expect_error(convert_Activity2Concentration()) expect_error(convert_Activity2Concentration(data = data.frame(a = 1, b = 2))) }) test_that("check values from output example", { testthat::skip_on_cran() results <- 
convert_Activity2Concentration(data) expect_equal(round(sum(results$data$`CONC. ERROR (ppm/%)`),5), 2.32815) }) Luminescence/tests/testthat/test_calc_FadingCorr.R0000644000176200001440000000230213231137116022043 0ustar liggesuserscontext("calc_FadingCorr") set.seed(1) temp <- calc_FadingCorr( age.faded = c(0.1,0), g_value = c(5.0, 1.0), tc = 2592000, tc.g_value = 172800, n.MC = 100, verbose = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(temp), c("RLum.Results", "RLum")) expect_equal(length(temp), 2) ##check the verbose mode expect_is(calc_FadingCorr( age.faded = c(0.1,0), g_value = c(5.0, 1.0), tc = 2592000, tc.g_value = 172800, n.MC = 1, verbose = TRUE), class = "RLum.Results") }) test_that("check values from output example 1", { testthat::skip_on_cran() results <- get_RLum(temp) expect_equal(results$AGE, 0.1168) expect_equal(results$AGE.ERROR, 0.0035) expect_equal(results$AGE_FADED, 0.1) expect_equal(results$AGE_FADED.ERROR, 0) expect_equal(results$G_VALUE, 5.312393) expect_equal(round(results$G_VALUE.ERROR, 5), 1.01190) expect_equal(results$KAPPA, 0.02307143) expect_equal(results$KAPPA.ERROR, 0.00439463) expect_equal(results$TC, 8.213721e-05) expect_equal(results$TC.G_VALUE, 5.475814e-06) expect_equal(results$n.MC, 100) expect_equal(results$OBSERVATIONS, 100) expect_equal(results$SEED, NA) }) Luminescence/tests/testthat/test_write_R2BIN.R0000644000176200001440000000337013425064354021107 0ustar liggesuserscontext("write_R2BIN") test_that("write to empty connection", { testthat::skip_on_cran() #Unit test for write_BIN2R() function #create data file data(ExampleData.BINfileData, envir = environment()) ##empty RisoeBINfileData object empty <- set_Risoe.BINfileData() ##replace the raw by numeric CWOSL.SAR.Data@METADATA$VERSION <- as.numeric(CWOSL.SAR.Data@METADATA$VERSION) CWOSL.SAR.Data@METADATA[] <- lapply(CWOSL.SAR.Data@METADATA, function(x){ if(is.factor(x)){ as.character(x) }else{ x } }) ##combing with existing 
BIN-file object new <- as.data.frame( data.table::rbindlist(l = list(empty@METADATA,CWOSL.SAR.Data@METADATA),fill = TRUE), stringsAsFactors = FALSE) ##new object new <- set_Risoe.BINfileData(METADATA = new, DATA = CWOSL.SAR.Data@DATA) ##replace NA values new@METADATA[is.na(new@METADATA)] <- 0 ##replace RECTYPE new@METADATA$RECTYPE <- 1 ##reduce files size considerably down to two records new <- subset(new, ID == 1:2) ##create files path <- tempfile() write_R2BIN(object = new, file = paste0(path, "BINfile_V3.bin"), version = "03") write_R2BIN(object = new, file = paste0(path, "BINfile_V4.bin"), version = "04") write_R2BIN(object = new, file = paste0(path, "BINfile_V65binx"), version = "05") write_R2BIN(object = new, file = paste0(path, "BINfile_V6.binx"), version = "06") write_R2BIN(object = new, file = paste0(path, "BINfile_V7.binx"), version = "07") write_R2BIN(object = new, file = paste0(path, "BINfile_V8.binx"), version = "08") ##catch errors expect_error(write_R2BIN(object = "a", file = ""), "[write_R2BIN()] Input object is not of type Risoe.BINfileData!", fixed = TRUE) expect_error(write_R2BIN(object = set_Risoe.BINfileData(), file = "")) }) Luminescence/tests/testthat/test_app_RLum.R0000644000176200001440000000044313417222427020574 0ustar liggesuserscontext("app_RLum") test_that("Simply check app frame", { skip_on_cran() if("RLumShiny" %in% installed.packages()){ expect_message(app_RLum(app = "abc"), regexp = "Invalid app name") }else{ expect_error(app_RLum(app = "abc"), regexp = "Shiny applications require") } }) Luminescence/tests/testthat/test_fit_CWCurve.R0000644000176200001440000000133413231137116021227 0ustar liggesuserscontext("fit_CWCurve") data(ExampleData.CW_OSL_Curve, envir = environment()) fit <- fit_CWCurve(values = ExampleData.CW_OSL_Curve, main = "CW Curve Fit", n.components.max = 4, log = "x", plot = FALSE) test_that("check class and length of output", { testthat::skip_on_cran() expect_equal(is(fit), c("RLum.Results", "RLum")) 
expect_equal(length(fit), 3) }) test_that("check values from output example", { testthat::skip_on_cran() expect_equal(fit$data$n.components, 3) expect_equal(round(fit$data$I01, digits = 0), 2388) expect_equal(round(fit$data$lambda1, digits = 1), 4.6) expect_equal(round(fit$data$`pseudo-R^2`, digits = 0), 1) }) Luminescence/tests/testthat/test_RLum.Data.Curve.R0000644000176200001440000000175613422473312021674 0ustar liggesuserscontext("RLum.Data.Curve") test_that("check class", { testthat::skip_on_cran() ##set empty curve object and show it expect_output(show(set_RLum(class = "RLum.Data.Curve"))) ##check replacements fo object <- set_RLum(class = "RLum.Data.Curve") expect_s4_class(set_RLum(class = "RLum.Data.Curve", data = object), class = "RLum.Data.Curve") ##check get_RLum object <- set_RLum(class = "RLum.Data.Curve", data = object, info = list(a = "test")) expect_warning(get_RLum(object, info.object = "est"), regexp = "Invalid info.object name") ##test names expect_type(names(object), "character") ##test bin expect_warning(bin_RLum.Data(object, bin_size = -2), "Argument 'bin_size' invalid, nothing was done!") ##check conversions expect_s4_class(as(object = list(1:10), Class = "RLum.Data.Curve"), "RLum.Data.Curve") expect_type(as(object = object, Class = "list"), "list") expect_s4_class(as(object = matrix(1:10,ncol = 2), Class = "RLum.Data.Curve"), "RLum.Data.Curve") }) Luminescence/tests/testthat/test_read_Daybreak2R.R0000644000176200001440000000136013231137116021767 0ustar liggesuserscontext("test_read_Daybreak2R") test_that("Test functionality", { testthat::skip_on_cran() ##TXT ##basic import options expect_is(read_Daybreak2R( file = system.file("extdata/Daybreak_TestFile.txt", package = "Luminescence") ), "list") ##verbose off expect_is(read_Daybreak2R( file = system.file("extdata/Daybreak_TestFile.txt", package = "Luminescence"), verbose = FALSE ), "list") ##txtProgressbar off expect_is(read_Daybreak2R( file = system.file("extdata/Daybreak_TestFile.txt", 
package = "Luminescence"), txtProgressBar = FALSE ), "list") ##DAT ##basic import options expect_is(read_Daybreak2R( file = system.file("extdata/Daybreak_TestFile.DAT", package = "Luminescence") ), "list") }) Luminescence/tests/testthat.R0000644000176200001440000000010413231137116016002 0ustar liggesuserslibrary(testthat) library(Luminescence) test_check("Luminescence") Luminescence/src/0000755000176200001440000000000013604173345013461 5ustar liggesusersLuminescence/src/create_UID.cpp0000644000176200001440000000224713231137116016126 0ustar liggesusers// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Title: create_UID() // Author: Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) // Contact: sebastian.kreutzer@u-bordeaux-montaigne.fr // Version: 0.1.0 [2016-01-26] // Purpose: The purpose of this function is to create a unique ID for RLum objects based on the system time //and a random number. // +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #include #include using namespace Rcpp; // [[Rcpp::export("create_UID")]] CharacterVector create_UID() { //define variables CharacterVector random; time_t rawtime; struct tm * timeinfo; char timestamp [80]; //set date + timestamp (code snippet taken from C++ reference page) time (&rawtime); timeinfo = localtime (&rawtime); strftime (timestamp,80,"%Y-%m-%d-%I:%M.",timeinfo); //get time information and add a random number //according to the CRAN policy the standard C-function, rand(), even sufficient here, is not allowed random = runif(1); //combine and return results return timestamp + Rcpp::as(random); } Luminescence/src/src_analyse_IRSARRF_SRS.cpp0000644000176200001440000001512613371033672020413 0ustar liggesusers// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Title: src_analyse_IRSARRF_SRS() // Author: Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne 
(France) // Contact: sebastian.kreutzer@u-bordeaux-montaigne.fr // Version: 0.3.5 [2017-02-06] // Purpose: // // Function calculates the squared residuals for the R function analyse_IRSAR.RF() // including MC runs for the obtained minimum. The function allows a horizontal and // a vertical sliding of the curve // +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #include #include // [[Rcpp::depends(RcppArmadillo)]] using namespace Rcpp; // [[Rcpp::export("src_analyse_IRSARRF_SRS")]] RcppExport SEXP analyse_IRSARRF_SRS(NumericVector values_regenerated_limited, NumericVector values_natural_limited, NumericVector vslide_range, int n_MC, bool trace = false ){ //check for the vslide_range() if(vslide_range.length() > 1e+08){ stop("[:::src_analyse_IRSAR_SRS()] 'vslide_range' exceeded maximum size (1e+08)!"); } //pre-define variables NumericVector residuals = values_natural_limited.length(); NumericVector results = values_regenerated_limited.size() - values_natural_limited.size(); NumericVector results_vector_min_MC = n_MC; //variables for the algorithm int v_length; int v_index; NumericVector v_leftright(2); //the virtual vector NumericVector t_leftright(2); //the test points NumericVector c_leftright(2); //the calculation //(1) calculate sum of the squared residuals // this will be used to find the best fit of the curves (which is the minimum) //initialise values v_length = vslide_range.length(); v_index = 0; v_leftright[0] = 0; v_leftright[1] = vslide_range.length() - 1; if(v_length == 1){ t_leftright[0] = 0; t_leftright[1] = 0; }else{ t_leftright[0] = v_length/3; t_leftright[1] = 2 * v_length/3; } //***TRACE**** if(trace == true){ Rcout << "\n\n [:::src_analyse_IRSAR_SRS()]"; Rcout << "\n\n--- Inititalisation --- \n "; Rcout << "\n >> v_leftright: " << v_leftright; Rcout << "\n >> t_leftright: " << t_leftright; Rcout << "\n\n --- Optimisation --- \n "; Rcout << "\n 
---------------------------------------------------------------------------------------------------------"; Rcout << "\n v_length \t\t v_leftright \t\t c_leftright \t\t\t\t absolute offset"; Rcout << "\n ---------------------------------------------------------------------------------------------------------"; } //start loop do { for (int t=0;t c_leftright[1]){ v_index = v_leftright[1]; //set index to right test index //update vector window (the right remains the same this time) v_leftright[0] = t_leftright[0]; //update window length v_length = v_leftright[1] - v_leftright[0]; }else{ v_length = 1; } //update test point index t_leftright[0] = v_leftright[0] + v_length/3; t_leftright[1] = v_leftright[0] + (2 * (v_length/3)); //***TRACE**** if(trace == true){ Rcout << "\n " << v_length << " \t\t\t " << v_leftright << " \t\t " << c_leftright << " \t\t\t " << vslide_range[v_index]; } } while (v_length > 1); //***TRACE**** if(trace == true){ Rcout << "\n ---------------------------------------------------------------------------------------------------------"; Rcout << "\n >> SRS minimum: \t\t " << c_leftright[0]; Rcout << "\n >> Vertical offset index: \t " << v_index + 1; Rcout << "\n >> Vertical offset absolute: \t " << vslide_range[v_index] << "\n\n"; } //(2) error calculation //use this values to bootstrap and find minimum values and to account for the variation //that may result from this method itself (the minimum lays within a valley of minima) // //using the obtained sliding vector and the function RcppArmadillo::sample() (which equals the //function sample() in R, but faster) //http://gallery.rcpp.org/articles/using-the-Rcpp-based-sample-implementation //this follows the way described in Frouin et al., 2017 ... still ... 
for (int i=0; i #include #include #include using namespace Rcpp; // [[Rcpp::export]] NumericMatrix src_get_XSYG_curve_values(std::string s) { //00: count pairs int pairs = std::count(s.begin(), s.end(), ';') + 1; //01: replace all ; by , std::replace(s.begin(), s.end(), ';', ','); //02: set needed matrix NumericMatrix m(pairs, 2); //03: set variables std::istringstream ss(s); std::string value; int i = 0; int sw = 0; //04: loop over string and convert to double while (std::getline(ss, value, ',')) { if (sw % 2 == 0){ m(i,0) = atof(value.c_str()); }else{ m(i,1) = atof(value.c_str()); i++; } sw++; } return m; } Luminescence/src/src_create_RLumDataCurve_matrix.cpp0000644000176200001440000000703613571743147022435 0ustar liggesusers// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Title: src_create_RLumDataCurve_matrix() // Author: Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) // Contact: sebastian.kreutzer@u-bordeaux-montaigne.fr // Version: 0.1.3 [2019-09-19] // Purpose: Function to create the RLum.Data.Curve() matrix ... faster than in R itself // - Mainly used by the function Risoe.BINfileData2RLum.Data.Curve() // +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #include using namespace Rcpp; // ----------------------------------------------------------------------------------------------- // Define own function to create a sequence for the x-axis // .. but we do not export them to avoid side effects, as this function is not the same as the // .. base R function seq() // .. 
no export NumericVector seq_RLum(double from, double to, double length_out) { //calculate by double by = (to - from) / length_out; //set sequence vector and so set the first channel NumericVector sequence(static_cast(length_out), (from + by)); //loop and create sequence for (int i=1; i < static_cast(length_out); i++) sequence[i] = sequence[i-1] + by; return sequence; } // ----------------------------------------------------------------------------------------------- // The function we want to export // [[Rcpp::export("src_create_RLumDataCurve_matrix")]] NumericMatrix create_RLumDataCurve_matrix( NumericVector DATA, double VERSION, int NPOINTS, String LTYPE, double LOW, double HIGH, double AN_TEMP, int TOLDELAY, int TOLON, int TOLOFF ){ //generate X vectors if(NPOINTS > 0){ //set needed vectors and predefine matrix NumericVector X(NPOINTS); NumericMatrix curve_matrix(NPOINTS,2); //fill x column for the case we have a TL curve if(LTYPE == "TL" && VERSION >= 4.0){ //provide a fallback for non-conform BIN/BINX-files, otherwise the //the TL curves are wrong withouth having a reason. if((TOLON == 0) & (TOLOFF == 0) & (TOLDELAY == 0)){ Rcout << "[src_create_RLumDataCurve_matrix()] BIN/BINX-file non-conform. 
TL curve may be wrong!\n"; TOLOFF = NPOINTS; } //the heating curve consists of three vectors that needed to //be combined // //(A) - the start ramping NumericVector heat_ramp_start = seq_RLum(LOW,AN_TEMP,static_cast(TOLDELAY)); // //(B) - the plateau //B is simply TOLON // //(C) - the end ramping NumericVector heat_ramp_end = seq_RLum(AN_TEMP, HIGH, static_cast(TOLOFF)); //set index counters int c = 0; //fill vector for temperature for(int i = 0; i < X.length(); i++){ if(i < heat_ramp_start.length()){ X[i] = heat_ramp_start[i]; }else if(i >= heat_ramp_start.length() && i < heat_ramp_start.length() + static_cast(TOLON)){ X[i] = AN_TEMP; }else if(i >= heat_ramp_start.length() + TOLON){ X[i] = heat_ramp_end[c]; c++; } } }else{ X = seq_RLum(LOW, HIGH, static_cast(NPOINTS)); } //set final matrix curve_matrix.column(0) = X; curve_matrix.column(1) = DATA; return(curve_matrix); }else{ //set final matrix for the case NPOINTS <= 0 //fill this with NA values NumericMatrix curve_matrix(1,2); curve_matrix(0,0) = NumericVector::get_na(); curve_matrix(0,1) = NumericVector::get_na(); return(curve_matrix); } } Luminescence/src/RcppExports.cpp0000644000176200001440000000752413604173202016456 0ustar liggesusers// Generated by using Rcpp::compileAttributes() -> do not edit by hand // Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393 #include #include using namespace Rcpp; // create_UID CharacterVector create_UID(); RcppExport SEXP _Luminescence_create_UID() { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; rcpp_result_gen = Rcpp::wrap(create_UID()); return rcpp_result_gen; END_RCPP } // analyse_IRSARRF_SRS RcppExport SEXP analyse_IRSARRF_SRS(NumericVector values_regenerated_limited, NumericVector values_natural_limited, NumericVector vslide_range, int n_MC, bool trace); RcppExport SEXP _Luminescence_analyse_IRSARRF_SRS(SEXP values_regenerated_limitedSEXP, SEXP values_natural_limitedSEXP, SEXP vslide_rangeSEXP, SEXP n_MCSEXP, SEXP traceSEXP) { BEGIN_RCPP 
Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< NumericVector >::type values_regenerated_limited(values_regenerated_limitedSEXP); Rcpp::traits::input_parameter< NumericVector >::type values_natural_limited(values_natural_limitedSEXP); Rcpp::traits::input_parameter< NumericVector >::type vslide_range(vslide_rangeSEXP); Rcpp::traits::input_parameter< int >::type n_MC(n_MCSEXP); Rcpp::traits::input_parameter< bool >::type trace(traceSEXP); rcpp_result_gen = Rcpp::wrap(analyse_IRSARRF_SRS(values_regenerated_limited, values_natural_limited, vslide_range, n_MC, trace)); return rcpp_result_gen; END_RCPP } // create_RLumDataCurve_matrix NumericMatrix create_RLumDataCurve_matrix(NumericVector DATA, double VERSION, int NPOINTS, String LTYPE, double LOW, double HIGH, double AN_TEMP, int TOLDELAY, int TOLON, int TOLOFF); RcppExport SEXP _Luminescence_create_RLumDataCurve_matrix(SEXP DATASEXP, SEXP VERSIONSEXP, SEXP NPOINTSSEXP, SEXP LTYPESEXP, SEXP LOWSEXP, SEXP HIGHSEXP, SEXP AN_TEMPSEXP, SEXP TOLDELAYSEXP, SEXP TOLONSEXP, SEXP TOLOFFSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< NumericVector >::type DATA(DATASEXP); Rcpp::traits::input_parameter< double >::type VERSION(VERSIONSEXP); Rcpp::traits::input_parameter< int >::type NPOINTS(NPOINTSSEXP); Rcpp::traits::input_parameter< String >::type LTYPE(LTYPESEXP); Rcpp::traits::input_parameter< double >::type LOW(LOWSEXP); Rcpp::traits::input_parameter< double >::type HIGH(HIGHSEXP); Rcpp::traits::input_parameter< double >::type AN_TEMP(AN_TEMPSEXP); Rcpp::traits::input_parameter< int >::type TOLDELAY(TOLDELAYSEXP); Rcpp::traits::input_parameter< int >::type TOLON(TOLONSEXP); Rcpp::traits::input_parameter< int >::type TOLOFF(TOLOFFSEXP); rcpp_result_gen = Rcpp::wrap(create_RLumDataCurve_matrix(DATA, VERSION, NPOINTS, LTYPE, LOW, HIGH, AN_TEMP, TOLDELAY, TOLON, TOLOFF)); return rcpp_result_gen; END_RCPP } // 
src_get_XSYG_curve_values NumericMatrix src_get_XSYG_curve_values(std::string s); RcppExport SEXP _Luminescence_src_get_XSYG_curve_values(SEXP sSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< std::string >::type s(sSEXP); rcpp_result_gen = Rcpp::wrap(src_get_XSYG_curve_values(s)); return rcpp_result_gen; END_RCPP } static const R_CallMethodDef CallEntries[] = { {"_Luminescence_create_UID", (DL_FUNC) &_Luminescence_create_UID, 0}, {"_Luminescence_analyse_IRSARRF_SRS", (DL_FUNC) &_Luminescence_analyse_IRSARRF_SRS, 5}, {"_Luminescence_create_RLumDataCurve_matrix", (DL_FUNC) &_Luminescence_create_RLumDataCurve_matrix, 10}, {"_Luminescence_src_get_XSYG_curve_values", (DL_FUNC) &_Luminescence_src_get_XSYG_curve_values, 1}, {NULL, NULL, 0} }; RcppExport void R_init_Luminescence(DllInfo *dll) { R_registerRoutines(dll, NULL, CallEntries, NULL, NULL); R_useDynamicSymbols(dll, FALSE); } Luminescence/vignettes/0000755000176200001440000000000013604173345014702 5ustar liggesusersLuminescence/vignettes/HowTo_analyse_Al2O3.Rmd0000644000176200001440000002512413571743302021025 0ustar liggesusers--- title: "How to Analyse Al2O3:C Measurements" author: "Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montagine, France" date: "Last modified: `r Sys.Date()` ('Luminescence' version: `r packageVersion('Luminescence')`)" output: rmarkdown::html_vignette: fig_caption: yes number_sections: yes toc: yes vignette: > %\VignetteEncoding{UTF-8} --- # Scope The package 'Luminescence' offers three distinct functions dealing with the analysis of Al$_{2}$O$_{3}$:C chip measurements: 1. `analyse_Al2O3C_ITC()` 2. `analyse_Al2O3C_CrossTalk()` 3. `analyse_Al2O3C_Measurement()` Only the last function is usually needed to routinely estimate the (environmental $\gamma$-) dose the chip had received. However, the first two functions are needed to correct for equipment related issues. 
If you have already performed the first two analyses or you do not feel the need for them, you can directly start with the Sec. [4](#Al2O3_Dose_Determination). The following tutorial assumes that all measurements have been performed on a Freiberg Instruments *lexsyg SMART* luminescence reader (Richter et al., 2015). Nevertheless, the general procedure should also work for other luminescence readers. However, please keep in mind that for readers without a separate measurement chamber the determination of the irradiation cross-talk becomes indispensable and should not be skipped. Please further note that this vignette covers only the **R** related part of the data analysis and will not explain the theoretical and physical background. Please see Kreutzer et al. (2018), for details. # Determine irradiation time correction factors ## Data import ```{r, echo=FALSE, message=FALSE, warning=FALSE, cache=TRUE} library(Luminescence) temp <- read_XSYG2R("~/Lumi/Bordeaux/Al2O3_Test/XSYG/Manuscript/2017-03-22_20170302_Al2O3_MDL.xsyg", fastForward = TRUE, txtProgressBar = FALSE) data_ITC <- temp[[1]] temp_CT <- read_XSYG2R("~/Lumi/Bordeaux/Al2O3_Test/XSYG/Manuscript/", fastForward = TRUE, pattern = "CrossTalk_NEW", txtProgressBar = FALSE) data_CT <- temp_CT temp_CAL <- read_XSYG2R("~/Lumi/Bordeaux/Al2O3_Test/XSYG/Manuscript/", fastForward = TRUE, pattern = "Calibration", txtProgressBar = FALSE) data_EnvD <- temp_CAL temp_CAL_info <- read_XSYG2R("~/Lumi/Bordeaux/Al2O3_Test/XSYG/Manuscript/", import = FALSE, pattern = "Calibration") ``` To determine the irradiation time correction factor the function `analyse_Al2O3C_ITC()` is used. The measurement sequence is based on the suggestions made by Kreutzer et al. (2018). To import the measurement data run the function `read_XSYG2R()`. The curve selection is done automatically by the function using the argument `recordType` (preset). Modify this argument if the selection here does not fit your equipment. 
```{r,eval = FALSE} library(Luminescence) data_ITC <- read_XSYG2R("MyIrradiationTimeCorrectionMeasurement.XSYG", fastForward = TRUE) data_ITC ``` ```{r, echo=FALSE, R.options=list(width = 10)} cat(paste(c(capture.output(data_ITC)[1:10], "... "), collapse = "\n")) ``` ## Run analysis The imported data are either a single `RLum.Analysis` object or a `list` of such objects, which can be directly passed to the function `analyse_Al2O3C_ITC()`. *Please note that if you follow the suggestions by Kreutzer et al. (in press) no further function arguments are necessary.* \newline{} ```{r Fig1, fig.cap="Dose response curve used to correct the irradiation time for the movement duration of the sample carrier.", out.width=".8\\linewidth", fig.align="center", dpi=75} results_ITC <- analyse_Al2O3C_ITC(object = data_ITC) ``` The analysis returns a plot (Fig.\ref{fig:Fig1}) and the output is stored in the object `results_ITC` and will be used later. \clearpage{} # Irradiation cross-talk estimation ## Data import The data import is similar to the data import given above (including the automatic curve selection). ```{r,eval = FALSE} library(Luminescence) data_CT <- read_XSYG2R("MyCrossTalkIrradiationMeasurement.XSYG", fastForward = TRUE) data_CT ``` ```{r, echo=FALSE, R.options=list(width = 10)} data_CT[[1]] ``` ## Data analysis For the data analysis the function `analyse_Al2O3C_CrossTalk()` is called. Amongst others, the function has a parameter called `irradiation_time_correction`. This parameter can be left empty **or** the results from the previous irradiation time correction measurements can be directly passed to the function. Graphical results are shown in Fig.$~$\ref{fig:Fig2}. The numerical output is again an `RLum.Results` object which can be kept for a later usage, i.e. for an automatic correction during the dose estimation. ```{r, Fig2, fig.cap="Graphical function output of the crosstalk data analysis. 
Please note that the plot shows the results from repeated cross-talk measurements as described by Kreutzer et al. (2018).", fig.align="center", fig.height=6.5, fig.width=8, fig.pos="h", out.height="400px", out.width="500px", dpi=75} results_CT <- analyse_Al2O3C_CrossTalk( object = data_CT, irradiation_time_correction = results_ITC) ``` \clearpage{} # Al$_{2}$O$_{3}$:C dose determination {#Al2O3_Dose_Determination} This section describes the workflow for the final apparent dose estimation of an Al$_{2}$O$_{3}$ chip. The analyses done above are not necessary but recommended to correct for the equipment characteristics. ## Data import The data import follows the examples given below and consists of calling the function `read_XSYG2R()`. An additional curve selection is not necessary, so far the sequence follows the suggestion by Kreutzer et al. (2018) ```{r,eval = FALSE} library(Luminescence) data_EnvD <- read_XSYG2R("MyEnvironmentalDoseMeasurement.XSYG", fastForward = TRUE) data_EnvD ``` ## Data analysis For the analysis of the environmental dose the Al$_{2}$O$_{3}$ chips received, the results from the estimation of the irradiation time correction analysis and the cross-talk measurements are provided as input objects. The function automatically corrects for both effects. If this is not wanted the arguments can be set to `NULL` (the default) ```{r, eval = FALSE, echo = TRUE, fig.height = 4, fig.width=8, fig.align="center", dpi=75} results_EnvD <- analyse_Al2O3C_Measurement( object = data_EnvD, irradiation_time_correction = results_ITC, cross_talk_correction = results_CT) ``` ```{r, echo = FALSE, fig.height = 4, fig.width=8, fig.align="center", fig.cap="Representive curve plots for one chip. 
Shown are OSL and TL curves.", dpi=75} analyse_Al2O3C_Measurement( object = data_EnvD[[2]], irradiation_time_correction = results_ITC, cross_talk_correction = results_CT) ``` ```{r, echo = FALSE} results_EnvD <- analyse_Al2O3C_Measurement( object = data_EnvD, irradiation_time_correction = results_ITC, cross_talk_correction = results_CT, verbose = FALSE, plot = FALSE) ``` For this example six Al$_{2}$O$_{3}$:C chips have been measured. The function returns an object of type `RLum.Results`: ```{r} results_EnvD ``` To access the numerical results the given data objects within the `RLum.Results` object can be accessed using the `$` sign: ```{r, eval = FALSE} results_EnvD$data ``` ```{r, echo = FALSE} knitr::kable(results_EnvD$data) ``` This table is as `data.frame` and can be further treated with standard methods in **R**. For example, the table show one extreme value on position (the first row). This value has obviously no meaning and indeed, here the value was biased due to a technical error and should be removed from the data set. ```{r} results_table <- results_EnvD$data[-1,] ``` In a last step the results can be plotted, e.g., using the Abancio Plot (Dietze et al., 2016). ```{r, fig.width = 6, fig.height=5, warning=FALSE, message=FALSE, fig.align="center"} plot_AbanicoPlot( data = results_table[,1:2], zlab = expression(paste(D[e], " [s]")), main = "Dose Distribution", summary = c("n", "mean", "sd.abs", "sd.rel") ) ``` ##Further analysis To further data processing steps might be of particular interest in the given context: ### Travel dosimeter(s) Usually, the data set to analyse contains chips, which were used as travel dosimeters, i.e. this chips were reset in the field at the time the other dosimeters were taken and then transported along with the field dosimeters. The dose of the travel dosimeters is subtracted from the measured dose of the field dosimeters. 
This can be done manually or automatically using the argument `travel_dosimeter` in the function `analyse_Al2O3C_Measurement()`. For the data set given above the analysis can be used as follows, assuming that the travel dosimeters were placed on measurement position 2 and 3: ```{r, echo = TRUE} results_EnvD_alternative <- analyse_Al2O3C_Measurement( object = data_EnvD[-1], travel_dosimeter = c(2,3), irradiation_time_correction = results_ITC, cross_talk_correction = results_CT, verbose = FALSE, plot = FALSE) results_EnvD_alternative ``` The original data set remains untouched, but the now the returned object contains a new, additional, object called `data_TDcorrected` with the corrected values: ```{r} results_EnvD_alternative$data_TDcorrected ``` ### Gray to seconds So far all returned values had the unit s (seconds), however, usually, the energy dose is wanted. To transform the values from the time domain to the dose domain, the function `Second2Gray()` can be used. Please make sure that you recalculate your calibration values to the measurement date. # Ease your workflow The analysis steps for the irradiation time correction and the irradiation cross-talk estimation might do not want to be repeated for every analysis. Thus, **R** allows the save results and recall them for the next analysis using the functions `save()` and `load()` ```{r, eval = FALSE} save(list = c("results_CT", "results_ITC"), file = "MyResults.Rdata") load(file = "MyResults.Rdata") ``` # References {-} Dietze, M., Kreutzer, S., Burow, C., Fuchs, M.C., Fischer, M., Schmidt, C., 2016. The abanico plot: visualising chronometric data with individual standard errors 31, 12–18. [doi:10.1016/j.quageo.2015.09.003](https://doi.org/10.1016/j.quageo.2015.09.003) Kreutzer, S., Martin, L., Guérin, G., Tribolo, C., Selva, P., Mercier, N., in press. Environmental Dose Rate Determination Using a Passive Dosimeter: Techniques and Workflow for alpha-Al2O3:C Chips. Gechromometrie 45, 56-67. 
[doi: 10.1515/geochr-2015-0086](https://dx.doi.org/10.1515/geochr-2015-0086) Richter, D., Richter, A., Dornich, K., 2015. Lexsyg smart — a luminescence detection system for dosimetry, material research and dating application. Geochronometria 42, 202–209. [doi: 10.1515/geochr-2015-0022](https://doi.org/10.1515/geochr-2015-0022) Luminescence/vignettes/HowTo_analyse_Al2O3.html.asis0000644000176200001440000000026713231137116022177 0ustar liggesusers%\VignetteIndexEntry{Analyse Al2O3:C Measurements} %\VignetteEngine{R.rsp::asis} %\VignetteKeyword{PDF} %\VignetteKeyword{HTML} %\VignetteKeyword{vignette} %\VignetteKeyword{package} Luminescence/vignettes/HowTo_analyse_pIRIRMeasurements.Rmd0000644000176200001440000002675713456371167023550 0ustar liggesusers--- title: "How to analyse post-IR IRSL measurements?" author: "Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montagine, France" date: "Last modified: `r Sys.Date()` ('Luminescence' version: `r packageVersion('Luminescence')`)" output: rmarkdown::html_vignette: fig_caption: yes number_sections: yes toc: yes bibliography: Tutorial_Analysing_pIRIR_protocol.bib vignette: > %\VignetteEncoding{UTF-8} --- # Scope and introduction Using the function `analyse_SAR.CWOSL()` are `Analyse_SAR.OSLdata()` from the **R** package 'Luminescence' allows to analyse standard OSL (quartz) measurements based on the SAR protocol [@Murray_2000cr]. The function `analyse_SAR.CWOSL()` can also be used for analysing measurements based on the post-IR IRSL protocol (pIRIR, @Thomsen_2008qv), since the measurement protocol based on the SAR structure (see the following table) comprising a set of curves with a $L_{x}$ and $T_{x}$ signal pattern. Step | Example | Signal | --------------|-------------|-----------| Irradiation | beta-irr. | - | Heating | preheat/TL | - | Stimulation | OSL | $L_x$ | Irradiation | beta-irr. 
|- | Heating | cutheat/TL | - | Stimulation | OSL | $T_x$ | To lower the entry level and make the analysis of post-IR IRSL data more straightforward, some time ago, the function `analyse_pIRIRSequence()` was developed. This function is basically a wrapper around the two functions `analyse_SAR.CWOSL()` and `plot_GrowthCurve()`. This vignette provides a short tutorial exemplifying the analysis of post-IR IRSL data in **R**. To avoid misunderstandings, please keep in mind that the post-IR IRSL protocol is a simple extension of the SAR structure by introducing further stimulations steps only: Step | Example | Signal | --------------|-------------|-----------| Irradiation | beta-irr. | - | Heating | preheat/TL | - | Stimulation | IR$_{50}$ | $L_{x_{_1}}$ | Stimulation | pIRIR$_{225}$ | $L_{x_{_2}}$ | Irradiation | beta-irr. |- | Heating | preheat/TL | - | Stimulation | IR$_{50}$ | $T_{x_{_1}}$ | Stimulation | pIRIR$_{225}$ | $T_{x_{_2}}$ | While the number of IRSL stimulation steps is not limited in general (cf. @Fu:2012ca), the number of steps used for recording the signal of interest and the test dose signal must be equal. Example, if the sequence has two stimulation steps for the signal ($L_{x_{_1}}$, $L_{x_{_2}}$) (as in the table given above), it also needs two stimulations steps for measuring the test dose. Further steps, e.g., hot bleach steps at the end of the cycle, are allowed, but **do not** belong to the SAR structure and should be removed prior any analysis using the function `analyse_pIRIRSequence()`. *Note: The terminal and graphical output show below is partly truncated to shorten the length of this vignette, however, calling the functions in R will show the full output.* # Running example In our example, the measurement was carried out on a Freiberg Instruments lexsyg luminescence reader. Measurement data are stored in XML-based file format called *XSYG*. Two pIRIR signals were measured: A IR$_{50}$ and a pIRIR$_{225}$ signal. 
The preheat steps were carried out as TL. ## Data import To start with, the package 'Luminescence' itself has to be loaded. In a next step, measurement data are imported using the function `read_XSYG2R()`. If your input format is a BIN/BINX-file, replace the function `read_XSYG2R()` by `read_BIN2R()`. ```{r,echo=TRUE, message=FALSE, R.options=list(width = 10)} library(Luminescence) temp <- read_XSYG2R("pIRIR_measurementData.xsyg", fastForward = TRUE, txtProgressBar = FALSE) ``` To se the dataset in the **R** terminal, just call the object `temp` ```{r, eval = FALSE} temp ``` ```{r, echo = FALSE} cat(paste(c(capture.output(temp)[1:15], "... "), collapse = "\n")) ``` The output shows an `RLum.Analysis` object, which contains all recorded curves (`RLum.Data.Curve` objects) from one aliquot (e.g., cup/disc). In total, the dataset contains the curves of `r length(temp)` aliquots. All records are numbered, here from `#1` to `#208` (shown only until `#29`) and named by their corresponding record type (`TL`, `IRSL`). So far available, within round brackets, information on the detector are given (`UVVIS` and `NA`). This reveals that the object contains curves which are not wanted for the analysis. Curves which belong to a specific measurement step (e.g., IRSL stimulation) are connected with the `<>` symbol. However, curves with `(NA)` are curves recorded by technical components (e.g., temperature sensor) other than the photomultiplier tube and not wanted, even they belong to the dataset. In our case, unfortunately, the information `(UVVIS)` is rather uninformative, but a usual case, since *it depends on the measurement device whether information on the detector are available or not.* This example emphasises that prior knowledge of the data structure and the used technical components are indispensable. 
## Select wanted curves To select only wanted curves wanted for the analysis the function `get_RLum()` can be used: ```{r} temp_sel <- get_RLum(temp, recordType = "UVVIS", drop = FALSE) ``` ```{r, eval = FALSE} temp_sel ``` ```{r, echo = FALSE} cat(paste(c(capture.output(temp_sel)[1:10], "... "), collapse = "\n")) ``` The function `get_RLum()` is very powerful and supports sophisticated subsetting of a `RLum.Analysis` objects. Further useful arguments are `curveType` and `record.id`. The latter one allows a subsetting by record id (e.g., `record.id = 2` to select `#2`) and supports also negative subsetting (e.g., to remove only `#2`, type `record.id = -2`). To understand the meaning of the argument `drop = FALSE`, please call the function `get_RLum()` another time with `drop = TRUE` and see the difference in the **R** terminal. For all supported arguments see the manual of the function by typing `?get_RLum` in the **R** terminal. In our example, however, the dataset does not yet follow the SAR structure. The sequence comprises a hotbleach at the end of each cycles (record `#7` in the terminal output example above). This curves are not wanted a have to be removed. This can be done using again the function `get_RLum()` with the argument `record.id`. Please note that by executing the following example the object `temp_sel` will be replaced. ```{r} temp_sel <- get_RLum(temp_sel, record.id = -seq(7,length(temp_sel[[1]]), by = 7), drop = FALSE) ``` ```{r, eval = FALSE} temp_sel ``` ```{r, echo = FALSE} cat(paste(c(capture.output(temp_sel)[1:10], "... "), collapse = "\n")) ``` Using a negative subsetting, all hotbleach curves have been removed using the call `-seq(7,length(temp_sel[[1]]), by = 7)`. Important is to understand that the function `length()` was called for the first list element of `temp_sel`, which contains the recorded curves for the first aliquot only. 
To see the differences type: ```{r} length(temp_sel) length(temp_sel[[1]]) ``` In other words, our measurement record has data from `r length(temp_sel)` aliquots and each aliquot consits of (at least) `r length(temp_sel[[1]])` records. We here further assume that the number of records is similar for each aliquot. # Analyse sequence Now the object `temp_sel` only comprises TL and IRSL curves, and this data can be directly passed to the function `analyse_pIRIRSequence()`: ```{r, echo=FALSE, eval = FALSE} results <- analyse_pIRIRSequence( object = temp_sel, signal.integral.min = 1, signal.integral.max = 10, background.integral.min = 800, background.integral.max = 999, dose.points = c(0, 340, 680, 1020, 1360, 0, 340)) ``` ```{r, fig.height=14, fig.width=14, out.height="400px", out.height="400px", fig.align="center", dpi=50} results <- analyse_pIRIRSequence( object = temp_sel[[1]], signal.integral.min = 1, signal.integral.max = 10, background.integral.min = 800, background.integral.max = 999, dose.points = c(0, 340, 680, 1020, 1360, 0, 340), verbose = FALSE) ``` ```{r, echo=FALSE, eval = TRUE} results <- analyse_pIRIRSequence( object = temp_sel, signal.integral.min = 1, signal.integral.max = 10, background.integral.min = 800, background.integral.max = 999, dose.points = c(0, 340, 680, 1020, 1360, 0, 340), verbose = FALSE, plot = FALSE) ``` The function expects the setting of some arguments, for details and meaning, please see `?analyse_pIRIRSequence`. If the imported measurement data do not carry information on the `dose.points`, as it is the case in our example, these values have to be provided manually. Please note that the information needed for `dose.points` is something which was defined while writing the measurement sequence (your irradiation times). The function output is a comprehensive plot scheme and a so-called `RLum.Results` object, which contains all relevant calculations from the analysis. 
```{r} results ``` The $D_{e}$ values (here in seconds) can be seen by calling the `$data` element from the object `results`, which is a `data.frame` (and here limited to three columns): ```{r} results$data[,c("De", "De.Error", "RC.Status", "Signal")] ``` The column `RC.Status` informs you about a failed rejection criterion; which one is not revealed, but the column provides a possibility for further subsetting. For a quick data processing this is, together with the plot output, usually enough information. However, to see all rejection criteria type `results$rejection.criteria`. To see all information, type `results$data`. If you want to combine the two tables for a more versatile data processing, you can merge both tables by calling: ```{r} df <- merge(results$data, results$rejection.criteria, by = "UID") head(df) ``` The result may appear confusing at first glance, since, e.g., the column `De` appears to contain duplicated entries. But still, each row is unique and in sum contains unique information. # Plot results To plot the `pIRIR225` $D_{e}$ values the following call can be used: ```{r, fig.align="center", fig.width=4, fig.height=4, dpi=50} plot_KDE( data = subset(results$data, Signal == "pIRIR225")[, c("De", "De.Error")], xlab = expression(paste(D[e], " [s]")), summary = c("n", "mean", "sd.abs") ) ``` # Compacted call The above-listed steps can also be shortened to a concise **R** call using the so-called `magrittr` operator, which basically pipes the results from function to function. For a change, the final plot is an Abanico plot @Dietze:2015dp. 
*Please note that for this last example the arguments `plot` and `verbose` have been set to `FALSE` for most of the functions.* ```{r, eval = TRUE, warning=FALSE, fig.align="center", fig.width=5, fig.height=5, dpi=50} results <- read_XSYG2R("pIRIR_measurementData.xsyg", fastForward = TRUE, verbose = FALSE) %>% get_RLum(recordType = "UVVIS", drop = FALSE) %>% get_RLum(.,record.id = -seq(7,length(.[[1]]), by = 7), drop = FALSE) %>% analyse_pIRIRSequence( signal.integral.min = 1, signal.integral.max = 10, background.integral.min = 800, background.integral.max = 999, dose.points = c(0, 340, 680, 1020, 1360, 0, 340), verbose = FALSE, plot = FALSE) %>% get_RLum() %>% subset(.,subset = Signal == "pIRIR225") %>% plot_AbanicoPlot( data = .[, c("De", "De.Error")], zlab = expression(paste(D[e], " [s]")), summary = c("n", "mean", "sd.abs") ) ``` #References {-} Luminescence/vignettes/S4classObjects.pdf.asis0000644000176200001440000000030613231137116021150 0ustar liggesusers%\VignetteIndexEntry{S4-class Object Structure in 'Luminescence'} %\VignetteEngine{R.rsp::asis} %\VignetteKeyword{PDF} %\VignetteKeyword{HTML} %\VignetteKeyword{vignette} %\VignetteKeyword{package} Luminescence/vignettes/HowTo_analyse_pIRIRMeasurements.html.asis0000644000176200001440000000027413233636635024707 0ustar liggesusers%\VignetteIndexEntry{Analyse post-IR IRSL Measurements} %\VignetteEngine{R.rsp::asis} %\VignetteKeyword{PDF} %\VignetteKeyword{HTML} %\VignetteKeyword{vignette} %\VignetteKeyword{package} Luminescence/vignettes/Tutorial_Analysing_pIRIR_protocol.bib0000644000176200001440000000225113233633341024111 0ustar liggesusers@article{Murray_2000cr, author = {Murray, A S and Wintle, Ann G}, title = {{Luminescence dating of quartz using an improved single-aliquot regenerative-dose protocol}}, journal = {Radiation Measurements}, year = {2000}, volume = {32}, number = {1}, pages = {57--73} } @article{Thomsen_2008qv, author = {Thomsen, K J and Murray, A S and Jain, M and Boetter-Jensen, L}, title = 
{{Laboratory fading rates of various luminescence signals from feldspar-rich sediment extracts}}, journal = {Radiation Measurements}, year = {2008}, volume = {43}, number = {9-10}, pages = {1474--1486} } @article{Fu:2012ca, author = {Fu, Xiao and Li, Bo and Li, Sheng-Hua}, title = {{Testing a multi-step post-IR IRSL dating method using polymineral fine grains from Chinese loess}}, journal = {Radiation Measurements}, year = {2012}, volume = {10}, pages = {8--15} } @article{Dietze:2015dp, author = {Dietze, Michael and Kreutzer, Sebastian and Burow, Christoph and Fuchs, Margret C and Fischer, Manfred and Schmidt, Christoph}, title = {{The abanico plot: visualising chronometric data with individual standard errors}}, journal = {Quaternary Geochronology}, year = {2016}, volume = {31}, pages = {12--18} } Luminescence/R/0000755000176200001440000000000013604173345013073 5ustar liggesusersLuminescence/R/plot_RadialPlot.R0000644000176200001440000015651213604172511016313 0ustar liggesusers#' Function to create a Radial Plot #' #' A Galbraith's radial plot is produced on a logarithmic or a linear scale. #' #' Details and the theoretical background of the radial plot are given in the #' cited literature. This function is based on an S script of Rex Galbraith. To #' reduce the manual adjustments, the function has been rewritten. Thanks to #' Rex Galbraith for useful comments on this function. \cr #' Plotting can be disabled by adding the argument `plot = "FALSE"`, e.g. #' to return only numeric plot output. #' #' Earlier versions of the Radial Plot in this package had the 2-sigma-bar #' drawn onto the z-axis. However, this might have caused misunderstanding in #' that the 2-sigma range may also refer to the z-scale, which it does not! #' Rather it applies only to the x-y-coordinate system (standardised error vs. #' precision). A spread in doses or ages must be drawn as lines originating at #' zero precision (x0) and zero standardised estimate (y0). 
Such a range may be #' drawn by adding lines to the radial plot ( `line`, `line.col`, #' `line.label`, cf. examples). #' #' A statistic summary, i.e. a collection of statistic measures of #' centrality and dispersion (and further measures) can be added by specifying #' one or more of the following keywords: #' - `"n"` (number of samples), #' - `"mean"` (mean De value), #' - `"mean.weighted"` (error-weighted mean), #' - `"median"` (median of the De values), #' - `"sdrel"` (relative standard deviation in percent), #' - `"sdrel.weighted"` (error-weighted relative standard deviation in percent), #' - `"sdabs"` (absolute standard deviation), #' - `"sdabs.weighted"` (error-weighted absolute standard deviation), #' - `"serel"` (relative standard error), #' - `"serel.weighted"` (error-weighted relative standard error), #' - `"seabs"` (absolute standard error), #' - `"seabs.weighted"` (error-weighted absolute standard error), #' - `"in.2s"` (percent of samples in 2-sigma range), #' - `"kurtosis"` (kurtosis) and #' - `"skewness"` (skewness). #' #' @param data [data.frame] or [RLum.Results-class] object (**required**): #' for `data.frame` two columns: De (`data[,1]`) and De error (`data[,2]`). #' To plot several data sets in one plot, the data sets must be provided as #' `list`, e.g. `list(data.1, data.2)`. #' #' @param na.rm [logical] (*with default*): #' excludes `NA` values from the data set prior to any further operations. #' #' @param log.z [logical] (*with default*): #' Option to display the z-axis in logarithmic scale. Default is `TRUE`. #' #' @param central.value [numeric]: #' User-defined central value, primarily used for horizontal centering #' of the z-axis. #' #' @param centrality [character] or [numeric] (*with default*): #' measure of centrality, used for automatically centering the plot and drawing #' the central line. 
Can either be one out of #' - `"mean"`, #' - `"median"`, #' - `"mean.weighted"` and #' - `"median.weighted"` or a #' - numeric value used for the standardisation. #' #' @param mtext [character]: #' additional text below the plot title. #' #' @param summary [character] (*optional*): #' add statistic measures of centrality and dispersion to the plot. #' Can be one or more of several keywords. See details for available keywords. #' #' @param summary.pos [numeric] or [character] (*with default*): #' optional position coordinates or keyword (e.g. `"topright"`) #' for the statistical summary. Alternatively, the keyword `"sub"` may be #' specified to place the summary below the plot header. However, this latter #' option is only possible if `mtext` is not used. #' #' @param legend [character] vector (*optional*): #' legend content to be added to the plot. #' #' @param legend.pos [numeric] or [character] (with #' default): optional position coordinates or keyword (e.g. `"topright"`) #' for the legend to be plotted. #' #' @param stats [character]: additional labels of statistically #' important values in the plot. One or more out of the following: #' - `"min"`, #' - `"max"`, #' - `"median"`. #' #' @param rug [logical]: #' Option to add a rug to the z-scale, to indicate the location of individual values #' #' @param plot.ratio [numeric]: #' User-defined plot area ratio (i.e. curvature of the z-axis). If omitted, #' the default value (`4.5/5.5`) is used and modified automatically to optimise #' the z-axis curvature. The parameter should be decreased when data points #' are plotted outside the z-axis or when the z-axis gets too elliptic. #' #' @param bar.col [character] or [numeric] (*with default*): #' colour of the bar showing the 2-sigma range around the central #' value. To disable the bar, use `"none"`. Default is `"grey"`. #' #' @param y.ticks [logical]: #' Option to hide y-axis labels. Useful for data with small scatter. 
#' #' @param grid.col [character] or [numeric] (*with default*): #' colour of the grid lines (originating at `[0,0]` and stretching to #' the z-scale). To disable grid lines, use `"none"`. Default is `"grey"`. #' #' @param line [numeric]: #' numeric values of the additional lines to be added. #' #' @param line.col [character] or [numeric]: #' colour of the additional lines. #' #' @param line.label [character]: #' labels for the additional lines. #' #' @param output [logical]: #' Optional output of numerical plot parameters. These can be useful to #' reproduce similar plots. Default is `FALSE`. #' #' @param ... Further plot arguments to pass. `xlab` must be a vector of #' length 2, specifying the upper and lower x-axes labels. #' #' @return Returns a plot object. #' #' @section Function version: 0.5.5 #' #' @author #' Michael Dietze, GFZ Potsdam (Germany)\cr #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr #' Based on a rewritten S script of Rex Galbraith, 2010 #' #' @seealso [plot], [plot_KDE], [plot_Histogram] #' #' @references #' Galbraith, R.F., 1988. Graphical Display of Estimates Having #' Differing Standard Errors. Technometrics, 30 (3), 271-281. #' #' Galbraith, R.F., 1990. The radial plot: Graphical assessment of spread in #' ages. International Journal of Radiation Applications and Instrumentation. #' Part D. Nuclear Tracks and Radiation Measurements, 17 (3), 207-214. #' #' Galbraith, R. & Green, P., 1990. Estimating the component ages in a finite #' mixture. International Journal of Radiation Applications and #' Instrumentation. Part D. Nuclear Tracks and Radiation Measurements, 17 (3) #' 197-206. #' #' Galbraith, R.F. & Laslett, G.M., 1993. Statistical models for mixed fission #' track ages. Nuclear Tracks And Radiation Measurements, 21 (4), 459-470. #' #' Galbraith, R.F., 1994. Some Applications of Radial Plots. Journal of the #' American Statistical Association, 89 (428), 1232-1242. #' #' Galbraith, R.F., 2010. 
On plotting OSL equivalent doses. Ancient TL, 28 (1), #' 1-10. #' #' Galbraith, R.F. & Roberts, R.G., 2012. Statistical aspects of equivalent #' dose and error calculation and display in OSL dating: An overview and some #' recommendations. Quaternary Geochronology, 11, 1-27. #' #' @examples #' #' ## load example data #' data(ExampleData.DeValues, envir = environment()) #' ExampleData.DeValues <- Second2Gray(ExampleData.DeValues$BT998, c(0.0438,0.0019)) #' #' ## plot the example data straightforward #' plot_RadialPlot(data = ExampleData.DeValues) #' #' ## now with linear z-scale #' plot_RadialPlot(data = ExampleData.DeValues, #' log.z = FALSE) #' #' ## now with output of the plot parameters #' plot1 <- plot_RadialPlot(data = ExampleData.DeValues, #' log.z = FALSE, #' output = TRUE) #' plot1 #' plot1$zlim #' #' ## now with adjusted z-scale limits #' plot_RadialPlot(data = ExampleData.DeValues, #' log.z = FALSE, #' zlim = c(100, 200)) #' #' ## now the two plots with serious but seasonally changing fun #' #plot_RadialPlot(data = data.3, fun = TRUE) #' #' ## now with user-defined central value, in log-scale again #' plot_RadialPlot(data = ExampleData.DeValues, #' central.value = 150) #' #' ## now with a rug, indicating individual De values at the z-scale #' plot_RadialPlot(data = ExampleData.DeValues, #' rug = TRUE) #' #' ## now with legend, colour, different points and smaller scale #' plot_RadialPlot(data = ExampleData.DeValues, #' legend.text = "Sample 1", #' col = "tomato4", #' bar.col = "peachpuff", #' pch = "R", #' cex = 0.8) #' #' ## now without 2-sigma bar, y-axis, grid lines and central value line #' plot_RadialPlot(data = ExampleData.DeValues, #' bar.col = "none", #' grid.col = "none", #' y.ticks = FALSE, #' lwd = 0) #' #' ## now with user-defined axes labels #' plot_RadialPlot(data = ExampleData.DeValues, #' xlab = c("Data error (%)", #' "Data precision"), #' ylab = "Scatter", #' zlab = "Equivalent dose [Gy]") #' #' ## now with minimum, maximum and median 
value indicated #' plot_RadialPlot(data = ExampleData.DeValues, #' central.value = 150, #' stats = c("min", "max", "median")) #' #' ## now with a brief statistical summary #' plot_RadialPlot(data = ExampleData.DeValues, #' summary = c("n", "in.2s")) #' #' ## now with another statistical summary as subheader #' plot_RadialPlot(data = ExampleData.DeValues, #' summary = c("mean.weighted", "median"), #' summary.pos = "sub") #' #' ## now the data set is split into sub-groups, one is manipulated #' data.1 <- ExampleData.DeValues[1:15,] #' data.2 <- ExampleData.DeValues[16:25,] * 1.3 #' #' ## now a common dataset is created from the two subgroups #' data.3 <- list(data.1, data.2) #' #' ## now the two data sets are plotted in one plot #' plot_RadialPlot(data = data.3) #' #' ## now with some graphical modification #' plot_RadialPlot(data = data.3, #' col = c("darkblue", "darkgreen"), #' bar.col = c("lightblue", "lightgreen"), #' pch = c(2, 6), #' summary = c("n", "in.2s"), #' summary.pos = "sub", #' legend = c("Sample 1", "Sample 2")) #' #' @md #' @export plot_RadialPlot <- function( data, na.rm = TRUE, log.z = TRUE, central.value, centrality = "mean.weighted", mtext, summary, summary.pos, legend, legend.pos, stats, rug = FALSE, plot.ratio, bar.col, y.ticks = TRUE, grid.col, line, line.col, line.label, output = FALSE, ... ) { ## Homogenise input data format if(is(data, "list") == FALSE) {data <- list(data)} ## Check input data for(i in 1:length(data)) { if(is(data[[i]], "RLum.Results") == FALSE & is(data[[i]], "data.frame") == FALSE) { stop(paste("[plot_RadialPlot] Error: Input data format is neither", "'data.frame' nor 'RLum.Results'"), call. 
= FALSE) } else { if(is(data[[i]], "RLum.Results") == TRUE) { data[[i]] <- get_RLum(data[[i]], "data") } ##use only the first two columns data[[i]] <- data[[i]][,1:2] } } ## check data and parameter consistency-------------------------------------- if(missing(stats) == TRUE) {stats <- numeric(0)} if(missing(summary) == TRUE) { summary <- c("n", "in.2s") } if(missing(summary.pos) == TRUE) { summary.pos <- "sub" } if(missing(bar.col) == TRUE) { bar.col <- rep("grey80", length(data)) } if(missing(grid.col) == TRUE) { grid.col <- rep("grey70", length(data)) } if(missing(summary) == TRUE) { summary <- NULL } if(missing(summary.pos) == TRUE) { summary.pos <- "topleft" } if(missing(mtext) == TRUE) { mtext <- "" } ## check z-axis log-option for grouped data sets if(is(data, "list") == TRUE & length(data) > 1 & log.z == FALSE) { warning(paste("Option 'log.z' is not set to 'TRUE' altough more than one", "data set (group) is provided.")) } ## optionally, remove NA-values if(na.rm == TRUE) { for(i in 1:length(data)) { data[[i]] <- na.exclude(data[[i]]) } } ## create preliminary global data set De.global <- data[[1]][,1] if(length(data) > 1) { for(i in 2:length(data)) { De.global <- c(De.global, data[[i]][,1]) } } ## calculate major preliminary tick values and tick difference extraArgs <- list(...) 
if("zlim" %in% names(extraArgs)) { limits.z <- extraArgs$zlim } else { z.span <- (mean(De.global) * 0.5) / (sd(De.global) * 100) z.span <- ifelse(z.span > 1, 0.9, z.span) limits.z <- c((ifelse(test = min(De.global) <= 0, yes = 1.1, no = 0.9) - z.span) * min(De.global), (1.1 + z.span) * max(De.global)) } ## calculate correction dose to shift negative values if(min(De.global) < 0) { if("zlim" %in% names(extraArgs)) { De.add <- abs(extraArgs$zlim[1]) } else { ## estimate delta De to add to all data De.add <- min(10^ceiling(log10(abs(De.global))) * 10) ## optionally readjust delta De for extreme values if(De.add <= abs(min(De.global))) { De.add <- De.add * 10 } } } else { De.add <- 0 } ticks <- round(pretty(limits.z, n = 5), 3) De.delta <- ticks[2] - ticks[1] ## optionally add correction dose to data set and adjust error if(log.z == TRUE) { for(i in 1:length(data)) { data[[i]][,1] <- data[[i]][,1] + De.add } De.global <- De.global + De.add } ## calculate major preliminary tick values and tick difference extraArgs <- list(...) 
if("zlim" %in% names(extraArgs)) { limits.z <- extraArgs$zlim } else { z.span <- (mean(De.global) * 0.5) / (sd(De.global) * 100) z.span <- ifelse(z.span > 1, 0.9, z.span) limits.z <- c((ifelse(min(De.global) <= 0, 1.1, 0.9) - z.span) * min(De.global), (1.1 + z.span) * max(De.global)) } ticks <- round(pretty(limits.z, n = 5), 3) De.delta <- ticks[2] - ticks[1] ## calculate and append statistical measures -------------------------------- ## z-values based on log-option z <- lapply(1:length(data), function(x){ if(log.z == TRUE) {log(data[[x]][,1])} else {data[[x]][,1]}}) if(is(z, "list") == FALSE) {z <- list(z)} data <- lapply(1:length(data), function(x) { cbind(data[[x]], z[[x]])}) rm(z) ## calculate se-values based on log-option se <- lapply(1:length(data), function(x, De.add){ if(log.z == TRUE) { if(De.add != 0) { data[[x]][,2] <- data[[x]][,2] / (data[[x]][,1] + De.add) } else { data[[x]][,2] / data[[x]][,1] } } else { data[[x]][,2] }}, De.add = De.add) if(is(se, "list") == FALSE) {se <- list(se)} data <- lapply(1:length(data), function(x) { cbind(data[[x]], se[[x]])}) rm(se) ## calculate central values if(centrality[1] == "mean") { z.central <- lapply(1:length(data), function(x){ rep(mean(data[[x]][,3], na.rm = TRUE), length(data[[x]][,3]))}) } else if(centrality[1] == "median") { z.central <- lapply(1:length(data), function(x){ rep(median(data[[x]][,3], na.rm = TRUE), length(data[[x]][,3]))}) } else if(centrality[1] == "mean.weighted") { z.central <- lapply(1:length(data), function(x){ sum(data[[x]][,3] / data[[x]][,4]^2) / sum(1 / data[[x]][,4]^2)}) } else if(centrality[1] == "median.weighted") { ## define function after isotone::weighted.median median.w <- function (y, w) { ox <- order(y) y <- y[ox] w <- w[ox] k <- 1 low <- cumsum(c(0, w)) up <- sum(w) - low df <- low - up repeat { if (df[k] < 0) k <- k + 1 else if (df[k] == 0) return((w[k] * y[k] + w[k - 1] * y[k - 1]) / (w[k] + w[k - 1])) else return(y[k - 1]) } } z.central <- lapply(1:length(data), 
function(x){ rep(median.w(y = data[[x]][,3], w = data[[x]][,4]), length(data[[x]][,3]))}) } else if(is.numeric(centrality) == TRUE & length(centrality) == length(data)) { z.central.raw <- if(log.z == TRUE) { log(centrality + De.add) } else { centrality + De.add } z.central <- lapply(1:length(data), function(x){ rep(z.central.raw[x], length(data[[x]][,3]))}) } else if(is.numeric(centrality) == TRUE & length(centrality) > length(data)) { z.central <- lapply(1:length(data), function(x){ rep(median(data[[x]][,3], na.rm = TRUE), length(data[[x]][,3]))}) } else { stop("Measure of centrality not supported!") } data <- lapply(1:length(data), function(x) { cbind(data[[x]], z.central[[x]])}) rm(z.central) ## calculate precision precision <- lapply(1:length(data), function(x){ 1 / data[[x]][,4]}) if(is(precision, "list") == FALSE) {precision <- list(precision)} data <- lapply(1:length(data), function(x) { cbind(data[[x]], precision[[x]])}) rm(precision) ## calculate standard estimate std.estimate <- lapply(1:length(data), function(x){ (data[[x]][,3] - data[[x]][,5]) / data[[x]][,4]}) if(is(std.estimate, "list") == FALSE) {std.estimate <- list(std.estimate)} data <- lapply(1:length(data), function(x) { cbind(data[[x]], std.estimate[[x]])}) ## append empty standard estimate for plotting data <- lapply(1:length(data), function(x) { cbind(data[[x]], std.estimate[[x]])}) rm(std.estimate) ## generate global data set data.global <- cbind(data[[1]], rep(x = 1, times = nrow(data[[1]]))) colnames(data.global) <- rep("", 9) if(length(data) > 1) { for(i in 2:length(data)) { data.add <- cbind(data[[i]], rep(x = i, times = nrow(data[[i]]))) colnames(data.add) <- rep("", 9) data.global <- rbind(data.global, data.add) } } ## create column names colnames(data.global) <- c("De", "error", "z", "se", "z.central", "precision", "std.estimate", "std.estimate.plot") ## calculate global central value if(centrality[1] == "mean") { z.central.global <- mean(data.global[,3], na.rm = TRUE) } else 
if(centrality[1] == "median") { z.central.global <- median(data.global[,3], na.rm = TRUE) } else if(centrality[1] == "mean.weighted") { z.central.global <- sum(data.global[,3] / data.global[,4]^2) / sum(1 / data.global[,4]^2) } else if(centrality[1] == "median.weighted") { ## define function after isotone::weighted.mean median.w <- function (y, w) { ox <- order(y) y <- y[ox] w <- w[ox] k <- 1 low <- cumsum(c(0, w)) up <- sum(w) - low df <- low - up repeat { if (df[k] < 0) k <- k + 1 else if (df[k] == 0) return((w[k] * y[k] + w[k - 1] * y[k - 1])/(w[k] + w[k - 1])) else return(y[k - 1]) } } z.central.global <- median.w(y = data.global[,3], w = data.global[,4]) } else if(is.numeric(centrality) == TRUE & length(centrality == length(data))) { z.central.global <- mean(data.global[,3], na.rm = TRUE) } ## optionally adjust zentral value by user-defined value if(missing(central.value) == FALSE) { # ## adjust central value for De.add central.value <- central.value + De.add z.central.global <- ifelse(log.z == TRUE, log(central.value), central.value) } ## create column names for(i in 1:length(data)) { colnames(data[[i]]) <- c("De", "error", "z", "se", "z.central", "precision", "std.estimate", "std.estimate.plot") } ## re-calculate standardised estimate for plotting for(i in 1:length(data)) { data[[i]][,8] <- (data[[i]][,3] - z.central.global) / data[[i]][,4] } data.global.plot <- data[[1]][,8] if(length(data) > 1) { for(i in 2:length(data)) { data.global.plot <- c(data.global.plot, data[[i]][,8]) } } data.global[,8] <- data.global.plot ## print warning for too small scatter if(max(abs(1 / data.global[6])) < 0.02) { small.sigma <- TRUE print(paste("Attention, small standardised estimate scatter.", "Toggle off y.ticks?")) } ## read out additional arguments--------------------------------------------- extraArgs <- list(...) 
main <- if("main" %in% names(extraArgs)) {extraArgs$main} else {expression(paste(D[e], " distribution"))} sub <- if("sub" %in% names(extraArgs)) {extraArgs$sub} else {""} if("xlab" %in% names(extraArgs)) { if(length(extraArgs$xlab) != 2) { stop("Argmuent xlab is not of length 2!") } else {xlab <- extraArgs$xlab} } else { xlab <- c(if(log.z == TRUE) { "Relative standard error (%)" } else { "Standard error" }, "Precision") } ylab <- if("ylab" %in% names(extraArgs)) { extraArgs$ylab } else { "Standardised estimate" } zlab <- if("zlab" %in% names(extraArgs)) { extraArgs$zlab } else { expression(paste(D[e], " [Gy]")) } if("zlim" %in% names(extraArgs)) { limits.z <- extraArgs$zlim } else { z.span <- (mean(data.global[,1]) * 0.5) / (sd(data.global[,1]) * 100) z.span <- ifelse(z.span > 1, 0.9, z.span) limits.z <- c((0.9 - z.span) * min(data.global[[1]]), (1.1 + z.span) * max(data.global[[1]])) } if("xlim" %in% names(extraArgs)) { limits.x <- extraArgs$xlim } else { limits.x <- c(0, max(data.global[,6])) } if(limits.x[1] != 0) { limits.x[1] <- 0 warning("Lower x-axis limit not set to zero, issue corrected!") } if("ylim" %in% names(extraArgs)) { limits.y <- extraArgs$ylim } else { y.span <- (mean(data.global[,1]) * 10) / (sd(data.global[,1]) * 100) y.span <- ifelse(y.span > 1, 0.98, y.span) limits.y <- c(-(1 + y.span) * max(abs(data.global[,7])), (0.8 + y.span) * max(abs(data.global[,7]))) } cex <- if("cex" %in% names(extraArgs)) { extraArgs$cex } else { 1 } lty <- if("lty" %in% names(extraArgs)) { extraArgs$lty } else { rep(2, length(data)) } lwd <- if("lwd" %in% names(extraArgs)) { extraArgs$lwd } else { rep(1, length(data)) } pch <- if("pch" %in% names(extraArgs)) { extraArgs$pch } else { rep(1, length(data)) } col <- if("col" %in% names(extraArgs)) { extraArgs$col } else { 1:length(data) } tck <- if("tck" %in% names(extraArgs)) { extraArgs$tck } else { NA } tcl <- if("tcl" %in% names(extraArgs)) { extraArgs$tcl } else { -0.5 } show <- if("show" %in% names(extraArgs)) 
{extraArgs$show} else {TRUE} if(show != TRUE) {show <- FALSE} fun <- if("fun" %in% names(extraArgs)) { extraArgs$fun } else { FALSE } ## define auxiliary plot parameters ----------------------------------------- ## optionally adjust plot ratio if(missing(plot.ratio) == TRUE) { if(log.z == TRUE) { plot.ratio <- 1 / (1 * ((max(data.global[,6]) - min(data.global[,6])) / (max(data.global[,7]) - min(data.global[,7])))) } else { plot.ratio <- 4.5 / 5.5 } } if(plot.ratio > 10^6) {plot.ratio <- 10^6} ## calculate conversion factor for plot coordinates f <- (max(data.global[,6]) - min(data.global[,6])) / (max(data.global[,7]) - min(data.global[,7])) * plot.ratio ## calculate major and minor z-tick values tick.values.major <- signif(pretty(limits.z, n = 5), 3) tick.values.minor <- signif(pretty(limits.z, n = 25), 3) tick.values.major <- tick.values.major[tick.values.major >= min(tick.values.minor)] tick.values.major <- tick.values.major[tick.values.major <= max(tick.values.minor)] tick.values.major <- tick.values.major[tick.values.major >= limits.z[1]] tick.values.major <- tick.values.major[tick.values.major <= limits.z[2]] tick.values.minor <- tick.values.minor[tick.values.minor >= limits.z[1]] tick.values.minor <- tick.values.minor[tick.values.minor <= limits.z[2]] if(log.z == TRUE) { tick.values.major <- log(tick.values.major) tick.values.minor <- log(tick.values.minor) } ## calculate z-axis radius r.x <- limits.x[2] / max(data.global[,6]) + 0.05 r <- max(sqrt((data.global[,6])^2+(data.global[,7] * f)^2)) * r.x ## calculate major z-tick coordinates tick.x1.major <- r / sqrt(1 + f^2 * ( tick.values.major - z.central.global)^2) tick.y1.major <- (tick.values.major - z.central.global) * tick.x1.major tick.x2.major <- (1 + 0.015 * cex) * r / sqrt( 1 + f^2 * (tick.values.major - z.central.global)^2) tick.y2.major <- (tick.values.major - z.central.global) * tick.x2.major ticks.major <- cbind(tick.x1.major, tick.x2.major, tick.y1.major, tick.y2.major) ## calculate minor z-tick 
coordinates tick.x1.minor <- r / sqrt(1 + f^2 * ( tick.values.minor - z.central.global)^2) tick.y1.minor <- (tick.values.minor - z.central.global) * tick.x1.minor tick.x2.minor <- (1 + 0.007 * cex) * r / sqrt( 1 + f^2 * (tick.values.minor - z.central.global)^2) tick.y2.minor <- (tick.values.minor - z.central.global) * tick.x2.minor ticks.minor <- cbind(tick.x1.minor, tick.x2.minor, tick.y1.minor, tick.y2.minor) ## calculate z-label positions label.x <- 1.03 * r / sqrt(1 + f^2 * (tick.values.major - z.central.global)^2) label.y <- (tick.values.major - z.central.global) * tick.x2.major ## create z-axes labels if(log.z == TRUE) { label.z.text <- signif(exp(tick.values.major), 3) } else { label.z.text <- signif(tick.values.major, 3) } ## subtract De.add from label values if(De.add != 0) { label.z.text <- label.z.text #- De.add } labels <- cbind(label.x, label.y, label.z.text) ## calculate coordinates for 2-sigma-polygon overlay polygons <- matrix(nrow = length(data), ncol = 8) for(i in 1:length(data)) { polygons[i,1:4] <- c(limits.x[1], limits.x[1], max(data.global[,6]), max(data.global[,6])) polygons[i,5:8] <- c(-2, 2, (data[[i]][1,5] - z.central.global) * polygons[i,3] + 2, (data[[i]][1,5] - z.central.global) * polygons[i,4] - 2) } ## calculate node coordinates for semi-circle user.limits <- if(log.z == TRUE) { log(limits.z) } else{ limits.z } ellipse.values <- seq(from = min(c(tick.values.major, tick.values.minor, user.limits[2])), to = max(c(tick.values.major, tick.values.minor, user.limits[2])), length.out = 500) ellipse.x <- r / sqrt(1 + f^2 * (ellipse.values - z.central.global)^2) ellipse.y <- (ellipse.values - z.central.global) * ellipse.x ellipse <- cbind(ellipse.x, ellipse.y) ellipse.lims <- rbind(range(ellipse[,1]), range(ellipse[,2])) ## check if z-axis overlaps with 2s-polygon polygon_y_max <- max(polygons[,7]) polygon_y_min <- min(polygons[,7]) z_2s_upper <- ellipse.x[abs(ellipse.y - polygon_y_max) == min(abs(ellipse.y - polygon_y_max))] z_2s_lower <- 
ellipse.x[abs(ellipse.y - polygon_y_min) == min(abs(ellipse.y - polygon_y_min))] if(max(polygons[,3]) >= z_2s_upper | max(polygons[,3]) >= z_2s_lower) { print("[plot_RadialPlot] Warning: z-scale touches 2s-polygon. Decrease plot ratio.") } ## calculate statistical labels if(length(stats == 1)) {stats <- rep(stats, 2)} stats.data <- matrix(nrow = 3, ncol = 3) data.stats <- as.numeric(data.global[,1]) if("min" %in% stats == TRUE) { stats.data[1, 3] <- data.stats[data.stats == min(data.stats)][1] stats.data[1, 1] <- data.global[data.stats == stats.data[1, 3], 6][1] stats.data[1, 2] <- data.global[data.stats == stats.data[1, 3], 8][1] } if("max" %in% stats == TRUE) { stats.data[2, 3] <- data.stats[data.stats == max(data.stats)][1] stats.data[2, 1] <- data.global[data.stats == stats.data[2, 3], 6][1] stats.data[2, 2] <- data.global[data.stats == stats.data[2, 3], 8][1] } if("median" %in% stats == TRUE) { stats.data[3, 3] <- data.stats[data.stats == quantile(data.stats, 0.5, type = 3)] stats.data[3, 1] <- data.global[data.stats == stats.data[3, 3], 6][1] stats.data[3, 2] <- data.global[data.stats == stats.data[3, 3], 8][1] } ## recalculate axes limits if necessary limits.z.x <- range(ellipse[,1]) limits.z.y <- range(ellipse[,2]) if(!("ylim" %in% names(extraArgs))) { if(limits.z.y[1] < 0.66 * limits.y[1]) { limits.y[1] <- 1.8 * limits.z.y[1] } if(limits.z.y[2] > 0.77 * limits.y[2]) { limits.y[2] <- 1.3 * limits.z.y[2] } } if(!("xlim" %in% names(extraArgs))) { if(limits.z.x[2] > 1.1 * limits.x[2]) { limits.x[2] <- limits.z.x[2] } } ## calculate and paste statistical summary De.stats <- matrix(nrow = length(data), ncol = 18) colnames(De.stats) <- c("n", "mean", "mean.weighted", "median", "median.weighted", "kde.max", "sd.abs", "sd.rel", "se.abs", "se.rel", "q25", "q75", "skewness", "kurtosis", "sd.abs.weighted", "sd.rel.weighted", "se.abs.weighted", "se.rel.weighted") for(i in 1:length(data)) { data_to_stats <- data[[i]] data_to_stats$De <- data_to_stats$De - De.add 
statistics <- calc_Statistics(data = data_to_stats) De.stats[i,1] <- statistics$weighted$n De.stats[i,2] <- statistics$unweighted$mean De.stats[i,3] <- statistics$weighted$mean De.stats[i,4] <- statistics$unweighted$median De.stats[i,5] <- statistics$unweighted$median De.stats[i,7] <- statistics$unweighted$sd.abs De.stats[i,8] <- statistics$unweighted$sd.rel De.stats[i,9] <- statistics$unweighted$se.abs De.stats[i,10] <- statistics$weighted$se.rel De.stats[i,11] <- quantile(data[[i]][,1], 0.25) De.stats[i,12] <- quantile(data[[i]][,1], 0.75) De.stats[i,13] <- statistics$unweighted$skewness De.stats[i,14] <- statistics$unweighted$kurtosis De.stats[i,15] <- statistics$weighted$sd.abs De.stats[i,16] <- statistics$weighted$sd.rel De.stats[i,17] <- statistics$weighted$se.abs De.stats[i,18] <- statistics$weighted$se.rel ## kdemax - here a little doubled as it appears below again De.density <- try(density(x = data[[i]][,1], kernel = "gaussian", from = limits.z[1], to = limits.z[2]), silent = TRUE) if(class(De.density) == "try-error") { De.stats[i,6] <- NA } else { De.stats[i,6] <- De.density$x[which.max(De.density$y)] } } label.text = list(NA) if(summary.pos[1] != "sub") { n.rows <- length(summary) for(i in 1:length(data)) { stops <- paste(rep("\n", (i - 1) * n.rows), collapse = "") summary.text <- character(0) for(j in 1:length(summary)) { summary.text <- c(summary.text, paste( "", ifelse("n" %in% summary[j] == TRUE, paste("n = ", De.stats[i,1], "\n", sep = ""), ""), ifelse("mean" %in% summary[j] == TRUE, paste("mean = ", round(De.stats[i,2], 2), "\n", sep = ""), ""), ifelse("mean.weighted" %in% summary[j] == TRUE, paste("weighted mean = ", round(De.stats[i,3], 2), "\n", sep = ""), ""), ifelse("median" %in% summary[j] == TRUE, paste("median = ", round(De.stats[i,4], 2), "\n", sep = ""), ""), ifelse("median.weighted" %in% summary[j] == TRUE, paste("weighted median = ", round(De.stats[i,5], 2), "\n", sep = ""), ""), ifelse("kdemax" %in% summary[j] == TRUE, paste("kdemax = 
", round(De.stats[i,6], 2), " \n ", sep = ""), ""), ifelse("sdabs" %in% summary[j] == TRUE, paste("sd = ", round(De.stats[i,7], 2), "\n", sep = ""), ""), ifelse("sdrel" %in% summary[j] == TRUE, paste("rel. sd = ", round(De.stats[i,8], 2), " %", "\n", sep = ""), ""), ifelse("seabs" %in% summary[j] == TRUE, paste("se = ", round(De.stats[i,9], 2), "\n", sep = ""), ""), ifelse("serel" %in% summary[j] == TRUE, paste("rel. se = ", round(De.stats[i,10], 2), " %", "\n", sep = ""), ""), ifelse("skewness" %in% summary[j] == TRUE, paste("skewness = ", round(De.stats[i,13], 2), "\n", sep = ""), ""), ifelse("kurtosis" %in% summary[j] == TRUE, paste("kurtosis = ", round(De.stats[i,14], 2), "\n", sep = ""), ""), ifelse("in.2s" %in% summary[j] == TRUE, paste("in 2 sigma = ", round(sum(data[[i]][,7] > -2 & data[[i]][,7] < 2) / nrow(data[[i]]) * 100 , 1), " %", sep = ""), ""), ifelse("sdabs.weighted" %in% summary[j] == TRUE, paste("abs. weighted sd = ", round(De.stats[i,15], 2), "\n", sep = ""), ""), ifelse("sdrel.weighted" %in% summary[j] == TRUE, paste("rel. weighted sd = ", round(De.stats[i,16], 2), "\n", sep = ""), ""), ifelse("seabs.weighted" %in% summary[j] == TRUE, paste("abs. weighted se = ", round(De.stats[i,17], 2), "\n", sep = ""), ""), ifelse("serel.weighted" %in% summary[j] == TRUE, paste("rel. 
weighted se = ", round(De.stats[i,18], 2), "\n", sep = ""), ""), sep = "")) } summary.text <- paste(summary.text, collapse = "") label.text[[length(label.text) + 1]] <- paste(stops, summary.text, stops, sep = "") } } else { for(i in 1:length(data)) { summary.text <- character(0) for(j in 1:length(summary)) { summary.text <- c(summary.text, ifelse("n" %in% summary[j] == TRUE, paste("n = ", De.stats[i,1], " | ", sep = ""), ""), ifelse("mean" %in% summary[j] == TRUE, paste("mean = ", round(De.stats[i,2], 2), " | ", sep = ""), ""), ifelse("mean.weighted" %in% summary[j] == TRUE, paste("weighted mean = ", round(De.stats[i,3], 2), " | ", sep = ""), ""), ifelse("median" %in% summary[j] == TRUE, paste("median = ", round(De.stats[i,4], 2), " | ", sep = ""), ""), ifelse("median.weighted" %in% summary[j] == TRUE, paste("weighted median = ", round(De.stats[i,5], 2), " | ", sep = ""), ""), ifelse("kdemax" %in% summary[j] == TRUE, paste("kdemax = ", round(De.stats[i,6], 2), " | ", sep = ""), ""), ifelse("sdrel" %in% summary[j] == TRUE, paste("rel. sd = ", round(De.stats[i,8], 2), " %", " | ", sep = ""), ""), ifelse("sdabs" %in% summary[j] == TRUE, paste("abs. sd = ", round(De.stats[i,7], 2), " | ", sep = ""), ""), ifelse("serel" %in% summary[j] == TRUE, paste("rel. se = ", round(De.stats[i,10], 2), " %", " | ", sep = ""), ""), ifelse("seabs" %in% summary[j] == TRUE, paste("abs. se = ", round(De.stats[i,9], 2), " | ", sep = ""), ""), ifelse("skewness" %in% summary[j] == TRUE, paste("skewness = ", round(De.stats[i,13], 2), " | ", sep = ""), ""), ifelse("kurtosis" %in% summary[j] == TRUE, paste("kurtosis = ", round(De.stats[i,14], 2), " | ", sep = ""), ""), ifelse("in.2s" %in% summary[j] == TRUE, paste("in 2 sigma = ", round(sum(data[[i]][,7] > -2 & data[[i]][,7] < 2) / nrow(data[[i]]) * 100 , 1), " % ", sep = ""), ""), ifelse("sdabs.weighted" %in% summary[j] == TRUE, paste("abs. 
weighted sd = ", round(De.stats[i,15], 2), " %", " | ", sep = ""), ""), ifelse("sdrel.weighted" %in% summary[j] == TRUE, paste("rel. weighted sd = ", round(De.stats[i,16], 2), " %", " | ", sep = ""), ""), ifelse("seabs.weighted" %in% summary[j] == TRUE, paste("abs. weighted se = ", round(De.stats[i,17], 2), " %", " | ", sep = ""), ""), ifelse("serel.weighted" %in% summary[j] == TRUE, paste("rel. weighted se = ", round(De.stats[i,18], 2), " %", " | ", sep = ""), "") ) } summary.text <- paste(summary.text, collapse = "") label.text[[length(label.text) + 1]] <- paste( " ", summary.text, sep = "") } ## remove outer vertical lines from string for(i in 2:length(label.text)) { label.text[[i]] <- substr(x = label.text[[i]], start = 3, stop = nchar(label.text[[i]]) - 3) } } ## remove dummy list element label.text[[1]] <- NULL ## convert keywords into summary placement coordinates if(missing(summary.pos) == TRUE) { summary.pos <- c(limits.x[1], limits.y[2]) summary.adj <- c(0, 1) } else if(length(summary.pos) == 2) { summary.pos <- summary.pos summary.adj <- c(0, 1) } else if(summary.pos[1] == "topleft") { summary.pos <- c(limits.x[1], limits.y[2]) summary.adj <- c(0, 1) } else if(summary.pos[1] == "top") { summary.pos <- c(mean(limits.x), limits.y[2]) summary.adj <- c(0.5, 1) } else if(summary.pos[1] == "topright") { summary.pos <- c(limits.x[2], limits.y[2]) summary.adj <- c(1, 1) } else if(summary.pos[1] == "left") { summary.pos <- c(limits.x[1], mean(limits.y)) summary.adj <- c(0, 0.5) } else if(summary.pos[1] == "center") { summary.pos <- c(mean(limits.x), mean(limits.y)) summary.adj <- c(0.5, 0.5) } else if(summary.pos[1] == "right") { summary.pos <- c(limits.x[2], mean(limits.y)) summary.adj <- c(1, 0.5) }else if(summary.pos[1] == "bottomleft") { summary.pos <- c(limits.x[1], limits.y[1]) summary.adj <- c(0, 0) } else if(summary.pos[1] == "bottom") { summary.pos <- c(mean(limits.x), limits.y[1]) summary.adj <- c(0.5, 0) } else if(summary.pos[1] == "bottomright") { 
summary.pos <- c(limits.x[2], limits.y[1]) summary.adj <- c(1, 0) } ## convert keywords into legend placement coordinates if(missing(legend.pos) == TRUE) { legend.pos <- c(limits.x[1], limits.y[2]) legend.adj <- c(0, 1) } else if(length(legend.pos) == 2) { legend.pos <- legend.pos legend.adj <- c(0, 1) } else if(legend.pos[1] == "topleft") { legend.pos <- c(limits.x[1], limits.y[2]) legend.adj <- c(0, 1) } else if(legend.pos[1] == "top") { legend.pos <- c(mean(limits.x), limits.y[2]) legend.adj <- c(0.5, 1) } else if(legend.pos[1] == "topright") { legend.pos <- c(limits.x[2], limits.y[2]) legend.adj <- c(1, 1) } else if(legend.pos[1] == "left") { legend.pos <- c(limits.x[1], mean(limits.y)) legend.adj <- c(0, 0.5) } else if(legend.pos[1] == "center") { legend.pos <- c(mean(limits.x), mean(limits.y)) legend.adj <- c(0.5, 0.5) } else if(legend.pos[1] == "right") { legend.pos <- c(limits.x[2], mean(limits.y)) legend.adj <- c(1, 0.5) } else if(legend.pos[1] == "bottomleft") { legend.pos <- c(limits.x[1], limits.y[1]) legend.adj <- c(0, 0) } else if(legend.pos[1] == "bottom") { legend.pos <- c(mean(limits.x), limits.y[1]) legend.adj <- c(0.5, 0) } else if(legend.pos[1] == "bottomright") { legend.pos <- c(limits.x[2], limits.y[1]) legend.adj <- c(1, 0) } ## calculate line coordinates and further parameters if(missing(line) == FALSE) { line = line + De.add if(log.z == TRUE) {line <- log(line)} line.coords <- list(NA) for(i in 1:length(line)) { line.x <- c(limits.x[1], r / sqrt(1 + f^2 * (line[i] - z.central.global)^2)) line.y <- c(0, (line[i] - z.central.global) * line.x[2]) line.coords[[length(line.coords) + 1]] <- rbind(line.x, line.y) } line.coords[1] <- NULL if(missing(line.col) == TRUE) { line.col <- seq(from = 1, to = length(line.coords)) } if(missing(line.label) == TRUE) { line.label <- rep("", length(line.coords)) } } ## calculate rug coordinates if(missing(rug) == FALSE) { if(log.z == TRUE) { rug.values <- log(De.global) } else { rug.values <- De.global } 
rug.coords <- list(NA) for(i in 1:length(rug.values)) { rug.x <- c(r / sqrt(1 + f^2 * (rug.values[i] - z.central.global)^2) * 0.988, r / sqrt(1 + f^2 * (rug.values[i] - z.central.global)^2) * 0.995) rug.y <- c((rug.values[i] - z.central.global) * rug.x[1], (rug.values[i] - z.central.global) * rug.x[2]) rug.coords[[length(rug.coords) + 1]] <- rbind(rug.x, rug.y) } rug.coords[1] <- NULL } ## Generate plot ------------------------------------------------------------ ## check if plotting is enabled if(show == TRUE) { ## determine number of subheader lines to shif the plot if(length(summary) > 0 & summary.pos[1] == "sub") { shift.lines <- length(data) + 1 } else {shift.lines <- 1} ## setup plot area par(mar = c(4, 4, shift.lines + 1.5, 7), xpd = TRUE, cex = cex) ## create empty plot plot(NA, xlim = limits.x, ylim = limits.y, main = "", sub = sub, xlab = "", ylab = "", xaxs = "i", yaxs = "i", frame.plot = FALSE, axes = FALSE) ## add y-axis label mtext(side = 2, line = 2.5, at = 0, adj = 0.5, cex = cex, text = ylab) ## calculate upper x-axis label values label.x.upper <- if(log.z == TRUE) { as.character(round(1/axTicks(side = 1)[-1] * 100, 1)) } else { as.character(round(1/axTicks(side = 1)[-1], 1)) } ## optionally, plot 2-sigma-bar if(bar.col[1] != "none") { for(i in 1:length(data)) { polygon(x = polygons[i,1:4], y = polygons[i,5:8], lty = "blank", col = bar.col[i]) } } ## optionally, add grid lines if(grid.col[1] != "none") { for(i in 1:length(tick.x1.major)) { lines(x = c(limits.x[1], tick.x1.major[i]), y = c(0, tick.y1.major[i]), col = grid.col) } } ## optionally, plot central value lines if(lwd[1] > 0 & lty[1] > 0) { for(i in 1:length(data)) { x2 <- r / sqrt(1 + f^2 * ( data[[i]][1,5] - z.central.global)^2) y2 <- (data[[i]][1,5] - z.central.global) * x2 lines(x = c(limits.x[1], x2), y = c(0, y2), lty = lty[i], lwd = lwd[i], col = col[i]) } } ## optionally add further lines if(missing(line) == FALSE) { for(i in 1:length(line)) { lines(x = line.coords[[i]][1,], y = 
line.coords[[i]][2,], col = line.col[i]) text(x = line.coords[[i]][1,2], y = line.coords[[i]][2,2] + par()$cxy[2] * 0.3, labels = line.label[i], pos = 2, col = line.col[i], cex = cex * 0.9) } } ## overplot unwanted parts polygon(x = c(ellipse[,1], limits.x[2] * 2, limits.x[2] * 2), y = c(ellipse[,2], max(ellipse[,2]), min(ellipse[,2])), col = "white", lty = 0) ## add plot title title(main = main, line = shift.lines, font = 2) ## plot lower x-axis (precision) x.axis.ticks <- axTicks(side = 1) x.axis.ticks <- x.axis.ticks[c(TRUE, x.axis.ticks <= limits.x[2])] x.axis.ticks <- x.axis.ticks[x.axis.ticks <= limits.x[2]] ## axis with lables and ticks axis(side = 1, at = x.axis.ticks, lwd = 1, xlab = "") ## extend axis line to right side of the plot lines(x = c(max(x.axis.ticks, na.rm = TRUE), limits.x[2]), y = c(limits.y[1], limits.y[1])) ## draw closing tick on right hand side axis(side = 1, tcl = 0.5, lwd = 0, lwd.ticks = 1, at = limits.x[2], labels = FALSE) axis(side = 1, tcl = -0.5, lwd = 0, lwd.ticks = 1, at = limits.x[2], labels = FALSE) ## add upper axis label mtext(text = xlab[1], at = (limits.x[1] + limits.x[2]) / 2, side = 1, line = -3.5, cex = cex) ## add lower axis label mtext(text = xlab[2], at = (limits.x[1] + limits.x[2]) / 2, side = 1, line = 2.5, cex = cex) ## plot upper x-axis axis(side = 1, tcl = 0.5, lwd = 0, lwd.ticks = 1, at = x.axis.ticks[-1], labels = FALSE) ## remove first tick label (infinity) label.x.upper <- label.x.upper[1:(length(x.axis.ticks) - 1)] ## add tick labels axis(side = 1, lwd = 0, labels = label.x.upper, at = x.axis.ticks[-1], line = -3) ## plot minor z-ticks for(i in 1:length(tick.values.minor)) { lines(x = c(tick.x1.minor[i], tick.x2.minor[i]), y = c(tick.y1.minor[i], tick.y2.minor[i])) } ## plot major z-ticks for(i in 1:length(tick.values.major)) { lines(x = c(tick.x1.major[i], tick.x2.major[i]), y = c(tick.y1.major[i], tick.y2.major[i])) } ## plot z-axis lines(ellipse) ## plot z-values text(x = label.x, y = label.y, label = 
label.z.text, 0) ## plot z-label mtext(side = 4, at = 0, line = 5, las = 3, adj = 0.5, cex = cex, text = zlab) ## optionally add rug if(rug == TRUE) { for(i in 1:length(rug.coords)) { lines(x = rug.coords[[i]][1,], y = rug.coords[[i]][2,], col = col[data.global[i,9]]) } } ## plot values for(i in 1:length(data)) { points(data[[i]][,6][data[[i]][,6] <= limits.x[2]], data[[i]][,8][data[[i]][,6] <= limits.x[2]], col = col[i], pch = pch[i]) } ## optionally add min, max, median sample text if(length(stats) > 0) { text(x = stats.data[,1], y = stats.data[,2], labels = round(stats.data[,3], 1), pos = 2, cex = 0.85) } ## optionally add legend content if(missing(legend) == FALSE) { legend(x = legend.pos[1], y = 0.8 * legend.pos[2], xjust = legend.adj[1], yjust = legend.adj[2], legend = legend, pch = pch, col = col, text.col = col, cex = 0.8 * cex, bty = "n") } ## plot y-axis if(y.ticks == TRUE) { char.height <- par()$cxy[2] tick.space <- axisTicks(usr = limits.y, log = FALSE) tick.space <- (max(tick.space) - min(tick.space)) / length(tick.space) if(tick.space < char.height * 1.5) { axis(side = 2, at = c(-2, 2), labels = c("", ""), las = 1) axis(side = 2, at = 0, tcl = 0, labels = paste("\u00B1", "2"), las = 1) } else { axis(side = 2, at = seq(-2, 2, by = 2), las = 2) } } else { axis(side = 2, at = 0) } ## optionally add subheader text mtext(side = 3, line = shift.lines - 2, text = mtext, cex = 0.8 * cex) ## add summary content for(i in 1:length(data)) { if(summary.pos[1] != "sub") { text(x = summary.pos[1], y = 0.8 * summary.pos[2], adj = summary.adj, labels = label.text[[i]], cex = 0.8 * cex, col = col[i]) } else { if(mtext == "") { mtext(side = 3, line = shift.lines - 1 - i, text = label.text[[i]], col = col[i], cex = 0.8 * cex) } } } ##FUN by R Luminescence Team if(fun==TRUE){sTeve()} } if(output == TRUE) { return(list(data = data, data.global = data.global, xlim = limits.x, ylim = limits.y, zlim = limits.z, r = r, plot.ratio = plot.ratio, ticks.major = ticks.major, 
ticks.minor = ticks.minor, labels = labels, polygons = polygons, ellipse.lims = ellipse.lims)) } } Luminescence/R/get_Layout.R0000644000176200001440000006176413231137116015340 0ustar liggesusers#' Collection of layout definitions #' #' This helper function returns a list with layout definitions for homogeneous #' plotting. #' #' The easiest way to create a user-specific layout definition is perhaps to #' create either an empty or a default layout object and fill/modify the #' definitions (`user.layout <- get_Layout(data = "empty")`). #' #' @param layout [character] or [list] object (**required**): #' name of the layout definition to be returned. If name is provided the #' respective definition is returned. One of the following #' supported layout definitions is possible: `"default"`, #' `"journal.1"`, `"small"`, `"empty"`. #' #' User-specific layout definitions must be provided as a list object of #' predefined structure, see details. #' #' @return A list object with layout definitions for plot functions. 
#' #' @section Function version: 0.1 #' #' @author Michael Dietze, GFZ Potsdam (Germany) #' #' @examples #' #' ## read example data set #' data(ExampleData.DeValues, envir = environment()) #' #' ## show structure of the default layout definition #' layout.default <- get_Layout(layout = "default") #' str(layout.default) #' #' ## show colour definitions for Abanico plot, only #' layout.default$abanico$colour #' #' ## set Abanico plot title colour to orange #' layout.default$abanico$colour$main <- "orange" #' #' ## create Abanico plot with modofied layout definition #' plot_AbanicoPlot(data = ExampleData.DeValues, #' layout = layout.default) #' #' ## create Abanico plot with predefined layout "journal" #' plot_AbanicoPlot(data = ExampleData.DeValues, #' layout = "journal") #' #' @md #' @export get_Layout <- function( layout ) { ## pre-defined layout selections if(is.character(layout) == TRUE & length(layout) == 1) { if(layout == "empty") { layout = list( ## empty Abanico plot ------------------------------------------------- abanico = list( font.type = list( main = character(1), xlab1 = character(1), xlab2 = character(1), ylab = character(1), zlab = character(1), xtck1 = character(1), xtck2 = character(1), xtck3 = character(1), ytck = character(1), ztck = character(1), mtext = character(1), summary = character(1), # optionally vector stats = character(1), # optionally vector legend = character(1) # optionally vector ), font.size = list( main = numeric(1), xlab1 = numeric(1), xlab2 = numeric(1), xlab3 = numeric(1), ylab = numeric(1), zlab = numeric(1), xtck1 = numeric(1), xtck2 = numeric(1), xtck3 = numeric(1), ytck = numeric(1), ztck = numeric(1), mtext = numeric(1), summary = numeric(1), # optionally vector stats = numeric(1), # optionally vector legend = numeric(1) # optionally vector ), font.deco = list( main = character(1), xlab1 = character(1), xlab2 = character(1), xlab3 = character(1), ylab = character(1), zlab = character(1), xtck1 = character(1), xtck2 = 
character(1), xtck3 = character(1), ytck = character(1), ztck = character(1), mtext = character(1), summary = character(1), # optionally vector stats = character(1), # optionally vector legend = character(1) # optionally vector ), colour = list( main = numeric(1), # plot title colour xlab1 = numeric(1), # left x-axis label colour xlab2 = numeric(1), # right x-axis label colour xlab3 = numeric(1), # right x-axis label colour ylab = numeric(1), # y-axis label colour zlab = numeric(1), # z-axis label colour xtck1 = numeric(1), # left x-axis tick colour xtck2 = numeric(1), # right x-axis tick colour xtck3 = numeric(1), # right x-axis tick colour ytck = numeric(1), # y-axis tick colour ztck = numeric(1), # z-axis tick colour mtext = numeric(1), # subheader text colour summary = numeric(1), # statistic summary colour stats = numeric(1), # value statistics colour legend = numeric(1), # legend colour centrality = numeric(1), # Centrality line colour value.dot = numeric(1), # De value dot colour value.bar = numeric(1), # De value error bar colour value.rug = numeric(1), # De value rug colour poly.line = numeric(1), # polygon line colour poly.fill = numeric(1), # polygon fill colour bar.line = numeric(1), # polygon line colour bar.fill = numeric(1), # polygon fill colour kde.line = numeric(1), kde.fill = numeric(1), grid.major = numeric(1), grid.minor = numeric(1), border = numeric(1), background = numeric(1)), dimension = list( figure.width = numeric(1), # figure width in mm figure.height = numeric(1), # figure height in mm margin = numeric(4), # margin sizes in mm main.line = numeric(1), # line height in % xlab1.line = numeric(1), # line height in % xlab2.line = numeric(1), # line height in % xlab3.line = numeric(1), # line height in % ylab.line = numeric(1), # line height in % zlab.line = numeric(1), # line height in % xtck1.line = numeric(1), # line height in % xtck2.line = numeric(1), # line height in % xtck3.line = numeric(1), # line height in % ytck.line = numeric(1), 
# line height in % ztck.line = numeric(1), # line height in % xtcl1 = numeric(1), # tick length in % xtcl2 = numeric(1), # tick length in % xtcl3 = numeric(1), # tick length in % ytcl = numeric(1), # tick length in % ztcl = numeric(1), # tick length in % rugl = numeric(1), # rug length in % mtext = numeric(1), # line height in % summary.line = numeric(1) # line height in % )), ## empty KDE plot ----------------------------------------------------- kde = list( font.type = list( main = character(1), xlab = character(1), ylab1 = character(1), ylab2 = character(1), xtck = character(1), ytck1 = character(1), ytck2 = character(1), stats = character(1), # optionally vector legend = character(1) # optionally vector ), font.size = list( main = numeric(1), xlab = numeric(1), ylab1 = numeric(1), ylab2 = numeric(1), xtck = numeric(1), ytck1 = numeric(1), ytck2 = numeric(1), stats = numeric(1), # optionally vector legend = numeric(1) # optionally vector ), font.deco = list( main = character(1), xlab = character(1), ylab1 = character(1), ylab2 = character(1), xtck = character(1), ytck1 = character(1), ytck2 = character(1), stats = character(1), # optionally vector legend = character(1) # optionally vector ), colour = list( main = numeric(1), # plot title colour xlab = numeric(1), # x-axis label colour ylab1 = numeric(1), # primary y-axis label colour ylab2 = numeric(1), # secondary y-axis label colour xtck = numeric(1), # x-axis tick colour ytck1 = numeric(1), # primary y-axis tick colour ytck2 = numeric(1), # secondary y-axis tick colour box = numeric(1), # plot frame box line colour mtext = numeric(1), # subheader text colour stats = numeric(1), # statistic summary colour kde.line = numeric(1), # KDE line colour kde.fill = numeric(1), # KDE fill colour value.dot = numeric(1), # De value dot colour value.bar = numeric(1), # De value error bar colour value.rug = numeric(1), # De value rug colour boxplot.line = numeric(1), # boxplot line colour boxplot.fill = numeric(1), # 
boxplot fill colour mean.line = numeric(1), # mean line colour sd.bar = numeric(1), # sd-line colour background = numeric(1)), # background colour dimension = list( figure.width = numeric(1), # figure width in mm figure.height = numeric(1), # figure height in mm margin = numeric(4), # margin sizes in mm main.line = numeric(1), # line height in % xlab.line = numeric(1), # line height in % ylab1.line = numeric(1), # line height in % ylab2.line = numeric(1), # line height in % xtck.line = numeric(1), # line height in % ytck1.line = numeric(1), # line height in % ytck2.line = numeric(1), # line height in % xtcl = numeric(1), # tick length in % ytcl1 = numeric(1), # tick length in % ytcl2 = numeric(1), # tick length in % stats.line = numeric(1) # line height in % ) ) ) } else if(layout == "default") { layout = list( ## default Abanico plot ----------------------------------------------- abanico = list( font.type = list( main = "", xlab1 = "", xlab2 = "", ylab = "", zlab = "", xtck1 = "", xtck2 = "", xtck3 = "", ytck = "", ztck = "", mtext = "", summary = "", # optionally vector stats = "", # optionally vector legend = "" # optionally vector ), font.size = list( main = 12, xlab1 = 12, xlab2 = 12, xlab3 = 12, ylab = 12, zlab = 12, xtck1 = 12, xtck2 = 12, xtck3 = 12, ytck = 12, ztck = 12, mtext = 10, summary = 10, # optionally vector stats = 10, # optionally vector legend = 10 # optionally vector ), font.deco = list( main = "bold", xlab1 = "normal", xlab2 = "normal", xlab3 = "normal", ylab = "normal", zlab = "normal", xtck1 = "normal", xtck2 = "normal", xtck3 = "normal", ytck = "normal", ztck = "normal", mtext = "normal", summary = "normal", # optionally vector stats = "normal", # optionally vector legend = "normal" # optionally vector ), colour = list( main = 1, # plot title colour xlab1 = 1, # left x-axis label colour xlab2 = 1, # right x-axis label colour xlab3 = 1, # right x-axis label colour ylab = 1, # y-axis label colour zlab = 1, # z-axis label colour xtck1 = 1, # 
left x-axis tick colour xtck2 = 1, # right x-axis tick colour xtck3 = 1, # right x-axis tick colour ytck = 1, # y-axis tick colour ztck = 1, # z-axis tick colour mtext = 1, # subheader text colour summary = 1, # statistic summary colour stats = 1, # value statistics colour legend = 1, # legend colour centrality = 1, # Centrality line colour value.dot = 1, # De value dot colour value.bar = 1, # De value error bar colour value.rug = 1, # De value rug colour poly.line = NA, # polygon line colour poly.fill = adjustcolor("grey75", alpha.f = 0.6), # polygon fill colour bar.line = NA, # polygon line colour bar.fill = "grey60", # bar fill colour kde.line = 1, kde.fill = NA, grid.major = "grey80", grid.minor = "none", border = 1, background = NA), dimension = list( figure.width = "auto", # figure width in mm figure.height = "auto", # figure height in mm margin = c(10, 10, 10, 10), # margin sizes in mm main.line = 100, # line height in % xlab1.line = 90, # line height in % xlab2.line = 90, # line height in % xlab3.line = 90, # line height in % ylab.line = 100, # line height in % zlab.line = 70, # line height in % xtck1.line = 100, # line height in % xtck2.line = 100, # line height in % xtck3.line = 100, # line height in % ytck.line = 100, # line height in % ztck.line = 100, # line height in % xtcl1 = 100, # tick length in % xtcl2 = 100, # tick length in % xtcl3 = 100, # tick length in % ytcl = 100, # tick length in % ztcl = 100, # tick length in % rugl = 100, # rug length in % mtext = 100, # line height in % summary.line = 100 # line height in % )), ## default KDE plot --------------------------------------------------- kde = list( font.type = list( main = "", xlab = "", ylab1 = "", ylab2 = "", xtck = "", ytck1 = "", ytck2 = "", stats = "", # optionally vector legend = "" # optionally vector ), font.size = list( main = 14, xlab = 12, ylab1 = 12, ylab2 = 12, xtck = 12, ytck1 = 12, ytck2 = 12, stats = 12, # optionally vector legend = 12 # optionally vector ), font.deco = list( 
main = "bold", xlab = "normal", ylab1 = "normal", ylab2 = "normal", xtck = "normal", ytck1 = "normal", ytck2 = "normal", stats = "normal", # optionally vector legend = "normal" # optionally vector ), colour = list( main = 1, # plot title colour xlab = 1, # x-axis label colour ylab1 = 1, # primary y-axis label colour ylab2 = 1, # secondary y-axis label colour xtck = 1, # x-axis tick colour ytck1 = 1, # primary y-axis tick colour ytck2 = 1, # secondary y-axis tick colour box = 1, # plot frame box line colour mtext = 2, # subheader text colour stats = 1, # statistic summary colour kde.line = 1, # KDE line colour kde.fill = NULL, # KDE fill colour value.dot = 1, # De value dot colour value.bar = 1, # De value error bar colour value.rug = 1, # De value rug colour boxplot.line = 1, # boxplot line colour boxplot.fill = NULL, # boxplot fill colour mean.point = 1, # mean line colour sd.line = 1, # sd bar colour background = NULL), # background colour dimension = list( figure.width = "auto", # figure width in mm figure.height = "auto", # figure height in mm margin = c(10, 10, 10, 10), # margin sizes in mm main.line = 100, # line height in % xlab.line = 100, # line height in % ylab1.line = 100, # line height in % ylab2.line = 100, # line height in % xtck.line = 100, # line height in % ytck1.line = 100, # line height in % ytck2.line = 100, # line height in % xtcl = 100, # tick length in % ytcl1 = 100, # tick length in % ytcl2 = 100, # tick length in % stats.line = 100 # line height in % ) ) ) } else if(layout == "journal") { layout = list( ## journal Abanico plot ----------------------------------------------- abanico = list( font.type = list( main = "", xlab1 = "", xlab2 = "", ylab = "", zlab = "", xtck1 = "", xtck2 = "", xtck3 = "", ytck = "", ztck = "", mtext = "", summary = "", # optionally vector stats = "", # optionally vector legend = "" # optionally vector ), font.size = list( main = 8, xlab1 = 7, xlab2 = 7, xlab3 = 7, ylab = 7, zlab = 7, xtck1 = 7, xtck2 = 7, xtck3 = 
7, ytck = 7, ztck = 7, mtext = 6, summary = 6, # optionally vector stats = 6, # optionally vector legend = 6 # optionally vector ), font.deco = list( main = "bold", xlab1 = "normal", xlab2 = "normal", xlab3 = "normal", ylab = "normal", zlab = "normal", xtck1 = "normal", xtck2 = "normal", xtck3 = "normal", ytck = "normal", ztck = "normal", mtext = "normal", summary = "normal", # optionally vector stats = "normal", # optionally vector legend = "normal" # optionally vector ), colour = list( main = 1, # plot title colour xlab1 = 1, # left x-axis label colour xlab2 = 1, # right x-axis label colour xlab3 = 1, # right x-axis label colour ylab = 1, # y-axis label colour zlab = 1, # z-axis label colour xtck1 = 1, # left x-axis tick colour xtck2 = 1, # right x-axis tick colour xtck3 = 1, # right x-axis tick colour ytck = 1, # y-axis tick colour ztck = 1, # z-axis tick colour mtext = 1, # subheader text colour summary = 1, # statistic summary colour stats = 1, # value statistics colour legend = 1, # legend colour centrality = 1, # Centrality line colour value.dot = 1, # De value dot colour value.bar = 1, # De value error bar colour value.rug = 1, # De value rug colour poly.line = NA, # polygon line colour poly.fill = adjustcolor("grey75", alpha.f = 0.6), # polygon fill colour bar.line = NA, # polygon line colour bar.fill = "grey60", # bar fill colour kde.line = 1, kde.fill = NA, grid.major = "grey80", grid.minor = "none", border = 1, background = NA), dimension = list( figure.width = 100, # figure width in mm figure.height = 100, # figure height in mm margin = c(10, 10, 10, 10), # margin sizes in mm main.line = 70, # line height in % xlab1.line = 30, # line height in % xlab2.line = 65, # line height in % xlab3.line = 30, # line height in % ylab.line = 30, # line height in % zlab.line = 40, # line height in % xtck1.line = 50, # line height in % xtck2.line = 50, # line height in % xtck3.line = 50, # line height in % ytck.line = 70, # line height in % ztck.line = 70, # line 
height in % xtcl1 = 50, # tick length in % xtcl2 = 50, # tick length in % xtcl3 = 50, # tick length in % ytcl = 50, # tick length in % ztcl = 70, # tick length in % rugl = 70, # rug length in % mtext = 100, # line height in % summary.line = 70, # line height in % pch = 50 # point size in % )), ## journal KDE plot --------------------------------------------------- kde = list( font.type = list( main = "", xlab = "", ylab1 = "", ylab2 = "", xtck = "", ytck1 = "", ytck2 = "", stats = "", # optionally vector legend = "" # optionally vector ), font.size = list( main = 8, xlab = 7, ylab1 = 7, ylab2 = 7, xtck = 7, ytck1 = 7, ytck2 = 7, stats = 7, legend = 7 ), font.deco = list( main = "bold", xlab = "normal", ylab1 = "normal", ylab2 = "normal", xtck = "normal", ytck1 = "normal", ytck2 = "normal", stats = "normal", # optionally vector legend = "normal" # optionally vector ), colour = list( main = 1, # plot title colour xlab = 1, # x-axis label colour ylab1 = 1, # primary y-axis label colour ylab2 = 1, # secondary y-axis label colour xtck = 1, # x-axis tick colour ytck1 = 1, # primary y-axis tick colour ytck2 = 1, # secondary y-axis tick colour box = 1, # plot frame box line colour mtext = 1, # subheader text colour stats = "#2062B3", # statistic summary colour kde.line = "#2062B3", # KDE line colour kde.fill = NULL, # KDE fill colour value.dot = 1, # De value dot colour value.bar = 1, # De value error bar colour value.rug = 1, # De value rug colour boxplot.line = 1, # boxplot line colour boxplot.fill = NULL, # boxplot fill colour mean.line = adjustcolor(col = 1, alpha.f = 0.4), # mean line colour sd.bar = adjustcolor(col = 1, alpha.f = 0.4), # sd bar colour background = NULL), dimension = list( figure.width = 80, # figure width in mm figure.height = 80, # figure height in mm margin = c(10, 10, 10, 10), # margin sizes in mm main.line = 70, # line height in % xlab.line = 30, # line height in % ylab1.line = 40, # line height in % ylab2.line = 30, # line height in % xtck.line 
= 50, # line height in % ytck1.line = 65, # line height in % ytck2.line = 50, # line height in % xtcl = 50, # tick length in % ytcl1 = 20, # tick length in % ytcl2 = 50, # tick length in % stats.line = 70 # line height in % ) ) ) } else { warning("Layout definition not supported! Default layout is used.") layout <- get_Layout(layout = "default") } } else if(is.list(layout) == TRUE) { ## user-specific layout definition assignment layout <- layout } ## return layout parameters return(layout) } Luminescence/R/utils_DRAC.R0000644000176200001440000001762513231137116015152 0ustar liggesusers## FUNCTIONS ------------------------------------------------------------------- # subset the DRAC reference list # 'x' is the input table from use_DRAC() get_DRAC_references <- function(x) { refs <- DRAC_refs() refs_names <- names(refs) used <- list(refs = NULL, desc = NULL) # TI:4 - Conversion factors ref_tmp <- unique(x$`TI:4`) for (i in 1:length(ref_tmp)) { if (ref_tmp[i] == "X") next used$refs <- c(used$refs, refs[refs_names %in% ref_tmp[i]]) used$desc <- c(used$desc, "Conversion factors") } # TI:13 - External Rubidium ref_tmp <- unique(x$`TI:13`) if (any(ref_tmp == "Y")) { used$refs <- c(used$refs, refs["Mejdahl1987"]) used$desc <- c(used$desc, "External rubidium") } # TI:22 - Internal Rubidium ref_tmp <- unique(x$`TI:22`) if (any(ref_tmp == "Y")) { used$refs <- c(used$refs, refs["Mejdahl1987"]) used$desc <- c(used$desc, "Internal rubidium") } # TI:31 - Gamma dose rate scaling ref_tmp <- unique(x$`TI:31`) if (any(ref_tmp == "Y")) { used$refs <- c(used$refs, refs["Aitken1985"]) used$desc <- c(used$desc, "Gamma dose rate scaling") } # TI:34 - alpha grain size attenuation ref_tmp <- unique(x$`TI:34`) for (i in 1:length(ref_tmp)) { if (ref_tmp[i] == "X") next used$refs <- c(used$refs, refs[refs_names %in% ref_tmp[i]]) used$desc <- c(used$desc, "Alpha grain size attenuation factors") } # TI:35 - Beta grain size attenuation ref_tmp <- unique(x$`TI:35`) for (i in 1:length(ref_tmp)) { 
if (ref_tmp[i] == "X") next used$refs <- c(used$refs, refs[refs_names %in% ref_tmp[i]]) used$desc <- c(used$desc, "Beta grain size attenuation factors") } # TI:38 - beta etch attenuation factor ref_tmp <- unique(x$`TI:38`) for (i in 1:length(ref_tmp)) { if (ref_tmp[i] == "X") next used$refs <- c(used$refs, refs[refs_names %in% ref_tmp[i]]) used$desc <- c(used$desc, "Beta etch attenuation factor") } # TI:50 - Cosmic dose rate ref_tmp <- unique(x$`TI:50`) if (any(ref_tmp == "X")) { used$refs <- c(used$refs, refs[c("PrescottHutton1994", "PrescottStephan1982")]) used$desc <- c(used$desc, c("Cosmic dose rate", "Cosmic dose rate")) } return(used) } ## REFERENCE LIST -------------------------------------------------------------- DRAC_refs <- function() { list( Aitken1985 = bibentry( bibtype = "Book", author = person("M.J.", "Aitken"), title = "Thermoluminescence Dating", year = "1985", publisher = "Academic Press", adress = "London" ), AitkenXie1990 = bibentry( bibtype = "Article", author = c( person("M.J.", "Aitken"), person("J.", "Xie") ), title = "Moisture correction for annual gamma dose", year = "1990", journal = "Ancient TL", volume = "8", pages = "6-9" ), AdamiecAitken1998 = bibentry( bibtype = "Article", author = c( person("G.", "Adamiec"), person("M.J.", "Aitken") ), title = "Dose-rate conversion factors: update", year = "1998", journal = "Ancient TL", volume = "16", pages = "37-46" ), Guerinetal2011 = bibentry( bibtype = "Article", author = c( person("G.", "Guerin"), person("N.", "Mercier"), person("G.", "Adamiec") ), title = "Dose-rate conversion factors: update", year = "2011", journal = "Ancient TL", volume = "29", pages = "5-8" ), Liritzisetal2013 = bibentry( bibtype = "Article", author = c( person("I.", "Liritzis"), person("K.", "Stamoulis"), person("C.", "Papachristodoulou"), person("K.", "Ioannides") ), title = "A re-evaluation of radiation dose-rate conversion factors. 
", year = "2013", journal = "Mediterranean Archaeology and Archaeometry", volume = "13", pages = "1-15" ), Bell1979 = bibentry( bibtype = "Article", author = c( person("W.T.", "Bell") ), title = "Attenuation factors for the absorbed radiation dose in quartz inclusions for thermoluminescence dating", year = "1979", journal = "Ancient TL", volume = "8", pages = "1-12" ), Bell1980 = bibentry( bibtype = "Article", author = c( person("W.T.", "Bell") ), title = "Alpha attenuation in Quartz grains for Thermoluminescence Dating", year = "1980", journal = "Ancient TL", volume = "12", pages = "4-8" ), Brennanetal1991 = bibentry( bibtype = "Article", author = c( person("B.J.", "Brennan"), person("R.G.", "Lyons"), person("S.W.", "Phillips") ), title = "Attenuation of alpha particle track dose for spherical grains", year = "1991", journal = "International Journal of Radiation Applications and Instrumentation. Part D. Nuclear Tracks and Radiation Measurements", volume = "18", pages = "249-253" ), Mejdahl1979 = bibentry( bibtype = "Article", author = c( person("V.", "Mejdahl") ), title = "Thermoluminescence Dating: Beta-Dose Attenuation in Quartz Grains", year = "1979", journal = "Archaeometry", volume = "21", pages = "61-72" ), Mejdahl1987 = bibentry( bibtype = "Article", author = c( person("V.", "Mejdahl") ), title = "Internal radioactivity in quartz and feldspar grains", year = "1987", journal = "Ancient TL", volume = "5", pages = "10-17" ), Brennan2003 = bibentry( bibtype = "Article", author = c( person("B.J.", "Brennan") ), title = "Beta doses to spherical grains", year = "2003", journal = "Radiation Measurements", volume = "37", pages = "299-303" ), `Guerinetal2012-Q` = bibentry( bibtype = "Article", author = c( person("G.", "Guerin"), person("N.", "Mercier"), person("R.", "Nathan"), person("G.", "Adamiec"), person("Y.", "Lefrais") ), title = "On the use of the infinite matrix assumption and associated concepts: A critical review", year = "2012", journal = "Radiation 
Measurements", volume = "47", pages = "778-785" ), `Guerinetal2012-F` = bibentry( bibtype = "Article", author = c( person("G.", "Guerin"), person("N.", "Mercier"), person("R.", "Nathan"), person("G.", "Adamiec"), person("Y.", "Lefrais") ), title = "On the use of the infinite matrix assumption and associated concepts: A critical review", year = "2012", journal = "Radiation Measurements", volume = "47", pages = "778-785" ), PrescottHutton1994 = bibentry( bibtype = "Article", author = c( person("J.R.", "Prescott"), person("J.T.", "Hutton") ), title = "Cosmic ray contributions to dose rates for luminescence and ESR dating: Large depths and long-term time variations", year = "1994", journal = "Radiation Measurements", volume = "23", pages = "497-500" ), PrescottStephan1982 = bibentry( bibtype = "Article", author = c( person("J.R.", "Prescott"), person("L.G.", "Stephan") ), title = "The contribution of cosmic radiation to the environmental dose for thermoluminescence dating", year = "1982", journal = "PACT", volume = "6", pages = "17-25" ), Readhead2002 = bibentry( bibtype = "Article", author = c( person("M.L.", "ReadHead") ), title = "Absorbed dose fraction for 87Rb beta particles", year = "2002", journal = "Ancient TL", volume = "20", pages = "25-29" ) ) }Luminescence/R/get_Risoe.BINfileData.R0000644000176200001440000000210613231137116017166 0ustar liggesusers#' General accessor function for RLum S4 class objects #' #' Function calls object-specific get functions for RisoeBINfileData S4 class objects. #' #' The function provides a generalised access point for specific #' [Risoe.BINfileData-class] objects. \cr #' Depending on the input object, the corresponding get function will be selected. #' Allowed arguments can be found in the documentations of the corresponding #' [Risoe.BINfileData-class] class. #' #' @param object [Risoe.BINfileData-class] (**required**): #' S4 object of class `RLum` #' #' @param ... 
further arguments that one might want to pass to the specific
#' get function
#'
#' @return Return is the same as input objects as provided in the list
#'
#' @section Function version: 0.1.0
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @seealso [Risoe.BINfileData-class]
#'
#' @keywords utilities
#'
#' @md
#' @export
## Declares the S4 generic only; the class-specific method for
## Risoe.BINfileData objects is registered elsewhere in the package
## (dispatch happens on the class of 'object').
setGeneric(
  name = "get_Risoe.BINfileData",
  def = function(object, ...) {
    standardGeneric("get_Risoe.BINfileData")
  },
  package = "Luminescence"
)
Luminescence/R/calc_Statistics.R0000644000176200001440000001711413231137116016326 0ustar liggesusers#' Function to calculate statistic measures
#'
#' This function calculates a number of descriptive statistics for estimates
#' with a given standard error (SE), most fundamentally using error-weighted approaches.
#'
#' The option to use Monte Carlo Methods (`n.MCM`) allows calculating
#' all descriptive statistics based on random values. The distribution of these
#' random values is based on the Normal distribution with `De` values as
#' means and `De_error` values as one standard deviation. Increasing the
#' number of MCM-samples linearly increases computation time. On a Lenovo X230
#' machine evaluation of 25 Aliquots with n.MCM = 1000 takes 0.01 s, with
#' n = 100000, ca. 1.65 s. It might be useful to work with logarithms of these
#' values. See Dietze et al. (2016, Quaternary Geochronology) and the function
#' [plot_AbanicoPlot] for details.
#'
#' @param data [data.frame] or [RLum.Results-class] object (**required**):
#' for [data.frame] two columns: De (`data[,1]`) and De error (`data[,2]`).
#' To plot several data sets in one plot the data sets must be provided
#' as `list`, e.g. `list(data.1, data.2)`.
#'
#' @param weight.calc [character]:
#' type of weight calculation. One out of `"reciprocal"` (weight is 1/error),
#' `"square"` (weight is 1/error^2). Default is `"square"`.
#'
#' @param digits [integer] (*with default*):
#' round numbers to the specified digits.
#' If digits is set to `NULL` nothing is rounded.
#'
#' @param n.MCM [numeric] (*with default*):
#' number of samples drawn for Monte Carlo-based statistics.
#' `NULL` (the default) disables MC runs.
#'
#' @param na.rm [logical] (*with default*):
#' indicating whether `NA` values should be stripped before the computation proceeds.
#'
#' @return Returns a list with weighted and unweighted statistic measures.
#'
#' @section Function version: 0.1.7
#'
#' @keywords datagen
#'
#' @author Michael Dietze, GFZ Potsdam (Germany)
#'
#' @examples
#'
#' ## load example data
#' data(ExampleData.DeValues, envir = environment())
#'
#' ## show a rough plot of the data to illustrate the non-normal distribution
#' plot_KDE(ExampleData.DeValues$BT998)
#'
#' ## calculate statistics and show output
#' str(calc_Statistics(ExampleData.DeValues$BT998))
#'
#' \dontrun{
#' ## now the same for 10000 normal distributed random numbers with equal errors
#' x <- as.data.frame(cbind(rnorm(n = 10^5, mean = 0, sd = 1),
#'                          rep(0.001, 10^5)))
#'
#' ## note the congruent results for weighted and unweighted measures
#' str(calc_Statistics(x))
#' }
#'
#' @md
#' @export
calc_Statistics <- function(
  data,
  weight.calc = "square",
  digits = NULL,
  n.MCM = NULL,
  na.rm = TRUE
){

  ## Check input data
  if(is(data, "RLum.Results") == FALSE &
     is(data, "data.frame") == FALSE) {
    stop("[calc_Statistics()] Input data is neither of type 'data.frame' nor 'RLum.Results'",
         call. = FALSE)
  } else {
    if(is(data, "RLum.Results")) {
      data <- get_RLum(data, "data")[,1:2]
    }
  }

  ##strip na values
  if(na.rm){
    data <- na.exclude(data)
  }

  ## handle error-free data sets: add an all-NA error column
  ## (nrow() is used instead of length(), which for a data.frame counts
  ## columns and only worked via data.frame recycling)
  if(ncol(data) == 1) {
    data <- cbind(data, rep(NA_real_, nrow(data)))
  }

  ## replace Na values in error by 0
  data[is.na(data[,2]),2] <- 0

  ## all-zero errors would produce Inf weights, so fall back to a tiny value
  if(sum(data[,2]) == 0) {
    warning("[calc_Statistics()] All errors are NA or zero! Automatically set to 10^-9!",
            call. = FALSE)
    data[,2] <- rep(x = 10^-9, length(data[,2]))
  }

  if(weight.calc == "reciprocal") {
    S.weights <- 1 / data[,2]
  } else if(weight.calc == "square") {
    S.weights <- 1 / data[,2]^2
  } else {
    stop ("[calc_Statistics()] Weight calculation type not supported!",
          call. = FALSE)
  }

  ## normalise weights to sum 1
  S.weights <- S.weights / sum(S.weights)

  ## create MCM data: one column per MC sample, values drawn from
  ## N(De, De_error) per row; without MC runs just use the De column
  if (is.null(n.MCM)) {
    data.MCM <- cbind(data[, 1])
  } else {
    data.MCM <- matrix(data = rnorm(n = n.MCM * nrow(data),
                                    mean = data[, 1],
                                    sd = data[, 2]),
                       ncol = n.MCM)
  }

  ## calculate n
  S.n <- nrow(data)

  ## calculate mean
  ## FIX: the weighted.mean() call previously passed 'n.rm = na.rm', a typo
  ## that was silently absorbed by '...' and ignored
  S.mean <- mean(x = data[,1], na.rm = na.rm)
  S.wg.mean <- weighted.mean(x = data[,1], w = S.weights, na.rm = na.rm)
  S.m.mean <- mean(x = data.MCM, na.rm = na.rm)

  ## calculate median (the weighted median is taken as the plain median)
  S.median <- median(x = data[,1], na.rm = na.rm)
  S.wg.median <- S.median
  S.m.median <- median(x = data.MCM, na.rm = na.rm)

  ## calculate absolute standard deviation
  S.sd.abs <- sd(x = data[,1], na.rm = na.rm)
  S.wg.sd.abs <- sqrt(sum(S.weights * (data[,1] - S.wg.mean)^2) /
                        (((S.n - 1) * sum(S.weights)) / S.n))
  S.m.sd.abs <- sd(x = data.MCM, na.rm = na.rm)

  ## calculate relative standard deviation
  S.sd.rel <- S.sd.abs / S.mean * 100
  S.wg.sd.rel <- S.wg.sd.abs / S.wg.mean * 100
  S.m.sd.rel <- S.m.sd.abs / S.m.mean * 100

  ## calculate absolute standard error of the mean
  S.se.abs <- S.sd.abs / sqrt(S.n)
  S.wg.se.abs <- S.wg.sd.abs / sqrt(S.n)
  S.m.se.abs <- S.m.sd.abs / sqrt(S.n)

  ## calculate relative standard error of the mean
  S.se.rel <- S.se.abs / S.mean * 100
  S.wg.se.rel <- S.wg.se.abs / S.wg.mean * 100
  S.m.se.rel <- S.m.se.abs / S.m.mean * 100

  ## calculate skewness (moment-based, unweighted)
  S.skewness <- 1 / S.n * sum(((data[,1] - S.mean) / S.sd.abs)^3)
  S.m.skewness <- 1 / S.n * sum(((data.MCM - S.m.mean) / S.m.sd.abs)^3)

  ## calculate kurtosis (moment-based, unweighted)
  S.kurtosis <- 1 / S.n * sum(((data[,1] - S.mean) / S.sd.abs)^4)
  S.m.kurtosis <- 1 / S.n * sum(((data.MCM - S.m.mean) / S.m.sd.abs)^4)

  ## create list objects of calculation output
  ## NOTE: the weighted list deliberately reuses the unweighted skewness and
  ## kurtosis, as no weighted counterpart is computed
  S.weighted <- list(n = S.n,
                     mean = S.wg.mean,
                     median = S.wg.median,
                     sd.abs = S.wg.sd.abs,
                     sd.rel = S.wg.sd.rel,
                     se.abs = S.wg.se.abs,
                     se.rel = S.wg.se.rel,
                     skewness = S.skewness,
                     kurtosis = S.kurtosis)

  if(!is.null(digits)) {
    S.weighted <- sapply(names(S.weighted), simplify = FALSE, USE.NAMES = TRUE,
                         function(x) {
                           round(S.weighted[[x]], digits = digits)})
  }

  S.unweighted <- list(n = S.n,
                       mean = S.mean,
                       median = S.median,
                       sd.abs = S.sd.abs,
                       sd.rel = S.sd.rel,
                       se.abs = S.se.abs,
                       se.rel = S.se.rel,
                       skewness = S.skewness,
                       kurtosis = S.kurtosis)

  if(!is.null(digits)){
    S.unweighted <- sapply(names(S.unweighted), simplify = FALSE, USE.NAMES = TRUE,
                           function(x) {
                             round(S.unweighted [[x]], digits = digits)})
  }

  S.MCM <- list(n = S.n,
                mean = S.m.mean,
                median = S.m.median,
                sd.abs = S.m.sd.abs,
                sd.rel = S.m.sd.rel,
                se.abs = S.m.se.abs,
                se.rel = S.m.se.rel,
                skewness = S.m.skewness,
                kurtosis = S.m.kurtosis)

  if(!is.null(digits)){
    S.MCM <- sapply(names(S.MCM), simplify = FALSE, USE.NAMES = TRUE,
                    function(x) {
                      round(S.MCM [[x]], digits = digits)})
  }

  list(weighted = S.weighted,
       unweighted = S.unweighted,
       MCM = S.MCM)
}
Luminescence/R/PSL2Risoe.BINfileData.R0000644000176200001440000001737013231137116017001 0ustar liggesusers#' Convert portable OSL data to an Risoe.BINfileData object
#'
#' Converts an `RLum.Analysis` object produced by the function `read_PSL2R()` to
#' an `Risoe.BINfileData` object **(BETA)**.
#'
#' This function converts an [RLum.Analysis-class] object that was produced
#' by the [read_PSL2R] function to an [Risoe.BINfileData-class].
#' The `Risoe.BINfileData` can be used to write a Risoe BIN file via
#' [write_R2BIN].
#'
#' @param object [RLum.Analysis-class] (**required**):
#' `RLum.Analysis` object produced by [read_PSL2R]
#'
#' @param ... currently not used.
#'
#' @return
#' Returns an S4 [Risoe.BINfileData-class] object that can be used to write a
#' BIN file using [write_R2BIN].
#' #' @seealso [RLum.Analysis-class], [RLum.Data.Curve-class], #' [Risoe.BINfileData-class] #' #' @author #' Christoph Burow, University of Cologne (Germany) #' #' @section Function version: 0.0.1 #' #' @keywords IO #' #' @examples #' #' # (1) load and plot example data set #' data("ExampleData.portableOSL", envir = environment()) #' plot_RLum(ExampleData.portableOSL) #' #' # (2) merge all RLum.Analysis objects into one #' merged <- merge_RLum(ExampleData.portableOSL) #' merged #' #' # (3) convert to RisoeBINfile object #' bin <- PSL2Risoe.BINfileData(merged) #' bin #' #' # (4) write Risoe BIN file #' \dontrun{ #' write_R2BIN(bin, "~/portableOSL.binx") #' } #' #' @md #' @export PSL2Risoe.BINfileData <- function(object, ...) { ## INTEGRITY CHECKS ---- if (!inherits(object, "RLum.Analysis")) stop("Only objects of class 'RLum.Analysis' are allowed.", call. = FALSE) if (!all(sapply(object, class) == "RLum.Data.Curve")) stop("The 'RLum.Analysis' object must only contain objects of class 'RLum.Data.Curve'.", call. = FALSE) if (!all(sapply(object, function(x) x@originator) == "read_PSL2R")) stop("Only objects originating from 'read_PSL2R()' are allowed.", call. 
= FALSE) ## EXTRACT CURVE INFORMATION ---- curves <- get_RLum(object) ## COLLECT META INFORMATION ---- META <- do.call(rbind, lapply(curves, function(x) { NPOINTS <- as.integer(x@info$settings$stimulation_time) LTYPE <- x@info$settings$stimulation_unit COMMENT <- x@info$settings$measurement HIGH <- x@info$settings$stimulation_time DATE <- format(x@info$settings$Date, format = "%d%m%y") TIME <- x@info$settings$Time if (nchar(TIME) < 8) TIME <- paste0("0", TIME) SAMPLE <- x@info$settings$Sample FNAME <- x@info$settings$Filename SEQUENCE <- strtrim(paste(x@info$settings$Run_Name, x@info$settings$Sample_no), 8) return(data.frame(NPOINTS = NPOINTS, LTYPE = LTYPE, COMMENT = COMMENT, HIGH = HIGH, DATE = DATE, TIME = TIME, SAMPLE = SAMPLE, FNAME = FNAME, SEQUENCE = SEQUENCE)) })) ## SAVE DATA ---- DATA <- lapply(curves, function(x) { as.integer(x@data[ ,2]) }) # SAVE METADATA ---- METADATA <- data.frame(ID = seq(1, length(curves), 1), SEL = rep(TRUE, length(curves)), VERSION = rep(7, length(curves)), LENGTH = 447 + 4 * META$NPOINTS, PREVIOUS = 447 + 4 * META$NPOINTS, NPOINTS = META$NPOINTS, RUN = seq(1, length(curves), 1), SET = rep(1, length(curves)), POSITION = rep(1, length(curves)), GRAIN = rep(0, length(curves)), GRAINNUMBER = rep(0, length(curves)), CURVENO = rep(0, length(curves)), XCOORD = rep(0, length(curves)), YCOORD = rep(0, length(curves)), SAMPLE = META$SAMPLE, COMMENT = META$COMMENT, SYSTEMID = rep(0, length(curves)), FNAME = META$FNAME, USER = rep("RLum", length(curves)), TIME = META$TIME, DATE = META$DATE, DTYPE = rep("Natural", length(curves)), BL_TIME = rep(0, length(curves)), BL_UNIT = rep(0, length(curves)), NORM1 = rep(0, length(curves)), NORM2 = rep(0, length(curves)), NORM3 = rep(0, length(curves)), BG = rep(0, length(curves)), SHIFT = rep(0, length(curves)), TAG = rep(1, length(curves)), LTYPE = META$LTYPE, LIGHTSOURCE = rep("None", length(curves)), LPOWER = rep(100, length(curves)), LIGHTPOWER = rep(100, length(curves)), LOW = rep(0, 
length(curves)), HIGH = META$HIGH, RATE = rep(0, length(curves)), TEMPERATURE = rep(0, length(curves)), MEASTEMP = rep(0, length(curves)), AN_TEMP = rep(0, length(curves)), AN_TIME = rep(0, length(curves)), TOLDELAY = rep(0, length(curves)), TOLON = rep(0, length(curves)), TOLOFF = rep(0, length(curves)), IRR_TIME = rep(0, length(curves)), IRR_TYPE = rep(0L, length(curves)), IRR_UNIT = rep(0, length(curves)), IRR_DOSERATE = rep(0, length(curves)), IRR_DOSERATEERR = rep(0, length(curves)), TIMESINCEIRR = rep(-1, length(curves)), TIMETICK = rep(1e-07, length(curves)), ONTIME = rep(0, length(curves)), OFFTIME = rep(NA, length(curves)), STIMPERIOD = rep(0, length(curves)), GATE_ENABLED = rep(0, length(curves)), ENABLE_FLAGS = rep(0, length(curves)), GATE_START = rep(0, length(curves)), GATE_STOP = rep(0, length(curves)), PTENABLED = rep(0, length(curves)), DTENABLED = rep(0, length(curves)), DEADTIME = rep(0, length(curves)), MAXLPOWER = rep(0, length(curves)), XRF_ACQTIME = rep(0, length(curves)), XRF_HV = rep(0, length(curves)), XRF_CURR = rep(0, length(curves)), XRF_DEADTIMEF = rep(0, length(curves)), SEQUENCE = META$SEQUENCE, DETECTOR_ID = rep(NA, length(curves)), LOWERFILTER_ID = rep(NA, length(curves)), UPPERFILTER_ID = rep(NA, length(curves)), ENOISEFACTOR = rep(NA, length(curves)), MARKPOS_X1 = rep(0, length(curves)), MARKPOS_Y1 = rep(0, length(curves)), MARKPOS_X2 = rep(0, length(curves)), MARKPOS_Y2 = rep(0, length(curves)), MARKPOS_X3 = rep(0, length(curves)), MARKPOS_Y3 = rep(0, length(curves)), EXTR_START = rep(0, length(curves)), EXTR_END = rep(0, length(curves)), RECTYPE = rep(0, length(curves))) ## CREATE Risoe.BINfileData OBJECT ---- bin <- set_Risoe.BINfileData(METADATA = METADATA, DATA = DATA, .RESERVED = list()) ## RETURN VALUE ---- return(bin) } Luminescence/R/verify_SingleGrainData.R0000644000176200001440000003440213431413144017571 0ustar liggesusers#' Verify single grain data sets and check for invalid grains, i.e. 
#' zero-light level grains #' #' This function tries to identify automatically zero-light level curves (grains) #' from single grain data measurements. #' #' **How does the method work?** #' #' The function compares the expected values (\eqn{E(X)}) and the variance #' (\eqn{Var(X)}) of the count values for each curve. Assuming that the #' background roughly follows a poisson distribution the absolute difference #' of both values should be zero or at least around zero as #' #' \deqn{E(x) = Var(x) = \lambda} #' #' Thus the function checks for: #' #' \deqn{abs(E(x) - Var(x)) >= \Theta} #' #' With \eqn{\Theta} an arbitray, user defined, threshold. Values above the #' threshold indicating curves comprising a signal. #' #' Note: the absolute difference of \eqn{E(X)} and \eqn{Var(x)} instead of the #' ratio was chosen as both terms can become 0 which would result in 0 or `Inf`, #' if the ratio is calculated. #' #' @param object [Risoe.BINfileData-class] or [RLum.Analysis-class] (**required**): #' input object. The function also accepts a list with objects of allowed type. #' #' @param threshold [numeric] (*with default*): #' numeric threshold value for the allowed difference between the `mean` and #' the `var` of the count values (see details) #' #' @param cleanup [logical] (*with default*): #' if set to `TRUE` curves indentified as zero light level curves are #' automatically removed. Ouput is an object as same type as the input, i.e. #' either [Risoe.BINfileData-class] or [RLum.Analysis-class] #' #' @param cleanup_level [character] (*with default*): #' selects the level for the cleanup of the input data sets. #' Two options are allowed: `"curve"` or `"aliquot"`: #' #' - If `"curve"` is selected every single curve marked as `invalid` is removed. #' - If `"aliquot"` is selected, curves of one aliquot (grain or disc) can be #' marked as invalid, but will not be removed. An aliquot will be only removed #' if all curves of this aliquot are marked as invalid. 
#' #' @param verbose [logical] (*with default*): #' enables or disables the terminal feedback #' #' @param plot [logical] (*with default*): #' enables or disables the graphical feedback #' #' @return #' The function returns #' #' -----------------------------------\cr #' `[ NUMERICAL OUTPUT ]`\cr #' -----------------------------------\cr #' #' **`RLum.Results`**-object #' #' **slot:****`@data`** #' #' \tabular{lll}{ #' **Element** \tab **Type** \tab **Description**\cr #' `$unique_pairs` \tab `data.frame` \tab the unique position and grain pairs \cr #' `$selection_id` \tab `numeric` \tab the selection as record ID \cr #' `$selection_full` \tab `data.frame` \tab implemented models used in the baSAR-model core \cr #' } #' #' **slot:****`@info`** #' #' The original function call #' #' **Output variation** #' #' For `cleanup = TRUE` the same object as the input is returned, but cleaned up #' (invalid curves were removed). This means: Either an [Risoe.BINfileData-class] #' or an [RLum.Analysis-class] object is returned in such cases. #' An [Risoe.BINfileData-class] object can be exported to a BIN-file by #' using the function [write_R2BIN]. #' #' @note #' This function can work with [Risoe.BINfileData-class] objects or #' [RLum.Analysis-class] objects (or a list of it). However, the function is #' highly optimised for [Risoe.BINfileData-class] objects as it make sense to #' remove identify invalid grains before the conversion to an #' [RLum.Analysis-class] object. #' #' The function checking for invalid curves works rather robust and it is likely #' that Reg0 curves within a SAR cycle are removed as well. Therefore it is #' strongly recommended to use the argument `cleanup = TRUE` carefully. 
#' #' @section Function version: 0.2.1 #' #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) #' #' #' @seealso [Risoe.BINfileData-class], [RLum.Analysis-class], [write_R2BIN], #' [read_BIN2R] #' #' @keywords manip datagen #' #' @examples #' #' ##01 - basic example I #' ##just show how to apply the function #' data(ExampleData.XSYG, envir = environment()) #' #' ##verify and get data.frame out of it #' verify_SingleGrainData(OSL.SARMeasurement$Sequence.Object)$selection_full #' #' ##02 - basic example II #' data(ExampleData.BINfileData, envir = environment()) #' id <- verify_SingleGrainData(object = CWOSL.SAR.Data, #' cleanup_level = "aliquot")$selection_id #' #' \dontrun{ #' ##03 - advanced example I #' ##importing and exporting a BIN-file #' #' ##select and import file #' file <- file.choose() #' object <- read_BIN2R(file) #' #' ##remove invalid aliquots(!) #' object <- verify_SingleGrainData(object, cleanup = TRUE) #' #' ##export to new BIN-file #' write_R2BIN(object, paste0(dirname(file),"/", basename(file), "_CLEANED.BIN")) #' } #' #' @md #' @export verify_SingleGrainData <- function( object, threshold = 10, cleanup = FALSE, cleanup_level = 'aliquot', verbose = TRUE, plot = FALSE ){ ##three types of input are allowed: ##(1) RisoeBINfileData ##(2) RLum.Analysis ##(3) List of RLum.Analysis # Self Call ----------------------------------------------------------------------------------- if(is(object, "list")){ results <- lapply(1:length(object), function(x) { verify_SingleGrainData( object = object[[x]], threshold = threshold, cleanup = cleanup, cleanup_level = cleanup_level, verbose = verbose ) }) ##account for cleanup if(cleanup){ return(results) }else{ return(merge_RLum(results)) } } ##++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ ##RisoeBINfileData if(is(object, "Risoe.BINfileData")){ ##run test on DATA slot ##MEAN + SD temp.results_matrix <- lapply(X = 
object@DATA, FUN = function(x){ c(mean(x), var(x)) }) temp.results_matrix <- do.call(rbind, temp.results_matrix) ##DIFF temp.results_matrix_RATIO <- temp.results_matrix[,2]/temp.results_matrix[,1] ##SEL temp.results_matrix_VALID <- temp.results_matrix_RATIO > threshold ##combine everything to in a data.frame selection <- data.frame( POSITION = object@METADATA$POSITION, GRAIN = object@METADATA$GRAIN, MEAN = temp.results_matrix[, 1], VAR = temp.results_matrix[, 2], RATIO = temp.results_matrix_RATIO, THRESHOLD = rep_len(threshold, length(object@DATA)), VALID = temp.results_matrix_VALID ) ##get unique pairs for POSITION and GRAIN for VALID == TRUE unique_pairs <- unique( selection[selection[["VALID"]], c("POSITION", "GRAIN")]) if(cleanup_level == "aliquot"){ selection_id <- sort(unlist(lapply(1:nrow(unique_pairs), function(x) { which( .subset2(selection, 1) == .subset2(unique_pairs, 1)[x] & .subset2(selection, 2) == .subset2(unique_pairs, 2)[x] ) }))) }else{ ##reduce data to TRUE selection selection_id <- which(selection[["VALID"]]) } ##select output on the chosen input if(cleanup){ ##selected wanted elements object@DATA <- object@DATA[selection_id] object@METADATA <- object@METADATA[selection_id,] object@METADATA$ID <- 1:length(object@DATA) ##print message selection_id <- paste(selection_id, collapse = ", ") if(verbose){ cat(paste0("\n[verify_SingleGrainData()] Risoe.BINfileData object reduced to records: \n", selection_id)) cat("\n\n[verify_SingleGrainData()] Risoe.BINfileData object record index reset.") } ##return return_object <- object }else{ return_object <- set_RLum( class = "RLum.Results", data = list( unique_pairs = unique_pairs, selection_id = selection_id, selection_full = selection), info = list(call = sys.call()) ) } ##++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ ##RLum.Analysis and list with RLum.Analysis objects ## ... 
and yes it make sense not to mix that up with the code above }else if(is(object,"RLum.Analysis")){ ##first extract all count values from all curves object_list <- lapply(get_RLum(object), function(x){ ##yes, would work differently, but it is faster x@data[,2] }) ##MEAN + SD temp.results_matrix <- lapply(X = object_list, FUN = function(x){ c(mean(x), var(x)) }) temp.results_matrix <- do.call(rbind, temp.results_matrix) ##DIFF temp.results_matrix_RATIO <- temp.results_matrix[,2]/temp.results_matrix[,1] ##SEL temp.results_matrix_VALID <- temp.results_matrix_RATIO > threshold ##get structure for the RLum.Anlaysis object temp_structure <- structure_RLum(object, fullExtent = TRUE) ##now we have two cases, depending on where measurement is coming from if (object@originator == "Risoe.BINfileData2RLum.Analysis") { ##combine everything to in a data.frame selection <- data.frame( POSITION = temp_structure$info.POSITION, GRAIN = temp_structure$info.GRAIN, MEAN = temp.results_matrix[, 1], VAR = temp.results_matrix[, 2], RATIO = temp.results_matrix_RATIO, THRESHOLD = rep_len(threshold, length(object_list)), VALID = temp.results_matrix_VALID ) ##get unique pairs for POSITION and GRAIN for VALID == TRUE unique_pairs <- unique( selection[selection[["VALID"]], c("POSITION", "GRAIN")]) } else if (object@originator == "read_XSYG2R") { ##combine everything to in a data.frame selection <- data.frame( POSITION = if(any(grepl(pattern = "position", names(temp_structure)))){ temp_structure$info.position}else{ NA }, GRAIN = NA, MEAN = temp.results_matrix[, 1], VAR = temp.results_matrix[, 2], RATIO = temp.results_matrix_RATIO, THRESHOLD = rep_len(threshold, length(object_list)), VALID = temp.results_matrix_VALID ) ##get unique pairs for POSITION for VALID == TRUE unique_pairs <- unique( selection[["POSITION"]][selection[["VALID"]]]) } else{ stop("[verify_SingleGrainData()] I don't know what to do object 'originator' not supported!", call. 
= FALSE) } ##set up cleanup if(cleanup_level == "aliquot") { if (object@originator == "read_XSYG2R") { if(!is.na(unique_pairs)){ selection_id <- sort(unlist(lapply(1:nrow(unique_pairs), function(x) { which(.subset2(selection, 1) == .subset2(unique_pairs, 1)[x]) }))) }else{ selection_id <- NA } } else if (object@originator == "Risoe.BINfileData2RLum.Analysis") { selection_id <- sort(unlist(lapply(1:nrow(unique_pairs), function(x) { which( .subset2(selection, 1) == .subset2(unique_pairs, 1)[x] & .subset2(selection, 2) == .subset2(unique_pairs, 2)[x] ) }))) } ##make sure that we do not break subsequent code if(length(selection_id) == 0) selection_id <- NA } else{ ##reduce data to TRUE selection selection_id <- which(selection[["VALID"]]) } ##return value ##select output on the chosen input if(cleanup && !any(is.na(selection_id))){ ##print message if(verbose){ selection_id_text <- paste(selection_id, collapse = ", ") cat(paste0("\n[verify_SingleGrainData()] RLum.Analysis object reduced to records: ", selection_id_text)) } ##selected wanted elements if (length(selection_id) == 0) { object <- set_RLum( class = "RLum.Analysis", originator = object@originator, protocol = object@protocol, records = list(), info = list( unique_pairs = unique_pairs, selection_id = selection_id, selection_full = selection) ) } else{ object <- set_RLum( class = "RLum.Analysis", records = get_RLum(object, record.id = selection_id, drop = FALSE), info = list( unique_pairs = unique_pairs, selection_id = selection_id, selection_full = selection) ) } ##return return_object <- object }else{ if(any(is.na(selection_id))){ warning("[verify_SingleGrainData()] selection_id is NA, nothing removed, everything selected!", call. 
= FALSE) } return_object <- set_RLum( class = "RLum.Results", data = list( unique_pairs = unique_pairs, selection_id = selection_id, selection_full = selection), info = list(call = sys.call()) ) } }else{ stop(paste0("[verify_SingleGrainData()] Input type '", is(object)[1], "' is not allowed for this function!"), call. = FALSE) } # Plot ---------------------------------------------------------------------------------------- if(plot){ ##plot area plot( NA, NA, xlim = c(1,nrow(selection)), ylim = range(selection[["RATIO"]]), log = "y", xlab = "Record index", ylab = "Calculated ratio [a.u.]", main = "Record selection" ) ##plot points above the threshold points(x = which(selection[["VALID"]]), y = selection[["RATIO"]][selection[["VALID"]]], pch = 20, col = "darkgreen") points(x = which(!selection[["VALID"]]), y = selection[["RATIO"]][!selection[["VALID"]]], pch = 20, col = rgb(0,0,0,0.5)) abline(h = threshold, col = "red", lty = 1, lwd = 2) mtext( side = 3, text = paste0( "(total: ", nrow(selection), " | valid: ", length(which(selection[["VALID"]])), " | invalid: ", length(which(!selection[["VALID"]])), ")"), cex = 0.9 * par()$cex) } # Return -------------------------------------------------------------------------------------- return(return_object) } Luminescence/R/CW2pHMi.R0000644000176200001440000002622313231137116014364 0ustar liggesusers#' Transform a CW-OSL curve into a pHM-OSL curve via interpolation under #' hyperbolic modulation conditions #' #' This function transforms a conventionally measured continuous-wave (CW) #' OSL-curve to a pseudo hyperbolic modulated (pHM) curve under hyperbolic #' modulation conditions using the interpolation procedure described by Bos & #' Wallinga (2012). #' #' The complete procedure of the transformation is described in Bos & Wallinga #' (2012). 
The input `data.frame` consists of two columns: time (t) and #' count values (CW(t)) #' #' **Internal transformation steps** #' #' (1) log(CW-OSL) values #' #' (2) #' Calculate t' which is the transformed time: #' \deqn{t' = t-(1/\delta)*log(1+\delta*t)} #' #' (3) #' Interpolate CW(t'), i.e. use the log(CW(t)) to obtain the count values #' for the transformed time (t'). Values beyond `min(t)` and `max(t)` #' produce `NA` values. #' #' (4) #' Select all values for t' < `min(t)`, i.e. values beyond the time #' resolution of t. Select the first two values of the transformed data set #' which contain no `NA` values and use these values for a linear fit #' using [lm]. #' #' (5) #' Extrapolate values for t' < `min(t)` based on the previously #' obtained fit parameters. #' #' (6) #' Transform values using #' \deqn{pHM(t) = (\delta*t/(1+\delta*t))*c*CW(t')} #' \deqn{c = (1+\delta*P)/\delta*P} #' \deqn{P = length(stimulation~period)} #' #' (7) Combine all values and truncate all values for t' > `max(t)` #' #' #' **NOTE:** #' The number of values for t' < `min(t)` depends on the stimulation rate #' parameter `delta`. To avoid the production of too many artificial data #' at the raising tail of the determined pHM curve, it is recommended to use #' the automatic estimation routine for `delta`, i.e. provide no value for #' `delta`. #' #' @param values [RLum.Data.Curve-class] or [data.frame] (**required**): #' [RLum.Data.Curve-class] or [data.frame] with measured curve data of type #' stimulation time (t) (`values[,1]`) and measured counts (cts) (`values[,2]`). #' #' @param delta [vector] (*optional*): #' stimulation rate parameter, if no value is given, the optimal value is #' estimated automatically (see details). Smaller values of delta produce more #' points in the rising tail of #' the curve. #' #' @return #' The function returns the same data type as the input data type with #' the transformed curve values. 
#' #' #' **`RLum.Data.Curve`** #' #' \tabular{ll}{ #' `$CW2pHMi.x.t` \tab: transformed time values \cr #' `$CW2pHMi.method` \tab: used method for the production of the new data points #' } #' #' **`data.frame`** #' #' \tabular{ll}{ #' `$x` \tab: time\cr #' `$y.t` \tab: transformed count values\cr #' `$x.t` \tab: transformed time values \cr #' `$method` \tab: used method for the production of the new data points #' } #' #' @note #' According to Bos & Wallinga (2012), the number of extrapolated points #' should be limited to avoid artificial intensity data. If `delta` is #' provided manually and more than two points are extrapolated, a warning #' message is returned. #' #' The function [approx] may produce some `Inf` and `NaN` data. #' The function tries to manually interpolate these values by calculating #' the `mean` using the adjacent channels. If two invalid values are succeeding, #' the values are removed and no further interpolation is attempted. #' In every case a warning message is shown. #' #' @section Function version: 0.2.2 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr #' Based on comments and suggestions from:\cr #' Adrie J.J. Bos, Delft University of Technology, The Netherlands #' #' @seealso [CW2pLM], [CW2pLMi], [CW2pPMi], [fit_LMCurve], [lm], #' [RLum.Data.Curve-class] #' #' @references #' Bos, A.J.J. & Wallinga, J., 2012. How to visualize quartz OSL #' signal components. Radiation Measurements, 47, 752-758.\cr #' #' **Further Reading** #' #' Bulur, E., 1996. An Alternative Technique For #' Optically Stimulated Luminescence (OSL) Experiment. Radiation Measurements, #' 26, 701-709. #' #' Bulur, E., 2000. A simple transformation for converting CW-OSL curves to #' LM-OSL curves. Radiation Measurements, 32, 141-145. 
#' #' @keywords manip #' #' @examples #' #' ##(1) - simple transformation #' #' ##load CW-OSL curve data #' data(ExampleData.CW_OSL_Curve, envir = environment()) #' #' ##transform values #' values.transformed<-CW2pHMi(ExampleData.CW_OSL_Curve) #' #' ##plot #' plot(values.transformed$x, values.transformed$y.t, log = "x") #' #' ##(2) - load CW-OSL curve from BIN-file and plot transformed values #' #' ##load BINfile #' #BINfileData<-readBIN2R("[path to BIN-file]") #' data(ExampleData.BINfileData, envir = environment()) #' #' ##grep first CW-OSL curve from ALQ 1 #' curve.ID<-CWOSL.SAR.Data@@METADATA[CWOSL.SAR.Data@@METADATA[,"LTYPE"]=="OSL" & #' CWOSL.SAR.Data@@METADATA[,"POSITION"]==1 #' ,"ID"] #' #' curve.HIGH<-CWOSL.SAR.Data@@METADATA[CWOSL.SAR.Data@@METADATA[,"ID"]==curve.ID[1] #' ,"HIGH"] #' #' curve.NPOINTS<-CWOSL.SAR.Data@@METADATA[CWOSL.SAR.Data@@METADATA[,"ID"]==curve.ID[1] #' ,"NPOINTS"] #' #' ##combine curve to data set #' #' curve<-data.frame(x = seq(curve.HIGH/curve.NPOINTS,curve.HIGH, #' by = curve.HIGH/curve.NPOINTS), #' y=unlist(CWOSL.SAR.Data@@DATA[curve.ID[1]])) #' #' #' ##transform values #' #' curve.transformed <- CW2pHMi(curve) #' #' ##plot curve #' plot(curve.transformed$x, curve.transformed$y.t, log = "x") #' #' #' ##(3) - produce Fig. 4 from Bos & Wallinga (2012) #' #' ##load data #' data(ExampleData.CW_OSL_Curve, envir = environment()) #' values <- CW_Curve.BosWallinga2012 #' #' ##open plot area #' plot(NA, NA, #' xlim=c(0.001,10), #' ylim=c(0,8000), #' ylab="pseudo OSL (cts/0.01 s)", #' xlab="t [s]", #' log="x", #' main="Fig. 
#' @md
#' @export
CW2pHMi <- function(
  values,
  delta
){
  ## Transform a CW-OSL curve into a pHM-OSL curve via interpolation under
  ## hyperbolic modulation conditions (Bos & Wallinga 2012).
  ##
  ## values : data.frame (t, counts) or RLum.Data.Curve with OSL/IRSL data
  ## delta  : stimulation rate parameter; if missing, it is chosen
  ##          automatically so that at most two points are extrapolated
  ## returns: same type as the input, with transformed curve values

  # Integrity checks ----------------------------------------------------------

  ##(1) input must be a data.frame or an RLum.Data.Curve object
  if(!is(values, "data.frame") & !is(values, "RLum.Data.Curve")){
    stop("[CW2pHMi()] 'values' object has to be of type 'data.frame' or 'RLum.Data.Curve'!",
         call. = FALSE)
  }

  ##(2) for RLum.Data.Curve objects only OSL/IRSL record types are allowed
  if(is(values, "RLum.Data.Curve")){
    if(!grepl("OSL", values@recordType) & !grepl("IRSL", values@recordType)){
      stop(paste("[CW2pHMi()] recordType ",values@recordType,
                 " is not allowed for the transformation!", sep=""),
           call. = FALSE)
    }
    temp.values <- as(values, "data.frame")
  }else{
    temp.values <- values
  }

  # (1) Transform values ------------------------------------------------------

  ##log transformation of the CW-OSL count values
  CW_OSL.log <- log(temp.values[,2])

  ##time transformation t >> t'
  t <- temp.values[,1]

  ##set delta
  ##if no value for delta is given, select a delta value that limits the
  ##number of extrapolation points to a maximum of two (see Bos & Wallinga 2012)
  if(missing(delta)){
    i <- 10
    delta <- i
    t.transformed <- t - (1/delta) * log(1 + delta * t)

    ##increase delta in steps of 10 until at most two transformed time
    ##values fall below the measured time resolution
    while(length(t.transformed[t.transformed < t[1]]) > 2){
      delta <- i
      t.transformed <- t - (1/delta) * log(1 + delta * t)
      i <- i + 10
    }
  }else{
    t.transformed <- t - (1/delta) * log(1 + delta * t)
  }

  # (2) Interpolation ---------------------------------------------------------

  ##interpolate values; values beyond the measured range return NA (rule = 1)
  CW_OSL.interpolated <- approx(t, CW_OSL.log, xout = t.transformed, rule = 1)

  ##combine t.transformed and interpolated counts in a data.frame
  temp <- data.frame(x = t.transformed, y = unlist(CW_OSL.interpolated$y))

  ##Problem: in some cases the interpolation algorithm does not work properly
  ##and Inf or NaN values are returned; replace them by the mean of the
  ##adjacent channels (two succeeding invalid values yield NA and are removed
  ##later by na.exclude())
  invalid_values.id <- which(is.infinite(temp[,2]) | is.nan(temp[,2]))

  if(length(invalid_values.id) > 0){
    warning(paste(length(invalid_values.id),
                  " values have been found and replaced by the mean of the nearest values."))

    ##interpolate between the lower and the upper neighbouring value
    invalid_values.interpolated <- sapply(invalid_values.id, function(id) {
      mean(c(temp[id - 1, 2], temp[id + 1, 2]))
    })

    ##replace invalid values in data.frame with newly interpolated values
    temp[invalid_values.id, 2] <- invalid_values.interpolated
  }

  # (3) Extrapolate first values of the curve ---------------------------------

  ##(a) - index of the first row that contains a non-NA value; rows before it
  ##      (t' < min(t)) need extrapolation
  temp.sel.id <- min(which(!is.na(temp[,2])))

  if(temp.sel.id > 1){
    ##(b) - fit linear function to the first two measured (log) values
    fit.lm <- lm(y ~ x, data.frame(x = t[1:2], y = CW_OSL.log[1:2]))

    ##predict (extrapolate) values for t' < min(t) based on the fit
    x.i <- data.frame(x = temp[1:(temp.sel.id - 1), 1])
    y.i <- predict(fit.lm, x.i)

    ##replace NA values by extrapolated values
    temp[1:length(y.i), 2] <- y.i
  }else{
    ##nothing to extrapolate (guard against zero-length replacement)
    y.i <- numeric(0)
  }

  ##set method flags for the output table
  temp.method <- c(rep("extrapolation", length(y.i)),
                   rep("interpolation", length(temp[,2]) - length(y.i)))

  ##warn if more than two points were extrapolated (cf. Bos & Wallinga 2012)
  if(length(y.i) > 2){
    warning("t' is beyond the time resolution and more than two data points have been extrapolated!")
  }

  # (4) Convert, transform and combine values ---------------------------------

  ##unlog CW-OSL count values, i.e. log(CW) >> CW
  CW_OSL <- exp(temp$y)

  ##P is the stimulation period
  P <- max(temp.values[,1])

  ##c is a dimensionless constant
  c <- (1 + (delta * P)) / (delta * P)

  ##transform CW-OSL values to pHM-OSL values
  pHM <- ((delta * t) / (1 + (delta * t))) * c * CW_OSL

  ##combine all values and exclude NA values
  temp.values <- data.frame(x = t, y.t = pHM, x.t = t.transformed, method = temp.method)
  temp.values <- na.exclude(temp.values)

  # (5) Return values ---------------------------------------------------------

  ##return the same data type as the input
  if(is(values, "data.frame")){
    return(temp.values)
  }else{
    ##add old info elements to new info elements
    temp.info <- c(values@info,
                   CW2pHMi.x.t = list(temp.values$x.t),
                   CW2pHMi.method = list(temp.values$method))

    newRLumDataCurves.CW2pHMi <- set_RLum(
      class = "RLum.Data.Curve",
      recordType = values@recordType,
      data = as.matrix(temp.values[,1:2]),
      info = temp.info)
    return(newRLumDataCurves.CW2pHMi)
  }
}
(2008) use `fit.method = "EXP"`. #' #' Firstly, the unfaded D0 value is determined through applying equation 5 of #' Kars et al. (2008) to the measured LxTx data as a function of irradiation #' time, and fitting the data with a single saturating exponential of the form: #' #' \deqn{LxTx(t*) = A x \phi(t*) x (1 - exp(-(t* / D0)))} #' #' where #' #' \deqn{\phi(t*) = exp(-\rho' x ln(1.8 x s_tilde x t*)^3)} #' #' after King et al. (2016) where `A` is a pre-exponential factor, #' `t*` (s) is the irradiation time, starting at the mid-point of #' irradiation (Auclair et al. 2003) and `s_tilde` (3x10^15 s^-1) is the athermal #' frequency factor after Huntley (2006). \cr #' #' Using fit parameters `A` and `D0`, the function then computes a natural dose #' response curve using the environmental dose rate, `D_dot` (Gy/s) and equations #' `[1]` and `[2]`. Computed LxTx values are then fitted using the #' [plot_GrowthCurve] function and the laboratory measured LnTn can then #' be interpolated onto this curve to determine the fading corrected #' De value, from which the fading corrected age is calculated. #' #' **Guralnik et al. (2015) - General-order kinetics** #' #' To apply the approach after Guralnik et al. (2015) use `fit.method = "GOK"`. #' #' The approach of Guralnik et al. (2015) is very similar to that of #' Kars et al. (2008), but instead of using a single saturating exponential #' the model fits a general-order kinetics function of the form: #' #' \deqn{LxTx(t*) = A x \phi(t*) x (1-(1+(1/D0) x t* x c)^(-1/c))} #' #' where `A`, \eqn{\phi}, `t*` and `D0` are the same as above and `c` is a #' dimensionless kinetic order modifier (cf. equation 10 in #' Guralnik et al., 2015). #' #' **Level of saturation** #' #' The `calc_Huntley2006` function also calculates the level of saturation (n/N) #' and the field saturation (i.e. 
athermal steady state, (n/N)_SS) value for #' the sample under investigation using the sample specific \eqn{\rho}', #' unfaded `D0` and `D_dot` values, following the approach of Kars et al. (2008). #' #' **Uncertainties** #' #' Uncertainties are reported at 1 sigma and are assumed to be normally #' distributed and are estimated using monte-carlo resamples (`n.MC = 1000`) #' of \eqn{\rho}' and LxTx during dose response curve fitting, and of \eqn{\rho}' #' in the derivation of (n/N) and (n/N)_SS. #' #' #' **Age calculated from 2\*D0 of the simulated natural DRC** #' #' In addition to the age calculated from the equivalent dose derived from #' `Ln/Tn` projected on the simulated natural dose response curve (DRC), this function #' also calculates an age from twice the characteristic saturation dose (`D0`) #' of the simulated natural DRC. This can be a useful information for #' (over)saturated samples (ie. no intersect of `Ln/Tn` on the natural DRC) #' to obtain at least a "minimum age" estimate of the sample. In the console #' output this value is denoted by *"Age @2D0 (ka):"*. #' #' @param data [data.frame] (**required**): #' A `data.frame` with one of the following structures: #' - A **three column** data frame with numeric values on a) dose (s), b) LxTx and and #' c) LxTx error. #' - If a **two column** data frame is provided it is automatically #' assumed that errors on LxTx are missing. A third column will be attached #' with an arbitrary 5 \% error on the provided LxTx values. #' - Can also be a **wide table**, i.e. a [data.frame] with a number of colums divisible by 3 #' and where each triplet has the aforementioned column structure. #' #' ``` #' (optional) #' | dose (s)| LxTx | LxTx error | #' | [ ,1] | [ ,2]| [ ,3] | #' |---------|------|------------| #' [1, ]| 0 | LnTn | LnTn error | (optional, see arg 'LnTn') #' [2, ]| R1 | L1T1 | L1T1 error | #' ... | ... | ... | ... 
| #' [x, ]| Rx | LxTx | LxTx error | #' #' ``` #' **NOTE:** The function assumes the first row of the function to be the #' `Ln/Tn`-value. If you want to provide more than one `Ln/Tn`-value consider #' using the argument `LnTn`. #' #' @param LnTn [data.frame] (**optional**): #' This argument should **only** be used to provide more than one `Ln/Tn`-value. #' It assumes a two column data frame with the following structure: #' #' ``` #' | LnTn | LnTn error | #' | [ ,1] | [ ,2] | #' |--------|--------------| #' [1, ]| LnTn_1 | LnTn_1 error | #' [2, ]| LnTn_2 | LnTn_2 error | #' ... | ... | ... | #' [x, ]| LnTn_x | LnTn_x error | #' ``` #' #' The function will calculate a **mean** `Ln/Tn`-value and uses either the #' standard deviation or the highest individual error, whichever is larger. If #' another mean value (e.g. a weighted mean or median) or error is preferred, #' this value must be calculated beforehand and used in the first row in the #' data frame for argument `data`. #' #' **NOTE:** If you provide `LnTn`-values with this argument the data frame #' for the `data`-argument **must not** contain any `LnTn`-values! #' #' @param rhop [numeric] (**required**): #' The density of recombination centres (\eqn{\rho}') and its error (see Huntley 2006), #' given as numeric vector of length two. Note that \eqn{\rho}' must **not** be #' provided as the common logarithm. Example: `rhop = c(2.92e-06, 4.93e-07)`. #' #' @param ddot [numeric] (**required**): #' Environmental dose rate and its error, given as a numeric vector of length two. #' Expected unit: Gy/ka. Example: `ddot = c(3.7, 0.4)`. #' #' @param readerDdot [numeric] (**required**): #' Dose rate of the irradiation source of the OSL reader and its error, #' given as a numeric vector of length two. #' Expected unit: Gy/s. Example: `readerDdot = c(0.08, 0.01)`. #' #' @param fit.method [character] (*with default*): #' Fit function of the dose response curve. Can either be `EXP` (the default) #' or `GOK`. 
Note that `EXP` (single saturating exponential) is the original #' function the model after Huntley (2006) and Kars et al. (2008) was #' designed to use. The use of a general-order kinetics function (`GOK`) #' is an experimental adaption of the model and should be used #' with great care. #' #' @param lower.bounds [numeric] (*with default*): #' Only applicable for `fit.method = 'GOK'`. A vector of length 3 that #' contains the lower bound values for fitting the general-order kinetics #' function using [minpack.lm::nlsLM]. In most cases, the default values #' (c(`-Inf, -Inf, -Inf`)) are appropriate for finding a best fit, but #' sometimes it may be useful to restrict the lower bounds to e.g. #' c(`0, 0, 0`). The values of the vector are for parameters #' `a`, `D0` and `c` in that particular order (see details in #' [Luminescence::plot_GrowthCurve]). #' #' @param normalise [logical] (*with default*): #' If `TRUE` (the default) all measured and computed LxTx values are #' normalised by the pre-exponential factor A (see details). #' #' @param summary [logical] (*with default*): #' If `TRUE` (the default) various parameters provided by the user #' and calculated by the model are added as text on the right-hand side of the #' plot. #' #' @param plot [logical] (*with default*): #' enables/disables plot output. #' #' @param ... #' Further parameters: #' - `verbose` [logical]: Show or hide console output #' - `n.MC` [numeric]: Number of Monte Carlo iterations (default = `100000`). #' **Note** that it is generally advised to have a large number of Monte Carlo #' iterations for the results to converge. Decreasing the number of iterations #' will often result in unstable estimates. #' #' All other arguments are passed to [plot] and [plot_GrowthCurve]. #' #' @return An [RLum.Results-class] object is returned: #' #' Slot: **@data**\cr #' #' \tabular{lll}{ #' **OBJECT** \tab **TYPE** \tab **COMMENT**\cr #' `results` \tab [data.frame] \tab results of the of Kars et al. 
2008 model \cr #' `data` \tab [data.frame] \tab original input data \cr #' `Ln` \tab [numeric] \tab Ln and its error \cr #' `LxTx_tables` \tab `list` \tab A `list` of `data.frames` containing data on dose, #' LxTx and LxTx error for each of the dose response curves. #' Note that these **do not** contain the natural Ln signal, which is provided separately. \cr #' `fits` \tab `list` \tab A `list` of `nls` objects produced by [minpack.lm::nlsLM] when fitting the dose response curves \cr #' } #' #' Slot: **@info**\cr #' #' \tabular{lll}{ #' **OBJECT** \tab **TYPE** \tab **COMMENT** \cr #' `call` \tab `call` \tab the original function call \cr #' `args` \tab `list` \tab arguments of the original function call \cr #' } #' #' @section Function version: 0.4.1 #' #' @author #' Georgina E. King, University of Bern (Switzerland) \cr #' Christoph Burow, University of Cologne (Germany) #' #' @note **This function has BETA status and should not be used for publication work!** #' #' @keywords datagen #' #' @references #' #' Kars, R.H., Wallinga, J., Cohen, K.M., 2008. A new approach towards anomalous fading correction for feldspar #' IRSL dating-tests on samples in field saturation. Radiation Measurements 43, 786-790. doi:10.1016/j.radmeas.2008.01.021 #' #' Guralnik, B., Li, B., Jain, M., Chen, R., Paris, R.B., Murray, A.S., Li, S.-H., Pagonis, P., #' Herman, F., 2015. Radiation-induced growth and isothermal decay of infrared-stimulated luminescence #' from feldspar. Radiation Measurements 81, 224-231. #' #' Huntley, D.J., 2006. An explanation of the power-law decay of luminescence. #' Journal of Physics: Condensed Matter 18, 1359-1365. doi:10.1088/0953-8984/18/4/020 #' #' King, G.E., Herman, F., Lambert, R., Valla, P.G., Guralnik, B., 2016. #' Multi-OSL-thermochronometry of feldspar. Quaternary Geochronology 33, 76-87. doi:10.1016/j.quageo.2016.01.004 #' #' #' **Further reading** #' #' Morthekai, P., Jain, M., Cunha, P.P., Azevedo, J.M., Singhvi, A.K., 2011. 
An attempt to correct #' for the fading in million year old basaltic rocks. Geochronometria 38(3), 223-230. #' #' @examples #' #' ## Load example data (sample UNIL/NB123, see ?ExampleData.Fading) #' data("ExampleData.Fading", envir = environment()) #' #' ## (1) Set all relevant parameters #' # a. fading measurement data (IR50) #' fading_data <- ExampleData.Fading$fading.data$IR50 #' #' # b. Dose response curve data #' data <- ExampleData.Fading$equivalentDose.data$IR50 #' #' ## (2) Define required function parameters #' ddot <- c(7.00, 0.004) #' readerDdot <- c(0.134, 0.0067) #' #' # Analyse fading measurement and get an estimate of rho'. #' # Note that the RLum.Results object can be directly used for further processing. #' # The number of MC runs is reduced for this example #' rhop <- analyse_FadingMeasurement(fading_data, plot = TRUE, verbose = FALSE, n.MC = 10) #' #' ## (3) Apply the Kars et al. (2008) model to the data #' kars <- calc_Huntley2006(data = data, #' rhop = rhop, #' ddot = ddot, #' readerDdot = readerDdot, #' n.MC = 25) #' #' #' \dontrun{ #' # You can also provide LnTn values separately via the 'LnTn' argument. #' # Note, however, that the data frame for 'data' must then NOT contain #' # a LnTn value. See argument descriptions! #' LnTn <- data.frame(LnTn = c(1.84833, 2.24833), #' LnTn.error = c(0.17, 0.22)) #' #' LxTx <- data[2:nrow(data), ] #' #' kars <- calc_Huntley2006(data = LxTx, #' LnTn = LnTn, #' rhop = rhop, #' ddot = ddot, #' readerDdot = readerDdot, #' n.MC = 25) #' } #' @md #' @export calc_Huntley2006 <- function(data, LnTn = NULL, rhop, ddot, readerDdot, normalise = TRUE, fit.method = c("EXP", "GOK")[1], lower.bounds = c(-Inf, -Inf, -Inf), summary = TRUE, plot = TRUE, ...) { ## Validate Input ------------------------------------------------------------ ## Check fit method if (!fit.method %in% c("EXP", "GOK")) stop("[calc_Huntley2006] Invalid fit option ('", fit.method, "'). Only 'EXP' and 'GOK' allowed for argument 'fit.method'.", call. 
= FALSE) ## Check length of lower.bounds if (fit.method == "GOK" && length(lower.bounds) != 3) stop("[calc_Huntley2006] Argument 'lower.bounds' must be of length 3 exactly.", call. = FALSE) ## Check 'data' # must be a data frame if (is.data.frame(data)) { if (ncol(data) == 2) { warning("[calc_Huntley2006] 'data' only had two columns. We assumed that", " the errors on LxTx were missing and automatically added a", " 5 % error.\n Please provide a data frame with three columns", " if you wish to use actually measured LxTx errors.", call. = FALSE) data[ ,3] <- data[ ,2] * 0.05 } # Check if 'LnTn' is used and overwrite 'data' if (!is.null(LnTn)) { if (!is.data.frame(LnTn)) stop("Value for 'LnTn' must be a data frame!", call. = FALSE) if (ncol(LnTn) != 2) stop("Data frame for 'LnTn' must have two columns!", call. = FALSE) if (ncol(data) > 3) stop("Argument 'LnTn' requires the data frame 'data' to have 2 or 3 columns only!", call. = FALSE) # case 1: only one LnTn value if (nrow(LnTn) == 1) { LnTn <- setNames(cbind(0, LnTn), names(data)) data <- rbind(LnTn, data) # case 2: >1 LnTn value } else { LnTn_mean <- mean(LnTn[ ,1]) LnTn_sd <- sd(LnTn[ ,1]) LnTn_error <- max(LnTn_sd, LnTn[ ,2]) LnTn <- setNames(data.frame(0, LnTn_mean, LnTn_error), names(data)) data <- rbind(LnTn, data) } } # check number of columns if (ncol(data) %% 3 != 0) { stop("[calc_Huntley2006] the number of columns in 'data' must be a multiple of 3.", call. 
= FALSE) } else { # extract all LxTx values data_tmp <- do.call(rbind, lapply(seq(1, ncol(data), 3), function(col) { setNames(data[2:nrow(data), col:c(col+2)], c("dose", "LxTx", "LxTxError")) }) ) # extract the LnTn values (assumed to be the first row) and calculate the column mean LnTn_tmp <- do.call(rbind, lapply(seq(1, ncol(data), 3), function(col) { setNames(data[1, col:c(col+2)], c("dose", "LxTx", "LxTxError")) }) ) # check whether the standard deviation of LnTn estimates or the largest # individual error is highest, and take the larger one LnTn_error_tmp <- max(c(sd(LnTn_tmp[ ,2]), mean(LnTn_tmp[ ,3])), na.rm = TRUE) LnTn_tmp <- colMeans(LnTn_tmp) # re-bind the data frame data <- rbind(LnTn_tmp, data_tmp) data[1, 3] <- LnTn_error_tmp data <- data[complete.cases(data), ] } } else { stop("\n[calc_Huntley2006] 'data' must be a data frame.", call. = FALSE) } ## Check 'rhop' # check if numeric if (is.numeric(rhop)) { ### TODO: can be of length 2 if error if (length(rhop) != 2) stop("\n[calc_Huntley2006] 'rhop' must be a vector of length two.", call. = FALSE) # alternatively, and RLum.Results object produced by analyse_FadingMeasurement() # can be provided } else if (inherits(rhop, "RLum.Results")) { if (rhop@originator == "analyse_FadingMeasurement") rhop <- c(rhop@data$rho_prime$MEAN, rhop@data$rho_prime$SD) else stop("\n[calc_Huntley2006] Only an 'RLum.Results' object produced by", " 'analyse_FadingMeasurement()' is allowed as input for 'rhop'.", call. = FALSE) } # check if 'rhop' is actually a positive value if (any(is.na(rhop)) || !rhop[1] > 0 || any(is.infinite(rhop))) { stop("\n[calc_Huntley2006] 'rhop' must be a positive number. Provided value", " was: ", signif(rhop[1], 3), " \u2213 " , signif(rhop[2], 3), call. = FALSE) } ## Check ddot & readerDdot # check if numeric if (any(sapply(list(ddot, readerDdot), is.numeric) == FALSE)) stop("\n[calc_Huntley2006] 'ddot' and 'readerDdot' must be numeric values.", call. 
= FALSE) # check if length == 2 if (any(sapply(list(ddot, readerDdot), function(x) length(x) == 2) == FALSE)) stop("\n[calc_Huntley2006] 'ddot' and 'readerDdot' must be of length 2.", call. = FALSE) ## Settings ------------------------------------------------------------------ settings <- list(verbose = TRUE, n.MC = 100000) settings <- modifyList(settings, list(...)) ## Define Constants ---------------------------------------------------------- kb <- 8.617343 * 1e-5 alpha <- 1 Hs <- 3e15 # s value after Huntley (2006) Ma <- 1e6 * 365.25 * 24 * 3600 #in seconds ka <- Ma / 1000 #in seconds ## Define Functions ---------------------------------------------------------- # fit data using using Eq 5. from Kars et al (2008) employing # theta after King et al. (2016) theta <- function(t, r) { res <- exp(-r * log(1.8 * Hs * (0.5 * t))^3) res[!is.finite(res)] <- 0 return(res) } ## Preprocessing ------------------------------------------------------------- readerDdot.error <- readerDdot[2] readerDdot <- readerDdot[1] ddot.error <- ddot[2] ddot <- ddot[1] colnames(data) <- c("dose", "LxTx", "LxTx.Error") dosetime <- data[["dose"]][2:nrow(data)] LxTx.measured <- data[["LxTx"]][2:nrow(data)] LxTx.measured.error <- data[["LxTx.Error"]][2:nrow(data)] #Keep LnTn separate for derivation of measured fraction of saturation Ln <- data[["LxTx"]][1] Ln.error <- data[["LxTx.Error"]][1] ## (1) MEASURED ---------------------------------------------------- if (settings$verbose) cat("\n") data.tmp <- data data.tmp[ ,1] <- data.tmp[ ,1] * readerDdot GC.settings <- list(sample = data.tmp, mode = "interpolation", fit.method = fit.method, fit.bounds = TRUE, output.plot = plot, main = "Measured dose response curve", xlab = "Dose (Gy)", verbose = FALSE) GC.settings <- modifyList(GC.settings, list(...)) GC.settings$verbose <- FALSE GC.measured <- try(do.call(plot_GrowthCurve, GC.settings)) if (inherits(GC.measured, "try-error")) stop("\n[calc_Huntley2006()] Unable to fit growth curve to measured 
data", call. = FALSE) # extract results and calculate age GC.results <- get_RLum(GC.measured) fit_measured <- GC.measured@data$Fit De.measured <- GC.results$De De.measured.error <- GC.results$De.Error D0.measured <- GC.results$D01 D0.measured.error <- GC.results$D01.ERROR Age.measured <- De.measured/ ddot Age.measured.error <- Age.measured * sqrt( (De.measured.error / De.measured)^2 + (readerDdot.error / readerDdot)^2 + (ddot.error / ddot)^2) ## (2) SIMULATED ----------------------------------------------------- # create MC samples rhop_MC <- rnorm(n = settings$n.MC, mean = rhop[1], sd = rhop[2]) # fitcoef <- do.call(rbind, sapply(rhop_MC, function(rhop_i) { if (fit.method == "EXP") { fit_sim <- try(minpack.lm::nlsLM(LxTx.measured ~ a * theta(dosetime, rhop_i) * (1 - exp(-dosetime / D0)), start = list(a = max(LxTx.measured), D0 = D0.measured / readerDdot), control = list(maxiter = settings$maxiter)), silent = TRUE) } else if (fit.method == "GOK") { fit_sim <- try(minpack.lm::nlsLM(LxTx.measured ~ a * theta(dosetime, rhop_i) * (1-(1+(1/D0)*dosetime*c)^(-1/c)), start = list(a = coef(fit_measured)[["a"]], D0 = D0.measured / readerDdot, c = coef(fit_measured)[["c"]]), lower = lower.bounds, control = list(maxiter = settings$maxiter)), silent = TRUE) } if (!inherits(fit_sim, "try-error")) coefs <- coef(fit_sim) else coefs <- c(NA, NA) return(coefs) }, simplify = FALSE)) # final fit for export # fit_simulated <- minpack.lm::nlsLM(LxTx.measured ~ a * theta(dosetime, rhop[1]) * (1 - exp(-dosetime / D0)), # start = list(a = max(LxTx.measured), D0 = D0.measured / readerDdot)) # scaling factor A <- mean(fitcoef[, 1], na.rm = TRUE) A.error <- sd(fitcoef[ ,1], na.rm = TRUE) # calculate measured fraction of saturation nN <- Ln / A nN.error <- nN * sqrt( (Ln.error / Ln)^2 + (A.error / A)^2) # compute a natural dose response curve following the assumptions of # Morthekai et al. 
2011, Geochronometria # natdosetime <- seq(0, 1e14, length.out = settings$n.MC) # natdosetimeGray <- natdosetime * ddot / ka # calculate D0 dose in seconds computedD0 <- (fitcoef[ ,2] * readerDdot) / (ddot / ka) # Legacy code: # This is an older approximation to calculate the natural dose response curve, # which sometimes tended to slightly underestimate nN_ss. This is now replaced # with the newer approach below. # compute natural dose response curve # LxTx.sim <- A * theta(natdosetime, rhop[1]) * (1 - exp(-natdosetime / mean(computedD0, na.rm = TRUE) )) # warning("LxTx Curve: ", round(max(LxTx.sim) / A, 3), call. = FALSE) # compute natural dose response curve ddots <- ddot / ka natdosetimeGray <- c(0, exp(seq(1, log(max(data[ ,1]) * 2), length.out = 999))) natdosetime <- natdosetimeGray rprime <- seq(0.01, 5, length.out = 500) pr <- 3 * rprime^2 * exp(-rprime^3) # Huntley 2006, eq. 3 K <- Hs * exp(-rhop[1]^-(1/3) * rprime) TermA <- matrix(NA, nrow = length(rprime), ncol = length(natdosetime)) UFD0 <- mean(fitcoef[ ,2], na.rm = TRUE) * readerDdot if (fit.method == "GOK") c_gok <- mean(fitcoef[ ,3], na.rm = TRUE) for (j in 1:length(natdosetime)) { for (k in 1:length(rprime)) { if (fit.method == "EXP") { TermA[k,j] <- A * pr[k] * ((ddots / UFD0) / (ddots / UFD0 + K[k]) * (1 - exp(-natdosetime[j] * (1 / UFD0 + K[k]/ddots)))) } else if (fit.method == "GOK") { TermA[k,j] <- A * pr[k] * (ddots / UFD0) / (ddots / UFD0 + K[k]) * (1-(1+(1/UFD0 + K[k]/ddots) * natdosetime[j] * c_gok)^(-1/c_gok)) } }} LxTx.sim <- colSums(TermA) / sum(pr) # warning("LxTx Curve (new): ", round(max(LxTx.sim) / A, 3), call. 
= FALSE) # calculate Age positive <- which(diff(LxTx.sim) > 0) data.unfaded <- data.frame(dose = c(0, natdosetimeGray[positive]), LxTx = c(Ln, LxTx.sim[positive]), LxTx.error = c(Ln.error, LxTx.sim[positive] * A.error/A)) data.unfaded$LxTx.error[2] <- 0.0001 GC.settings <- list(sample = data.unfaded, mode = "interpolation", fit.method = fit.method, fit.bounds = TRUE, output.plot = plot, verbose = FALSE, main = "Simulated dose response curve", xlab = "Dose (Gy)") GC.settings <- modifyList(GC.settings, list(...)) GC.settings$verbose <- FALSE suppressWarnings( GC.simulated <- try(do.call(plot_GrowthCurve, GC.settings)) ) if (!inherits(GC.simulated, "try-error")) { GC.simulated.results <- get_RLum(GC.simulated) fit_simulated <- get_RLum(GC.simulated, "Fit") De.sim <- GC.simulated.results$De De.error.sim <- GC.simulated.results$De.Error # derive simulated D0 D0.sim.Gy <- GC.simulated.results$D01 D0.sim.Gy.error <- GC.simulated.results$D01.ERROR Age.sim <- De.sim / ddot Age.sim.error <- Age.sim * sqrt( ( De.error.sim/ De.sim)^2 + (readerDdot.error / readerDdot)^2 + (ddot.error / ddot)^2) Age.sim.2D0 <- 2 * D0.sim.Gy / ddot Age.sim.2D0.error <- Age.sim.2D0 * sqrt( ( D0.sim.Gy.error / D0.sim.Gy)^2 + (readerDdot.error / readerDdot)^2 + (ddot.error / ddot)^2) } else { De.sim <- De.error.sim <- Age.sim <- Age.sim.error <- fit_simulated <- D0.sim.Gy <- D0.sim.Gy.error <- NA Age.sim.2D0 <- Age.sim.2D0.error <- NA } if (Ln > max(LxTx.sim) * 1.1) warning("[calc_Huntley2006] Ln is >10 % larger than the maximum computed LxTx value.", " The De and age should be regarded as infinite estimates.", call. 
= FALSE) # Estimate nN_(steady state) by Monte Carlo Simulation ddot_MC <- rnorm(n = settings$n.MC, mean = ddot, sd = ddot.error) UFD0_MC <- rnorm(n = settings$n.MC, mean = D0.sim.Gy, sd = D0.sim.Gy.error) nN_SS_MC <- mapply(function(rhop_i, ddot_i, UFD0_i) { rprime <- seq(0.01, 5, length.out = settings$n.MC) rho <- 3 * alpha^3 * rhop_i / (4 * pi) r <- rprime / (4 * pi * rho / 3)^(1 / 3) pr <- 3 * rprime^2 * exp(-rprime^3) tau <- ((1 / Hs) * exp(1)^(alpha * r)) / ka Ls <- 1 / (1 + UFD0_i / (ddot_i * tau)) Lstrap <- (pr * Ls) / sum(pr) # field saturation nN_SS_i <- sum(Lstrap) return(nN_SS_i) }, rhop_MC, ddot_MC, UFD0_MC, SIMPLIFY = TRUE) nN_SS <- suppressWarnings(exp(mean(log(nN_SS_MC), na.rm = TRUE))) nN_SS.error <- suppressWarnings(nN_SS * abs(sd(log(nN_SS_MC), na.rm = TRUE) / mean(log(nN_SS_MC), na.rm = TRUE))) ## legacy code for debugging purposes ## nN_SS is often lognormally distributed, so we now take the mean and sd ## of the log values. # warning(mean(nN_SS_MC, na.rm = TRUE)) # warning(sd(nN_SS_MC, na.rm = TRUE)) ## (3) UNFADED --------------------------------------------------------------- LxTx.unfaded <- LxTx.measured / theta(dosetime, rhop[1]) LxTx.unfaded[is.nan((LxTx.unfaded))] <- 0 LxTx.unfaded[is.infinite(LxTx.unfaded)] <- 0 dosetimeGray <- dosetime * readerDdot if (fit.method == "EXP") { fit_unfaded <- minpack.lm::nlsLM(LxTx.unfaded ~ a * (1 - exp(-dosetimeGray / D0)), start = list(a = max(LxTx.unfaded), D0 = D0.measured / readerDdot), control = list(maxiter = settings$maxiter)) } else if (fit.method == "GOK") { fit_unfaded <- minpack.lm::nlsLM(LxTx.unfaded ~ a * (1-(1+(1/D0)*dosetimeGray*c)^(-1/c)), start = list(a = coef(fit_simulated)[["a"]], D0 = coef(fit_simulated)[["b"]] / readerDdot, c = coef(fit_simulated)[["c"]]), lower = lower.bounds, control = list(maxiter = settings$maxiter)) } D0.unfaded <- coef(fit_unfaded)[["D0"]] D0.error.unfaded <- summary(fit_unfaded)$coefficients["D0", "Std. 
Error"] ## Create LxTx tables -------------------------------------------------------- # normalise by A (saturation point of the un-faded curve) if (normalise) { LxTx.measured.relErr <- (LxTx.measured.error / LxTx.measured) LxTx.measured <- LxTx.measured / A LxTx.measured.error <- LxTx.measured * LxTx.measured.relErr LxTx.sim <- LxTx.sim / A LxTx.unfaded <- LxTx.unfaded / A Ln.relErr <- Ln.error / Ln Ln <- Ln / A Ln.error <- Ln * Ln.relErr } # combine all computed LxTx values LxTx_measured <- data.frame( dose = dosetimeGray, LxTx = LxTx.measured, LxTx.Error = LxTx.measured.error) LxTx_simulated <- data.frame( dose = natdosetimeGray, LxTx = LxTx.sim, LxTx.Error = LxTx.sim * A.error / A) LxTx_unfaded <- data.frame( dose = dosetimeGray, LxTx = LxTx.unfaded, LxTx.Error = LxTx.unfaded * A.error / A) ## Plot settings ------------------------------------------------------------- plot.settings <- list(main = "Dose response curves", xlab = "Dose (Gy)", ylab = ifelse(normalise, "normalised LxTx (a.u.)", "LxTx (a.u.)") ) plot.settings <- modifyList(plot.settings, list(...)) ## Plotting ------------------------------------------------------------------ if (plot) { # set plot parameters par.old.full <- par(no.readonly = TRUE) # set graphical parameters par(mar = c(5, 4, 4, 4), cex = 0.8) if (summary) par(oma = c(0, 3, 0, 9)) else par(oma = c(0, 9, 0, 9)) # Find a good estimate of the x-axis limits xlim <- range(pretty(dosetimeGray)) if (!is.na(De.sim) & De.sim > xlim[2]) xlim <- range(pretty(c(min(dosetimeGray), De.sim))) # Create figure after Kars et al. 
(2008) contrasting the dose response curves plot(dosetimeGray, LxTx_measured$LxTx, main = plot.settings$main, xlab = plot.settings$xlab, ylab = plot.settings$ylab, pch = 16, ylim = c(0, max(do.call(rbind, list(LxTx_measured, LxTx_unfaded))[["LxTx"]])), xlim = xlim ) # LxTx error bars segments(x0 = dosetimeGray, y0 = LxTx_measured$LxTx + LxTx_measured$LxTx.Error, x1 = dosetimeGray, y1 = LxTx_measured$LxTx - LxTx_measured$LxTx.Error, col = "black") # re-calculate the measured dose response curve in Gray xRange <- range(pretty(dosetimeGray)) xNew <- seq(xRange[1], xRange[2], length.out = 200) yNew <- predict(GC.measured@data$Fit, list(x = xNew)) if (normalise) yNew <- yNew / A # add line lines(xNew, yNew, col = "black") # add error polygon polygon(x = c(natdosetimeGray, rev(natdosetimeGray)), y = c(LxTx_simulated$LxTx + LxTx_simulated$LxTx.Error, rev(LxTx_simulated$LxTx - LxTx_simulated$LxTx.Error)), col = adjustcolor("grey", alpha.f = 0.5), border = NA) # computed LxTx values points(natdosetimeGray, LxTx_simulated$LxTx, type = "l", lty = 2) # Ln and DE as points points(x = c(0, De.measured), y = c(Ln, Ln), col = "red", pch = c(1, 16)) # Ln error bar segments(x0 = 0, y0 = Ln - Ln.error, x1 = 0, y1 = Ln + Ln.error, col = "red") # Ln as a horizontal line lines(x = c(0, max(c(De.measured, De.sim), na.rm = TRUE)), y = c(Ln, Ln), col = "black", lty = 3) # vertical line of measured DE lines(x = c(De.measured, De.measured), y = c(0, Ln), col = "black", lty = 3) # add legends legend("bottomright", legend = c("Unfaded DRC", "Measured DRC", "Simulated natural DRC"), lty = c(5, 1, 2), bty = "n") # add vertical line of simulated De if (!is.na(De.sim)) { lines(x = c(De.sim, De.sim), y = c(0, Ln), col = "black", lty = 3) points(x = De.sim, y = Ln, col = "red" , pch = 16) } else { lines(x = c(De.measured, xlim[2]), y = c(Ln, Ln), col = "black", lty = 3) } # add unfaded DRC xRange <- range(pretty(dosetimeGray)) xNew <- seq(xRange[1], xRange[2], length.out = 200) yNew <- 
predict(fit_unfaded, list(dosetimeGray = xNew)) if (normalise) yNew <- yNew / A lines(xNew, yNew, col = "black", lty = 5) points(x = dosetimeGray, y = LxTx_unfaded$LxTx, col = "black") # LxTx error bars segments(x0 = dosetimeGray, y0 = LxTx_unfaded$LxTx + LxTx_unfaded$LxTx.Error, x1 = dosetimeGray, y1 = LxTx_unfaded$LxTx - LxTx_unfaded$LxTx.Error, col = "black") # add text if (summary) { # define labels as expressions labels.text <- list( bquote(dot(D) == .(format(ddot, digits = 2, nsmall = 2)) %+-% .(round(as.numeric(format(ddot.error, digits = 3, nsmall = 3)), 3)) ~ frac(Gy, ka)), bquote(dot(D)["Reader"] == .(format(readerDdot, digits = 2, nsmall = 2)) %+-% .(round(as.numeric(format(readerDdot.error, digits = 3, nsmall = 3)), 3)) ~ frac(Gy, s)), bquote(log[10]~(rho~"'") == .(format(log10(rhop[1]), digits = 2, nsmall = 2)) %+-% .(round(as.numeric(format(rhop[2] / (rhop[1] * log(10, base = exp(1))), digits = 2, nsmall = 2)), 2)) ), bquote(bgroup("(", frac(n, N), ")") == .(format(nN, digits = 2, nsmall = 2)) %+-% .(round(as.numeric(format(nN.error, digits = 2, nsmall = 2)), 2)) ), bquote(bgroup("(", frac(n, N), ")")[SS] == .(format(nN_SS, digits = 2, nsmall = 2)) %+-% .(round(as.numeric(format(nN_SS.error, digits = 2, nsmall = 2)), 2)) ), bquote(D["E,sim"] == .(format(De.sim, digits = 0, nsmall = 0)) %+-% .(format(De.error.sim, digits = 0, nsmall = 0)) ~ Gy), bquote(D["0,sim"] == .(format(D0.sim.Gy, digits = 0, nsmall = 0)) %+-% .(format(D0.sim.Gy.error, digits = 0, nsmall = 0)) ~ Gy), bquote(Age["sim"] == .(format(Age.sim, digits = 0, nsmall = 0)) %+-% .(format(Age.sim.error, digits = 0, nsmall = 0)) ~ ka) ) # each of the labels is positioned at 1/10 of the availalbe y-axis space ypos <- seq(range(axTicks(2))[2], range(axTicks(2))[1], length.out = 10)[1:length(labels.text)] # allow overprinting par(xpd = NA) # add labels iteratively mapply(function(label, pos) { text(x = max(axTicks(1)) * 1.05, y = pos, labels = label, pos = 4) }, labels.text, ypos) } # recover 
plot parameters on.exit(par(par.old.full)) } ## Results ------------------------------------------------------------------- results <- set_RLum( class = "RLum.Results", data = list( results = data.frame("nN" = nN, "nN.error" = nN.error, "nN_SS" = nN_SS, "nN_SS.error" = nN_SS.error, "Meas_De" = De.measured, "Meas_De.error" = De.measured.error, "Meas_D0" = D0.measured, "Meas_D0.error" = D0.measured.error, "Meas_Age" = Age.measured, "Meas_Age.error" = Age.measured.error, "Sim_De" = De.sim, "Sim_De.error" = De.error.sim, "Sim_D0" = D0.sim.Gy, "Sim_D0.error" = D0.sim.Gy.error, "Sim_Age" = Age.sim, "Sim_Age.error" = Age.sim.error, "Sim_Age_2D0" = Age.sim.2D0, "Sim_Age_2D0.error" = Age.sim.2D0.error, "Unfaded_D0" = D0.unfaded, "Unfaded_D0.error" = D0.error.unfaded, row.names = NULL), data = data, Ln = c(Ln, Ln.error), LxTx_tables = list( simulated = LxTx_simulated, measured = LxTx_measured, unfaded = LxTx_unfaded), fits = list( simulated = fit_simulated, measured = fit_measured, unfaded = fit_unfaded ) ), info = list(call = sys.call(), args = as.list(sys.call())[-1]) ) ## Console output ------------------------------------------------------------ if (settings$verbose) { cat("\n[calc_Huntley2006()]\n") cat("\n -------------------------------") cat("\n (n/N) [-]:\t", round(results@data$results$nN, 2), "\u00b1", round(results@data$results$nN.error, 2)) cat("\n (n/N)_SS [-]:\t", round(results@data$results$nN_SS, 2),"\u00b1", round(results@data$results$nN_SS.error, 2)) cat("\n\n ---------- Measured -----------") cat("\n DE [Gy]:\t", round(results@data$results$Meas_De, 2), "\u00b1", round(results@data$results$Meas_De.error, 2)) cat("\n D0 [Gy]:\t", round(results@data$results$Meas_D0, 2), "\u00b1", round(results@data$results$Meas_D0.error, 2)) if (fit.method == "GOK") { cat("\n c [-]:\t\t", round(summary(fit_measured)$coefficients["c", "Estimate"], 2), "\u00b1", round(summary(fit_measured)$coefficients["c", "Std. 
Error"], 2)) } cat("\n Age [ka]:\t", round(results@data$results$Meas_Age, 2), "\u00b1", round(results@data$results$Meas_Age.error, 2)) cat("\n\n ---------- Un-faded -----------") cat("\n D0 [Gy]:\t", round(results@data$results$Unfaded_D0, 2), "\u00b1", round(results@data$results$Unfaded_D0.error, 2)) if (fit.method == "GOK") { cat("\n c [-]:\t\t", round(summary(fit_unfaded)$coefficients["c", "Estimate"], 2), "\u00b1", round(summary(fit_unfaded)$coefficients["c", "Std. Error"], 2)) } cat("\n\n ---------- Simulated ----------") cat("\n DE [Gy]:\t", round(results@data$results$Sim_De, 2), "\u00b1", round(results@data$results$Sim_De.error, 2)) cat("\n D0 [Gy]:\t", round(results@data$results$Sim_D0, 2), "\u00b1", round(results@data$results$Sim_D0.error, 2)) if (fit.method == "GOK") { cat("\n c [-]:\t\t", round(summary(fit_simulated)$coefficients["c", "Estimate"], 2), "\u00b1", round(summary(fit_simulated)$coefficients["c", "Std. Error"], 2)) } cat("\n Age [ka]:\t", round(results@data$results$Sim_Age, 2), "\u00b1", round(results@data$results$Sim_Age.error, 2)) cat("\n Age @2D0 [ka]:\t", round(results@data$results$Sim_Age_2D0, 2), "\u00b1", round(results@data$results$Sim_Age_2D0.error, 2)) cat("\n -------------------------------\n\n") } ## Return value -------------------------------------------------------------- return(results) } Luminescence/R/analyse_baSAR.R0000644000176200001440000026063013604172511015663 0ustar liggesusers#' Bayesian models (baSAR) applied on luminescence data #' #' This function allows the application of Bayesian models on luminescence data, measured #' with the single-aliquot regenerative-dose (SAR, Murray and Wintle, 2000) protocol. In particular, #' it follows the idea proposed by Combes et al., 2015 of using an hierarchical model for estimating #' a central equivalent dose from a set of luminescence measurements. 
This function is (I) the adaption #' of this approach for the R environment and (II) an extension and a technical refinement of the #' published code. #' #' Internally the function consists of two parts: (I) The Bayesian core for the Bayesian calculations #' and applying the hierchical model and (II) a data pre-processing part. The Bayesian core can be run #' independently, if the input data are sufficient (see below). The data pre-processing part was #' implemented to simplify the analysis for the user as all needed data pre-processing is done #' by the function, i.e. in theory it is enough to provide a BIN/BINX-file with the SAR measurement #' data. For the Bayesian analysis for each aliquot the following information are needed from the SAR analysis. #' LxTx, the LxTx error and the dose values for all regeneration points. #' #' **How the systematic error contribution is calculated?** #' #' Standard errors (so far) provided with the source dose rate are considered as systematic uncertainties #' and added to final central dose by: #' #' \deqn{systematic.error = 1/n \sum SE(source.doserate)} #' #' \deqn{SE(central.dose.final) = \sqrt{SE(central.dose)^2 + systematic.error^2}} #' #' Please note that this approach is rather rough and can only be valid if the source dose rate #' errors, in case different readers had been used, are similar. In cases where more than #' one source dose rate is provided a warning is given. #' #' **Input / output scenarios** #' #' Various inputs are allowed for this function. Unfortunately this makes the function handling rather #' complex, but at the same time very powerful. Available scenarios: #' #' **(1) - `object` is BIN-file or link to a BIN-file** #' #' Finally it does not matter how the information of the BIN/BINX file are provided. The function #' supports **(a)** either a path to a file or directory or a `list` of file names or paths or #' **(b)** a [Risoe.BINfileData-class] object or a list of these objects. 
The latter one can #' be produced by using the function [read_BIN2R], but this function is called automatically #' if only a filename and/or a path is provided. In both cases it will become the data that can be #' used for the analysis. #' #' `[XLS_file = NULL]` #' #' If no XLS file (or data frame with the same format) is provided the functions runs an automatic process that #' consists of the following steps: #' #' 1. Select all valid aliquots using the function [verify_SingleGrainData] #' 2. Calculate Lx/Tx values using the function [calc_OSLLxTxRatio] #' 3. Calculate De values using the function [plot_GrowthCurve] #' #' These proceeded data are subsequently used in for the Bayesian analysis #' #' `[XLS_file != NULL]` #' #' If an XLS-file is provided or a `data.frame` providing similar information the pre-processing #' steps consists of the following steps: #' #' 1. Calculate Lx/Tx values using the function [calc_OSLLxTxRatio] #' 2. Calculate De values using the function [plot_GrowthCurve] #' #' Means, the XLS file should contain a selection of the BIN-file names and the aliquots selected #' for the further analysis. This allows a manual selection of input data, as the automatic selection #' by [verify_SingleGrainData] might be not totally sufficient. #' #' #' **(2) - `object` `RLum.Results object`** #' #' If an [RLum.Results-class] object is provided as input and(!) this object was #' previously created by the function `analyse_baSAR()` itself, the pre-processing part #' is skipped and the function starts directly the Bayesian analysis. This option is very powerful #' as it allows to change parameters for the Bayesian analysis without the need to repeat #' the data pre-processing. If furthermore the argument `aliquot_range` is set, aliquots #' can be manually excluded based on previous runs. 
#' #' **`method_control`** #' #' These are arguments that can be passed directly to the Bayesian calculation core, supported arguments #' are: #' #' \tabular{lll}{ #' **Parameter** \tab **Type** \tab **Descritpion**\cr #' `lower_centralD` \tab [numeric] \tab sets the lower bound for the expected De range. Change it only if you know what you are doing!\cr #' `upper_centralD` \tab [numeric] \tab sets the upper bound for the expected De range. Change it only if you know what you are doing!\cr #' `n.chains` \tab [integer] \tab sets number of parallel chains for the model (default = 3) (cf. [rjags::jags.model])\cr #' `inits` \tab [list] \tab option to set initialisation values (cf. [rjags::jags.model]) \cr #' `thin` \tab [numeric] \tab thinning interval for monitoring the Bayesian process (cf. [rjags::jags.model])\cr #' `variable.names` \tab [character] \tab set the variables to be monitored during the MCMC run, default: #' `'central_D'`, `'sigma_D'`, `'D'`, `'Q'`, `'a'`, `'b'`, `'c'`, `'g'`. #' Note: only variables present in the model can be monitored. #' } #' #' **User defined models**\cr #' #' The function provides the option to modify and to define own models that can be used for #' the Bayesian calculation. In the case the user wants to modify a model, a new model #' can be piped into the funtion via the argument `baSAR_model` as `character`. #' The model has to be provided in the JAGS dialect of the BUGS language (cf. [rjags::jags.model]) #' and parameter names given with the pre-defined names have to be respected, otherwise the function #' will break. #' #' **FAQ** #' #' Q: How can I set the seed for the random number generator (RNG)? 
#' #' A: Use the argument `method_control`, e.g., for three MCMC chains #' (as it is the default): #' #' ``` #' method_control = list( #' inits = list( #' list(.RNG.name = "base::Wichmann-Hill", .RNG.seed = 1), #' list(.RNG.name = "base::Wichmann-Hill", .RNG.seed = 2), #' list(.RNG.name = "base::Wichmann-Hill", .RNG.seed = 3) #' )) #' ``` #' #' This sets a reproducible set for every chain separately.\cr #' #' Q: How can I modify the output plots? #' #' A: You can't, but you can use the function output to create own, modified plots. #' #' #' Q: Can I change the boundaries for the central_D? #' #' A: Yes, we made it possible, but we DO NOT recommend it, except you know what you are doing!\cr #' Example: `method_control = list(lower_centralD = 10))` #' #' Q: The lines in the baSAR-model appear to be in a wrong logical order?\cr #' #' A: This is correct and allowed (cf. JAGS manual) #' #' #' **Additional arguments support via the `...` argument** #' #' This list summarizes the additional arguments that can be passed to the internally used #' functions. 
#' #' \tabular{llll}{ #' **Supported argument** \tab **Corresponding function** \tab **Default** \tab **Short description **\cr #' `threshold` \tab [verify_SingleGrainData] \tab `30` \tab change rejection threshold for curve selection \cr #' `sheet` \tab [readxl::read_excel] \tab `1` \tab select XLS-sheet for import\cr #' `col_names` \tab [readxl::read_excel] \tab `TRUE` \tab first row in XLS-file is header\cr #' `col_types` \tab [readxl::read_excel] \tab `NULL` \tab limit import to specific columns\cr #' `skip` \tab [readxl::read_excel] \tab `0` \tab number of rows to be skipped during import\cr #' `n.records` \tab [read_BIN2R] \tab `NULL` \tab limit records during BIN-file import\cr #' `duplicated.rm` \tab [read_BIN2R] \tab `TRUE` \tab remove duplicated records in the BIN-file\cr #' `pattern` \tab [read_BIN2R] \tab `TRUE` \tab select BIN-file by name pattern\cr #' `position` \tab [read_BIN2R] \tab `NULL` \tab limit import to a specific position\cr #' `background.count.distribution` \tab [calc_OSLLxTxRatio] \tab `"non-poisson"` \tab set assumed count distribution\cr #' `fit.weights` \tab [plot_GrowthCurve] \tab `TRUE` \tab enables / disables fit weights\cr #' `fit.bounds` \tab [plot_GrowthCurve] \tab `TRUE` \tab enables / disables fit bounds\cr #' `NumberIterations.MC` \tab [plot_GrowthCurve] \tab `100` \tab number of MC runs for error calculation\cr #' `output.plot` \tab [plot_GrowthCurve] \tab `TRUE` \tab enables / disables dose response curve plot\cr #' `output.plotExtended` \tab [plot_GrowthCurve] \tab `TRUE` \tab enables / disables extended dose response curve plot\cr #' } #' #' #' @param object [Risoe.BINfileData-class], [RLum.Results-class], [list] of [RLum.Analysis-class], #' [character] or [list] (**required**): #' input object used for the Bayesian analysis. If a `character` is provided the function #' assumes a file connection and tries to import a BIN/BINX-file using the provided path. 
If a `list` is #' provided the list can only contain either `Risoe.BINfileData` objects or `character`s #' providing a file connection. Mixing of both types is not allowed. If an [RLum.Results-class] #' is provided the function directly starts with the Bayesian Analysis (see details) #' #' @param XLS_file [character] (*optional*): #' XLS_file with data for the analysis. This file must contain 3 columns: #' the name of the file, the disc position and the grain position #' (the last being 0 for multi-grain measurements).\cr #' Alternatively a `data.frame` of similar structure can be provided. #' #' @param aliquot_range [numeric] (*optional*): #' allows to limit the range of the aliquots used for the analysis. #' This argument has only an effect if the argument `XLS_file` is used or #' the input is the previous output (i.e. is [RLum.Results-class]). In this case the #' new selection will add the aliquots to the removed aliquots table. #' #' @param source_doserate [numeric] **(required)**: #' source dose rate of beta-source used for the measuremnt and its uncertainty #' in Gy/s, e.g., `source_doserate = c(0.12, 0.04)`. Paramater can be provided #' as `list`, for the case that more than one BIN-file is provided, e.g., #' `source_doserate = list(c(0.04, 0.004), c(0.05, 0.004))`. #' #' @param signal.integral [vector] (**required**): #' vector with the limits for the signal integral used for the calculation, #' e.g., `signal.integral = c(1:5)`. Ignored if `object` is an [RLum.Results-class] object. #' The parameter can be provided as `list`, see `source_doserate`. #' #' @param signal.integral.Tx [vector] (*optional*): #' vector with the limits for the signal integral for the Tx curve. I #' f nothing is provided the value from `signal.integral` is used and it is ignored #' if `object` is an [RLum.Results-class] object. #' The parameter can be provided as `list`, see `source_doserate`. 
#' #' @param background.integral [vector] (**required**): #' vector with the bounds for the background integral. #' Ignored if `object` is an [RLum.Results-class] object. #' The parameter can be provided as `list`, see `source_doserate`. #' #' @param background.integral.Tx [vector] (*optional*): #' vector with the limits for the background integral for the Tx curve. #' If nothing is provided the value from `background.integral` is used. #' Ignored if `object` is an [RLum.Results-class] object. #' The parameter can be provided as `list`, see `source_doserate`. #' #' @param irradiation_times [numeric] (*optional*): if set this vector replaces all irradiation #' times for one aliquot and one cycle (Lx and Tx curves) and recycles it for all others cycles and aliquots. #' Plesae note that if this argument is used, for every(!) single curve #' in the dataset an irradiation time needs to be set. #' #' @param sigmab [numeric] (*with default*): #' option to set a manual value for the overdispersion (for LnTx and TnTx), #' used for the Lx/Tx error calculation. The value should be provided as #' absolute squared count values, cf. [calc_OSLLxTxRatio]. #' The parameter can be provided as `list`, see `source_doserate`. #' #' @param sig0 [numeric] (*with default*): #' allow adding an extra component of error to the final Lx/Tx error value #' (e.g., instrumental errror, see details is [calc_OSLLxTxRatio]). #' The parameter can be provided as `list`, see `source_doserate`. #' #' @param distribution [character] (*with default*): #' type of distribution that is used during Bayesian calculations for #' determining the Central dose and overdispersion values. #' Allowed inputs are `"cauchy"`, `"normal"` and `"log_normal"`. #' #' @param baSAR_model [character] (*optional*): #' option to provide an own modified or new model for the Bayesian calculation #' (see details). 
If an own model is provided the argument `distribution` is #' ignored and set to `'user_defined'` #' #' @param n.MCMC [integer] (*with default*): #' number of iterations for the Markov chain Monte Carlo (MCMC) simulations #' #' @param fit.method [character] (*with default*): #' equation used for the fitting of the dose-response curve using the function #' [plot_GrowthCurve] and then for the Bayesian modelling. Here supported methods: `EXP`, `EXP+LIN` and `LIN` #' #' @param fit.force_through_origin [logical] (*with default*): #' force fitting through origin #' #' @param fit.includingRepeatedRegPoints [logical] (*with default*): #' includes the recycling point (assumed to be measured during the last cycle) #' #' @param method_control [list] (*optional*): #' named list of control parameters that can be directly #' passed to the Bayesian analysis, e.g., `method_control = list(n.chains = 4)`. #' See details for further information #' #' @param digits [integer] (*with default*): #' round output to the number of given digits #' #' @param distribution_plot [character] (*with default*): sets the final distribution plot that #' shows equivalent doses obtained using the frequentist approach and sets in the central dose #' as comparison obtained using baSAR. Allowed input is `'abanico'` or `'kde'`. If set to `NULL` nothing is plotted. #' #' @param plot [logical] (*with default*): #' enables or disables plot output #' #' @param plot_reduced [logical] (*with default*): #' enables or disables the advanced plot output #' #' @param plot.single [logical] (*with default*): #' enables or disables single plots or plots arranged by `analyse_baSAR` #' #' @param verbose [logical] (*with default*): #' enables or disables verbose mode #' #' @param ... parameters that can be passed to the function [calc_OSLLxTxRatio] #' (almost full support), [readxl::read_excel] (full support), [read_BIN2R] (`n.records`, #' `position`, `duplicated.rm`), see details. 
#' #' #' @return Function returns results numerically and graphically: #' #' -----------------------------------\cr #' `[ NUMERICAL OUTPUT ]`\cr #' -----------------------------------\cr #' #' **`RLum.Results`**-object #' #' **slot:** **`@data`** #' #' \tabular{lll}{ #' **Element** \tab **Type** \tab **Description**\cr #' `$summary` \tab `data.frame` \tab statistical summary, including the central dose \cr #' `$mcmc` \tab `mcmc` \tab [coda::mcmc.list] object including raw output \cr #' `$models` \tab `character` \tab implemented models used in the baSAR-model core \cr #' `$input_object` \tab `data.frame` \tab summarising table (same format as the XLS-file) including, e.g., Lx/Tx values\cr #' `$removed_aliquots` \tab `data.frame` \tab table with removed aliquots (e.g., NaN, or Inf Lx/Tx values). If nothing was removed `NULL` is returned #' } #' #'**slot:** **`@info`** #' #' The original function call #' #' ------------------------\cr #' `[ PLOT OUTPUT ]`\cr #' ------------------------\cr #' #' - (A) Ln/Tn curves with set integration limits, #' - (B) trace plots are returned by the baSAR-model, showing the convergence of the parameters (trace) #' and the resulting kernel density plots. If `plot_reduced = FALSE` for every(!) dose a trace and #' a density plot is returned (this may take a long time), #' - (C) dose plots showing the dose for every aliquot as boxplots and the marked #' HPD in within. If boxes are coloured 'orange' or 'red' the aliquot itself should be checked, #' - (D) the dose response curve resulting from the monitoring of the Bayesian modelling are #' provided along with the Lx/Tx values and the HPD. Note: The amount for curves displayed #' is limited to 1000 (random choice) for performance reasons, #' - (E) the final plot is the De distribution as calculated using the conventional (frequentist) approach #' and the central dose with the HPDs marked within. 
This figure is only provided for a comparison, #' no further statistical conclusion should be drawn from it. #' #' #' **Please note: If distribution was set to `log_normal` the central dose is given as geometric mean!** #' #' #' @section Function version: 0.1.33 #' #' @author #' Norbert Mercier, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) \cr #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) \cr #' The underlying Bayesian model based on a contribution by Combes et al., 2015. #' #' @seealso [read_BIN2R], [calc_OSLLxTxRatio], [plot_GrowthCurve], #' [readxl::read_excel], [verify_SingleGrainData], #' [rjags::jags.model], [rjags::coda.samples], [boxplot.default] #' #' #' @references #' #' Combes, B., Philippe, A., Lanos, P., Mercier, N., Tribolo, C., Guerin, G., Guibert, P., Lahaye, C., 2015. #' A Bayesian central equivalent dose model for optically stimulated luminescence dating. #' Quaternary Geochronology 28, 62-70. doi:10.1016/j.quageo.2015.04.001 #' #' Mercier, N., Kreutzer, S., Christophe, C., Guerin, G., Guibert, P., Lahaye, C., Lanos, P., Philippe, A., #' Tribolo, C., 2016. Bayesian statistics in luminescence dating: The 'baSAR'-model and its implementation #' in the R package 'Luminescence'. Ancient TL 34, 14-21. #' #' **Further reading** #' #' Gelman, A., Carlin, J.B., Stern, H.S., Dunson, D.B., Vehtari, A., Rubin, D.B., 2013. #' Bayesian Data Analysis, Third Edition. CRC Press. #' #' Murray, A.S., Wintle, A.G., 2000. Luminescence dating of quartz using an improved single-aliquot #' regenerative-dose protocol. Radiation Measurements 32, 57-73. doi:10.1016/S1350-4487(99)00253-X #' #' Plummer, M., 2017. JAGS Version 4.3.0 user manual. 
https://sourceforge.net/projects/mcmc-jags/files/Manuals/4.x/jags_user_manual.pdf/download #' #' @note #' **If you provide more than one BIN-file**, it is **strongly** recommanded to provide #' a `list` with the same number of elements for the following parameters: #' #' `source_doserate`, `signal.integral`, `signal.integral.Tx`, `background.integral`, #' `background.integral.Tx`, `sigmab`, `sig0`. #' #' Example for two BIN-files: `source_doserate = list(c(0.04, 0.006), c(0.05, 0.006))` #' #' **The function is currently limited to work with standard Risoe BIN-files only!** #' #' @keywords datagen #' #' @examples #' #' ##(1) load package test data set #' data(ExampleData.BINfileData, envir = environment()) #' #' ##(2) selecting relevant curves, and limit dataset #' CWOSL.SAR.Data <- subset( #' CWOSL.SAR.Data, #' subset = POSITION%in%c(1:3) & LTYPE == "OSL") #' #' \dontrun{ #' ##(3) run analysis #' ##please not that the here selected parameters are #' ##choosen for performance, not for reliability #' results <- analyse_baSAR( #' object = CWOSL.SAR.Data, #' source_doserate = c(0.04, 0.001), #' signal.integral = c(1:2), #' background.integral = c(80:100), #' fit.method = "LIN", #' plot = FALSE, #' n.MCMC = 200 #' #' ) #' #' print(results) #' #' #' ##XLS_file template #' ##copy and paste this the code below in the terminal #' ##you can further use the function write.csv() to export the example #' #' XLS_file <- #' structure( #' list( #' BIN_FILE = NA_character_, #' DISC = NA_real_, #' GRAIN = NA_real_), #' .Names = c("BIN_FILE", "DISC", "GRAIN"), #' class = "data.frame", #' row.names = 1L #' ) #' #' } #' #' @md #' @export analyse_baSAR <- function( object, XLS_file = NULL, aliquot_range = NULL, source_doserate = NULL, signal.integral, signal.integral.Tx = NULL, background.integral, background.integral.Tx = NULL, irradiation_times = NULL, sigmab = 0, sig0 = 0.025, distribution = "cauchy", baSAR_model = NULL, n.MCMC = 100000, fit.method = "EXP", fit.force_through_origin = 
TRUE, fit.includingRepeatedRegPoints = TRUE, method_control = list(), digits = 3L, distribution_plot = "kde", plot = TRUE, plot_reduced = TRUE, plot.single = FALSE, verbose = TRUE, ... ){ ##//////////////////////////////////////////////////////////////////////////////////////////////// ##FUNCTION TO BE CALLED to RUN the Bayesian Model ##//////////////////////////////////////////////////////////////////////////////////////////////// ##START .baSAR_function <- function(Nb_aliquots, distribution, data.Dose, data.Lum, data.sLum, fit.method, n.MCMC, fit.force_through_origin, fit.includingRepeatedRegPoints, method_control, baSAR_model, verbose) { ##lower and uppder De, grep from method_control ... for sure we find it here, ##as it was set before the function call lower_centralD <- method_control[["lower_centralD"]] upper_centralD <- method_control[["upper_centralD"]] ##number of MCMC n.chains <- if (is.null(method_control[["n.chains"]])) { 3 } else{ method_control[["n.chains"]] } ##inits inits <- if (is.null(method_control[["inits"]])) { NULL } else{ method_control[["inits"]] } ##thin thin <- if (is.null(method_control[["thin"]])) { if(n.MCMC >= 1e+05){ thin <- n.MCMC/1e+05 * 250 }else{ thin <- 10 } } else{ method_control[["thin"]] } ##variable.names variable.names <- if (is.null(method_control[["variable.names"]])) { c('central_D', 'sigma_D', 'D', 'Q', 'a', 'b', 'c', 'g') } else{ method_control[["variable.names"]] } #check whether this makes sense at all, just a direty and quick test stopifnot(lower_centralD >= 0) Limited_cycles <- vector() if (fit.method == "EXP") {ExpoGC <- 1 ; LinGC <- 0 } if (fit.method == "LIN") {ExpoGC <- 0 ; LinGC <- 1 } if (fit.method == "EXP+LIN") {ExpoGC <- 1 ; LinGC <- 1 } if (fit.force_through_origin == TRUE) {GC_Origin <- 1} else {GC_Origin <- 0} ##Include or exclude repeated dose points if (fit.includingRepeatedRegPoints) { for (i in 1:Nb_aliquots) { Limited_cycles[i] <- length(stats::na.exclude(data.Dose[,i])) } }else{ for (i in 
1:Nb_aliquots) { temp.logic <- !duplicated(data.Dose[,i], incomparables=c(0)) # logical excluding 0 m <- length(which(!temp.logic)) data.Dose[,i] <- c(data.Dose[,i][temp.logic], rep(NA, m)) data.Lum[,i] <- c(data.Lum[,i][temp.logic], rep(NA, m)) data.sLum[,i] <- c(data.sLum[,i][temp.logic], rep(NA, m)) rm(m) rm(temp.logic) } for (i in 1:Nb_aliquots) { Limited_cycles[i] <- length(data.Dose[, i]) - length(which(is.na(data.Dose[, i]))) } } ##check and correct for distribution name if(!is.null(baSAR_model)){ if(distribution != "user_defined"){ distribution <- "user_defined" warning("[analyse_baSAR()] 'distribution' set to 'user_defined'.", call. = FALSE) } } # Bayesian Models ---------------------------------------------------------------------------- # INFO: > # > sometimes lines apear to be in a wrong logical order, however, this is allowed in the # > model definition since: # > "The data block is not limited to logical relations, but may also include stochastic relations." # > (Plummer, 2017. JAGS Version 4.3.0 user manual, p. 
9) baSAR_model <- list( cauchy = "model { central_D ~ dunif(lower_centralD,upper_centralD) precision_D ~ dt(0, pow(0.16*central_D, -2), 1)T(0, ) sigma_D <- 1/sqrt(precision_D) for (i in 1:Nb_aliquots) { a[i] ~ dnorm(6.5 , 1/(9.2^2) ) T(0, ) b[i] ~ dnorm(50 , 1/(1000^2) ) T(0, ) c[i] ~ dnorm(1.002 , 1/(0.9^2) ) T(0, ) g[i] ~ dnorm(0.5 , 1/(2.5^2) ) I(-a[i], ) sigma_f[i] ~ dexp (20) D[i] ~ dt ( central_D , precision_D, 1) # Cauchy distribution S_y[1,i] <- 1/(sLum[1,i]^2 + sigma_f[i]^2) Lum[1,i] ~ dnorm ( Q[1,i] , S_y[1,i]) Q[1,i] <- GC_Origin * g[i] + LinGC * (c[i] * D[i] ) + ExpoGC * (a[i] * (1 - exp (-D[i] /b[i])) ) for (m in 2:Limited_cycles[i]) { S_y[m,i] <- 1/(sLum[m,i]^2 + sigma_f[i]^2) Lum[m,i] ~ dnorm( Q[m,i] , S_y[m,i] ) Q[m,i] <- GC_Origin * g[i] + LinGC * (c[i] * Dose[m,i]) + ExpoGC * (a[i] * (1 - exp (-Dose[m,i]/b[i])) ) } } }", normal = "model { central_D ~ dunif(lower_centralD,upper_centralD) sigma_D ~ dunif(0.01, 1 * central_D) for (i in 1:Nb_aliquots) { a[i] ~ dnorm(6.5 , 1/(9.2^2) ) T(0, ) b[i] ~ dnorm(50 , 1/(1000^2) ) T(0, ) c[i] ~ dnorm(1.002 , 1/(0.9^2) ) T(0, ) g[i] ~ dnorm(0.5 , 1/(2.5^2) ) I(-a[i], ) sigma_f[i] ~ dexp (20) D[i] ~ dnorm ( central_D , 1/(sigma_D^2) ) # Normal distribution S_y[1,i] <- 1/(sLum[1,i]^2 + sigma_f[i]^2) Lum[1,i] ~ dnorm ( Q[1,i] , S_y[1,i]) Q[1,i] <- GC_Origin * g[i] + LinGC * (c[i] * D[i] ) + ExpoGC * (a[i] * (1 - exp (-D[i] /b[i])) ) for (m in 2:Limited_cycles[i]) { S_y[m,i] <- 1/(sLum[m,i]^2 + sigma_f[i]^2) Lum[m,i] ~ dnorm( Q[m,i] , S_y[m,i] ) Q[m,i] <- GC_Origin * g[i] + LinGC * (c[i] * Dose[m,i]) + ExpoGC * (a[i] * (1 - exp (-Dose[m,i]/b[i])) ) } } }", log_normal = "model { central_D ~ dunif(lower_centralD,upper_centralD) log_central_D <- log(central_D) - 0.5 * l_sigma_D^2 l_sigma_D ~ dunif(0.01, 1 * log(central_D)) sigma_D <- sqrt((exp(l_sigma_D^2) -1) * exp( 2*log_central_D + l_sigma_D^2) ) for (i in 1:Nb_aliquots) { a[i] ~ dnorm(6.5 , 1/(9.2^2) ) T(0, ) b[i] ~ dnorm(50 , 1/(1000^2) ) T(0, ) c[i] ~ dnorm(1.002 
, 1/(0.9^2) ) T(0, ) g[i] ~ dnorm(0.5 , 1/(2.5^2) ) I(-a[i], ) sigma_f[i] ~ dexp (20) log_D[i] ~ dnorm ( log_central_D , 1/(l_sigma_D^2) ) # Log-Normal distribution D[i] <- exp(log_D[i]) S_y[1,i] <- 1/(sLum[1,i]^2 + sigma_f[i]^2) Lum[1,i] ~ dnorm ( Q[1,i] , S_y[1,i]) Q[1,i] <- GC_Origin * g[i] + LinGC * (c[i] * D[i] ) + ExpoGC * (a[i] * (1 - exp (-D[i] /b[i])) ) for (m in 2:Limited_cycles[i]) { S_y[m,i] <- 1/(sLum[m,i]^2 + sigma_f[i]^2) Lum[m,i] ~ dnorm( Q[m,i] , S_y[m,i] ) Q[m,i] <- GC_Origin * g[i] + LinGC * (c[i] * Dose[m,i]) + ExpoGC * (a[i] * (1 - exp (-Dose[m,i]/b[i])) ) } } }", user_defined = baSAR_model ) ##check whether the input for distribution was sufficient if(!any(distribution%in%names(baSAR_model))){ stop(paste0("[analyse_baSAR()] No model is pre-defined for the requested distribution. Please select ", paste(rev(names(baSAR_model))[-1], collapse = ", ")), " or define an own model using the argument 'baSAR_model'!", call. = FALSE) }else{ if(is.null(baSAR_model)){ stop("[analyse_baSAR()] You have specified a 'user_defined' distribution, but you have not provided a model via 'baSAR_model'!", call. = FALSE) } } ### Bayesian inputs data_Liste <- list( 'Dose' = data.Dose, 'Lum' = data.Lum, 'sLum' = data.sLum, 'LinGC' = LinGC, 'ExpoGC' = ExpoGC, 'GC_Origin' = GC_Origin, 'Limited_cycles' = Limited_cycles, 'lower_centralD' = lower_centralD, 'upper_centralD' = upper_centralD, 'Nb_aliquots' = Nb_aliquots ) if(verbose){ cat("\n[analyse_baSAR()] ---- baSAR-model ---- \n") cat("\n++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n") cat("[analyse_baSAR()] Bayesian analysis in progress ...\n") message(paste(".. >> bounds set to: lower_centralD =", lower_centralD, "| upper_centralD =", upper_centralD)) } Nb_Iterations <- n.MCMC if (verbose) { message(paste0( ".. 
>> calculation will be done assuming a '", distribution, "' distribution\n" )) } ##set model jagsfit <- rjags::jags.model( file = textConnection(baSAR_model[[distribution]]), data = data_Liste, inits = inits, n.chains = n.chains, n.adapt = Nb_Iterations, quiet = if(verbose){FALSE}else{TRUE} ) ##update jags model (it is a S3-method) update( object = jagsfit, n.iter = Nb_Iterations, progress.bar = if(verbose){"text"}else{NULL} ) ##get data ... full and reduced, the reduced one to limit the plot output sampling <- rjags::coda.samples( model = jagsfit, variable.names = variable.names, n.iter = Nb_Iterations, thin = thin ) ##this we need for the output of the terminal ##Why sampling reduced? Because the summary() method produces a considerable overhead while ##running over all the variables sampling_reduced <- rjags::coda.samples( model = jagsfit, variable.names = c('central_D', 'sigma_D'), n.iter = Nb_Iterations, thin = thin ) pt_zero <- 0 nb_decal <- 2 pt_zero <- Nb_aliquots ##standard error and mean output.mean <- round(summary(sampling_reduced)[[1]][c("central_D", "sigma_D"), 1:2], digits) ##calculate geometric mean for the case that the distribution is log-normal if(distribution == "log_normal"){ temp.vector <- unlist(lapply(sampling_reduced, function(x){as.vector(x[,1])})) gm <- round(exp(sum(log(temp.vector))/length(temp.vector)),digits) rm(temp.vector) }else{ gm <- NULL } ##quantiles ##68% + 95% output.quantiles <- round(summary(sampling_reduced, quantiles = c(0.025, 0.16, 0.84, 0.975))[[2]][c("central_D", "sigma_D"), 1:4], digits) #### output data.frame with results baSAR.output <- data.frame( DISTRIBUTION = distribution, NB_ALIQUOTS = Nb_aliquots, N.CHAINS = n.chains, N.MCMC = n.MCMC, FIT_METHOD = fit.method, CENTRAL = if(is.null(gm)){output.mean[1,1]}else{gm}, CENTRAL.SD = output.mean[1,2], SIGMA = output.mean[2,1], SIGMA.SD = output.mean[2,2], CENTRAL_Q_.16 = output.quantiles[1,2], CENTRAL_Q_.84 = output.quantiles[1,3], SIGMA_Q_.16 = output.quantiles[2,2], 
SIGMA_Q_.84 = output.quantiles[2,3], CENTRAL_Q_.025 = output.quantiles[1,1], CENTRAL_Q_.975 = output.quantiles[1,4], SIGMA_Q_.025 = output.quantiles[2,1], SIGMA_Q_.975 = output.quantiles[2,4] ) return( baSAR.output = list( baSAR.output_summary = baSAR.output, baSAR.output_mcmc = sampling, models = list( cauchy = baSAR_model[["cauchy"]], normal = baSAR_model[["normal"]], log_normal = baSAR_model[["log_normal"]], user_defined = baSAR_model[["user_defined"]] ) ) ) } ##END ##//////////////////////////////////////////////////////////////////////////////////////////////// # Integrity tests ----------------------------------------------------------------------------- ##check whether rjags is available ##code snippet taken from ##http://r-pkgs.had.co.nz/description.html if (!requireNamespace("rjags", quietly = TRUE)) { stop("[analyse_baSAR()] To use this function you have to first install the package 'rjags'.", call. = FALSE) } if (!requireNamespace("coda", quietly = TRUE)) { stop("[analyse_baSAR()] To use this function you have to first install the package 'coda'.", call. = FALSE) } #capture additional piped arguments additional_arguments <- list( ##verify_SingleGrainData threshold = 30, ##calc_OSLLxTxRatio() background.count.distribution = "non-poisson", ##readxl::read_excel() sheet = 1, col_names = TRUE, col_types = NULL, skip = 0, ##read_BIN2R() n.records = NULL, duplicated.rm = TRUE, position = NULL, pattern = NULL, ##plot_GrowthCurve() fit.weights = TRUE, fit.bounds = TRUE, NumberIterations.MC = 100, output.plot = if(plot){TRUE}else{FALSE}, output.plotExtended = if(plot){TRUE}else{FALSE} ) #modify this list on purpose additional_arguments <- modifyList(x = additional_arguments, val = list(...)) ##set function arguments function_arguments <- NULL ##SET fit.method if (fit.method != "EXP" & fit.method != "EXP+LIN" & fit.method != "LIN"){ stop("[analyse_baSAR()] Unsupported fitting method. Supported: 'EXP', 'EXP+LIN' and 'LIN'", call. 
= FALSE) } # Set input ----------------------------------------------------------------------------------- ##if the input is alreayd of type RLum.Results, use the input and do not run ##all pre-calculations again if(is(object, "RLum.Results")){ if(object@originator == "analyse_baSAR"){ ##We want to use previous function arguments and recycle them ##(1) get information you need as input from the RLum.Results object function_arguments <- as.list(object@info$call) ##(2) overwrite by current provided arguments ##by using a new argument we have the choise which argument is allowed for ##changes function_arguments.new <- modifyList(x = function_arguments, val = as.list(match.call())) ##get maximum cycles max_cycles <- max(object$input_object[["CYCLES_NB"]]) ##set Nb_aliquots Nb_aliquots <- nrow(object$input_object) ##return NULL if not a minium of three aliquots are used for the calculation if(Nb_aliquots < 2){ try(stop("[analyse_baSAR()] number of aliquots < 3, this makes no sense, NULL returned!", call. = FALSE)) return(NULL) } ##set variables ##Why is.null() ... it prevents that in case of a function crash is nothing is provided ... 
##set changeable function arguments ##distribution if(!is.null(function_arguments.new$distribution)){ distribution <- function_arguments.new$distribution } ##n.MCMC if(!is.null(function_arguments.new$n.MCMC)){ n.MCMC <- function_arguments.new$n.MCMC } ##fit.method if(!is.null(function_arguments.new$fit.method)){ fit.method <- function_arguments.new$fit.method } ## fit.force_through_origin if(!is.null(function_arguments.new$fit.force_through_origin)){ fit.force_through_origin <- function_arguments.new$fit.force_through_origin } ##fit.includingRepeatedRegPoints if(!is.null(function_arguments.new$fit.includingRepeatedRegPoints)){ fit.includingRepeatedRegPoints <- function_arguments.new$fit.includingRepeatedRegPoints } ##source_doserate if(length(as.list(match.call())$source_doserate) > 0){ warning("[analyse_baSAR()] Argument 'source_doserate' is ignored in this modus, as it was alreay set.", call. = FALSE) } ##aliquot_range if(!is.null(function_arguments.new$aliquot_range)){ aliquot_range <- eval(function_arguments.new$aliquot_range) } ##method_control if(!is.null(function_arguments.new$method_control)){ method_control <- eval(function_arguments.new$method_control) } ##baSAR_model if(!is.null(function_arguments.new$baSAR_model)){ baSAR_model <- eval(function_arguments.new$baSAR_model) } ##plot if(!is.null(function_arguments.new$plot)){ plot <- function_arguments.new$plot } ##verbose if(!is.null(function_arguments.new$verbose)){ verbose <- function_arguments.new$verbose } ##limit according to aliquot_range ##TODO Take care of the case that this was provided, otherwise more and more is removed! if (!is.null(aliquot_range)) { if (max(aliquot_range) <= nrow(object$input_object)) { input_object <- object$input_object[aliquot_range, ] ##update list of removed aliquots removed_aliquots <-rbind(object$removed_aliquots, object$input_object[-aliquot_range,]) ##correct Nb_aliquots Nb_aliquots <- nrow(input_object) } else{ try(stop("[analyse_basAR()] aliquot_range out of bounds! 
Input ignored!", call. = FALSE)) ##reset aliquot range aliquot_range <- NULL ##take entire object input_object <- object$input_object ##set removed aliquots removed_aliquots <- object$removed_aliquots } } else{ ##set the normal case input_object <- object$input_object ##set removed aliquots removed_aliquots <- object$removed_aliquots } ##set non function arguments Doses <- t(input_object[,9:(8 + max_cycles)]) LxTx <- t(input_object[,(9 + max_cycles):(8 + 2 * max_cycles)]) LxTx.error <- t(input_object[,(9 + 2 * max_cycles):(8 + 3 * max_cycles)]) rm(max_cycles) }else{ stop("[analyse_baSAR()] 'object' is of type 'RLum.Results', but has not been produced by analyse_baSAR()!", call. = FALSE) } }else{ if(verbose){ cat("\n[analyse_baSAR()] ---- PRE-PROCESSING ----\n") } ##Supported input types are: ## (1) BIN-file ## .. list ## .. character ## (2) RisoeBINfileData object ## .. list ## .. S4 ## (3) RLum.Analyis objects ## .. list ## .. S4 ##In case an RLum.Analysis object is provided we try an ugly conversion only if(class(object) == "list" && all(vapply(object, function(x){class(x) == "RLum.Analysis"}, logical(1)))){ if(verbose) cat("[analyse_baSAR()] List of RLum.Analysis-objects detected .. ") ##stop for only one element if(length(object) < 2) stop("[analyse_baSAR()] At least two aliquots are needed for the calculation!", call. = FALSE) ##set number of objects if(class(object) == "list"){ n_objects <- length(object) }else{ n_objects <- 1 } ##extract wanted curves if(verbose) cat("\n\t\t .. extract 'OSL (UVVIS)' and 'irradiation (NA)'") object <- get_RLum(object, recordType = c("OSL (UVVIS)", "irradiation (NA)"), drop = FALSE) ##extract irradiation times if(is.null(irradiation_times)){ if(verbose) cat("\n\t\t .. extract irradiation times") irradiation_times <- extract_IrradiationTimes(object[[1]])$irr.times$IRR_TIME } ##run conversion if(verbose) cat("\n\t\t .. 
run conversion") object <- try(convert_RLum2Risoe.BINfileData(object), silent = TRUE) ##create fallback if(class(object) == "try-error"){ stop("[analyse_baSAR()] Object conversion failed. Return NULL!", call. = FALSE) return(NULL) } ##assign irradiation times if(is.null(irradiation_times)){ if(verbose) cat("\n\t\t .. set irradiation times") object@METADATA[["IRR_TIME"]] <- rep(irradiation_times,n_objects) } ##remove none-OSL curves if(verbose && !all("OSL" %in% object@METADATA[["LTYPE"]])){ cat("\n\t\t .. remove non-OSL curves") rm_id <- which(object@METADATA[["LTYPE"]] != "OSL") object@METADATA <- object@METADATA[-rm_id,] object@DATA[rm_id] <- NULL ##reset index object@METADATA[["ID"]] <- 1:length(object@METADATA[["ID"]]) ##delete objects rm(rm_id) } } if (is(object, "Risoe.BINfileData")) { fileBIN.list <- list(object) } else if (is(object, "list")) { ##check what the list containes ... object_type <- unique(unlist(lapply( 1:length(object), FUN = function(x) { is(object[[x]])[1] } ))) if (length(object_type) == 1) { if (object_type == "Risoe.BINfileData") { fileBIN.list <- object } else if (object_type == "character") { fileBIN.list <- read_BIN2R( file = object, position = additional_arguments$position, duplicated.rm = additional_arguments$duplicated.rm, n.records = additional_arguments$n.records, pattern = additional_arguments$pattern, verbose = verbose ) } else{ stop( "[analyse_baSAR()] data type in the input list provided for 'object' is not supported!", call. = FALSE ) } } else{ stop("[analyse_baSAR()] 'object' only accepts a list with objects of similar type!", call. = FALSE) } } else if (is(object, "character")) { fileBIN.list <- list( read_BIN2R( file = object, position = additional_arguments$position, duplicated.rm = additional_arguments$duplicated.rm, n.records = additional_arguments$n.records, verbose = verbose ) ) } else{ stop( paste0( "[analyse_baSAR()] '", is(object)[1], "' as input is not supported. Check manual for allowed input objects." ), call. 
= FALSE ) } ##Problem ... the user might have made a pre-selection in the Analyst software, if this the ##we respect this selection if(!all(unlist(lapply(fileBIN.list, FUN = function(x){(x@METADATA[["SEL"]])})))){ fileBIN.list <- lapply(fileBIN.list, function(x){ ##reduce data x@DATA <- x@DATA[x@METADATA[["SEL"]]] x@METADATA <- x@METADATA[x@METADATA[["SEL"]], ] ##reset index x@METADATA[["ID"]] <- 1:nrow(x@METADATA) return(x) }) if(verbose){ cat("\n[analyse_baSAR()] Record pre-selection in BIN-file detected >> record reduced to selection") } } # Declare variables --------------------------------------------------------------------------- Dose <- list() LxTx <- list() sLxTx <- list() Disc <- list() Grain <- list() Disc_Grain.list <- list() Nb_aliquots <- 0 previous.Nb_aliquots <- 0 object.file_name <- list() Mono_grain <- TRUE Limited_cycles <- vector() ##set information for (i in 1 : length(fileBIN.list)) { Disc[[i]] <- list() Grain[[i]] <- list() ##get BIN-file name object.file_name[[i]] <- unique(fileBIN.list[[i]]@METADATA[["FNAME"]]) } ##check for duplicated entries; remove them as they would cause a function crash if(any(duplicated(unlist(object.file_name)))){ ##provide messages if(verbose){ message(paste0( "[analyse_baSAR()] '", paste( object.file_name[which(duplicated(unlist(object.file_name)))], collapse = ", ", "' is a duplicate and therefore removed from the input!" ) )) } warning(paste0( "[analyse_baSAR()] '", paste( object.file_name[which(duplicated(unlist(object.file_name)))], collapse = ", ", "' is a duplicate and therefore removed from the input!" 
) )) ##remove entry Disc[which(duplicated(unlist(object.file_name)))] <- NULL Grain[which(duplicated(unlist(object.file_name)))] <- NULL fileBIN.list[which(duplicated(unlist(object.file_name)))] <- NULL object.file_name[which(duplicated(unlist(object.file_name)))] <- NULL } # Expand parameter list ----------------------------------------------------------------------- ##test_parameter = source_doserate if(!is.null(source_doserate)){ if(is(source_doserate, "list")){ source_doserate <- rep(source_doserate, length = length(fileBIN.list)) }else{ source_doserate <- rep(list(source_doserate), length = length(fileBIN.list)) } }else{ stop("[analyse_baSAR()] 'source_doserate' is missing, but required as the current implementation expects dose values in Gy!", call. = FALSE) } ##sigmab if(is(sigmab, "list")){ sigmab <- rep(sigmab, length = length(fileBIN.list)) }else{ sigmab <- rep(list(sigmab), length = length(fileBIN.list)) } ##sig0 if(is(sig0, "list")){ sig0 <- rep(sig0, length = length(fileBIN.list)) }else{ sig0 <- rep(list(sig0), length = length(fileBIN.list)) } ##test_parameter = signal.integral if(is(signal.integral, "list")){ signal.integral <- rep(signal.integral, length = length(fileBIN.list)) }else{ signal.integral <- rep(list(signal.integral), length = length(fileBIN.list)) } ##test_parameter = signal.integral.Tx if (!is.null(signal.integral.Tx)) { if (is(signal.integral.Tx, "list")) { signal.integral.Tx <- rep(signal.integral.Tx, length = length(fileBIN.list)) } else{ signal.integral.Tx <- rep(list(signal.integral.Tx), length = length(fileBIN.list)) } } ##test_parameter = background.integral if(is(background.integral, "list")){ background.integral <- rep(background.integral, length = length(fileBIN.list)) }else{ background.integral <- rep(list(background.integral), length = length(fileBIN.list)) } ##test_parameter = background.integral if(is(background.integral, "list")){ background.integral <- rep(background.integral, length = length(fileBIN.list)) }else{ 
background.integral <- rep(list(background.integral), length = length(fileBIN.list)) } ##test_parameter = background.integral.Tx if (!is.null(background.integral.Tx)) { if (is(background.integral.Tx, "list")) { background.integral.Tx <- rep(background.integral.Tx, length = length(fileBIN.list)) } else{ background.integral.Tx <- rep(list(background.integral.Tx), length = length(fileBIN.list)) } } # Read EXCEL sheet ---------------------------------------------------------------------------- if(is.null(XLS_file)){ ##select aliquots giving light only, this function accepts also a list as input if(verbose){ cat("\n[analyse_baSAR()] No XLS-file provided, running automatic grain selection ...") } for (k in 1:length(fileBIN.list)) { ##if the uses provides only multiple grain data (GRAIN == 0), the verification ##here makes not really sense and should be skipped if(length(unique(fileBIN.list[[k]]@METADATA[["GRAIN"]])) > 1){ aliquot_selection <- verify_SingleGrainData( object = fileBIN.list[[k]], cleanup_level = "aliquot", threshold = additional_arguments$threshold, cleanup = FALSE ) ##remove grain position 0 (this are usually TL measurements on the cup or we are talking about multipe aliquot) if (sum(aliquot_selection$unique_pairs[["GRAIN"]] == 0, na.rm = TRUE) > 0) { warning( paste( "[analyse_baSAR()] Automatic grain selection:", sum(aliquot_selection$unique_pairs[["GRAIN"]] == 0, na.rm = TRUE), "curve(s) with grain index 0 had been removed from the dataset." ), call. = FALSE ) } datalu <- aliquot_selection$unique_pairs[!aliquot_selection$unique_pairs[["GRAIN"]] == 0,] if(nrow(datalu) == 0){ try(stop("[analyse_baSAR()] Sorry, nothing was left after the automatic grain selection! NULL returned!", call. = FALSE)) return(NULL) } }else{ warning("[analyse_baSAR()] Only multiple grain data provided, automatic selection skipped!", call. 
= FALSE) datalu <- unique(fileBIN.list[[k]]@METADATA[, c("POSITION", "GRAIN")]) ##set mono grain to FALSE Mono_grain <- FALSE aliquot_selection <- NA } ##get number of aliquots (one aliquot has a position and a grain number) Nb_aliquots <- nrow(datalu) ##write information in variables Disc[[k]] <- datalu[["POSITION"]] Grain[[k]] <- datalu[["GRAIN"]] ##free memory rm(datalu, aliquot_selection) } rm(k) } else if (is(XLS_file, "data.frame") || is(XLS_file, "character")) { ##load file if we have an XLS file if (is(XLS_file, "character")) { ##test for valid file if(!file.exists(XLS_file)){ stop("[analyse_baSAR()] XLS_file does not exist!", call. = FALSE) } ##import Excel sheet datalu <- as.data.frame(readxl::read_excel( path = XLS_file, sheet = additional_arguments$sheet, col_names = additional_arguments$col_names, col_types = additional_arguments$col_types, skip = additional_arguments$skip ), stringsAsFactors = FALSE) ###check whether data format is somehow odd, check only the first three columns if(!all(grepl(colnames(datalu), pattern = " ")[1:3])){ stop("[analyse_baSAR()] One of the first three columns in your XLS_file has no column header. Your XLS_file requires at least three columns for 'BIN_file', 'DISC' and 'GRAIN'", call. = FALSE) } ##get rid of empty rows if the BIN_FILE name column is empty datalu <- datalu[!is.na(datalu[[1]]), ] } else{ datalu <- XLS_file ##check number of number of columns in data.frame if(ncol(datalu) < 3){ stop("[analyse_baSAR()] The data.frame provided via XLS_file should consist of at least three columns (see manual)!", call. = FALSE) } ##problem: the first column should be of type character, the others are ##of type numeric, unfortunately it is too risky to rely on the user, we do the ##proper conversion by ourself ... 
datalu[[1]] <- as.character(datalu[[1]]) datalu[[2]] <- as.numeric(datalu[[2]]) datalu[[3]] <- as.numeric(datalu[[3]]) } ##limit aliquot range if (!is.null(aliquot_range)) { datalu <- datalu[aliquot_range,] } Nb_ali <- 0 k <- NULL for (nn in 1:length((datalu[, 1]))) { if (!is.na(datalu[nn, 1])) { ##check wether one file fits if (any(grepl( pattern = strsplit( x = basename(datalu[nn, 1]), split = ".", fixed = TRUE )[[1]][1], x = unlist(object.file_name) ))) { k <- grep(pattern = strsplit( x = basename(datalu[nn, 1]), split = ".", fixed = TRUE )[[1]][1], x = unlist(object.file_name)) nj <- length(Disc[[k]]) + 1 Disc[[k]][nj] <- as.numeric(datalu[nn, 2]) Grain[[k]][nj] <- as.numeric(datalu[nn, 3]) Nb_ali <- Nb_ali + 1 if (is.na(Grain[[k]][nj]) || Grain[[k]][nj] == 0) { Mono_grain <- FALSE } }else{ warning( paste0("[analyse_baSAR] '", (datalu[nn, 1]), "' not recognized or not loaded; skipped!"), call. = FALSE ) } } else{ if (Nb_ali == 0) { stop("[analyse_baSAR()] Nb. discs/grains = 0 !", call. = FALSE) } break() } } ##if k is NULL it means it was not set so far, so there was ##no corresponding BIN-file found if(is.null(k)){ stop("[analyse_baSAR()] BIN-file names in XLS-file do not fit to the loaded BIN-files!", call. = FALSE) } } else{ stop("[analyse_baSAR()] input type for 'XLS_file' not supported!", call. = FALSE) } ###################################### loops on files_number for (k in 1:length(fileBIN.list)) { Disc_Grain.list[[k]] <- list() # data.file number n_aliquots_k <- length((Disc[[k]])) if(n_aliquots_k == 0){ fileBIN.list[[k]] <- NULL if(verbose){ message(paste("[analyse_baSAR()] No data has been seletecd from BIN-file", k, ">> BIN-file removed from input!")) } warning(paste("[analyse_baSAR()] No data has been seletecd from BIN-file", k, ">> BIN-file removed from input!"), call. 
= FALSE) next() } for (d in 1:n_aliquots_k) { dd <- as.integer(unlist(Disc[[k]][d])) Disc_Grain.list[[k]][[dd]] <- list() # data.file number , disc_number } for (d in 1:n_aliquots_k) { dd <- as.integer(unlist(Disc[[k]][d])) if (Mono_grain == FALSE) { gg <- 1 } if (Mono_grain == TRUE) { gg <- as.integer(unlist(Grain[[k]][d]))} Disc_Grain.list[[k]][[dd]][[gg]] <- list() # data.file number , disc_number, grain_number for (z in 1:6) { Disc_Grain.list[[k]][[dd]][[gg]][[z]] <- list() # 1 = index numbers, 2 = irradiation doses, 3 = LxTx , 4 = sLxTx, 5 = N d'aliquot, 6 = De +- D0 +- (4 values) } } } if(verbose){ cat("\n[analyse_baSAR()] Preliminary analysis in progress ... ") cat("\n[analyse_baSAR()] Hang on, this may take a while ... \n") } for (k in 1:length(fileBIN.list)) { n_index.vector <- vector("numeric") measured_discs.vector <- vector("numeric") measured_grains.vector <- vector("numeric") measured_grains.vector_list <- vector("numeric") irrad_time.vector <- vector("numeric") disc_pos <- vector("numeric") grain_pos <- vector("numeric") ### METADATA length_BIN <- length(fileBIN.list[[k]]) n_index.vector <- fileBIN.list[[k]]@METADATA[["ID"]][1:length_BIN] # curves indexes vector measured_discs.vector <- fileBIN.list[[k]]@METADATA[["POSITION"]][1:length_BIN] # measured discs vector measured_grains.vector <- fileBIN.list[[k]]@METADATA[["GRAIN"]][1:length_BIN] # measured grains vector if(is.null(irradiation_times)){ irrad_time.vector <- fileBIN.list[[k]]@METADATA[["IRR_TIME"]][1:length_BIN] # irradiation durations vector }else{ irrad_time.vector <- rep(irradiation_times,n_objects) } ##if all irradiation times are 0 we should stop here if (length(unique(irrad_time.vector)) == 1) { try(stop( "[analyse_baSAR()] It appears the the irradiation times are all the same. Analysis stopped and NULL returned!", call. 
= FALSE )) return(NULL) } disc_pos <- as.integer(unlist(Disc[[k]])) grain_pos <- as.integer(unlist(Grain[[k]])) ### Automatic Filling - Disc_Grain.list for (i in 1: length(Disc[[k]])) { disc_selected <- as.integer(Disc[[k]][i]) if (Mono_grain == TRUE) {grain_selected <- as.integer(Grain[[k]][i])} else { grain_selected <-0} ##hard break if the disc number or grain number does not fit ##disc (position) disc_logic <- (disc_selected == measured_discs.vector) if (!any(disc_logic)) { try(stop( paste0( "[analyse_baSAR()] In BIN-file '", unique(fileBIN.list[[k]]@METADATA[["FNAME"]]), "' position number ", disc_selected, " does not exist! NULL returned!" ), call. = FALSE )) return(NULL) } ##grain grain_logic <- (grain_selected == measured_grains.vector) if (!any(grain_logic)) { try(stop( paste0( "[analyse_baSAR()] In BIN-file '", unique(fileBIN.list[[k]]@METADATA[["FNAME"]]), "' grain number ", grain_selected, " does not exist! NULL returned!" ), call. = FALSE )) return(NULL) } ##if the test passed, compile index list index_liste <- n_index.vector[disc_logic & grain_logic] if (Mono_grain == FALSE) {grain_selected <-1} for (kn in 1: length(index_liste)) { t <- index_liste[kn] ##check if the source_doserate is NULL or not if(!is.null(unlist(source_doserate))){ dose.value <- irrad_time.vector[t] * unlist(source_doserate[[k]][1]) }else{ dose.value <- irrad_time.vector[t] } s <- 1 + length( Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[1]] ) Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[1]][s] <- n_index.vector[t] # indexes if ( s%%2 == 1) { Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[2]][as.integer(1+s/2)] <- dose.value } # irradiation doses } } } ###################### Data associated with a single Disc/Grain max_cycles <- 0 count <- 1 calc_OSLLxTxRatio_warning <- list() for (k in 1:length(fileBIN.list)) { if (Mono_grain == TRUE) (max.grains <- 100) else (max.grains <- 1) ##plot Ln and Tn curves if wanted ##we want to plot the Ln and Tn 
curves to get a better feeling ##The approach here is rather rough coded, but it works if (plot) { curve_index <- vapply((1:length(Disc[[k]])), function(i) { disc_selected <- as.integer(Disc[[k]][i]) if (Mono_grain == TRUE) { grain_selected <- as.integer(Grain[[k]][i]) } else { grain_selected <- 1 } Ln_index <- as.numeric(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[1]][1]) Tn_index <- as.numeric(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[1]][2]) return(c(Ln_index, Tn_index)) }, FUN.VALUE = vector(mode = "numeric", length = 2)) ##set matrix for Ln values Ln_matrix <- cbind(1:length(fileBIN.list[[k]]@DATA[[curve_index[1, 1]]]), matrix(unlist(fileBIN.list[[k]]@DATA[curve_index[1, ]]), ncol = ncol(curve_index))) Tn_matrix <- cbind(1:length(fileBIN.list[[k]]@DATA[[curve_index[2, 1]]]), matrix(unlist(fileBIN.list[[k]]@DATA[curve_index[2, ]]), ncol = ncol(curve_index))) ##open plot are if(!plot.single){ par.default <- par()$mfrow par(mfrow = c(1, 2)) } ##get natural curve and combine them in matrix graphics::matplot( x = Ln_matrix[, 1], y = Ln_matrix[, -1], col = rgb(0, 0, 0, 0.3), ylab = "Luminescence [a.u.]", xlab = "Channel", main = expression(paste(L[n], " - curves")), type = "l" ) ##add integration limits abline(v = range(signal.integral[[k]]), lty = 2, col = "green") abline(v = range(background.integral[[k]]), lty = 2, col = "red") mtext(paste0("ALQ: ",count, ":", count + ncol(curve_index))) graphics::matplot( x = Tn_matrix[, 1], y = Tn_matrix[, -1], col = rgb(0, 0, 0, 0.3), ylab = "Luminescence [a.u.]", xlab = "Channel", main = expression(paste(T[n], " - curves")), type = "l" ) ##add integration limits depending on the choosen value if(is.null(signal.integral.Tx[[k]])){ abline(v = range(signal.integral[[k]]), lty = 2, col = "green") }else{ abline(v = range(signal.integral.Tx[[k]]), lty = 2, col = "green") } if(is.null(background.integral.Tx[[k]])){ abline(v = range(background.integral[[k]]), lty = 2, col = "red") }else{ abline(v = 
range(background.integral.Tx[[k]]), lty = 2, col = "red") } mtext(paste0("ALQ: ",count, ":", count + ncol(curve_index))) ##reset par if(!plot.single){ par(mfrow = par.default) } ##remove some variables rm(curve_index, Ln_matrix, Tn_matrix) } for (i in 1:length(Disc[[k]])) { disc_selected <- as.integer(Disc[[k]][i]) if (Mono_grain == TRUE) { grain_selected <- as.integer(Grain[[k]][i]) } else { grain_selected <- 1 } # Data for the selected Disc-Grain for (nb_index in 1:((length(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[1]]))/2 )) { index1 <- as.numeric(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[1]][2*nb_index-1]) index2 <- as.numeric(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[1]][2*nb_index]) Lx.data <- data.frame(seq(1:length( fileBIN.list[[k]]@DATA[[index1]])), fileBIN.list[[k]]@DATA[[index1]]) Tx.data <- data.frame(seq(1:length( fileBIN.list[[k]]@DATA[[index2]])), fileBIN.list[[k]]@DATA[[index2]]) ## call calc_OSLLxTxRatio() ## we run this function with a warnings catcher to reduce the load of warnings for the user temp_LxTx <- withCallingHandlers( calc_OSLLxTxRatio( Lx.data = Lx.data, Tx.data = Tx.data, signal.integral = signal.integral[[k]], signal.integral.Tx = signal.integral.Tx[[k]], background.integral = background.integral[[k]], background.integral.Tx = background.integral.Tx[[k]], background.count.distribution = additional_arguments$background.count.distribution, sigmab = sigmab[[k]], sig0 = sig0[[k]] ), warning = function(c) { calc_OSLLxTxRatio_warning[[i]] <<- c invokeRestart("muffleWarning") } ) ##get LxTx table LxTx.table <- temp_LxTx$LxTx.table Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[3]][nb_index] <- LxTx.table[[9]] Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[4]][nb_index] <- LxTx.table[[10]] Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[5]][nb_index] <- LxTx.table[[7]] ##free memory rm(LxTx.table) rm(temp_LxTx) } # Fitting Growth curve and Plot sample_dose <- 
unlist(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[2]]) sample_LxTx <- unlist(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[3]]) sample_sLxTx <- unlist(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[4]]) TnTx <- unlist(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[5]]) ##create needed data.frame (this way to make sure that rows are doubled if something is missing) selected_sample <- as.data.frame(cbind(sample_dose, sample_LxTx, sample_sLxTx, TnTx)) print(additional_arguments) ##call plot_GrowthCurve() to get De and De value fitcurve <- suppressWarnings(plot_GrowthCurve( sample = selected_sample, na.rm = TRUE, fit.method = fit.method, fit.force_through_origin = fit.force_through_origin, fit.weights = additional_arguments$fit.weights, fit.includingRepeatedRegPoints = fit.includingRepeatedRegPoints, fit.bounds = additional_arguments$fit.bounds, NumberIterations.MC = additional_arguments$NumberIterations.MC, output.plot = additional_arguments$output.plot, output.plotExtended = additional_arguments$output.plotExtended, txtProgressBar = FALSE, verbose = verbose, main = paste0("ALQ: ", count," | POS: ", Disc[[k]][i], " | GRAIN: ", Grain[[k]][i]) )) ##get data.frame with De values if(!is.null(fitcurve)){ fitcurve_De <- get_RLum(fitcurve, data.object = "De") Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[6]][1] <- fitcurve_De[["De"]] Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[6]][2] <- fitcurve_De[["De.Error"]] Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[6]][3] <- fitcurve_De[["D01"]] Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[6]][4] <- fitcurve_De[["D01.ERROR"]] }else{ ##we have to do this, otherwise the grains will be sorted out Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[6]][1:4] <- NA } Limited_cycles[previous.Nb_aliquots + i] <- length(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[2]]) if 
(length(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[2]]) > max_cycles) { max_cycles <- length(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[2]]) } previous.Nb_aliquots <- length(stats::na.exclude(Limited_cycles)) # Total count of aliquots count <- count + 1 } } ## END of loop on BIN files rm(count) ##evaluate warnings from calc_OSLLxTxRatio() if(length(calc_OSLLxTxRatio_warning)>0){ w_table <- table(unlist(calc_OSLLxTxRatio_warning)) w_table_names <- names(w_table) for(w in 1:length(w_table)){ warning(paste(w_table_names[w], "This warning occurred", w_table[w], "times!"), call. = FALSE) } rm(w_table) rm(w_table_names) } rm(calc_OSLLxTxRatio_warning) Nb_aliquots <- previous.Nb_aliquots ##create results matrix OUTPUT_results <- matrix(nrow = Nb_aliquots, ncol = (8 + 3 * max_cycles), byrow = TRUE) ## set column name (this makes it much easier to debug) colnames(OUTPUT_results) <- c( "INDEX_BINfile", "DISC", "GRAIN", "DE", "DE.SD", "D0", "D0.SD", "CYCLES_NB", paste0("DOSE_", 1:max_cycles), paste0("LxTx_", 1:max_cycles), paste0("LxTx_", 1:max_cycles, ".SD") ) comptage <- 0 for (k in 1:length(fileBIN.list)) { for (i in 1:length(Disc[[k]])) { disc_selected <- as.numeric(Disc[[k]][i]) if (Mono_grain == TRUE) { grain_selected <- as.numeric(Grain[[k]][i]) } else { grain_selected <- 1 } comptage <- comptage + 1 OUTPUT_results[comptage, 1] <- k OUTPUT_results[comptage, 2] <- as.numeric(disc_selected) if (Mono_grain == TRUE) { OUTPUT_results[comptage, 3] <- grain_selected } else { OUTPUT_results[comptage, 3] <- 0 } if (length(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[6]]) != 0) { ##DE OUTPUT_results[comptage, 4] <- as.numeric(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[6]][1]) ##DE.SD OUTPUT_results[comptage, 5] <- as.numeric(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[6]][2]) ##D0 OUTPUT_results[comptage, 6] <- as.numeric(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[6]][3]) ##D0.SD 
OUTPUT_results[comptage, 7] <- as.numeric(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[6]][4]) ##CYCLES_NB OUTPUT_results[comptage, 8] <- length(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[2]]) ##auxillary variable llong <- length(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[2]]) ##Dose OUTPUT_results[comptage, 9:(8 + llong)] <- as.numeric(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[2]]) ##LxTx values OUTPUT_results[comptage, (9 + max_cycles):(8 + max_cycles + llong)] <- as.numeric(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[3]]) ##LxTx SD values OUTPUT_results[comptage, (9 + 2*max_cycles):(8 + 2*max_cycles + llong)] <- as.numeric(Disc_Grain.list[[k]][[disc_selected]][[grain_selected]][[4]]) } } } ##Clean matrix and remove all unwanted entries ##remove all NA columns, means all NA columns in POSITION and DISC ##this NA values are no calculation artefacts, but coming from the data processing and have ##no further value OUTPUT_results <- OUTPUT_results[!is.na(OUTPUT_results[,2]),] ##clean up NaN values in the LxTx and corresponding error values ##the transposition of the matrix may increase the performance for very large matricies OUTPUT_results_reduced <- t(OUTPUT_results) selection <- vapply(X = 1:ncol(OUTPUT_results_reduced), FUN = function(x){ !any(is.nan(OUTPUT_results_reduced[9:(8+3*max_cycles), x]) | is.infinite(OUTPUT_results_reduced[9:(8+3*max_cycles), x])) }, FUN.VALUE = vector(mode = "logical", length = 1)) removed_aliquots <- t(OUTPUT_results_reduced[,!selection]) OUTPUT_results_reduced <- t(OUTPUT_results_reduced[,selection]) ##finally, check for difference in the number of dose points ... they should be the same if(length(unique(OUTPUT_results_reduced[,"CYCLES_NB"])) > 1){ warning("[analyse_baSAR()] The number of dose points differs across your data set. Check your data!", call. 
= FALSE) } ##correct number of aliquots if necessary if(Nb_aliquots > nrow(OUTPUT_results_reduced)) { Nb_aliquots <- nrow(OUTPUT_results_reduced) warning( paste0( "[analyse_baSAR()] 'Nb_aliquots' corrected due to NaN or Inf values in Lx and/or Tx to ", Nb_aliquots, ". You might want to check 'removed_aliquots' in the function output."), call. = FALSE) } ##Prepare for Bayesian analysis Doses <- t(OUTPUT_results_reduced[,9:(8 + max_cycles)]) LxTx <- t(OUTPUT_results_reduced[, (9 + max_cycles):(8 + 2 * max_cycles)]) LxTx.error <- t(OUTPUT_results_reduced[, (9 + 2 * max_cycles):(8 + 3 * max_cycles)]) ##prepare data frame for output that can used as input input_object <- data.frame( BIN_FILE = unlist(object.file_name)[OUTPUT_results_reduced[[1]]], OUTPUT_results_reduced[, -1], stringsAsFactors = FALSE ) ##prepare data frame for output that shows rejected aliquots if (length(removed_aliquots) > 0) { removed_aliquots <- as.data.frame(removed_aliquots, stringsAsFactors = FALSE) removed_aliquots <- cbind(BIN_FILE = unlist(object.file_name)[removed_aliquots[[1]]], removed_aliquots[, -1]) }else{ removed_aliquots <- NULL } } # Call baSAR-function ------------------------------------------------------------------------- ##check for the central_D bound settings ##Why do we use 0 and 1000: Combes et al., 2015 wrote ## that "We set the bounds for the prior on the central dose D, Dmin = 0 Gy and ## Dmax = 1000 Gy, to cover the likely range of possible values for D. ##check if something is set in method control, if not, set it if (is.null(method_control[["upper_centralD"]])) { method_control <- c(method_control, upper_centralD = 1000) }else{ if(distribution == "normal" | distribution == "cauchy" | distribution == "log_normal"){ warning("[analyse_baSAR()] You have modified the upper central_D boundary, while applying a predefined model. This is possible but not recommended!", call. 
= FALSE) } } ##we do the same for the lower_centralD, just to have everthing in one place if (is.null(method_control[["lower_centralD"]])) { method_control <- c(method_control, lower_centralD = 0) }else{ if(distribution == "normal" | distribution == "cauchy" | distribution == "log_normal"){ warning("[analyse_baSAR()] You have modified the lower central_D boundary while applying a predefined model. This is possible but not recommended!", call. = FALSE) } } if(min(input_object[["DE"]][input_object[["DE"]] > 0], na.rm = TRUE) < method_control$lower_centralD | max(input_object[["DE"]], na.rm = TRUE) > method_control$upper_centralD){ warning("[analyse_baSAR()] Your set lower_centralD and/or upper_centralD value seem to do not fit to your input data. This may indicate a wronlgy set 'source_doserate'.", call. = FALSE) } ##>> try here is much better, as the user might run a very long preprocessing and do not ##want to fail here results <- try(.baSAR_function( Nb_aliquots = Nb_aliquots, distribution = distribution, data.Dose = Doses, data.Lum = LxTx, data.sLum = LxTx.error, fit.method = fit.method, n.MCMC = n.MCMC, fit.force_through_origin = fit.force_through_origin, fit.includingRepeatedRegPoints = fit.includingRepeatedRegPoints, method_control = method_control, baSAR_model = baSAR_model, verbose = verbose )) ##check whether this became NULL if(!is(results, "try-error")){ ##how do we add the systematic error? 
##(1) source_doserate is a list, not a vector, but the user can ##provide many source dose rates and he can provide only a single vector (no error) if(!is.null(unlist(source_doserate)) || !is.null(function_arguments$source_doserate)){ ##if it comes from the previous call, it is, unfortunately not that simple if(!is.null(function_arguments$source_doserate)){ source_doserate <- eval(function_arguments$source_doserate) if(!is(source_doserate, "list")){ source_doserate <- list(source_doserate) } } systematic_error <- unlist(lapply(source_doserate, function(x){ if(length(x) == 2) { x[2] } else{ NULL } })) }else{ systematic_error <- 0 } ##state are warning for very different errors if(mean(systematic_error) != systematic_error[1]){ warning("[analyse_baSAR()] Provided source dose rate errors differ. The mean was taken, but the calculated systematic error might be not valid!", .call = FALSE) } ##add to the final de DE_FINAL.ERROR <- sqrt(results[[1]][["CENTRAL.SD"]]^2 + mean(systematic_error)^2) ##consider the case that we get NA and this might be confusing if(is.na(DE_FINAL.ERROR)){ DE_FINAL.ERROR <- results[[1]][["CENTRAL.SD"]] } ##combine results[[1]] <- cbind(results[[1]], DE_FINAL = results[[1]][["CENTRAL"]], DE_FINAL.ERROR = DE_FINAL.ERROR) }else{ results <- NULL verbose <- FALSE plot <- FALSE } # Terminal output ----------------------------------------------------------------------------- if(verbose){ cat("++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n\n") cat("\n[analyse_baSAR()] ---- RESULTS ---- \n") cat("------------------------------------------------------------------\n") cat(paste0("Used distribution:\t\t", results[[1]][["DISTRIBUTION"]],"\n")) if(!is.null(removed_aliquots)){ if(!is.null(aliquot_range)){ cat(paste0("Number of aliquots used:\t", results[[1]][["NB_ALIQUOTS"]],"/", results[[1]][["NB_ALIQUOTS"]] + nrow(removed_aliquots), " (manually removed: " ,length(aliquot_range),")\n")) }else{ cat(paste0("Number of aliquots used:\t", 
results[[1]][["NB_ALIQUOTS"]],"/", results[[1]][["NB_ALIQUOTS"]] + nrow(removed_aliquots),"\n")) } }else{ cat(paste0("Number of aliquots used:\t", results[[1]][["NB_ALIQUOTS"]],"/", results[[1]][["NB_ALIQUOTS"]],"\n")) } if(!is.null(baSAR_model)){ cat(paste0("Considered fitting method:\t", results[[1]][["FIT_METHOD"]]," (user defined)\n")) }else{ cat(paste0("Considered fitting method:\t", results[[1]][["FIT_METHOD"]],"\n")) } cat(paste0("Number of independent chains:\t", results[[1]][["N.CHAINS"]],"\n")) cat(paste0("Number MCMC iterations/chain:\t", results[[1]][["N.MCMC"]],"\n")) cat("------------------------------------------------------------------\n") if(distribution == "log_normal"){ cat("\t\t\t\tmean*\tsd\tHPD\n") }else{ cat("\t\t\t\tmean\tsd\tHPD\n") } cat(paste0(">> Central dose:\t\t", results[[1]][["CENTRAL"]],"\t", results[[1]][["CENTRAL.SD"]],"\t", "[", results[[1]][["CENTRAL_Q_.16"]]," ; ", results[[1]][["CENTRAL_Q_.84"]], "]**\t")) cat(paste0("\n\t\t\t\t\t\t[", results[[1]][["CENTRAL_Q_.025"]]," ; ", results[[1]][["CENTRAL_Q_.975"]],"]***")) cat(paste0("\n>> sigma_D:\t\t\t", results[[1]][["SIGMA"]],"\t", results[[1]][["SIGMA.SD"]], "\t", "[",results[[1]][["SIGMA_Q_.16"]]," ; ", results[[1]][["SIGMA_Q_.84"]], "]**\t")) cat(paste0("\n\t\t\t\t\t\t[",results[[1]][["SIGMA_Q_.025"]]," ; ", results[[1]][["SIGMA_Q_.975"]], "]***")) cat(paste0("\n>> Final central De:\t\t", results[[1]][["DE_FINAL"]],"\t", round(results[[1]][["DE_FINAL.ERROR"]], digits = digits), "\t", " - \t -")) cat("\n------------------------------------------------------------------\n") cat( paste("(systematic error contribution to final De:", format((1-results[[1]][["CENTRAL.SD"]]/results[[1]][["DE_FINAL.ERROR"]])*100, scientific = TRUE), "%)\n") ) if(distribution == "log_normal"){ cat("* mean of the central dose is the geometric mean\n") } cat("** 68 % level | *** 95 % level\n") } # Plotting ------------------------------------------------------------------------------------ if(plot){ 
##get colours from the package Luminescence col <- get("col", pos = .LuminescenceEnv) ##get list of variable names (we need them later) varnames <- coda::varnames(results[[2]]) ##//////////////////////////////////////////////////////////////////////////////////////////// ##TRACE AND DENSITY PLOT ####////////////////////////////////////////////////////////////////////////////////////////// if(plot_reduced){ plot_check <- try(plot(results[[2]][,c("central_D","sigma_D"),drop = FALSE]), silent = TRUE) ##show error if(is(plot_check, "try-error")){ stop("[analyse_baSAR()] Plots for 'central_D' and 'sigma_D' could not be produced. You are probably monitoring the wrong variables!", .call = FALSE) } }else{ try(plot(results[[2]])) } ##//////////////////////////////////////////////////////////////////////////////////////////// ##TRUE DOSE PLOT AND DECISION MAKER ####////////////////////////////////////////////////////////////////////////////////////////// if (!plot.single) { par(mfrow = c(2, 2)) } ##get list with D values ##get list out of it plot_matrix <- as.matrix(results[[2]][,grep(x = varnames, pattern = "D[", fixed = TRUE)]) aliquot_quantiles <- t(matrixStats::colQuantiles(x = plot_matrix, probs = c(0.25,0.75))) ##define boxplot colours ... 
we have red and orange box.col <- vapply(1:ncol(aliquot_quantiles), function(x){ if(aliquot_quantiles[2,x] < results[[1]][,c("CENTRAL_Q_.025")] | aliquot_quantiles[1,x] > results[[1]][,c("CENTRAL_Q_.975")] ){ col[2] }else if(aliquot_quantiles[2,x] < results[[1]][,c("CENTRAL_Q_.16")] | aliquot_quantiles[1,x] > results[[1]][,c("CENTRAL_Q_.84")]){ "orange" }else{ "white" } }, FUN.VALUE = vector(mode = "character", length = 1)) ##to assure a minium of quality not more then 15 boxes a plotted in each plot i <- 1 while(i < ncol(plot_matrix)){ step <- if((i + 14) > ncol(plot_matrix)){ncol(plot_matrix)}else{i + 14} plot_check <- try(boxplot( x = plot_matrix[,i:step], use.cols = TRUE, horizontal = TRUE, outline = TRUE, col = box.col[i:step], xlab = if(is.null(unlist(source_doserate))){"Dose [s]"}else{"Dose [Gy]"}, ylab = "Aliquot index", yaxt = "n", xlim = c(1,19), main = paste0("Individual Doses | ALQ: ", i,":",step) )) if(!is(plot_check, "try-error")){ if(step == ncol(plot_matrix)){ axis(side = 2, at = 1:15, labels = as.character(c(i:step, rep(" ", length = 15 - length(i:step)))), cex.axis = 0.8 ) }else{ axis(side = 2, at = 1:15, labels = as.character(i:step), cex.axis = 0.8) } ##add HPD with text ##HPD - 68% lines( x = c( results[[1]][, c("CENTRAL_Q_.16")], results[[1]][, c("CENTRAL_Q_.16")], results[[1]][, c("CENTRAL_Q_.84")], results[[1]][, c("CENTRAL_Q_.84")]), y = c(par()$usr[3], 16, 16, par()$usr[3]), lty = 3, col = col[3], lwd = 1.5 ) text( x = results[[1]][, c("CENTRAL")], y = 16, labels = "68 %", pos = 3, col = col[3], cex = 0.9 * par()$cex ) ##HPD - 98 %% lines( x = c( results[[1]][, c("CENTRAL_Q_.025")], results[[1]][, c("CENTRAL_Q_.025")], results[[1]][, c("CENTRAL_Q_.975")], results[[1]][, c("CENTRAL_Q_.975")]), y = c(par()$usr[3], 17.5, 17.5, par()$usr[3]), lty = 3, col = col[2], lwd = 1.5 ) text( x = results[[1]][, c("CENTRAL")], y = 17.5, labels = "95 %", pos = 3, col = col[2], cex = 0.9 * par()$cex) } ##update counter i <- i + 15 } rm(plot_matrix) 
if(!plot.single){ par(mfrow = c(1,2)) on.exit(par(mfrow = c(1,1), bg = "white", xpd = FALSE)) } ##//////////////////////////////////////////////////////////////////////////////////////////// ##DOSE RESPONSE CURVES AND Lx/Tx VALUES ####////////////////////////////////////////////////////////////////////////////////////////// ##define selection vector selection <- c("a[", "b[", "c[", "g[", "Q[1,") ##get list out of it list_selection <- lapply(X = selection, FUN = function(x){ unlist(results[[2]][,grep(x = varnames, pattern = x, fixed = TRUE)]) }) ##create matrix plot_matrix <- t(do.call(what = "cbind", args = list_selection)) ##free memory rm(list_selection) ##make selection according to the model for the curve plotting if (fit.method == "EXP") {ExpoGC <- 1 ; LinGC <- 0 } if (fit.method == "LIN") {ExpoGC <- 0 ; LinGC <- 1 } if (fit.method == "EXP+LIN") {ExpoGC <- 1 ; LinGC <- 1 } if (fit.force_through_origin) {GC_Origin <- 0} else {GC_Origin <- 1} ##add choise for own provided model if(!is.null(baSAR_model)){ fit.method_plot <- paste(fit.method, "(user defined)") }else{ fit.method_plot <- fit.method } ##open plot area ##for the xlim and ylim we have to identify the proper ranges based on the input xlim <- c(0, max(input_object[,grep(x = colnames(input_object), pattern = "DOSE")], na.rm = TRUE)*1.1) ylim <- c( min(input_object[,grep(x = colnames(input_object), pattern = "LxTx")], na.rm = TRUE), max(input_object[,grep(x = colnames(input_object), pattern = "LxTx")], na.rm = TRUE)*1.1) ##check for position of the legend ... 
we can do better if(results[[1]][["CENTRAL_Q_.975"]] < max(xlim)/2){ legend_pos <- "topright" }else{ legend_pos <- "topleft" } ##set plot area plot_check <- try(plot( NA, NA, ylim = ylim, xlim = xlim, ylab = expression(paste(L[x] / T[x])), xlab = if(is.null(unlist(source_doserate))){"Dose [s]"}else{"Dose [Gy]"}, main = "baSAR Dose Response Curves" )) if (!is(plot_check, "try-error")) { ##add mtext mtext(side = 3, text = paste("Fit:", fit.method_plot)) ##check whether we have all data we need (might be not the case of the user ##selects own variables) if (ncol(plot_matrix) != 0) { ##plot individual dose response curves x <- NA for (i in seq(1, ncol(plot_matrix), length.out = 1000)) { curve( GC_Origin * plot_matrix[4, i] + LinGC * (plot_matrix[3, i] * x) + ExpoGC * (plot_matrix[1, i] * (1 - exp ( -x / plot_matrix[2, i] ))), add = TRUE, col = rgb(0, 0, 0, .1) ) } }else{ try(stop("[analyse_baSAR()] Wrong 'variable.names' monitored, dose responses curves could not be plotted!", call. = FALSE)) } ##add dose points n.col <- length(input_object[, grep(x = colnames(input_object), pattern = "DOSE")]) ##add rug with natural Lx/Tx rug(side = 2, x = input_object[[9 + n.col]]) ##plot Lx/Tx values .. without errors ... 
this is enough here for (i in 2:length(input_object[, grep(x = colnames(input_object), pattern = "DOSE")])) { ##add error bars segments( x0 = input_object[[8 + i]], x1 = input_object[[8 + i]], y0 = input_object[[8 + n.col + i]] - input_object[[8 + 2 * n.col + i]], y1 = input_object[[8 + n.col + i]] + input_object[[8 + 2 * n.col + i]], col = "grey" ) ##add points in the top of it points( x = input_object[[8 + i]], y = input_object[[8 + n.col + i]], pch = 21, col = col[11], bg = "grey" ) } ##add ablines abline( v = results[[1]][, c("CENTRAL_Q_.16", "CENTRAL_Q_.84")], lty = 3, col = col[3], lwd = 1.2 ) abline(v = results[[1]][, c("CENTRAL_Q_.025", "CENTRAL_Q_.975")], lty = 2, col = col[2]) ##add legend1 legend( legend_pos, bty = "n", horiz = FALSE, lty = c(3, 2), col = c(col[3], col[2]), legend = c("HPD - 68 %", "HPD - 95 %") ) ##add legend2 legend( "bottomright", bty = "n", horiz = FALSE, pch = 21, col = col[11], bg = "grey", legend = "measured dose points" ) } ##remove object, it might be rather big rm(plot_matrix) ##03 Abanico Plot if(distribution_plot == "abanico"){ plot_check <- plot_AbanicoPlot( data = input_object[, c("DE", "DE.SD")], zlab = if(is.null(unlist(source_doserate))){expression(paste(D[e], " [s]"))}else{expression(paste(D[e], " [Gy]"))}, log.z = if (distribution != "log_normal") { FALSE } else{ TRUE }, z.0 = results[[1]]$CENTRAL, y.axis = FALSE, polygon.col = FALSE, line = results[[1]][,c( "CENTRAL_Q_.16", "CENTRAL_Q_.84", "CENTRAL_Q_.025", "CENTRAL_Q_.975")], line.col = c(col[3], col[3], col[2], col[2]), line.lty = c(3,3,2,2), output = TRUE, mtext = paste0( nrow(input_object) - length(which(is.na(input_object[, c("DE", "DE.SD")]))), "/", nrow(input_object), " plotted (removed are NA values)" ) ) if (!is.null(plot_check)) { legend( "topleft", legend = c("Central dose", "HPD - 68%", "HPD - 95 %"), lty = c(2, 3, 2), col = c("black", col[3], col[2]), bty = "n", cex = par()$cex * 0.8 ) } }else{ plot_check <- NULL } ##In case the Abanico plot will not 
work because of negative values ##provide a KDE if(is.null(plot_check) && distribution_plot == "kde"){ plot_check <- try(suppressWarnings(plot_KDE( data = input_object[, c("DE", "DE.SD")], xlab = if(is.null(unlist(source_doserate))){expression(paste(D[e], " [s]"))}else{expression(paste(D[e], " [Gy]"))}, mtext = paste0( nrow(input_object) - length(which(is.na(input_object[, c("DE", "DE.SD")]))), "/", nrow(input_object), " (removed are NA values)" ) ))) if(!is(plot_check, "try-error")) { abline(v = results[[1]]$CENTRAL, lty = 2) abline( v = results[[1]][, c("CENTRAL_Q_.16", "CENTRAL_Q_.84")], lty = 3, col = col[3], lwd = 1.2 ) abline(v = results[[1]][, c("CENTRAL_Q_.025", "CENTRAL_Q_.975")], lty = 2, col = col[2]) ##check for position of the legend if(results[[1]][["CENTRAL_Q_.975"]] < max(xlim)/2){ legend_pos <- "right" }else{ legend_pos <- "topleft" } legend( legend_pos, legend = c("Central dose", "HPD - 68%", "HPD - 95 %"), lty = c(2, 3, 2), col = c("black", col[3], col[2]), bty = "n", cex = par()$cex * 0.8 ) } } } # Return -------------------------------------------------------------------------------------- return(set_RLum( class = "RLum.Results", data = list( summary = results[[1]], mcmc = results[[2]], models = results[[3]], input_object = input_object, removed_aliquots = removed_aliquots ), info = list(call = sys.call()) )) } Luminescence/R/calc_OSLLxTxRatio.R0000644000176200001440000004274013571743147016471 0ustar liggesusers#' Calculate Lx/Tx ratio for CW-OSL curves #' #' Calculate Lx/Tx ratios from a given set of CW-OSL curves assuming late light #' background subtraction. #' #' The integrity of the chosen values for the signal and background integral is #' checked by the function; the signal integral limits have to be lower than #' the background integral limits. If a [vector] is given as input instead #' of a [data.frame], an artificial [data.frame] is produced. The #' error calculation is done according to Galbraith (2002). 
#' #' **Please note:** In cases where the calculation results in `NaN` values (for #' example due to zero-signal, and therefore a division of 0 by 0), these `NaN` #' values are replaced by 0. #' #' **sigmab** #' #' The default value of `sigmab` is calculated assuming the background is #' constant and **would not** be applicable when the background varies as, #' e.g., observed for the early light subtraction method. #' #' **sig0** #' #' This argument allows adding an extra component of error to the final Lx/Tx #' error value. The input will be treated as a factor that is multiplied with #' the already calculated LxTx and the result is added up by: #' #' \deqn{se(LxTx) = \sqrt(se(LxTx)^2 + (LxTx * sig0)^2)} #' #' #' **background.count.distribution** #' #' This argument allows selecting the distribution assumption that is used for #' the error calculation. According to Galbraith (2002, 2014) the background #' counts may be overdispersed (i.e. do not follow a Poisson distribution, #' which is assumed for the photomultiplier counts). In that case (which might be the #' normal case) the overdispersion has to be accounted for by estimating #' \eqn{\sigma^2} (i.e. the overdispersion value). Therefore the relative #' standard error is calculated as: #' #' - `poisson` #' \deqn{rse(\mu_{S}) \approx \sqrt(Y_{0} + Y_{1}/k^2)/Y_{0} - Y_{1}/k} #' - `non-poisson` #' \deqn{rse(\mu_{S}) \approx \sqrt(Y_{0} + Y_{1}/k^2 + \sigma^2(1+1/k))/Y_{0} - Y_{1}/k} #' #' **Please note** that when using the early background subtraction method in #' combination with the 'non-poisson' distribution argument, the corresponding Lx/Tx error #' may considerably increase due to a high sigmab value. #' Please check whether this is valid for your data set and if necessary #' consider providing your own sigmab value using the corresponding argument `sigmab`. 
#' #' @param Lx.data [RLum.Data.Curve-class] or [data.frame] (**required**): #' requires a CW-OSL shine down curve (x = time, y = counts) #' #' @param Tx.data [RLum.Data.Curve-class] or [data.frame] (*optional*): #' requires a CW-OSL shine down curve (x = time, y = counts). If no #' input is given the Tx.data will be treated as `NA` and no Lx/Tx ratio #' is calculated. #' #' @param signal.integral [vector] (**required**): #' vector with the limits for the signal integral. #' #' @param signal.integral.Tx [vector] (*optional*): #' vector with the limits for the signal integral for the Tx curve. If nothing is provided the #' value from `signal.integral` is used. #' #' @param background.integral [vector] (**required**): #' vector with the bounds for the background integral. #' #' @param background.integral.Tx [vector] (*optional*): #' vector with the limits for the background integral for the Tx curve. #' If nothing is provided the value from `background.integral` is used. #' #' @param background.count.distribution [character] (*with default*): #' sets the count distribution assumed for the error calculation. #' Possible arguments `poisson` or `non-poisson`. See details for further information. #' #' @param use_previousBG [logical] (*with default*): #' If set to `TRUE` the background of the Lx-signal is subtracted also #' from the Tx-signal. Please note that in this case separate #' signal integral limits for the Tx signal are not allowed and will be reset. #' #' @param sigmab [numeric] (*optional*): #' option to set a manual value for the overdispersion (for LnTx and TnTx), #' used for the Lx/Tx error calculation. The value should be provided as #' absolute squared count values, e.g. `sigmab = c(300,300)`. #' **Note:** If only one value is provided this value is taken for both (LnTx and TnTx) signals. #' #' @param sig0 [numeric] (*with default*): #' allows adding an extra component of error to the final Lx/Tx error value #' (e.g., instrumental error, see details). 
#' #' @param digits [integer] (*with default*): #' round numbers to the specified digits. #' If digits is set to `NULL` nothing is rounded. #' #' @return #' Returns an S4 object of type [RLum.Results-class]. #' #' Slot `data` contains a [list] with the following structure: #' #' **@data** #' ``` #' $LxTx.table (data.frame) #' .. $ LnLx #' .. $ LnLx.BG #' .. $ TnTx #' .. $ TnTx.BG #' .. $ Net_LnLx #' .. $ Net_LnLx.Error #' .. $ Net_TnTx.Error #' .. $ LxTx #' .. $ LxTx.Error #' $ calc.parameters (list) #' .. $ sigmab.LnTx #' .. $ sigmab.TnTx #' .. $ k #' ``` #' #' **@info** #' ``` #' $ call (original function call) #' ``` #' #' @note #' The results of this function have been cross-checked with the Analyst #' (vers. 3.24b). Access to the results object via [get_RLum]. #' #' **Caution:** If you are using early light subtraction (EBG), please either provide your #' own `sigmab` value or use `background.count.distribution = "poisson"`. #' #' @section Function version: 0.7.0 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #' @seealso [RLum.Data.Curve-class], [Analyse_SAR.OSLdata], [plot_GrowthCurve], #' [analyse_SAR.CWOSL] #' #' @references Duller, G., 2018. Analyst v4.57 - User Manual. #' [http://users.aber.ac.uk/ggd/]()\cr #' #' Galbraith, R.F., 2002. A note on the variance of a background-corrected OSL #' count. Ancient TL, 20 (2), 49-51. #' #' Galbraith, R.F., 2014. A further note on the variance of a #' background-corrected OSL count. Ancient TL, 31 (2), 1-3. 
#' #' @keywords datagen #' #' @examples #' #' ##load data #' data(ExampleData.LxTxOSLData, envir = environment()) #' #' ##calculate Lx/Tx ratio #' results <- calc_OSLLxTxRatio(Lx.data, Tx.data, signal.integral = c(1:2), #' background.integral = c(85:100)) #' #' ##get results object #' get_RLum(results) #' #' @md #' @export calc_OSLLxTxRatio <- function( Lx.data, Tx.data = NULL, signal.integral, signal.integral.Tx = NULL, background.integral, background.integral.Tx = NULL, background.count.distribution = "non-poisson", use_previousBG = FALSE, sigmab = NULL, sig0 = 0, digits = NULL ){ ##--------------------------------------------------------------------------## ##(1) - integrity checks if(!is.null(Tx.data)){ ##(a) - check data type if(is(Lx.data)[1]!=is(Tx.data)[1]){ stop("[calc_OSLLxTxRatio()] Data type of Lx and Tx data differs!") } ##(b) - test if data.type is valid in general if(is(Lx.data)[1] == "RLum.Data.Curve"){ Lx.data <- as(Lx.data, "data.frame") Tx.data <- as(Tx.data, "data.frame") }else{ ##go further if((is(Lx.data)[1] != "data.frame" & is(Lx.data)[1] != "numeric") & is(Lx.data)[1] != "matrix"){ stop("[calc_OSLLxTxRatio()] Data type error! 
Required types are data.frame or numeric vector.") } } ##(c) - convert vector to data.frame if nescessary if(is(Lx.data)[1] != "data.frame" & is(Lx.data)[1] != "matrix"){ Lx.data <- data.frame(x=1:length(Lx.data),y=Lx.data) Tx.data <- data.frame(x=1:length(Tx.data),y=Tx.data) } ##(d) - check if Lx and Tx curves have the same channel length if(length(Lx.data[,2]) != length(Tx.data[,2])){ stop("[calc_OSLLxTxRatio()] Channel numbers of Lx and Tx data differ!")} }else{ Tx.data <- data.frame(x = NA,y = NA) ##support RLum.objects if(is(Lx.data)[1] == "RLum.Data.Curve"){ Lx.data <- as(Lx.data, "data.frame") } ##check for matrix if(is(Lx.data)[1] == "matrix"){ Lx.data <- as.data.frame(Lx.data) } ##no it should be a data.frame, if not, try to produce one if(is(Lx.data)[1]!="data.frame") { Lx.data <- data.frame(x = 1:length(Lx.data),y = Lx.data) } }#endif::missing Tx.data ##(e) - check if signal integral is valid if(min(signal.integral) < 1 | max(signal.integral>length(Lx.data[,2]))){ stop("[calc_OSLLxTxRatio()] signal.integral is not valid!")} ##(f) - check if background integral is valid if(min(background.integral)<1 | max(background.integral>length(Lx.data[,2]))){ stop(paste("[calc_OSLLxTxRatio()] background.integral is not valid! Max: ",length(Lx.data[,2]),sep=""))} ##(g) - check if signal and background integral overlapping if(min(background.integral)<=max(signal.integral)){ stop("[calc_OSLLxTxRatio()] Overlapping of 'signal.integral' and 'background.integral' is not permitted!")} ##(h) - similar procedure for the Tx limits if(all(c(!is.null(signal.integral.Tx),!is.null(background.integral.Tx)))){ if(use_previousBG){ warning("[calc_OSLLxTxRatio()] For option use_previousBG = TRUE independent Lx and Tx integral limits are not allowed. Integral limits of Lx used for Tx.", call. 
= FALSE) signal.integral.Tx <- signal.integral background.integral.Tx <- background.integral } if(min(signal.integral.Tx) < 1 | max(signal.integral.Tx>length(Tx.data[,2]))){ stop("[calc_OSLLxTxRatio()] signal.integral.Tx is not valid!")} if(min(background.integral.Tx)<1 | max(background.integral.Tx>length(Tx.data[,2]))){ stop(paste("[calc_OSLLxTxRatio()] background.integral.Tx is not valid! Max: ",length(Tx.data[,2]),sep=""))} if(min(background.integral.Tx)<=max(signal.integral.Tx)){ stop("[calc_OSLLxTxRatio()] Overlapping of 'signal.integral.Tx' and 'background.integral.Tx' is not permitted!")} }else if(!all(c(is.null(signal.integral.Tx),is.null(background.integral.Tx)))){ stop("[calc_OSLLxTxRatio()] You have to provide both: signal.integral.Tx and background.integral.Tx!", call. = FALSE) }else{ signal.integral.Tx <- signal.integral background.integral.Tx <- background.integral } ##check sigmab if (!is.null(sigmab)) { if (!is(sigmab, "numeric")) { stop("[calc_OSLLxTxRatio()] 'sigmab' has to be of type numeric.") } if (length(sigmab) > 2) { stop("[calc_OSLLxTxRatio()] Maximum allowed vector length for 'sigmab' is 2.", call. 
= FALSE) } } ##--------------------------------------------------------------------------## ##(2) - read data and produce background subtracted values ## calculate k value - express the background as mutiple value from the number ## of signal integral channels, however, it can be < 1 also n <- length(signal.integral) m <- length(background.integral) k <- m/n n.Tx <- length(signal.integral.Tx) ##use previous BG and account for the option to set different integral limits if(use_previousBG){ m.Tx <- m }else{ m.Tx <- length(background.integral.Tx) } k.Tx <- m.Tx/n.Tx ##LnLx (comments are corresponding variables to Galbraith, 2002) Lx.curve <- Lx.data[,2] Lx.signal <- sum(Lx.curve[signal.integral]) #Y.0 Lx.background <- sum(Lx.curve[background.integral]) #Y.1 Lx.background <- Lx.background*1/k #mu.B LnLx <- Lx.signal - Lx.background ##TnTx Tx.curve <- ifelse(is.na(Tx.data[,1])==FALSE, Tx.data[,2], NA) Tx.signal <- sum(Tx.curve[signal.integral.Tx]) ##use previous BG if(use_previousBG){ Tx.background <- Lx.background }else{ Tx.background <- sum(Tx.curve[background.integral.Tx])*1/k.Tx } TnTx <- (Tx.signal-Tx.background) ##--------------------------------------------------------------------------## ##(3) ## calculate Lx/Tx Errors according Galbraith (2002) and the personal ## communication of Galbraith (2014) via e-mail ## Nomenclature as stated in the articles ##(a) ## set Y.0 (sum OSL signal including the background) and ## Y.1 (total counts over m later channels) Y.0 <- Lx.signal Y.0_TnTx <- Tx.signal Y.1 <- sum(Lx.curve[background.integral]) Y.1_TnTx <- sum(Tx.curve[background.integral.Tx]) ##(b) estimate overdispersion (here called sigmab), see equation (4) in ## Galbraith (2002), Galbraith (2014) ## If else condition for the case that k < 2 if(round(k,digits = 1) >= 2 & ((min(background.integral) + length(signal.integral)*(2+1)) <= length(Lx.curve))){ ##(b)(1)(1) ## note that m = n*k = multiple of background.integral from signal.integral Y.i <- 
vapply(0:round(k,digits=0), function(i){ sum(Lx.curve[ (min(background.integral)+length(signal.integral)*i): (min(background.integral)+length(signal.integral)+length(signal.integral)*i)]) }, FUN.VALUE = vector(mode = "numeric", length = 1L)) Y.i <- na.exclude(Y.i) sigmab.LnLx <- abs(var(Y.i) - mean(Y.i)) ##sigmab is denoted as sigma^2 = s.Y^2-Y.mean ##therefore here absolute values are given }else{ ## provide warning if m is < 25, as suggested by Rex Galbraith ## low number of degree of freedom if (m < 25) { warning("[calc_OSLLxTxRatio()] Number of background channels for Lx < 25; error estimation might be not reliable!", call. = FALSE) } sigmab.LnLx <- abs((var(Lx.curve[background.integral]) - mean(Lx.curve[background.integral])) * n) } if (round(k.Tx, digits = 1) >= 2 & (( min(background.integral.Tx) + length(signal.integral.Tx) * (2 + 1) ) <= length(Tx.curve))) { ##(b)(1)(1) ## note that m.Tx = n.Tx*k.Tx = multiple of background.integral.Tx from signal.integral.Tx ## also for the TnTx signal Y.i_TnTx <- vapply(0:round(k.Tx, digits = 0), function(i) { sum(Tx.curve[(min(background.integral.Tx) + length(signal.integral.Tx) * i):( min(background.integral.Tx) + length(signal.integral.Tx) + length(signal.integral.Tx) * i )]) }, FUN.VALUE = vector(mode = "numeric", length = 1L)) Y.i_TnTx <- na.exclude(Y.i_TnTx) sigmab.TnTx <- abs(var(Y.i_TnTx) - mean(Y.i_TnTx)) } else{ ## provide warning if m is < 25, as suggested by Rex Galbraith ## low number of degree of freedom if (m.Tx < 25 && use_previousBG == FALSE) { warning("[calc_OSLLxTxRatio()] Number of background channels for Tx < 25; error estimation might be not reliable!", call. 
= FALSE) } sigmab.TnTx <- abs((var(Tx.curve[background.integral.Tx]) - mean(Tx.curve[background.integral.Tx])) * n.Tx) } ##account for a manually set sigmab value if (!is.null(sigmab)) { if (length(sigmab) == 2) { sigmab.LnLx <- sigmab[1] sigmab.TnTx <- sigmab[2] }else{ sigmab.LnLx <- sigmab[1] sigmab.TnTx <- sigmab[1] } } ##(c) ## Calculate relative error of the background subtracted signal ## according to Galbratith (2002), equation (6) with changes ## from Galbraith (2014), equation 6 ## Discussion with Rex Galbraith via e-mail (2014-02-27): ## Equation 6 is approriate to be implemented as standard if(background.count.distribution == "poisson"){ ##(c.1) estimate relative standard error for assuming a poisson distribution LnLx.relError <- sqrt((Y.0 + Y.1/k^2))/(Y.0-Y.1/k) ## rse(mu.s) TnTx.relError <- sqrt((Y.0_TnTx + Y.1_TnTx/k^2))/(Y.0_TnTx-Y.1_TnTx/k) }else{ ##(c.2) estimate relative standard error for a non-poisson distribution if(background.count.distribution != "non-poisson"){ warning("Unknown method for background.count.distribution. 
A non-poisson distribution is assumed!")} LnLx.relError <- sqrt(Y.0 + Y.1/k^2 + sigmab.LnLx*(1+1/k))/ (Y.0 - Y.1/k) TnTx.relError <- sqrt(Y.0_TnTx + Y.1_TnTx/k^2 + sigmab.TnTx*(1+1/k))/ (Y.0_TnTx - Y.1_TnTx/k) } ##(d) ##calculate absolute standard error LnLx.Error <- abs(LnLx*LnLx.relError) TnTx.Error <- abs(TnTx*TnTx.relError) ##we do not want to have NaN values, as they are mathematically correct, but make ##no sense and would result in aliquots that become rejected later if(is.nan(LnLx.Error)) LnLx.Error <- 0 if(is.nan(TnTx.Error)) TnTx.Error <- 0 ##combine results LnLxTnTx <- cbind( Lx.signal, Lx.background, Tx.signal, Tx.background, LnLx, LnLx.Error, TnTx, TnTx.Error ) ##--------------------------------------------------------------------------## ##(4) Calculate LxTx error according Galbraith (2014) #transform results in a data.frame LnLxTnTx <- as.data.frame((LnLxTnTx)) #add col names colnames(LnLxTnTx)<-c("LnLx", "LnLx.BG", "TnTx", "TnTx.BG", "Net_LnLx", "Net_LnLx.Error", "Net_TnTx", "Net_TnTx.Error") ##calculate Ln/Tx LxTx <- LnLxTnTx$Net_LnLx/LnLxTnTx$Net_TnTx ##set NaN if(is.nan(LxTx)) LxTx <- 0 ##calculate Ln/Tx error LxTx.relError <- sqrt(LnLx.relError^2 + TnTx.relError^2) LxTx.Error <- abs(LxTx * LxTx.relError) ##set NaN if(is.nan(LxTx.Error)) LxTx.Error <- 0 ##add an extra component of error LxTx.Error <- sqrt(LxTx.Error^2 + (sig0 * LxTx)^2) ##return combined values temp <- cbind(LnLxTnTx,LxTx,LxTx.Error) ##apply digits if wanted if(!is.null(digits)){ temp[1,] <- round(temp[1,], digits = digits) } calc.parameters <- list(sigmab.LnLx = sigmab.LnLx, sigmab.TnTx = sigmab.TnTx, k = k) ##set results object temp.return <- set_RLum( class = "RLum.Results", data = list( LxTx.table = temp, calc.parameters = calc.parameters), info = list(call = sys.call()) ) invisible(temp.return) } Luminescence/R/template_DRAC.R0000644000176200001440000006600413243267664015637 0ustar liggesusers#' Create a DRAC input data template (v1.2) #' #' This function returns a DRAC 
input template (v1.2) to be used in conjunction
#' with the use_DRAC() function
#'
#' @param nrow [integer] (*with default*):
#' specifies the number of rows of the template (i.e., the number of data
#' sets you want to submit).
#'
#' @param preset [character] (*optional*):
#' By default, all values of the template are set to `NA`, which means that
#' the user needs to fill in **all** data first before submitting to DRAC
#' using `use_DRAC()`. To reduce the number of values that need to be
#' provided, `preset` can be used to create a template with at least
#' a minimum of reasonable preset values.
#'
#' `preset` can be one of the following:
#' - `quartz_coarse`
#' - `quartz_fine`
#' - `feldspar_coarse`
#' - `polymineral_fine`
#' - `DRAC-example_quartz`
#' - `DRAC-example_feldspar`
#' - `DRAC-example_polymineral`
#'
#' Note that the last three options can be used to produce a template
#' with values directly taken from the official DRAC input .csv file.
#'
#' @param notification [logical] (*with default*):
#' show or hide the notification
#'
#' @return A list.
#'
#' @author
#' Christoph Burow, University of Cologne (Germany), Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France)
#'
#' @references
#' Durcan, J.A., King, G.E., Duller, G.A.T., 2015. DRAC: Dose Rate and Age Calculator for trapped charge dating.
#' Quaternary Geochronology 28, 54-61. doi:10.1016/j.quageo.2015.03.012
#'
#' @seealso [as.data.frame], [list]
#'
#' @examples
#'
#' # create a new DRAC input template
#' input <- template_DRAC(preset = "DRAC-example_quartz")
#'
#' # show content of the input
#' print(input)
#' print(input$`Project ID`)
#' print(input[[4]])
#'
#'
#' ## Example: DRAC Quartz example
#' # note that you only have to assign new values where they
#' # are different to the default values
#' input$`Project ID` <- "DRAC-Example"
#' input$`Sample ID` <- "Quartz"
#' input$`Conversion factors` <- "AdamiecAitken1998"
#' input$`External U (ppm)` <- 3.4
#' input$`errExternal U (ppm)` <- 0.51
#' input$`External Th (ppm)` <- 14.47
#' input$`errExternal Th (ppm)` <- 1.69
#' input$`External K (%)` <- 1.2
#' input$`errExternal K (%)` <- 0.14
#' input$`Calculate external Rb from K conc?` <- "N"
#' input$`Calculate internal Rb from K conc?` <- "N"
#' input$`Scale gammadoserate at shallow depths?` <- "N"
#' input$`Grain size min (microns)` <- 90
#' input$`Grain size max (microns)` <- 125
#' input$`Water content ((wet weight - dry weight)/dry weight) %` <- 5
#' input$`errWater content %` <- 2
#' input$`Depth (m)` <- 2.2
#' input$`errDepth (m)` <- 0.22
#' input$`Overburden density (g cm-3)` <- 1.8
#' input$`errOverburden density (g cm-3)` <- 0.1
#' input$`Latitude (decimal degrees)` <- 30.0000
#' input$`Longitude (decimal degrees)` <- 70.0000
#' input$`Altitude (m)` <- 150
#' input$`De (Gy)` <- 20
#' input$`errDe (Gy)` <- 0.2
#'
#' # use DRAC
#' \dontrun{
#' output <- use_DRAC(input)
#' }
#'
#' @md
#' @export
template_DRAC <- function(
  nrow = 1,
  preset = NULL,
  notification = TRUE
){

  ## TODO:
  # 1 - allow mineral specific presets; new argument 'mineral'
  # 2 - add option to return the DRAC example data set

  ## DRAC accepts at most 33 data sets per submission, hence the upper limit
  if (nrow < 0 | nrow > 33)
    stop("'nrow' must be a number between 0 and 33.", call. = FALSE)

  ## PRESETS ----
  valid_presets <- c("quartz_coarse", "quartz_fine", "feldspar_coarse", "polymineral_fine",
                     "DRAC-example_quartz", "DRAC-example_feldspar", "DRAC-example_polymineral")

  ## 'preset' must be a single character string out of the valid set above
  if (!is.null(preset)) {
    if (length(preset) != 1 || !is.character(preset))
      stop("\n[template_DRAC()]: Argument 'preset' must be a 'character' of length 1.", call. = FALSE)

    if (!preset %in% valid_presets)
      stop("\n[template_DRAC()]: Invalid preset. Please use on of the following: ",
           paste(valid_presets, collapse = ", "), call. = FALSE)
  }

  ## LEGAL NOTICE ----
  # each element is printed on its own line via message() below
  messages <- list("\n",
                   "\t-------------------- IMPORTANT NOTE ------------------------\n",
                   "\t This function returns a DRAC input template to be used in ",
                   "\t conjunction with the use_DRAC() function.  \n",
                   "\t The template was reproduced with great care, but we do not",
                   "\t take any responsibility and we are not liable for any ",
                   "\t mistakes or unforeseen misbehaviour.",
                   "\t Note that this template is only compatible with DRAC",
                   "\t version 1.1. Before using this template make sure that",
                   "\t this is the correct version, otherwise expect unspecified",
                   "\t errors.\n",
                   "\t Please ensure you cite the use of DRAC in your work,",
                   "\t published or otherwise. Please cite the website name and",
                   "\t version (e.g. DRAC v1.1) and the accompanying journal",
                   "\t article:",
                   "\t Durcan, J.A., King, G.E., Duller, G.A.T., 2015.",
                   "\t DRAC: Dose rate and age calculation for trapped charge",
                   "\t dating. Quaternary Geochronology 28, 54-61. \n",
                   "\t Set 'notification = FALSE' to hide this message. \n",
                   "\t-------------------- IMPORTANT NOTE ------------------------",
                   "\n")

  if (notification) lapply(messages, message)

  # CREATE TEMPLATE ----
  ## Each list element is one DRAC input column, a vector of length 'nrow'.
  ## Every column carries DRAC metadata as attributes: 'required' (mandatory
  ## input?), 'allowsX' (may the value be 'X' = not required?), 'key' (the
  ## official DRAC column ID "TI:n") and 'description' (the official help text).
  ## Columns with a fixed set of valid inputs are factors with those levels.
  template <- list(
    `Project ID` = structure(rep(NA_character_, nrow), required = TRUE, allowsX = FALSE, key = "TI:1", description = "Inputs can be alphabetic, numeric or selected symbols (/ - () [] _). Spaces are not permitted."), #

    `Sample ID` = structure(rep(NA_character_, nrow), required = TRUE, allowsX = FALSE, key = "TI:2", description = "Inputs can be alphabetic, numeric or selected symbols (/ - () [] _). Spaces are not permitted."), #

    `Mineral` = structure(factor(rep(NA_character_, nrow), c("Q", "F", "PM")), required = TRUE, allowsX = FALSE, key = "TI:3", description = "The mineral used for dating: quartz, feldspar or polymineral. Input must be 'Q', 'F' or 'PM'."), #

    `Conversion factors` = structure(factor(rep(NA_character_, nrow), c("AdamiecAitken1998", "Guerinetal2011", "Liritzisetal2013", "X")), required = FALSE, allowsX = TRUE, key = "TI:4", description = "The conversion factors required to calculate dose rates from radionuclide concentrations. Users have the option of datasets from Adamiec and Aitken (1998), Guerin et al. (2011) or Liritzis et al. (2013). Input must be 'AdamiecAitken1998', 'Guerinetal2011', 'Liritzisetal2013' or 'X' if conversion factors are not required."), #

    `External U (ppm)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:5", description = "Radionuclide concentrations in parts per million for Uranium, Thorium and Rubidium and % for Potassium. Inputs must be 0 or positive and should not be left blank."), #

    `errExternal U (ppm)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:6", description = "Radionuclide concentrations in parts per million for Uranium, Thorium and Rubidium and % for Potassium. Inputs must be 0 or positive and should not be left blank."), #

    `External Th (ppm)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:7", description = "Radionuclide concentrations in parts per million for Uranium, Thorium and Rubidium and % for Potassium. Inputs must be 0 or positive and should not be left blank."), #

    `errExternal Th (ppm)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:8", description = "Radionuclide concentrations in parts per million for Uranium, Thorium and Rubidium and % for Potassium. Inputs must be 0 or positive and should not be left blank."), #

    `External K (%)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:9", description = "Radionuclide concentrations in parts per million for Uranium, Thorium and Rubidium and % for Potassium. Inputs must be 0 or positive and should not be left blank."), #

    `errExternal K (%)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:10", description = "Radionuclide concentrations in parts per million for Uranium, Thorium and Rubidium and % for Potassium. Inputs must be 0 or positive and should not be left blank."), #

    `External Rb (ppm)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:11", description = "Radionuclide concentrations in parts per million for Uranium, Thorium and Rubidium and % for Potassium. Inputs must be 0 or positive and should not be left blank."), #

    `errExternal Rb (ppm)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:12", description = "Radionuclide concentrations in parts per million for Uranium, Thorium and Rubidium and % for Potassium. Inputs must be 0 or positive and should not be left blank."), #

    `Calculate external Rb from K conc?` = structure(factor(rep(NA_character_, nrow), c("Y", "N")), required = FALSE, allowsX = FALSE, key = "TI:13", description = "Option to calculate a Rubidium concentration from Potassium, using the 270:1 ratio suggested by Mejdahl (1987). Input should be yes 'Y' or no 'N'."), #

    `Internal U (ppm)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:14", description = "Internal radionuclide concentrations in parts per million for Uranium, Thorium and Rubidium and % for Potassium. Inputs must be 0 or positive and should not be left blank."), #

    `errInternal U (ppm)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:15", description = "Internal radionuclide concentrations in parts per million for Uranium, Thorium and Rubidium and % for Potassium. Inputs must be 0 or positive and should not be left blank."), #

    `Internal Th (ppm)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:16", description = "Internal radionuclide concentrations in parts per million for Uranium, Thorium and Rubidium and % for Potassium. Inputs must be 0 or positive and should not be left blank."), #

    `errInternal Th (ppm)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:17", description = "Internal radionuclide concentrations in parts per million for Uranium, Thorium and Rubidium and % for Potassium. Inputs must be 0 or positive and should not be left blank."), #

    `Internal K (%)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:18", description = "Internal radionuclide concentrations in parts per million for Uranium, Thorium and Rubidium and % for Potassium. Inputs must be 0 or positive and should not be left blank."), #

    `errInternal K (%)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:19", description = "Internal radionuclide concentrations in parts per million for Uranium, Thorium and Rubidium and % for Potassium. Inputs must be 0 or positive and should not be left blank."), #

    `Rb (ppm)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:20", description = "Internal radionuclide concentrations in parts per million for Uranium, Thorium and Rubidium and % for Potassium. Inputs must be 0 or positive and should not be left blank."), #

    `errRb (ppm)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:21", description = "Internal radionuclide concentrations in parts per million for Uranium, Thorium and Rubidium and % for Potassium. Inputs must be 0 or positive and should not be left blank."), #

    `Calculate internal Rb from K conc?` = structure(factor(rep(NA_character_, nrow), c("Y", "N", "X")), required = FALSE, allowsX = TRUE, key = "TI:22", description = "Option to calculate an internal Rubidium concentration from Potassium, using the 270:1 ratio suggested by Mejdahl (1987). Input should be yes 'Y' or no 'N'."), #

    `User external alphadoserate (Gy.ka-1)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:23", description = "Users may input directly measured values for external alpha, beta and gamma dose rates (in Gy.ka-1). Any positive inputs in these fields will override dose rates calculated from radionuclide concentrations. Inputs should be 0 or positive and should not be left blank"), #

    `errUser external alphadoserate (Gy.ka-1)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:24", description = "Users may input directly measured values for external alpha, beta and gamma dose rates (in Gy.ka-1). Any positive inputs in these fields will override dose rates calculated from radionuclide concentrations. Inputs should be 0 or positive and should not be left blank"), #

    `User external betadoserate (Gy.ka-1)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:25", description = "Users may input directly measured values for external alpha, beta and gamma dose rates (in Gy.ka-1). Any positive inputs in these fields will override dose rates calculated from radionuclide concentrations. Inputs should be 0 or positive and should not be left blank"), #

    `errUser external betadoserate (Gy.ka-1)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:26", description = "Users may input directly measured values for external alpha, beta and gamma dose rates (in Gy.ka-1). Any positive inputs in these fields will override dose rates calculated from radionuclide concentrations. Inputs should be 0 or positive and should not be left blank"), #

    `User external gamma doserate (Gy.ka-1)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:27", description = "Users may input directly measured values for external alpha, beta and gamma dose rates (in Gy.ka-1). Any positive inputs in these fields will override dose rates calculated from radionuclide concentrations. Inputs should be 0 or positive and should not be left blank"), #

    `errUser external gammadoserate (Gy.ka-1)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:28", description = "Users may input directly measured values for external alpha, beta and gamma dose rates (in Gy.ka-1). Any positive inputs in these fields will override dose rates calculated from radionuclide concentrations. Inputs should be 0 or positive and should not be left blank"), #

    `User internal doserate (Gy.ka-1)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:29", description = "Users may input an internal dose rate (either alpha, beta or the sum of the two; in Gy.ka-1). DRAC will assume that this value has already been corrected for attenuation. Inputs in this field will override dose rates calculated from radionuclide concentrations. Inputs should be 0 or positive and not left blank."), #

    `errUser internal doserate (Gy.ka-1)` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:30", description = "Users may input an internal dose rate (either alpha, beta or the sum of the two; in Gy.ka-1). DRAC will assume that this value has already been corrected for attenuation. Inputs in this field will override dose rates calculated from radionuclide concentrations. Inputs should be 0 or positive and not left blank."), #

    `Scale gammadoserate at shallow depths?` = structure(factor(rep(NA_character_, nrow), c("Y", "N")), required = FALSE, allowsX = FALSE, key = "TI:31", description = "Users may choose to scale gamma dose rates for samples taken within 0.3 m of the ground surface. The scaling factors of Aitken (1985) are used. Input should be yes 'Y' or no 'N'."), #

    `Grain size min (microns)` = structure(rep(NA_real_, nrow), required = TRUE, allowsX = FALSE, key = "TI:32", description = "The grain size range analysed. DRAC can be used for the grain size ranges between 1 and 1000 microns. Inputs should range between 1 and 1000 and not be left blank."), #

    `Grain size max (microns)` = structure(rep(NA_real_, nrow), required = TRUE, allowsX = FALSE, key = "TI:33", description = "The grain size range analysed. DRAC can be used for the grain size ranges between 1 and 1000 microns. Inputs should range between 1 and 1000 and not be left blank."), #

    `alpha-Grain size attenuation` = structure(factor(rep(NA_character_, nrow), c("Bell1980", "Brennanetal1991")), required = TRUE, allowsX = FALSE, key = "TI:34", description = "The grain size attenuation factors for the alpha dose rate. Users have the option of datasets from Bell (1980) and Brennan et al. (1991). Input must be 'Bell1980' or 'Brennanetal1991'."), #

    `beta-Grain size attenuation ` = structure(factor(rep(NA_character_, nrow), c("Mejdahl1979", "Brennan2003", "Guerinetal2012-Q", "Guerinetal2012-F")), required = TRUE, allowsX = FALSE, key = "TI:35", description = "The grain size attenuation factors for the beta dose rate. Users have the option of datasets from Mejdahl (1979), Brennan (2003) and Guerin et al. (2012) for quartz or feldspar. Input must be 'Mejdahl1979', 'Brennan2003', 'Guerinetal2012-Q' or 'Guerinetal2012-F' ."), #

    `Etch depth min (microns)` = structure(rep(NA_real_, nrow), required = TRUE, allowsX = FALSE, key = "TI:36", description = "The user defined etch depth range (microns). Inputs should range between 0 and 30 and not be left blank."), #

    `Etch depth max (microns)` = structure(rep(NA_real_, nrow), required = TRUE, allowsX = FALSE, key = "TI:37", description = "The user defined etch depth range (microns). Inputs should range between 0 and 30 and not be left blank."), #

    `beta-Etch depth attenuation factor` = structure(factor(rep(NA_character_, nrow), c("Bell1979", "Brennan2003", "X")), required = FALSE, allowsX = TRUE, key = "TI:38", description = "The etch depth attenuation factors for the beta dose rate. Users have the option of datasets from Bell (1979) and Brennan (2003). Input must be 'Bell1979' or 'Brennan2003'. Note: only the dataset of Bell (1980) is provided for attenuation of the alpha dose rate by etching."), #

    `a-value` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = TRUE, key = "TI:39", description = "Alpha track efficiency value and uncertainty defined by the user. Inputs should be 0 or positive and not left blank."), #

    `erra-value` = structure(rep(NA_real_, nrow), required = TRUE, allowsX = TRUE, key = "TI:40", description = "Alpha track efficiency value and uncertainty defined by the user. Inputs should be 0 or positive and not left blank."), #

    `Water content ((wet weight - dry weight)/dry weight) %` = structure(rep(NA_real_, nrow), required = TRUE, allowsX = FALSE, key = "TI:41", description = "Sediment water content (%) over the burial period. Inputs should be 0 or positive and not be left blank."), #

    `errWater content %` = structure(rep(NA_real_, nrow), required = FALSE, allowsX = FALSE, key = "TI:42", description = "Sediment water content (%) over the burial period. Inputs should be 0 or positive and not be left blank."), #

    `Depth (m)` = structure(rep(NA_character_, nrow), required = FALSE, allowsX = TRUE, key = "TI:43", description = "Depth and uncertainty from which sample was extracted beneath the ground surface. Inputs should be 0 or positive and not left blank."), #

    `errDepth (m)` = structure(rep(NA_character_, nrow), required = FALSE, allowsX = TRUE, key = "TI:44", description = "Depth and uncertainty from which sample was extracted beneath the ground surface. Inputs should be 0 or positive and not left blank."), #

    `Overburden density (g cm-3)` = structure(rep(NA_real_, nrow), required = TRUE, allowsX = FALSE, key = "TI:45", description = "Density of the overlying sediment matrix from which the sample was taken. Inputs should be 0 or positive and not be left blank. The scaling calculation will use the overburden density and uncertainty provided."), #

    `errOverburden density (g cm-3)` = structure(rep(NA_real_, nrow), required = TRUE, allowsX = FALSE, key = "TI:46", description = "Density of the overlying sediment matrix from which the sample was taken. Inputs should be 0 or positive and not be left blank. The scaling calculation will use the overburden density and uncertainty provided."), #

    `Latitude (decimal degrees)` = structure(rep(NA_character_, nrow), required = FALSE, allowsX = TRUE, key = "TI:47", description = "Latitude and longitude of sample location (in degree decimals). Positive values should be used for northern latitudes and eastern longitudes and negative values for southern latitudes and western longitudes. Inputs should range from -90 to 90 degrees for latitudes and -180 to 180 degrees for longitude."), #

    `Longitude (decimal degrees)` = structure(rep(NA_character_, nrow), required = FALSE, allowsX = TRUE, key = "TI:48", description = "Latitude and longitude of sample location (in degree decimals). Positive values should be used for northern latitudes and eastern longitudes and negative values for southern latitudes and western longitudes. Inputs should range from -90 to 90 degrees for latitudes and -180 to 180 degrees for longitude."), #

    `Altitude (m)` = structure(rep(NA_character_, nrow), required = FALSE, allowsX = TRUE, key = "TI:49", description = "Altitude of sample location in metres above sea level. Input should be less than 5000 and not left blank."), #

    `User cosmicdoserate (Gy.ka-1)` = structure(rep(NA_character_, nrow), required = FALSE, allowsX = TRUE, key = "TI:50", description = "Users may input a cosmic dose rate (in Gy.ka-1). Inputs in these fields will override the DRAC calculated cosmic dose rate. Inputs should be positive or 'X' if not required, and not left blank."), #

    `errUser cosmicdoserate (Gy.ka-1)` = structure(rep(NA_character_, nrow), required = FALSE, allowsX = TRUE, key = "TI:51", description = "Users may input a cosmic dose rate (in Gy.ka-1). Inputs in these fields will override the DRAC calculated cosmic dose rate. Inputs should be positive or 'X' if not required, and not left blank."), #

    `De (Gy)` = structure(rep(NA_character_, nrow), required = FALSE, allowsX = TRUE, key = "TI:52", description = "Sample De and uncertainty (in Gy). Inputs should be positive or 'X' if not required, and not left blank."), #

    `errDe (Gy)` = structure(rep(NA_character_, nrow), required = FALSE, allowsX = TRUE, key = "TI:53", description = "Sample De and uncertainty (in Gy). Inputs should be positive or 'X' if not required, and not left blank.") #
  )

  ## RETURN VALUE ----
  # add an additional DRAC class so we can define our own S3 method for as.data.frame
  class(template) <- c("DRAC.list", "list")

  # set preset
  if (!is.null(preset))
    template <- .preset_DRAC(template, preset)

  invisible(template)
}

## Internal helper: overwrite every column of a freshly created DRAC template
## 'x' with the corresponding value of the chosen 'preset', recycled to the
## number of template rows. The preset lists below hold one value per DRAC
## column, in the exact order of the 53 columns created by template_DRAC().
.preset_DRAC <- function(x, preset) {

  preset_list <- list(
    ## DRAC COLUMNS (TI:xx) --- TI:1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53
    "quartz_coarse" = list("RLum_preset", "quartz_coarse", "Q", "Guerinetal2011", "X", "X", "X", "X", "X", "X", "X", "X", "N", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "Y", 100, 200, "Brennanetal1991", "Guerinetal2012-Q", 20, 5, "Bell1979", 0.035, 0.01, 0, 0, 0, 0, 1.8, 0.1, "X", "X", 0, "X", "X", "X", "X"),
    "quartz_fine" = list("RLum_preset", "quartz_fine", "Q", "Guerinetal2011", "X", "X", "X", "X", "X", "X", "X", "X", "N", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "Y", 4, 11, "Brennanetal1991", "Guerinetal2012-Q", 0, 0, "Bell1979", 0.035, 0.01, 0, 0, 0, 0, 1.8, 0.1, "X", "X", 0, "X", "X", "X", "X"),
    "feldspar_coarse" = list("RLum_preset", "feldspar_coarse", "F", "Guerinetal2011", "X", "X", "X", "X", "X", "X", "X", "X", "Y", "X", "X", "X", "X", 12.5, 0.5, "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "Y", 100, 200, "Brennanetal1991", "Guerinetal2012-F", 0, 0, "Bell1979", 0.08, 0.01, 0, 0, 0, 0, 1.8, 0.1, "X", "X", 0, "X", "X", "X", "X"),
    "polymineral_fine" = list("RLum_preset", "polymineral_fine", "PM", "Guerinetal2011", "X", "X", "X", "X", "X", "X", "X", "X", "Y", "X", "X", "X", "X", 12.5, 0.5, "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "Y", 4, 11, "Brennanetal1991", "Guerinetal2012-F", 0, 0, "Bell1979", 0.08, 0.01, 0, 0, 0, 0, 1.8, 0.1, "X", "X", 0, "X", "X", "X", "X"),
    "DRAC-example_quartz" = list("DRAC-example", "Quartz", "Q", "Guerinetal2011", 3.4, 0.51, 14.47, 1.69, 1.2, 0.14, 0, 0, "N", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "X", "N", 90, 125, "Brennanetal1991", "Guerinetal2012-Q", 8, 10, "Bell1979", 0, 0, 5, 2, 2.22, 0.05, 1.8, 0.1, 30, 70, 150, "X", "X", 20, 0.2),
    "DRAC-example_feldspar" = list("DRAC-example", "Feldspar", "F", "AdamiecAitken1998", 2, 0.2, 8, 0.4, 1.75, 0.05, 0, 0, "Y", "X", "X", "X", "X", 12.5, 0.5, "X", "X", "N", "X", "X", "X", "X", "X", "X", "X", "X", "Y", 180, 212, "Bell1980", "Mejdahl1979", 0, 0, "Bell1979", 0.15, 0.05, 10, 3, 0.15, 0.02, 1.8, 0.1, 60, 100, 200, "X", "X", 15, 1.5),
    "DRAC-example_polymineral" = list("DRAC-example", "Polymineral", "PM", "AdamiecAitken1998", 4, 0.4, 12, 0.12, 0.83, 0.08, 0, 0, "Y", "X", "X", "X", "X", 12.5, 0.5, "X", "X", "N", "X", "X", 2.5, 0.15, "X", "X", "X", "X", "Y", 4, 11, "Bell1980", "Mejdahl1979", 0, 0, "Bell1979", 0.086, 0.0038, 10, 5, 0.2, 0.02, 1.8, 0.1, 46, 118, 200, 0.2, 0.1, 204.47, 2.69)
  )

  # number of rows the caller requested via template_DRAC(nrow = ...)
  n <- length(x[[1]])

  ## NOTE(review): assigning rep(...) replaces each column wholesale, which
  ## drops the 'required'/'allowsX'/'key'/'description' attributes and the
  ## factor levels set in template_DRAC() -- confirm downstream consumers
  ## (print/as.data.frame methods, use_DRAC) tolerate plain vectors here.
  for (i in 1:length(x))
    x[[i]] <- rep(preset_list[[preset]][[i]], n)

  return(x)
}
Luminescence/R/get_Quote.R0000644000176200001440000001177713604172511015161 0ustar liggesusers#' Function to return essential quotes
#'
#' This function returns one of the collected essential quotes in the
#' growing library. If called without any parameters, a random quote is
#' returned.
#'
#' @param ID [character] (*optional*): quote ID to be returned.
#'
#' @param separated [logical] (*with default*): return result in separated form.
#'
#' @return Returns a character with quote and respective (false) author.
#'
#' @section Function version: 0.1.4
#'
#' @author Michael Dietze, GFZ Potsdam (Germany), Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France), Dirk Mittelstraß, TU Dresden (Germany)
#'
#' @examples
#'
#' ## ask for an arbitrary quote
#' get_Quote()
#'
#' @md
#' @export
get_Quote <- function(
  ID,
  separated = FALSE
){

  ## Definition of the ever growing quote data set.
  ## Each entry is c(<author>, <quote>). The last entry deliberately carries the
  ## same text in both columns: it used to be a one-element vector, which made
  ## rbind() recycle it (same result) but with a warning on every single call.
  quotes <- rbind(
    c("Anonymous student hotel employee", "Let me double check this."),
    c("The ordinary reviewer", "I love it when a plan comes together."),
    c("A tunnelling electron", "God does not play dice."),
    c("Goldfinger", "You cannot get this machine better and cheaper than from us."),
    c("A PhD supervisor", "Live long and in prosper."),
    c("A PhD supervisor", "You are not depressive, you simply have a crappy life."),
    c("A trapped charge", "I want to break free."),
    c("The R-package Luminescence manual", "Call unto me, and I will answer thee, and will shew thee great things, and difficult, which thou knowest not."),
    c("A stimulated feldspar grain", "I'm so excited and I just can't hide it."),
    c("The true age", "How many roads..."),
    c("The undecided OSL component", "Should I stay or should I go?"),
    c("A fluvially transported quartz grain at night", "Always look at the bright side of life."),
    c("An arctic sediment outcrop", "Marmor, Stein und Eisen bricht..."),
    c("A common luminescence reader customer", "If anything can go wrong, it will."),
    c("A blue LED to a trapped electron", "Resistance is futile."),
    c("A trapped electron to a yellow LED", "Well, that's all you've got?"),
    c("A weathering rock", "Who wants to live forever?"),
    c("A new pIRIR derivative", "20,000 miles below the sea."),
    c("Robert Oppenheimer", "I want this thing to work by just pressing one button."),
    c("An arbitrary member of the CRAN team", "No shirt, no shoes, no service!"),
    c("Rubber mallet to steel cylinder", "Let's rock and roll."),
    c("A data import function", "Better late than never."),
    c("A luminescence lab staff member to its customer", "Tell me the age, I tell you the price."),
    c("The NSA", "O'zapft is."),
    c("The natural dose", "You only live once."),
    c("A Windows user", "An apple a day keeps the doctor away."),
    c("The authors of sTeve", "We love to entertain you."),
    c("Any arbitrary independent OSL device manufacturer", "Sure it will work, it was me who built it!"),
    c("Response to the reviewer", "You are right, it was just a guess."),
    c("An aliquot disc", "The answer [...] is: 48"),
    c("Push Pin", "Made of used sample carriers"),
    c("A motivated R-Team member", "We are doing this not just for statistical reasons, there is real science behind it!"),
    c("An enthusiastic cabaret artist", "Political elections are like brushing teeth: if you don't do it, things become brown."),
    c("An unbiased reviewer", "The data is too poor to be published in QG, try a higher ranked journal."),
    c("R Team member, asked about statistical details", "No idea, I'm just here for the visualisation."),
    c("An arbitrary unexperienced RLum-user", "Little by little, the bird builds its nest."),
    c("The answer to life, the universe and everything", "get_rightAnswer()"),
    c("Der Tatortreiniger", "Dreck ist nur Materie am falschen Ort."),
    c("Die Ex vom Tatortreiniger", "Das Ziel ist im Weg."),
    c("Bright grain to dim grains", "I'm so shiny!"),
    c("Fast component to slow component", "Life is short!"),
    c("Fast component to slow component", "What are you waiting for?"),
    c("Violet photon to deep trap electron", "Today I'm kicking you out of your comfort zone!"),
    c("Deep trap electron to infrared photon", "Don't bother me, I need to rest."),
    c("A single grain", "I feel so lonley."),
    c("Luminescence data to Bayesian process", "Don't you ever touch me again."),
    c("Quartz grain to heating plate", "Go ahead, I need a phase change."),
    c("Photon to electron", "I am in charge!"),
    c("You cannot spell 'data analysis' without 'daily satan'",
      "You cannot spell 'data analysis' without 'daily satan'")
  )

  ## Check input data: without an ID, draw one quote at random
  if(missing(ID)){
    ID <- sample(x = seq(from = 1, to = nrow(quotes)), size = 1)
  }

  ## Check for a valid ID and generate the quote.
  ## (fixed: IDs < 1 previously slipped through the old check
  ## `length(ID) < 1 | ID > nrow(quotes)` and produced empty/garbled output;
  ## `any()` also keeps the condition scalar for vector input)
  if(length(ID) < 1 || any(ID < 1) || any(ID > nrow(quotes))){
    quote.out <- "Sorry, but this was an impossible task!"

  }else{
    ## generate quote(s)
    if(separated == FALSE){
      quote.out <- paste(quotes[ID, 1], ": '", quotes[ID, 2], "'", sep = "")

    }else{
      quote.out <- quotes[ID, ]
    }
  }

  ## return quotes
  return(quote.out)
}
Luminescence/R/analyse_Al2O3C_Measurement.R0000644000176200001440000005616213604172512020227 0ustar liggesusers
#' Al2O3:C Passive Dosimeter Measurement Analysis
#'
#' The function provides the analysis routines for measurements on a
#' FI lexsyg SMART reader using Al2O3:C chips according to Kreutzer et al., 2018
#'
#' **Working with a travel dosimeter**
#'
#' The function allows to define particular aliquots as travel dosimeters. For example:
#' `travel_dosimeter = c(1,3,5)` sets aliquots 1, 3 and 5 as travel dosimeters. These dose values
#' of this dosimeters are combined and automatically subtracted from the obtained dose values
#' of the other dosimeters.
#'
#' **Calculate TL dose **
#'
#' The argument `calculate_TL_dose` provides the possibility to experimentally calculate a TL-dose,
#' i.e. an apparent dose value derived from the TL curve ratio. However, it should be noted that
#' this value is only a fallback in case something went wrong during the measurement of the optical
#' stimulation. The TL derived dose value is corrected for cross-talk and for the irradiation time,
#' but not considered if a travel dosimeter is defined.
#'
#' **Test parameters**
#'
#' `TL_peak_shift` [numeric] (default: `15`):
#'
#' Checks whether the TL peak shift is bigger > 15 K, indicating a problem with the
#' thermal contact of the chip.
#'
#' `stimulation_power` [numeric] (default: `0.05`):
#'
#' So far available, information on the delivered optical stimulation are compared. Compared are
#' the information from the first curves with all others.
If the ratio differs more from #' unity than the defined by the threshold, a warning is returned. #' #' #' @param object [RLum.Analysis-class] **(required)**: #' measurement input #' #' @param signal_integral [numeric] (*optional*): signal integral, used for the signal #' and the background. Example: `c(1:10)` for the first 10 channels. #' If nothing is provided the full range is used #' #' @param dose_points [numeric] (*with default*): #' vector with dose points, if dose points are repeated, only the general #' pattern needs to be provided. Default values follow the suggestions #' made by Kreutzer et al., 2018 #' #' @param recordType [character] (*with default*): input curve selection, which is passed to #' function [get_RLum]. To deactivate the automatic selection set the argument to `NULL` #' #' @param irradiation_time_correction [numeric] or [RLum.Results-class] (*optional*): #' information on the used irradiation time correction obained by another experiements. #' I a `numeric` is provided it has to be of length two: mean, standard error #' #' @param calculate_TL_dose [logical] (*with default*): Enables/disables experimental dose estimation #' based on the TL curves. Taken is the ratio of the peak sums of each curves +/- 5 channels. #' #' @param cross_talk_correction [numeric] or [RLum.Results-class] (*optional*): #' information on the used irradiation time correction obained by another experiements. #' If a `numeric` vector is provided it has to be of length three: #' mean, 2.5 % quantile, 97.5 % quantile. #' #' @param travel_dosimeter [numeric] (*optional*): specify the position of the travel dosimeter #' (so far measured a the same time). The dose of travel dosimeter will be subtracted from all #' other values. #' #' @param test_parameters [list] (*with default*): #' set test parameters. Supported parameters are: `TL_peak_shift` All input: [numeric] #' values, `NA` and `NULL` (s. 
Details) #' #' @param verbose [logical] (*with default*): #' enable/disable verbose mode #' #' @param plot [logical] (*with default*): enable/disable plot output, if `object` is of type [list], #' a [numeric] vector can be provided to limit the plot output to certain aliquots #' #' @param ... further arguments that can be passed to the plot output, supported are `norm`, `main`, `mtext`, #' `title` (for self-call mode to specify, e.g., sample names) #' #' @return Function returns results numerically and graphically: #' #' -----------------------------------\cr #' `[ NUMERICAL OUTPUT ]`\cr #' -----------------------------------\cr #' #' **`RLum.Results`**-object #' #' **slot:** **`@data`** #' #' \tabular{lll}{ #' **Element** \tab **Type** \tab **Description**\cr #' `$data` \tab `data.frame` \tab the estimated equivalent dose \cr #' `$data_table` \tab `data.frame` \tab full dose and signal table \cr #' `test_parameters` \tab `data.frame` \tab results with test paramaters \cr #' `data_TDcorrected` \tab `data.frame` \tab travel dosimeter corrected results (only if TD was provided)\cr #' } #' #' *Note: If correction the irradiation time and the cross-talk correction method is used, the De #' values in the table `data` table are already corrected, i.e. if you want to get an uncorrected value, #' you can use the column `CT_CORRECTION` remove the correction* #' #'**slot:** **`@info`** #' #' The original function call #' #' ------------------------\cr #' `[ PLOT OUTPUT ]`\cr #' ------------------------\cr #' #' - OSL and TL curves, combined on two plots. #' #' #' @section Function version: 0.2.5 #' #' @author Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) #' #' @seealso [analyse_Al2O3C_ITC] #' #' @references #' #' Kreutzer, S., Martin, L., Guérin, G., Tribolo, C., Selva, P., Mercier, N., 2018. Environmental Dose Rate #' Determination Using a Passive Dosimeter: Techniques and Workflow for alpha-Al2O3:C Chips. 
#' Geochronometria 45, 56-67. \doi{10.1515/geochr-2015-0086}
#'
#' @keywords datagen
#'
#' @examples
#' ##load data
#' data(ExampleData.Al2O3C, envir = environment())
#'
#' ##run analysis
#' analyse_Al2O3C_Measurement(data_CrossTalk)
#'
#' @md
#' @export
analyse_Al2O3C_Measurement <- function(
  object,
  signal_integral = NULL,
  dose_points = c(0,4),
  recordType = c("OSL (UVVIS)", "TL (UVVIS)"),
  calculate_TL_dose = FALSE,
  irradiation_time_correction = NULL,
  cross_talk_correction = NULL,
  travel_dosimeter = NULL,
  test_parameters = NULL,
  verbose = TRUE,
  plot = TRUE,
  ...
){

  # Self call -----------------------------------------------------------------------------------
  ## If a list of RLum.Analysis objects is provided, the function calls itself on every
  ## element, merges the results and (optionally) applies the travel dosimeter correction
  ## on the merged output.
  if(is(object, "list")){
    if(!all(unlist(lapply(object, function(x){is(x, "RLum.Analysis")})))){
      stop("[analyse_Al2O3C_Measurement()] The elements in 'object' are not all of type 'RLum.Analysis'",
           call. = FALSE)
    }

    ##expand input arguments to match the number of aliquots
    ##signal integral
    if(!is.null(signal_integral)){
      signal_integral <- rep(list(signal_integral), length = length(object))
    }

    ##dose points
    ##(fixed: this branch previously assigned to 'dose.points', so a list input
    ##was never expanded to the length of 'object')
    if(is(dose_points, "list")){
      dose_points <- rep(dose_points, length = length(object))
    }else{
      dose_points <- rep(list(dose_points), length = length(object))
    }

    ##irradiation time correction
    if(is(irradiation_time_correction, "list")){
      irradiation_time_correction <- rep(irradiation_time_correction, length = length(object))
    }else{
      irradiation_time_correction <- rep(list(irradiation_time_correction), length = length(object))
    }

    ##cross talk correction
    if(is(cross_talk_correction, "list")){
      cross_talk_correction <- rep(cross_talk_correction, length = length(object))
    }else{
      cross_talk_correction <- rep(list(cross_talk_correction), length = length(object))
    }

    ##test_parameters (a list of lists means one parameter set per aliquot)
    if(is(test_parameters[[1]], "list")){
      test_parameters <- rep(test_parameters, length = length(object))
    }else{
      test_parameters <- rep(list(test_parameters), length = length(object))
    }

    ##plot: a logical is recycled, a numeric vector limits plotting to these aliquots
    if(is(plot, "logical")){
      plot <- rep(x = plot, length(object))
    }else{
      plot <- 1:length(object)%in%plot
    }

    ##run the analysis for every aliquot
    results <- lapply(1:length(object), function(x) {
      temp <- analyse_Al2O3C_Measurement(
        object = object[[x]],
        signal_integral = signal_integral[[x]],
        dose_points = dose_points[[x]],
        irradiation_time_correction = irradiation_time_correction[[x]],
        cross_talk_correction = cross_talk_correction[[x]],
        test_parameters = test_parameters[[x]],
        calculate_TL_dose = calculate_TL_dose,
        verbose = verbose,
        plot = plot[x],
        ...
      )

      ##adjust the terminal output, to avoid confusion
      if(verbose)
        cat(" ... (#",x, " | ALQ POS: ", temp$data$POSITION,")\n", sep = "")

      ##add running number to the plot, but only if we had a plot here ...
      if(plot[x]){
        title(main = paste0(list(...)$title[x], " ","#", x), adj = 1, line = 3)
      }
      return(temp)
    })

    ##merge results
    results <- merge_RLum(results)

    ##correct sys.call(); doing this in merge_RLum() itself would be wrong
    results@info[names(results@info) == "call"] <- NULL
    results@info$call <- sys.call()

    ## Travel dosimeter correction ----------
    ##subtract the (combined) travel dosimeter dose from all other dosimeters
    if(!is.null(travel_dosimeter)){
      ##check data type
      if(!is(travel_dosimeter, "numeric"))
        stop("[analyse_Al2O3C_Measurement()] Input for `travel_dosimeter` is not numeric!",
             call. = FALSE)

      if(length(travel_dosimeter) == nrow(results$data)){
        ##everything flagged as travel dosimeter: complain (non-fatal, try() only
        ##prints the error) and, as announced, skip the correction
        try(stop("[analyse_Al2O3C_Measurement()] You specified every position as travel dosimeter, nothing corrected!",
                 call. = FALSE))

      }else if(!any(travel_dosimeter%in%results$data$POSITION)){
        ##no valid position: complain (non-fatal) and skip the correction
        ##(fixed: the correction code previously ran anyway and produced NaN values)
        try(stop("[analyse_Al2O3C_Measurement()] Invalid position in 'travel_dosimeter', nothing corrected!",
                 call. = FALSE))

      }else{
        ##correct for the travel dosimeter using the weighted mean and the sd (as new error);
        ##if only one value is given, just take it
        if(length(travel_dosimeter) == 1 &&
           nrow(results$data[travel_dosimeter == results$data$POSITION, c(1,2)]) == 1){
          correction <- as.numeric(results$data[travel_dosimeter == results$data$POSITION, c(1,2)])

        }else{
          temp.correction <- results$data[results$data$POSITION%in%travel_dosimeter, c(1,2)]
          correction <- c(
            stats::weighted.mean(
              x = temp.correction[[1]],
              ##all-zero errors would make the weights undefined, fall back to equal weights
              w = if(all(temp.correction[[2]] == 0)){
                rep(1, length(temp.correction[[2]]))
              } else {
                temp.correction[[2]]
              }),
            sd(temp.correction[,1]))
          rm(temp.correction)
        }

        ##subtract in a new data.frame, the original data remain untouched
        data_TDcorrected <- data.frame(
          DE = results@data$data[!results$data$POSITION%in%travel_dosimeter,1] - correction[1],
          DE_ERROR = sqrt(results@data$data[!results$data$POSITION%in%travel_dosimeter,2]^2 + correction[2]^2),
          POSITION = results@data$data[!results$data$POSITION%in%travel_dosimeter, "POSITION"]
        )

        ##flag the travel dosimeter rows in the corresponding column
        results@data$data$TRAVEL_DOSIMETER <- results$data$POSITION%in%travel_dosimeter

        ##attach the new element to the results output
        results@data <- c(results@data, list(data_TDcorrected = data_TDcorrected))

        if(verbose)
          cat("\n ...+ travel dosimeter correction applied.\n ...+ results stored in object $data_TDcorrected.\n\n")
      }
    } ##end travel dosimeter

    ##return merged results
    return(results)
  }

  # Integrity check -----------------------------------------------------------------------------
  ##TODO ... do more, push harder
  ##Add sufficient unit tests

  # Preparation ---------------------------------------------------------------------------------
  ##keep the unfiltered input, needed later for the stimulation power check
  ##(fixed: 'object_raw' was previously only set for recordType != NULL and the
  ##stimulation power check crashed otherwise)
  object_raw <- object

  ##select curves based on the recordType selection; if not NULL
  if(!is.null(recordType)){
    object <- get_RLum(object, recordType = recordType, drop = FALSE)
  }

  ##set signal integral; NULL means the full channel range
  if(is.null(signal_integral)){
    signal_integral <- c(1:nrow(object[[1]][]))

  }else{
    ##check whether the input is valid, otherwise make it valid
    if(min(signal_integral) < 1 | max(signal_integral) > nrow(object[[1]][])){
      signal_integral <- c(1:nrow(object[[1]][]))
      warning(
        paste0(
          "[analyse_Al2O3C_Measurement()] Input for 'signal_integral' corrected to 1:",
          nrow(object[[1]][])
        ),
        call. = FALSE
      )
    }
  }

  ## Set irradiation time correction -------------------------------------------------------------
  ##accepted: NULL, numeric c(mean, se) or an RLum.Results object of analyse_Al2O3C_ITC()
  if (!is.null(irradiation_time_correction)) {
    if (is(irradiation_time_correction, "RLum.Results")) {
      if (irradiation_time_correction@originator == "analyse_Al2O3C_ITC") {
        irradiation_time_correction <- get_RLum(irradiation_time_correction)

        ##consider the case of more than one observation ...
        if(nrow(irradiation_time_correction) > 1){
          irradiation_time_correction <- c(
            mean(irradiation_time_correction[[1]]),
            sd(irradiation_time_correction[[1]]))

        }else{
          irradiation_time_correction <- c(
            irradiation_time_correction[[1]],
            irradiation_time_correction[[2]])
        }

      } else{
        stop(
          "[analyse_Al2O3C_Measurement()] The object provided for the argument 'irradiation_time_correction' was created by an unsupported function!",
          call. = FALSE
        )
      }
    }
  }

  ## Set cross talk correction -------------------------------------------------------------------
  ##the carousel position is needed to evaluate the cross-talk model
  if(!is.null(get_RLum(object = object[[1]], info.object = "position"))){
    POSITION <- get_RLum(object = object[[1]], info.object = "position")

  }else{
    message("[analyse_Al2O3C_Measurement()] Aliquot position number was not found. No cross talk correction was applied!")
    cross_talk_correction <- c(0,0,0)
    POSITION <- NA
  }

  if(is.null(cross_talk_correction)){
    ##no correction: mean and both quantiles are zero
    cross_talk_correction <- c(0,0,0)

  }else if(is.numeric(cross_talk_correction)){
    ##numeric input (documented interface): mean, 2.5 % and 97.5 % quantile;
    ##this branch also covers the zero fallback set above when no position was found
    ##(fixed: numeric input previously fell through to the 'unsupported function' error)
    if(length(cross_talk_correction) != 3)
      stop(
        "[analyse_Al2O3C_Measurement()] A numeric 'cross_talk_correction' must be of length three: mean, 2.5 % quantile, 97.5 % quantile!",
        call. = FALSE)

  }else if(is(cross_talk_correction, "RLum.Results") &&
           cross_talk_correction@originator == "analyse_Al2O3C_CrossTalk"){
    ##take the fitted cross-talk model and predict the values for this carousel position
    cross_talk_correction <-
      as.numeric(predict(cross_talk_correction$fit,
                         newdata = data.frame(x = POSITION),
                         interval = "confidence"))

  }else{
    stop(
      "[analyse_Al2O3C_Measurement()] The object provided for the argument 'cross_talk_correction' was created by an unsupported function or has a wrong originator!",
      call. = FALSE
    )
  }

  # Calculation ---------------------------------------------------------------------------------
  ##expected record structure after recordType selection:
  ##[[1]] OSL natural, [[2]] TL natural, [[3]] OSL regenerated, [[4]] TL regenerated,
  ##[[5]] OSL background; only the 2nd dose point is known

  ##set default test parameters
  test_parameters.default <- list(
    TL_peak_shift = 15,
    stimulation_power = 0.05
  )

  ##modify default values by the given input; a NULL element disables that check
  if(!is.null(test_parameters)){
    test_parameters <- modifyList(test_parameters.default, test_parameters)

    ##remove NULL elements from list
    test_parameters <- test_parameters[!sapply(test_parameters, is.null)]

  }else{
    test_parameters <- test_parameters.default
  }

  ##calculate integrated light values
  NATURAL <- sum(object@records[[1]]@data[signal_integral, 2])
  REGENERATED <- sum(object@records[[3]]@data[signal_integral, 2])
  BACKGROUND <- sum(object@records[[5]]@data[signal_integral, 2])

  ##do the same for the TL: peak sum +/- 5 channels around the curve maximum
  if(calculate_TL_dose){
    NATURAL_TL <- try(sum(
      object@records[[2]]@data[
        (which.max(object@records[[2]]@data[,2])-5):(which.max(object@records[[2]]@data[,2])+5),2]),
      silent = TRUE)

    REGENERATED_TL <- try(sum(
      object@records[[4]]@data[
        (which.max(object@records[[4]]@data[,2])-5):(which.max(object@records[[4]]@data[,2])+5),2]),
      silent = TRUE)

    ##catch errors if the integration fails, e.g. the peak sits at the curve limits
    ##(inherits() instead of class() ==, which breaks for multi-class objects)
    if(inherits(NATURAL_TL, "try-error")){
      NATURAL_TL <- NA
      warning("[analyse_Al2O3C_Measurement()] Natural TL signal out of bounds, NA returned!",
              call. = FALSE, immediate. = TRUE)
    }

    if(inherits(REGENERATED_TL, "try-error")){
      REGENERATED_TL <- NA
      warning("[analyse_Al2O3C_Measurement()] Regenerated TL signal out of bounds, NA returned!",
              call. = FALSE, immediate. = TRUE)
    }

  }else{
    NATURAL_TL <- NA
    REGENERATED_TL <- NA
  }

  ##combine into data.frame
  temp_df <- data.frame(
    POSITION = POSITION,
    DOSE = if(!is.null(irradiation_time_correction)){
      dose_points + irradiation_time_correction[1]
    }else{
      dose_points
    },
    DOSE_ERROR = if(!is.null(irradiation_time_correction)){
      dose_points * irradiation_time_correction[2]/irradiation_time_correction[1]
    }else{
      0
    },
    STEP = c("NATURAL", "REGENERATED"),
    INTEGRAL = c(NATURAL, REGENERATED),
    BACKGROUND = c(BACKGROUND, BACKGROUND),
    NET_INTEGRAL = c(NATURAL - BACKGROUND, REGENERATED - BACKGROUND),
    NATURAL_TL = NATURAL_TL,
    REGENERATED_TL = REGENERATED_TL,
    row.names = NULL
  )

  ##0 s dose points must not be biased by the irradiation time correction:
  ##for them the aliquot was never moved under the source
  id_zero <- which(dose_points == 0)
  temp_df$DOSE[id_zero] <- 0
  temp_df$DOSE_ERROR[id_zero] <- 0

  ##calculate the DE using the irradiation time AND cross-talk correction
  ##(Monte Carlo error propagation)
  ##(1) sample dose point values with irradiation time correction (random)
  if(!is.null(irradiation_time_correction)){
    DOSE_MC <- rnorm(1000, mean = temp_df$DOSE[2], sd = temp_df$DOSE_ERROR[2])
  }else{
    DOSE_MC <- temp_df$DOSE[2]
  }

  ##(2) random sampling from the cross-irradiation confidence interval
  CT <- runif(1000, min = cross_talk_correction[2], max = cross_talk_correction[3])

  ##(3) signal ratio
  INTEGRAL_RATIO <- temp_df$NET_INTEGRAL[1]/temp_df$NET_INTEGRAL[2]

  ##(4) calculate DE
  temp_DE <- (DOSE_MC * INTEGRAL_RATIO)

  ##(5) subtract the cross-talk value from the DE
  temp_DE <- temp_DE - CT

  ##(5.1) experimental TL-based DE using the same corrections (GSL-based)
  if(calculate_TL_dose){
    TL_Ratio <- NATURAL_TL/REGENERATED_TL
    temp_TL_DE <- (DOSE_MC * TL_Ratio) - CT
    TL_DE <- mean(temp_TL_DE)
    TL_DE.ERROR <- sd(temp_TL_DE)
  }else{
    TL_DE <- NA
    TL_DE.ERROR <- NA
  }

  ##(6) create final data.frame
  data <- data.frame(
    DE = mean(temp_DE),
    DE_ERROR = sd(temp_DE),
    POSITION,
    INTEGRAL_RATIO,
    TRAVEL_DOSIMETER = NA,
    CT_CORRECTION = cross_talk_correction[1],
    CT_CORRECTION_Q2.5 = cross_talk_correction[2],
    CT_CORRECTION_Q97.5 = cross_talk_correction[3],
    TL_DE = TL_DE,
    TL_DE.ERROR = TL_DE.ERROR,
    row.names = NULL
  )

  ## Test parameters -----------------------------------------------------------------------------
  ##TL_peak_shift: a large shift indicates a bad thermal contact of the chip
  if(any("TL_peak_shift"%in%names(test_parameters))){
    ##calculate value
    TP_TL_peak_shift.value <- abs((object[[2]][which.max(object[[2]][,2]),1] -
                                     object[[4]][which.max(object[[4]][,2]),1]))

    ##compare against the threshold
    TP_TL_peak_shift.status <- TP_TL_peak_shift.value > test_parameters$TL_peak_shift

    if(TP_TL_peak_shift.status)
      warning("TL peak shift detected for aliquot position ", POSITION, "! Check curves!",
              call. = FALSE)

    TP_TL_peak_shift <- data.frame(
      CRITERIA = "TL_peak_shift",
      THRESHOLD = test_parameters$TL_peak_shift,
      VALUE = TP_TL_peak_shift.value,
      STATUS = TP_TL_peak_shift.status,
      stringsAsFactors = FALSE)

  }else{
    TP_TL_peak_shift <- data.frame(stringsAsFactors = FALSE)
  }

  ##stimulation_power: compare the delivered stimulation power of all LED curves
  if(any("stimulation_power"%in%names(test_parameters))){
    ##select curves holding the information on the stimulation power
    temp_curves_OSL <- get_RLum(object_raw, recordType = "OSL", curveType = "measured")
    temp_curves_OSL <- lapply(temp_curves_OSL, function(o){
      if("stimulator"%in%names(o@info)){
        if(grepl(o@info$stimulator, pattern = "LED", fixed = TRUE)){
          return(o)
        }
      }
      return(NULL)
    })

    ##remove NULL
    temp_curves_OSL <- temp_curves_OSL[!sapply(temp_curves_OSL, is.null)]

    ##check whether something is left
    if(length(temp_curves_OSL) < 2){
      TP_stimulation_power.value <- NA
      TP_stimulation_power.status <- FALSE

    }else{
      ##calculate the sum of the power for each curve
      TP_stimulation_power.value <- vapply(temp_curves_OSL, function(x){
        sum(x@data[,2])
      }, numeric(1))

      ##deviation from the value expected if all curves matched the first one;
      ##which curve is 'correct' does not matter, only the spread
      TP_stimulation_power.value <-
        abs(1 - sum(TP_stimulation_power.value)/(TP_stimulation_power.value[1] * length(TP_stimulation_power.value)))

      TP_stimulation_power.status <-
        TP_stimulation_power.value > test_parameters$stimulation_power

      if(TP_stimulation_power.status)
        warning("Stimulation power was not stable for ALQ ", POSITION, "! Results are likely to be wrong!",
                call. = FALSE)
    }

    ##remove object
    rm(temp_curves_OSL)

    TP_stimulation_power <- data.frame(
      CRITERIA = "stimulation_power",
      THRESHOLD = test_parameters$stimulation_power,
      VALUE = TP_stimulation_power.value,
      STATUS = TP_stimulation_power.status,
      stringsAsFactors = FALSE)

  }else{
    TP_stimulation_power <- data.frame(stringsAsFactors = FALSE)
  }

  ##compile all test parameter results
  df_test_parameters <- rbind(TP_TL_peak_shift, TP_stimulation_power)

  # Terminal output -----------------------------------------------------------------------------
  if(verbose){
    cat(" [analyse_Al2O3C_Measurement()] #", POSITION, " ", "DE: ", round(data$DE, 2),
        " \u00B1 ", round(data$DE_ERROR, 2), "\n", sep = "")
  }

  # Plotting ------------------------------------------------------------------------------------
  if (plot) {
    ##make sure the graphics state is restored on exit
    par.default <- par()$mfrow
    on.exit(par(mfrow = par.default))

    ##default plot settings, modifiable via ...
    plot_settings <- list(
      main = c(paste("ALQ POS:", POSITION, "| OSL"), paste("ALQ POS:", POSITION, "| TL")),
      norm = TRUE,
      mtext = ""
    )
    plot_settings <- modifyList(x = plot_settings, val = list(...))

    ##plot OSL and TL curves side by side
    par(mfrow = c(1,2))
    plot_RLum(
      object,
      plot.single = TRUE,
      combine = TRUE,
      mtext = list(paste0("DE: ", round(data$DE,2), " \u00b1 ", round(data$DE_ERROR,2)), ""),
      ##(fixed: x-axis label typo 'Simulation [s]')
      xlab = list("Stimulation [s]", "Temperature [\u00B0C]"),
      legend.text = list(list("#1 NAT", "#3 REG", "#5 BG"), list("#2 NAT", "#4 REG")),
      legend.pos = list("topright", "topleft"),
      main = as.list(plot_settings$main),
      norm = plot_settings$norm
    )
  }

  # Output --------------------------------------------------------------------------------------
  UID <- create_UID()
  output <- set_RLum(
    class = "RLum.Results",
    data = list(
      data = cbind(data, UID),
      data_table = cbind(temp_df, UID),
      test_parameters = cbind(df_test_parameters, UID)
    ),
    info = list(
      call = sys.call()
    )
  )

  ##return invisibly (same visibility as the original assignment-as-last-expression)
  invisible(output)
}
Luminescence/R/read_BIN2R.R0000644000176200001440000013140213604172511015020 0ustar liggesusers#' Import Risø BIN/BINX-files into R #' #' Import a *.bin or a *.binx file produced by a Risoe DA15 and DA20 TL/OSL #' reader into R. #' #' The binary data file is parsed byte by byte following the data structure #' published in the Appendices of the Analyst manual p. 42. #' #' For the general BIN/BINX-file structure, the reader is referred to the #' Risoe website: [http://www.nutech.dtu.dk/]() #' #' @param file [character] or [list] (**required**): path and file name of the #' BIN/BINX file (URLs are supported). If input is a `list` it should comprise #' only `character`s representing each valid path and BIN/BINX-file names. #' Alternatively the input character can be just a directory (path), in this case the #' the function tries to detect and import all BIN/BINX files found in the directory. #' #' @param show.raw.values [logical] (*with default*): #' shows raw values from BIN file for `LTYPE`, `DTYPE` and `LIGHTSOURCE` without #' translation in characters. Can be provided as `list` if `file` is a `list`. #' #' @param n.records [raw] (*optional*): #' limits the number of imported records. Can be used in combination with #' `show.record.number` for debugging purposes, e.g. corrupt BIN-files. #' Can be provided as `list` if `file` is a `list`. #' #' @param zero_data.rm [logical] (*with default*): #' remove erroneous data with no count values. As such data are usally not #' needed for the subsequent data analysis they will be removed by default. #' Can be provided as `list` if `file` is a `list`. #' #' @param duplicated.rm [logical] (*with default*): #' remove duplicated entries if `TRUE`. This may happen due to an erroneous #' produced BIN/BINX-file. This option compares only predeccessor and successor. #' Can be provided as `list` if `file` is a `list`. #' #' @param position [numeric] (*optional*): #' imports only the selected position. 
Note: the import performance will not #' benefit by any selection made here. #' Can be provided as `list` if `file` is a `list`. #' #' @param fastForward [logical] (*with default*): #' if `TRUE` for a more efficient data processing only a list of `RLum.Analysis` #' objects is returned instead of a [Risoe.BINfileData-class] object. #' Can be provided as `list` if `file` is a `list`. #' #' @param show.record.number [logical] (*with default*): #' shows record number of the imported record, for debugging usage only. #' Can be provided as `list` if `file` is a `list`. #' #' @param txtProgressBar [logical] (*with default*): #' enables or disables [txtProgressBar]. #' #' @param forced.VersionNumber [integer] (*optional*): #' allows to cheat the version number check in the function by own values for #' cases where the BIN-file version is not supported. #' Can be provided as `list` if `file` is a `list`. #' #' **Note:** The usage is at own risk, only supported BIN-file versions have been tested. #' #' @param ignore.RECTYPE [logical] (*with default*): #' this argument allows to ignore values in the byte 'RECTYPE' (BIN-file version 08), #' in case there are not documented or faulty set. In this case the corrupted records are skipped. #' #' @param pattern [character] (*optional*): #' argument that is used if only a path is provided. The argument will than be #' passed to the function [list.files] used internally to construct a `list` #' of wanted files #' #' @param verbose [logical] (*with default*): #' enables or disables verbose mode #' #' @param ... further arguments that will be passed to the function #' [Risoe.BINfileData2RLum.Analysis]. Please note that any matching argument #' automatically sets `fastForward = TRUE` #' #' @return #' Returns an S4 [Risoe.BINfileData-class] object containing two #' slots: #' #' \item{METADATA}{A [data.frame] containing all variables stored in the bin-file.} #' \item{DATA}{A [list] containing a numeric [vector] of the measured data. 
The ID corresponds to the record ID in METADATA.} #' #' If `fastForward = TRUE` a list of [RLum.Analysis-class] object is returned. The #' internal coercing is done using the function [Risoe.BINfileData2RLum.Analysis] #' #' @note #' The function works for BIN/BINX-format versions 03, 04, 05, 06, 07 and 08. The #' version number depends on the used Sequence Editor. #' #' **ROI data sets introduced with BIN-file version 8 are not supported and skipped durint import.** #' #' @section Function version: 0.16.2 #' #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France)\cr #' Margret C. Fuchs, HZDR Freiberg, (Germany) \cr #' based on information provided by Torben Lapp and Karsten Bracht Nielsen (Risø DTU, Denmark) #' #' #' @seealso [write_R2BIN], [Risoe.BINfileData-class], #' [base::readBin], [merge_Risoe.BINfileData], [RLum.Analysis-class] #' [utils::txtProgressBar], [list.files] #' #' #' @references #' DTU Nutech, 2016. The Squence Editor, Users Manual, February, 2016. #' [http://www.nutech.dtu.dk/english/products-and-services/radiation-instruments/tl_osl_reader/manuals]() #' #' #' @keywords IO #' #' @examples #' #' ##(1) import Risoe BIN-file to R (uncomment for usage) #' #' #FILE <- file.choose() #' #temp <- read_BIN2R(FILE) #' #temp #' #' @md #' @export read_BIN2R <- function( file, show.raw.values = FALSE, position = NULL, n.records = NULL, zero_data.rm = TRUE, duplicated.rm = FALSE, fastForward = FALSE, show.record.number = FALSE, txtProgressBar = TRUE, forced.VersionNumber = NULL, ignore.RECTYPE = FALSE, pattern = NULL, verbose = TRUE, ... 
){ # Self Call ----------------------------------------------------------------------------------- # Option (a): Input is a list, every element in the list will be treated as file connection # with that many file can be read in at the same time # Option (b): The input is just a path, the function tries to grep ALL BIN/BINX files in the # directory and import them, if this is detected, we proceed as list if (is.character(file)) { if (is.null(pattern)) { ##If this is not really a path we skip this here if (dir.exists(file) & length(dir(file)) > 0) { if (verbose) { cat( "[read_BIN2R()] Directory detected, trying to extract '*.bin'/'*.binx' files ...\n" ) } ##get files file <- as.list(list.files( path = file, recursive = FALSE, pattern = "\\.bin*", full.names = TRUE, ignore.case = TRUE )) } }else if(dir.exists(file)){ file <- as.list(list.files(file, pattern = pattern, full.names = TRUE, recursive = TRUE)) } } if (is.list(file)) { ##extend list of parameters ##position position <- if(is.list(position)){ rep(position, length = length(file)) }else{ rep(list(position), length = length(file)) } ##n.records n.records <- if(is.list(n.records)){ rep(n.records, length = length(file)) }else{ rep(list(n.records), length = length(file)) } ##zero_data.rm zero_data.rm<- if(is.list(zero_data.rm)){ rep(zero_data.rm, length = length(file)) }else{ rep(list(zero_data.rm), length = length(file)) } ##duplicated.rm duplicated.rm <- if(is.list(duplicated.rm)){ rep(duplicated.rm, length = length(file)) }else{ rep(list(duplicated.rm), length = length(file)) } ## show.raw.values show.raw.values <- if(is.list(show.raw.values)){ rep( show.raw.values, length = length(file)) }else{ rep(list( show.raw.values), length = length(file)) } ## show.record.number show.record.number <- if(is.list(show.record.number)){ rep(show.record.number, length = length(file)) }else{ rep(list(show.record.number), length = length(file)) } ##forced.VersionNumber forced.VersionNumber <- if(is.list(forced.VersionNumber)){ 
rep(forced.VersionNumber, length = length(file)) }else{ rep(list(forced.VersionNumber), length = length(file)) } temp.return <- lapply(1:length(file), function(x) { temp <- read_BIN2R( file = file[[x]], fastForward = fastForward, position = position[[x]], n.records = n.records[[x]], duplicated.rm = duplicated.rm[[x]], show.raw.values = show.raw.values[[x]], show.record.number = show.record.number[[x]], txtProgressBar = txtProgressBar, forced.VersionNumber = forced.VersionNumber[[x]], verbose = verbose, ... ) }) ##return if (fastForward) { return(unlist(temp.return, recursive = FALSE)) }else{ return(temp.return) } } # Config -------------------------------------------------------------------------------------- ##set file_link for internet downloads file_link <- NULL on_exit <- function(){ ##unlink internet connection if(!is.null(file_link)){ unlink(file_link) } ##close connection if(!is.null(con)){ close(con) } } on.exit(expr = on_exit()) # Integrity checks ------------------------------------------------------ ##check if file exists if(!file.exists(file)){ ##check whether the file as an URL if(grepl(pattern = "http", x = file, fixed = TRUE)){ if(verbose){ cat("[read_BIN2R()] URL detected, checking connection ... ") } ##check URL if(!httr::http_error(file)){ if(verbose) cat("OK") ##dowload file file_link <- tempfile("read_BIN2R_FILE") download.file(file, destfile = file_link, quiet = if(verbose){FALSE}else{TRUE}) }else{ cat("FAILED") con <- NULL stop("[read_BIN2R()] File does not exist!", call. = FALSE) } }else{ con <- NULL stop("[read_BIN2R()] File does not exist!", call. = FALSE) } } ##check if file is a BIN or BINX file if(!(TRUE%in%(c("BIN", "BINX", "bin", "binx")%in%sub(pattern = "%20", replacement = "", x = tail( unlist(strsplit(file, split = "\\.")), n = 1), fixed = TRUE)))){ try( stop( paste0("[read_BIN2R()] '", file,"' is not a file or not of type 'BIN' or 'BINX'! Skipped!"), call. 
= FALSE)) return(NULL) } ##set correct file name of file_link was set if(!is.null(file_link)){ file <- file_link } # Config ------------------------------------------------------------------ ##set supported BIN format version VERSION.supported <- as.raw(c(03, 04, 05, 06, 07, 08)) # Short file parsing to get number of records ------------------------------------------------- #open connection con<-file(file, "rb") ##get information about file size file.size <- file.info(file) ##read data up to the end of con ##set ID temp.ID <- 0 ##start for BIN-file check up while(length(temp.VERSION<-readBin(con, what="raw", 1, size=1, endian="little"))>0) { ##force version number if(!is.null(forced.VersionNumber)){ temp.VERSION <- as.raw(forced.VersionNumber) } ##stop input if wrong VERSION if((temp.VERSION%in%VERSION.supported) == FALSE){ if(temp.ID > 0){ if(is.null(n.records)){ warning(paste0("[read_BIN2R()] BIN-file appears to be corrupt. Import limited to the first ", temp.ID," record(s)."), call. = FALSE) }else{ warning(paste0("[read_BIN2R()] BIN-file appears to be corrupt. 'n.records' reset to ", temp.ID,"."), call. = FALSE) } ##set or reset n.records n.records <- temp.ID break() }else{ ##show error message error.text <- paste("[read_BIN2R()] Found BIN/BINX-format version (",temp.VERSION,") is not supported or the BIN/BINX-file is broken.! Supported version numbers are: ",paste(VERSION.supported,collapse=", "),".",sep="") ##show error stop(error.text, call. 
= FALSE) } } #empty byte position EMPTY<-readBin(con, what = "raw", 1, size = 1, endian = "little") if(temp.VERSION == 06 | temp.VERSION == 07 | temp.VERSION == 08){ ##GET record LENGTH temp.LENGTH <- readBin(con, what="int", 1, size=4, endian="little") STEPPING <- readBin(con, what="raw", temp.LENGTH-6, size=1, endian="little") }else{ ##GET record LENGTH temp.LENGTH <- readBin(con, what="int", 1, size=2, endian="little") STEPPING <- readBin(con, what="raw", temp.LENGTH-4, size=1, endian="little") } temp.ID<-temp.ID+1 if(!is.null(n.records) && temp.ID == n.records){ break() } } ##set n.records if(is.null(n.records)){ n.records <- temp.ID } rm(temp.ID) close(con) ##we have to close the connection here # Set Lookup tables -------------------------------------------------------------------------- ##LTYPE LTYPE.lookup <- c( "0" = "TL", "1" = "OSL", "2" = "IRSL", "3" = "M-IR", "4" = "M-VIS", "5" = "TOL", "6" = "TRPOSL", "7" = "RIR", "8" = "RBR", "9" = "USER", "10" = "POSL", "11" = "SGOSL", "12" = "RL", "13" = "XRF" ) ##DTYPE DTYPE.lookup <- c( "0" = "Natural", "1" = "N+dose", "2" = "Bleach", "3" = "Bleach+dose", "4" = "Natural (Bleach)", "5" = "N+dose (Bleach)", "6" = "Dose", "7" = "Background" ) ##LIGHTSOURCE LIGHTSOURCE.lookup <- c( "0" = "None", "1" = "Lamp", "2" = "IR diodes/IR Laser", "3" = "Calibration LED", "4" = "Blue Diodes", "5" = "White light", "6" = "Green laser (single grain)", "7" = "IR laser (single grain)" ) ##PRESET VALUES temp.CURVENO <- NA temp.FNAME <- NA temp.MEASTEMP <- NA temp.IRR_UNIT <- NA temp.IRR_DOSERATE <- NA temp.IRR_DOSERATEERR <- NA temp.TIMESINCEIRR <- NA temp.TIMETICK <- NA temp.ONTIME <- NA temp.OFFTIME <- NA temp.STIMPERIOD <- NA temp.GATE_ENABLED <- raw(length = 1) temp.ENABLE_FLAGS <- raw(length = 1) temp.GATE_START <- NA temp.GATE_STOP <- NA temp.GATE_END <- NA temp.PTENABLED <- raw(length = 1) temp.DTENABLED <- raw(length = 1) temp.DEADTIME <- NA temp.MAXLPOWER <- NA temp.XRF_ACQTIME <- NA temp.XRF_HV <- NA temp.XRF_CURR <- NA 
temp.XRF_DEADTIMEF <- NA temp.DETECTOR_ID <- NA temp.LOWERFILTER_ID <- NA temp.UPPERFILTER_ID <- NA temp.ENOISEFACTOR <- NA temp.SEQUENCE <- NA temp.GRAIN <- NA temp.GRAINNUMBER <- NA temp.LIGHTPOWER <- NA temp.LPOWER <- NA temp.RECTYPE <- 0 temp.MARKPOS_X1 <- NA temp.MARKPOS_Y1 <- NA temp.MARKPOS_X2 <- NA temp.MARKPOS_Y2 <- NA temp.MARKPOS_X3 <- NA temp.MARKPOS_Y3 <- NA temp.EXTR_START <- NA temp.EXTR_END <- NA ##SET length of entire record n.length <- n.records ##initialise data.frame results.METADATA <- data.table::data.table( ##1 to 7 ID = integer(length = n.length), SEL = logical(length = n.length), VERSION = numeric(length = n.length), LENGTH = integer(length = n.length), PREVIOUS = integer(length = n.length), NPOINTS = integer(length = n.length), RECTYPE = integer(length = n.length), #8 to 17 RUN = integer(length = n.length), SET = integer(length = n.length), POSITION = integer(length = n.length), GRAIN = integer(length = n.length), GRAINNUMBER = integer(length = n.length), CURVENO = integer(length = n.length), XCOORD = integer(length = n.length), YCOORD = integer(length = n.length), SAMPLE = character(length = n.length), COMMENT = character(length = n.length), #18 to 22 SYSTEMID = integer(length = n.length), FNAME = character(length = n.length), USER = character(length = n.length), TIME = character(length = n.length), DATE = character(length = n.length), ##23 to 31 DTYPE = character(length = n.length), BL_TIME = numeric(length = n.length), BL_UNIT = integer(length = n.length), NORM1 = numeric(length = n.length), NORM2 = numeric(length = n.length), NORM3 = numeric(length = n.length), BG = numeric(length = n.length), SHIFT = integer(length = n.length), TAG = integer(length = n.length), ##32 to 67 LTYPE = character(length = n.length), LIGHTSOURCE = character(length = n.length), LPOWER = numeric(length = n.length), LIGHTPOWER = numeric(length = n.length), LOW = numeric(length = n.length), HIGH = numeric(length = n.length), RATE = numeric(length = n.length), 
TEMPERATURE = numeric(length = n.length), MEASTEMP = numeric(length = n.length), AN_TEMP = numeric(length = n.length), AN_TIME = numeric(length = n.length), TOLDELAY = integer(length = n.length), TOLON = integer(length = n.length), TOLOFF = integer(length = n.length), IRR_TIME = numeric(length = n.length), IRR_TYPE = integer(length = n.length), IRR_UNIT = integer(length = n.length), IRR_DOSERATE = numeric(length = n.length), IRR_DOSERATEERR = numeric(length = n.length), TIMESINCEIRR = numeric(length = n.length), TIMETICK = numeric(length = n.length), ONTIME = numeric(length = n.length), OFFTIME = numeric(length = n.length), STIMPERIOD = integer(length = n.length), GATE_ENABLED = numeric(length = n.length), ENABLE_FLAGS = numeric(length = n.length), GATE_START = numeric(length = n.length), GATE_STOP = numeric(length = n.length), PTENABLED = numeric(length = n.length), DTENABLED = numeric(length = n.length), DEADTIME = numeric(length = n.length), MAXLPOWER = numeric(length = n.length), XRF_ACQTIME = numeric(length = n.length), XRF_HV = numeric(length = n.length), XRF_CURR = numeric(length = n.length), XRF_DEADTIMEF = numeric(length = n.length), #68 to 79 DETECTOR_ID = integer(length = n.length), LOWERFILTER_ID = integer(length = n.length), UPPERFILTER_ID = integer(length = n.length), ENOISEFACTOR = numeric(length = n.length), MARKPOS_X1 = numeric(length = n.length), MARKPOS_Y1 = numeric(length = n.length), MARKPOS_X2 = numeric(length = n.length), MARKPOS_Y2 = numeric(length = n.length), MARKPOS_X3 = numeric(length = n.length), MARKPOS_Y3 = numeric(length = n.length), EXTR_START = numeric(length = n.length), EXTR_END = numeric(length = n.length), ##80 SEQUENCE = character(length = n.length) ) #end set data table #set variable for DPOINTS handling results.DATA<-list() ##set list for RESERVED values results.RESERVED <- rep(list(list()), n.length) # Open Connection --------------------------------------------------------- ##show warning if version number check has been 
cheated if(!is.null(forced.VersionNumber)){ warning("Argument 'forced.VersionNumber' has been used. BIN-file version might be not supported!") } #open connection con <- file(file, "rb") ##get information about file size file.size<-file.info(file) ##output if(verbose){cat(paste("\n[read_BIN2R()]\n\t >> ",file,sep=""), fill=TRUE)} ##set progressbar if(txtProgressBar & verbose){ pb<-txtProgressBar(min=0,max=file.size$size, char="=", style=3) } ##read data up to the end of con ##set ID temp.ID <- 0 # LOOP -------------------------------------------------------------------- ##start loop for import BIN data while(length(temp.VERSION<-readBin(con, what="raw", 1, size=1, endian="little"))>0) { ##force version number if(!is.null(forced.VersionNumber)){ temp.VERSION <- as.raw(forced.VersionNumber) } ##stop input if wrong VERSION if((temp.VERSION%in%VERSION.supported) == FALSE){ ##show error message error.text <- paste("[read_BIN2R()] BIN-format version (",temp.VERSION,") of this file is currently not supported! 
Supported version numbers are: ",paste(VERSION.supported,collapse=", "),".",sep="") stop(error.text) } ##print record ID for debugging purposes if(verbose){ if(show.record.number == TRUE){ cat(temp.ID,",", sep = "") if(temp.ID%%10==0){ cat("\n") } } } #empty byte position EMPTY<-readBin(con, what="raw", 1, size=1, endian="little") # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # BINX FORMAT SUPPORT ----------------------------------------------------- if(temp.VERSION == 05 | temp.VERSION == 06 | temp.VERSION == 07 | temp.VERSION == 08){ ##(1) Header size and strucutre ##LENGTH, PREVIOUS, NPOINTS, LTYPE temp <- readBin(con, what="int", 3, size=4, endian="little") temp.LENGTH <- temp[1] temp.PREVIOUS <- temp[2] temp.NPOINTS <- temp[3] #for temp.VERSION == 08 #RECTYPE if(temp.VERSION == 08){ temp.RECTYPE <- readBin(con, what="int", 1, size=1, endian="little", signed = FALSE) if(temp.RECTYPE != 0 & temp.RECTYPE != 1){ ##jump to the next record by stepping the record length minus the alread read bytes STEPPING <- readBin(con, what = "raw", size = 1, n = temp.LENGTH - 15) if(temp.RECTYPE == 128){ warning(paste0("[read_BIN2R()] ROI definition in data set #",temp.ID+1, "detected, but currently not supported, record skipped!", call. = FALSE)) }else{ if(!ignore.RECTYPE){ stop(paste0("[read_BIN2R()] Byte RECTYPE = ",temp.RECTYPE," is not supported in record #",temp.ID+1,"! Check your BIN-file!"), call. 
= FALSE) }else{ if(verbose) cat(paste0("\n[read_BIN2R()] Byte RECTYPE = ",temp.RECTYPE," is not supported in record #",temp.ID+1,", record skipped!")) temp.ID <- temp.ID + 1 } } next } } ##(2) Sample characteristics ##RUN, SET, POSITION, GRAINNUMBER, CURVENO, XCOORD, YCOORD temp <- readBin(con, what="int", 7, size=2, endian="little") temp.RUN <- temp[1] temp.SET <- temp[2] temp.POSITION <- temp[3] temp.GRAINNUMBER <- temp[4] temp.CURVENO <- temp[5] temp.XCOORD <- temp[6] temp.YCOORD <- temp[7] ##SAMPLE, COMMENT ##SAMPLE SAMPLE_SIZE<-readBin(con, what="int", 1, size=1, endian="little") temp.SAMPLE<-readChar(con, SAMPLE_SIZE, useBytes=TRUE) #however it should be set to 20 #step forward in con if(20-c(SAMPLE_SIZE)>0){ STEPPING<-readBin(con, what="raw", (20-c(SAMPLE_SIZE)), size=1, endian="little") } ##COMMENT COMMENT_SIZE<-readBin(con, what="int", 1, size=1, endian="little") temp.COMMENT<-readChar(con, COMMENT_SIZE, useBytes=TRUE) #set to 80 (manual) #step forward in con if(80-c(COMMENT_SIZE)>0){ STEPPING<-readBin(con, what="raw", (80-c(COMMENT_SIZE)), size=1, endian="little") } ##(3) Instrument and sequence characteristic ##SYSTEMID temp.SYSTEMID <- readBin(con, what="int", 1, size=2, endian="little") ##FNAME FNAME_SIZE<-readBin(con, what="int", 1, size=1, endian="little") ##correct for 0 file name length if(length(FNAME_SIZE)>0){ temp.FNAME<-readChar(con, FNAME_SIZE, useBytes=TRUE) #set to 100 (manual) }else{ FNAME_SIZE <- 0 } #step forward in con if(100-c(FNAME_SIZE)>0){ STEPPING<-readBin(con, what="raw", (100-c(FNAME_SIZE)), size=1, endian="little") } ##USER USER_SIZE<-readBin(con, what="int", 1, size=1, endian="little") ##correct for 0 user size length if (length(USER_SIZE) > 0) { temp.USER <- readChar(con, USER_SIZE, useBytes = TRUE) #set to 30 (manual) }else{ USER_SIZE <- 0 } #step forward in con if(30-c(USER_SIZE)>0){ STEPPING<-readBin(con, what="raw", (30-c(USER_SIZE)), size=1, endian="little") } ##TIME TIME_SIZE <- readBin(con, what="int", 1, size=1, 
endian="little") ##time size corrections for wrong time formats; set n to 6 for all values ##accoording the handbook by Geoff Duller, 2007 if(length(TIME_SIZE)>0){ temp.TIME<-readChar(con, TIME_SIZE, useBytes=TRUE) ##correct the mess by others if(nchar(temp.TIME) == 5) temp.TIME <- paste(c("0", temp.TIME), collapse = "") }else{ TIME_SIZE <- 0 } if(6-TIME_SIZE>0){ STEPPING<-readBin(con, what="raw", (6-TIME_SIZE), size=1, endian="little") } ##DATE DATE_SIZE<-readBin(con, what="int", 1, size=1, endian="little") ##date size corrections for wrong date formats; set n to 6 for all values ##accoording the handbook of Geoff Duller, 2007 DATE_SIZE<-6 temp.DATE<-readChar(con, DATE_SIZE, useBytes=TRUE) ##(4) Analysis ##DTYPE temp.DTYPE<-readBin(con, what="int", 1, size=1, endian="little") ##BL_TIME temp.BL_TIME<-readBin(con, what="double", 1, size=4, endian="little") ##BL_UNIT temp.BL_UNIT<-readBin(con, what="int", 1, size=1, endian="little") ##NORM1, NORM2, NORM3, BG temp <- readBin(con, what="double", 4, size=4, endian="little") temp.NORM1 <- temp[1] temp.NORM2 <- temp[2] temp.NORM3 <- temp[3] temp.BG <- temp[4] ##SHIFT temp.SHIFT<- readBin(con, what="integer", 1, size=2, endian="little") ##TAG temp.TAG <- readBin(con, what="int", 1, size=1, endian="little") ##RESERVED temp.RESERVED1 <-readBin(con, what="raw", 20, size=1, endian="little") ##(5) Measurement characteristics ##LTYPE temp.LTYPE <- readBin(con, what="int", 1, size=1, endian="little") ##LTYPESOURCE temp.LIGHTSOURCE <- readBin(con, what="int", 1, size=1, endian="little") ##LIGHTPOWER, LOW, HIGH, RATE temp <- readBin(con, what="double", 4, size=4, endian="little") temp.LIGHTPOWER <- temp[1] temp.LOW <- temp[2] temp.HIGH <- temp[3] temp.RATE <- temp[4] ##TEMPERATURE temp.TEMPERATURE <- readBin(con, what="int", 1, size=2, endian="little") ##MEASTEMP temp.MEASTEMP <- readBin(con, what="integer", 1, size=2, endian="little") ##AN_TEMP temp.AN_TEMP <- readBin(con, what="double", 1, size=4, endian="little") ##AN_TIME 
temp.AN_TIME <- readBin(con, what="double", 1, size=4, endian="little") ##DELAY, ON, OFF temp <- readBin(con, what="int", 3, size=2, endian="little") temp.TOLDELAY <- temp[1] temp.TOLON <- temp[2] temp.TOLOFF <- temp[3] ##IRR_TIME temp.IRR_TIME <- readBin(con, what="double", 1, size=4, endian="little") ##IRR_TYPE temp.IRR_TYPE <- readBin(con, what="int", 1, size=1, endian="little") ##IRR_DOSERATE temp.IRR_DOSERATE <- readBin(con, what="double", 1, size=4, endian="little") ##IRR_DOSERATEERR if(temp.VERSION != 05) temp.IRR_DOSERATEERR <- readBin(con, what="double", 1, size=4, endian="little") ##TIMESINCEIRR temp.TIMESINCEIRR <- readBin(con, what="integer", 1, size=4, endian="little") ##TIMETICK temp.TIMETICK <- readBin(con, what="double", 1, size=4, endian="little") ##ONTIME temp.ONTIME <- readBin(con, what="integer", 1, size=4, endian="little") ##STIMPERIOD temp.STIMPERIOD <- readBin(con, what="integer", 1, size=4, endian="little") ##GATE_ENABLED temp.GATE_ENABLED <- readBin(con, what="raw", 1, size=1, endian="little") ##GATE_START temp.GATE_START <- readBin(con, what="integer", 1, size=4, endian="little") ##GATE_STOP temp.GATE_STOP <- readBin(con, what="integer", 1, size=4, endian="little") ##PTENABLED temp.PTENABLED <- readBin(con, what="raw", 1, size=1, endian="little") ##DTENABLED temp.DTENABLED <- readBin(con, what="raw", 1, size=1, endian="little") ##DEADTIME, MAXLPOWER, XRF_ACQTIME, XRF_HV temp <- readBin(con, what="double", 4, size=4, endian="little") temp.DEADTIME <- temp[1] temp.MAXLPOWER <- temp[2] temp.XRF_ACQTIME <- temp[3] temp.XRF_HV <- temp[4] ##XRF_CURR temp.XRF_CURR <- readBin(con, what="integer", 1, size=4, endian="little") ##XRF_DEADTIMEF temp.XRF_DEADTIMEF <- readBin(con, what="double", 1, size=4, endian="little") ###Account for differences between V5, V6 and V7 if(temp.VERSION == 06){ ##RESERVED temp.RESERVED2<-readBin(con, what="raw", 24, size=1, endian="little") }else if(temp.VERSION == 05){ ##RESERVED temp.RESERVED2<-readBin(con, what="raw", 
4, size=1, endian="little") }else{ ##DETECTOR_ID temp.DETECTOR_ID <- readBin(con, what="int", 1, size=1, endian="little") ##LOWERFILTER_ID, UPPERFILTER_ID temp <- readBin(con, what="int", 2, size=2, endian="little") temp.LOWERFILTER_ID <- temp[1] temp.UPPERFILTER_ID <- temp[2] ##ENOISEFACTOR temp.ENOISEFACTOR <- readBin(con, what="double", 1, size=4, endian="little") ##CHECK FOR VERSION 08 if(temp.VERSION == 07){ ##RESERVED for version 07 temp.RESERVED2<-readBin(con, what="raw", 15, size=1, endian="little") }else{ ##MARKER_POSITION temp <- readBin(con, what="double", 6, size=4, endian="little") temp.MARPOS_X1 <- temp[1] temp.MARPOS_Y1 <- temp[2] temp.MARPOS_X2 <- temp[3] temp.MARPOS_Y2 <- temp[4] temp.MARPOS_X3 <- temp[5] temp.MARPOS_Y3 <- temp[6] ###EXTR_START, EXTR_END temp <- readBin(con, what="double", 2, size=4, endian="little") temp.EXTR_START <- temp[1] temp.EXTR_END <- temp[2] temp.RESERVED2<-readBin(con, what="raw", 42, size=1, endian="little") } } #DPOINTS temp.DPOINTS<-readBin(con, what="integer", temp.NPOINTS, size=4, endian="little") }else if(temp.VERSION == 04 | temp.VERSION == 03){ ## ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ##START BIN FILE FORMAT SUPPORT (vers. 
03 and 04) ##LENGTH, PREVIOUS, NPOINTS, LTYPE temp <- readBin(con, what="int", 3, size=2, endian="little") temp.LENGTH <- temp[1] temp.PREVIOUS <- temp[2] temp.NPOINTS <- temp[3] ##LTYPE temp.LTYPE<-readBin(con, what="int", 1, size=1, endian="little") ##LOW, HIGH, RATE temp <- readBin(con, what="double", 3, size=4, endian="little") temp.LOW <- temp[1] temp.HIGH <- temp[2] temp.RATE <- temp[3] temp.TEMPERATURE<-readBin(con, what="integer", 1, size=2, endian="little") ##XCOORD, YCOORD, TOLDELAY, TOLON, TOLOFF temp <- readBin(con, what="integer", 5, size=2, endian="little") temp.XCOORD <- temp[1] temp.YCOORD <- temp[2] temp.TOLDELAY <- temp[3] temp.TOLON <- temp[4] temp.TOLOFF <- temp[5] ##POSITION temp.POSITION<-readBin(con, what="int", 1, size=1, endian="little") ##RUN temp.RUN<-readBin(con, what="int", 1, size=1, endian="little") ##TIME TIME_SIZE<-readBin(con, what="int", 1, size=1, endian="little") ##time size corrections for wrong time formats; set n to 6 for all values ##accoording the handbook of Geoff Duller, 2007 TIME_SIZE<-6 temp.TIME<-readChar(con, TIME_SIZE, useBytes=TRUE) ##DATE DATE_SIZE<-readBin(con, what="int", 1, size=1, endian="little") ##date size corrections for wrong date formats; set n to 6 for all values ##accoording the handbook of Geoff Duller, 2007 DATE_SIZE<-6 temp.DATE<-readChar(con, DATE_SIZE, useBytes=TRUE) ##SEQUENCE SEQUENCE_SIZE<-readBin(con, what="int", 1, size=1, endian="little") temp.SEQUENCE<-readChar(con, SEQUENCE_SIZE, useBytes=TRUE) #step forward in con if(8-SEQUENCE_SIZE>0){ STEPPING<-readBin(con, what="raw", (8-c(SEQUENCE_SIZE)),size=1, endian="little") } ##USER USER_SIZE<-readBin(con, what="int", 1, size=1, endian="little") temp.USER<-readChar(con, USER_SIZE, useBytes=FALSE) #step forward in con if(8-c(USER_SIZE)>0){ STEPPING<-readBin(con, what="raw", (8-c(USER_SIZE)), size=1, endian="little") } ##DTYPE temp.DTYPE<-readBin(con, what="int", 1, size=1, endian="little") ##IRR_TIME temp.IRR_TIME<-readBin(con, what="double", 1, 
size=4, endian="little") ##IRR_TYPE temp.IRR_TYPE<-readBin(con, what="int", 1, size=1, endian="little") ##IRR_UNIT temp.IRR_UNIT<-readBin(con, what="int", 1, size=1, endian="little") ##BL_TIME temp.BL_TIME<-readBin(con, what="double", 1, size=4, endian="little") ##BL_UNIT temp.BL_UNIT<-readBin(con, what="int", 1, size=1, endian="little") ##AN_TEMP, AN_TIME, NORM1, NORM2, NORM3, BG temp <- readBin(con, what="double", 6, size=4, endian="little") temp.AN_TEMP <- temp[1] temp.AN_TIME <- temp[2] temp.NORM1 <- temp[3] temp.NORM2 <- temp[4] temp.NORM3 <- temp[5] temp.BG <- temp[6] ##SHIFT temp.SHIFT<-readBin(con, what="integer", 1, size=2, endian="little") ##SAMPLE SAMPLE_SIZE<-readBin(con, what="int", 1, size=1, endian="little") temp.SAMPLE<-readChar(con, SAMPLE_SIZE, useBytes=TRUE) #however it should be set to 20 #step forward in con if(20-c(SAMPLE_SIZE)>0){ STEPPING<-readBin(con, what="raw", (20-c(SAMPLE_SIZE)), size=1, endian="little") } ##COMMENT COMMENT_SIZE<-readBin(con, what="int", 1, size=1, endian="little") temp.COMMENT<-readChar(con, COMMENT_SIZE, useBytes=TRUE) #set to 80 (manual) #step forward in con if(80-c(COMMENT_SIZE)>0){ STEPPING<-readBin(con, what="raw", (80-c(COMMENT_SIZE)), size=1, endian="little") } ##LIGHTSOURCE, SET, TAG temp <- readBin(con, what="int", 3, size=1, endian="little") temp.LIGHTSOURCE <- temp[1] temp.SET <- temp[2] temp.TAG <- temp[3] ##GRAIN temp.GRAIN<-readBin(con, what="integer", 1, size=2, endian="little") ##LPOWER temp.LPOWER<-readBin(con, what="double", 1, size=4, endian="little") ##SYSTEMID temp.SYSTEMID<-readBin(con, what="integer", 1, size=2, endian="little") ##Unfortunately an inconsitent BIN-file structure forces a differenciation ... 
if(temp.VERSION == 03){ ##RESERVED temp.RESERVED1<-readBin(con, what="raw", 36, size=1, endian="little") ##ONTIME, OFFTIME temp <- readBin(con, what="double", 2, size=4, endian="little") temp.ONTIME <- temp[1] temp.OFFTIME <- temp[2] ##Enable flags #GateEnabled for v 06 temp.ENABLE_FLAGS <- readBin(con, what="raw", 1, size=1, endian="little") temp.GATE_ENABLED <- temp.ENABLE_FLAGS ##ONGATEDELAY, OFFGATEDELAY temp <- readBin(con, what="double", 2, size=4, endian="little") temp.GATE_START <- temp[1] temp.GATE_STOP <- temp[2] ##RESERVED temp.RESERVED2<-readBin(con, what="raw", 1, size=1, endian="little") }else{ ##RESERVED temp.RESERVED1<-readBin(con, what="raw", 20, size=1, endian="little") ##CURVENO temp.CURVENO <- readBin(con, what="integer", 1, size=2, endian="little") ##TIMETICK temp.TIMETICK <- readBin(con, what="double", 1, size=4, endian="little") ##ONTIME, STIMPERIOD temp <- readBin(con, what="integer", 2, size=4, endian="little") temp.ONTIME <- temp[1] temp.STIMPERIOD <- temp[2] ##GATE_ENABLED temp.GATE_ENABLED <- readBin(con, what="raw", 1, size=1, endian="little") ##ONGATEDELAY, OFFGATEDELAY temp <- readBin(con, what="double", 2, size=4, endian="little") temp.GATE_START <- temp[1] temp.GATE_END <- temp[2] temp.GATE_STOP <- temp.GATE_END ##PTENABLED temp.PTENABLED <- readBin(con, what="raw", 1, size=1, endian="little") ##RESERVED temp.RESERVED2<-readBin(con, what="raw", 10, size=1, endian="little") } #DPOINTS temp.DPOINTS<-readBin(con, what="integer", temp.NPOINTS, size=4, endian="little") }else{ stop("[read_BIN2R()] Unsupported BIN/BINX-file version.") } #endif:format support ##END BIN FILE FORMAT SUPPORT ## ==========================================================================# #SET UNIQUE ID temp.ID <- temp.ID+1 ##update progress bar if(txtProgressBar & verbose){ setTxtProgressBar(pb, seek(con,origin="current")) } ##set for equal values with different names if(!is.na(temp.GRAINNUMBER)){temp.GRAIN <- temp.GRAINNUMBER} 
if(!is.na(temp.GRAIN)){temp.GRAINNUMBER <- temp.GRAIN} if(!is.na(temp.LIGHTPOWER)){temp.LPOWER <- temp.LIGHTPOWER} if(!is.na(temp.LPOWER)){temp.LIGHTPOWER <- temp.LPOWER} temp.SEL <- if(temp.TAG == 1){TRUE}else{FALSE} ##replace values in the data.table with values results.METADATA[temp.ID, `:=` ( ID = temp.ID, SEL = temp.SEL, VERSION = as.numeric(temp.VERSION), LENGTH = temp.LENGTH, PREVIOUS = temp.PREVIOUS, NPOINTS = temp.NPOINTS, RECTYPE = temp.RECTYPE, RUN = temp.RUN, SET = temp.SET, POSITION = temp.POSITION, GRAIN = temp.GRAIN, GRAINNUMBER = temp.GRAINNUMBER, CURVENO = temp.CURVENO, XCOORD = temp.XCOORD, YCOORD = temp.YCOORD, SAMPLE = temp.SAMPLE, COMMENT = temp.COMMENT, SYSTEMID = temp.SYSTEMID, FNAME = temp.FNAME, USER = temp.USER, TIME = temp.TIME, DATE = temp.DATE, DTYPE = as.character(temp.DTYPE), BL_TIME = temp.BL_TIME, BL_UNIT = temp.BL_UNIT, NORM1 = temp.NORM1, NORM2 = temp.NORM2, NORM3 = temp.NORM3, BG = temp.BG, SHIFT = temp.SHIFT, TAG = temp.TAG, LTYPE = as.character(temp.LTYPE), LIGHTSOURCE = as.character(temp.LIGHTSOURCE), LPOWER = temp.LPOWER, LIGHTPOWER = temp.LIGHTPOWER, LOW = temp.LOW, HIGH = temp.HIGH, RATE = temp.RATE, TEMPERATURE = temp.TEMPERATURE, MEASTEMP = temp.MEASTEMP, AN_TEMP = temp.AN_TEMP, AN_TIME = temp.AN_TIME, TOLDELAY = temp.TOLDELAY, TOLON = temp.TOLON, TOLOFF = temp.TOLOFF, IRR_TIME = temp.IRR_TIME, IRR_TYPE = temp.IRR_TYPE, IRR_UNIT = temp.IRR_UNIT, IRR_DOSERATE = temp.IRR_DOSERATE, IRR_DOSERATEERR = temp.IRR_DOSERATEERR, TIMESINCEIRR = temp.TIMESINCEIRR, TIMETICK = temp.TIMETICK, ONTIME = temp.ONTIME, OFFTIME = temp.OFFTIME, STIMPERIOD = temp.STIMPERIOD, GATE_ENABLED = as.numeric(temp.GATE_ENABLED), ENABLE_FLAGS = as.numeric(temp.ENABLE_FLAGS), GATE_START = temp.GATE_START, GATE_STOP = temp.GATE_STOP, PTENABLED = as.numeric(temp.PTENABLED), DTENABLED = as.numeric(temp.DTENABLED), DEADTIME = temp.DEADTIME, MAXLPOWER = temp.MAXLPOWER, XRF_ACQTIME = temp.XRF_ACQTIME, XRF_HV = temp.XRF_HV, XRF_CURR = temp.XRF_CURR, XRF_DEADTIMEF 
= temp.XRF_DEADTIMEF, DETECTOR_ID = temp.DETECTOR_ID, LOWERFILTER_ID = temp.LOWERFILTER_ID, UPPERFILTER_ID = temp.UPPERFILTER_ID, ENOISEFACTOR = temp.ENOISEFACTOR, MARKPOS_X1 = temp.MARKPOS_X1, MARKPOS_Y1 = temp.MARKPOS_Y1, MARKPOS_X2 = temp.MARKPOS_X2, MARKPOS_Y2 = temp.MARKPOS_Y2, MARKPOS_X3 = temp.MARKPOS_X3, MARKPOS_Y3 = temp.MARKPOS_Y3, SEQUENCE = temp.SEQUENCE )] results.DATA[[temp.ID]] <- temp.DPOINTS results.RESERVED[[temp.ID]][[1]] <- temp.RESERVED1 results.RESERVED[[temp.ID]][[2]] <- temp.RESERVED2 ##BREAK ##stop loop if record limit is reached if (!is.null(n.records)) { if (n.records == temp.ID) { break() } } ##reset values temp.GRAINNUMBER <- NA temp.GRAIN <- NA }#endwhile::end lopp ##close if(txtProgressBar & verbose){close(pb)} ##output if(verbose){cat(paste("\t >> ",temp.ID," records have been read successfully!\n\n", sep=""))} # Further limitation -------------------------------------------------------------------------- if(!is.null(position)){ ##check whether the position is valid at all if (all(position %in% results.METADATA[["POSITION"]])) { results.METADATA <- results.METADATA[which(results.METADATA[["POSITION"]] %in% position),] results.DATA <- results.DATA[results.METADATA[["ID"]]] ##re-calculate ID ... otherwise it will not match results.METADATA[["ID"]] <- 1:length(results.DATA ) ##show a message message("[read_BIN2R()] The record index has been recalculated!") }else{ valid.position <- paste(unique(results.METADATA[["POSITION"]]), collapse = ", ") warning( paste0( "Position limitation omitted. 
At least one position number is not valid, valid position numbers are: ", valid.position ) ) } } ##check for position that have no data at all (error during the measurement) if(zero_data.rm){ zero_data.check <- which(sapply(results.DATA, length) == 0) ##remove records if there is something to remove if(length(zero_data.check) != 0){ results.METADATA <- results.METADATA[-zero_data.check, ] results.DATA[zero_data.check] <- NULL ##recalculate record index results.METADATA[["ID"]] <- 1:nrow(results.METADATA) warning( paste0( "\n[read_BIN2R()] ", length(zero_data.check), " zero data records detected and removed: ", paste(zero_data.check, collapse = ", "), ". \n\n >> Record index re-calculated." ) ) } } ##check for duplicated entries and remove them if wanted, but only if we have more than 2 records if (n.records > 1) { duplication.check <- suppressWarnings(which(c( 0, vapply( 2:length(results.DATA), FUN = function(x) { all(results.DATA[[x - 1]] == results.DATA[[x]]) }, FUN.VALUE = 1 ) ) == 1)) if (length(duplication.check) != 0) { if (duplicated.rm) { ##remove records results.METADATA <- results.METADATA[-duplication.check, ] results.DATA[duplication.check] <- NULL ##recalculate record index results.METADATA[["ID"]] <- 1:nrow(results.METADATA) ##message if(verbose) { message( paste0( "[read_BIN2R()] duplicated record(s) detected and removed: ", paste(duplication.check, collapse = ", "), ". Record index re-calculated." ) ) } } else{ warning( paste0( "[read_BIN2R()] duplicated record(s) detected: ", paste(duplication.check, collapse = ", "), ". \n\n >> You should consider 'duplicated.rm = TRUE'." 
) ) } } } ##produce S4 object for output object <- set_Risoe.BINfileData( METADATA = results.METADATA, DATA = results.DATA, .RESERVED = results.RESERVED) # Convert Translation Matrix Values --------------------------------------- if (!show.raw.values) { ##LIGHTSOURCE CONVERSION object@METADATA[["LIGHTSOURCE"]] <- unname(LIGHTSOURCE.lookup[object@METADATA[["LIGHTSOURCE"]]]) ##LTYPE CONVERSION object@METADATA[["LTYPE"]] <- unname(LTYPE.lookup[object@METADATA[["LTYPE"]]]) ##DTYPE CONVERSION object@METADATA[["DTYPE"]] <- unname(DTYPE.lookup[object@METADATA[["DTYPE"]]]) ##CHECK for oddly set LTYPES, this may happen in old BIN-file versions if (object@METADATA[["VERSION"]][1] == 3) { object@METADATA[["LTYPE"]] <- sapply(1:length(object@METADATA[["LTYPE"]]), function(x) { if (object@METADATA[["LTYPE"]][x] == "OSL" & object@METADATA[["LIGHTSOURCE"]][x] == "IR diodes/IR Laser") { return("IRSL") } else{ return(object@METADATA[["LTYPE"]][x]) } }) } ##TIME CONVERSION, do not do for odd time formats as this could cause problems during export if (TIME_SIZE == 6) { object@METADATA[["TIME"]] <- format(strptime(as.character(object@METADATA[["TIME"]]), "%H%M%S"), "%H:%M:%S") } } ## check for empty BIN-files names ... if so, set the name of the file as BIN-file name ## This can happen if the user uses different equipment if(all(is.na(object@METADATA[["FNAME"]]))){ object@METADATA[["FNAME"]] <- strsplit(x = basename(file), split = ".", fixed = TRUE)[[1]][1] } # Fast Forward -------------------------------------------------------------------------------- ## set fastForward to TRUE if one of this arguments is used if(any(names(list(...)) %in% names(formals(Risoe.BINfileData2RLum.Analysis))[-1]) & fastForward == FALSE) { fastForward <- TRUE warning("[read_BIN2R()] automatically reset 'fastForward = TRUE'", call. 
= FALSE) } ##return values ##with fast fastForward they will be converted directly to a list of RLum.Analysis objects if(fastForward){ object <- Risoe.BINfileData2RLum.Analysis(object, ...) ##because we expect a list if(!is.list(object)){ object <- list(object) } } return(object) } Luminescence/R/calc_IEU.R0000644000176200001440000003777613232373766014654 0ustar liggesusers#' Apply the internal-external-uncertainty (IEU) model after Thomsen et al. #' (2007) to a given De distribution #' #' Function to calculate the IEU De for a De data set. #' #' This function uses the equations of Thomsen et al. (2007). The parameters a #' and b are estimated from dose-recovery experiments. #' #' @param data [RLum.Results-class] or [data.frame] (**required**): #' for [data.frame]: two columns with De `(data[,1])` and #' De error `(values[,2])` #' #' @param a [numeric] (**required**): #' slope #' #' @param b [numeric] (**required**): #' intercept #' #' @param interval [numeric] (**required**): #' fixed interval (e.g. 5 Gy) used for iteration of Dbar, from the mean to #' Lowest.De used to create Graph.IEU `[Dbar.Fixed vs Z]` #' #' @param decimal.point [numeric] (*with default*): #' number of decimal points for rounding calculations (e.g. 2) #' #' @param plot [logical] (*with default*): #' plot output #' #' @param ... further arguments (`trace, verbose`). #' #' @return #' Returns a plot (*optional*) and terminal output. In addition an #' [RLum.Results-class] object is returned containing the #' following elements: #' #' \item{.$summary}{[data.frame] summary of all relevant model results.} #' \item{.$data}{[data.frame] original input data} #' \item{.$args}{[list] used arguments} #' \item{.$call}{[call] the function call} #' \item{.$tables}{[list] a list of data frames containing all calculation tables} #' #' The output should be accessed using the function [get_RLum]. 
#'
#' @section Function version: 0.1.2
#'
#' @author
#' Rachel Smedley, Geography & Earth Sciences, Aberystwyth University (United Kingdom) \cr
#' Based on an excel spreadsheet and accompanying macro written by Kristina Thomsen.
#'
#' @seealso [plot], [calc_CommonDose], [calc_CentralDose], [calc_FiniteMixture],
#' [calc_FuchsLang2001], [calc_MinDose]
#'
#' @references
#' Smedley, R.K., 2015. A new R function for the Internal External Uncertainty (IEU) model.
#' Ancient TL 33, 16-21.
#'
#' Thomsen, K.J., Murray, A.S., Boetter-Jensen, L. & Kinahan, J.,
#' 2007. Determination of burial dose in incompletely bleached fluvial samples
#' using single grains of quartz. Radiation Measurements 42, 370-379.
#'
#' @examples
#'
#' ## load data
#' data(ExampleData.DeValues, envir = environment())
#'
#' ## apply the IEU model
#' ieu <- calc_IEU(ExampleData.DeValues$CA1, a = 0.2, b = 1.9, interval = 1)
#'
#' @md
#' @export
calc_IEU <- function(
  data,
  a,
  b,
  interval,
  decimal.point = 2,
  plot = TRUE,
  ...
) {

  ##==========================================================================##
  ## CONSISTENCY CHECK OF INPUT DATA
  ##==========================================================================##
  if (!missing(data)) {
    if (!is(data, "data.frame") & !is(data, "RLum.Results")) {
      ## note: the argument of stop() is 'call.' (previously 'call', which
      ## only worked via partial matching)
      stop("[calc_IEU()] 'data' object has to be of type 'data.frame' or 'RLum.Results'!",
           call. = FALSE)
    } else if (is(data, "RLum.Results")) {
      data <- get_RLum(data)
    }
  }

  ##==========================================================================##
  ## ... ARGUMENTS
  ##==========================================================================##
  extraArgs <- list(...)

  ## console output
  verbose <- if ("verbose" %in% names(extraArgs)) extraArgs$verbose else TRUE

  ## trace calculations
  trace <- if ("trace" %in% names(extraArgs)) extraArgs$trace else FALSE

  # TODO: main, xlab, ylab, xlim, ylim, pch, col

  ##==========================================================================##
  ## INTERNAL HELPERS (this calculation was previously copy-pasted four times)
  ##==========================================================================##

  ## Compute the IEU statistics table for a given Dbar (Thomsen et al., 2007).
  ## For every rank j the weighted mean Z of the j lowest De values is derived
  ## together with the external (EXT) and internal (INT) uncertainty estimates
  ## and their ratio R = sqrt(INT/EXT). If 'Z.digits' is not NULL, Z is rounded
  ## before the R statistics are computed (used within the Dbar iteration).
  .calc_Table.IEU <- function(data, a, b, Dbar, Z.digits = NULL) {
    N <- nrow(data)
    Rank.number <- seq_len(N)

    ## total error: measurement error plus the systematic term (a * Dbar + b)
    De.Total.Error <- sqrt((data$De.Error^2) + (((a * Dbar) + b)^2))

    ## running weighted mean of the j lowest De values
    Z.top <- cumsum(data$De / (De.Total.Error^2))
    Z.bottom <- cumsum(1 / (De.Total.Error^2))
    Z <- Z.top / Z.bottom
    if (!is.null(Z.digits))
      Z <- round(Z, Z.digits)

    ## external uncertainty (weighted scatter about Z); rank 1 yields NaN (0/0)
    EXT.top <- vapply(Rank.number, function(j) {
      sum(((data$De[1:j] - Z[j])^2) / (De.Total.Error[1:j]^2))
    }, numeric(1))
    EXT <- EXT.top / ((Rank.number - 1) * Z.bottom)

    ## internal uncertainty
    INT <- 1 / Z.bottom

    R <- sqrt(INT / EXT)
    R.Error <- (2 * (Rank.number - 1))^(-0.5)

    ## NOTE: the last column is now consistently named "R.Uncertainty"
    ## (previously the last iteration renamed it "R.Error", which broke the
    ## final plot when the Dbar iteration converged immediately)
    data.frame(
      Rank.number = Rank.number,
      De = data$De,
      De.Error = De.Total.Error,
      Z = Z,
      EXT.top = EXT.top,
      EXT = EXT,
      INT = INT,
      R = R,
      R.Uncertainty = R.Error)
  }

  ## Linearly interpolate Zbar (and its error) at R = 1 between the highest
  ## rank with R >= 1 and the following rank.
  .interpolate_R_unity <- function(Table.IEU, decimal.point) {
    Unity <- Table.IEU[Table.IEU$R >= 1, ]
    Max <- max(Unity$Rank.number, na.rm = TRUE)
    Above.Z <- Table.IEU[Max, 4]
    Above.Error <- Table.IEU[Max, 6]
    Below.Z <- Table.IEU[Max + 1, 4]
    Below.Error <- Table.IEU[Max + 1, 6]
    Above.R <- Table.IEU[Max, 8]
    Below.R <- Table.IEU[Max + 1, 8]
    Slope <- (Above.R - Below.R) / (Above.Z - Below.Z)
    Intercept <- Above.R - (Slope * Above.Z)
    list(
      Zbar = round((1 - Intercept) / Slope, decimal.point),
      ## bug fix: the error is now rounded from its own value (previously the
      ## error of the very first run was erroneously reused in the fixed
      ## interval iteration, i.e. 'round(IEU.Error, ...)' instead of
      ## 'round(Zbar.Error, ...)')
      Zbar.Error = round(max(sqrt(Above.Error), sqrt(Below.Error)), decimal.point),
      n = Max + 1,
      Below.R = Below.R)
  }

  ## Plot R against Z (used for the trace plots and the final plot); rank 1
  ## is excluded from the y-axis scaling as its R uncertainty is infinite.
  .plot_R_vs_Z <- function(Table.IEU) {
    idx <- 2:nrow(Table.IEU)
    ymin <- min(Table.IEU$R[idx] - Table.IEU$R.Uncertainty[idx])
    ymax <- max(Table.IEU$R[idx] + Table.IEU$R.Uncertainty[idx])
    ylim <- c(ifelse(ymin > 0, 0, ymin), ymax)
    plot(Table.IEU$Z, Table.IEU$R,
         type = "b",
         ylab = expression(paste("R = [", alpha["in"], "/", alpha["ex"], "]")),
         xlab = "Z [Gy]",
         ylim = ylim)
    arrows(Table.IEU$Z, Table.IEU$R + Table.IEU$R.Uncertainty,
           Table.IEU$Z, Table.IEU$R - Table.IEU$R.Uncertainty,
           col = 1, angle = 90, length = 0.05, code = 3)
    abline(1, 0, untf = FALSE, lty = 3)
  }

  ##==========================================================================##
  ## CALCULATIONS
  ##==========================================================================##
  Table.Fixed.Iteration <- data.frame(matrix(nrow = 0, ncol = 9))
  colnames(data) <- c("De", "De.Error")
  data <- data[order(data$De), ]
  Dbar <- round(mean(data$De), decimal.point)
  Lowest.De <- round(data$De[1], decimal.point)

  # (a) Calculate IEU at fixed intervals of Dbar starting from the Mean and
  # subtracting the interval until Dbar is < Lowest.De; this creates a plot
  Table.IEU <- .calc_Table.IEU(data, a, b, Dbar)
  res <- .interpolate_R_unity(Table.IEU, decimal.point)
  IEU.De <- res$Zbar
  IEU.Error <- res$Zbar.Error
  n <- res$n
  Below.R <- res$Below.R
  Dbar.Fixed <- Dbar - interval
  Dbar.Mean <- c(1, Dbar, Dbar.Fixed, IEU.De, IEU.Error, n, Below.R, a, b)

  repeat {
    if (Dbar.Fixed < Lowest.De) {
      break
    } else {
      Dbar <- Dbar.Fixed
    }
    Table.IEU <- .calc_Table.IEU(data, a, b, Dbar)
    res <- .interpolate_R_unity(Table.IEU, decimal.point)
    Zbar <- res$Zbar
    Zbar.Error <- res$Zbar.Error
    n <- res$n
    Below.R <- res$Below.R
    Dbar.Fixed <- Dbar - interval
    Table.Fixed.Iteration <- rbind(
      Table.Fixed.Iteration,
      cbind(1, Dbar, Dbar.Fixed, Zbar, Zbar.Error, n, Below.R, a, b))
  }

  Table.Fixed.Iteration <- rbind(Dbar.Mean, Table.Fixed.Iteration)
  colnames(Table.Fixed.Iteration) <- c(FALSE, "Dbar", "Dbar.Fixed", "Zbar",
                                       "Zbar.Error", "n", "Below.R", "a", "b")

  if (plot) {
    plot(Table.Fixed.Iteration$Dbar, Table.Fixed.Iteration$Zbar,
         type = "b",
         ylab = "Zbar, weighted mean (Gy)",
         xlab = "Dbar (Gy)",
         asp = 1/1)
    arrows(Table.Fixed.Iteration$Dbar,
           Table.Fixed.Iteration$Zbar + Table.Fixed.Iteration$Zbar.Error,
           Table.Fixed.Iteration$Dbar,
           Table.Fixed.Iteration$Zbar - Table.Fixed.Iteration$Zbar.Error,
           col = 1, angle = 90, length = 0.05, code = 3)
    abline(0, 1, untf = FALSE, lty = 3)
  }

  # (b) Calculate Dbar by iteration from [Dbar = Lowest.De] until [IEU.De = Dbar];
  # this calculates the IEU De
  Dbar <- Lowest.De
  Table.IEU <- .calc_Table.IEU(data, a, b, Dbar)
  res <- .interpolate_R_unity(Table.IEU, decimal.point)
  IEU.De <- res$Zbar
  IEU.Error <- res$Zbar.Error
  n <- res$n

  repeat {
    if (IEU.De <= Dbar) {
      break
    } else {
      Dbar <- IEU.De
    }
    ## note: within the iteration Z is rounded before deriving the R statistics
    Table.IEU <- .calc_Table.IEU(data, a, b, Dbar, Z.digits = decimal.point)

    # to reduce the number of plots and increase performance
    # intermediate calculations are only plotted when trace = TRUE
    if (plot && trace)
      .plot_R_vs_Z(Table.IEU)

    res <- .interpolate_R_unity(Table.IEU, decimal.point)
    IEU.De <- res$Zbar
    IEU.Error <- res$Zbar.Error
    n <- res$n

    if (trace) {
      message(sprintf("[Iteration of Dbar] \n Dbar: %.4f \n IEU.De: %.4f \n IEU.Error: %.4f \n n: %i \n R: %.4f \n",
                      Dbar, IEU.De, IEU.Error, n, res$Below.R))
    }
  }

  # final plot
  if (plot)
    .plot_R_vs_Z(Table.IEU)

  Table.Results <- data.frame(Dbar, IEU.De, IEU.Error, n, a, b)
  colnames(Table.Results) <- c("Dbar", "IEU.De (Gy)", "IEU.Error (Gy)",
                               "Number of De", "a", "b")

  ##==========================================================================##
  ## TERMINAL OUTPUT
  ##==========================================================================##
  if (verbose) {
    message(sprintf(
      "\n [calc_IEU] \n\n Dbar: %.2f \n IEU.De (Gy): %.2f \n IEU.Error (Gy): %.2f Number of De: %.0f \n a: %.4f \n b: %.4f",
      Table.Results[[1]], Table.Results[[2]], Table.Results[[3]],
      Table.Results[[4]], Table.Results[[5]], Table.Results[[6]]))
  }

  ##==========================================================================##
  ## RETURN VALUES
  ##==========================================================================##
  summary <- Table.Results[, c(-1, -5, -6)]
  colnames(summary) <- c("de", "de_err", "n")
  call <- sys.call()
  args <- list(a = a, b = b, interval = interval,
               decimal.point = decimal.point, plot = plot)

  newRLumResults.calc_IEU <- set_RLum(
    class = "RLum.Results",
    data = list(
      summary = summary,
      data = data,
      args = args,
      call = call,
      tables = list(
        Table.IEUCalculations = Table.IEU,
        Table.Fixed.Iteration = Table.Fixed.Iteration,
        Table.IEUResults = Table.Results
      )))

  invisible(newRLumResults.calc_IEU)
}
Luminescence/R/convert_Wavelength2Energy.R0000644000176200001440000001644113571743147020332 0ustar liggesusers#'@title Emission Spectra Conversion from Wavelength to Energy Scales
#'
#'@description The function provides a convenient and fast way to convert emission spectra wavelength
#'to energy scales. The function works on [RLum.Data.Spectrum-class], [data.frame] and [matrix] and
#'a [list] of such objects. The function was written to smooth the workflow while analysing
#'emission spectra data. This is in particular useful if you want to further treat your data
#'and apply, e.g., a signal deconvolution.
#' #'@details #' #' The intensity of the spectrum is re-calcualted using the following approach to recalulate #' wavelength and corresponding intensity values #' (e.g., Appendix 4 in Blasse and Grabmeier, 1994; Mooney and Kambhampati, 2013): #' #' \deqn{\phi_{E} = \phi_{\lambda} * \lambda^2 / (hc)} #' #' with #' \eqn{\phi_{E}} the intensity per interval of energy \eqn{E} (1/eV), #' \eqn{\phi_{\lambda}} the intensity per interval of wavelength \eqn{\lambda} #' (1/nm) and #' \eqn{h} (eV * s) the Planck constant and \eqn{c} (nm/s) the velocity of light. #' #' For transforming the wavelength axis (x-values) the equation as follow is used #' #' \deqn{E = hc/\lambda} #' #' @param object [RLum.Data.Spectrum-class], [data.frame], [matrix] (**required**): input object to be converted. #' If the input is not an [RLum.Data.Spectrum-class], the first column is always treated as the wavelength #' column. The function supports a list of allowed input objects. #' #' @param digits [integer] (*with default*): set the number of digits on the returned energy axis #' #' @param order [logical] (*with default*): enables/disables sorting of the values in ascending energy #' order. After the conversion the longest wavelength has the lowest energy value and the shortest #' wavelength the highest. While this is correct, some R functions expect increasing x-values. #' #' @return The same object class as provided as input is returned. #' #' @note This conversion works solely for emission spectra. In case of absorption spectra only #' the x-axsis has to be converted. #' #' @section Function version: 0.1.1 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) #' #' @seealso [RLum.Data.Spectrum-class], [plot_RLum] #' #' @keywords IO #' #' @references #' #' Blasse, G., Grabmaier, B.C., 1994. Luminescent Materials. Springer. #' #' Mooney, J., Kambhampati, P., 2013. 
Get the Basics Right: Jacobian Conversion of Wavelength and #' Energy Scales for Quantitative Analysis of Emission Spectra. J. Phys. Chem. Lett. 4, 3316–3318. #' \doi{10.1021/jz401508t} #' #' Mooney, J., Kambhampati, P., 2013. Correction to “Get the Basics Right: Jacobian Conversion of #' Wavelength and Energy Scales for Quantitative Analysis of Emission Spectra.” J. Phys. Chem. Lett. 4, #' 3316–3318. \doi{10.1021/jz401508t} #' #' **Further reading** #' #' Angulo, G., Grampp, G., Rosspeintner, A., 2006. Recalling the appropriate representation of #' electronic spectra. Spectrochimica Acta Part A: Molecular and Biomolecular Spectroscopy 65, #' 727–731. \doi{10.1016/j.saa.2006.01.007} #' #' Wang, Y., Townsend, P.D., 2013. Potential problems in collection and data processing of #' luminescence signals. Journal of Luminescence 142, 202–211. \doi{10.1016/j.jlumin.2013.03.052} #' #' @examples #' #' ##=====================## #' ##(1) Literature example after Mooney et al. (2013) #' ##(1.1) create matrix #' m <- matrix( #' data = c(seq(400, 800, 50), rep(1, 9)), ncol = 2) #' #'##(1.2) set plot function to reproduce the #'##literature figure #'p <- function(m) { #' plot(x = m[, 1], y = m[, 2]) #' polygon( #' x = c(m[, 1], rev(m[, 1])), #' y = c(m[, 2], rep(0, nrow(m)))) #' for (i in 1:nrow(m)) { #' lines(x = rep(m[i, 1], 2), y = c(0, m[i, 2])) #' } #'} #' #'##(1.3) plot curves #' par(mfrow = c(1,2)) #' p(m) #' p(convert_Wavelength2Energy(m)) #' #'##=====================## #'##(2) Another example using density curves #' ##create dataset #' xy <- density( #' c(rnorm(n = 100, mean = 500, sd = 20), #' rnorm(n = 100, mean = 800, sd = 20))) #' xy <- data.frame(xy$x, xy$y) #' #' ##plot #' par(mfrow = c(1,2)) #' plot( #' xy, #' type = "l", #' xlim = c(150, 1000), #' xlab = "Wavelength [nm]", #' ylab = "Luminescence [a.u.]" #' ) #'plot( #' convert_Wavelength2Energy(xy), #' xy$y, #' type = "l", #' xlim = c(1.23, 8.3), #' xlab = "Energy [eV]", #' ylab = "Luminescence [a.u.]" #' ) #' 
#'@md
#'@export
convert_Wavelength2Energy <- function(
  object,
  digits = 3L,
  order = FALSE
){

  # Self-call -----------------------------------------------------------------
  ## process list input element-wise; 'digits' and 'order' are forwarded
  ## explicitly (bug fix: previously they were silently dropped for list input)
  if(class(object)[1] == "list"){
    return(lapply(object, convert_Wavelength2Energy, digits = digits, order = order))
  }

  # Conversion function -------------------------------------------------------
  ## Jacobian conversion of the intensity values; works on a matrix whose
  ## rownames carry the wavelength scale (nm). In either case we play safe,
  ## i.e. we create row and column names and strip them later depending on
  ## the input.
  .conv_intensity <- function(m){
    h <- 4.135667662e-15 #eV * s (Planck constant)
    c <- 299792458e+09   #nm/s (velocity of light)

    ##convert count values: phi_E = phi_lambda * lambda^2 / (h * c)
    m[] <- m * as.numeric(rownames(m))^2 / (h * c)

    ##modify rownames: E = h * c / lambda
    rownames(m) <- round((h * c) / as.numeric(rownames(m)), digits)

    ##return results
    return(m)
  }

  # Treat input data ----------------------------------------------------------
  if(class(object)[1] == "RLum.Data.Spectrum"){
    ##check whether the object might have this scale already
    ##this only works on RLum.Data.Spectrum objects and is sugar for using RLum-objects
    if(any("curveDescripter" %in% names(object@info))){
      if(any(grepl(pattern = "energy", x = tolower(object@info$curveDescripter), fixed = TRUE))){
        message("[convert_Wavelength2Energy()] Your object has already an energy scale, nothing done!")
        return(object)
      }
    }

    ##convert data
    object@data <- .conv_intensity(object@data)

    #sort values if needed (ascending energy order)
    if(order){
      object@data <- object@data[order(as.numeric(rownames(object@data))),]
      rownames(object@data) <- sort(as.numeric(rownames(object@data)))
    }

    ##correct $curveDescripter (we do not attach the table, otherwise the object gets too big)
    if(any("curveDescripter" %in% names(object@info))){
      temp_descripter <- strsplit(object@info$curveDescripter, ";", TRUE)[[1]]
      temp_descripter[grepl(x = temp_descripter, pattern = "wavelength", fixed = TRUE)] <- "energy [eV]"
      object@info$curveDescripter <- paste(temp_descripter, collapse = ";")
    }

    ##return new object
    return(object)

  }else if(class(object)[1] == "matrix" || class(object)[1] == "data.frame"){
    ##the first column is treated as the wavelength axis, the rest as counts
    temp <- as.matrix(object[,2:ncol(object)])

    ##set rownames
    rownames(temp) <- object[,1]

    ##convert values
    temp <- .conv_intensity(temp)

    ##construct new full matrix
    temp <- cbind(as.numeric(rownames(temp)), temp)
    rownames(temp) <- rownames(object)
    colnames(temp) <- colnames(object)

    ##order on request (remember, here it is the first column)
    if(order) temp <- temp[order(temp[,1]),]

    ##return
    if(class(object)[1] == "data.frame") return(as.data.frame(temp))
    return(temp)

  }else{
    stop(
      paste0(
        "[convert_Wavelength2Energy()] Class '",
        class(object)[1],
        "' not supported as input!"
      ),
      call. = FALSE
    )
  }
}
Luminescence/R/plot_GrowthCurve.R0000644000176200001440000020205213604172511016530 0ustar liggesusers#' Fit and plot a growth curve for luminescence data (Lx/Tx against dose)
#'
#' A dose response curve is produced for luminescence measurements using a
#' regenerative or additive protocol. The function supports interpolation and
#' extrapolation to calculate the equivalent dose.
#'
#' **Fitting methods**
#'
#' For all options (except for the `LIN`, `QDR` and the `EXP OR LIN`),
#' the [minpack.lm::nlsLM] function with the `LM` (Levenberg-Marquardt algorithm)
#' algorithm is used. Note: For historical reasons for the Monte Carlo
#' simulations partly the function [nls] using the `port` algorithm.
#'
#' The solution is found by transforming the function or using [uniroot].
#'
#' `LIN`: fits a linear function to the data using
#' [lm]: \deqn{y = m*x+n}
#'
#' `QDR`: fits a linear function to the data using
#' [lm]: \deqn{y = a + b * x + c * x^2}
#'
#' `EXP`: try to fit a function of the form
#' \deqn{y = a*(1-exp(-(x+c)/b))}
#' Parameters b and c are approximated by a linear fit using [lm]. Note: b = D0
#'
#' `EXP OR LIN`: works for some cases where an `EXP` fit fails.
#' If the `EXP` fit fails, a `LIN` fit is done instead.
#' #' `EXP+LIN`: tries to fit an exponential plus linear function of the #' form: #' \deqn{y = a*(1-exp(-(x+c)/b)+(g*x))} #' The De is calculated by iteration. #' #' **Note:** In the context of luminescence dating, this #' function has no physical meaning. Therefore, no D0 value is returned. #' #' `EXP+EXP`: tries to fit a double exponential function of the form #' \deqn{y = (a1*(1-exp(-(x)/b1)))+(a2*(1-exp(-(x)/b2)))} #' This fitting procedure is not robust against wrong start parameters and #' should be further improved. #' #' `GOK`: tries to fit the general-order kinetics function after #' Guralnik et al. (2015) of the form of #' #' \deqn{y = a*(1-(1+(1/b)*x*c)^(-1/c))} #' #' where **c > 0** is a kinetic order modifier #' (not to be confused with **c** in `EXP` or `EXP+LIN`!). #' #' **Fit weighting** #' #' If the option `fit.weights = TRUE` is chosen, weights are calculated using #' provided signal errors (Lx/Tx error): #' \deqn{fit.weights = 1/error/(sum(1/error))} #' #' **Error estimation using Monte Carlo simulation** #' #' Error estimation is done using a Monte Carlo (MC) simulation approach. A set of Lx/Tx values is #' constructed by randomly drawing curve data from samled from normal #' distributions. The normal distribution is defined by the input values (mean #' = value, sd = value.error). Then, a growth curve fit is attempted for each #' dataset resulting in a new distribution of single De values. The [sd] #' of this distribution is becomes then the error of the De. With increasing #' iterations, the error value becomes more stable. #' **Note:** It may take some calculation time with increasing MC runs, #' especially for the composed functions (`EXP+LIN` and `EXP+EXP`).\cr #' Each error estimation is done with the function of the chosen fitting method. #' #' **Subtitle information** #' #' To avoid plotting the subtitle information, provide an empty user mtext #' `mtext = ""`. To plot any other subtitle text, use `mtext`. 
#' #' @param sample [data.frame] (**required**): #' data frame with three columns for x=Dose,y=LxTx,z=LxTx.Error, y1=TnTx. #' The column for the test dose response is optional, but requires 'TnTx' as #' column name if used. For exponential fits at least three dose points #' (including the natural) should be provided. #' #' @param na.rm [logical] (*with default*): #' excludes `NA` values from the data set prior to any further operations. #' #' @param mode [character] (*with default*): #' selects calculation mode of the function. #' - `"interpolation"` (default) calculates the De by interpolation, #' - `"extrapolation"` calculates the De by extrapolation and #' - `"alternate"` calculates no De and just fits the data points. #' #' Please note that for option `"regenrative"` the first point is considered #' as natural dose #' #' @param fit.method [character] (*with default*): #' function used for fitting. Possible options are: #' - `LIN`, #' - `QDR`, #' - `EXP`, #' - `EXP OR LIN`, #' - `EXP+LIN`, #' - `EXP+EXP` or #' - `GOK`. #' #' See details. #' #' @param fit.force_through_origin [logical] (*with default*) #' allow to force the fitted function through the origin. #' For `method = "EXP+EXP"` and `method = "GOK"` the function will go through the origin in either case, #' so this option will have no effect. #' #' @param fit.weights [logical] (*with default*): #' option whether the fitting is done with or without weights. See details. #' #' @param fit.includingRepeatedRegPoints [logical] (*with default*): #' includes repeated points for fitting (`TRUE`/`FALSE`). #' #' @param fit.NumberRegPoints [integer] (*optional*): #' set number of regeneration points manually. By default the number of all (!) #' regeneration points is used automatically. 
#' #' @param fit.NumberRegPointsReal [integer] (*optional*): #' if the number of regeneration points is provided manually, the value of the #' real, regeneration points = all points (repeated points) including reg 0, #' has to be inserted. #' #' @param fit.bounds [logical] (*with default*): #' set lower fit bounds for all fitting parameters to 0. Limited for the use #' with the fit methods `EXP`, `EXP+LIN`, `EXP OR LIN` and `GOK`. #' Argument to be inserted for experimental application only! #' #' @param NumberIterations.MC [integer] (*with default*): #' number of Monte Carlo simulations for error estimation. See details. #' #' @param output.plot [logical] (*with default*): #' plot output (`TRUE/FALSE`). #' #' @param output.plotExtended [logical] (*with default*): #' If' `TRUE`, 3 plots on one plot area are provided: #' 1. growth curve, #' 2. histogram from Monte Carlo error simulation and #' 3. a test dose response plot. #' #' If `FALSE`, just the growth curve will be plotted. #' **Requires:** `output.plot = TRUE`. #' #' @param output.plotExtended.single [logical] (*with default*): #' single plot output (`TRUE/FALSE`) to allow for plotting the results in #' single plot windows. Requires `output.plot = TRUE` and #' `output.plotExtended = TRUE`. #' #' @param cex.global [numeric] (*with default*): #' global scaling factor. #' #' @param txtProgressBar [logical] (*with default*): #' enables or disables txtProgressBar. If `verbose = FALSE` also no #' txtProgressBar is shown. #' #' @param verbose [logical] (*with default*): #' enables or disables terminal feedback. #' #' @param ... Further arguments and graphical parameters to be passed. Note: #' Standard arguments will only be passed to the growth curve plot. 
Supported: #' `xlim`, `ylim`, `main`, `xlab`, `ylab` #' #' @return #' Along with a plot (so far wanted) an `RLum.Results` object is returned containing, #' the slot `data` contains the following elements: #' #' \tabular{lll}{ #' **DATA.OBJECT** \tab **TYPE** \tab **DESCRIPTION** \cr #' `..$De` : \tab `data.frame` \tab Table with De values \cr #' `..$De.MC` : \tab `numeric` \tab Table with De values from MC runs \cr #' `..$Fit` : \tab [nls] or [lm] \tab object from the fitting for `EXP`, `EXP+LIN` and `EXP+EXP`. #' In case of a resulting linear fit when using `LIN`, `QDR` or `EXP OR LIN` \cr #' `..$Formula` : \tab [expression] \tab Fitting formula as R expression \cr #' `..$call` : \tab `call` \tab The original function call\cr #' } #' #' @section Function version: 1.10.10 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France)\cr #' Michael Dietze, GFZ Potsdam (Germany) #' #' @references #' #' Berger, G.W., Huntley, D.J., 1989. Test data for exponential fits. Ancient TL 7, 43-46. #' #' Guralnik, B., Li, B., Jain, M., Chen, R., Paris, R.B., Murray, A.S., Li, S.-H., Pagonis, P., #' Herman, F., 2015. Radiation-induced growth and isothermal decay of infrared-stimulated luminescence #' from feldspar. Radiation Measurements 81, 224-231. 
#' #' @seealso [nls], [RLum.Results-class], [get_RLum], [minpack.lm::nlsLM], #' [lm], [uniroot] #' #' @examples #' #' ##(1) plot growth curve for a dummy data.set and show De value #' data(ExampleData.LxTxData, envir = environment()) #' temp <- plot_GrowthCurve(LxTxData) #' get_RLum(temp) #' #' ##(1b) horizontal plot arrangement #' layout(mat = matrix(c(1,1,2,3), ncol = 2)) #' plot_GrowthCurve(LxTxData, output.plotExtended.single = TRUE) #' #' ##(1c) to access the fitting value try #' get_RLum(temp, data.object = "Fit") #' #' ##(2) plot the growth curve only - uncomment to use #' ##pdf(file = "~/Desktop/Growth_Curve_Dummy.pdf", paper = "special") #' plot_GrowthCurve(LxTxData) #' ##dev.off() #' #' ##(3) plot growth curve with pdf output - uncomment to use, single output #' ##pdf(file = "~/Desktop/Growth_Curve_Dummy.pdf", paper = "special") #' plot_GrowthCurve(LxTxData, output.plotExtended.single = TRUE) #' ##dev.off() #' #' ##(4) plot resulting function for given intervall x #' x <- seq(1,10000, by = 100) #' plot( #' x = x, #' y = eval(temp$Formula), #' type = "l" #' ) #' #' ##(5) plot using the 'extrapolation' mode #' LxTxData[1,2:3] <- c(0.5, 0.001) #' print(plot_GrowthCurve(LxTxData,mode = "extrapolation")) #' #' ##(6) plot using the 'alternate' mode #' LxTxData[1,2:3] <- c(0.5, 0.001) #' print(plot_GrowthCurve(LxTxData,mode = "alternate")) #' #' ##(7) import and fit test data set by Berger & Huntley 1989 #' QNL84_2_unbleached <- #' read.table(system.file("extdata/QNL84_2_unbleached.txt", package = "Luminescence")) #' #' results <- plot_GrowthCurve( #' QNL84_2_unbleached, #' mode = "extrapolation", #' plot = FALSE, #' verbose = FALSE) #' #' #calculate confidence interval for the parameters #' #as alternative error estimation #' confint(results$Fit, level = 0.68) #' #' #' \dontrun{ #' QNL84_2_bleached <- #' read.table(system.file("extdata/QNL84_2_bleached.txt", package = "Luminescence")) #' STRB87_1_unbleached <- #' 
read.table(system.file("extdata/STRB87_1_unbleached.txt", package = "Luminescence")) #' STRB87_1_bleached <- #' read.table(system.file("extdata/STRB87_1_bleached.txt", package = "Luminescence")) #' #' print( #' plot_GrowthCurve( #' QNL84_2_bleached, #' mode = "alternate", #' plot = FALSE, #' verbose = FALSE)$Fit) #' #' print( #' plot_GrowthCurve( #' STRB87_1_unbleached, #' mode = "alternate", #' plot = FALSE, #' verbose = FALSE)$Fit) #' #' print( #' plot_GrowthCurve( #' STRB87_1_bleached, #' mode = "alternate", #' plot = FALSE, #' verbose = FALSE)$Fit) #' } #' #' @md #' @export plot_GrowthCurve <- function( sample, na.rm = TRUE, mode = "interpolation", fit.method = "EXP", fit.force_through_origin = FALSE, fit.weights = TRUE, fit.includingRepeatedRegPoints = TRUE, fit.NumberRegPoints = NULL, fit.NumberRegPointsReal = NULL, fit.bounds = TRUE, NumberIterations.MC = 100, output.plot = TRUE, output.plotExtended = TRUE, output.plotExtended.single = FALSE, cex.global = 1, txtProgressBar = TRUE, verbose = TRUE, ... ) { ##1. Check input variable switch( class(sample), matrix = sample <- as.data.frame(sample), list = sample <- as.data.frame(sample), numeric = stop( "[plot_GrowthCurve()] 'sample' needs to be of type 'data.frame'!", call. = FALSE ) ) ##2. check if sample contains a least three rows if(length(sample[[1]])<3 & fit.method != "LIN"){ stop("\n [plot_GrowthCurve()] At least two regeneration points are needed!", call. 
= FALSE) } ##2.1 check column numbers; we assume that in this particular case no error value ##was provided, e.g., set all errors to 0 if(ncol(sample) == 2){ sample <- cbind(sample, 0) } ##2.2 check for inf data in the data.frame if(any(is.infinite(unlist(sample)))){ #https://stackoverflow.com/questions/12188509/cleaning-inf-values-from-an-r-dataframe #This is slow, but it does not break with previous code sample <- do.call(data.frame, lapply(sample, function(x) replace(x, is.infinite(x),NA))) warning("[plot_GrowthCurve()] Inf values found, replaced by NA!", call. = FALSE) } ##2.3 check whether the dose value is equal all the time if(sum(abs(diff(sample[[1]])), na.rm = TRUE) == 0){ try(stop("[plot_GrowthCurve()] All points have the same dose. NULL returned!", call. = FALSE)) return(NULL) } ## optionally, count and exclude NA values and print result if(na.rm) { n.NA <- sum(!complete.cases(sample)) if (n.NA == 1) { warning("[plot_GrowthCurve()] 1 NA value excluded.", call. = FALSE) } else if (n.NA > 1) { warning(paste(" [plot_GrowthCurve()]", n.NA, "NA values excluded."), call. = FALSE) } sample <- na.exclude(sample) ##Check if anything is left after removal if(nrow(sample) == 0){ warning("[plot_GrowthCurve()] Sorry, after NA removal nothing is left from the data set! NULL returned") return(NULL) } }else{ stop("[plot_GrowthCurve()] Sorry, the argument 'na.rm' is defunct and will be removed in future!", call. = FALSE) } ##3. verbose mode if(!verbose){ txtProgressBar <- FALSE } ##remove rownames from data.frame, as this could causes errors for the reg point calculation rownames(sample) <- NULL ##zero values in the data.frame are not allowed for the y-column if(length(sample[sample[,2]==0,2])>0){ warning(paste("[plot_GrowthCurve()]", length(sample[sample[,2]==0,2]), "values with 0 for Lx/Tx detected; replaced by ", .Machine$double.eps), call. = FALSE) sample[sample[, 2] == 0, 2] <- .Machine$double.eps } ##1. 
INPUT #1.0.1 calculate number of reg points if not set if(is.null(fit.NumberRegPoints)){ fit.NumberRegPoints<-length(sample[-1,1]) } if(is.null(fit.NumberRegPointsReal)){ fit.RegPointsReal <- which(!duplicated(sample[,1]) | sample[,1] != 0) fit.NumberRegPointsReal <- length(fit.RegPointsReal) } #1.1 Produce dataframe from input values, two options for different modes if(mode == "interpolation"){ xy <- data.frame(x=sample[2:(fit.NumberRegPoints+1),1],y=sample[2:(fit.NumberRegPoints+1),2]) y.Error <- sample[2:(fit.NumberRegPoints+1),3] }else if (mode == "extrapolation" || mode == "alternate") { xy <- data.frame( x = sample[1:(fit.NumberRegPoints+1),1], y = sample[1:(fit.NumberRegPoints+1),2]) y.Error <- sample[1:(fit.NumberRegPoints+1),3] }else{ stop("[plot_GrowthCurve()] Unknown input for argument 'mode'", call. = FALSE) } ##1.1.1 produce weights for weighted fitting if(fit.weights){ fit.weights <- 1 / abs(y.Error) / sum(1 / abs(y.Error)) if(is.na(fit.weights[1])){ fit.weights <- NA warning("[plot_GrowthCurve()] 'fit.weights' set to NA since the error column is invalid or 0.", call. 
= FALSE) } }else{ fit.weights <- rep(1, length(abs(y.Error))) } #1.2 Prepare data sets regeneration points for MC Simulation if (mode == "interpolation") { data.MC <- t(vapply( X = seq(2, fit.NumberRegPoints + 1, by = 1), FUN = function(x) { sample(rnorm( n = 10000, mean = sample[x, 2], sd = abs(sample[x, 3]) ), size = NumberIterations.MC, replace = TRUE) }, FUN.VALUE = vector("numeric", length = NumberIterations.MC) )) #1.3 Do the same for the natural signal data.MC.De <- numeric(NumberIterations.MC) data.MC.De <- sample(rnorm(10000, mean = sample[1, 2], sd = abs(sample[1, 3])), NumberIterations.MC, replace = TRUE) }else{ data.MC <- t(vapply( X = seq(1, fit.NumberRegPoints + 1, by = 1), FUN = function(x) { sample(rnorm( n = 10000, mean = sample[x, 2], sd = abs(sample[x, 3]) ), size = NumberIterations.MC, replace = TRUE) }, FUN.VALUE = vector("numeric", length = NumberIterations.MC) )) } #1.3 set x.natural x.natural <- vector("numeric", length = NumberIterations.MC) x.natural <- NA ##1.4 set initialise variables De <- NA De.Error <- NA ##============================================================================## # FITTING ---------------------------------------------------------------------- ##============================================================================## ##3. 
Fitting values with nonlinear least-squares estimation of the parameters ##set functions for fitting #EXP fit.functionEXP <- function(a,b,c,x) {a*(1-exp(-(x+c)/b))} fit.formulaEXP <- y ~ a * (1 - exp(-(x+c)/b)) #EXP+LIN fit.functionEXPLIN<-function(a,b,c,g,x) {a*(1-exp(-(x+c)/b)+(g*x))} fit.formulaEXPLIN <- y ~ a*(1-exp(-(x+c)/b)+(g*x)) #EXP+EXP fit.functionEXPEXP<-function(a1,a2,b1,b2,x){(a1*(1-exp(-(x)/b1)))+(a2*(1-exp(-(x)/b2)))} fit.formulaEXPEXP <- y ~ (a1*(1-exp(-(x)/b1)))+(a2*(1-exp(-(x)/b2))) #GOK fit.functionGOK <- function(a,b,c,x) { a*(1-(1+(1/b)*x*c)^(-1/c)) } fit.formulaGOK <- y ~ a*(1-(1+(1/b)*x*c)^(-1/c)) ##input data for fitting; exclude repeated RegPoints if (fit.includingRepeatedRegPoints == FALSE) { data <- data.frame(x = xy[[1]][!duplicated(xy[[1]])], y = xy[[2]][!duplicated(xy[[1]])]) fit.weights <- fit.weights[!duplicated(xy[[1]])] data.MC <- data.MC[!duplicated(xy[[1]]),,drop = FALSE] y.Error <- y.Error[!duplicated(xy[[1]])] xy <- xy[!duplicated(xy[[1]]),,drop = FALSE] }else{ data <- data.frame(xy) } ## for unknown reasons with only two points the nls() function is trapped in ## an endless mode, therefore the minimum length for data is 3 ## (2016-05-17) if((fit.method == "EXP" | fit.method == "EXP+LIN" | fit.method == "EXP+EXP" | fit.method == "EXP OR LIN") && length(data[,1])<=2){ ##set to LIN fit.method <- "LIN" warning("[plot_GrowthCurve()] fitting using an exponential term requires at least 3 dose points! 
fit.method set to 'LIN'") if(verbose){ if(verbose) message("[plot_GrowthCurve()] fit.method set to 'LIN', see warnings()") } } ##START PARAMETER ESTIMATION ##--------------------------------------------------------------------------## ##general setting of start parameters for fitting ##a - estimation for a a the maxium of the y-values (Lx/Tx) a <- max(data[,2]) ##b - get start parameters from a linear fit of the log(y) data ## (suppress the warning in case one parameter is negative) fit.lm <- try(lm(suppressWarnings(log(data$y))~data$x)) if(class(fit.lm) == "try-error"){ b <- 1 }else{ b <- as.numeric(1/fit.lm$coefficients[2]) } ##c - get start parameters from a linear fit - offset on x-axis fit.lm<-lm(data$y~data$x) c <- as.numeric(abs(fit.lm$coefficients[1]/fit.lm$coefficients[2])) #take slope from x - y scaling g <- max(data[,2]/max(data[,1])) #set D01 and D02 (in case of EXp+EXP) D01 <- NA D01.ERROR <- NA D02 <- NA D02.ERROR <- NA ##--------------------------------------------------------------------------## ##to be a little bit more flexible the start parameters varries within a normal distribution ##draw 50 start values from a normal distribution a start values if (fit.method != "LIN") { a.MC <- suppressWarnings(rnorm(50, mean = a, sd = a / 100)) if (!is.na(b)) { b.MC <- suppressWarnings(rnorm(50, mean = b, sd = b / 100)) } else{ b <- NA } c.MC <- suppressWarnings(rnorm(50, mean = c, sd = c / 100)) g.MC <- suppressWarnings(rnorm(50, mean = g, sd = g / 1)) ##set start vector (to avoid errors witin the loop) a.start <- NA b.start <- NA c.start <- NA g.start <- NA } ##--------------------------------------------------------------------------## #===========================================================================## #QDR# if (fit.method == "QDR"){ ##Do fitting with option to force curve through the origin if(fit.force_through_origin){ ##linear fitting ... 
polynomial fit <- lm(data$y ~ 0 + I(data$x) + I(data$x^2), weights = fit.weights) ##give function for uniroot De.fs <- function(x, y) { 0 + coef(fit)[1] * x + coef(fit)[2] * x ^ 2 - y } }else{ ##linear fitting ... polynomial fit <- lm(data$y ~ I(data$x) + I(data$x^2), weights = fit.weights) ##give function for uniroot De.fs <- function(x, y) { coef(fit)[1] + coef(fit)[2] * x + coef(fit)[3] * x ^ 2 - y } } ##solve and get De if (mode == "interpolation") { De.uniroot <- try(uniroot(De.fs, y = sample[1, 2], lower = 0, upper = max(sample[, 1]) * 1.5), silent = TRUE) if (!inherits(De.uniroot, "try-error")) { De <- De.uniroot$root if (verbose) { if (mode != "alternate") { writeLines(paste0("[plot_GrowthCurve()] Fit: ", fit.method, " (", mode,") ", "| De = ", round(De,2))) } } } else{ if (verbose) writeLines("[plot_GrowthCurve()] no solution found for QDR fit") De <- NA } }else if (mode == "extrapolation"){ De.uniroot <- try(uniroot(De.fs, y = 0, lower = -1e06, upper = max(sample[, 1]) * 1.5), silent = TRUE) if (!inherits(De.uniroot, "try-error")) { De <- De.uniroot$root if (verbose) { if (mode != "alternate") { writeLines(paste0("[plot_GrowthCurve()] Fit: ", fit.method, " (", mode,") ", "| De = ", round(abs(De), 2))) } } } else{ if (verbose) writeLines("[plot_GrowthCurve()] no solution found for QDR fit") De <- NA } }else{ De <- NA } # +++++++++++++++++++++++++++++++++++++++++ ##set progressbar if(txtProgressBar){ cat("\n\t Run Monte Carlo loops for error estimation of the QDR fit\n") pb<-txtProgressBar(min=0,max=NumberIterations.MC, char="=", style=3) } #start loop for Monte Carlo Error estimation fit.MC <- sapply(1:NumberIterations.MC, function(i){ data <- data.frame(x=xy$x, y=data.MC[,i]) if(fit.force_through_origin){ ##linear fitting ... 
polynomial fit.MC <- lm(data$y ~ 0 + I(data$x) + I(data$x^2), weights = fit.weights) ##give function for uniroot De.fs.MC <- function(x, y) { 0 + coef(fit.MC)[1] * x + coef(fit.MC)[2] * x ^ 2 - y 0 + coef(fit.MC)[1] * x + coef(fit.MC)[2] * x ^ 2 - y } }else{ ##linear fitting ... polynomial fit.MC <- lm(data$y ~ I(data$x) + I(data$x^2), weights = fit.weights) ##give function for uniroot De.fs.MC <- function(x, y) { coef(fit.MC)[1] + coef(fit.MC)[2] * x + coef(fit.MC)[3] * x ^ 2 - y } } if (mode == "interpolation") { ##solve and get De De.uniroot.MC <- try(uniroot( De.fs.MC, y = data.MC.De[i], lower = 0, upper = max(sample[, 1]) * 1.5 ), silent = TRUE) if (!inherits(De.uniroot.MC, "try-error")) { De.MC <- De.uniroot.MC$root } else{ De.MC <- NA } }else if (mode == "extrapolation"){ ##solve and get De De.uniroot.MC <- try(uniroot( De.fs.MC, y = 0, lower = -1e6, upper = max(sample[, 1]) * 1.5 ), silent = TRUE) if (!inherits(De.uniroot.MC, "try-error")) { De.MC <- De.uniroot.MC$root } else{ De.MC <- NA } }else{ De.MC <- NA } ##update progress bar if(txtProgressBar) setTxtProgressBar(pb, i) return(De.MC) }) if(txtProgressBar) close(pb) x.natural<- fit.MC } #===========================================================================## #EXP --------------- if (fit.method=="EXP" | fit.method=="EXP OR LIN" | fit.method=="LIN"){ if((is.na(a) | is.na(b) | is.na(c)) && fit.method != "LIN"){ try(stop("[plot_GrowthCurve()] Fit could not be applied for this data set. NULL returned!", call. 
= FALSE)) return(NULL) } if(fit.method!="LIN"){ ##FITTING on GIVEN VALUES## # --use classic R fitting routine to fit the curve ##try to create some start parameters from the input values to make ## the fitting more stable for(i in 1:50){ a <- a.MC[i] b <- b.MC[i] c <- c.MC[i] fit.initial <- suppressWarnings(try(nls( y ~ fit.functionEXP(a, b, c, x), data = data, start = c(a = a, b = b, c = c), trace = FALSE, algorithm = "port", lower = c(a = 0, b > 0, c = 0), nls.control( maxiter = 100, warnOnly = TRUE, minFactor = 1 / 2048 ) ), silent = TRUE )) if(class(fit.initial)!="try-error"){ #get parameters out of it parameters<-(coef(fit.initial)) b.start[i] <- as.vector((parameters["b"])) a.start[i] <- as.vector((parameters["a"])) c.start[i] <- as.vector((parameters["c"])) } } ##used median as start parameters for the final fitting a <- median(na.exclude(a.start)) b <- median(na.exclude(b.start)) c <- median(na.exclude(c.start)) ##exception for b: if b is 1 it is likely to b wrong and should be reset if(!is.na(b) && b == 1) b <- mean(b.MC) #FINAL Fit curve on given values fit <- try(minpack.lm::nlsLM( formula = fit.formulaEXP, data = data, start = list(a = a, b = b,c = 0), weights = fit.weights, trace = FALSE, algorithm = "LM", lower = if (fit.bounds) { c(0,0,0) }else{ c(-Inf,-Inf,-Inf) }, upper = if (fit.force_through_origin) { c(Inf, Inf, 0) }else{ c(Inf, Inf, Inf) }, control = minpack.lm::nls.lm.control(maxiter = 500) ), silent = TRUE ) if (inherits(fit, "try-error") & inherits(fit.initial, "try-error")){ if(verbose) writeLines("[plot_GrowthCurve()] try-error for EXP fit") }else{ ##this is to avoid the singular convergence failure due to a perfect fit at the beginning ##this may happen especially for simulated data if(inherits(fit, "try-error") & !inherits(fit.initial, "try-error")){ fit <- fit.initial rm(fit.initial) } #get parameters out of it parameters <- (coef(fit)) b <- as.vector((parameters["b"])) a <- as.vector((parameters["a"])) c <- as.vector((parameters["c"])) 
#calculate De if(mode == "interpolation"){ De <- suppressWarnings(-c-b*log(1-sample[1,2]/a)) }else if (mode == "extrapolation"){ De <- suppressWarnings(-c-b*log(1-0/a)) }else{ De <- NA } #print D01 value D01 <- b if (verbose) { if (mode != "alternate") { writeLines(paste0( "[plot_GrowthCurve()] Fit: ", fit.method, " (", mode, ")", " | De = ", round(abs(De), digits = 2), " | D01 = ", round(D01, 2) )) } } #EXP MC ----- ##Monte Carlo Simulation # --Fit many curves and calculate a new De +/- De_Error # --take De_Error #set variables var.b<-vector(mode="numeric", length=NumberIterations.MC) var.a<-vector(mode="numeric", length=NumberIterations.MC) var.c<-vector(mode="numeric", length=NumberIterations.MC) #start loop for (i in 1:NumberIterations.MC) { ##set data set data <- data.frame(x = xy$x,y = data.MC[,i]) fit.MC <- try(minpack.lm::nlsLM( formula = fit.formulaEXP, data = data, start = list(a = a, b = b,c = c), weights = fit.weights, trace = FALSE, algorithm = "LM", lower = if (fit.bounds) { c(0,0,0) }else{ c(-Inf,-Inf,-Inf) }, upper = if (fit.force_through_origin) { c(Inf, Inf, 0) }else{ c(Inf, Inf, Inf) }, control = minpack.lm::nls.lm.control(maxiter = 500) ), silent = TRUE ) #get parameters out of it including error handling if (class(fit.MC)=="try-error") { x.natural[i] <- NA }else { #get parameters out parameters<-coef(fit.MC) var.b[i]<-as.vector((parameters["b"])) #D0 var.a[i]<-as.vector((parameters["a"])) #Imax var.c[i]<-as.vector((parameters["c"])) #calculate x.natural for error calculation if(mode == "interpolation"){ x.natural[i]<-suppressWarnings( -var.c[i]-var.b[i]*log(1-data.MC.De[i]/var.a[i])) }else if(mode == "extrapolation"){ x.natural[i]<-suppressWarnings( abs(-var.c[i]-var.b[i]*log(1-0/var.a[i]))) }else{ x.natural[i] <- NA } } }#end for loop ##write D01.ERROR D01.ERROR <- sd(var.b, na.rm = TRUE) ##remove values rm(var.b, var.a, var.c) }#endif::try-error fit }#endif:fit.method!="LIN" # 
======================================================================== # ##LIN ----- ##two options: just linear fit or LIN fit after the EXP fit failed #set fit object, if fit objekt was not set before if(exists("fit")==FALSE){fit<-NA} if ((fit.method=="EXP OR LIN" & class(fit)=="try-error") | fit.method=="LIN" | length(data[,1])<2) { ##Do fitting again as just allows fitting through the origin if(fit.force_through_origin){ fit.lm<-lm(data$y ~ 0 + data$x, weights = fit.weights) #calculate De if(mode == "interpolation"){ De <- sample[1,2]/fit.lm$coefficients[1] }else{ De <- 0 } }else{ fit.lm<-lm(data$y ~ data$x, weights = fit.weights) #calculate De if(mode == "interpolation"){ De <- (sample[1,2]-fit.lm$coefficients[1])/fit.lm$coefficients[2] }else if(mode == "extrapolation"){ De <- (0-fit.lm$coefficients[1])/fit.lm$coefficients[2] } } ##remove vector labels De <- as.numeric(as.character(De)) if (verbose) { if (mode != "alternate") { writeLines(paste0( "[plot_GrowthCurve()] Fit: ", fit.method, " (", mode, ") ", "| De = ", round(abs(De), 2) )) } } #start loop for Monte Carlo Error estimation #LIN MC --------- for (i in 1:NumberIterations.MC) { data <- data.frame(x = xy$x, y = data.MC[, i]) if(fit.force_through_origin){ ##do fitting fit.lmMC <- lm(data$y ~ 0 + data$x, weights=fit.weights) #calculate x.natural if(mode == "interpolation"){ x.natural[i] <- data.MC.De[i]/fit.lmMC$coefficients[1] }else if (mode == "extrapolation"){ x.natural[i] <- 0 } }else{ ##do fitting fit.lmMC <- lm(data$y~ data$x, weights=fit.weights) #calculate x.natural if(mode == "interpolation"){ x.natural[i] <- (data.MC.De[i]-fit.lmMC$coefficients[1])/ fit.lmMC$coefficients[2] }else if (mode == "extrapolation"){ x.natural[i] <- abs((0-fit.lmMC$coefficients[1])/ fit.lmMC$coefficients[2]) } } }#endfor::loop for MC #correct for fit.method fit.method <- "LIN" ##set fit object if(fit.method=="LIN"){fit<-fit.lm} }else{fit.method<-"EXP"}#endif::LIN }#end if EXP (this includes the LIN fit option) 
#=========================================================================== # #=========================================================================== # # EXP+LIN ---- else if (fit.method=="EXP+LIN") { ##try some start parameters from the input values to makes the fitting more stable for(i in 1:length(a.MC)){ a<-a.MC[i];b<-b.MC[i];c<-c.MC[i];g<-g.MC[i] ##---------------------------------------------------------## ##start: with EXP function fit.EXP<-try(nls(y~fit.functionEXP(a,b,c,x), data=data, start=c(a=a,b=b,c=c), trace=FALSE, algorithm="port", lower=c(a=0,b>10,c=0), nls.control(maxiter=100,warnOnly=FALSE,minFactor=1/1024) ),silent=TRUE) if(class(fit.EXP)!="try-error"){ #get parameters out of it parameters<-(coef(fit.EXP)) b<-as.vector((parameters["b"])) a<-as.vector((parameters["a"])) c<-as.vector((parameters["c"])) ##end: with EXP function ##---------------------------------------------------------## } fit<-try(nls(y~fit.functionEXPLIN(a,b,c,g,x), data=data, start=c(a=a,b=b,c=c,g=g), trace=FALSE, algorithm="port", lower = if(fit.bounds){c(a=0,b>10,c=0,g=0)}else{c(a = -Inf,b = -Inf,c = -Inf,g = -Inf)}, nls.control(maxiter=500,warnOnly=FALSE,minFactor=1/2048) #increase max. 
iterations ),silent=TRUE) if(class(fit)!="try-error"){ #get parameters out of it parameters<-(coef(fit)) b.start[i]<-as.vector((parameters["b"])) a.start[i]<-as.vector((parameters["a"])) c.start[i]<-as.vector((parameters["c"])) g.start[i]<-as.vector((parameters["g"])) } }##end for loop ##used mean as start parameters for the final fitting a<-median(na.exclude(a.start)) b<-median(na.exclude(b.start)) c<-median(na.exclude(c.start)) g<-median(na.exclude(g.start)) ##perform final fitting fit <- try(minpack.lm::nlsLM( formula = fit.formulaEXPLIN, data = data, start = list(a = a, b = b,c = c, g = g), weights = fit.weights, trace = FALSE, algorithm = "LM", lower = if (fit.bounds) { c(0,10,0,0) }else{ c(-Inf,-Inf,-Inf,-Inf) }, upper = if (fit.force_through_origin) { c(Inf, Inf, 0, Inf) }else{ c(Inf, Inf, Inf, Inf) }, control = minpack.lm::nls.lm.control(maxiter = 500) ), silent = TRUE ) #if try error stop calculation if(class(fit)!="try-error"){ #get parameters out of it parameters<-(coef(fit)) b<-as.vector((parameters["b"])) a<-as.vector((parameters["a"])) c<-as.vector((parameters["c"])) g<-as.vector((parameters["g"])) #problem: analytically it is not easy to calculate x, #use uniroot to solve that problem ... 
readjust function first if (mode == "interpolation") { f.unirootEXPLIN <- function(a, b, c, g, x, LnTn) { fit.functionEXPLIN(a, b, c, g, x) - LnTn } temp.De <- try(uniroot( f = f.unirootEXPLIN, interval = c(0, max(xy$x) * 1.5), tol = 0.001, a = a, b = b, c = c, g = g, LnTn = sample[1, 2], extendInt = "yes", maxiter = 3000 ), silent = TRUE) if (class(temp.De) != "try-error") { De <- temp.De$root } else{ De <- NA } }else if(mode == "extrapolation"){ f.unirootEXPLIN <- function(a, b, c, g, x, LnTn) { fit.functionEXPLIN(a, b, c, g, x) - LnTn } temp.De <- try(uniroot( f = f.unirootEXPLIN, interval = c(-1e06, max(xy$x) * 1.5), tol = 0.001, a = a, b = b, c = c, g = g, LnTn = 0, extendInt = "yes", maxiter = 3000 ), silent = TRUE) if (class(temp.De) != "try-error") { De <- temp.De$root } else{ De <- NA } }else{ De <- NA } if (verbose) { if (mode != "alternate") { writeLines(paste0( "[plot_GrowthCurve()] Fit: ", fit.method, " (", mode, ")" , " | De = ", round(abs(De),2) )) } } ##Monte Carlo Simulation for error estimation # --Fit many curves and calculate a new De +/- De_Error # --take De_Error #set variables var.b <- vector(mode="numeric", length=NumberIterations.MC) var.a <- vector(mode="numeric", length=NumberIterations.MC) var.c <- vector(mode="numeric", length=NumberIterations.MC) var.g <- vector(mode="numeric", length=NumberIterations.MC) ##set progressbar if(txtProgressBar){ cat("\n\t Run Monte Carlo loops for error estimation of the EXP+LIN fit\n") pb<-txtProgressBar(min=0,max=NumberIterations.MC, char="=", style=3) } #start Monto Carlo loops for(i in 1:NumberIterations.MC){ data <- data.frame(x=xy$x,y=data.MC[,i]) ##perform MC fitting fit.MC <- try(minpack.lm::nlsLM( formula = fit.formulaEXPLIN, data = data, start = list(a = a, b = b,c = c, g = g), weights = fit.weights, trace = FALSE, algorithm = "LM", lower = if (fit.bounds) { c(0,10,0,0) }else{ c(-Inf,-Inf,-Inf, -Inf) }, control = minpack.lm::nls.lm.control(maxiter = 500) ), silent = TRUE ) #get parameters out of 
it including error handling if (class(fit.MC)=="try-error") { x.natural[i]<-NA }else { parameters <- coef(fit.MC) var.b[i] <- as.vector((parameters["b"])) var.a[i] <- as.vector((parameters["a"])) var.c[i] <- as.vector((parameters["c"])) var.g[i] <- as.vector((parameters["g"])) #problem: analytical it is not easy to calculate x, #use uniroot to solve this problem if (mode == "interpolation") { temp.De.MC <- try(uniroot( f = f.unirootEXPLIN, interval = c(0, max(xy$x) * 1.5), tol = 0.001, a = var.a[i], b = var.b[i], c = var.c[i], g = var.g[i], LnTn = data.MC.De[i] ), silent = TRUE) if (class(temp.De.MC) != "try-error") { x.natural[i] <- temp.De.MC$root } else{ x.natural[i] <- NA } } else if (mode == "extrapolation"){ temp.De.MC <- try(uniroot( f = f.unirootEXPLIN, interval = c(-1e6, max(xy$x) * 1.5), tol = 0.001, a = var.a[i], b = var.b[i], c = var.c[i], g = var.g[i], LnTn = 0 ), silent = TRUE) if (class(temp.De.MC) != "try-error") { x.natural[i] <- abs(temp.De.MC$root) } else{ x.natural[i] <- NA } }else{ x.natural[i] <- NA } } ##update progress bar if(txtProgressBar) setTxtProgressBar(pb, i) }#end for loop ##close if(txtProgressBar) close(pb) ##remove objects rm(var.b, var.a, var.c, var.g) }else{ #print message if (verbose) { if (mode != "alternate") { writeLines(paste0( "[plot_GrowthCurve()] Fit: ", fit.method, " | De = NA (fitting FAILED)" )) } } } #end if "try-error" Fit Method } #End if EXP+LIN #========================================================================== #=========================================================================== #EXP+EXP# else if (fit.method=="EXP+EXP") { a1.start <- NA a2.start <- NA b1.start <- NA b2.start <- NA ## try to create some start parameters from the input values to make the fitting more stable for(i in 1:50) { a1 <- a.MC[i];b1 <- b.MC[i]; a2 <- a.MC[i] / 2; b2 <- b.MC[i] / 2 fit.start <- try(nls( y ~ fit.functionEXPEXP(a1,a2,b1,b2,x), data = data, start = c( a1 = a1,a2 = a2,b1 = b1,b2 = b2 ), trace = FALSE, algorithm = 
"port", lower = c(a1 > 0,a2 > 0,b1 > 0,b2 > 0), nls.control( maxiter = 500,warnOnly = FALSE,minFactor = 1 / 2048 ) #increase max. iterations ),silent = TRUE) if (class(fit.start) != "try-error") { #get parameters out of it parameters <- coef(fit.start) a1.start[i] <- as.vector((parameters["a1"])) b1.start[i] <- as.vector((parameters["b1"])) a2.start[i] <- as.vector((parameters["a2"])) b2.start[i] <- as.vector((parameters["b2"])) } } ##use obtained parameters for fit input a1.start <- median(a1.start, na.rm = TRUE) b1.start <- median(b1.start, na.rm = TRUE) a2.start <- median(a2.start, na.rm = TRUE) b2.start <- median(b2.start, na.rm = TRUE) ##perform final fitting fit <- try(minpack.lm::nlsLM( formula = fit.formulaEXPEXP, data = data, start = list(a1 = a1, b1 = b1, a2 = a2, b2 = b2), weights = fit.weights, trace = FALSE, algorithm = "LM", lower = if (fit.bounds) { c(0,0,0,0) }else{ c(-Inf,-Inf,-Inf, -Inf) }, control = minpack.lm::nls.lm.control(maxiter = 500) ), silent = TRUE ) ##insert if for try-error if (class(fit)!="try-error") { #get parameters out of it parameters <- (coef(fit)) b1 <- as.vector((parameters["b1"])) b2 <- as.vector((parameters["b2"])) a1 <- as.vector((parameters["a1"])) a2 <- as.vector((parameters["a2"])) ##set D0 values D01 <- round(b1,digits = 2) D02 <- round(b2,digits = 2) #problem: analytically it is not easy to calculate x, use uniroot if (mode == "interpolation") { f.unirootEXPEXP <- function(a1, a2, b1, b2, x, LnTn) { fit.functionEXPEXP(a1, a2, b1, b2, x) - LnTn } temp.De <- try(uniroot( f = f.unirootEXPEXP, interval = c(0, max(xy$x) * 1.5), tol = 0.001, a1 = a1, a2 = a2, b1 = b1, b2 = b2, LnTn = sample[1, 2], extendInt = "yes", maxiter = 3000 ), silent = TRUE) if (class(temp.De) != "try-error") { De <- temp.De$root } else{ De <- NA } ##remove object rm(temp.De) }else if (mode == "extrapolation"){ stop("[plot_GrowthCurve()] mode 'extrapolation' for this fitting method currently not supported!", call. 
= FALSE) } else{ De <- NA } #print D0 and De value values if(verbose){ if(mode != "alternate"){ writeLines(paste0("[plot_GrowthCurve()] Fit: ", fit.method, " | De = ", De, "| D01 = ",D01, " | D02 = ",D02)) } } ##Monte Carlo Simulation for error estimation # --Fit many curves and calculate a new De +/- De_Error # --take De_Error from the simulation # --comparison of De from the MC and original fitted De gives a value for quality #set variables var.b1<-vector(mode="numeric", length=NumberIterations.MC) var.b2<-vector(mode="numeric", length=NumberIterations.MC) var.a1<-vector(mode="numeric", length=NumberIterations.MC) var.a2<-vector(mode="numeric", length=NumberIterations.MC) ##progress bar if(txtProgressBar){ cat("\n\t Run Monte Carlo loops for error estimation of the EXP+EXP fit\n") pb<-txtProgressBar(min=0,max=NumberIterations.MC, initial=0, char="=", style=3) } #start Monto Carlo loops for (i in 1:NumberIterations.MC) { #update progress bar if(txtProgressBar) setTxtProgressBar(pb,i) data<-data.frame(x=xy$x,y=data.MC[,i]) ##perform final fitting fit.MC <- try(minpack.lm::nlsLM( formula = fit.formulaEXPEXP, data = data, start = list(a1 = a1, b1 = b1, a2 = a2, b2 = b2), weights = fit.weights, trace = FALSE, algorithm = "LM", lower = if (fit.bounds) { c(0,0,0,0) }else{ c(-Inf,-Inf,-Inf, -Inf) }, control = minpack.lm::nls.lm.control(maxiter = 500) ), silent = TRUE ) #get parameters out of it including error handling if (class(fit.MC)=="try-error") { x.natural[i]<-NA }else { parameters <- (coef(fit.MC)) var.b1[i] <- as.vector((parameters["b1"])) var.b2[i] <- as.vector((parameters["b2"])) var.a1[i] <- as.vector((parameters["a1"])) var.a2[i] <- as.vector((parameters["a2"])) #problem: analytically it is not easy to calculate x, here an simple approximation is made temp.De.MC <- try(uniroot( f = f.unirootEXPEXP, interval = c(0,max(xy$x) * 1.5), tol = 0.001, a1 = var.a1[i], a2 = var.a2[i], b1 = var.b1[i], b2 = var.b2[i], LnTn = data.MC.De[i] ), silent = TRUE) if 
(class(temp.De.MC) != "try-error") { x.natural[i] <- temp.De.MC$root }else{ x.natural[i] <- NA } } #end if "try-error" MC simulation } #end for loop ##write D01.ERROR D01.ERROR <- sd(var.b1, na.rm = TRUE) D02.ERROR <- sd(var.b2, na.rm = TRUE) ##remove values rm(var.b1, var.b2, var.a1, var.a2) }else{ #print message if(verbose){ writeLines(paste0("[plot_GrowthCurve()] Fit: ", fit.method, " | De = NA (fitting FAILED)")) } } #end if "try-error" Fit Method ##close if(txtProgressBar) if(exists("pb")){close(pb)} } else if (fit.method=="GOK") { #========================================================================== #========================================================================== # GOK ----- # FINAL Fit fit <- try(minpack.lm::nlsLM( formula = fit.formulaGOK, data = data, start = list(a = a, b = b, c = 1), weights = fit.weights, trace = FALSE, algorithm = "LM", lower = if (fit.bounds) { c(0,0,0) }else{ c(-Inf,-Inf,-Inf) }, upper = c(Inf, Inf, Inf), control = minpack.lm::nls.lm.control(maxiter = 500) ), silent = TRUE) if (inherits(fit, "try-error")){ if(verbose) writeLines("[plot_GrowthCurve()] try-error for GOK fit") }else{ #get parameters out of it parameters <- (coef(fit)) b <- as.vector((parameters["b"])) a <- as.vector((parameters["a"])) c <- as.vector((parameters["c"])) #calculate De if(mode == "interpolation"){ De <- suppressWarnings(-(b * (( (a - sample[1,2])/a)^c - 1) * ( ((a - sample[1,2])/a)^-c )) / c) }else if (mode == "extrapolation"){ De <- suppressWarnings(-(b * (( (a - 0)/a)^c - 1) * ( ((a - 0)/a)^-c )) / c) }else{ De <- NA } #print D01 value D01 <- b if (verbose) { if (mode != "alternate") { writeLines(paste0( "[plot_GrowthCurve()] Fit: ", fit.method, " (", mode, ")", " | De = ", round(abs(De), digits = 2), " | D01 = ", round(D01,2), " | c = ", round(c, digits = 2) )) } } #EXP MC ----- ##Monte Carlo Simulation # --Fit many curves and calculate a new De +/- De_Error # --take De_Error #set variables var.b<-vector(mode="numeric", 
length=NumberIterations.MC) var.a<-vector(mode="numeric", length=NumberIterations.MC) var.c<-vector(mode="numeric", length=NumberIterations.MC) #start loop for (i in 1:NumberIterations.MC) { ##set data set data <- data.frame(x = xy$x,y = data.MC[,i]) fit.MC <- try(minpack.lm::nlsLM( formula = fit.formulaGOK, data = data, start = list(a = a, b = b, c = 1), weights = fit.weights, trace = FALSE, algorithm = "LM", lower = if (fit.bounds) { c(0,0,0) }else{ c(-Inf,-Inf,-Inf) }, upper = c(Inf, Inf, Inf), control = minpack.lm::nls.lm.control(maxiter = 500) ), silent = TRUE ) # get parameters out of it including error handling if (class(fit.MC)=="try-error") { x.natural[i] <- NA } else { # get parameters out parameters<-coef(fit.MC) var.b[i]<-as.vector((parameters["b"])) #D0 var.a[i]<-as.vector((parameters["a"])) #Imax var.c[i]<-as.vector((parameters["c"])) #kinetic order modifier # calculate x.natural for error calculation if(mode == "interpolation"){ x.natural[i]<-suppressWarnings( -(var.b[i] * (( (var.a[i] - data.MC.De[i])/var.a[i])^var.c[i] - 1) * ( ((var.a[i] - data.MC.De[i])/var.a[i])^-var.c[i] )) / var.c[i]) }else if(mode == "extrapolation"){ x.natural[i]<-suppressWarnings( abs(-(var.b[i] * (( (var.a[i] - 0)/var.a[i])^var.c[i] - 1) * ( ((var.a[i] - 0)/var.a[i])^-var.c[i] )) / var.c[i]) ) }else{ x.natural[i] <- NA } } }#end for loop ##write D01.ERROR D01.ERROR <- sd(var.b, na.rm = TRUE) ##remove values rm(var.b, var.a, var.c) }#endif::try-error fit #=========================================================================== }#End if Fit Method #Get De values from Monto Carlo simulation #calculate mean and sd (ignore NaN values) De.MonteCarlo <- mean(na.exclude(x.natural)) #De.Error is Error of the whole De (ignore NaN values) De.Error <- sd(na.exclude(x.natural)) ##choose format in dependency of the size of the error De.Error <- ifelse(De.Error <= 0.01, format(De.Error, scientific = TRUE, digits = 2), round(De.Error, digits = 2)) # Formula creation 
-------------------------------------------------------- if(!is(fit,"try-error") & !is.na(fit[1])){ if(fit.method == "EXP") { f <- parse(text = paste0(format(coef(fit)[1], scientific = TRUE), " * (1 - exp( - ( x + ", format(coef(fit)[3], scientific = TRUE), ") / ", format(coef(fit)[2], scientific = TRUE), "))")) } if(fit.method == "EXP+LIN") { f <- parse(text = paste0(format(coef(fit)[1], scientific = TRUE), " * (1-exp(-(x+", format(coef(fit)[3], scientific = TRUE), ") / ", format(coef(fit)[2], scientific = TRUE), ")+(", format(coef(fit)[4], scientific = TRUE), " * x))")) } if(fit.method == "EXP+EXP") { f <- parse(text = paste0(format(coef(fit)[1], scientific = TRUE), " * (1 - exp( -x / ", format(coef(fit)[2], scientific = TRUE), ")) + ", format(coef(fit)[3], scientific = TRUE), " * (1 - exp( -x / ", format(coef(fit)[4], scientific = TRUE), "))")) } if(fit.method == "LIN" & fit.force_through_origin) { f <- parse(text = paste0(format(fit.lm$coefficients[1], scientific = TRUE), " * x")) } if(fit.method == "LIN" & !fit.force_through_origin) { f <- parse(text = paste0(format(fit.lm$coefficients[2], scientific = TRUE), "* x + ", format(fit.lm$coefficients[1], scientific = TRUE))) } if(fit.method == "QDR" & fit.force_through_origin) { f <- parse(text = paste0(format(coef(fit)[1], scientific = TRUE), " * x ", " + ", format(coef(fit)[2], scientific = TRUE), " * x^2" )) } if(fit.method == "QDR" & !fit.force_through_origin) { f <- parse(text = paste0(format(coef(fit)[1], scientific = TRUE), " + ", format(coef(fit)[2], scientific = TRUE), " * x ", " + ", format(coef(fit)[3], scientific = TRUE), " * x^2" )) } if(fit.method == "GOK") { f <- parse(text = paste0( format(coef(fit)[1], scientific = TRUE), " * (1 - (1 + (1/", format(coef(fit)[2], scientific = TRUE), ") * x * ", format(coef(fit)[3], scientific = TRUE), ")^(-1 / ", format(coef(fit)[3], scientific = TRUE), "))" )) } }else{ f <- NA } # Plotting 
------------------------------------------------------------------------------------ ##5. Plotting if plotOutput==TRUE if(output.plot) { # Deal with extra arguments ----------------------------------------------- ##deal with addition arguments extraArgs <- list(...) main <- if("main" %in% names(extraArgs)) {extraArgs$main} else {"Growth curve"} xlab <- if("xlab" %in% names(extraArgs)) {extraArgs$xlab} else {"Dose [s]"} ylab <- if("ylab" %in% names(extraArgs)) {extraArgs$ylab} else { if(mode == "regenration"){ expression(L[x]/T[x]) }else{ "Luminescence [a.u.]" } } if("cex" %in% names(extraArgs)) {cex.global <- extraArgs$cex} ylim <- if("ylim" %in% names(extraArgs)) { extraArgs$ylim } else { if(fit.force_through_origin | mode == "extrapolation"){ c(0-max(y.Error),(max(xy$y)+if(max(xy$y)*0.1>1.5){1.5}else{max(xy$y)*0.2})) }else{ c(min(xy$y)-max(y.Error),(max(xy$y)+if(max(xy$y)*0.1>1.5){1.5}else{max(xy$y)*0.2})) } } xlim <- if("xlim" %in% names(extraArgs)) {extraArgs$xlim} else { if(mode != "extrapolation"){ c(0,(max(xy$x)+if(max(xy$x)*0.4>50){50}else{max(xy$x)*0.4})) }else{ if(!is.na(De)){ if(De > 0){ c(0,(max(xy$x)+if(max(xy$x)*0.4>50){50}else{max(xy$x)*0.4})) }else{ c(De * 2,(max(xy$x)+if(max(xy$x)*0.4>50){50}else{max(xy$x)*0.4})) } }else{ c(-min(xy$x) * 2,(max(xy$x)+if(max(xy$x)*0.4>50){50}else{max(xy$x)*0.4})) } } } fun <- if("fun" %in% names(extraArgs)) {extraArgs$fun} else {FALSE} ##set plot check plot_check <- NULL ##cheat the R check x<-NULL; rm(x) #PAR #open plot area if(output.plot== TRUE & output.plotExtended== TRUE & output.plotExtended.single == FALSE ){ ####grep recent plot parameter for later reset par.default.complex <- par(no.readonly = TRUE) on.exit(par(par.default.complex)) ##set new parameter layout(matrix(c(1,1,1,1,2,3), 3, 2, byrow=TRUE), respect=TRUE) par(cex=0.8*cex.global) }else{ par.default.single <- par(no.readonly = TRUE)$cex on.exit(par(cex = par.default.single)) par(cex=cex.global) } #PLOT #Plot input values ##Make selection to support 
manual number of reg points input if(exists("fit.RegPointsReal")==TRUE){ ##here the object sample has to be used otherwise the first regeneration point is not plotted. temp.xy.plot <- sample[fit.RegPointsReal,] }else{ temp.xy.plot <- xy[1:fit.NumberRegPointsReal,] } plot_check <- try(plot( temp.xy.plot[, 1:2], ylim = ylim, xlim = xlim, pch = 19, xlab = xlab, ylab = ylab ), silent = TRUE) if (!is(plot_check, "try-error")) { if(mode == "extrapolation"){ abline(v = 0, lty = 1, col = "grey") abline(h = 0, lty = 1, col = "grey") } #ADD HEADER title(main = main, line = 3) #CURVE #plot fitted curve if (fit.method == "EXP+LIN") { try(curve(a * (1 - exp(-(x + c) / b) + (g * x)), lwd = 1.5, add = TRUE)) } else if (fit.method == "LIN" & fit.force_through_origin) { curve(fit.lm$coefficients[1] * x, lwd = 1.5, add = TRUE) } else if (fit.method == "LIN") { curve(fit.lm$coefficients[2] * x + fit.lm$coefficients[1], lwd = 1.5, add = TRUE) } else if (fit.method == "QDR" & fit.force_through_origin) { curve(coef(fit)[1] * x + coef(fit)[2] * x ^ 2, lwd = 1.5, add = TRUE) } else if (fit.method == "QDR") { curve(coef(fit)[1] + coef(fit)[2] * x + coef(fit)[3] * x ^ 2, lwd = 1.5, add = TRUE) } else if (fit.method == "EXP") { try(curve(fit.functionEXP(a, b, c, x), lwd = 1.5, add = TRUE)) } else if (fit.method == "EXP+EXP") { try(curve(fit.functionEXPEXP(a1, a2, b1, b2, x), lwd = 1.5, add = TRUE)) } else if (fit.method == "GOK") { try(curve(fit.functionGOK(a, b, c, x), lwd = 1.5, add = TRUE)) } ##POINTS #Plot Reg0 and Repeated Points #Natural value if(mode == "interpolation"){ points(sample[1, 1:2], col = "red") segments(sample[1, 1], sample[1, 2] - sample[1, 3], sample[1, 1], sample[1, 2] + sample[1, 3], col = "red") }else if (mode == "extrapolation"){ points(x = De, y = 0, col = "red") } #Repeated Point points(xy[which(duplicated(xy[, 1])), 1], xy[which(duplicated(xy[, 1])), 2], pch = 2) #Reg Point 0 points(xy[which(xy == 0), 1], xy[which(xy == 0), 2], pch = 1, cex = 1.5 * cex.global) 
##ARROWS #y-error Bar segments(xy$x, xy$y - y.Error, xy$x, xy$y + y.Error) ##LINES #Insert Ln/Tn if (mode == "interpolation") { if (is.na(De)) { lines( c(0, max(sample[, 1]) * 2), c(sample[1, 2], sample[1, 2]), col = "red", lty = 2, lwd = 1.25 ) } else{ try(lines( c(0, De), c(sample[1, 2], sample[1, 2]), col = "red", lty = 2, lwd = 1.25 ), silent = TRUE) } try(lines(c(De, De), c(0, sample[1, 2]), col = "red", lty = 2, lwd = 1.25), silent = TRUE) try(points(De, sample[1, 2], col = "red", pch = 19), silent = TRUE) } else if (mode == "extrapolation"){ if(!is.na(De)){ abline(v = De, lty = 2, col = "red") lines(x = c(0,De), y = c(0,0), lty = 2, col = "red") } } ## check/set mtext mtext <- if ("mtext" %in% names(list(...))) { list(...)$mtext } else { if(mode != "alternate"){ substitute(D[e] == De, list(De = paste( round(abs(De), digits = 2), "\u00B1", round(as.numeric(De.Error), digits = 2), " | fit: ", fit.method ))) }else{ "" } } ##TEXT #Insert fit and result try(mtext(side = 3, mtext, line = 0.5, cex = 0.8 * cex.global), silent = TRUE) #write error message in plot if De is NaN try(if (De == "NaN") { text( sample[2, 1], 0, "Error: De could not be calculated!", adj = c(0, 0), cex = 0.8, col = "red" ) }, silent = TRUE) ##LEGEND #plot legend if (mode == "interpolation") { legend( "topleft", c("REG point", "REG point repeated", "REG point 0"), pch = c(19, 2, 1), cex = 0.8 * cex.global, bty = "n" ) }else{ legend( "bottomright", c("Dose point", "Dose point rep.", "Dose point 0"), pch = c(19, 2, 1), cex = 0.8 * cex.global, bty = "n" ) } ##plot only if wanted if (output.plot == TRUE & output.plotExtended == TRUE) { ##HIST #try to plot histogramm of De values from the Monte Carlo simulation if (output.plotExtended.single != TRUE) { par(cex = 0.7 * cex.global) } ##(A) Calculate histogram data try(histogram <- hist(x.natural, plot = FALSE), silent = TRUE) #to avoid errors plot only if histogram exists if (exists("histogram") && length(histogram$counts) > 2) { ##calculate normal 
distribution curves for overlay norm.curve.x <- seq(min(x.natural, na.rm = TRUE), max(x.natural, na.rm = TRUE), length = 101) norm.curve.y <- dnorm( norm.curve.x, mean = mean(x.natural, na.rm = TRUE), sd = sd(x.natural, na.rm = TRUE) ) ##plot histogram histogram <- try(hist( x.natural, xlab = xlab, ylab = "Frequency", main = expression(paste(D[e], " from MC simulation")), freq = FALSE, border = "white", axes = FALSE, ylim = c(0, max(norm.curve.y)), sub = paste( "n = ", NumberIterations.MC, ", valid fits =", length(na.exclude(x.natural)) ), col = "grey" ), silent = TRUE) if (!is(histogram, "try-error")) { ##add axes axis(side = 1) axis( side = 2, at = seq(min(histogram$density), max(histogram$density), length = 5), labels = round(seq( min(histogram$counts), max(histogram$counts), length = 5 ), digits = 0) ) ##add norm curve lines(norm.curve.x, norm.curve.y, col = "red") ##add rug rug(x.natural) ##write De + Error from Monte Carlo simulation + write quality of error estimation try(mtext(side = 3, substitute(D[e[MC]] == De, list( De = paste( round(De.MonteCarlo, 2), "\u00B1", round(De.Error,2), " | quality = ", round((1 - abs(De - De.MonteCarlo) / De) * 100, digits = 1), "%" ) )), cex = 0.6 * cex.global), silent = TRUE) }else{ plot_check <- histogram } } else { plot_check <- try(plot( NA, NA, xlim = c(0, 10), ylim = c(0, 10), main = expression(paste(D[e], " from Monte Carlo simulation"))), silent = TRUE ) if(!is(plot_check,"try-error")){ text(5, 5, "not available") } }#end ifelse ##PLOT #PLOT test dose response curve if available if not plot not available #plot Tx/Tn value for sensitiviy change if (!is(plot_check, "try-error")) { if ("TnTx" %in% colnames(sample) == TRUE) { plot( 1:length(sample[, "TnTx"]), sample[1:(length(sample[, "TnTx"])), "TnTx"] / sample[1, "TnTx"], xlab = "SAR cycle", ylab = expression(paste(T[x] / T[n])), main = "Test-dose response", type = "o", pch = 20, ) ##LINES #plot 1 line lines(c(1, length(sample[, "TnTx"])), c(1, 1), lty = 2, col = 
"gray") } else { plot( NA, NA, xlim = c(0, 10), ylim = c(0, 10), main = "Test dose response" ) text(5, 5, "not available\n no TnTx column") }#end if else } ## FUN by R Luminescence Team if (fun == TRUE) { sTeve() } }#endif::output.plotExtended }#end if plotOutput ##reset graphic device if the plotting failed! if(is(plot_check, "try-error")){ try(stop("[plot_GrowthCurve()] Figure margins too large, nothing plotted, but results returned!", call. = FALSE),) dev.off() } } ##RETURN - return De values and parameter output <- try(data.frame( De = abs(De), De.Error = De.Error, D01 = D01, D01.ERROR = D01.ERROR, D02 = D02, D02.ERROR = D02.ERROR, De.MC = De.MonteCarlo, Fit = fit.method ), silent = TRUE ) ##make RLum.Results object output.final <- set_RLum( class = "RLum.Results", data = list( De = output, De.MC = x.natural, Fit = fit, Formula = f ), info = list( call = sys.call() ) ) invisible(output.final) } Luminescence/R/merge_RLum.Data.Curve.R0000644000176200001440000002151513237102143017201 0ustar liggesusers#' Merge function for RLum.Data.Curve S4 class objects #' #' Function allows merging of RLum.Data.Curve objects in different ways #' #' This function simply allowing to merge [RLum.Data.Curve-class] #' objects without touching the objects itself. Merging is always applied on #' the 2nd colum of the data matrix of the object. #' #' **Supported merge operations are [RLum.Data.Curve-class]** #' #' `"sum"` #' #' All count values will be summed up using the function [rowSums]. #' #' `"mean"` #' #' The mean over the count values is calculated using the function #' [rowMeans]. #' #' `"median"` #' #' The median over the count values is calculated using the function #' [matrixStats::rowMedians]. #' #' `"sd"` #' #' The standard deviation over the count values is calculated using the function #' [matrixStats::rowSds]. #' #' `"var"` #' #' The variance over the count values is calculated using the function #' [matrixStats::rowVars]. 
#'
#' `"min"`
#'
#' The min values from the count values are chosen using the function
#' [matrixStats::rowMins][matrixStats::rowRanges].
#'
#' `"max"`
#'
#' The max values from the count values are chosen using the function
#' [matrixStats::rowMaxs][matrixStats::rowRanges].
#'
#' `"append"`
#'
#' Appends count values of all curves to one combined data curve. The channel width
#' is automatically re-calculated, but requires a constant channel width of the
#' original data.
#'
#' `"-"`
#'
#' The row sums of the last objects are subtracted from the first object.
#'
#' `"*"`
#'
#' The row sums of the last objects are multiplied with the first object.
#'
#' `"/"`
#'
#' Values of the first object are divided by row sums of the last objects.
#'
#' @param object [list] of [RLum.Data.Curve-class] (**required**):
#' list of S4 objects of class `RLum.Curve`.
#'
#' @param merge.method [character] (**required**):
#' method for combining of the objects, e.g. `'mean'`, `'sum'`, see details for
#' further information and allowed methods. Note: Elements in slot info will
#' be taken from the first curve in the list.
#'
#' @param method.info [numeric] (*optional*):
#' allows to specify how info elements of the input objects are combined,
#' e.g. `1` means that just the elements from the first object are kept,
#' `2` keeps only the info elements from the 2nd object etc.
#' If nothing is provided all elements are combined.
#'
#' @return Returns an [RLum.Data.Curve-class] object.
#'
#' @note
#' The information from the slot `recordType` is taken from the first
#' [RLum.Data.Curve-class] object in the input list. The slot
#' 'curveType' is filled with the name `merged`.
#'
#' @section S3-generic support:
#'
#' This function is fully operational via S3-generics:
#' ``+``, ``-``, ``/``, ``*``, `merge`
#'
#' @section Function version: 0.2.0
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @seealso [merge_RLum], [RLum.Data.Curve-class]
#'
#'
#' @keywords utilities internal
#'
#' @examples
#'
#'
#' ##load example data
#' data(ExampleData.XSYG, envir = environment())
#'
#' ##grep first and 3d TL curves
#' TL.curves <- get_RLum(OSL.SARMeasurement$Sequence.Object, recordType = "TL (UVVIS)")
#' TL.curve.1 <- TL.curves[[1]]
#' TL.curve.3 <- TL.curves[[3]]
#'
#' ##plot single curves
#' plot_RLum(TL.curve.1)
#' plot_RLum(TL.curve.3)
#'
#' ##subtract the 1st curve from the 2nd and plot
#' TL.curve.merged <- merge_RLum.Data.Curve(list(TL.curve.3, TL.curve.1), merge.method = "/")
#' plot_RLum(TL.curve.merged)
#'
#' @md
#' @export
merge_RLum.Data.Curve <- function(
  object,
  merge.method = "mean",
  method.info
){

  # Integrity checks ----------------------------------------------------------

  ##(1) check that every element of the input list is an RLum.Data.Curve and
  ##    collect the record types for the comparison below
  temp.recordType.test <- sapply(1:length(object), function(x){
    if(is(object[[x]], "RLum.Data.Curve") == FALSE){
      temp.text <- paste(
        "[merge_RLum.Data.Curve()]: At least object", x, "is not of class 'RLum.Data.Curve'!")
      stop(temp.text)
    }

    ##provide class of objects
    return(object[[x]]@recordType)
  })

  ##(2) only objects with an identical record type can be merged
  if(length(unique(temp.recordType.test))>1){
    stop.text <- paste0(
      "[merge_RLum.Data.Curve()] only similar record types are supported, you are trying to merge: ",
      paste0("'",unique(temp.recordType.test),"'", collapse = ", "))
    stop(stop.text)
  }

  # Merge objects -------------------------------------------------------------

  ##(1) build new data matrix
  ##first find the shortest object; curves of different length are truncated
  ##to this length further down
  check.length <- sapply(1:length(object),function(x){
    nrow(object[[x]]@data)
  })

  temp.matrix <- sapply(1:length(object), function(x){
    ##check if the objects are of equal length
    if (length(unique(check.length)) != 1) {
      ##but we have to at least check the x-range
      ##NOTE(review): 'x' is the object index, not a channel index; verify
      ##that comparing row 'x' of each curve is really what is intended here
      if (object[[x]]@data[x,1] != object[[1]]@data[x,1]) {
        stop(
          "[merge_RLum.Data.Curve()] The objects seem not to have the same channel resolution!"
        )
      }

      warning("[merge_RLum.Data.Curve()] The number of channels between the curves differes. Resulting curve has the length of shortest curve.")

      ##if this is ok, we can continue and shorten the rest of the objects
      return(object[[x]]@data[1:min(check.length),2])

    }else{
      object[[x]]@data[,2]
    }
  })

  ##(2) apply the selected method for merging on the count values
  if(merge.method == "sum"){
    temp.matrix <- rowSums(temp.matrix)

  }else if(merge.method == "mean"){
    temp.matrix <- rowMeans(temp.matrix)

  }else if(merge.method == "median"){
    temp.matrix <- matrixStats::rowMedians(temp.matrix)

  }else if(merge.method == "sd"){
    temp.matrix <- matrixStats::rowSds(temp.matrix)

  }else if(merge.method == "var"){
    temp.matrix <- matrixStats::rowVars(temp.matrix)

  }else if(merge.method == "max"){
    temp.matrix <- matrixStats::rowMaxs(temp.matrix)

  }else if(merge.method == "min"){
    temp.matrix <- matrixStats::rowMins(temp.matrix)

  }else if(merge.method == "append") {
    ##element-wise concatenation of all curves into one long vector
    temp.matrix <- sapply(temp.matrix, c)

  }else if(merge.method == "-"){
    if(ncol(temp.matrix) > 2){
      temp.matrix <- temp.matrix[,1] - rowSums(temp.matrix[,-1])
    }else{
      temp.matrix <- temp.matrix[,1] - temp.matrix[,2]
    }

  }else if(merge.method == "*"){
    if(ncol(temp.matrix) > 2){
      temp.matrix <- temp.matrix[,1] * rowSums(temp.matrix[,-1])
    }else{
      temp.matrix <- temp.matrix[,1] * temp.matrix[,2]
    }

  }else if(merge.method == "/"){
    if(ncol(temp.matrix) > 2){
      temp.matrix <- temp.matrix[,1] / rowSums(temp.matrix[,-1])
    }else{
      temp.matrix <- temp.matrix[,1] / temp.matrix[,2]
    }

    ##get index of inf values caused by a division by zero
    id.inf <- which(is.infinite(temp.matrix) == TRUE)

    ##replace with 0 and provide a warning, but only if something was
    ##actually replaced (the original code warned unconditionally, even
    ##for "0 'inf' values")
    if (length(id.inf) > 0) {
      temp.matrix[id.inf] <- 0
      warning(paste0(length(id.inf), " 'inf' values have been replaced by 0 in the matrix."))
    }

  }else{
    stop("[merge_RLum.Data.Curve()] unsupported or unknown merge method!")
  }

  ##add first column
  #If we append the data of the second to the first curve we have to recalculate
  #the x-values (probably time/channel). The difference should always be the
  #same, so we just expand the sequence if this is true, i.e. if the original
  #data have a constant channel width (exactly one unique step size). If this
  #is not true, we revert to the default behaviour (i.e., append the x-values).
  #FIX: the original condition 'length(unique(diff(...)))' was always truthy
  #(length >= 1), so the constant-width check never took effect and 'step'
  #could silently be a vector; it must test for exactly one unique step.
  if (merge.method == "append" &&
      length(unique(diff(object[[1]]@data[,1]))) == 1) {
    step <- unique(diff(object[[1]]@data[,1]))
    newx <- seq(from = min(object[[1]]@data[,1]), by = step,
                length.out = sum(check.length))
    temp.matrix <- cbind(newx, temp.matrix)

  } else {
    temp.matrix <- cbind(object[[1]]@data[1:min(check.length),1], temp.matrix)
  }

  ##merge info objects as simple as possible ... just keep them all; the other
  ##possibility would be to choose one of the input objects via 'method.info'
  ##unlist is needed here, as otherwise it would cause unexpected behaviour
  ##when further using the RLum object
  if(missing(method.info)){
    temp.info <- unlist(lapply(1:length(object), function(x){
      object[[x]]@info
    }), recursive = FALSE)

  }else{
    temp.info <- object[[method.info]]@info
  }

  # Build new RLum.Data.Curve object -------------------------------------------
  ##recordType is taken from the first object; curveType marks the result as
  ##merged; .pid records the uids of all parent objects
  temp.new.Data.Curve <- set_RLum(
    class = "RLum.Data.Curve",
    originator = "merge_RLum.Data.Curve",
    recordType = object[[1]]@recordType,
    curveType = "merged",
    data = temp.matrix,
    info = temp.info,
    .pid = unlist(lapply(object, function(x) {
      x@.uid
    }))
  )

  # Return object --------------------------------------------------------------
  return(temp.new.Data.Curve)
}
#' General replication function for RLum S4 class objects
#'
#' Function replicates RLum S4 class objects and returns a list for this objects
#'
#' @param object [RLum-class] (**required**):
#' an [RLum-class] object
#'
#' @param times [integer] (*optional*):
#' number for times each element is repeated element
#'
#' @return Returns a [list] of the object to be repeated
#'
#' @section Function version: 0.1.0
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @seealso [RLum-class]
#'
#' @keywords utilities
#'
#' @md
#' @export
setGeneric("replicate_RLum", function (object, times = NULL) {
  standardGeneric("replicate_RLum")
})
#' Apply a fading correction according to Huntley & Lamothe (2001) for a given
#' g-value and a given tc
#'
#' This function solves the equation used for correcting the fading affected age
#' including the error for a given g-value according to Huntley & Lamothe (2001).
#'
#' As the g-value slightly depends on the time between irradiation and the prompt measurement,
#' i.e. tc, a tc value always needs to be provided. If the g-value was normalised to a distinct
#' time or evaluated with a different tc value (e.g., external irradiation), also the tc value
#' for the g-value needs to be provided (argument `tc.g_value`) and then the g-value is recalculated
#' to tc of the measurement used for estimating the age applying the following equation:
#'
#' \deqn{\kappa_{tc} = \kappa_{tc.g} / (1 - \kappa_{tc.g} * log(tc/tc.g))}
#'
#' where
#'
#' \deqn{\kappa_{tc.g} = g / 100 / log(10)}
#'
#' with `log` the natural logarithm.
#'
#'
#' The error of the fading-corrected age is determined using a Monte Carlo
#' simulation approach. Solving of the equation is realised using
#' [uniroot]. Large values for `n.MC` will significantly
#' increase the computation time.\cr
#'
#' **`n.MC = 'auto'`**
#'
#' The error estimation is based on a stochastic process, i.e. for a small number of MC runs the calculated
#' error varies considerably every time the function is called, even with the same input values.
#' The argument option `n.MC = 'auto'` tries to find a stable value for the standard error, i.e.
#' the standard deviation of values calculated during the MC runs (`age.corr.MC`),
#' within a given precision (2 digits) by increasing the number of MC runs stepwise and
#' calculating the corresponding error.
#'
#' If the determined error does not differ from the 9 values calculated previously
#' within a precision of (here) 3 digits the calculation is stopped as it is assumed that the error
#' is stable. Please note that (a) the duration depends on the input values as well as on
#' the provided computation resources and it may take a while, (b) the length (size) of the output
#' vector `age.corr.MC`, where all the single values produced during the MC runs are stored,
#' equals the number of MC runs (here termed observations).
#' #' To avoid an endless loop the calculation is stopped if the number of observations exceeds 10^7. #' This limitation can be overwritten by setting the number of MC runs manually, #' e.g. `n.MC = 10000001`. Note: For this case the function is not checking whether the calculated #' error is stable.\cr #' #' **`seed`** #' #' This option allows to recreate previously calculated results by setting the seed #' for the R random number generator (see [set.seed] for details). This option #' should not be mixed up with the option **`n.MC = 'auto'`**. The results may #' appear similar, but they are not comparable!\cr #' #' **FAQ**\cr #' #' Q: Which tc value is expected?\cr #' #' A: tc is the time in seconds between irradiation and the prompt measurement applied during your #' De measurement. However, this tc might differ from the tc used for estimating the g-value. In the #' case of an SAR measurement tc should be similar, however, if it differs, you have to provide this #' tc value (the one used for estimating the g-value) using the argument `tc.g_value`.\cr #' #' @param age.faded [numeric] [vector] (**required**): #' uncorrected age with error in ka (see example) #' #' @param g_value [vector] (**required**): #' g-value and error obtained from separate fading measurements (see example). #' Alternatively an [RLum.Results-class] object can be provided produced by the function #' [analyse_FadingMeasurement], in this case tc is set automatically #' #' @param tc [numeric] (**required**): #' time in seconds between irradiation and the prompt measurement (cf. Huntley & Lamothe 2001). #' Argument will be ignored if `g_value` was an [RLum.Results-class] object #' #' @param tc.g_value [numeric] (*with default*): #' the time in seconds between irradiation and the prompt measurement used for estimating the g-value. #' If the g-value was normalised to, e.g., 2 days, this time in seconds (i.e., 172800) should be given here. 
#' If nothing is provided the time is set to tc, which is usual case for g-values obtained using the #' SAR method and g-values that had been not normalised to 2 days. #' #' @param n.MC [integer] (*with default*): #' number of Monte Carlo simulation runs for error estimation. #' If `n.MC = 'auto'` is used the function tries to find a 'stable' error for the age. #' **Note:** This may take a while! #' #' @param seed [integer] (*optional*): #' sets the seed for the random number generator in R using [set.seed] #' #' @param interval [numeric] (*with default*): #' a vector containing the end-points (age interval) of the interval to be searched for the root in 'ka'. #' This argument is passed to the function [stats::uniroot] used for solving the equation. #' #' @param txtProgressBar [logical] (*with default*): #' enables or disables [txtProgressBar] #' #' @param verbose [logical] (*with default*): #' enables or disables terminal output #' #' #' @return Returns an S4 object of type [RLum.Results-class].\cr #' #' Slot: **`@data`**\cr #' \tabular{lll}{ #' **Object** \tab **Type** \tab **Comment** \cr #' `age.corr` \tab [data.frame] \tab Corrected age \cr #' `age.corr.MC` \tab [numeric] \tab MC simulation results with all possible ages from that simulation \cr #' } #' #' Slot: **`@info`**\cr #' #' \tabular{lll}{ #' **Object** \tab **Type** \tab **Comment** \cr #' `info` \tab [character] \tab the original function call #' } #' #' #' @note Special thanks to Sebastien Huot for his support and clarification via e-mail. #' #' #' @section Function version: 0.4.2 #' #' #' @author Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #' #' @seealso [RLum.Results-class], [get_RLum], [uniroot] #' #' #' @references #' Huntley, D.J., Lamothe, M., 2001. Ubiquity of anomalous fading #' in K-feldspars and the measurement and correction for it in optical dating. #' Canadian Journal of Earth Sciences, 38, 1093-1106. 
#'
#'
#' @keywords datagen
#'
#'
#' @examples
#'
#' ##run the examples given in the appendix of Huntley and Lamothe, 2001
#'
#' ##(1) faded age: 100 a
#' results <- calc_FadingCorr(
#'    age.faded = c(0.1,0),
#'    g_value = c(5.0, 1.0),
#'    tc = 2592000,
#'    tc.g_value = 172800,
#'    n.MC = 100)
#'
#' ##(2) faded age: 1 ka
#' results <- calc_FadingCorr(
#'    age.faded = c(1,0),
#'    g_value = c(5.0, 1.0),
#'    tc = 2592000,
#'    tc.g_value = 172800,
#'    n.MC = 100)
#'
#' ##(3) faded age: 10.0 ka
#' results <- calc_FadingCorr(
#'    age.faded = c(10,0),
#'    g_value = c(5.0, 1.0),
#'    tc = 2592000,
#'    tc.g_value = 172800,
#'    n.MC = 100)
#'
#' ##access the last output
#' get_RLum(results)
#'
#' @md
#' @export
calc_FadingCorr <- function(
  age.faded,
  g_value,
  tc = NULL,
  tc.g_value = tc,
  n.MC = 10000,
  seed = NULL,
  interval = c(0.01,500),
  txtProgressBar = TRUE,
  verbose = TRUE
){

  ##TODO set link after the function analyse_FadingMeasurement was released
  ## ... this option should be tested as well

  # Integrity checks ---------------------------------------------------------------------------
  stopifnot(!missing(age.faded), !missing(g_value))

  ##g_value may alternatively be an RLum.Results object produced by
  ##analyse_FadingMeasurement(); in that case tc is taken from the object
  if(class(g_value)[1] == "RLum.Results"){
    if(g_value@originator == "analyse_FadingMeasurement"){
      tc <- get_RLum(g_value)[["TC"]]
      g_value <- as.numeric(get_RLum(g_value)[,c("FIT", "SD")])

    }else{
      try(stop("[calc_FadingCorr()] Unknown originator for the provided RLum.Results object via 'g_value'!",
               call. = FALSE))
      return(NULL)
    }
  }

  ##check if tc is still NULL
  if(is.null(tc)){
    try(stop("[calc_FadingCorr()] 'tc' needs to be set!", call. = FALSE))
    return(NULL)
  }

  ##============================================================================##
  ##DEFINE FUNCTION
  ##============================================================================##
  ##root of f gives the fading-corrected age (Huntley & Lamothe 2001, eq. A5)
  f <- function(x, af,kappa,tc){1-kappa*(log(x/tc)-1) - (af/x)}

  ##============================================================================##
  ##CALCULATION
  ##============================================================================##

  ##recalculate the g-value to the given tc ... should be similar
  ##if tc = tc.g_value
  ##re-calculation thanks to the help by Sebastien Huot, e-mail: 2016-07-19
  ##Please note that we take the vector for the g_value here
  k0 <- g_value / 100 / log(10)
  k1 <- k0 / (1 - k0 * log(tc[1]/tc.g_value[1]))
  g_value <- 100 * k1 * log(10)

  ##calculate kappa (equation [5] in Huntley and Lamothe, 2001)
  kappa <- g_value / log(10) / 100

  ##transform tc in ka years
  ##duration of the year over a long term taken from http://wikipedia.org
  tc <- tc[1] / 60 / 60 / 24 / 365.2425 / 1000
  tc.g_value <- tc.g_value[1] / 60 / 60 / 24 / 365.2425 / 1000

  ##calculate mean value
  temp <- uniroot(
    f,
    interval = interval,
    tol = 0.001,
    tc = tc,
    af = age.faded[1],
    kappa = kappa[1],
    check.conv = FALSE
  )

  ##--------------------------------------------------------------------------##
  ##Monte Carlo simulation for error estimation
  tempMC.sd.recent <- NA
  tempMC.sd.count <- 1:10
  counter <- 1

  ##show some progression bar of the process
  if (n.MC == 'auto') {
    n.MC.i <- 10000
    cat("\n[calc_FadingCorr()] ... trying to find stable error value ...")
    if (txtProgressBar) {
      cat("\n -------------------------------------------------------------\n")
      cat(paste0(" ",paste0("(",0:9,")", collapse = " "), "\n"))
    }

  }else{
    n.MC.i <- n.MC
  }

  # Start loop ---------------------------------------------------------------------------------

  ##set object and preallocate memory
  tempMC <- vector("numeric", length = 1e+07)
  tempMC[] <- NA

  i <- 1
  j <- n.MC.i

  ##FIX: the original loop condition
  ##  while(length(unique(tempMC.sd.count))>1 | j > 1e+07)
  ##contradicted the documented behaviour: once j exceeded 10^7 the '|' kept
  ##the loop running forever instead of stopping it. A bottom-tested repeat
  ##implements the documented contract: stop when the error is stable OR the
  ##observation limit is exceeded, while a manually set n.MC (possibly > 10^7)
  ##still executes exactly one pass via the explicit break below.
  repeat {

    ##carry over the most recent sd estimate
    if(!is.na(tempMC.sd.recent)){
      tempMC.sd.count[counter] <- tempMC.sd.recent
    }

    ##set seed
    if (!is.null(seed)) set.seed(seed)

    ##draw MC values (the dead preallocations of the original were removed;
    ##note the original preallocated 'age.fadeMC' but assigned 'age.fadedMC')
    g_valueMC <- rnorm(n.MC.i,mean = g_value[1],sd = g_value[2])
    age.fadedMC <- rnorm(n.MC.i,mean = age.faded[1],sd = age.faded[2])
    kappaMC <- g_valueMC / log(10) / 100

    ##calculate for all values
    tempMC[i:j] <- suppressWarnings(vapply(X = 1:length(age.fadedMC), FUN = function(x) {
      temp <- try(uniroot(
        f,
        interval = interval,
        tol = 0.001,
        tc = tc,
        af = age.fadedMC[[x]],
        kappa = kappaMC[[x]],
        check.conv = TRUE,
        maxiter = 1000,
        extendInt = "yes"
      ), silent = TRUE)

      ##otherwise the automatic error value finding
      ##will never work
      if(!is(temp,"try-error") && temp$root<1e8) {
        return(temp$root)
      } else{
        return(NA)
      }

    }, FUN.VALUE = 1))

    i <- j + 1
    j <- j + n.MC.i

    ##stop here if a fixed value is set
    if(n.MC != 'auto'){
      break
    }

    ##set recent
    tempMC.sd.recent <- round(sd(tempMC, na.rm = TRUE), digits = 3)

    if (counter %% 10 == 0) {
      counter <- 1
    }else{
      counter <- counter + 1
    }

    ##show progress in terminal
    if (txtProgressBar) {
      text <- rep("CHECK",10)
      if (counter %% 2 == 0) {
        text[1:length(unique(tempMC.sd.count))] <- "-----"
      }else{
        text[1:length(unique(tempMC.sd.count))] <- " CAL "
      }
      cat(paste("\r ",paste(rev(text), collapse = " ")))
    }

    ##stop once the error is stable, or once the observation limit of 10^7
    ##is exceeded (see documentation)
    if (length(unique(tempMC.sd.count)) == 1 || j > 1e+07) {
      break
    }
  }

  ##--------------------------------------------------------------------------##
  ##remove all NA values from tempMC
  tempMC <- tempMC[!is.na(tempMC)]

  ##obtain corrected age
  age.corr <- data.frame(
    AGE = round(temp$root, digits = 4),
    AGE.ERROR = round(sd(tempMC), digits = 4),
    AGE_FADED = age.faded[1],
    AGE_FADED.ERROR = age.faded[2],
    G_VALUE = g_value[1],
    G_VALUE.ERROR = g_value[2],
    KAPPA = kappa[1],
    KAPPA.ERROR = kappa[2],
    TC = tc,
    TC.G_VALUE = tc.g_value,
    n.MC = n.MC,
    OBSERVATIONS = length(tempMC),
    SEED = ifelse(is.null(seed), NA, seed)
  )

  ##============================================================================##
  ##OUTPUT VISUAL
  ##============================================================================##
  if(verbose) {
    cat("\n\n[calc_FadingCorr()]\n")
    cat("\n >> Fading correction according to Huntley & Lamothe (2001)")

    if (tc != tc.g_value) {
      cat("\n >> g-value re-calculated for the given tc")
    }

    cat(paste(
      "\n\n .. used g-value:\t", round(g_value[1], digits = 3),
      " \u00b1 ", round(g_value[2], digits = 3),
      " %/decade", sep = ""
    ))
    cat(paste(
      "\n .. used tc:\t\t", format(tc, digits = 4, scientific = TRUE),
      " ka", sep = ""
    ))
    cat(paste0(
      "\n .. used kappa:\t\t", round(kappa[1], digits = 4),
      " \u00b1 ", round(kappa[2], digits = 4)
    ))
    cat("\n ----------------------------------------------")
    cat(paste0("\n seed: \t\t\t", ifelse(is.null(seed), NA, seed)))
    cat(paste0("\n n.MC: \t\t\t", n.MC))
    cat(paste0(
      "\n observations: \t\t",
      format(length(tempMC), digits = 2, scientific = TRUE), sep = ""
    ))
    cat("\n ----------------------------------------------")
    cat(paste0(
      "\n Age (faded):\t\t", round(age.faded[1], digits = 4), " ka \u00b1 ",
      round(age.faded[2], digits = 4), " ka"
    ))
    cat(paste0(
      "\n Age (corr.):\t\t", round(age.corr[1], digits = 4), " ka \u00b1 ",
      round(age.corr[2], digits = 4), " ka"
    ))
    cat("\n ---------------------------------------------- \n")
  }

  ##============================================================================##
  ##OUTPUT RLUM
  ##============================================================================##
  return(set_RLum(
    class = "RLum.Results",
    data = list(age.corr = age.corr, age.corr.MC = tempMC),
    info = list(call = sys.call())
  ))
}
1975):
#' apply equation given by Prescott & Hutton (1994) (c.f. Barbouti & Rastin
#' 1983)
#'
#' \deqn{D0 = C/(((absorber+d)^\alpha+a)*(absorber+H))*exp(-B*absorber)}
#'
#' b) If absorber is < 167 g/cm^2 (soft- and hard-component): derive D0 from
#' Fig. 1 in Prescott & Hutton (1988).
#'
#'
#' (4)
#' Calculate geomagnetic latitude (Prescott & Stephan 1982, Prescott &
#' Hutton 1994)
#'
#' \deqn{\lambda = arcsin(0.203*cos(latitude)*cos(longitude-291)+0.979*
#' sin(latitude))}
#'
#'
#' (5)
#' Apply correction for geomagnetic latitude and altitude above sea-level.
#' Values for F, J and H were read from Fig. 3 shown in Prescott & Stephan
#' (1982) and fitted with 3-degree polynomials for lambda < 35 degree and a
#' linear fit for lambda > 35 degree.
#'
#' \deqn{Dc = D0*(F+J*exp((altitude/1000)/H))}
#'
#'
#' (6)
#' Optional: Apply correction for geomagnetic field changes in the last
#' 0-80 ka (Prescott & Hutton 1994). Correction and altitude factors are given
#' in Table 1 and Fig. 1 in Prescott & Hutton (1994). Values for altitude
#' factor were fitted with a 2-degree polynomial. The altitude factor is
#' operated on the decimal part of the correction factor.
#'
#' \deqn{Dc' = Dc*correctionFactor}
#'
#'
#' **Usage of `depth` and `density`**
#'
#' (1) If only one value for depth and density is provided, the cosmic dose
#' rate is calculated for exactly one sample and one absorber as overburden
#' (i.e. `depth*density`).
#'
#' (2) In some cases it might be useful to calculate the cosmic dose rate for a
#' sample that is overlain by more than one absorber, e.g. in a profile with
This is done by providing more than one #' value for `depth` and only one for `density`. For example, #' `depth = c(1, 2, 3)` and `density = 1.7` will calculate the cosmic dose rate #' for three samples in 1, 2 and 3 m depth in a sediment of density 1.7 g/cm^3. #' #' @param depth [numeric] (**required**): #' depth of overburden (m). For more than one absorber use \cr #' `c(depth_1, depth_2, ..., depth_n)` #' #' @param density [numeric] (**required**): #' average overburden density (g/cm^3). For more than one absorber use \cr #' `c(density_1, density_2, ..., density_n)` #' #' @param latitude [numeric] (**required**): #' latitude (decimal degree), N positive #' #' @param longitude [numeric] (**required**): #' longitude (decimal degree), E positive #' #' @param altitude [numeric] (**required**): #' altitude (m above sea-level) #' #' @param corr.fieldChanges [logical] (*with default*): #' correct for geomagnetic field changes after Prescott & Hutton (1994). #' Apply only when justified by the data. #' #' @param est.age [numeric] (*with default*): #' estimated age range (ka) for geomagnetic field change correction (0-80 ka allowed) #' #' @param half.depth [logical] (*with default*): #' How to handle varying overburden thickness. If `TRUE` only half the #' depth is used for calculation. Apply only when justified, i.e. when a constant #' sedimentation rate can safely be assumed. #' #' @param error [numeric] (*with default*): #' general error (percentage) to be implemented on corrected cosmic dose rate estimate #' #' @param ... further arguments (`verbose` to disable/enable console output). #' #' @return #' Returns a terminal output. 
In addition an #' [RLum.Results-class]-object is returned containing the #' following element: #' #' \item{summary}{[data.frame] summary of all relevant calculation results.} #' \item{args}{[list] used arguments} #' \item{call}{[call] the function call} #' #' The output should be accessed using the function [get_RLum] #' #' @note #' Despite its universal use the equation to calculate the cosmic dose #' rate provided by Prescott & Hutton (1994) is falsely stated to be valid from #' the surface to 10^4 hg/cm^2 of standard rock. The original expression by #' Barbouti & Rastin (1983) only considers the muon flux (i.e. hard-component) #' and is by their own definition only valid for depths between 10-10^4 #' hg/cm^2. #' #' Thus, for near-surface samples (i.e. for depths < 167 g/cm^2) the equation #' of Prescott & Hutton (1994) underestimates the total cosmic dose rate, as it #' neglects the influence of the soft-component of the cosmic ray flux. For #' samples at zero depth and at sea-level the underestimation can be as large #' as ~0.1 Gy/ka. In a previous article, Prescott & Hutton (1988) give another #' approximation of Barbouti & Rastin's equation in the form of #' #' \deqn{D = 0.21*exp(-0.070*absorber+0.0005*absorber^2)} #' #' which is valid for depths between 150-5000 g/cm^2. For shallower depths (< #' 150 g/cm^2) they provided a graph (Fig. 1) from which the dose rate can be #' read. #' #' As a result, this function employs the equation of Prescott & Hutton (1994) #' only for depths > 167 g/cm^2, i.e. only for the hard-component of the cosmic #' ray flux. Cosmic dose rate values for depths < 167 g/cm^2 were obtained from #' the "AGE" program (Gruen 2009) and fitted with a 6-degree polynomial curve #' (and hence reproduces the graph shown in Prescott & Hutton 1988). However, #' these values assume an average overburden density of 2 g/cm^3. 
#' #' It is currently not possible to obtain more precise cosmic dose rate values #' for near-surface samples as there is no equation known to the author of this #' function at the time of writing. #' #' #' @section Function version: 0.5.2 #' #' @author #' Christoph Burow, University of Cologne (Germany) #' #' @seealso [BaseDataSet.CosmicDoseRate] #' #' @references #' Allkofer, O.C., Carstensen, K., Dau, W.D., Jokisch, H., 1975. #' Letter to the editor. The absolute cosmic ray flux at sea level. Journal of #' Physics G: Nuclear and Particle Physics 1, L51-L52. #' #' Barbouti, A.I., Rastin, B.C., 1983. A study of the absolute intensity of muons at sea level #' and under various thicknesses of absorber. Journal of Physics G: Nuclear and #' Particle Physics 9, 1577-1595. #' #' Crookes, J.N., Rastin, B.C., 1972. An #' investigation of the absolute intensity of muons at sea-level. Nuclear #' Physics B 39, 493-508. #' #' Gruen, R., 2009. The "AGE" program for the #' calculation of luminescence age estimates. Ancient TL 27, 45-46. #' #' Prescott, J.R., Hutton, J.T., 1988. Cosmic ray and gamma ray dosimetry for #' TL and ESR. Nuclear Tracks and Radiation Measurements 14, 223-227. #' #' Prescott, J.R., Hutton, J.T., 1994. Cosmic ray contributions to dose rates #' for luminescence and ESR dating: large depths and long-term time variations. #' Radiation Measurements 23, 497-500. #' #' Prescott, J.R., Stephan, L.G., 1982. The contribution of cosmic radiation to the environmental dose for #' thermoluminescence dating. Latitude, altitude and depth dependences. PACT 6, 17-25. 
#' #' @examples #' #' ##(1) calculate cosmic dose rate (one absorber) #' calc_CosmicDoseRate(depth = 2.78, density = 1.7, #' latitude = 38.06451, longitude = 1.49646, #' altitude = 364, error = 10) #' #' ##(2a) calculate cosmic dose rate (two absorber) #' calc_CosmicDoseRate(depth = c(5.0, 2.78), density = c(2.65, 1.7), #' latitude = 38.06451, longitude = 1.49646, #' altitude = 364, error = 10) #' #' ##(2b) calculate cosmic dose rate (two absorber) and #' ##correct for geomagnetic field changes #' calc_CosmicDoseRate(depth = c(5.0, 2.78), density = c(2.65, 1.7), #' latitude = 12.04332, longitude = 4.43243, #' altitude = 364, corr.fieldChanges = TRUE, #' est.age = 67, error = 15) #' #' #' ##(3) calculate cosmic dose rate and export results to .csv file #' #calculate cosmic dose rate and save to variable #' results<- calc_CosmicDoseRate(depth = 2.78, density = 1.7, #' latitude = 38.06451, longitude = 1.49646, #' altitude = 364, error = 10) #' #' # the results can be accessed by #' get_RLum(results, "summary") #' #' #export results to .csv file - uncomment for usage #' #write.csv(results, file = "c:/users/public/results.csv") #' #' ##(4) calculate cosmic dose rate for 6 samples from the same profile #' ## and save to .csv file #' #calculate cosmic dose rate and save to variable #' results<- calc_CosmicDoseRate(depth = c(0.1, 0.5 , 2.1, 2.7, 4.2, 6.3), #' density = 1.7, latitude = 38.06451, #' longitude = 1.49646, altitude = 364, #' error = 10) #' #' #export results to .csv file - uncomment for usage #' #write.csv(results, file = "c:/users/public/results_profile.csv") #' #' @md #' @export calc_CosmicDoseRate<- function( depth, density, latitude, longitude, altitude, corr.fieldChanges = FALSE, est.age = NA, half.depth = FALSE, error = 10, ... ) { ##============================================================================## ## ... 
ARGUMENTS ##============================================================================## settings <- list(verbose = TRUE) settings <- modifyList(settings, list(...)) ##============================================================================## ## CONSISTENCY CHECK OF INPUT DATA ##============================================================================## if(any(depth < 0) || any(density < 0)) { cat(paste("\nNo negative values allowed for depth and density")) stop(domain=NA) } if(corr.fieldChanges == TRUE) { if(is.na(est.age) == TRUE) { cat(paste("\nCorrection for geomagnetic field changes requires", "an age estimate."), fill = FALSE) stop(domain=NA) } if(est.age > 80) { cat(paste("\nCAUTION: No geomagnetic field change correction for samples", "older >80 ka possible!"), fill = FALSE) corr.fieldChanges<- FALSE } } if(length(density) > length(depth)) { stop("\nIf you provide more than one value for density please", " provide an equal number of values for depth.", call. = FALSE) } ##============================================================================## ## CALCULATIONS ##============================================================================## # initialize parameter for Prescott & Hutton (1994) equation C<- 6072 B<- 0.00055 d<- 11.6 alpha<- 1.68 a<- 75 H<- 212 #variable needed to check if cosmic dose rate is calculated for more #than one sample profile.mode<- FALSE #calculate absorber (hgcm) of one depth and one absorber [single sample] if(length(depth)==1) { hgcm<- depth*density if(half.depth == TRUE) { hgcm<- hgcm/2 } } #calculate total absorber of n depths and n densities [single sample] if(length(depth)==length(density)){ hgcm<- 0 for(i in 1:length(depth)) { hgcm<- hgcm + depth[i]*density[i] } if(half.depth == TRUE) { hgcm<- hgcm/2 } } #if there are >1 depths and only one density, calculate #absorber for each sample [multi sample] if(length(depth) > length(density) & length(density) == 1) { profile.mode<- TRUE hgcm<- 1:length(depth) for(i in 
1:length(depth)) { hgcm[i]<- depth[i]*density } if(half.depth == TRUE) { hgcm<- hgcm/2 } profile.results<- data.frame(rbind(c(1:3)),cbind(1:length(depth))) colnames(profile.results)<- c("depth (m)", "d0 (Gy/ka)", "dc (Gy/ka)","dc_error (Gy/ka)") } for(i in 1:length(hgcm)) { # calculate cosmic dose rate at sea-level for geomagnetic latitude 55 degrees if(hgcm[i]*100 >= 167) { d0<- (C/((((hgcm[i]+d)^alpha)+a)*(hgcm[i]+H)))*exp(-B*hgcm[i]) } if(hgcm[i]*100 < 167) { temp.hgcm<- hgcm[i]*100 d0.ph<- (C/((((hgcm[i]+d)^alpha)+a)*(hgcm[i]+H)))*exp(-B*hgcm[i]) if(hgcm[i]*100 < 40) { d0<- -6*10^-8*temp.hgcm^3+2*10^-5*temp.hgcm^2-0.0025*temp.hgcm+0.2969 } else { d0<- 2*10^-6*temp.hgcm^2-0.0008*temp.hgcm+0.2535 } if(d0.ph > d0) { d0<- d0.ph } } # Calculate geomagnetic latitude gml.temp<- 0.203*cos((pi/180)*latitude)* cos(((pi/180)*longitude)-(291*pi/180))+0.979* sin((pi/180)*latitude) true.gml<- asin(gml.temp)/(pi/180) gml<- abs(asin(gml.temp)/(pi/180)) # Find values for F, J and H from graph shown in Prescott & Hutton (1994) # values were read from the graph and fitted with 3 degree polynomials and a # linear part if(gml < 36.5) { # Polynomial fit F_ph<- -7*10^-7*gml^3-8*10^-5*gml^2-0.0009*gml+0.3988 } else { # Linear fit F_ph<- -0.0001*gml + 0.2347 } if(gml < 34) { # Polynomial fit J_ph<- 5*10^-6*gml^3-5*10^-5*gml^2+0.0026*gml+0.5177 } else { # Linear fit J_ph<- 0.0005*gml + 0.7388 } if(gml < 36) { # Polynomial fit H_ph<- -3*10^-6*gml^3-5*10^-5*gml^2-0.0031*gml+4.398 } else { # Linear fit H_ph<- 0.0002*gml + 4.0914 } # Apply correction for geomagnetic latitude and altitude according to # Prescott & Hutton (1994) dc<- d0*(F_ph + J_ph*exp((altitude/1000)/H_ph)) ## Additional correction for geomagnetic field change if(corr.fieldChanges==TRUE) { if(gml <= 35) { # Correction matrix for geomagnetic field changes at # sea-level (Prescott & Hutton (1994), Table 1) corr.matrix<- data.frame(rbind(1:5),1:7) colnames(corr.matrix)<- c(0, 10, 20, 30, 35, ">35") rownames(corr.matrix)<- 
c("0-5","5-10","10-15","15-20","20-35","35-50", "50-80") corr.matrix[1,]<- c(0.97, 0.97, 0.98, 0.98, 0.98, 1.00) corr.matrix[2,]<- c(0.99, 0.99, 0.99, 0.99, 0.99, 1.00) corr.matrix[3,]<- c(1.00, 1.00, 1.00, 1.00, 1.00, 1.00) corr.matrix[4,]<- c(1.01, 1.01, 1.01, 1.00, 1.00, 1.00) corr.matrix[5,]<- c(1.02, 1.02, 1.02, 1.01, 1.00, 1.00) corr.matrix[6,]<- c(1.03, 1.03, 1.02, 1.01, 1.00, 1.00) corr.matrix[7,]<- c(1.02, 1.02, 1.02, 1.01, 1.00, 1.00) # Find corresponding correction factor for given geomagnetic latitude # determine column if(gml <= 5) { corr.c<- 1 } if(5 < gml) { if(gml <= 15) { corr.c<- 2 } } if(15 < gml){ if(gml <= 25) { corr.c<- 3 } } if(25 < gml){ if(gml <= 32.5) { corr.c<- 4 } } if(32.5 < gml){ if(gml <= 35) { corr.c<- 5 } } # find row if(est.age <= 5) { corr.fac<- corr.matrix[1,corr.c] } if(5 < est.age) { if(est.age <= 10) { corr.fac<- corr.matrix[2,corr.c] } } if(10 < est.age){ if(est.age <= 15) { corr.fac<- corr.matrix[3,corr.c] } } if(15 < est.age){ if(est.age <= 20) { corr.fac<- corr.matrix[4,corr.c] } } if(20 < est.age){ if(est.age <= 35) { corr.fac<- corr.matrix[5,corr.c] } } if(35 < est.age){ if(est.age <= 50) { corr.fac<- corr.matrix[6,corr.c] } } if(50 < est.age){ if(est.age <= 80) { corr.fac<- corr.matrix[7,corr.c] } } # Find altitude factor via fitted function 2-degree polynomial # This factor is only available for positive altitudes if(altitude > 0) { alt.fac<- -0.026*(altitude/1000)^2 + 0.6628*altitude/1000 + 1.0435 # Combine geomagnetic latitude correction with altitude # correction (figure caption of Fig. 
1 in Precott and Hutton (1994)) diff.one<- corr.fac - 1 corr.fac<- corr.fac + diff.one * alt.fac } # Final correction of cosmic dose rate dc<- dc * corr.fac if (settings$verbose) print(paste("corr.fac",corr.fac,"diff.one",diff.one,"alt.fac",alt.fac)) } else { if (settings$verbose) cat(paste("\n No geomagnetic field change correction necessary for geomagnetic latitude >35 degrees!")) } } # calculate error dc.err<- dc*error/100 # save intermediate results before next sample is calculated if(profile.mode==TRUE) { profile.results[i,1]<- round(depth[i],2) profile.results[i,2]<- round(d0,4) profile.results[i,3]<- round(dc,4) profile.results[i,4]<- round(dc.err,4) } }#END.OF.LOOP call<- sys.call() args<- list(depth = depth, density = density, latitude = latitude, longitude = longitude, altitude = altitude, corr.fieldChanges = corr.fieldChanges, est.age = est.age, half.depth = half.depth, error = error) if(length(hgcm)==1) { ##============================================================================## ##TERMINAL OUTPUT ##============================================================================## if (settings$verbose) { cat("\n\n [calc_CosmicDoseRate]") cat(paste("\n\n ---------------------------------------------------------")) cat(paste("\n depth (m) :", depth)) cat(paste("\n density (g cm^-3) :", density)) cat(paste("\n latitude (N deg.) :", latitude)) cat(paste("\n longitude (E deg.) :", longitude)) cat(paste("\n altitude (m) :", altitude)) cat(paste("\n ---------------------------------------------------------")) cat(paste("\n total absorber (g cm^-2) :", round(hgcm[i]*100,3))) cat(paste("\n")) cat(paste("\n cosmic dose rate (Gy ka^-1) :", round(d0,4))) cat(paste("\n [@sea-level & 55 deg. N G.lat]")) cat(paste("\n")) cat(paste("\n geomagnetic latitude (deg.) 
:", round(true.gml,1))) cat(paste("\n")) cat(paste("\n cosmic dose rate (Gy ka^-1) :", round(dc,4),"+-", round(dc.err,4))) cat(paste("\n [corrected] ")) cat(paste("\n ---------------------------------------------------------\n\n")) } ##============================================================================## ##RETURN VALUES ##============================================================================## if(length(depth)==1) { temp1<- data.frame(depth=depth,density=density) } else { temp1a<- data.frame(rbind(c(1:length(depth)))) tmpcoln1<- 1:length(depth) for(i in 1:length(depth)) { temp1a[i]<- depth[i] tmpcoln1[i]<- paste("depth",i) } temp1b<- data.frame(rbind(c(1:length(density)))) tmpcoln2<- 1:length(density) for(i in 1:length(density)) { temp1b[i]<- density[i] tmpcoln2[i]<- paste("density",i) } colnames(temp1a)<- tmpcoln1 colnames(temp1b)<- tmpcoln2 temp1<- cbind(temp1a,temp1b) } temp2<- data.frame(latitude=latitude,longitude=longitude, altitude=altitude,total_absorber.gcm2=hgcm*100, d0=d0,geom_lat=true.gml,dc=dc) summary<- data.frame(cbind(temp1,temp2)) newRLumResults.calc_CosmicDoseRate <- set_RLum( class = "RLum.Results", data = list(summary=summary, args=args, call=call)) # Return values invisible(newRLumResults.calc_CosmicDoseRate) } else { #terminal output if (settings$verbose) { cat("\n\n [calc_CosmicDoseRate]") cat(paste("\n\n Calculating cosmic dose rate for",length(depth), "samples. 
\n\n")) print(profile.results) } #return value add.info<- data.frame(latitude=latitude,longitude=longitude, altitude=altitude,total_absorber.gcm2=hgcm*100, geom_lat=true.gml) add.info<- rbind(add.info*length(i)) colnames(profile.results)<- c("depth","d0","dc","dc_err") summary<- data.frame(cbind(profile.results,add.info)) newRLumResults.calc_CosmicDoseRate <- set_RLum( class = "RLum.Results", data = list(summary=summary, args=args, call=call)) # Return values invisible(newRLumResults.calc_CosmicDoseRate) } } Luminescence/R/Analyse_SAR.OSLdata.R0000644000176200001440000006405413231137116016606 0ustar liggesusers#' Analyse SAR CW-OSL measurements. #' #' The function analyses SAR CW-OSL curve data and provides a summary of the #' measured data for every position. The output of the function is optimised #' for SAR OSL measurements on quartz. #' #' The function works only for standard SAR protocol measurements introduced by #' Murray and Wintle (2000) with CW-OSL curves. For the calculation of the #' Lx/Tx value the function [calc_OSLLxTxRatio] is used. #' #' **Provided rejection criteria** #' #' `[recycling ratio]`: calculated for every repeated regeneration dose point. #' #' `[recuperation]`: recuperation rate calculated by comparing the Lx/Tx values of the zero #' regeneration point with the Ln/Tn value (the Lx/Tx ratio of the natural #' signal). For methodological background see Aitken and Smith (1988) #' #' `[IRSL/BOSL]`: the integrated counts (`signal.integral`) of an #' IRSL curve are compared to the integrated counts of the first regenerated #' dose point. It is assumed that IRSL curves got the same dose as the first #' regenerated dose point. **Note:** This is not the IR depletion ratio #' described by Duller (2003). #' #' @param input.data [Risoe.BINfileData-class] (**required**): #' input data from a Risoe BIN file, produced by the function [read_BIN2R]. #' #' @param signal.integral [vector] (**required**): #' channels used for the signal integral, e.g. 
`signal.integral=c(1:2)` #' #' @param background.integral [vector] (**required**): #' channels used for the background integral, e.g. `background.integral=c(85:100)` #' #' @param position [vector] (*optional*): #' reader positions that want to be analysed (e.g. `position=c(1:48)`. #' Empty positions are automatically omitted. If no value is given all #' positions are analysed by default. #' #' @param run [vector] (*optional*): #' range of runs used for the analysis. If no value is given the range of the #' runs in the sequence is deduced from the `Risoe.BINfileData` object. #' #' @param set [vector] (*optional*): #' range of sets used for the analysis. If no value is given the range of the #' sets in the sequence is deduced from the `Risoe.BINfileData` object. #' #' @param dtype [character] (*optional*): #' allows to further limit the curves by their data type (`DTYPE`), #' e.g., `dtype = c("Natural", "Dose")` limits the curves to this two data types. #' By default all values are allowed. #' See [Risoe.BINfileData-class] for allowed data types. #' #' @param keep.SEL [logical] (default): #' option allowing to use the `SEL` element of the [Risoe.BINfileData-class] manually. #' **NOTE:** In this case any limitation provided by `run`, `set` and `dtype` #' are ignored! #' #' @param info.measurement [character] (*with default*): #' option to provide information about the measurement on the plot #' output (e.g. name of the BIN or BINX file). #' #' @param output.plot [logical] (*with default*): #' plot output (`TRUE/FALSE`) #' #' @param output.plot.single [logical] (*with default*): #' single plot output (`TRUE/FALSE`) to allow for plotting the results in #' single plot windows. Requires `output.plot = TRUE`. #' #' @param cex.global [numeric] (*with default*): #' global scaling factor. #' #' @param ... 
further arguments that will be passed to the function #' [calc_OSLLxTxRatio] (supported: `background.count.distribution`, `sigmab`, #' `sig0`; e.g., for instrumental error) and can be used to adjust the plot. #' Supported" `mtext`, `log` #' #' @return #' A plot (*optional*) and [list] is returned containing the #' following elements: #' #' \item{LnLxTnTx}{[data.frame] of all calculated Lx/Tx values including signal, background counts and the dose points.} #' \item{RejectionCriteria}{[data.frame] with values that might by used as rejection criteria. NA is produced if no R0 dose point exists.} #' \item{SARParameters}{[data.frame] of additional measurement parameters obtained from the BIN file, e.g. preheat or read temperature #' (not valid for all types of measurements).} #' #' #' @note #' Rejection criteria are calculated but not considered during the #' analysis to discard values. #' #' **The analysis of IRSL data is not directly supported**. You may want to #' consider using the functions [analyse_SAR.CWOSL] or #' [analyse_pIRIRSequence] instead. #' #' **The development of this function will not be continued. We recommend to use the function [analyse_SAR.CWOSL] or instead.** #' #' #' @section Function version: 0.2.17 #' #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr #' Margret C. Fuchs, HZDR, Freiberg (Germany) #' #' @seealso [calc_OSLLxTxRatio], [Risoe.BINfileData-class], [read_BIN2R], [plot_GrowthCurve] #' #' @references #' Aitken, M.J. and Smith, B.W., 1988. Optical dating: recuperation #' after bleaching. Quaternary Science Reviews 7, 387-393. #' #' Duller, G., 2003. Distinguishing quartz and feldspar in single grain #' luminescence measurements. Radiation Measurements, 37 (2), 161-165. #' #' Murray, A.S. and Wintle, A.G., 2000. Luminescence dating of quartz using an #' improved single-aliquot regenerative-dose protocol. Radiation Measurements #' 32, 57-73. 
#' #' @keywords datagen dplot #' #' @examples #' ##load data #' data(ExampleData.BINfileData, envir = environment()) #' #' ##analyse data #' output <- Analyse_SAR.OSLdata(input.data = CWOSL.SAR.Data, #' signal.integral = c(1:5), #' background.integral = c(900:1000), #' position = c(1:1), #' output.plot = TRUE) #' #' ##combine results relevant for further analysis #' output.SAR <- data.frame(Dose = output$LnLxTnTx[[1]]$Dose, #' LxTx = output$LnLxTnTx[[1]]$LxTx, #' LxTx.Error = output$LnLxTnTx[[1]]$LxTx.Error) #' output.SAR #' #' @md #' @export Analyse_SAR.OSLdata <- function( input.data, signal.integral, background.integral, position, run, set, dtype, keep.SEL = FALSE, info.measurement = "unkown measurement", output.plot = FALSE, output.plot.single = FALSE, cex.global = 1, ... ){ ##============================================================================## ##CONFIG ##============================================================================## ##set colors gallery to provide more colors col <- get("col", pos = .LuminescenceEnv) ##============================================================================## ##ERROR HANDLING ##============================================================================## if(missing(input.data)==TRUE){stop("[Analyse_SAR.OSLdata] No input data given!") }else{sample.data<-input.data} if(missing(signal.integral)==TRUE){stop("[Analyse_SAR.OSLdata] No signal integral is given!")} if(missing(background.integral)==TRUE){stop("[Analyse_SAR.OSLdata] No background integral is given!")} ##set values for run and set if they are not defined by the user if(missing(position)==TRUE){position<-min(sample.data@METADATA[,"POSITION"]):max(sample.data@METADATA[,"POSITION"])} if(missing(run)==TRUE){run<-min(sample.data@METADATA[,"RUN"]):max(sample.data@METADATA[,"RUN"])} if(missing(set)==TRUE){set<-min(sample.data@METADATA[,"SET"]):max(sample.data@METADATA[,"SET"])} if(missing(dtype)){dtype <- c("Natural", "N+dose", "Bleach", "Bleach+dose", "Natural 
(Bleach)", "N+dose (Bleach)", "Dose", "Background")} # Deal with extra arguments ---------------------------------------------------- ##deal with addition arguments extraArgs <- list(...) background.count.distribution <- if ("background.count.distribution" %in% names(extraArgs)) { extraArgs$background.count.distribution } else { "non-poisson" } sigmab <- if("sigmab" %in% names(extraArgs)) {extraArgs$sigmab} else {NULL} ##============================================================================## ##CALCULATIONS ##============================================================================## ##loop over all positions for (i in position){ ##checking if position is valid if(length(which(sample.data@METADATA["POSITION"]==i))>0){ ##check if OSL curves are part of the data set if(nrow(sample.data@METADATA[sample.data@METADATA[,"LTYPE"]=="OSL",]) == 0){ stop("[Analyse_SAR.OSLdata()] No 'OSL' curves found!") } if(!keep.SEL){ ##select all OSL data depending on the run and set sample.data@METADATA[,"SEL"]<-FALSE sample.data@METADATA[sample.data@METADATA[,"LTYPE"]=="OSL" & sample.data@METADATA[,"RUN"]%in%run==TRUE & sample.data@METADATA[,"SET"]%in%set==TRUE & sample.data@METADATA[,"DTYPE"]%in%dtype==TRUE, "SEL"] <- TRUE } ##grep all OSL curve IDs OSL.curveID<-sample.data@METADATA[sample.data@METADATA["SEL"]==TRUE & sample.data@METADATA["POSITION"]==i,"ID"] ##estimate LnLx.curveID and TnTx.curveID from records LnLx.curveID<-OSL.curveID[seq(1,length(OSL.curveID),by=2)] TnTx.curveID<-OSL.curveID[seq(2,length(OSL.curveID),by=2)] ##Provide Values For Growth Curve Fitting ##(1) get dose information Dose<-sapply(1:length(LnLx.curveID),function(x){ Dose<-sample.data@METADATA[sample.data@METADATA["ID"]==LnLx.curveID[x],"IRR_TIME"] }) ##(2) set LxTx curves LnLxTnTx.curves<-(sapply(1:length(LnLx.curveID),function(x){ ##produce data.frames for Lx/Tx calculations Lx.HIGH<-sample.data@METADATA[sample.data@METADATA[,"ID"]==LnLx.curveID[x],"HIGH"] 
Lx.NPOINTS<-sample.data@METADATA[sample.data@METADATA[,"ID"]==LnLx.curveID[x],"NPOINTS"] Tx.HIGH<-sample.data@METADATA[sample.data@METADATA[,"ID"]==TnTx.curveID[x],"HIGH"] Tx.NPOINTS<-sample.data@METADATA[sample.data@METADATA[,"ID"]==TnTx.curveID[x],"NPOINTS"] Lx.curve<-data.frame(x=seq(Lx.HIGH/Lx.NPOINTS,Lx.HIGH,by=Lx.HIGH/Lx.NPOINTS), y=unlist(sample.data@DATA[LnLx.curveID[x]])) Tx.curve<-data.frame(x=seq(Tx.HIGH/Tx.NPOINTS,Tx.HIGH,by=Tx.HIGH/Tx.NPOINTS), y=unlist(sample.data@DATA[TnTx.curveID[x]])) return(list(Lx.curve,Tx.curve)) })) ##(3) calculate Lx/Tx ratio LnLxTnTx <- get_RLum( merge_RLum(lapply(1:length(LnLxTnTx.curves[1, ]), function(k) { calc_OSLLxTxRatio( Lx.data = as.data.frame(LnLxTnTx.curves[1, k]), Tx.data = as.data.frame(LnLxTnTx.curves[2, k]), signal.integral = signal.integral, background.integral = background.integral, background.count.distribution = background.count.distribution, sigmab = sigmab ) }))) ##finally combine to data.frame including the record ID for further analysis LnLxTnTx <- cbind(LnLxTnTx,LnLx.curveID,TnTx.curveID) ##(4.1) set info concerning the kind of regeneration points ##generate unique dose id - this are also the # for the generated points temp.DoseID<-c(0:(length(Dose)-1)) temp.DoseName<-paste("R",temp.DoseID,sep="") temp.DoseName<-cbind(Name=temp.DoseName,Dose) ##set natural temp.DoseName[temp.DoseName[,"Name"]=="R0","Name"]<-"Natural" ##set R0 temp.DoseName[temp.DoseName[,"Name"]!="Natural" & temp.DoseName[,"Dose"]==0,"Name"]<-"R0" ##find duplicated doses (including 0 dose - which means the Natural) temp.DoseDuplicated<-duplicated(temp.DoseName[,"Dose"]) ##combine temp.DoseName temp.DoseName<-cbind(temp.DoseName,Repeated=temp.DoseDuplicated) ##correct value for R0 (it is not really repeated) temp.DoseName[temp.DoseName[,"Dose"]==0,"Repeated"]<-FALSE ##(5) Combine all values in a data.frame temp.LnLxTnTx<-data.frame(Name=temp.DoseName[,"Name"], Dose=Dose, Repeated=as.logical(temp.DoseName[,"Repeated"])) 
LnLxTnTx<-cbind(temp.LnLxTnTx,LnLxTnTx) LnLxTnTx[,"Name"]<-as.character(LnLxTnTx[,"Name"]) ##(6) Calculate Recyling Ratio and Recuperation Rate ##(6.1) ##Calculate Recycling Ratio if(length(LnLxTnTx[LnLxTnTx[,"Repeated"]==TRUE,"Repeated"])>0){ ##identify repeated doses temp.Repeated<-LnLxTnTx[LnLxTnTx[,"Repeated"]==TRUE,c("Name","Dose","LxTx")] ##find corresponding previous dose for the repeated dose temp.Previous<-t(sapply(1:length(temp.Repeated[,1]),function(x){ LnLxTnTx[LnLxTnTx[,"Dose"]==temp.Repeated[x,"Dose"] & LnLxTnTx[,"Repeated"]==FALSE,c("Name","Dose","LxTx")] })) ##convert to data.frame temp.Previous<-as.data.frame(temp.Previous) ##set column names temp.ColNames<-sapply(1:length(temp.Repeated[,1]),function(x){ paste(temp.Repeated[x,"Name"],"/", temp.Previous[temp.Previous[,"Dose"]==temp.Repeated[x,"Dose"],"Name"] ,sep="") }) ##Calculate Recycling Ratio RecyclingRatio<-as.numeric(temp.Repeated[,"LxTx"])/as.numeric(temp.Previous[,"LxTx"]) ##Just transform the matrix and add column names RecyclingRatio<-t(RecyclingRatio) colnames(RecyclingRatio) <- unique(temp.ColNames) }else{RecyclingRatio<-NA} ##(6.2) ##Recuperation Rate if("R0" %in% LnLxTnTx[,"Name"]==TRUE){ Recuperation<-round(LnLxTnTx[LnLxTnTx[,"Name"]=="R0","LxTx"]/LnLxTnTx[LnLxTnTx[,"Name"]=="Natural","LxTx"],digits=4) }else{Recuperation<-NA} ##(6.3) IRSL ##Print IRSL Curves if IRSL curve is set sample.data@METADATA[,"SEL"]<-FALSE sample.data@METADATA[sample.data@METADATA["LTYPE"]=="IRSL" & sample.data@METADATA[,"RUN"]%in%run==TRUE & sample.data@METADATA[,"SET"]%in%set==TRUE,"SEL"]<-TRUE ##get IRSL curve ID & ID for Reg1 again IRSL.curveID<-sample.data@METADATA[sample.data@METADATA["SEL"]==TRUE & sample.data@METADATA["POSITION"]==i,"ID"] ##if no IRSL curve the length of the object is 0 if(length(IRSL.curveID)>0){ ##chose an IRSL curve with a dose of the first regeneration point Reg1again.curveID<-LnLxTnTx[LnLxTnTx[,"Repeated"]==TRUE & LnLxTnTx[,"Dose"]==LnLxTnTx[2,"Dose"],"LnLx.curveID"] 
if(length(Reg1again.curveID)>0){ ##BOSL/IRSL IRSL_BOSL<-round(sum(unlist(sample.data@DATA[IRSL.curveID])[signal.integral]) /sum(unlist(sample.data@DATA[Reg1again.curveID])[signal.integral]),digits=4) }else{IRSL_BOSL<-NA} }else{IRSL_BOSL<-NA} ##Combine the two values if(exists("RejectionCriteria")==FALSE){ RejectionCriteria<-cbind(RecyclingRatio,Recuperation,IRSL_BOSL) }else{ RejectionCriteria.temp<-cbind(RecyclingRatio,Recuperation,IRSL_BOSL) RejectionCriteria<-rbind(RejectionCriteria,RejectionCriteria.temp) } ##============================================================================## ##PLOTTING ##============================================================================## if(output.plot){ ##set plot settings plot.settings <- list( mtext = sample.data@METADATA[sample.data@METADATA[,"ID"]==LnLx.curveID[1],"SAMPLE"], log = "" ) ##modify arguments plot.settings <- modifyList(plot.settings, list(...)) if(output.plot.single==FALSE){ layout(matrix(c(1,2,1,2,3,4,3,5),4,2,byrow=TRUE)) } ##warning if number of curves exceed colour values if(length(col)0){ ##to ensure that the right TL curves are used the run and set number of the LnLx and TnTx curves are used LnLx.SET<-sapply(LnLx.curveID,function(x){sample.data@METADATA[sample.data@METADATA["ID"]==x,"SET"]}) LnLx.RUN<-sapply(LnLx.curveID,function(x){sample.data@METADATA[sample.data@METADATA["ID"]==x,"RUN"]}) TnTx.SET<-sapply(TnTx.curveID,function(x){sample.data@METADATA[sample.data@METADATA["ID"]==x,"SET"]}) TnTx.RUN<-sapply(TnTx.curveID,function(x){sample.data@METADATA[sample.data@METADATA["ID"]==x,"RUN"]}) ##get TL curve IDs in general considering the constraints TL.curveID<-sapply(1:length(TnTx.curveID),function(x){results<- sample.data@METADATA[sample.data@METADATA["SEL"]==TRUE & sample.data@METADATA["POSITION"]==i & sample.data@METADATA["SET"]>=LnLx.SET[x] & sample.data@METADATA["RUN"]>=LnLx.RUN[x] & sample.data@METADATA["SET"]<=TnTx.SET[x] & sample.data@METADATA["RUN"]<=TnTx.RUN[x],"ID"]}) ##get maximum value 
of TL curves TL.curveMax<-max(unlist(sample.data@DATA[TL.curveID])) ##get channel resolution (it should be the same for all values) HIGH<-unique(sample.data@METADATA[sample.data@METADATA["ID"]==TL.curveID[1],"HIGH"]) NPOINTS<-unique(sample.data@METADATA[sample.data@METADATA["ID"]==TL.curveID[1],"NPOINTS"]) xaxt.values<-seq(HIGH/NPOINTS,HIGH,by=HIGH/NPOINTS) ##get heating rate RATE<-unique(sample.data@METADATA[sample.data@METADATA["ID"]==TL.curveID[1],"RATE"]) ##open plot area for TL curves plot(NA,NA, xlab="T [\u00B0C]", ylab=paste("TL [cts/",HIGH/NPOINTS," \u00B0C]",sep=""), xlim=c(HIGH/NPOINTS,HIGH), ylim=c(1,TL.curveMax), main="Cutheat - TL curves", sub=paste("(",RATE," K/s)",sep=""), log=if(plot.settings$log=="y" | plot.settings$log=="xy"){"y"}else{""} ) ##plot curves and get legend values sapply(1:length(TL.curveID),function(x){ yaxt.values<-unlist(sample.data@DATA[TL.curveID[x]]) lines(xaxt.values,yaxt.values,col=col[x]) }) ##plot legend legend("topleft",as.character(LnLxTnTx$Name),lty=c(rep(1,length(TL.curveID))), cex=0.8*cex.global,col=col, bg="white", bty="n") ##sample name mtext(side=3,plot.settings$mtext,cex=0.7*cex.global) }else{ plot(NA,NA,xlim=c(0,100),ylim=c(0,100), main="Cutheat - TL curves") text(50,50,"no cutheat as TL curve detected") } ##======================================================================## ##Print IRSL Curves if IRSL curve is set if(is.na(IRSL_BOSL) == FALSE){ ##get channel resolution (it should be the same for all values) HIGH<-unique(sample.data@METADATA[sample.data@METADATA["ID"]==IRSL.curveID ,"HIGH"]) NPOINTS<-unique(sample.data@METADATA[sample.data@METADATA["ID"]==IRSL.curveID ,"NPOINTS"]) xaxt.values<-seq(HIGH/NPOINTS,HIGH,by=HIGH/NPOINTS) ##open plot IRSL curve plot(NA,NA, xlab="Time [s]", ylab=paste("OSL and IRSL [cts/",HIGH/NPOINTS," s]",sep=""), xlim=c(0,HIGH), ylim=c(0,max(unlist(sample.data@DATA[Reg1again.curveID]))), main="IRSLT" ) ##show integral limits abline(v=xaxt.values[min(signal.integral)], lty=2, 
col="gray") abline(v=xaxt.values[max(signal.integral)], lty=2, col="gray") ##print(length(sample.data@DATA[IRSL.curveID])) lines(xaxt.values,unlist(sample.data@DATA[IRSL.curveID]),col="red") lines(xaxt.values,unlist(sample.data@DATA[Reg1again.curveID[1]]),col="blue") ##legend legend("topright",c("R1 again","IRSL"),lty=c(1,1),col=c("blue","red"), bty="n") mtext(side=3,paste("IRSL/BOSL = ",IRSL_BOSL*100,"%",sep=""), cex=.8*cex.global ) } if(((is.na(IRSL_BOSL)==TRUE) & length(IRSL.curveID)>0) | ((is.na(IRSL_BOSL)==FALSE) & length(IRSL.curveID)>0)){ ##plot only IRSL curve plot(xaxt.values,unlist(sample.data@DATA[IRSL.curveID]), xlab="Time [s]", ylab=paste("IRSL [cts/",HIGH/NPOINTS," s]",sep=""), xlim=c(0,10), ylim=c(0,max(unlist(sample.data@DATA[IRSL.curveID]))), main="IRSL curve (10 s)", col="red", type="l" ) }else{ plot(NA,NA,xlim=c(0,10), ylim=c(0,10), main="IRSL curve") text(5,5,"no IRSL curve detected") } ##========================================================================= ##Plot header if(output.plot.single==TRUE){ mtext(side=3,paste("ALQ Pos. ",i,sep="")) }else{ mtext(side=3,paste("ALQ Pos. 
",i,sep=""),outer=TRUE,line=-2.5) } ##Plot footer mtext(side=4,info.measurement,outer=TRUE,line=-1.5,cex=0.6*cex.global, col="blue") ##output on terminal for plot writeLines(paste("\n[Analyse_SAR.OSLdata()] >> Figure for position ",i," produced.",sep="")) ##reset mfrow par(mfrow=c(1,1)) }#endif for output.plot ##preprate output of values ##============================================================================== ##Add LnLxTnTx values to the list if(exists("LnLxTnTx_List")==FALSE){LnLxTnTx_List<-list()} LnLxTnTx_List[[i]]<-LnLxTnTx rm(LnLxTnTx) }else{writeLines(paste("[Analyse_SAR.OSLdata()] >> Position ",i," is not valid and has been omitted!",sep=""))} #end if position checking }#end for loop ##============================================================================## ##OUTPUT OF FUNCTION ##============================================================================## ##get further information from the position used ##this is what you get from the Risoe file readTemp<-unique(sample.data@METADATA[sample.data@METADATA[,"POSITION"]==min(position) & sample.data@METADATA[,"LTYPE"]!="TL","TEMPERATURE"]) cutheat<-unique(sample.data@METADATA[sample.data@METADATA[,"POSITION"]==min(position) & sample.data@METADATA[,"LTYPE"]=="TL","HIGH"]) if(length(cutheat)==0){cutheat=NA} systemID<-unique(sample.data@METADATA[sample.data@METADATA[,"POSITION"]==min(position),"SYSTEMID"]) SARParameters<-data.frame(readTemp=readTemp,cutheat=cutheat,systemID=systemID) return(list(LnLxTnTx=LnLxTnTx_List, RejectionCriteria=RejectionCriteria, SARParameters=SARParameters)) } Luminescence/R/install_DevelopmentVersion.R0000644000176200001440000000751213540751607020603 0ustar liggesusers#' Attempts to install the development version of the 'Luminescence' package #' #' This function is a convenient method for installing the development #' version of the R package 'Luminescence' directly from GitHub. 
#' Attempts to install the development version of the 'Luminescence' package
#'
#' Convenience function to install the development version of the R package
#' 'Luminescence' directly from GitHub. The currently available development
#' branches are fetched via [Luminescence::github_branches][Luminescence::GitHub-API]
#' and the user is prompted to pick one.
#'
#' If `force_install = FALSE` (the default) the function only prints the R
#' code required for the installation to the console, so the user can copy
#' and run it manually. If `force_install = TRUE` and the package 'devtools'
#' is available, the function attempts to install the chosen branch itself
#' via [devtools::install_github].
#'
#' @param force_install [logical] (*optional*):
#' If `FALSE` (the default) the function produces and prints the required
#' code to the console for the user to run manually afterwards. When `TRUE`
#' and all requirements are fulfilled (see details) this function attempts
#' to install the package itself.
#'
#' @return
#' This function requires user input at the command prompt to choose the
#' desired development branch to be installed. The required R code to install
#' the package is then printed to the console.
#'
#' @examples
#'
#' \dontrun{
#' install_DevelopmentVersion()
#' }
#'
#' @md
#' @export
install_DevelopmentVersion <- function(force_install = FALSE) {

  message("\n[install_DevelopmentVersion]\n")

  ## fetch the development branches currently available on GitHub
  ## (see ?github_branches for the GitHub API implementation)
  branches <- github_branches()

  ## keep prompting until a valid branch number is entered;
  ## entering 0 aborts and returns NULL
  index <- NULL
  repeat {
    message(paste0("Which development branch do you want to install? \n",
                   paste0(" [", 1:length(branches$BRANCH), "]: ",
                          branches$BRANCH, collapse = "\n")))
    message("\n [0]: ")
    index <- readline()

    if (index == 0)
      return(NULL)

    if (index %in% seq_len(length(branches$BRANCH))) {
      cat("\n")
      break
    }

    ## invalid input -> reset and ask again
    index <- NULL
    cat("\n")
  }

  ## pick the branch matching the user's choice
  branch <- branches$BRANCH[as.numeric(index)]

  if (force_install) {

    ## confirm that all prerequisites are met before touching the installation
    reply <- NULL
    repeat {
      message("Are all prerequisites installed?",
              " (https://github.com/R-Lum/Luminescence/blob/master/README.md)\n",
              " [n/N]: No\n",
              " [y/Y]: Yes\n")
      reply <- readline()
      if (reply == "n" || reply == "N")
        return(NULL)
      if (reply == "y" || reply == "Y")
        break
    }

    ## 'devtools' is required for the automatic installation
    if (!requireNamespace("devtools", quietly = TRUE)) {
      message("Please install the 'devtools' package first by running the following command:\n",
              "install.packages('devtools')")
      return(NULL)
    }

    ## detach the 'Luminescence' package if it is currently loaded
    try(detach(name = "package:Luminescence", unload = TRUE, force = TRUE),
        silent = TRUE)

    ## also try to unload the package's dynamic library
    lib_paths <- sapply(.dynLibs(), function(x) x[["path"]])
    try(dyn.unload(lib_paths[grep("Luminescence", lib_paths)]), silent = TRUE)

    ## install the chosen development branch from GitHub
    devtools::install_github(paste0("r-lum/luminescence@", branch))

  } else {

    ## only print the required installation code to the console
    message("----\n",
            "Are all prerequisites installed? Make sure to have read\n",
            "https://github.com/R-Lum/Luminescence/blob/master/README.md\n",
            "----\n")
    message("Please copy and run the following code in your R command-line:\n")

    if (!requireNamespace("devtools", quietly = TRUE))
      message("install.packages('devtools')")

    message(branches$INSTALL[as.numeric(index)], "\n")
  }
}
#' Export RLum-objects to CSV
#'
#' This function exports [RLum-class]-objects to CSV-files using the R function
#' [utils::write.table]. All [RLum-class]-objects are supported, but the
#' export is lossy, i.e. only the pure numerical values are exported.
#' Information that cannot be coerced to a [data.frame] or a [matrix] is
#' discarded, as well as metadata.
#'
#' However, in combination with the implemented import functions, nearly every
#' supported import data format can be exported to CSV-files; this gives a great
#' deal of freedom in terms of compatibility with other tools.
#'
#' **Input is a list of objects**
#'
#' If the input is a [list] of objects all explicit function arguments can be
#' provided as a [list].
#'
#' @param object [RLum-class] or a [list] of `RLum` objects (**required**):
#' objects to be written
#'
#' @param path [character] (*optional*):
#' character string naming the folder the output is written to. If nothing
#' is provided `path` will be set to the working directory.
#' **Note:** this argument is ignored if the argument `export` is set to `FALSE`.
#'
#' @param prefix [character] (*with default*):
#' optional prefix to name the files. This prefix is valid for all written files
#'
#' @param export [logical] (*with default*):
#' enable or disable the file export. If set to `FALSE` nothing is written to
#' the file connection, but a list comprising objects of type [data.frame] and
#' [matrix] is returned instead
#'
#' @param compact [logical] (*with default*): if `TRUE` (the default) the output
#' will be simpler but less comprehensive, i.e. not all elements in the objects
#' will be fully broken down. This is in particular useful for writing
#' `RLum.Results` objects to CSV-files; such objects can be rather complex and
#' not all information is needed in a CSV-file or can be meaningfully translated
#' to it.
#'
#' @param ... further arguments that will be passed to the function
#' [utils::write.table]. All arguments except the argument `file` are supported
#'
#' @return
#' The function returns either a CSV-file (or many of them) or, for the
#' option `export = FALSE`, a list comprising objects of type [data.frame]
#' and [matrix]
#'
#' @section Function version: 0.2.1
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France)
#'
#' @seealso [RLum.Analysis-class], [RLum.Data-class], [RLum.Results-class],
#' [utils::write.table]
#'
#' @keywords IO
#'
#' @examples
#'
#' ##transform values to a list (and do not write)
#' data(ExampleData.BINfileData, envir = environment())
#' object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data)[[1]]
#' write_RLum2CSV(object, export = FALSE)
#'
#' \dontrun{
#'
#' ##write CSV-files to a temporary folder
#' ##(for usage replace by own path)
#' write_RLum2CSV(object, path = tempdir())
#'
#' }
#'
#' @md
#' @export
write_RLum2CSV <- function(
  object,
  path = NULL,
  prefix = "",
  export = TRUE,
  compact = TRUE,
  ...
){

  # General tests -------------------------------------------------------------------------------
  if(missing(object)){
    stop("[write_RLum2CSV()] input object is missing!", call. = FALSE)
  }

  # Self-call -----------------------------------------------------------------------------------
  ##this option allows to work on a list of RLum-objects
  if(is.list(object)){

    ##extend the list of arguments if set
    ##path
    path <- rep(list(path), length = length(object))

    ##prefix ... create automatic prefix if nothing is provided
    if(prefix == ""){
      prefix <- as.list(paste0("[[",1:length(object),"]]_"))

    }else{
      prefix <- rep(list(prefix), length = length(object))
    }

    ##export
    export <- rep(list(export), length = length(object))

    ##compact
    ##FIX: 'compact' is now forwarded to the self-call; previously it was
    ##silently dropped, so the default (TRUE) was always used for list input
    compact <- rep(list(compact), length = length(object))

    ##execute the self-call function
    temp <- lapply(1:length(object), function(x){
      write_RLum2CSV(
        object = object[[x]],
        path = path[[x]],
        prefix = prefix[[x]],
        export = export[[x]],
        compact = compact[[x]],
        ...
      )
    })

    ##this prevents that we get a list of NULL
    if(is.null(unlist(temp))){
      return(NULL)

    }else{
      return(temp)
    }
  }

  # Integrity tests -----------------------------------------------------------------------------
  ##check path
  ##if NULL condition: fall back to the working directory
  if(export == TRUE && is.null(path)){
    path <- getwd()
    message(paste0("[write_RLum2CSV()] Path automatically set to: ", path))
  }

  ##non-NULL condition: the target directory must exist
  if(export == TRUE && !dir.exists(path)){
    stop("[write_RLum2CSV()] Directory provided via the argument 'path' does not exist!",
         call. = FALSE)
  }

  ## What we need at the end of the day is a named list of data.frames or matrices we can export
  ## using the function write.table; the names of the list elements become the file names
  if(inherits(object, "RLum")){
    if(is(object, "RLum.Analysis") || is(object, "RLum.Data.Curve") ||
       is(object, "RLum.Data.Spectrum") || is(object, "RLum.Data.Image")){

      ##extract all elements ... depending on the input
      if(is(object, "RLum.Analysis")){
        ##tricky, we cannot use get_RLum() as the function lapply calls as.list() for an object!
        object_list <- lapply(object, function(x){get_RLum(x)})

        ##change names of the list and produce the right format straight away
        names(object_list) <- paste0(1:length(object_list),"_",names(object))

      } else {
        ##get object and make list
        object_list <- list(get_RLum(object))

        ##set new name
        names(object_list) <- paste0("1_",object@recordType)
      }

    }else if(is(object, "RLum.Results")){

      ##unlist whatever comes, but do not break structures like matrices,
      ##numerics and data.frames
      names <- names(object@data)

      ##get elements
      ##FIX: use inherits() instead of class(e) == "...": since R 4.0.0 a
      ##matrix has class c("matrix", "array"), so the equality comparison
      ##returns a length-2 logical and '||' on it fails in recent R versions
      object_list <- lapply(object@data, function(e){
        ##only run something on the element if it is worth it and pack it in a list
        if(inherits(e, "matrix") || inherits(e, "numeric") || inherits(e, "data.frame"))
          return(list(e))

        ##unlist the rest until the end
        if(!compact)
          return(unlist(e))

        ##now we return whatever we have
        return(e)
      })

      ##now unlist again one level
      object_list <- unlist(object_list, recursive = FALSE)

      ##sort out objects we do not like and we cannot proceed with ...
      ##(same class-check fix as above)
      object_list_rm <- vapply(object_list, function(x) {
        inherits(x, "matrix") || inherits(x, "numeric") || inherits(x, "data.frame")
      }, vector(mode = "logical", length = 1))

      ##remove unwanted objects
      object_list <- object_list[object_list_rm]

      ##set warning
      if(any(!object_list_rm))
        warning(paste0("[write_RLum2CSV()] ", length(which(!object_list_rm)),
                       " elements could not be converted to a CSV-structure!"),
                call. = FALSE)

      ##adjust the names
      names(object_list) <- paste0(1:length(object_list),"_",names(object_list))

    }else{
      try(stop("[write_RLum2CSV()] One particular RLum-object is not yet supported! NULL returned!",
               call. = FALSE))
      return(NULL)
    }

  }else{
    stop("[write_RLum2CSV()] Object needs to be a member of the object class RLum!",
         call. = FALSE)
  }

  # Export --------------------------------------------------------------------------------------
  if(export){

    ##set export settings for write.table
    export_settings.default <- list(
      append = FALSE,
      quote = TRUE,
      sep = ";",
      eol = "\n",
      na = "NA",
      dec = ".",
      row.names = FALSE,
      col.names = FALSE,
      qmethod = c("escape", "double"),
      fileEncoding = ""
    )

    ##modify on demand
    export_settings <- modifyList(x = export_settings.default, val = list(...))

    ##write files to file system
    for(i in 1:length(object_list)){
      utils::write.table(
        x = object_list[[i]],
        file = paste0(path,"/",prefix, names(object_list)[i],".csv"),
        append = export_settings$append,
        quote = export_settings$quote,
        sep = export_settings$sep,
        eol = export_settings$eol,
        na = export_settings$na,
        dec = export_settings$dec,
        row.names = export_settings$row.names,
        col.names = export_settings$col.names,
        qmethod = export_settings$qmethod,
        fileEncoding = export_settings$fileEncoding)
    }

  }else{
    return(object_list)
  }
}
The value for `sigmab` must be provided as a ratio #' (e.g, 0.2 for 20 \%). This model will be applied if `log = TRUE`. #' #' If `log=FALSE`, the modified un-logged model will be applied instead. This #' has essentially the same form as the original version. `gamma` and #' `sigma` are in Gy and `gamma` becomes the minimum true dose in the #' population. #' **Note** that the un-logged model requires `sigmab` to be in the same #' absolute unit as the provided De values (seconds or Gray). #' #' While the original (logged) version of the mimimum dose #' model may be appropriate for most samples (i.e. De distributions), the #' modified (un-logged) version is specially designed for modern-age and young #' samples containing negative, zero or near-zero De estimates (Arnold et al. #' 2009, p. 323). #' #' **Initial values & boundaries** #' #' The log likelihood calculations use the [nlminb] function for box-constrained #' optimisation using PORT routines. Accordingly, initial values for the four #' parameters can be specified via `init.values`. If no values are #' provided for `init.values` reasonable starting values are estimated #' from the input data. If the final estimates of *gamma*, *mu*, #' *sigma* and *p0* are totally off target, consider providing custom #' starting values via `init.values`. #' In contrast to previous versions of this function the boundaries for the #' individual model parameters are no longer required to be explicitly specified. #' If you want to override the default boundary values use the arguments #' `gamma.lower`, `gamma.upper`, `sigma.lower`, `sigma.upper`, `p0.lower`, `p0.upper`, #' `mu.lower` and `mu.upper`. #' #' **Bootstrap** #' #' When `bootstrap=TRUE` the function applies the bootstrapping method as #' described in Wallinga & Cunningham (2012). 
By default, the minimum age model #' produces 1000 first level and 3000 second level bootstrap replicates #' (actually, the number of second level bootstrap replicates is three times #' the number of first level replicates unless specified otherwise). The #' uncertainty on sigmab is 0.04 by default. These values can be changed by #' using the arguments `bs.M` (first level replicates), `bs.N` #' (second level replicates) and `sigmab.sd` (error on sigmab). With #' `bs.h` the bandwidth of the kernel density estimate can be specified. #' By default, `h` is calculated as #' #' \deqn{h = (2*\sigma_{DE})/\sqrt{n}} #' #' **Multicore support** #' #' This function supports parallel computing and can be activated by `multicore=TRUE`. #' By default, the number of available logical CPU cores is determined #' automatically, but can be changed with `cores`. The multicore support #' is only available when `bootstrap=TRUE` and spawns `n` R instances #' for each core to get MAM estimates for each of the N and M boostrap #' replicates. Note that this option is highly experimental and may or may not #' work for your machine. Also the performance gain increases for larger number #' of bootstrap replicates. Also note that with each additional core and hence #' R instance and depending on the number of bootstrap replicates the memory #' usage can significantly increase. Make sure that memory is always availabe, #' otherwise there will be a massive perfomance hit. #' #' **Likelihood profiles** #' #' The likelihood profiles are generated and plotted by the `bbmle` package. #' The profile likelihood plots look different to ordinary profile likelihood as #' #' "`[...]` the plot method for likelihood profiles displays the square root of #' the the deviance difference (twice the difference in negative log-likelihood from #' the best fit), so it will be V-shaped for cases where the quadratic approximation #' works well `[...]`." (Bolker 2016). 
#' #' For more details on the profile likelihood #' calculations and plots please see the vignettes of the `bbmle` package #' (also available here: [https://CRAN.R-project.org/package=bbmle]()). #' #' @param data [RLum.Results-class] or [data.frame] (**required**): #' for [data.frame]: two columns with De `(data[ ,1])` and De error `(data[ ,2])`. #' #' @param sigmab [numeric] (**required**): #' additional spread in De values. #' This value represents the expected overdispersion in the data should the sample be #' well-bleached (Cunningham & Walling 2012, p. 100). #' **NOTE**: For the logged model (`log = TRUE`) this value must be #' a fraction, e.g. 0.2 (= 20 \%). If the un-logged model is used (`log = FALSE`), #' sigmab must be provided in the same absolute units of the De values (seconds or Gray). #' See details. #' #' @param log [logical] (*with default*): #' fit the (un-)logged minimum dose model to De data. #' #' @param par [numeric] (*with default*): #' apply the 3- or 4-parameter minimum age model (`par=3` or `par=4`). The MAM-3 is #' used by default. #' #' @param bootstrap [logical] (*with default*): #' apply the recycled bootstrap approach of Cunningham & Wallinga (2012). #' #' @param init.values [numeric] (*optional*): #' a named list with starting values for gamma, sigma, p0 and mu #' (e.g. `list(gamma=100, sigma=1.5, p0=0.1, mu=100)`). If no values are provided reasonable values #' are tried to be estimated from the data. **NOTE** that the initial values must always be given #' in the absolute units. The the logged model is applied (`log = TRUE`), the provided `init.values` #' are automatically log transformed. #' #' @param level [logical] (*with default*): #' the confidence level required (defaults to 0.95). #' #' @param log.output [logical] (*with default*): #' If `TRUE` the console output will also show the logged values of the final parameter estimates #' and confidence intervals (only applicable if `log = TRUE`). 
#' #' @param plot [logical] (*with default*): #' plot output (`TRUE`/`FALSE`) #' #' @param multicore [logical] (*with default*): #' enable parallel computation of the bootstrap by creating a multicore SNOW cluster. Depending #' on the number of available logical CPU cores this may drastically reduce #' the computation time. Note that this option is highly experimental and may not #' work on all machines. (`TRUE`/`FALSE`) #' #' @param ... (*optional*) further arguments for bootstrapping #' (`bs.M, bs.N, bs.h, sigmab.sd`). See details for their usage. #' Further arguments are #' - `verbose` to de-/activate console output (logical), #' - `debug` for extended console output (logical) and #' - `cores` (integer) to manually specify the number of cores to be used when `multicore=TRUE`. #' #' @return Returns a plot (*optional*) and terminal output. In addition an #' [RLum.Results-class] object is returned containing the #' following elements: #' #' \item{.$summary}{[data.frame] summary of all relevant model results.} #' \item{.$data}{[data.frame] original input data} #' \item{args}{[list] used arguments} #' \item{call}{[call] the function call} #' \item{.$mle}{[mle2] object containing the maximum log likelhood functions for all parameters} #' \item{BIC}{[numeric] BIC score} #' \item{.$confint}{[data.frame] confidence intervals for all parameters} #' \item{.$profile}{[profile.mle2] the log likelihood profiles} #' \item{.$bootstrap}{[list] bootstrap results} #' #' The output should be accessed using the function [get_RLum] #' #' @note #' The default starting values for *gamma*, *mu*, *sigma* #' and *p0* may only be appropriate for some De data sets and may need to #' be changed for other data. This is especially true when the un-logged #' version is applied. \cr #' Also note that all R warning messages are suppressed #' when running this function. 
If the results seem odd consider re-running the #' model with `debug=TRUE` which provides extended console output and #' forwards all internal warning messages. #' #' @section Function version: 0.4.4 #' #' @author #' Christoph Burow, University of Cologne (Germany) \cr #' Based on a rewritten S script of Rex Galbraith, 2010 \cr #' The bootstrap approach is based on a rewritten MATLAB script of Alastair Cunningham. \cr #' Alastair Cunningham is thanked for his help in implementing and cross-checking the code. #' #' @seealso [calc_CentralDose], [calc_CommonDose], [calc_FiniteMixture], #' [calc_FuchsLang2001], [calc_MaxDose] #' #' @references #' Arnold, L.J., Roberts, R.G., Galbraith, R.F. & DeLong, S.B., #' 2009. A revised burial dose estimation procedure for optical dating of young #' and modern-age sediments. Quaternary Geochronology 4, 306-325. #' #' Galbraith, R.F. & Laslett, G.M., 1993. Statistical models for mixed fission #' track ages. Nuclear Tracks Radiation Measurements 4, 459-470. #' #' Galbraith, R.F., Roberts, R.G., Laslett, G.M., Yoshida, H. & Olley, J.M., #' 1999. Optical dating of single grains of quartz from Jinmium rock shelter, #' northern Australia. Part I: experimental design and statistical models. #' Archaeometry 41, 339-364. #' #' Galbraith, R.F., 2005. Statistics for #' Fission Track Analysis, Chapman & Hall/CRC, Boca Raton. #' #' Galbraith, R.F. & Roberts, R.G., 2012. Statistical aspects of equivalent dose and error #' calculation and display in OSL dating: An overview and some recommendations. #' Quaternary Geochronology 11, 1-27. #' #' Olley, J.M., Roberts, R.G., Yoshida, H., Bowler, J.M., 2006. Single-grain optical dating of grave-infill #' associated with human burials at Lake Mungo, Australia. Quaternary Science #' Reviews 25, 2469-2474. #' #' **Further reading** #' #' Arnold, L.J. & Roberts, R.G., 2009. Stochastic modelling of multi-grain equivalent dose #' (De) distributions: Implications for OSL dating of sediment mixtures. 
#' Quaternary Geochronology 4, 204-230. #' #' Bolker, B., 2016. Maximum likelihood estimation analysis with the bbmle package. #' In: Bolker, B., R Development Core Team, 2016. bbmle: Tools for General Maximum Likelihood Estimation. #' R package version 1.0.18. [https://CRAN.R-project.org/package=bbmle]() #' #' Bailey, R.M. & Arnold, L.J., 2006. Statistical modelling of single grain quartz De distributions and an #' assessment of procedures for estimating burial dose. Quaternary Science #' Reviews 25, 2475-2502. #' #' Cunningham, A.C. & Wallinga, J., 2012. Realizing the potential of fluvial archives using robust OSL chronologies. #' Quaternary Geochronology 12, 98-106. #' #' Rodnight, H., Duller, G.A.T., Wintle, A.G. & Tooth, S., 2006. Assessing the reproducibility and accuracy #' of optical dating of fluvial deposits. Quaternary Geochronology 1, 109-120. #' #' Rodnight, H., 2008. How many equivalent dose values are needed to #' obtain a reproducible distribution?. Ancient TL 26, 3-10. #' #' #' @examples #' #' ## Load example data #' data(ExampleData.DeValues, envir = environment()) #' #' # (1) Apply the minimum age model with minimum required parameters. #' # By default, this will apply the un-logged 3-parameter MAM. #' calc_MinDose(data = ExampleData.DeValues$CA1, sigmab = 0.1) #' #' \dontrun{ #' # (2) Re-run the model, but save results to a variable and turn #' # plotting of the log-likelihood profiles off. 
#' mam <- calc_MinDose(data = ExampleData.DeValues$CA1, #' sigmab = 0.1, #' plot = FALSE) #' #' # Show structure of the RLum.Results object #' mam #' #' # Show summary table that contains the most relevant results #' res <- get_RLum(mam, "summary") #' res #' #' # Plot the log likelihood profiles retroactively, because before #' # we set plot = FALSE #' plot_RLum(mam) #' #' # Plot the dose distribution in an abanico plot and draw a line #' # at the minimum dose estimate #' plot_AbanicoPlot(data = ExampleData.DeValues$CA1, #' main = "3-parameter Minimum Age Model", #' line = mam,polygon.col = "none", #' hist = TRUE, #' rug = TRUE, #' summary = c("n", "mean", "mean.weighted", "median", "in.ci"), #' centrality = res$de, #' line.col = "red", #' grid.col = "none", #' line.label = paste0(round(res$de, 1), "\U00B1", #' round(res$de_err, 1), " Gy"), #' bw = 0.1, #' ylim = c(-25, 18), #' summary.pos = "topleft", #' mtext = bquote("Parameters: " ~ #' sigma[b] == .(get_RLum(mam, "args")$sigmab) ~ ", " ~ #' gamma == .(round(log(res$de), 1)) ~ ", " ~ #' sigma == .(round(res$sig, 1)) ~ ", " ~ #' rho == .(round(res$p0, 2)))) #' #' #' #' # (3) Run the minimum age model with bootstrap #' # NOTE: Bootstrapping is computationally intensive #' # (3.1) run the minimum age model with default values for bootstrapping #' calc_MinDose(data = ExampleData.DeValues$CA1, #' sigmab = 0.15, #' bootstrap = TRUE) #' #' # (3.2) Bootstrap control parameters #' mam <- calc_MinDose(data = ExampleData.DeValues$CA1, #' sigmab = 0.15, #' bootstrap = TRUE, #' bs.M = 300, #' bs.N = 500, #' bs.h = 4, #' sigmab.sd = 0.06, #' plot = FALSE) #' #' # Plot the results #' plot_RLum(mam) #' #' # save bootstrap results in a separate variable #' bs <- get_RLum(mam, "bootstrap") #' #' # show structure of the bootstrap results #' str(bs, max.level = 2, give.attr = FALSE) #' #' # print summary of minimum dose and likelihood pairs #' summary(bs$pairs$gamma) #' #' # Show polynomial fits of the bootstrap pairs #' 
bs$poly.fits$poly.three #' #' # Plot various statistics of the fit using the generic plot() function #' par(mfcol=c(2,2)) #' plot(bs$poly.fits$poly.three, ask = FALSE) #' #' # Show the fitted values of the polynomials #' summary(bs$poly.fits$poly.three$fitted.values) #' } #' #' @md #' @export calc_MinDose <- function( data, sigmab, log = TRUE, par = 3, bootstrap = FALSE, init.values, level = 0.95, log.output = FALSE, plot = TRUE, multicore = FALSE, ... ){ ## ============================================================================## ## CONSISTENCY CHECK OF INPUT DATA ## ============================================================================## if (!missing(data)) { if (!is(data, "data.frame") & !is(data, "RLum.Results")) { stop("[calc_MinDose] Error: 'data' object has to be of type\n 'data.frame' or 'RLum.Results'!") } else { if (is(data, "RLum.Results")) { data <- get_RLum(data, "data") } } } if (any(!complete.cases(data))) { message(paste("\n[calc_MinDose] Warning:\nInput data contained NA/NaN values,", "which were removed prior to calculations!")) data <- data[complete.cases(data), ] } if (!missing(init.values) && length(init.values) != 4) { stop("[calc_MinDose] Error: Please provide initial values for all model parameters. ", "Missing parameter(s): ", paste(setdiff(c("gamma", "sigma", "p0", "mu"), names(init.values)), collapse = ", "), call. = FALSE) } ##============================================================================## ## ... ARGUMENTS ##============================================================================## extraArgs <- list(...) 
## check if this function is called by calc_MaxDose() if ("invert" %in% names(extraArgs)) { invert <- extraArgs$invert if (!log) { log <- TRUE # overwrite user choice as max dose model currently only supports the logged version cat(paste("\n[WARNING] The maximum dose model only supports the logged version.", "'log' was automatically changed to TRUE.\n\n")) } } else { invert <- FALSE } ## console output if ("verbose" %in% names(extraArgs)) { verbose <- extraArgs$verbose } else { verbose <- TRUE } ## bootstrap replications # first level bootstrap if ("bs.M" %in% names(extraArgs)) { M <- as.integer(extraArgs$bs.M) } else { M <- 1000 } # second level bootstrap if ("bs.N" %in% names(extraArgs)) { N <- as.integer(extraArgs$bs.N) } else { N <- 3*M } # KDE bandwith if ("bs.h" %in% names(extraArgs)) { h <- extraArgs$bs.h } else { h <- (sd(data[ ,1])/sqrt(length(data[ ,1])))*2 } # standard deviation of sigmab if ("sigmab.sd" %in% names(extraArgs)) { sigmab.sd <- extraArgs$sigmab.sd } else { sigmab.sd <- 0.04 } if ("debug" %in% names(extraArgs)) { debug <- extraArgs$debug } else { debug <- FALSE } if ("cores" %in% names(extraArgs)) { cores <- extraArgs$cores } else { cores <- parallel::detectCores() if (multicore) message(paste("Logical CPU cores detected:", cores)) } ## WARNINGS ---- # if (!debug) # options(warn = -1) ##============================================================================## ## START VALUES ##============================================================================## if (missing(init.values)) { start <- list(gamma = ifelse(log, log(quantile(data[ ,1], probs = 0.25, na.rm = TRUE)), quantile(data[ ,1], probs = 0.25, na.rm = TRUE)), sigma = 1.2, p0 = 0.01, mu = ifelse(log, log(quantile(data[ ,1], probs = 0.25, na.rm = TRUE)), mean(data[ ,1]))) } else { start <- list(gamma = ifelse(log, log(init.values$gamma), init.values$gamma), sigma = ifelse(log, log(init.values$sigma), init.values$sigma), p0 = init.values$p0, mu = ifelse(log, log(init.values$mu), 
init.values$mu)) } ##============================================================================## ## ESTIMATE BOUNDARY PARAMETERS ##============================================================================## boundaries <- list( # gamma.lower = min(data[ ,1]/10), # gamma.upper = max(data[ ,1]*1.1), # sigma.lower = 0, # sigma.upper = 5, # mu.lower = min(data[ ,1])/10, # mu.upper = max(data[ ,1]*1.1) gamma.lower = -Inf, gamma.upper = Inf, sigma.lower = 0, sigma.upper = Inf, p0.lower = 0, p0.upper = 1, mu.lower = -Inf, mu.upper = Inf ) boundaries <- modifyList(boundaries, list(...)) # combine lower and upper boundary values to vectors if (log) { xlb <- c(ifelse(is.infinite(boundaries$gamma.lower), boundaries$gamma.lower, log(boundaries$gamma.lower)), boundaries$sigma.lower, boundaries$p0.lower) xub <- c(ifelse(is.infinite(boundaries$gamma.upper), boundaries$gamma.upper, log(boundaries$gamma.upper)), boundaries$sigma.upper, boundaries$p0.lower) } else { xlb <- c(boundaries$gamma.lower, boundaries$sigma.lower, boundaries$p0.lower) xub <- c(boundaries$gamma.upper, exp(boundaries$sigma.upper), boundaries$p0.lower) } if (par == 4) { xlb <- c(xlb, ifelse(log, ifelse(is.infinite(boundaries$mu.lower), -Inf, log(boundaries$mu.lower)), boundaries$mu.lower)) xub <- c(xub, ifelse(log, ifelse(is.infinite(boundaries$mu.upper), -Inf, log(boundaries$mu.upper)), boundaries$mu.upper)) } ##============================================================================## ## AUXILLARY FUNCTIONS ##============================================================================## # THIS FUNCTION CALCULATES THE NEGATIVE LOG LIKELIHOOD OF THE DATA Neglik_f <- function(gamma, sigma, p0, mu, data) { # this calculates the negative of the log likelihood of the # data (data) for a given set of parameters (gamma, sigma, p0) # data is a 2x2 matrix of data: De, rel_error (including sigma_b) # recover the data zi <- data[ ,1] si <- data[ ,2] n <- length(zi) # in the MAM-3 gamma and mu are assumed to be 
equal if (par == 3) mu <- gamma # calculate sigma^2 + seld^2, mu0 and sigma0 s2 <- sigma^2 + si^2 sigma0 <- 1/sqrt(1/sigma^2 + 1/si^2) mu0 <- (mu/sigma^2 + zi/si^2)/(1/sigma^2 + 1/si^2) # calculate the log-likelihood logsqrt2pi <- 0.5*log(2*pi) res0 <- (gamma - mu0)/sigma0 res1 <- (gamma - mu)/sigma lf1i <- log(p0) - log(si) - 0.5*((zi-gamma)/si)^2 - logsqrt2pi lf2i <- log(1-p0) - 0.5*log(s2) - 0.5*(zi-mu)^2/s2 - logsqrt2pi lf2i <- lf2i + log(1-pnorm(res0)) - log(1-pnorm(res1)) llik <- log( exp(lf1i) + exp(lf2i) ) negll <- -sum(llik) return(negll) } # THIS MAXIMIZES THE Neglik_f LIKELIHOOD FUNCTION AND RETURNS AN MLE OBJECT Get_mle <- function(data) { # TODO: PROPER ERROR HANDLING tryCatch({ suppressWarnings( mle <- bbmle::mle2(data = list(data = data), optimizer = "nlminb", lower=c(gamma = boundaries$gamma.lower, sigma = boundaries$sigma.lower, p0 = boundaries$p0.lower, mu = boundaries$mu.lower), upper=c(gamma = boundaries$gamma.upper, sigma = boundaries$sigma.upper, p0 = boundaries$p0.upper, mu = boundaries$mu.upper), minuslogl = Neglik_f, control = list(iter.max = 1000L), start = start) ) }, error = function(e) { stop(paste("Sorry, seems like I encountered an error...:", e), call. 
= FALSE) }) return(mle) } ##============================================================================## ## MAIN PROGRAM ##============================================================================## # combine errors if (log) { if (invert) { lcd <- log(data[ ,1])*-1 x.offset <- abs(min(lcd)) lcd <- lcd+x.offset } else { lcd <- log(data[ ,1]) } lse <- sqrt((data[ ,2]/data[ ,1])^2 + sigmab^2) } else { lcd <- data[ ,1] lse <- sqrt(data[ ,2]^2 + sigmab^2) } # create new data frame with DE and combined relative error dat <- cbind(lcd, lse) # get the maximum likelihood estimate ests <- Get_mle(dat) # check if any standard errors are NA or NaN coef_err <- suppressWarnings( t(as.data.frame(bbmle::summary(ests)@coef[ ,2])) ) if (debug) print(bbmle::summary(ests)) if (any(is.nan(coef_err))) coef_err[which(is.nan(coef_err))] <- t(as.data.frame(ests@coef))[which(is.nan(coef_err))] / 100 if (any(is.na(coef_err))) coef_err[which(is.na(coef_err))] <- t(as.data.frame(ests@coef))[which(is.na(coef_err))] / 100 if (par == 3) which <- c("gamma", "sigma", "p0") if (par == 4) which <- c("gamma", "sigma", "p0", "mu") # calculate profile log likelihoods prof <- suppressWarnings( bbmle::profile(ests, which = which, std.err = as.vector(coef_err), #try_harder = TRUE, quietly = TRUE, tol.newmin = Inf, skiperrs = TRUE, prof.lower=c(gamma = -Inf, sigma = 0, p0 = 0, mu = -Inf), prof.upper=c(gamma = Inf, sigma = Inf, p0 = 1, mu = Inf) ) ) # Fallback when profile() returns a 'better' fit maxsteps <- 100 cnt <- 1 while (!inherits(prof, "profile.mle2")) { message(paste0("## Trying to find a better fit (", cnt, "/10) ##")) if (maxsteps == 0L) stop(paste("Sorry, but I can't find a converging fit for the profile log-likelihood."), call. 
= FALSE) prof <- suppressWarnings( bbmle::profile(ests, which = which, std.err = as.vector(coef_err), try_harder = TRUE, quietly = TRUE, maxsteps = maxsteps, tol.newmin = Inf, skiperrs = TRUE, prof.lower=c(gamma = -Inf, sigma = 0, p0 = 0, mu = -Inf), prof.upper=c(gamma = Inf, sigma = Inf, p0 = 1, mu = Inf) ) ) maxsteps <- maxsteps - 10 cnt <- cnt + 1 } ## TODO: reduce the redundant code ## DELETE rows where z = -Inf/Inf prof@profile$gamma <- prof@profile$gamma[which(prof@profile$gamma["z"] != Inf), ] prof@profile$gamma <- prof@profile$gamma[which(prof@profile$gamma["z"] != -Inf), ] prof@profile$sigma <- prof@profile$sigma[which(prof@profile$sigma["z"] != Inf), ] prof@profile$sigma <- prof@profile$sigma[which(prof@profile$sigma["z"] != -Inf), ] prof@profile$p0 <- prof@profile$p0[which(prof@profile$p0["z"] != Inf), ] prof@profile$p0 <- prof@profile$p0[which(prof@profile$p0["z"] != -Inf), ] if (par == 4) { prof@profile$mu <- prof@profile$mu[which(prof@profile$mu["z"] != Inf), ] prof@profile$mu <- prof@profile$mu[which(prof@profile$mu["z"] != -Inf), ] } # calculate Bayesian Information Criterion (BIC) BIC <- BIC(ests) # retrieve results from mle2-object pal <- if (log) { if (invert) { exp((bbmle::coef(ests)[["gamma"]]-x.offset)*-1) } else { exp(bbmle::coef(ests)[["gamma"]]) } } else { bbmle::coef(ests)[["gamma"]] } sig <- bbmle::coef(ests)[["sigma"]] p0end <- bbmle::coef(ests)[["p0"]] if (par == 4) { muend <- ifelse(log, exp(bbmle::coef(ests)[["mu"]]), bbmle::coef(ests)[["mu"]]) } else { muend <- NA } ##============================================================================## ## ERROR CALCULATION #### METHOD 1: follow the instructions of Galbraith & Roberts (2012) #### # "If the likelihood profile is symmetrical about the parameter, an approximate standard error # can be calculated by dividing the length of this interval by 3.92" conf <- suppressWarnings( as.data.frame(bbmle::confint(prof, tol.newmin = Inf, quietly = TRUE, level = level)) ) class(conf[,1]) <- 
class(conf[,2]) <- "numeric" if (invert) { conf[1, ] <- (conf[1, ]-x.offset)*-1 t <- conf[1,1] conf[1,1] <- conf[1,2] conf[1,2] <- t } gamma_err <- if (log) { (exp(conf["gamma",2])-exp(conf["gamma",1]))/3.92 } else { (conf["gamma",2]-conf["gamma",1])/3.92 } ##============================================================================## ## AGGREGATE RESULTS summary <- data.frame(de=pal, de_err=gamma_err, ci_level = level, "ci_lower"=ifelse(log, exp(conf["gamma",1]), conf["gamma",1]), "ci_upper"=ifelse(log, exp(conf["gamma",2]), conf["gamma",2]), par=par, sig=ifelse(log, exp(sig), sig), p0=p0end, mu=muend, Lmax=-ests@min, BIC=BIC) call <- sys.call() args <- list(log=log, sigmab=sigmab, par = par, bootstrap=bootstrap, init.values=start, log.output = log.output, bs.M=M, bs.N=N, bs.h=h, sigmab.sd=sigmab.sd) ##============================================================================## ## BOOTSTRAP ##============================================================================## if (bootstrap) { ## BOOTSTRAP FUNCTIONS ---- # Function that draws N+M sets of integer values from 1:n and returns # both the indices and frequencies draw_Freq <- function() { f <- R <- matrix(0L, N+M, n) for (i in seq_len(N+M)) { R[i, ] <- sample(x = n, size = n, replace = TRUE) f[i, ] <- tabulate(R, n) } return(list(R = R, freq = f)) } # Function that adds the additional error sigmab to each individual DE error combine_Errors <- function(d, e) { if (log) { d[ ,2] <- sqrt((d[ ,2]/d[ ,1])^2 + e^2) d[ ,1] <- log(d[ ,1]) } else { d[ ,2] <- sqrt(d[ ,2]^2 + e^2) } return(d) } # Function that produces N+M replicates from the original data set using # randomly sampled indices with replacement and adding a randomly drawn # sigmab error create_Replicates <- function(f, s) { d <- apply(f$R, 1, function(x) data[x, ]) r <- mapply(function(x, y) combine_Errors(x, y), d, s, SIMPLIFY = FALSE) return(r) } # Function to extract the estimate of gamma from mle2 objects and converting # it back to the 'normal' 
scale save_Gamma <- function(d) { if (log) { if (invert) { m <- exp((bbmle::coef(d)[["gamma"]]-x.offset)*-1) } else { m <- exp(bbmle::coef(d)[["gamma"]]) } } else { m <- bbmle::coef(d)[["gamma"]] } return(m) } # Function that takes each of the N replicates and produces a kernel density # estimate of length n. The normalised values are then returned as a matrix # with dimensions [N, n] get_KDE <- function(d) { f <- approx(density(x=d[ ,1], kernel="gaussian", bw = h), xout = d[ ,1]) pStarTheta <- as.vector(f$y / sum(f$y)) x <- matrix(t(pStarTheta/(1/n)), N, n, byrow = TRUE) return(x) } # Function that calculates the product term of the recycled bootstrap get_ProductTerm <- function(Pmat, b2Pmatrix) { prodterm <- apply(Pmat^b2Pmatrix$freq[1:N, ], 1, prod) return(prodterm) } # Function that calculates the pseudo likelihoods for M replicates and # returns the dose-likelihood pairs make_Pairs <- function(theta, b2mamvec, prodterm) { pairs <- matrix(0, M, 2) for (i in seq_len(M)) { thetavec <- matrix(theta[i], N, 1) kdthis <- (thetavec-b2mamvec)/h kd1 <- dnorm(kdthis) kd2 <- kd1*prodterm[[i]] kd <- sum(kd2, na.rm = TRUE) likelihood <- (1/(N*h))*kd pairs[i, ] <- c(theta[i], likelihood) } return(pairs) } ## START BOOTSTRAP ---- msg <- sprintf(paste("\n [calc_MinDose] \n\nRecycled Bootstrap", "\n\nParameters:", "\n M = %d", "\n N = %d", "\n sigmab = %.2f \U00B1 %.2f", "\n h = %.2f", "\n\n Creating %d bootstrap replicates..."), M, N, sigmab, sigmab.sd, h, N+M) message(msg) n <- length(data[ ,1]) # Draw N+M samples of a normale distributed sigmab sigmab <- rnorm(N + M, sigmab, sigmab.sd) # Draw N+M random indices and their frequencies b2Pmatrix <- draw_Freq() # Finally draw N+M bootstrap replicates replicates <- create_Replicates(b2Pmatrix, sigmab) # MULTICORE: The call to 'Get_mle' is the bottleneck of the function. # Using multiple CPU cores can reduce the computation cost, but may # not work for all machines. 
if (multicore) { message(paste("\n Spawning", cores, "instances of R for parallel computation. This may take a few seconds...")) cl <- parallel::makeCluster(cores) message("\n Done! Applying the model to all replicates. This may take a while...") mle <- parallel::parLapply(cl, replicates, Get_mle) parallel::stopCluster(cl) } else { message("\n Applying the model to all replicates. This may take a while...") mle <- lapply(replicates, Get_mle) } # Final bootstrap calculations message("\n Calculating the likelihoods...") # Save 2nd- and 1st-level bootstrap results (i.e. estimates of gamma) b2mamvec <- as.matrix(sapply(mle[1:N], save_Gamma, simplify = TRUE)) theta <- sapply(mle[c(N+1):c(N+M)], save_Gamma) # Calculate the probality/pseudo-likelihood Pmat <- lapply(replicates[c(N+1):c(N+M)], get_KDE) prodterm <- lapply(Pmat, get_ProductTerm, b2Pmatrix) # Save the bootstrap results as dose-likelihood pairs pairs <- make_Pairs(theta, b2mamvec, prodterm) ## --------- FIT POLYNOMIALS -------------- ## message("\n Fit curves to dose-likelihood pairs...") # polynomial fits of increasing degrees ## if the input values are too close to zero, we may get ## Inf values >>> we remove them here with a warning if(any(is.infinite(pairs))){ inf_count <- length(which(is.infinite(pairs[,2])))/nrow(pairs) pairs <- pairs[!is.infinite(pairs[,2]),] warning( paste0("[calc_MinDose()] Inf values produced by bootstrapping removed for LOcal polynominal regrESSion fitting (loess)!\n The removed values represent ",round(inf_count * 100,2)," % of the total dataset. This message usually indicates that your values are close to 0."), call. 
= FALSE) } poly.three <- lm(pairs[ ,2] ~ poly(pairs[ ,1], degree = 3, raw = TRUE)) poly.four <- lm(pairs[ ,2] ~ poly(pairs[ ,1], degree = 4, raw = TRUE)) poly.five <- lm(pairs[ ,2] ~ poly(pairs[ ,1], degree = 5, raw = TRUE)) poly.six <- lm(pairs[ ,2] ~ poly(pairs[ ,1], degree = 6, raw = TRUE)) ## --------- FIT LOESS -------------- ## # Polynomials are probably not reasonable and often suffer badly from # overfitting, especially towards the margins of the fitted data. In this # particular use case polynomials may suggest a multimodal likelihood # distribution where actually none is given. The non-parametric # LOESS (LOcal polynomial regrESSion) often yields better results than # standard polynomials. loess <- loess(pairs[ ,2] ~ pairs[ ,1]) }#EndOf::Bootstrap ##============================================================================## ## CONSOLE PRINT ##============================================================================## if (verbose) { if (!bootstrap) { cat("\n----------- meta data -----------\n") print(data.frame(n=length(data[ ,1]), par=par, sigmab=sigmab, logged=log, Lmax=-ests@min, BIC=BIC, row.names = "")) cat("\n--- final parameter estimates ---\n") tmp <- round(data.frame( gamma=ifelse(!invert, ifelse(log, exp(bbmle::coef(ests)[["gamma"]]), bbmle::coef(ests)[["gamma"]]), ifelse(log, exp((bbmle::coef(ests)[["gamma"]]-x.offset)*-1),(bbmle::coef(ests)[["gamma"]]-x.offset)*-1) ), sigma=ifelse(log, exp(bbmle::coef(ests)[["sigma"]]), bbmle::coef(ests)[["sigma"]]), p0=bbmle::coef(ests)[["p0"]], mu=ifelse(par==4, muend, 0), row.names="", check.names = FALSE), 2) if (log && log.output) { tmp$`log(gamma)` = round(log(tmp$gamma),2) tmp$`log(sigma)` = round(log(tmp$sigma),2) if (par == 4) tmp$`log(mu)` = round(log(tmp$mu),2) } print(tmp) cat("\n------ confidence intervals -----\n") conf_print <- round(conf, 2) if (log) { logged_rows <- row.names(conf_print) != "p0" conf_print[logged_rows, ] <- exp(conf_print[logged_rows, ]) conf_print <- round(conf_print, 2) 
if (log.output) { conf_tmp <- round(conf, 2) conf_tmp[which(rownames(conf_tmp) == "p0"), ] <- "-" conf_print <- cbind(round(conf_print, 2), setNames(conf_tmp, names(conf_tmp))) conf_print <- rbind( setNames(data.frame("", "", "(logged)", "(logged)", row.names = "", stringsAsFactors = FALSE), names(conf_print)), conf_print) } } print(conf_print) cat("\n------ De (asymmetric error) -----\n") print(round(data.frame(De=pal, "lower"=ifelse(log, exp(conf["gamma",1]), conf["gamma",1]), "upper"=ifelse(log, exp(conf["gamma",2]), conf["gamma",2]), row.names=""), 2)) cat("\n------ De (symmetric error) -----\n") print(round(data.frame(De=pal, error=gamma_err, row.names=""), 2)) } else if (bootstrap) { message("\n Finished!") } } ##============================================================================## ## RETURN VALUES ##============================================================================## if (invert) prof@profile$gamma$par.vals[ ,"gamma"] <- rev((prof@profile$gamma$par.vals[ ,"gamma"] - x.offset)*-1) if (!bootstrap) pairs <- poly.three <- poly.four <- poly.five <- poly.six <- loess <- NULL newRLumResults.calc_MinDose <- set_RLum( class = "RLum.Results", originator = "calc_MinDose", data = list(summary = summary, data = data, args = args, call = call, mle = ests, BIC = BIC, confint = conf, profile = prof, bootstrap = list( pairs = list(gamma=pairs), poly.fits = list(poly.three = poly.three, poly.four = poly.four, poly.five = poly.five, poly.six = poly.six), loess.fit = loess))) ##=========## ## PLOTTING if (plot) try(plot_RLum.Results(newRLumResults.calc_MinDose, ...)) # if (!debug) # options(warn = 0) if (!is.na(summary$mu) && !is.na(summary$de)) { if (log(summary$de) > summary$mu) warning("Gamma is larger than mu. Consider re-running the model", " with new boundary values (see details '?calc_MinDose').", call. 
= FALSE) } invisible(newRLumResults.calc_MinDose) } Luminescence/R/write_R2BIN.R0000644000176200001440000011721213440202164015236 0ustar liggesusers#' Export Risoe.BINfileData into Risø BIN/BINX-file #' #' Exports a Risoe.BINfileData object in a *.bin or *.binx file that can be #' opened by the Analyst software or other Risoe software. #' #' The structure of the exported binary data follows the data structure #' published in the Appendices of the Analyst manual p. 42. #' #' If #' `LTYPE`, `DTYPE` and `LIGHTSOURCE` are not of type #' [character], no transformation into numeric values is done. #' #' @param object [Risoe.BINfileData-class] (**required**): #' input object to be stored in a bin file. #' #' @param file [character] (**required**): #' file name and path of the output file #' #' - `[WIN]`: `write_R2BIN(object, "C:/Desktop/test.bin")` #' - `[MAC/LINUX]`: `write_R2BIN("/User/test/Desktop/test.bin")` #' #' @param version [character] (*optional*): #' version number for the output file. If no value is provided the highest #' version number from the [Risoe.BINfileData-class] is taken automatically. #' #' **Note:** #' This argument can be used to convert BIN-file versions. #' #' @param compatibility.mode [logical] (*with default*): #' this option recalculates the position values if necessary and set the max. #' value to 48. The old position number is appended as comment (e.g., 'OP: 70). #' This option accounts for potential compatibility problems with the Analyst software. #' It further limits the maximum number of points per curve to 9,999. If a curve contains more #' data the curve data got binned using the smallest possible bin width. #' #' @param txtProgressBar [logical] (*with default*): #' enables or disables [txtProgressBar]. #' #' @return Write a binary file. #' #' @note #' The function just roughly checks the data structures. The validity of #' the output data depends on the user. #' #' The validity of the file path is not further checked. 
BIN-file conversions #' using the argument `version` may be a lossy conversion, depending on the #' chosen input and output data (e.g., conversion from version 08 to 07 to 06 to 05 to 04 or 03). #' #' **Warning** #' #' Although the coding was done carefully it seems that the BIN/BINX-files #' produced by Risø DA 15/20 TL/OSL readers slightly differ on the byte level. #' No obvious differences are observed in the METADATA, however, the #' BIN/BINX-file may not be fully compatible, at least not similar to the ones #' directly produced by the Risoe readers! #' #' @section Function version: 0.5.1 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) #' #' @note #' ROI definitions (introduced in BIN-file version 8) are not supported! #' They are furthermore ignored by the function [read_BIN2R]. #' #' @seealso [read_BIN2R], [Risoe.BINfileData-class], [writeBin] #' #' @references #' DTU Nutech, 2016. The Sequence Editor, Users Manual, February, 2016. 
#' [http://www.nutech.dtu.dk/english/products-and-services/radiation-instruments/tl_osl_reader/manuals]() #' #' @keywords IO #' #' @examples #' #' \dontrun{ #' #' ##load exampled dataset #' data(ExampleData.BINfileData, envir = environment()) #' #' ##create temporary filepath #' ##(for usage replace by own path) #' temp_file <- temp_file <- tempfile(pattern = "output", fileext = ".bin") #' #' ##export to temporary file path #' write_R2BIN(CWOSL.SAR.Data, file = temp_file) #' #' } #' #' @md #' @export write_R2BIN <- function( object, file, version, compatibility.mode = FALSE, txtProgressBar = TRUE ){ # Config ------------------------------------------------------------------ ##set supported BIN format version VERSION.supported <- as.raw(c(3, 4, 5, 6, 7, 8)) # Check integrity --------------------------------------------------------- ##check if input object is of type 'Risoe.BINfileData' if(is(object, "Risoe.BINfileData") == FALSE){ stop("[write_R2BIN()] Input object is not of type Risoe.BINfileData!", call. = FALSE) } ##check if it fullfills the latest definition ... if(ncol(object@METADATA) != ncol(set_Risoe.BINfileData()@METADATA)){ stop("[write_R2BIN()] The number of columns in your slot 'METADATA' does not fit to the latest definition. What you are probably trying to do is to export a Risoe.BINfileData object you generated by your own or you imported with an old package version some time ago. Please re-import the BIN-file using the function read_BIN2R().", call. = FALSE) } ##check if input file is of type 'character' if(is(file, "character") == FALSE){ stop("[write_R2BIN()] argument 'file' has to be of type character!", call. 
= FALSE) } # Check Risoe.BINfileData Struture ---------------------------------------- ##check wether the BIN-file DATA slot contains more than 9999 records; needs to be run all the time temp_check <- vapply(object@DATA, function(x){ if(length(x) > 9999){ TRUE }else{ FALSE } }, FUN.VALUE = logical(1)) ##force compatibility if(compatibility.mode && any(temp_check)){ ##drop warning warning("[write_R2BIN()] Compatibility mode selected: Some data sets are longer than 9,999 points and will be binned!", call. = FALSE) ##BIN data to reduce amount of data if the BIN-file is too long object@DATA <- lapply(object@DATA, function(x){ if(length(x) > 9999){ ##we want to have a minimum binning (smallest number possible) bin_width <- ceiling(length(x)/9999) ##it should be symatric, thus, remove values if((length(x)/bin_width)%%2 != 0){ x <- x[-length(x)] } ##create matrix and return colSums(matrix(x, nrow = bin_width)) }else{ x } }) ##reset temp_check temp_check <- FALSE ##get new number of points temp_NPOINTS <- sapply(object@DATA, length) ##correct LENGTH object@METADATA[["LENGTH"]] <- object@METADATA[["LENGTH"]] - (4 * object@METADATA[["NPOINTS"]]) + (temp_NPOINTS * 4) ##correct PREVIOUS object@METADATA[["PREVIOUS"]] <- c(0,object@METADATA[["LENGTH"]][2:length(object@METADATA[["LENGTH"]])]) ##correct NPOINTS object@METADATA[["NPOINTS"]] <- temp_NPOINTS ##write comment object@METADATA[["COMMENT"]] <- paste(object@METADATA[["COMMENT"]], " - binned") } if(any(temp_check)) stop(paste("[write_R2BIN()]", length(which(temp_check)), " out of ",length(temp_check), "records contain more than 9,999 data points. This violates the BIN/BINX-file definition!"), call. 
= FALSE) ##remove rm(temp_check) ##VERSION ##If missing version argument set to the highest value if(missing(version)){ version <- as.raw(max(as.numeric(object@METADATA[,"VERSION"]))) version.original <- version }else{ version.original <- as.raw(max(as.numeric(object@METADATA[,"VERSION"]))) version <- as.raw(version) object@METADATA[["VERSION"]] <- version ##Furthermore, entries length needed to be recalculated if(version.original != version){ ##stepping decision header.stepping <- switch(as.character(version), "08" = 507, "07" = 447, "06" = 447, "05" = 423, "04" = 272, "03" = 272) object@METADATA[,"LENGTH"] <- sapply(1:nrow(object@METADATA), function(x){ header.stepping + 4 * object@METADATA[x,"NPOINTS"] }) object@METADATA[,"PREVIOUS"] <- sapply(1:nrow(object@METADATA), function(x){ if(x == 1){ 0 }else{ header.stepping + 4 * object@METADATA[x-1,"NPOINTS"] } }) } } ##check whether this file can be exported without problems due to the latest specifications if(ncol(object@METADATA) != 80){ stop("[write_R2BIN()] Your Risoe.BINfileData object seems not to be compatible with the latest specification of this S4-class object. You are probably trying to export a Risoe.BINfileData from your workspace you produced manually or with an old version.", call. = FALSE) } ##Check if the BINfile object contains of unsupported versions if((as.raw(object@METADATA[1,"VERSION"]) %in% VERSION.supported) == FALSE || version %in% VERSION.supported == FALSE){ ##show error message error.text <- paste("[write_R2BIN()] Writing BIN-files in format version (", object@METADATA[1,"VERSION"],") is currently not supported! Supported version numbers are: ", paste(VERSION.supported,collapse=", "),".",sep="") stop(error.text) } ##CHECK file name for version == 06 it has to be *.binx and correct for it if(version == 05 | version == 06 | version == 07 | version == 08){ ##grep file ending temp.file.name <- unlist(strsplit(file, "[:.:]")) ##*.bin? 
>> correct to binx if(temp.file.name[length(temp.file.name)]=="bin"){ temp.file.name[length(temp.file.name)] <- "binx" file <- paste(temp.file.name, collapse=".") } } ##SEQUENCE if (suppressWarnings(max(nchar(as.character(object@METADATA[,"SEQUENCE"]), type = "bytes"), na.rm = TRUE)) > 8) { stop("[write_R2BIN()] Value in 'SEQUENCE' exceed storage limit!") } ##USER if (suppressWarnings(max(nchar(as.character(object@METADATA[,"USER"]), type = "bytes"), na.rm = TRUE)) > 8) { stop("[write_R2BIN()] 'USER' exceed storage limit!") } ##SAMPLE if (suppressWarnings(max(nchar(as.character(object@METADATA[,"SAMPLE"]), type = "bytes"), na.rm = TRUE)) > 20) { stop("[write_R2BIN()] 'SAMPLE' exceed storage limit!") } ##enables compatibility to the Analyst as the the max value for POSITION becomes 48 if(compatibility.mode){ ##just do if position values > 48 if(max(object@METADATA[,"POSITION"])>48){ ##grep relevant IDs temp.POSITION48.id <- which(object@METADATA[,"POSITION"]>48) ##find unique values temp.POSITION48.unique <- unique(object@METADATA[temp.POSITION48.id,"POSITION"]) ##set translation vector starting from 1 and ending at 48 temp.POSITION48.new <- rep_len(1:48, length.out = length(temp.POSITION48.unique)) ##recaluate POSITION and update comment for(i in 1:length(temp.POSITION48.unique)){ object@METADATA[object@METADATA[,"POSITION"] == temp.POSITION48.unique[i],"COMMENT"] <- paste0(object@METADATA[object@METADATA[,"POSITION"] == temp.POSITION48.unique[i],"COMMENT"], "OP:",object@METADATA[object@METADATA[,"POSITION"] == temp.POSITION48.unique[i],"POSITION"]) object@METADATA[object@METADATA[,"POSITION"] == temp.POSITION48.unique[i],"POSITION"] <- temp.POSITION48.new[i] } } } ##COMMENT if(max(nchar(as.character(object@METADATA[,"COMMENT"]), type="bytes"))>80){ stop("[write_R2BIN()] 'COMMENT' exceeds storage limit!", call. 
= FALSE) } # Tranlation Matrices ----------------------------------------------------- ##LTYPE LTYPE.TranslationMatrix <- matrix(NA, nrow=14, ncol=2) LTYPE.TranslationMatrix[,1] <- 0:13 LTYPE.TranslationMatrix[,2] <- c("TL", "OSL", "IRSL", "M-IR", "M-VIS", "TOL", "TRPOSL", "RIR", "RBR", "USER", "POSL", "SGOSL", "RL", "XRF") ##DTYPE DTYPE.TranslationMatrix <- matrix(NA, nrow=8, ncol=2) DTYPE.TranslationMatrix[,1] <- 0:7 DTYPE.TranslationMatrix[,2] <- c("Natural","N+dose","Bleach", "Bleach+dose","Natural (Bleach)", "N+dose (Bleach)","Dose","Background") ##LIGHTSOURCE LIGHTSOURCE.TranslationMatrix <- matrix(NA, nrow=8, ncol=2) LIGHTSOURCE.TranslationMatrix[,1] <- 0:7 LIGHTSOURCE.TranslationMatrix[,2] <- c("None", "Lamp", "IR diodes/IR Laser", "Calibration LED", "Blue Diodes", "White light", "Green laser (single grain)", "IR laser (single grain)" ) ##TRANSLATE VALUES IN METADATA ##LTYPE if(is(object@METADATA[1,"LTYPE"], "character") == TRUE | is(object@METADATA[1,"LTYPE"], "factor") == TRUE){ object@METADATA[,"LTYPE"]<- sapply(1:length(object@METADATA[,"LTYPE"]),function(x){ as.integer(LTYPE.TranslationMatrix[object@METADATA[x,"LTYPE"]==LTYPE.TranslationMatrix[,2],1]) }) } ##DTYPE if(is(object@METADATA[1,"DTYPE"], "character") == TRUE | is(object@METADATA[1,"DTYPE"], "factor") == TRUE){ object@METADATA[,"DTYPE"]<- sapply(1:length(object@METADATA[,"DTYPE"]),function(x){ as.integer(DTYPE.TranslationMatrix[object@METADATA[x,"DTYPE"]==DTYPE.TranslationMatrix[,2],1]) }) } ##LIGHTSOURCE if(is(object@METADATA[1,"LIGHTSOURCE"], "character") == TRUE | is(object@METADATA[1,"LIGHTSOURCE"], "factor") == TRUE){ object@METADATA[,"LIGHTSOURCE"]<- sapply(1:length(object@METADATA[,"LIGHTSOURCE"]),function(x){ as.integer(LIGHTSOURCE.TranslationMatrix[ object@METADATA[x,"LIGHTSOURCE"]==LIGHTSOURCE.TranslationMatrix[,2],1]) })} ##TIME object@METADATA[,"TIME"] <- vapply(1:length(object@METADATA[["TIME"]]),function(x){ if(is.na(object@METADATA[["TIME"]][x])){ "000000" }else{ 
as.character(gsub(":","",object@METADATA[["TIME"]][x])) } }, character(1)) ##TAG and SEL ##in TAG information on the SEL are storred, here the values are copied to TAG ##before export object@METADATA[,"TAG"] <- ifelse(object@METADATA[,"SEL"] == TRUE, 1, 0) # SET FILE AND VALUES ----------------------------------------------------- con<-file(file, "wb") ##get records n.records <- length(object@METADATA[,"ID"]) ##output cat(paste("\n[write_R2BIN()]\n\t >> ",file,sep=""), fill=TRUE) ##set progressbar if(txtProgressBar==TRUE){ pb<-txtProgressBar(min=0,max=n.records, char="=", style=3) } # LOOP ------------------------------------------------------------------- ID <- 1 if(version == 03 || version == 04){ ## version 03 and 04 ##start loop for export BIN data while(ID<=n.records) { ##VERSION writeBin(as.raw(object@METADATA[ID,"VERSION"]), con, size = 1, endian="little") ##stepping writeBin(raw(length=1), con, size = 1, endian="little") ##LENGTH, PREVIOUS, NPOINTS writeBin(c(as.integer(object@METADATA[ID,"LENGTH"]), as.integer(object@METADATA[ID,"PREVIOUS"]), as.integer(object@METADATA[ID,"NPOINTS"])), con, size = 2, endian="little") ##LTYPE writeBin(object@METADATA[ID,"LTYPE"], con, size = 1, endian="little") ##LOW, HIGH, RATE writeBin(c(as.double(object@METADATA[ID,"LOW"]), as.double(object@METADATA[ID,"HIGH"]), as.double(object@METADATA[ID,"RATE"])), con, size = 4, endian="little") ##TEMPERATURE, XCOORD, YCOORD, TOLDELAY; TOLON, TOLOFF writeBin(c(as.integer(object@METADATA[ID,"TEMPERATURE"]), as.integer(object@METADATA[ID,"XCOORD"]), as.integer(object@METADATA[ID,"YCOORD"]), as.integer(object@METADATA[ID,"TOLDELAY"]), as.integer(object@METADATA[ID,"TOLON"]), as.integer(object@METADATA[ID,"TOLOFF"])), con, size = 2, endian="little") ##POSITION, RUN writeBin(c(as.integer(object@METADATA[ID,"POSITION"]), as.integer(object@METADATA[ID,"RUN"])), con, size = 1, endian="little") ##TIME TIME_SIZE <- nchar(object@METADATA[ID,"TIME"]) writeBin(as.integer(TIME_SIZE), con, size = 
1, endian="little") writeChar(object@METADATA[ID,"TIME"], con, nchars =TIME_SIZE, useBytes=TRUE, eos = NULL) if(6-TIME_SIZE>0){ writeBin(raw(length = c(6-TIME_SIZE)), con, size = 1, endian="little") } ##DATE writeBin(as.integer(6), con, size = 1 , endian="little") suppressWarnings(writeChar(as.character(object@METADATA[ID,"DATE"]), con, nchars = 6, useBytes=TRUE, eos = NULL)) ##SEQUENCE ##count number of characters SEQUENCE_SIZE <- as.integer(nchar(as.character(object@METADATA[["SEQUENCE"]][ID]), type = "bytes", keepNA = FALSE)) writeBin(SEQUENCE_SIZE, con, size = 1, endian="little") writeChar(as.character(object@METADATA[ID,"SEQUENCE"]), con, nchars = SEQUENCE_SIZE, useBytes=TRUE, eos = NULL) ##stepping if(8-SEQUENCE_SIZE>0){ writeBin(raw(length = (8-SEQUENCE_SIZE)), con, size = 1, endian="little") } ##USER USER_SIZE <- as.integer(nchar(as.character(object@METADATA[ID,"USER"]), type="bytes")) writeBin(USER_SIZE, con, size = 1, endian="little") writeChar(as.character(object@METADATA[ID,"USER"]), con, nchars = USER_SIZE, useBytes=TRUE, eos = NULL) ##stepping if(8-USER_SIZE>0){ writeBin(raw(length = (8-USER_SIZE)), con, size = 1, endian="little") } ##DTYPE writeBin(object@METADATA[ID,"DTYPE"], con, size = 1, endian="little") ##IRR_TIME writeBin(as.double(object@METADATA[ID,"IRR_TIME"]), con, size = 4, endian="little") ##IRR_TYPE, IRR_UNIT writeBin(c(object@METADATA[ID,"IRR_TYPE"], object@METADATA[ID,"IRR_UNIT"]), con, size = 1, endian="little") ##BL_TIME writeBin(as.double(object@METADATA[ID,"BL_TIME"]), con, size = 4, endian="little") ##BL_UNIT writeBin(as.integer(object@METADATA[ID,"BL_UNIT"]), con, size = 1, endian="little") ##AN_TEMP, AN_TIME, NORM1, NORM2, NORM2, BG writeBin(c(as.double(object@METADATA[ID,"AN_TEMP"]), as.double(object@METADATA[ID,"AN_TIME"]), as.double(object@METADATA[ID,"NORM1"]), as.double(object@METADATA[ID,"NORM2"]), as.double(object@METADATA[ID,"NORM3"]), as.double(object@METADATA[ID,"BG"])), con, size = 4, endian="little") ##SHIFT 
writeBin(as.integer(object@METADATA[ID,"SHIFT"]), con, size = 2, endian="little") ##SAMPLE SAMPLE_SIZE <- as.integer(nchar(as.character(object@METADATA[ID,"SAMPLE"]), type="bytes")) ##avoid problems with empty sample names if(SAMPLE_SIZE == 0){ SAMPLE_SIZE <- as.integer(2) object@METADATA[ID,"SAMPLE"] <- " " } writeBin(SAMPLE_SIZE, con, size = 1, endian="little") writeChar(as.character(object@METADATA[ID,"SAMPLE"]), con, nchars = SAMPLE_SIZE, useBytes=TRUE, eos = NULL) if((20-SAMPLE_SIZE)>0){ writeBin(raw(length = (20-SAMPLE_SIZE)), con, size = 1, endian="little") } ##COMMENT COMMENT_SIZE <- as.integer(nchar(as.character(object@METADATA[ID,"COMMENT"]), type="bytes")) ##avoid problems with empty comments if(COMMENT_SIZE == 0){ COMMENT_SIZE <- as.integer(2) object@METADATA[ID,"COMMENT"] <- " " } writeBin(COMMENT_SIZE, con, size = 1, endian="little") suppressWarnings(writeChar(as.character(object@METADATA[ID,"COMMENT"]), con, nchars = COMMENT_SIZE, useBytes=TRUE, eos = NULL)) if((80-COMMENT_SIZE)>0){ writeBin(raw(length = c(80-COMMENT_SIZE)), con, size = 1, endian="little") } ##LIGHTSOURCE, SET, TAG writeBin(c(as.integer(object@METADATA[ID,"LIGHTSOURCE"]), as.integer(object@METADATA[ID,"SET"]), as.integer(object@METADATA[ID,"TAG"])), con, size = 1, endian="little") ##GRAIN writeBin(as.integer(object@METADATA[ID,"GRAIN"]), con, size = 2, endian="little") ##LPOWER writeBin(as.double(object@METADATA[ID,"LPOWER"]), con, size = 4, endian="little") ##SYSTEMID writeBin(as.integer(object@METADATA[ID,"SYSTEMID"]), con, size = 2, endian="little") ##Further distinction needed to fully support format version 03 and 04 separately if(version == 03){ ##RESERVED 1 if(length(object@.RESERVED) == 0 || version.original != version){ writeBin(raw(length=36), con, size = 1, endian="little") }else{ writeBin(object@.RESERVED[[ID]][[1]], con, size = 1, endian="little") } ##ONTIME, OFFTIME writeBin(c(as.integer(object@METADATA[ID,"ONTIME"]), as.integer(object@METADATA[ID,"OFFTIME"])), con, 
size = 4, endian="little") ##GATE_ENABLED writeBin(as.integer(object@METADATA[ID,"GATE_ENABLED"]), con, size = 1, endian="little") ##GATE_START, GATE_STOP writeBin(c(as.integer(object@METADATA[ID,"GATE_START"]), as.integer(object@METADATA[ID,"GATE_STOP"])), con, size = 4, endian="little") ##RESERVED 2 if(length(object@.RESERVED) == 0 || version.original != version){ writeBin(raw(length=1), con, size = 1, endian="little") }else{ writeBin(object@.RESERVED[[ID]][[2]], con, size = 1, endian="little") } } else { ##version 04 ##RESERVED 1 if(length(object@.RESERVED) == 0 || version.original != version){ writeBin(raw(length=20), con, size = 1, endian="little") }else{ writeBin(object@.RESERVED[[ID]][[1]], con, size = 1, endian="little") } ##CURVENO writeBin(as.integer(object@METADATA[ID,"CURVENO"]), con, size = 2, endian="little") ##TIMETICK writeBin(c(as.double(object@METADATA[ID,"TIMETICK"])), con, size = 4, endian="little") ##ONTIME, STIMPERIOD writeBin(c(as.integer(object@METADATA[ID,"ONTIME"]), as.integer(object@METADATA[ID,"STIMPERIOD"])), con, size = 4, endian="little") ##GATE_ENABLED writeBin(as.integer(object@METADATA[ID,"GATE_ENABLED"]), con, size = 1, endian="little") ##GATE_START, GATE_STOP writeBin(c(as.integer(object@METADATA[ID,"GATE_START"]), as.integer(object@METADATA[ID,"GATE_STOP"])), con, size = 4, endian="little") ##PTENABLED writeBin(as.integer(object@METADATA[ID,"PTENABLED"]), con, size = 1, endian="little") ##RESERVED 2 if(length(object@.RESERVED) == 0 || version.original != version){ writeBin(raw(length=10), con, size = 1, endian="little") }else{ writeBin(object@.RESERVED[[ID]][[2]], con, size = 1, endian="little") } } ##DPOINTS writeBin(as.integer(unlist(object@DATA[ID])), con, size = 4, endian="little") #SET UNIQUE ID ID<-ID+1 ##update progress bar if(txtProgressBar==TRUE){ setTxtProgressBar(pb, ID) } } } ## ==================================================== ## version > 06 if(version == 05 | version == 06 | version == 07 | version == 08){ 
##start loop for export BIN data while(ID<=n.records) { ##VERSION writeBin(as.raw(object@METADATA[ID,"VERSION"]), con, size = 1, endian="little") ##stepping writeBin(raw(length=1), con, size = 1, endian="little") ##LENGTH, PREVIOUS, NPOINTS writeBin(c(as.integer(object@METADATA[ID,"LENGTH"]), as.integer(object@METADATA[ID,"PREVIOUS"]), as.integer(object@METADATA[ID,"NPOINTS"])), con, size = 4, endian="little") if(version == 08){ writeBin(as.integer(object@METADATA[ID,"RECTYPE"]), con, size = 1, endian="little") } ##RUN, SET, POSITION, GRAINNUMBER, CURVENO, XCOORD, YCOORD writeBin(c(as.integer(object@METADATA[ID,"RUN"]), as.integer(object@METADATA[ID,"SET"]), as.integer(object@METADATA[ID,"POSITION"]), as.integer(object@METADATA[ID,"GRAINNUMBER"]), as.integer(object@METADATA[ID,"CURVENO"]), as.integer(object@METADATA[ID,"XCOORD"]), as.integer(object@METADATA[ID,"YCOORD"])), con, size = 2, endian="little") ##SAMPLE SAMPLE_SIZE <- as.integer(nchar(as.character(object@METADATA[ID,"SAMPLE"]), type="bytes")) ##avoid problems with empty sample names if(SAMPLE_SIZE == 0){ SAMPLE_SIZE <- as.integer(2) object@METADATA[ID,"SAMPLE"] <- " " } writeBin(SAMPLE_SIZE, con, size = 1, endian="little") writeChar(as.character(object@METADATA[ID,"SAMPLE"]), con, nchars = SAMPLE_SIZE, useBytes=TRUE, eos = NULL) if((20-SAMPLE_SIZE)>0){ writeBin(raw(length = (20-SAMPLE_SIZE)), con, size = 1, endian="little") } ##COMMENT COMMENT_SIZE <- as.integer(nchar(as.character(object@METADATA[ID,"COMMENT"]), type="bytes")) ##avoid problems with empty comments if(COMMENT_SIZE == 0){ COMMENT_SIZE <- as.integer(2) object@METADATA[ID,"COMMENT"] <- " " } writeBin(COMMENT_SIZE, con, size = 1, endian="little") writeChar(as.character(object@METADATA[ID,"COMMENT"]), con, nchars = COMMENT_SIZE, useBytes=TRUE, eos = NULL) if((80-COMMENT_SIZE)>0){ writeBin(raw(length = c(80-COMMENT_SIZE)), con, size = 1, endian="little") } ##Instrument and sequence characteristics ##SYSTEMID 
writeBin(as.integer(object@METADATA[ID,"SYSTEMID"]), con, size = 2, endian="little") ##FNAME FNAME_SIZE <- as.integer(nchar(as.character(object@METADATA[ID,"FNAME"]), type="bytes")) ##correct for case that this is of 0 length if(length(FNAME_SIZE) == 0){FNAME_SIZE <- as.integer(0)} writeBin(FNAME_SIZE, con, size = 1, endian="little") if(FNAME_SIZE>0) { writeChar( as.character(object@METADATA[ID,"FNAME"]), con, nchars = FNAME_SIZE, useBytes = TRUE, eos = NULL ) } if((100-FNAME_SIZE)>0){ writeBin(raw(length = c(100-FNAME_SIZE)), con, size = 1, endian="little") } ##USER USER_SIZE <- as.integer(nchar(as.character(object@METADATA[ID,"USER"]), type="bytes")) writeBin(USER_SIZE, con, size = 1, endian="little") writeChar(as.character(object@METADATA[ID,"USER"]), con, nchars = USER_SIZE, useBytes=TRUE, eos = NULL) if((30-USER_SIZE)>0){ writeBin(raw(length = c(30-USER_SIZE)), con, size = 1, endian="little") } ##TIME TIME_SIZE <- nchar(object@METADATA[ID,"TIME"]) writeBin(as.integer(TIME_SIZE), con, size = 1, endian="little") writeChar(object@METADATA[ID,"TIME"], con, nchars =TIME_SIZE, useBytes=TRUE, eos = NULL) if(6-TIME_SIZE>0){ writeBin(raw(length = c(6-TIME_SIZE)), con, size = 1, endian="little") } ##DATE writeBin(as.integer(6), con, size = 1 , endian="little") suppressWarnings(writeChar(as.character(object@METADATA[ID,"DATE"]), con, nchars = 6, useBytes=TRUE, eos = NULL)) ##Analysis ##DTYPE writeBin(object@METADATA[ID,"DTYPE"], con, size = 1, endian="little") ##BL_TIME writeBin(as.double(object@METADATA[ID,"BL_TIME"]), con, size = 4, endian="little") ##BL_UNIT writeBin(as.integer(object@METADATA[ID,"BL_UNIT"]), con, size = 1, endian="little") ##NORM1, NORM2, NORM3, BG writeBin(c(as.double(object@METADATA[ID,"NORM1"]), as.double(object@METADATA[ID,"NORM2"]), as.double(object@METADATA[ID,"NORM3"]), as.double(object@METADATA[ID,"BG"])), con, size = 4, endian="little") ##SHIFT writeBin(as.integer(object@METADATA[ID,"SHIFT"]), con, size = 2, endian="little") ##TAG 
writeBin(c(as.integer(object@METADATA[ID,"TAG"])), con, size = 1, endian="little") ##RESERVED 1 if(length(object@.RESERVED) == 0 || version.original != version){ writeBin(raw(length=20), con, size = 1, endian="little") }else{ writeBin(object@.RESERVED[[ID]][[1]], con, size = 1, endian="little") } ##Measurement characteristics ##LTYPE writeBin(object@METADATA[ID,"LTYPE"], con, size = 1, endian="little") ##LIGHTSOURCE writeBin(c(as.integer(object@METADATA[ID,"LIGHTSOURCE"])), con, size = 1, endian="little") ##LIGHTPOWER, LOW, HIGH, RATE writeBin(c(as.double(object@METADATA[ID,"LIGHTPOWER"]), as.double(object@METADATA[ID,"LOW"]), as.double(object@METADATA[ID,"HIGH"]), as.double(object@METADATA[ID,"RATE"])), con, size = 4, endian="little") ##TEMPERATURE, MEASTEMP writeBin(c(as.integer(object@METADATA[ID,"TEMPERATURE"]), as.integer(object@METADATA[ID,"MEASTEMP"])), con, size = 2, endian="little") ##AN_TEMP, AN_TIME writeBin(c(as.double(object@METADATA[ID,"AN_TEMP"]), as.double(object@METADATA[ID,"AN_TIME"])), con, size = 4, endian="little") ##TOLDELAY; TOLON, TOLOFF writeBin(c(as.integer(object@METADATA[ID,"TOLDELAY"]), as.integer(object@METADATA[ID,"TOLON"]), as.integer(object@METADATA[ID,"TOLOFF"])), con, size = 2, endian="little") ##IRR_TIME writeBin(as.double(object@METADATA[ID,"IRR_TIME"]), con, size = 4, endian="little") ##IRR_TYPE writeBin(c(object@METADATA[ID,"IRR_TYPE"]), con, size = 1, endian="little") ##IRR_DOSERATE, IRR_DOSERATEERR if(version == 05){ writeBin(as.double(object@METADATA[ID,"IRR_DOSERATE"]), con, size = 4, endian="little") }else{ writeBin(c(as.double(object@METADATA[ID,"IRR_DOSERATE"]), as.double(object@METADATA[ID,"IRR_DOSERATEERR"])), con, size = 4, endian="little") } ##TIMESINCEIRR writeBin(c(as.integer(object@METADATA[ID,"TIMESINCEIRR"])), con, size = 4, endian="little") ##TIMETICK writeBin(c(as.double(object@METADATA[ID,"TIMETICK"])), con, size = 4, endian="little") ##ONTIME, STIMPERIOD 
writeBin(c(suppressWarnings(as.integer(object@METADATA[ID,"ONTIME"])), as.integer(object@METADATA[ID,"STIMPERIOD"])), con, size = 4, endian="little") ##GATE_ENABLED writeBin(as.integer(object@METADATA[ID,"GATE_ENABLED"]), con, size = 1, endian="little") ##GATE_START, GATE_STOP writeBin(c(as.integer(object@METADATA[ID,"GATE_START"]), as.integer(object@METADATA[ID,"GATE_STOP"])), con, size = 4, endian="little") ##PTENABLED, DTENABLED writeBin(c(as.integer(object@METADATA[ID,"PTENABLED"]), as.integer(object@METADATA[ID,"DTENABLED"])), con, size = 1, endian="little") ##DEADTIME, MAXLPOWER, XRF_ACQTIME, XRF_HV writeBin(c(as.double(object@METADATA[ID,"DEADTIME"]), as.double(object@METADATA[ID,"MAXLPOWER"]), as.double(object@METADATA[ID,"XRF_ACQTIME"]), as.double(object@METADATA[ID,"XRF_HV"])), con, size = 4, endian="little") ##XRF_CURR writeBin(c(as.integer(object@METADATA[ID,"XRF_CURR"])), con, size = 4, endian="little") ##XRF_DEADTIMEF writeBin(c(as.double(object@METADATA[ID,"XRF_DEADTIMEF"])), con, size = 4, endian="little") ##add version support for V7 if(version == 05){ ##RESERVED 2 if(length(object@.RESERVED) == 0 || version.original != version){ writeBin(raw(length=4), con, size = 1, endian="little") }else{ writeBin(object@.RESERVED[[ID]][[2]], con, size = 1, endian="little") } }else if(version == 06){ ##RESERVED 2 if(length(object@.RESERVED) == 0 || version.original != version){ writeBin(raw(length=24), con, size = 1, endian="little") }else{ writeBin(object@.RESERVED[[ID]][[2]], con, size = 1, endian="little") } }else{ ##DETECTOR_ID writeBin(as.integer(object@METADATA[ID,"DETECTOR_ID"]), con, size = 1, endian="little") ##LOWERFILTER_ID, UPPERFILTER_ID writeBin(c(as.integer(object@METADATA[ID,"LOWERFILTER_ID"]), as.integer(object@METADATA[ID,"UPPERFILTER_ID"])), con, size = 2, endian="little") ##ENOISEFACTOR writeBin(as.double(object@METADATA[ID,"ENOISEFACTOR"]), con, size = 4, endian="little") ##VERSION 08 if(version == 07){ ##RESERVED 2 
if(length(object@.RESERVED) == 0 || version.original != version){ writeBin(raw(length=15), con, size = 1, endian="little") }else{ writeBin(object@.RESERVED[[ID]][[2]], con, size = 1, endian="little") } }else{ ##MARKPOS POSITION and extraction writeBin( c( as.double(object@METADATA[ID, "MARKPOS_X1"]), as.double(object@METADATA[ID, "MARKPOS_Y1"]), as.double(object@METADATA[ID, "MARKPOS_X2"]), as.double(object@METADATA[ID, "MARKPOS_Y2"]), as.double(object@METADATA[ID, "MARKPOS_X3"]), as.double(object@METADATA[ID, "MARKPOS_Y3"]), as.double(object@METADATA[ID, "EXTR_START"]), as.double(object@METADATA[ID, "EXTR_END"]) ), con, size = 4, endian = "little" ) ##RESERVED 2 if(length(object@.RESERVED) == 0 || version.original != version){ writeBin(raw(length=42), con, size = 1, endian="little") }else{ writeBin(object@.RESERVED[[ID]][[2]], con, size = 1, endian="little") } } }#end if version decision ##DPOINTS writeBin(as.integer(unlist(object@DATA[ID])), con, size = 4, endian="little") #SET UNIQUE ID ID <- ID + 1 ##update progress bar if(txtProgressBar==TRUE){ setTxtProgressBar(pb, ID) } } } # ##close con close(con) # # ##close if(txtProgressBar==TRUE){close(pb)} ##output cat(paste("\t >> ",ID-1,"records have been written successfully!\n\n",paste="")) } Luminescence/R/fit_OSLLifeTimes.R0000644000176200001440000006057113571743237016336 0ustar liggesusers#' Fitting and Deconvolution of OSL Lifetime Components #' #' @details #' The function intends to provide an easy access to pulsed optically stimulated luminescence (POSL) data, #' in order determine signal lifetimes. The fitting is currently optimised to work with the off-time flank of POSL measurements #' only. For the signal deconvolution, a differential evolution optimisation is combined with nonlinear least-square fitting #' following the approach by Bluszcz & Adamiec (2006). 
#'
#' **Component deconvolution algorithm**
#'
#' The component deconvolution consists of two steps:
#'
#' (1) Adaption phase
#'
#' In the adaption phase the function tries to figure out the optimal and statistically justified
#' number of signal components following roughly the approach suggested by Bluszcz & Adamiec (2006). In
#' contrast to their work, for the optimisation by differential evolution here the package 'DEoptim' is used.
#'
#' The function to be optimized has the form:
#'
#' \deqn{\chi^2 = \sum(w * (n_i/c - \sum(A_i * exp(-x/(tau_i + t_p))))^2)}
#'
#' with \eqn{w = 1} for unweighted regression analysis (`method_control = list(weights = FALSE)`) or
#' \eqn{w = c^2/n_i} for weighted regression analysis. The default value is `TRUE`.
#'
#' \deqn{F = (\Delta\chi^2 / 2) / (\chi^2/(N - 2*m - 2))}
#'
#' (2) Final fitting
#'
#' **`method_control`**
#'
#' \tabular{lll}{
#' **Parameter** \tab **Type** \tab **Description**\cr
#' `p` \tab [numeric] \tab controls the probability for the F statistic reference values. For a significance level of 5 % a value of 0.95 (the default) should be added, for 1 %, a value of 0.99 is sufficient: 1 > p > 0 (cf. [stats::FDist])\cr
#' `seed` \tab [numeric] \tab set the seed for the random number generator, provide a value here to get reproducible results \cr
#' `DEoptim.trace` \tab [logical] \tab enables/disables the tracing of the differential evolution (cf. [DEoptim::DEoptim.control]) \cr
#' `DEoptim.itermax` \tab [logical] \tab controls the number of the allowed generations (cf. [DEoptim::DEoptim.control]) \cr
#' `weights` \tab [logical] \tab enables/disables the weighting for the start parameter estimation and fitting (see equations above).
#' The default value is `TRUE` \cr
#' `nlsLM.trace` \tab [logical] \tab enables/disables trace mode for the nls fitting ([minpack.lm::nlsLM]), can be used to identify convergence problems, default is `FALSE` \cr
#' `nlsLM.upper` \tab [logical] \tab enables/disables the upper parameter boundary, default is `TRUE` \cr
#' `nlsLM.lower` \tab [logical] \tab enables/disables the lower parameter boundary, default is `TRUE`
#' }
#'
#' @param object [RLum.Data.Curve-class], [RLum.Analysis-class], [data.frame] or [matrix] **(required)**:
#' Input object containing the data to be analysed. All objects can also be provided as a list for automated
#' processing. Please note: `NA` values are automatically removed and the dataset should comprise at least 5 data points.
#'
#' @param tp [numeric] (*with default*): option to account for the stimulation pulse width. For off-time measurements
#' the default value is 0. `tp` has the same unit as the measurement data, e.g., µs. Please set this parameter
#' carefully, if at all, otherwise you may heavily bias your fit results.
#'
#' @param signal_range [numeric] (*optional*): allows to set a channel range, by default all channels are used, e.g.
#' `signal_range = c(2,100)` considers only channels 2 to 100 and `signal_range = c(2)` considers only channels
#' from channel 2 onwards.
#'
#' @param n.components [numeric] (*optional*): Fix the number of components. If set, the algorithm will try
#' to fit the number of predefined components. If nothing is set, the algorithm will try to find the best number
#' of components.
#'
#' @param method_control [list] (*optional*): Named list to allow a finer control of the fitting process. See details
#' for allowed options.
#'
#' @param plot [logical] (*with default*): Enable/disable plot output
#'
#' @param plot_simple [logical] (*with default*): Enable/disable reduced plot output.
#' If `TRUE`, no
#' residual plot is shown, however, plot output can be combined using the standard R layout options,
#' such as `par(mfrow = c(2,2))`.
#'
#' @param verbose [logical] (*with default*): Enable/disable terminal feedback
#'
#' @param ... parameters passed to [plot.default] to control the plot output, supported are:
#' `main`, `xlab`, `ylab`, `log`, `xlim`, `ylim`, `col`, `lty`, `legend.pos`, `legend.text`. If the input
#' object is of type [RLum.Analysis-class] these arguments can be provided as a [list].
#'
#' @return
#'
#' -----------------------------------\cr
#' `[ NUMERICAL OUTPUT ]`\cr
#' -----------------------------------\cr
#'
#' **`RLum.Results`**-object
#'
#' **slot:** **`@data`**
#'
#' \tabular{lll}{
#' **Element** \tab **Type** \tab **Description**\cr
#' `$data` \tab `matrix` \tab the final fit matrix \cr
#' `$start_matrix` \tab `matrix` \tab the start matrix used for the fitting \cr
#' `$total_counts` \tab `integer` \tab Photon count sum \cr
#' `$fit` \tab `nls` \tab the fit object returned by [minpack.lm::nls.lm] \cr
#' }
#'
#'
#'**slot:** **`@info`**
#'
#' The original function call
#'
#' ------------------------\cr
#' `[ TERMINAL OUTPUT ]`\cr
#' ------------------------\cr
#'
#' Terminal output is only shown if the argument `verbose = TRUE`.
#'
#' *(1) Start parameter and component adaptation*\cr
#' Trace of the parameter adaptation process
#'
#' *(2) Fitting results (sorted by ascending tau)*\cr
#' The fitting results sorted by ascending tau value. Please note
#' that if you access the `nls` fitting object, the values are not sorted.
#'
#' *(3) Further information*\cr
#' - The photon count sum
#' - Durbin-Watson residual statistic to assess whether the residuals are correlated, ideally
#' the residuals should not be correlated at all.
#' Rough measures are: \cr
#' D = 0: the residuals are systematically correlated \cr
#' D = 2: the residuals are randomly distributed \cr
#' D = 4: the residuals are systematically anticorrelated\cr
#'
#' You should be suspicious if D differs largely from 2.
#'
#'
#' ------------------------\cr
#' `[ PLOT OUTPUT ]`\cr
#' ------------------------\cr
#'
#' A plot showing the original data and the fit as far as possible. The lower plot shows the
#' residuals of the fit.
#'
#' @section Function version: 0.1.5
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS-Université Bordeaux Montaigne (France),
#' Christoph Schmidt, University of Bayreuth (Germany)
#'
#' @seealso [minpack.lm::nls.lm], [DEoptim::DEoptim]
#'
#' @references
#' Bluszcz, A., Adamiec, G., 2006. Application of differential evolution to fitting OSL decay curves.
#' Radiation Measurements 41, 886-891. doi:10.1016/j.radmeas.2006.05.016\cr
#'
#' Durbin, J., Watson, G.S., 1950. Testing for Serial Correlation in Least Squares Regression: I.
#' Biometrika 37, 409-21. doi:10.2307/2332391
#'
#' **Further reading**
#'
#' Hughes, I., Hase, T., 2010. Measurements and Their Uncertainties. Oxford University Press.
#'
#' Storn, R., Price, K., 1997. Differential Evolution –
#' A Simple and Efficient Heuristic for Global Optimization over Continuous Spaces.
#' Journal of Global Optimization 11, 341–359.
#'
#'@examples
#'
#'##load example data
#'data(ExampleData.TR_OSL, envir = environment())
#'
#'##fit lifetimes (short run)
#'fit_OSLLifeTimes(
#' object = ExampleData.TR_OSL,
#' n.components = 1)
#'
#'##long example
#'\dontrun{
#'fit_OSLLifeTimes(
#' object = ExampleData.TR_OSL)
#' }
#'
#'@md
#'@export
fit_OSLLifeTimes <- function(
  object,
  tp = 0,
  signal_range = NULL,
  n.components = NULL,
  method_control = list(),
  plot = TRUE,
  plot_simple = FALSE,
  verbose = TRUE,
  ...
){ # Self-call ----------------------------------------------------------------------------------- if(class(object) == "list" || class(object) == "RLum.Analysis"){ ##allow RLum.Analysis objects if(all(vapply(object, function(x){ class(x) == "RLum.Analysis"}, logical(1)))){ object <- lapply(object, function(x){x@records}) object <- .unlist_RLum(object) } ##expand parameters ##n.components if(!is.null(n.components)) n.components <- as.list(rep(n.components, length(object))) ##tp tp <- as.list(rep(tp, length(object))) ## names of extra arguments arg_names <- names(list(...)) ##pretreat some of the ... settings to avoid ## expand all arguments if(!is.null(arg_names)){ arg_list <- lapply(arg_names , function(x){ unlist(rep(list(...)[[x]], length.out = length(object))) }) ## make sure we organise this list (not nice but it works) arg_list <- lapply(1:length(object), function(x){ args <- lapply(1:length(arg_names), function(y){ arg_list[[y]][[x]] }) names(args) <- arg_names args }) } else{ arg_list <- NULL } ##run function temp_results <- lapply(1:length(object), function(x){ temp <- try(do.call(what = fit_OSLLifeTimes, c(list( object = object[[x]], tp = tp[[x]], signal_range = signal_range, n.components = n.components[[x]], method_control = method_control, plot = plot, plot_simple = plot_simple, verbose = verbose ), arg_list[[x]]) ), silent = FALSE) if(class(temp) == "try-error"){ return(NULL) }else{ return(temp) } }) ##combine results and return results <- merge_RLum(temp_results) if(!is.null(results)) results@originator <- "fit_OSLLifeTimes" ##return return(results) } # Input integrity tests ------------------------------------------------------------------ if(class(object) == "RLum.Data.Curve"){ if(!grepl(pattern = "POSL", x = object@recordType, fixed = TRUE)) stop(paste0("[fit_OSLLifeTime()] recordType ",object@recordType, " not supported for input object!"), call. 
= FALSE) df <- as.data.frame(object@data) }else if(class(object) == "data.frame"){ df <- object[,1:2] } else if(class(object) == "matrix"){ df <- as.data.frame(object[,1:2]) }else{ try(stop(paste0("[fit_OSLLifeTime()] Class '",class(object), "' not supported as input, NULL returned!"), call. = FALSE)) return(NULL) } ##remove NA values, whatever it is worth for if(any(is.na(df))){ df <- na.exclude(df) warning("[fit_OSLLifeTimes()] NA values detected and removed from dataset.",call. = TRUE) } ##rename columns for data.frame colnames(df) <- c("x","y") ##make sure that we have a minimum of data points available if(nrow(df) < 5){ try(stop("[fit_OSLLifeTimes()] Your input dataset has less than 5 data points. NULL returned!", call. = FALSE)) return(NULL) } #check for 0 data in dataset ... we opt for hard stop if(any(df[[2]] == 0)){ warning("[fit_OSLLifeTimes()] Your dataset contains 0. A value of 0.1 has been added to your count values!",call. = TRUE) df[[2]] <- df[[2]] + 0.1 } ##save original data for later df_raw <- df ##signal_range if(!is.null(signal_range)){ if(class(signal_range) != "numeric") stop("[fit_OSLLifeTimes()] Argument 'signal_range' must by of type numeric!", call. = FALSE) ##check lengths if(length(signal_range) == 1) signal_range <- c(signal_range, nrow(df)) if(length(signal_range) > 2) warning("[fit_OSLLifeTimes()] 'signal_range' has more than two elements, take only the first two!", call. = FALSE) if(signal_range[2] > nrow(df)){ warning("[fit_OSLLifeTimes()] 'signal_range' > number of channels, reset to maximum!", call. = FALSE) signal_range[2] <- nrow(df) } if(signal_range[1] > signal_range[2]){ warning("[fit_OSLLifeTimes()] 'signal_range' first element > last element, reset to default", call. 
= FALSE) signal_range <- c(1, nrow(df)) } ##set range df <- df[signal_range[1]:signal_range[2],] } # Fitting ------------------------------------------------------------------------------------- ##(0) CONTROL +++++++++++++++++++++++++++++++++++++++++++++++++++++ method_control_setting <- list( p = 0.95, seed = NULL, DEoptim.trace = FALSE, DEoptim.itermax = 1000, weights = TRUE, nlsLM.trace = FALSE, nlsLM.upper = TRUE, nlsLM.lower = TRUE ) ##udpate list if the user did something method_control_setting <- modifyList(x = method_control_setting, val = method_control) ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ ##(A) SETTINGS ## ## ##(1) >> set fitting function for minpack.lm x <- 0 #cheat R check routine fit_forumla <- function(n.components, tp){ A <- paste0("A.",1:n.components) tau <- paste0("tau.",1:n.components) as.formula(paste0("y ~ ", paste(A," * exp(- x/(",tau," + ", tp, "))", collapse = " + "))) } ## ## ##(2) create formula for differential evolution run fn_constructor <- function(m){ ##get length of x-vector x_len <- 1:(2 * m) ##generate term term <- vapply(seq(1,length(x_len), by = 2), function(i){ paste0("(x[", i, "] * exp(-t/(x[", i + 1, "] + tp)))") },character(1)) ##parse term <- paste(term, collapse = " + ") ##set weight (should be given as character) if(method_control_setting$weights){ w <- "c^2/n" }else{ w <- "1" } ##combine term <- paste0("sum(",w," * ((n/c) - (",term,"))^2)") ##parse ... 
if we do it here, we boost the speed of the evaluation parse(text = eval(term)) } ## ## ##(3) initialse objects chi_squared <- c(NA, NA) F <- c(Inf, Inf) start <- NULL if(is.null(n.components)){ m <- 1 }else{ m <- n.components } ## ## ##(4) set seed if(!is.null(method_control_setting$seed)) set.seed(method_control_setting$seed) ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ ##(B) RUN DIFFERENTIAL EVOLUTION TO DETERMINE NUMBER OF COMPONENTS ##prevent collateral demage, so we want a data.frame that has at least 10 rows if(verbose){ cat("\n[fit_OSLLifeTime()]\n") cat("\n(1) Start parameter and component adapation") cat("\n---------------------(start adaption)------------------------------------") } while(!is.na(suppressWarnings(qf(method_control_setting$p, df1 = 2, df2 = length(df[[2]]) - 2 * m - 2))) && ( F[2] > qf(method_control_setting$p, df1 = 2, df2 = length(df[[2]]) - 2 * m - 2) & F[1] >= F[2])){ ##set F F[1] <- F[2] ##construct formula outside of the loop; needs to be done here, otherwise the performance ##goes down formula_string <- fn_constructor(m) ##set fn set_tp <- tp set_c <- diff(c(0,df[[1]])) set_t <- df[[1]] set_n <- df[[2]] ##set function ##Personal reminder: ##Why this function is not written in C++ ... 
because it adds basically nothing ##in terms of speed ~ 10 µs faster, but needed to be compiled and thus cannot changed ##directly in the code fn <- function(x, tp = set_tp, n = set_n, c = set_c, t = set_t, term = formula_string){ eval(formula_string) } ##set start parameters if(!is.null(start)) start_parameters <- start$optim$bestmem ##run differential evolution start <- DEoptim::DEoptim( fn = fn, lower = rep(0, 2 * m), upper = rep(c(10 * sum(df[[2]]), 10000), m), control = DEoptim::DEoptim.control( trace = method_control_setting$DEoptim.trace, itermax = method_control_setting$DEoptim.itermax, c = .5, strategy = 2, parallelType = 0 #Does it make sense to use parallel processing here: no, it does not scale well ) ) ##set chi^2 value and calculate F for the 2nd run chi_squared[2] <- start$optim$bestval if(!is.na(chi_squared[1])){ F[2] <- (abs(diff(chi_squared))/2) / (chi_squared[2]/(nrow(df) - 2 * m - 2)) } ##terminal feedback if(verbose){ cat("\n>> + adaption for",m, "comp.", ": ", round(F[2],2), "(calc.) <> ", round(qf(method_control_setting$p, df1 = 2, df2 = length(df[[2]]) - 2 * m - 2), 2), "(ref.)") if(F[2] > qf(method_control_setting$p, df1 = 2, df2 = length(df[[2]]) - 2 * m - 2) & F[1] >= F[2]){ cat(" >> [add comp.]") }else{ cat(" >> [stop]\n") cat("---------------------(end adaption)--------------------------------------\n\n") } } ##break here if n.components was set others than NULL, in such case we force the number if(!is.null(n.components)){ if(verbose){ cat(" >> [forced stop]\n") cat("---------------------(end adaption)--------------------------------------\n\n") } start_parameters <- start$optim$bestmem break() } ##update objects chi_squared[1] <- chi_squared[2] m <- m + 1 } ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ ##(C) RUN LM-FITTING ## ##reduce m by 2, why 2? 
## - the last component violated the F statistic, so was obviously not the best call ## - the loop adds everytime another component if(is.null(n.components)){ ##this covers the extrem case that the process stops after the first run if(m == 2){ m <- 1 start_parameters <- start$optim$bestmem }else{ m <- m - 2 } } A <- start_parameters[seq(1,length(start_parameters), by = 2)] tau <- start_parameters[seq(2,length(start_parameters), by = 2)] names(A) <- paste0("A.", 1:(m)) names(tau) <- paste0("tau.", 1:(m)) ##create start_matrix start_matrix <- matrix(data = c(A,tau), ncol = 2) colnames(start_matrix) <- c("A", "tau") rownames(start_matrix) <- paste0("Comp.", 1:(m)) ##add terminal feedback if(verbose){ cat("\n>> Applied component matrix\n") print(start_matrix) cat("\n\n") } ##run fitting using the Levenberg-Marquardt algorithm fit <- try(minpack.lm::nlsLM( formula = fit_forumla(n.components = m, tp = tp), data = df, start = c(A, tau), upper = if(method_control_setting$nlsLM.upper){ c(rep(sum(df[[2]]), length(A)), rep(Inf,length(tau))) }else{ NULL }, lower = if(method_control_setting$nlsLM.lower){ c(rep(0,2*length(A))) }else{ NULL }, na.action = "na.exclude", weights = if(method_control_setting$weights){ set_c^2/df[,2] }else{ rep(1,nrow(df)) }, trace = method_control_setting$nlsLM.trace, control = minpack.lm::nls.lm.control(maxiter = 500) ), silent = FALSE) # Post-processing ----------------------------------------------------------------------------- if (class(fit) != 'try-error') { ##extract coefficients A <- coef(fit)[1:(m)] tau <- coef(fit)[(m + 1):(2 * m)] ##order coef o <- order(tau) tau <- tau[o] A <- A[o] ##summary matrix summary_matrix <- summary(fit)$coefficients ##return warning if one parameter is negative, this can happen if the user let the boundaries ##free float if(any(summary_matrix[,1]<0)) warning("[fit_OSLLifeTimes()] At least one parameter is negative. Please carefully check your results!", call. 
= FALSE) ##order matrix by tau, but keep the rownames temp_rownames <- rownames(summary_matrix) summary_matrix <- summary_matrix[c(o,o + length(A)),] rownames(summary_matrix) <- temp_rownames rm(temp_rownames) ##calculate Durbin-Watson statistic R <- residuals(fit) D <- round(sum((R - c(0,R[-length(R)]))^2) / sum(R^2),2) rm(R) }else{ m <- 1 A <- NA tau <- NA summary_matrix <- NA D <- NA } # Terminal output ----------------------------------------------------------------------------- if(verbose){ if (class(fit) != 'try-error') { cat("(2) Fitting results (sorted by ascending tau)\n") cat("-------------------------------------------------------------------------\n") print(summary_matrix) cat("-------------------------------------------------------------------------\n") }else{ try(stop("[fit_OSLLifeTimes()] The fitting was not sucessful, consider to try again!", call. = FALSE)) } cat("\n(3) Further information\n") cat("-------------------------------------------------------------------------\n") cat("Photon count sum: ", sum(df[[2]]),"\n") cat("Durbin-Watson residual statistic: ", D,"") if(!is.na(D)){ string <- c("[",rep(" ",(D * 10)/4),"<>",rep(" ",10 - (D * 10)/4),"]\n") }else{ string <- NA } cat(paste(string, collapse = "")) cat("\n") } # Plotting ------------------------------------------------------------------------------------ if(plot) { ##set plot default settings plot_settings <- list( main = "OSL Lifetimes", xlab = "Time [a.u.]", ylab = "POSL [a.u.]", log = "", xlim = c(0,max(df_raw[[1]])), ylim = c(0,max(df_raw[[2]])), col = get("col", pos = .LuminescenceEnv)[-1], lty = rep(1, (m + 1)), legend.pos = "topright", legend.text = c("sum", paste0("comp. 
", 1:m)) ) ##modify settings on request plot_settings <- modifyList(x = plot_settings, val = list(...)) ##catch log scale if(grepl(pattern = "x", plot_settings$log, fixed = TRUE)){ if(plot_settings$xlim[1] == 0){ plot_settings$xlim[1] <- if(min(df_raw[[1]]) == 0) 1e-04 else min(df_raw[[1]]) warning(paste0("[fit_OSLLifeTime()] log-scale requires x-values > 0, set min xlim to ", round(plot_settings$xlim[1],4), "!"), immediate. = TRUE, call. = FALSE) } } if(grepl(pattern = "y", plot_settings$log, fixed = TRUE)){ if(plot_settings$ylim[1] == 0){ plot_settings$ylim[1] <- if(min(df_raw[[2]]) == 0) 1e-04 else min(df_raw[[2]]) warning(paste0("[fit_OSLLifeTime()] log-scale requires y-values > 0, set min ylim to ", round(plot_settings$ylim[1],4), "!"), immediate. = TRUE, call. = FALSE) } } ##plot if the fitting was a sucess if (class(fit) != 'try-error') { if(!plot_simple){ ##make sure that the screen closes if something is wrong on.exit(close.screen(all.screens = TRUE)) split.screen(rbind( c(0.1,1,0.32, 0.98), c(0.1,1,0.1, 0.32))) screen(1) par(mar = c(0, 4, 3, 4)) } plot(NA,NA, xaxt = if(plot_simple) "s" else "n", xlab = if(plot_simple) plot_settings$xlab else "", ylab = plot_settings$ylab, ylim = plot_settings$ylim, xlim = plot_settings$xlim, log = plot_settings$log, main = plot_settings$main ) ##add used points points(df, col = rgb(0,0,0,0.8)) ##add not used points df_raw (this solution avoids overplotting) if(nrow(df) != nrow(df_raw)) points(df_raw[!df_raw[[1]]%in%df[[1]],], col = "grey") ##+ add some curve lines( df$x, fitted(fit), col = plot_settings$col[1], lwd = 1.3, lty = plot_settings$lty[1] ) ##+ add components for(i in 1:m) { if (length(plot_settings$lty) < 2) plot_settings$lty <- rep(plot_settings$lty, 1 + m) if (length(plot_settings$col) < 2) plot_settings$col <- rep(plot_settings$col, 1 + m) curve( A[i] * exp(-x / (tau[i] + tp)), add = TRUE, col = plot_settings$col[i + 1], lty = plot_settings$lty[i + 1] ) } ##+ add legend legend( plot_settings$legend.pos, 
legend = plot_settings$legend.text, lty = plot_settings$lty, col = plot_settings$col[c(1, 2:(m + 2))], bty = "n" ) if(!plot_simple){ screen(2) par(mar = c(4, 4, 0, 4)) plot( x = df[[1]], y = residuals(fit), xlab = plot_settings$xlab, type = "b", pch = 20, xlim = plot_settings$xlim, log = if(plot_settings$log == "x"){"x"}else{""}, ylab = "\u03B5" ) } }else{ plot( df, xlab = plot_settings$xlab, ylab = plot_settings$ylab, col = rgb(0, 0, 0, 0.8), main = plot_settings$main, xlim = plot_settings$xlim, ylim = plot_settings$ylim, log = plot_settings$log ) } }#if plot # Return -------------------------------------------------------------------------------------- ##create return object return( set_RLum( class = "RLum.Results", data = list( data = summary_matrix, start_matrix = start_matrix, total_counts = sum(df[[2]]), fit = fit ), info = list( call = sys.call() ) ) ) } Luminescence/R/zzz.R0000644000176200001440000002176713231137116014060 0ustar liggesusers##////////////////////////////////////////////////////////////////////////////// ##//zzz.R ##////////////////////////////////////////////////////////////////////////////// ## ##============================================================================== ##author: R Luminescence Package Team ##organisation: ##version.: 0.2.1 ##date: 2013-11-10 ##============================================================================== # Set namespace .LuminescenceEnv ------------------------------------------ .LuminescenceEnv <- new.env(parent = emptyenv()) # Assign variables to Namespace ------------------------------------------- ##variable col to define colours in the functions for output assign("col", unlist(colors())[c(261,552,51,62,76,151,451,474,654,657,100,513,23,612,129,27,551,393,80,652,555)], pos = ".LuminescenceEnv", envir = .LuminescenceEnv) ##============================================================================== ##on Attach .onAttach <- function(libname,pkgname){ ##set startup message 
try(packageStartupMessage(paste("Welcome to the R package Luminescence version ", packageDescription(pkg="Luminescence")$Version, " [Built: ", trimws(strsplit(packageDescription(pkg="Luminescence")$Built, ";")[[1]][3]), "]", sep=""), "\n", get_Quote()), silent=TRUE) } ##============================================================================== # DO NOT TOUCH! ----------------------------------------------------------- #' sTeve - sophisticated tool for efficient data validation and evaluation #' #' This function provides a sophisticated routine for comprehensive #' luminescence dating data analysis. #' #' This amazing sophisticated function validates your data seriously. #' #' @param n_frames [integer] (*with default*): #' n frames #' #' @param t_animation [integer] (*with default*): #' t animation #' #' @param n.tree [integer] (*with default*): #' how many trees do you want to cut? #' #' @param type [integer] (*optional*): #' Make a decision: 1, 2 or 3 #' #' @return Validates your data. #' #' @note This function should not be taken too seriously. 
#' #' @author R Luminescence Team, 2012-2046 #' #' @seealso [plot_KDE] #' #' @keywords manip #' @examples #' #' ##no example available #' #' @md #' @export sTeve<- function(n_frames = 10, t_animation = 2, n.tree = 7, type) { ## allow new overlay plot par(new = TRUE) ## infer month of year month <- as.numeric(strsplit(x = as.character(Sys.Date()), split = "-")[[1]][2]) ## select showtime item based on month or user-defined type if(missing(type) == TRUE) { if(month >= 1 & month <= 3) { type <- 1 } else if(month >3 & month <= 11) { type <- 2 } else if(month > 11 & month <= 12) { type <- 3 } } if(type == 1) { ## SHOWTIME OPTION 1 Sys.sleep(5) shape::emptyplot() shape::filledrectangle(wx = 0.9, wy = 0.4, mid = c(0.5, 0.5), lcol ="red", lwd=1, col=0, angle = 45) text(x=0.5, y=0.5, labels="NOT FUNNY", cex=2, col="red", font=2, srt=45) } else if(type == 2) { ## SHOWTIME OPTION 2 plot(NA, xlim = c(0, 10), ylim = c(0, 10), main = "", xlab = "", ylab = "", axes = FALSE, frame.plot = FALSE) n_frames <- n_frames t_animation <- t_animation dt <- t_animation / n_frames x1 <- seq(0, 10, length.out = n_frames) y1 <- rep(1.5, n_frames) r1 <- 0.5 x2 <- seq(0, 16, length.out = n_frames) y2 <- rep(8.5, n_frames) r2 <- 0.5 x4 <- seq(11, 0, length.out = n_frames) y4 <- rep(5, n_frames) r4 <- 0.5 # set angles for each step of mouth opening angles_mouth <- rep(c(0.01, 0.25, 0.5, 0.25), length.out = n_frames) for(i in 1:n_frames){ # define pacman circles shape::filledcircle(r1 = r1, r2 = 0.00001, mid = c(x1[i], y1[i]), from = angles_mouth[i], to = 2 * pi - angles_mouth[i], col = "yellow") shape::filledcircle(r1 = r2, r2 = 0.00001, mid = c(x2[i], y2[i]), from = angles_mouth[i], to = 2 * pi - angles_mouth[i], col = "yellow") shape::filledcircle(r1 = r4, r2 = 0.00001, mid = c(x4[i], y4[i]), from = angles_mouth[i] + 3, to = 2 * pi - angles_mouth[i] + 3, col = "yellow") # dinfine eyes for pacman points(x1[i] + 0.2, y1[i] + 0.75, pch = 21, bg = 1, cex = 0.7) points(x2[i] + 0.2, y2[i] + 0.75, pch 
= 21, bg = 1, cex = 0.7) points(x4[i] - 0.05, y4[i] + 0.75, pch = 21, bg = 1, cex = 0.7) Sys.sleep(dt) shape::plotcircle(r = 1.1 * r1, mid = c(x1[i], y1[i]), col = "white", lcol = "white") shape::plotcircle(r = 1.1 * r2, mid = c(x2[i], y2[i]), col = "white", lcol = "white") shape::plotcircle(r = 1.1 * r4, mid = c(x4[i], y4[i]), col = "white", lcol = "white") } } else if(type == 3) { ## calculate display ratio f <- par()$pin[2] / par()$pin[1] ## create new overlay plot plot(NA, xlim = c(0, 100), ylim = c(0, 100), axes = F, frame.plot = FALSE, xlab = "", ylab = "") ## create semi-transparent layer polygon(x = c(-100, -100, 200, 200), y = c(-100, 200, 200, -100), col = rgb(1,1,1, 0.8), lty = 0) ## draw christmas trees n = n.tree tree.x <- runif(n, 10, 90) tree.y <- runif(n, 10, 90) tree.size <- runif(n, 0.3, 1.5) for(i in 1:n) { ## stem polygon(x = c(tree.x[i] - 1.5 * tree.size[i], tree.x[i] - 1.5 * tree.size[i], tree.x[i] + 1.5 * tree.size[i], tree.x[i] + 1.5 * tree.size[i]) , y = c(tree.y[i] - 12 * tree.size[i], tree.y[i] - 1 * tree.size[i], tree.y[i] - 1 * tree.size[i], tree.y[i] - 12* tree.size[i]), col = "rosybrown4", lty = 0) ## branch one shape::filledellipse(rx1 = 10 * tree.size[i], rx2 = 0.00001, mid = c(tree.x[i], tree.y[i] + 3 * tree.size[i]), col = "darkgreen", from = 4.0143, to = 5.41052) ## branch two shape::filledellipse(rx1 = 8 * tree.size[i], rx2 = 0.00001, mid = c(tree.x[i], tree.y[i] + 7 * tree.size[i]), col = "darkgreen", from = 4.0143, to = 5.41052) ## branch three shape::filledellipse(rx1 = 6 * tree.size[i], rx2 = 0.00001, mid = c(tree.x[i], tree.y[i] + 9 * tree.size[i]), col = "darkgreen", from = 4.0143, to = 5.41052) ## branch four shape::filledellipse(rx1 = 4 * tree.size[i], rx2 = 0.00001, mid = c(tree.x[i], tree.y[i] + 11 * tree.size[i]), col = "darkgreen", from = 4.0143, to = 5.41052) ## sphere one shape::filledellipse(rx1 = 1 * f * tree.size[i], ry1 = 1 * tree.size[i], mid = c(tree.x[i] + 2 * tree.size[i], tree.y[i] + 5 * tree.size[i]), col 
= shape::shadepalette(n = 20, endcol = "darkred")) ## sphere two shape::filledellipse(rx1 = 0.8 * f * tree.size[i], ry1 = 0.8 * tree.size[i], mid = c(tree.x[i] - 1 * tree.size[i], tree.y[i] + -3 * tree.size[i]), col = shape::shadepalette(n = 20, endcol = "orange")) ## sphere three shape::filledellipse(rx1 = 1.2 * f * tree.size[i], ry1 = 1.2 * tree.size[i], mid = c(tree.x[i] - 1.7 * tree.size[i], tree.y[i] + 2 * tree.size[i]), col = shape::shadepalette(n = 20, endcol = "yellow3")) ## sphere four shape::filledellipse(rx1 = 1 * f * tree.size[i], ry1 = 1 * tree.size[i], mid = c(tree.x[i] + 3 * tree.size[i], tree.y[i] - 4 * tree.size[i]), col = shape::shadepalette(n = 20, endcol = "darkblue")) Sys.sleep(0.1) } ## add snow points(runif(300, 0, 100), runif(300, 0, 100), pch = 8, col = "lightgrey") } }#end function Luminescence/R/calc_WodaFuchs2008.R0000644000176200001440000001507213237047572016406 0ustar liggesusers#' Obtain the equivalent dose using the approach by Woda and Fuchs 2008 #' #' The function generates a histogram-like reorganisation of the data, to #' assess counts per bin. The log-transformed counts per bin are used to #' calculate the second derivative of the data (i.e., the curvature of the #' curve) and to find the central value of the bin hosting the distribution #' maximum. A normal distribution model is fitted to the counts per bin #' data to estimate the dose distribution parameters. The uncertainty of the #' model is estimated based on all input equivalent doses smaller that of the #' modelled central value. #' #' @param data [data.frame] or [RLum.Results-class] object (**required**): #' for [data.frame]: two columns: De (`values[,1]`) and De error (`values[,2]`). #' For plotting multiple data sets, these must be provided as `list` #' (e.g. `list(dataset1, dataset2)`). #' #' @param breaks [numeric]: #' Either number or locations of breaks. See `[hist]` for details. 
#' If missing, the number of breaks will be estimated based on the bin width
#' (as function of median error).
#'
#' @param plot [logical] (*with default*):
#' enable plot output.
#'
#' @param ... Further plot arguments passed to the function.
#'
#' @section Function version: 0.2.0
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France),\cr
#' Michael Dietze, GFZ Potsdam (Germany)
#'
#' @seealso [calc_FuchsLang2001], [calc_CentralDose]
#'
#' @references
#' Woda, C., Fuchs, M., 2008. On the applicability of the leading edge method to
#' obtain equivalent doses in OSL dating and dosimetry. Radiation Measurements 43, 26-37.
#'
#' @examples
#'
#' ## read example data set
#' data(ExampleData.DeValues, envir = environment())
#'
#' results <- calc_WodaFuchs2008(
#'   data = ExampleData.DeValues$CA1,
#'    xlab = expression(paste(D[e], " [Gy]"))
#'  )
#'
#' @md
#' @export
calc_WodaFuchs2008 <- function(
  data,
  breaks = NULL,
  plot = TRUE,
  ...
) {

  ##TODO
  # - complete manual
  # - add statistics to the plot
  # - check whether this makes sense at all ...

  ## check data and parameter consistency -------------------------------------
  if(is(data, "RLum.Results") == FALSE &
     is(data, "data.frame") == FALSE &
     is.numeric(data) == FALSE) {
    warning(paste("[calc_WodaFuchs()] Input data format is neither",
                  "'data.frame', 'RLum.Results' nor 'numeric'.",
                  "No output generated!"))
    return(NULL)
  } else {

    ## unwrap RLum.Results container to the underlying De table
    if(is(data, "RLum.Results") == TRUE) {
      data <- get_RLum(data, "data")
    }

    ## ensure a second (De error) column exists
    ## BUGFIX: the original test `length(data) < 2` counted *values* for a
    ## numeric vector, so plain De vectors with two or more values never
    ## received the NA error column and crashed below at `data[, 2]`;
    ## test the dimensions instead
    if (is.null(dim(data))) {
      data <- cbind(data, rep(x = NA, times = length(data)))
    } else if (ncol(data) < 2) {
      data <- cbind(data, rep(x = NA, times = nrow(data)))
    }
  }

  ## read additional arguments
  if("trace" %in% names(list(...))) {
    trace <- list(...)$trace
  } else {
    trace <- FALSE
  }

  ## calculations -------------------------------------------------------------

  ## estimate bin width based on Woda and Fuchs (2008)
  ## BUGFIX: the original condition `sum(is.na(data[,2]) == nrow(data))`
  ## compared logicals against the row count (almost always all FALSE), so
  ## the no-errors fallback was unreachable; the parenthesis belongs around
  ## the sum: all rows NA in the error column triggers the 10 % rule
  if(sum(is.na(data[,2])) == nrow(data)) {
    message("[calc_WodaFuchs2008()] No errors provided. Bin width set by 10 percent of input data!")
    bin_width <- median(data[,1] / 10, na.rm = TRUE)
  } else {
    bin_width <- median(data[,2], na.rm = TRUE)
  }

  ## optionally estimate class breaks based on bin width
  ## BUGFIX: the division only applied to min(); the intended expression
  ## is (value range) / bin width
  if(is.null(breaks)) {
    n_breaks <- (max(data[,1], na.rm = TRUE) - min(data[,1], na.rm = TRUE)) / bin_width
  } else {
    n_breaks <- breaks
  }

  ## calculate histogram (counts per bin are the model input)
  H <- hist(x = data[,1], breaks = n_breaks, plot = FALSE)

  ## enforce a minimum of four bins, otherwise the fit is ill-posed
  if(n_breaks <= 3) {
    warning("[calc_WodaFuchs()] Less than four bins, now set to four!")
    H <- hist(x = data[,1], breaks = 4, plot = FALSE)
  }

  ## extract values from histogram object
  H_c <- H$counts
  H_m <- H$mids

  ## log counts
  counts_log <- log(H_c)

  ## estimate curvature of the log-count curve
  ## NOTE: currently not used further; kept for future statistics output
  curvature <- (counts_log[1] - counts_log[2]) / (counts_log[1] - counts_log[3])

  ## centre of the most populated bin, used as the distribution maximum
  class_center <- H$mids[H_c == max(H_c)]

  ## if several bins tie for the maximum, fall back to the first one
  if(length(class_center) != 1) {
    warning("[calc_WodaFuchs()] More than one maximum. Fit may be invalid!", call. = FALSE)
    class_center <- class_center[1]
  }

  ## fit normal distribution to the counts-per-bin data
  fit <- nls(H_c ~ (A / sqrt(2 * pi * sigma^2)) * exp(-(H_m - class_center)^2 / (2 * sigma^2)),
             start = c(A = mean(H_m), sigma = bin_width),
             control = c(maxiter = 5000),
             algorithm = "port",
             trace = trace)

  ## extract fitted parameters
  A <- coef(fit)["A"]
  sigma <- coef(fit)["sigma"]

  ## estimate dose as leading-edge position (centre minus one sigma)
  D_estimate <- as.numeric(x = class_center - sigma)

  ## count number of bin mids smaller than (or equal to) the centre class
  count_ID <- length(which(H_m <= class_center))

  ## extract H_m values smaller than center class
  H_m_smaller <- H_m[1:count_ID]

  ## calculate uncertainty according to Woda and Fuchs (2008)
  s <- round(sqrt(sum((H_m_smaller - D_estimate)^2) / (count_ID - 1)), digits = 2)

  ## plot output --------------------------------------------------------------
  if(plot) {

    ## define plot default settings, user arguments in ... override them
    plot_settings <- list(
      xlab = expression(paste(D[e], " [s]")),
      ylab = "Frequency",
      xlim = range(data[,1], na.rm = TRUE) + c(-10, 20),
      ylim = NULL,
      main = expression(paste(D[e]," applying Woda and Fuchs (2008)")),
      sub = NULL
    )

    plot_settings <- modifyList(x = plot_settings, val = list(...), keep.null = TRUE)

    plot(
      x = H,
      xlab = plot_settings$xlab,
      ylab = plot_settings$ylab,
      xlim = plot_settings$xlim,
      ylim = plot_settings$ylim,
      main = plot_settings$main,
      sub = plot_settings$sub
    )

    ## add curve with fitted normal distribution
    ## (dummy x silences the R CMD check note on curve()'s implicit variable)
    x <- 0
    rm(x)
    curve((A / sqrt(2 * pi * sigma^2)) * exp(-(x - class_center)^2 / (2 * sigma^2)),
          add = TRUE,
          to = class_center,
          col = "red"
    )
  }

  ## return output ------------------------------------------------------------
  return(set_RLum(
    class = "RLum.Results",
    data = list(
      D_estimate = data.frame(
        DP = D_estimate,
        DP.ERROR = s,
        CLASS_CENTER = class_center,
        BIN_WIDTH = bin_width,
        SIGMA = sigma,
        A = A,
        row.names = NULL
      ),
      breaks = H$breaks
    ),
    info = list(call = sys.call())
  ))
}
Luminescence/R/RLum.Data.Curve-class.R0000644000176200001440000003471413517522344017144 0ustar liggesusers#' @include get_RLum.R set_RLum.R
names_RLum.R length_RLum.R bin_RLum.Data.R smooth_RLum.R NULL #' Class `"RLum.Data.Curve"` #' #' Class for representing luminescence curve data. #' #' @name RLum.Data.Curve-class #' #' @docType class #' #' @slot recordType #' Object of class "character" containing the type of the curve (e.g. "TL" or "OSL") #' #' @slot curveType #' Object of class "character" containing curve type, allowed values are measured or predefined #' #' @slot data #' Object of class [matrix] containing curve x and y data. #' 'data' can also be of type `RLum.Data.Curve` to change object values without #' deconstructing the object. For example: #' ``` #' set_RLum(class = 'RLum.Data.Curve', #' data = Your.RLum.Data.Curve, #' recordType = 'never seen before') #' ``` #' would just change the recordType. Missing arguements the value is taken #' from the input object in 'data' (which is already an RLum.Data.Curve object #' in this example) #' #' #' @note #' The class should only contain data for a single curve. For additional #' elements the slot `info` can be used (e.g. providing additional heating #' ramp curve). Objects from the class `RLum.Data.Curve` are produced by other #' functions (partyl within [RLum.Analysis-class] objects), #' namely: [Risoe.BINfileData2RLum.Analysis], [read_XSYG2R] #' #' @section Create objects from this Class: #' Objects can be created by calls of the form #' `set_RLum(class = "RLum.Data.Curve", ...)`. 
#' #' @section Class version: 0.5.1 #' #' @author Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) #' #' @seealso [RLum-class], [RLum.Data-class], [plot_RLum], [merge_RLum] #' #' @keywords classes #' #' @examples #' #' showClass("RLum.Data.Curve") #' #' ##set empty curve object #' set_RLum(class = "RLum.Data.Curve") #' #' @md #' @export setClass("RLum.Data.Curve", slots = list( recordType = "character", curveType = "character", data = "matrix" ), contains = "RLum.Data", prototype = list ( recordType = NA_character_, curveType = NA_character_, data = matrix(data = 0, ncol = 2) ) ) # as() ---------------------------------------------------------------------------------------- ##LIST ##COERCE RLum.Data.Curve >> list AND list >> RLum.Data.Curve #' as() - RLum-object coercion #' #' for `[RLum.Data.Curve-class]` #' #' **[RLum.Data.Curve-class]** #' #' \tabular{ll}{ #' **from** \tab **to**\cr #' `list` \tab `list` \cr #' `data.frame` \tab `data.frame`\cr #' `matrix` \tab `matrix` #' } #' #' @param from [RLum-class], [list], [data.frame], [matrix] (**required**): #' object to be coerced from #' #' @param to [character] (**required**): #' class name to be coerced to #' #' @seealso [methods::as] #' #' @note #' Due to the complex structure of the `RLum` objects itself a coercing to standard #' R data structures will be always loosely! 
#' #' @md #' @name as setAs("list", "RLum.Data.Curve", function(from,to){ new(to, recordType = "unkown curve type", curveType = NA_character_, data = matrix(unlist(from), ncol = 2), info = list()) }) setAs("RLum.Data.Curve", "list", function(from){ list(x = from@data[,1], y = from@data[,2]) }) ##DATA.FRAME ##COERCE RLum.Data.Curve >> data.frame AND data.frame >> RLum.Data.Curve setAs("data.frame", "RLum.Data.Curve", function(from,to){ new(to, recordType = "unkown curve type", curveType = NA_character_, data = as.matrix(from), info = list()) }) setAs("RLum.Data.Curve", "data.frame", function(from){ data.frame(x = from@data[,1], y = from@data[,2]) }) ##MATRIX ##COERCE RLum.Data.Curve >> matrix AND matrix >> RLum.Data.Curve setAs("matrix", "RLum.Data.Curve", function(from,to){ new(to, recordType = "unkown curve type", curveType = NA_character_, data = from, info = list()) }) setAs("RLum.Data.Curve", "matrix", function(from){ from@data }) # show() -------------------------------------------------------------------------------------- #' @describeIn RLum.Data.Curve #' Show structure of `RLum.Data.Curve` object #' #' @keywords internal #' #' @md #' @export setMethod("show", signature(object = "RLum.Data.Curve"), function(object){ ##print information cat("\n [RLum.Data.Curve-class]") cat("\n\t recordType:", object@recordType) cat("\n\t curveType:", object@curveType) cat("\n\t measured values:", length(object@data[,1])) cat("\n\t .. range of x-values:", suppressWarnings(range(object@data[,1]))) cat("\n\t .. range of y-values:", suppressWarnings(min(object@data[,2], na.rm = TRUE)), suppressWarnings(max(object@data[,2], na.rm = TRUE)), if(anyNA(object@data[,2])){"(contains NA values)"}else{""} ) cat("\n\t additional info elements:", length(object@info)) #cat("\n\t\t >> names:", names(object@info)) } ) # set_RLum() ---------------------------------------------------------------------------------- #' @describeIn RLum.Data.Curve #' Construction method for RLum.Data.Curve object. 
The slot info is optional #' and predefined as empty list by default. #' #' @param class [`set_RLum`]; [character] (**required**): #' name of the `RLum` class to create #' #' @param originator [`set_RLum`]; [character] (*automatic*): #' contains the name of the calling function (the function that produces this object); #' can be set manually. #' #' @param .uid [`set_RLum`]; [character] (*automatic*): #' sets an unique ID for this object using the internal C++ function `create_UID`. #' #' @param .pid [`set_RLum`]; [character] (*with default*): #' option to provide a parent id for nesting at will. #' #' @param recordType [`set_RLum`]; [character] (*optional*): #' record type (e.g., "OSL") #' #' @param curveType [`set_RLum`]; [character] (*optional*): #' curve type (e.g., "predefined" or "measured") #' #' @param data [`set_RLum`]; [matrix] (**required**): #' raw curve data. If `data` itself is a `RLum.Data.Curve`-object this can be #' used to re-construct the object (s. details), i.e. modified parameters except #' `.uid`, `.pid` and `originator`. The rest will be subject to copy and paste unless provided. #' #' @param info [`set_RLum`]; [list] (*optional*): #' info elements #' #' @return #' #' **`set_RLum`** #' #' Returns an [RLum.Data.Curve-class] object. #' #' @md #' @export setMethod( "set_RLum", signature = signature("RLum.Data.Curve"), definition = function( class, originator, .uid, .pid, recordType = NA_character_, curveType = NA_character_, data = matrix(0, ncol = 2), info = list()) { ##The case where an RLum.Data.Curve object can be provided ##with this RLum.Data.Curve objects can be provided to be reconstructed if (is(data, "RLum.Data.Curve")) { ##check for missing curveType if (missing(curveType)) curveType <- data@curveType ##check for missing recordType if(missing(recordType)) recordType <- data@recordType ##check for missing data ... 
not possible as data is the object itself ##check for missing info if(missing(info)) info <- data@info ##check for missing .uid and .pid and originator ##>> no this is always taken from the old object here ##set empty class from object newRLumDataCurve <- new("RLum.Data.Curve") ##fill - this is the faster way, filling in new() costs ... newRLumDataCurve@recordType <- recordType newRLumDataCurve@curveType <- curveType newRLumDataCurve@data <- data@data newRLumDataCurve@info <- info newRLumDataCurve@originator <- data@originator newRLumDataCurve@.uid <- data@.uid newRLumDataCurve@.pid <- data@.pid } else { ##set empty class form object newRLumDataCurve <- new("RLum.Data.Curve") ##fill - this is the faster way, filling in new() costs ... newRLumDataCurve@originator <- originator newRLumDataCurve@recordType <- recordType newRLumDataCurve@curveType <- curveType newRLumDataCurve@data <- data newRLumDataCurve@info <- info newRLumDataCurve@.uid <- .uid newRLumDataCurve@.pid <- .pid } return(newRLumDataCurve) } ) # get_RLum() ---------------------------------------------------------------------------------- #' @describeIn RLum.Data.Curve #' Accessor method for RLum.Data.Curve object. The argument info.object is #' optional to directly access the info elements. If no info element name is #' provided, the raw curve data (matrix) will be returned. #' #' @param object [`get_RLum`], [`length_RLum`], [`names_RLum`] (**required**): #' an object of class [RLum.Data.Curve-class] #' #' @param info.object [`get_RLum`] [character] (*optional*): #' name of the wanted info element #' #' @return #' #' **`get_RLum`** #' #' 1. A [matrix] with the curve values or #' 2. only the info object if `info.object` was set. 
#'
#' @md
#' @export
setMethod("get_RLum",
          signature("RLum.Data.Curve"),
          definition = function(object, info.object = NULL) {

            ## if info.object == NULL just return the raw curve matrix,
            ## otherwise look the requested element up in the info slot
            if(!is.null(info.object)) {
              if(info.object %in% names(object@info)){
                unlist(object@info[info.object])

              }else{
                ## requested element not present: distinguish between an
                ## empty info slot and a wrong name, warn, and return NULL
                if(length(object@info) == 0){
                  warning("[get_RLum()] This RLum.Data.Curve object has no info objects! NULL returned!)")
                  return(NULL)

                }else{
                  ## list the valid element names in the warning text
                  temp.element.names <- paste(names(object@info), collapse = ", ")

                  warning.text <- paste("[get_RLum()] Invalid info.object name. Valid names are:", temp.element.names)

                  warning(warning.text, call. = FALSE)
                  return(NULL)
                }
              }

            }else{
              object@data
            }
          })

# length_RLum() -------------------------------------------------------------------------------
#' @describeIn RLum.Data.Curve
#' Returns the length of the curve object, which is the maximum of the
#' value time/temperature of the curve (corresponding to the stimulation length)
#'
#' @return
#'
#' **`length_RLum`**
#'
#' The maximum time/temperature value of the curve (first matrix column),
#' i.e. the stimulation length — not the number of channels
#'
#' @md
#' @export
setMethod("length_RLum",
          "RLum.Data.Curve",
          function(object){
            ## maximum of the x-axis (time/temperature), not nrow()
            max(object@data[,1])
          })

# names_RLum() --------------------------------------------------------------------------------
#' @describeIn RLum.Data.Curve
#' Returns the names of the info elements coming along with this curve object
#'
#' @return
#'
#' **`names_RLum`**
#'
#' Names of the info elements (slot `info`)
#'
#' @export
setMethod("names_RLum",
          "RLum.Data.Curve",
          function(object){
            names(object@info)
          })

# bin_RLum.Data() -----------------------------------------------------------------------------
#' @describeIn RLum.Data.Curve
#' Allows binning of specific objects
#'
#' @param bin_size [integer] (*with default*):
#' set number of channels used for each bin, e.g. `bin_size = 2` means that
#' two channels are binned.
#'
#' @return
#'
#' **`bin_RLum.Data`**
#'
#' Same object as input, after applying the binning.
#'
#' @md
#' @export
setMethod(f = "bin_RLum.Data",
          signature = "RLum.Data.Curve",
          function(object, bin_size = 2) {

            ## check for invalid bin_size values; anything NULL or <= 0
            ## leaves the object untouched (see else branch)
            if (!is.null(bin_size) && bin_size > 0) {

              ## set stepping vector: first channel index of every bin
              stepping <- seq(1, nrow(object@data), by = bin_size)

              ## get count (y) vector to be binned
              bin_vector <- object@data[, 2]

              ## pad the vector with NA up to a multiple of the bin number,
              ## so the matrix below fills cleanly without recycling values
              length(bin_vector) <- suppressWarnings(prod(dim(matrix(
                bin_vector,
                ncol = length(stepping)
              ))))

              ## define new matrix for binning: one column per bin
              ## (matrix() fills column-wise, so each column holds the
              ## bin_size consecutive channels of one bin)
              bin_matrix <- matrix(bin_vector, ncol = length(stepping))

              ## calculate column sums and replace matrix;
              ## this is much faster than an apply loop
              ## (na.rm = TRUE ignores the NA padding of the last bin;
              ## x-values are taken from the first channel of each bin)
              object@data <- matrix(c(object@data[stepping], colSums(bin_matrix, na.rm = TRUE)), ncol = 2)

              ## rebuild via set_RLum() so the object gets a fresh id
              return(set_RLum(class = "RLum.Data.Curve", data = object))

            } else{
              warning("Argument 'bin_size' invalid, nothing was done!")

              ## just return the object unchanged
              return(object)
            }
          })

# smooth_RLum() -------------------------------------------------------------------------------
#' @describeIn RLum.Data.Curve
#' Smoothing of RLum.Data.Curve objects using the function [zoo::rollmean] or [zoo::rollmedian][zoo::rollmean].
#' In particular the internal function `.smoothing` is used.
#'
#' @param k [`smooth_RLum`]; [integer] (*with default*):
#' window for the rolling mean; must be odd for rollmedian.
#' If nothing is set k is set automatically
#'
#' @param fill [`smooth_RLum`]; [numeric] (*with default*):
#' a vector defining the left and the right hand data
#'
#' @param align [`smooth_RLum`]; [character] (*with default*):
#' specifying whether the index of the result should be left- or right-aligned
#' or centered (default) compared to the rolling window of observations, allowed
#' `"right"`, `"center"` and `"left"`
#'
#' @param method [`smooth_RLum`]; [character] (*with default*):
#' defines which method should be applied for the smoothing: `"mean"` or `"median"`
#'
#' @return
#'
#' **`smooth_RLum`**
#'
#' Same object as input, after smoothing
#'
#' @md
#' @export
setMethod(
  f = "smooth_RLum",
  signature = "RLum.Data.Curve",
  function(object, k = NULL, fill = NA, align = "right", method = "mean") {

    ## smooth only the count (y) column; the x-axis stays untouched
    ## (all smoothing parameters are passed through to the internal helper)
    object@data[,2] <- .smoothing(
      x = object@data[,2],
      k = k,
      fill = fill,
      align = align,
      method = method)

    ## return via set function to get a new id
    set_RLum(class = "RLum.Data.Curve",
             originator = "smooth_RLum",
             data = object)

  }
)
Luminescence/R/merge_Risoe.BINfileData.R0000644000176200001440000001445613417222471017526 0ustar liggesusers
#' Merge Risoe.BINfileData objects or Risoe BIN-files
#'
#' Function allows merging Risoe BIN/BINX files or Risoe.BINfileData objects.
#'
#' The function allows merging different measurements to one file or one
#' object. The record IDs are recalculated for the new object. Other values
#' are kept for each object. The number of input objects is not limited.
#'
#' `position.number.append.gap` option
#'
#' If the option `keep.position.number = FALSE` is used, the position
#' numbers of the new data set are recalculated by adding the highest position
#' number of the previous data set to the each position number of the next data
#' set. For example: The highest position number is 48, then this number will
#' be added to all other position numbers of the next data set (e.g.
1 + 48 = #' 49) #' #' However, there might be cases where an additional addend (summand) is needed #' before the next position starts. Example: #' #' - Position number set (A): `1,3,5,7` #' - Position number set (B): `1,3,5,7` #' #' With no additional summand the new position numbers would be: #' `1,3,5,7,8,9,10,11`. That might be unwanted. Using the argument #' `position.number.append.gap = 1` it will become: #' `1,3,5,7,9,11,13,15,17`. #' #' @param input.objects [character] with [Risoe.BINfileData-class] objects (**required**): #' Character vector with path and files names #' (e.g. `input.objects = c("path/file1.bin", "path/file2.bin")` or #' [Risoe.BINfileData-class] objects (e.g. `input.objects = c(object1, object2)`). #' Alternatively a `list` is supported. #' #' #' @param output.file [character] (*optional*): #' File output path and name. If no value is given, a [Risoe.BINfileData-class] is #' returned instead of a file. #' #' #' @param keep.position.number [logical] (*with default*): #' Allows keeping the original position numbers of the input objects. #' Otherwise the position numbers are recalculated. #' #' #' @param position.number.append.gap [integer] (*with default*): #' Set the position number gap between merged BIN-file sets, if the option #' `keep.position.number = FALSE` is used. See details for further #' information. #' #' #' @return Returns a `file` or a [Risoe.BINfileData-class] object. #' #' #' @note #' The validity of the output objects is not further checked. #' #' #' @section Function version: 0.2.7 #' #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #' #' @seealso [Risoe.BINfileData-class], [read_BIN2R], [write_R2BIN] #' #' #' @references #' Duller, G., 2007. Analyst. 
#'
#'
#' @keywords IO manip
#'
#'
#' @examples
#'
#' ##merge two objects
#' data(ExampleData.BINfileData, envir = environment())
#'
#' object1 <- CWOSL.SAR.Data
#' object2 <- CWOSL.SAR.Data
#'
#' object.new <- merge_Risoe.BINfileData(c(object1, object2))
#'
#' @md
#' @export
merge_Risoe.BINfileData <- function(
  input.objects,
  output.file,
  keep.position.number = FALSE,
  position.number.append.gap = 0
){

  # Integrity Checks --------------------------------------------------------

  ## merging needs at least two inputs, regardless of their type
  if(length(input.objects) < 2){
    stop("[merge_Risoe.BINfileData()] At least two input objects are needed!")
  }

  ## character input: every element must be an existing file path;
  ## list input: every element must already be a Risoe.BINfileData object
  if(is(input.objects, "character") == TRUE){
    for(i in 1:length(input.objects)){
      if(file.exists(input.objects[i])==FALSE){
        stop("[merge_Risoe.BINfileData()] File ",input.objects[i]," does not exist!", call. = FALSE)
      }
    }
  }else{
    if(is(input.objects, "list") == TRUE){
      for(i in 1:length(input.objects)){
        if(is(input.objects[[i]], "Risoe.BINfileData") == FALSE){
          stop("[merge_Risoe.BINfileData()] Input list does not contain Risoe.BINfileData objects!")
        }
      }
    }else{
      stop("[merge_Risoe.BINfileData()] Input object is not a 'character' nor a 'list'!")
    }
  }

  # Import Files ------------------------------------------------------------

  ##loop over all files to store the results in a list
  ##or the input is already a list
  if(is(input.objects, "character") == TRUE){
    temp <- lapply(input.objects, read_BIN2R)
  }else{
    temp <- input.objects
  }

  # Get POSITION values -------------------------------------------------------

  ##grep maximum position value from the first file
  temp.position.max <- max(temp[[1]]@METADATA[["POSITION"]])

  ##grep all position values except from the first file
  ## NOTE: each iteration shifts the positions of data set x by the running
  ## maximum plus the requested gap, then updates that running maximum via
  ## assign() in the enclosing environment so the next iteration sees it
  temp.position.values <- unlist(sapply(2:length(temp), function(x){
    temp <- temp[[x]]@METADATA[["POSITION"]] + temp.position.max + position.number.append.gap
    assign(x = "temp.position.max", value = max(temp), envir = parent.env(environment()))
    return(temp)
  }))

  ## prepend the unshifted positions of the first data set
  temp.position.values <- c(temp[[1]]@METADATA[["POSITION"]], temp.position.values)

  # Get overall record length -----------------------------------------------

  ## total number of records across all inputs; used to renumber the IDs
  temp.record.length <- sum(sapply(1:length(temp), function(x){
    length(temp[[x]]@METADATA[,"ID"])
  }))

  # Merge Files -------------------------------------------------------------

  ##loop for similar input objects
  ## first iteration initialises the accumulators, later iterations append
  ## NOTE(review): exists("temp.new.METADATA") also finds a variable of that
  ## name in an enclosing/global environment — confirm this cannot clash
  for(i in 1:length(input.objects)){
    if(exists("temp.new.METADATA") == FALSE){
      temp.new.METADATA <- temp[[i]]@METADATA
      temp.new.DATA <- temp[[i]]@DATA

      ## .RESERVED may be missing on objects from older package versions,
      ## hence the try() guard
      if(inherits(try(temp[[i]]@.RESERVED, silent = TRUE), "try-error")){
        temp.new.RESERVED <- list()
      }else{
        temp.new.RESERVED <- temp[[i]]@.RESERVED
      }
    }else{
      temp.new.METADATA <- rbind(temp.new.METADATA, temp[[i]]@METADATA)
      temp.new.DATA <- c(temp.new.DATA, temp[[i]]@DATA)

      if(inherits(try(temp[[i]]@.RESERVED, silent = TRUE), "try-error")){
        temp.new.RESERVED <- c(temp.new.RESERVED, list())
      }else{
        temp.new.RESERVED <- c(temp.new.RESERVED, temp[[i]]@.RESERVED)
      }
    }
  }

  ##SET RECORD ID in METADATA (continuous renumbering over all records)
  temp.new.METADATA$ID <- 1:temp.record.length

  ##SET POSITION VALUES (only when renumbering was requested)
  if(keep.position.number == FALSE){
    temp.new.METADATA$POSITION <- temp.position.values
  }

  ##TODO version number?

  # Produce BIN file object -------------------------------------------------

  temp.new <- set_Risoe.BINfileData(
    METADATA = temp.new.METADATA,
    DATA = temp.new.DATA,
    .RESERVED = temp.new.RESERVED
  )

  # OUTPUT ------------------------------------------------------------------

  ## with an output file path the merged object is written to disk,
  ## otherwise it is returned to the caller
  if(missing(output.file) == FALSE){
    write_R2BIN(temp.new, output.file)
  }else{
    return(temp.new)
  }
}
Luminescence/R/CW2pPMi.R0000644000176200001440000002134013231137116014371 0ustar liggesusers#' Transform a CW-OSL curve into a pPM-OSL curve via interpolation under
#' parabolic modulation conditions
#'
#' Transforms a conventionally measured continuous-wave (CW) OSL-curve into a
#' pseudo parabolic modulated (pPM) curve under parabolic modulation conditions
#' using the interpolation procedure described by Bos & Wallinga (2012).
#'
#' The complete procedure of the transformation is given in Bos & Wallinga
#' (2012).
The input `data.frame` consists of two columns: time (t) and #' count values (CW(t)) #' #' **Nomenclature** #' #' - P = stimulation time (s) #' - 1/P = stimulation rate (1/s) #' #' **Internal transformation steps** #' #' (1) #' log(CW-OSL) values #' #' (2) #' Calculate t' which is the transformed time: #' \deqn{t' = (1/3)*(1/P^2)t^3} #' #' (3) #' Interpolate CW(t'), i.e. use the log(CW(t)) to obtain the count values for #' the transformed time (t'). Values beyond `min(t)` and `max(t)` #' produce `NA` values. #' #' (4) #' Select all values for t' < `min(t)`, i.e. values beyond the time resolution #' of t. Select the first two values of the transformed data set which contain #' no `NA` values and use these values for a linear fit using [lm]. #' #' (5) #' Extrapolate values for t' < `min(t)` based on the previously obtained #' fit parameters. The extrapolation is limited to two values. Other values at #' the beginning of the transformed curve are set to 0. #' #' (6) #' Transform values using #' \deqn{pLM(t) = t^2/P^2*CW(t')} #' #' (7) #' Combine all values and truncate all values for t' > `max(t)` #' #' **NOTE:** #' The number of values for t' < `min(t)` depends on the stimulation #' period `P`. To avoid the production of too many artificial data at the #' raising tail of the determined pPM curve, it is recommended to use the #' automatic estimation routine for `P`, i.e. provide no value for #' `P`. #' #' @param values [RLum.Data.Curve-class] or [data.frame] (**required**): #' [RLum.Data.Curve-class] or `data.frame` with measured curve data of type #' stimulation time (t) (`values[,1]`) and measured counts (cts) (`values[,2]`) #' #' @param P [vector] (*optional*): #' stimulation period in seconds. If no value is given, the optimal value is #' estimated automatically (see details). Greater values of P produce more #' points in the rising tail of the curve. 
#' #' @return #' The function returns the same data type as the input data type with #' the transformed curve values. #' #' `RLum.Data.Curve` #' #' \tabular{rl}{ #' `$CW2pPMi.x.t` \tab: transformed time values \cr #' `$CW2pPMi.method` \tab: used method for the production of the new data points #' } #' #' `data.frame` #' #' \tabular{rl}{ #' `$x` \tab: time\cr #' `$y.t` \tab: transformed count values\cr #' `$x.t` \tab: transformed time values \cr #' `$method` \tab: used method for the production of the new data points #' } #' #' @note #' According to Bos & Wallinga (2012), the number of extrapolated points #' should be limited to avoid artificial intensity data. If `P` is #' provided manually, not more than two points are extrapolated. #' #' @section Function version: 0.2.1 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #' Based on comments and suggestions from:\cr #' Adrie J.J. Bos, Delft University of Technology, The Netherlands #' #' @seealso [CW2pLM], [CW2pLMi], [CW2pHMi], [fit_LMCurve], [RLum.Data.Curve-class] #' #' @references #' Bos, A.J.J. & Wallinga, J., 2012. How to visualize quartz OSL #' signal components. Radiation Measurements, 47, 752-758. #' #' **Further Reading** #' #' Bulur, E., 1996. An Alternative Technique For #' Optically Stimulated Luminescence (OSL) Experiment. Radiation Measurements, #' 26, 701-709. #' #' Bulur, E., 2000. A simple transformation for converting CW-OSL curves to #' LM-OSL curves. Radiation Measurements, 32, 141-145. #' #' @keywords manip #' #' @examples #' #' #' ##(1) #' ##load CW-OSL curve data #' data(ExampleData.CW_OSL_Curve, envir = environment()) #' #' ##transform values #' values.transformed <- CW2pPMi(ExampleData.CW_OSL_Curve) #' #' ##plot #' plot(values.transformed$x,values.transformed$y.t, log = "x") #' #' ##(2) - produce Fig. 
#' 4 from Bos & Wallinga (2012)
#'
#' ##load data
#' data(ExampleData.CW_OSL_Curve, envir = environment())
#' values <- CW_Curve.BosWallinga2012
#'
#' ##open plot area
#' plot(NA, NA,
#'      xlim = c(0.001,10),
#'      ylim = c(0,8000),
#'      ylab = "pseudo OSL (cts/0.01 s)",
#'      xlab = "t [s]",
#'      log = "x",
#'      main = "Fig. 4 - Bos & Wallinga (2012)")
#'
#' values.t <- CW2pLMi(values, P = 1/20)
#' lines(values[1:length(values.t[,1]),1],CW2pLMi(values, P = 1/20)[,2],
#'       col = "red",lwd = 1.3)
#' text(0.03,4500,"LM", col = "red", cex = .8)
#'
#' values.t <- CW2pHMi(values, delta = 40)
#' lines(values[1:length(values.t[,1]),1], CW2pHMi(values, delta = 40)[,2],
#'       col = "black", lwd = 1.3)
#' text(0.005,3000,"HM", cex = .8)
#'
#' values.t <- CW2pPMi(values, P = 1/10)
#' lines(values[1:length(values.t[,1]),1], CW2pPMi(values, P = 1/10)[,2],
#'       col = "blue", lwd = 1.3)
#' text(0.5,6500,"PM", col = "blue", cex = .8)
#'
#' @md
#' @export
CW2pPMi <- function(
  values,
  P
){

  # (0) Integrity checks ------------------------------------------------------

  ##(1) data.frame or RLum.Data.Curve object?
  if(!is(values, "data.frame") & !is(values, "RLum.Data.Curve")){
    stop("[CW2pPMi()] 'values' object has to be of type 'data.frame' or 'RLum.Data.Curve'!",
         call. = FALSE)
  }

  ##(2) if the input object is an 'RLum.Data.Curve' object check for allowed curves
  if(is(values, "RLum.Data.Curve")){
    if(!grepl("OSL", values@recordType) & !grepl("IRSL", values@recordType)){
      stop(paste("[CW2pPMi()] recordType ", values@recordType,
                 " is not allowed for the transformation!", sep = ""),
           call. = FALSE)
    }else{
      temp.values <- as(values, "data.frame")
    }
  }else{
    temp.values <- values
  }

  # (3) Transform values ------------------------------------------------------

  ##log transformation of the CW-OSL count values
  CW_OSL.log <- log(temp.values[,2])

  ##time transformation t >> t'
  t <- temp.values[,1]

  ##set P
  ##if no value for P is given, select a P value that produces a maximum of
  ##two extrapolation points; the condition below was garbled in this copy of
  ##the source ("t.transformed2") and has been restored to compare against
  ##min(t), matching the documented two-point limit
  if(missing(P)){
    i <- 1
    P <- 1/i
    t.transformed <- (1/3)*(1/P^2)*t^3

    while(length(t.transformed[t.transformed < min(t)]) > 2){
      P <- 1/i
      t.transformed <- (1/3)*(1/P^2)*t^3
      i <- i + 1
    }

  }else{
    t.transformed <- (1/3)*(1/P^2)*t^3
  }

  # (4) Interpolation ---------------------------------------------------------

  ##interpolate values, values beyond the range return NA values (rule = 1)
  CW_OSL.interpolated <- approx(t, CW_OSL.log, xout = t.transformed, rule = 1)

  ##combine t.transformed and CW_OSL.interpolated in a data.frame
  temp <- data.frame(x = t.transformed, y = unlist(CW_OSL.interpolated$y))

  # (5) Extrapolate first values of the curve ---------------------------------

  ##(a) - find index of the first row which contains no NA value
  ##      (needed for extrapolation)
  temp.sel.id <- min(which(is.na(temp[,2]) == FALSE))

  ##(b) - fit linear function on the first two measured points
  fit.lm <- lm(y ~ x, data.frame(x = t[1:2], y = CW_OSL.log[1:2]))

  ##select values to extrapolate and predict (extrapolate) values based on
  ##the fitted function
  x.i <- data.frame(x = temp[1:(min(temp.sel.id) - 1), 1])
  y.i <- predict(fit.lm, x.i)

  ##replace NA values by extrapolated values
  temp[1:length(y.i), 2] <- y.i

  ##set method values
  temp.method <- c(rep("extrapolation", length(y.i)),
                   rep("interpolation", (length(temp[,2]) - length(y.i))))

  ##print a warning message for more than two extrapolation points
  if(temp.sel.id > 2){
    warning("t' is beyond the time resolution. Only two data points have been extrapolated, the first ",
            temp.sel.id - 3, " points have been set to 0!")
  }

  # (6) Convert, transform and combine values ---------------------------------

  ##unlog CW-OSL count values, i.e. log(CW) >> CW
  CW_OSL <- exp(temp$y)

  ##transform CW-OSL values to pPM-OSL values
  pPM <- (t^2/P^2)*CW_OSL

  ##combine all values and exclude NA values
  temp.values <- data.frame(x = t, y.t = pPM, x.t = t.transformed, method = temp.method)
  temp.values <- na.exclude(temp.values)

  # (7) Return values ----------------------------------------------------------

  ##returns the same data type as the input
  if(is(values, "data.frame")){
    values <- temp.values
    return(values)

  }else{
    ##add old info elements to new info elements
    temp.info <- c(values@info,
                   CW2pPMi.x.t = list(temp.values$x.t),
                   CW2pPMi.method = list(temp.values$method))

    newRLumDataCurves.CW2pPMi <- set_RLum(
      class = "RLum.Data.Curve",
      recordType = values@recordType,
      data = as.matrix(temp.values[,1:2]),
      info = temp.info)

    return(newRLumDataCurves.CW2pPMi)
  }
}
Luminescence/R/RLum.Results-class.R0000644000176200001440000002513213517523470016644 0ustar liggesusers#' @include get_RLum.R set_RLum.R length_RLum.R names_RLum.R
NULL

#' Class `"RLum.Results"`
#'
#' Object class contains results data from functions (e.g., [analyse_SAR.CWOSL]).
#'
#' @name RLum.Results-class
#'
#' @docType class
#'
#' @slot data
#' Object of class "list" containing output data
#'
#' @note
#' The class is intended to store results from functions to be used by
#' other functions. The data in the object should always be accessed by the
#' method `get_RLum`.
#'
#' @section Objects from the Class:
#' Objects can be created by calls of the form `new("RLum.Results", ...)`.
#'
#' @section Class version: 0.5.2
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France)
#'
#' @seealso [RLum-class], [plot_RLum], [merge_RLum]
#'
#' @keywords classes methods
#'
#' @examples
#'
#' showClass("RLum.Results")
#'
#' ##create an empty object from this class
#' set_RLum(class = "RLum.Results")
#'
#' ##use another function to show how it works
#'
#' ##Basic calculation of the dose rate for a specific date
#' dose.rate <- calc_SourceDoseRate(
#'   measurement.date = "2012-01-27",
#'   calib.date = "2014-12-19",
#'   calib.dose.rate = 0.0438,
#'   calib.error = 0.0019)
#'
#' ##show object
#' dose.rate
#'
#' ##get results
#' get_RLum(dose.rate)
#'
#' ##get parameters used for the calculation from the same object
#' get_RLum(dose.rate, data.object = "parameters")
#'
#' ##alternatively objects can be accessed using S3 generics, such as
#' dose.rate$parameters
#'
#' @md
#' @export
## S4 class definition: a thin container around a list of result objects,
## inheriting the common RLum slots (originator, info, .uid, .pid)
setClass(
  Class = "RLum.Results",
  slots = list(data = "list"),
  contains = "RLum",
  prototype = list (data = list())
)

# as() ----------------------------------------------------------------------------------------
##LIST
##COERCE RLum.Results >> list AND list >> RLum.Results
#' as() - RLum-object coercion
#'
#' for `[RLum.Results-class]`
#'
#' **[RLum.Results-class]**
#'
#' \tabular{ll}{
#'  **from** \tab **to**\cr
#'  `list` \tab `list`\cr
#' }
#'
#' Given that the [list] consists of [RLum.Results-class] objects.
#'
#' @md
#' @name as
## list -> RLum.Results: the list becomes the data slot; the originator is
## marked as "coercion" so downstream code can tell the object was not
## produced by an analysis function
setAs("list", "RLum.Results",
      function(from, to){
        new(to,
            originator = "coercion",
            data = from)
      })

## RLum.Results -> list: simply unwraps the data slot
setAs("RLum.Results", "list",
      function(from){
        from@data
      })

# show() --------------------------------------------------------------------------------------
#' @describeIn RLum.Results
#' Show structure of `RLum.Results` object
#'
#' @keywords internal
#'
#' @md
#' @export
## terminal summary: originator, number of data elements, the class of each
## data element and the number of info elements
setMethod("show",
          signature(object = "RLum.Results"),
          function(object) {

            ##data elements
            temp.names <- names(object@data)

            ## one line per data element with its name and class
            if (length(object) > 0) {
              temp.type <- sapply(1:length(object@data),
                                  function(x) {
                                    paste("\t .. $", temp.names[x],
                                          " : ",
                                          is(object@data[[x]])[1],
                                          sep = "")
                                  })
            } else{
              temp.type <- paste0("\t .. $", temp.names, " : ", is(object@data)[1])
            }
            temp.type <- paste(temp.type, collapse = "\n")

            ##print information
            cat("\n [RLum.Results-class]")
            cat("\n\t originator: ", object@originator, "()", sep = "")
            cat("\n\t data:", length(object@data))
            cat("\n", temp.type)
            cat("\n\t additional info elements: ", length(object@info))
          })

# set_RLum() ----------------------------------------------------------------------------------
#' @describeIn RLum.Results
#' Construction method for an RLum.Results object.
#'
#' @param class [`set_RLum`]; [character] **(required)**:
#' name of the `RLum` class to create
#'
#' @param originator [`set_RLum`]; [character] (*automatic*):
#' contains the name of the calling function (the function that produces this object);
#' can be set manually.
#'
#' @param .uid [`set_RLum`]; [character] (*automatic*):
#' sets a unique ID for this object using the internal C++ function `create_UID`.
#'
#' @param .pid [`set_RLum`]; [character] (*with default*):
#' option to provide a parent id for nesting at will.
#'
#' @param data [`set_RLum`]; [list] (*optional*):
#' a list containing the data to
#' be stored in the object
#'
#' @param info [`set_RLum`]; [list] (*optional*):
#' a list containing additional info data for the object
#'
#' @return
#'
#' **`set_RLum`**:
#'
#' Returns an object from the class [RLum.Results-class]
#'
#' @md
#' @export
## constructor dispatched by the generic set_RLum(); the generic is expected
## to supply originator/.uid/.pid when they are not given by the caller
setMethod("set_RLum",
          signature = signature("RLum.Results"),
          function(class, originator, .uid, .pid, data = list(), info = list()) {

            ##create new class
            newRLumReuslts <- new("RLum.Results")

            ##fill object
            newRLumReuslts@originator <- originator
            newRLumReuslts@data <- data
            newRLumReuslts@info <- info
            newRLumReuslts@.uid <- .uid
            newRLumReuslts@.pid <- .pid

            return(newRLumReuslts)
          })

# get_RLum() ----------------------------------------------------------------------------------
#' @describeIn RLum.Results
#' Accessor method for RLum.Results object. The argument data.object allows
#' directly accessing objects delivered within the slot data. The default
#' return object depends on the object originator (e.g., `fit_LMCurve`).
#' If nothing is specified always the first `data.object` will be returned.
#'
#' Note: Detailed specification should be made in combination with the originator slot in the
#' receiving function if results are piped.
#'
#' @param object [`get_RLum`]; [RLum.Results-class] (**required**):
#' an object of class [RLum.Results-class] to be evaluated
#'
#' @param data.object [`get_RLum`]; [character] or [numeric]:
#' name or index of the data slot to be returned
#'
#' @param info.object [`get_RLum`]; [character] (*optional*):
#' name of the wanted info element
#'
#' @param drop [`get_RLum`]; [logical] (*with default*):
#' coerce to the next possible layer (which are data objects, `drop = FALSE`
#' keeps the original `RLum.Results`
#'
#' @return
#'
#' **`get_RLum`**:
#'
#' Returns:
#'
#' 1. Data object from the specified slot
#' 2. [list] of data objects from the slots if 'data.object' is vector or
#' 3.
#' an [RLum.Results-class] for `drop = FALSE`.
#'
#'
#' @md
#' @export
## accessor: returns either a named info element, a single data element
## (drop = TRUE, the default), or a fresh RLum.Results wrapping the selected
## data elements (drop = FALSE)
setMethod(
  "get_RLum",
  signature = signature("RLum.Results"),
  definition = function(object, data.object, info.object = NULL, drop = TRUE) {

    ##if info.object is set, only the info objects are returned
    if (!is.null(info.object)) {
      if (info.object %in% names(object@info)) {
        unlist(object@info[info.object])

      } else {
        ##check for entries; warn with context and return NULL
        if (length(object@info) == 0) {
          warning("[get_RLum] This RLum.Results object has no info objects! NULL returned!)",
                  call. = FALSE)

        } else {
          warning(paste0(
            "[get_RLum] Invalid info.object name. Valid names are: ",
            paste(names(object@info), collapse = ", ")
          ),
          call. = FALSE)
        }
        return(NULL)
      }

    } else{
      if (!missing(data.object)) {
        ##++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        ##CASE1: data.object is of type 'character'
        if (is(data.object, "character")) {
          #check if the provided names are available
          if (all(data.object %in% names(object@data))) {
            ##account for multiple inputs
            if (length(data.object) > 1) {
              temp.return <- sapply(data.object, function(x) {
                object@data[[x]]
              })

            } else{
              ##name the single returned element after the requested data
              ##object (bug fix: the element was previously named literally
              ##"data.object", which leaked into the result for drop = FALSE)
              temp.return <- list(object@data[[data.object]])
              names(temp.return) <- data.object
            }

          } else {
            stop(paste0("[get_RLum()] data.object(s) unknown, valid names are: ",
                        paste(names(object@data), collapse = ", ")),
                 call. = FALSE)
          }
        }

        ##++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        ##CASE2: data.object is of type 'numeric'
        else if (is(data.object, "numeric")) {
          ##check if index is valid
          if (max(data.object) > length(object@data)) {
            stop("[get_RLum] 'data.object' index out of bounds!")

          } else if (length(data.object) > 1) {
            temp.return <- lapply(data.object, function(x) {
              object@data[[x]]
            })

          } else {
            temp.return <- list(object@data[[data.object]])
          }

          ##restore names that get lost with this method
          names(temp.return) <- names(object@data)[data.object]
        }

        ##++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        ##CASE3: data.object is of an unsupported type
        else{
          stop("[get_RLum] 'data.object' has to be of type character or numeric!",
               call. = FALSE)
        }

        ##the CASE data.object is missing
      } else{
        ##return always the first object if nothing is specified
        temp.return <- object@data[1]
      }

      ##CHECK whether an RLum.Results object needs to be produced ...
      ##This will just be the case if the function hasn't returned something before
      if (drop) {
        ##we need to access the list here, otherwise we get unexpected behaviour as drop = TRUE
        ##should always return the lowest possible element here
        return(temp.return[[1]])

      } else{
        return(set_RLum(
          "RLum.Results",
          originator = object@originator,
          data = temp.return
        ))
      }
    }
  }
)

# length_RLum() -------------------------------------------------------------------------------
#' @describeIn RLum.Results
#' Returns the length of the object, i.e., number of stored data.objects
#'
#' @return
#'
#' **`length_RLum`**
#'
#' Returns the number of data elements in the `RLum.Results` object.
#'
#' @md
#' @export
## number of stored data elements
setMethod("length_RLum",
          "RLum.Results",
          function(object){
            length(object@data)
          })

# names_RLum() --------------------------------------------------------------------------------
#' @describeIn RLum.Results
#' Returns the names data.objects
#'
#' @return
#'
#' **`names_RLum`**
#'
#' Returns the names of the data elements in the object.
#'
#' @md
#' @export
## names of the stored data elements
setMethod("names_RLum",
          "RLum.Results",
          function(object){
            names(object@data)
          })
Luminescence/R/merge_RLum.Results.R0000644000176200001440000001047513231137116016713 0ustar liggesusers#' Merge function for RLum.Results S4-class objects
#'
#' Function merges objects of class [RLum.Results-class]. The slots in the objects
#' are combined depending on the object type, e.g., for [data.frame] and [matrix]
#' rows are appended.
#'
#' @note The originator is taken from the first element and not reset to `merge_RLum`
#'
#' @param objects [list] (**required**):
#' a list of [RLum.Results-class] objects
#'
#' @section Function version: 0.2.0
#'
#' @keywords internal
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @md
#' @export
merge_RLum.Results <- function(
  objects){

  ##-------------------------------------------------------------
  ##Some integrity checks

  ##check if input object is a list
  if(!is(objects, "list")){
    stop("[merge_RLum.Results()] 'objects' has to of type 'list'!")

  }else{
    ##check if objects in the list are of type RLum.Results;
    ##collect the originator of every object while checking
    temp.originator <- sapply(1:length(objects), function(x){
      if(is(objects[[x]], "RLum.Results") == FALSE){
        stop("[merge_RLum.Results()] Objects to merge have to be of type 'RLum.Results'!")
      }
      objects[[x]]@originator
    })
  }

  ##check if originator is different; mixed originators cannot be merged as
  ##the data slots would not be structurally compatible
  if(length(unique(temp.originator))>1){
    stop("[merge_RLum.Results()] 'RLum.Results' object originator differs!")
  }

  ##-------------------------------------------------------------
  ##merge objects depending on the data structure;
  ##the merge result is accumulated in place in the first object's data slot
  for(i in 1:length(objects[[1]]@data)){

    ##++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    ##numeric vector or data.frame or matrix
    if(is(objects[[1]]@data[[i]], "data.frame")||
       is(objects[[1]]@data[[i]], "numeric") ||
       is(objects[[1]]@data[[i]], "matrix")){

      ##grep elements and combine them into a list
      temp.list <- lapply(1:length(objects), function(x) {
        objects[[x]]@data[[i]]
      })

      ##check whether the objects can be combined by rbind
      ##(column counts have to match across all objects)
      if(length(unique(unlist(lapply(temp.list, FUN = ncol)))) > 1){
        stop("[merge_RLum.Results()] Objects cannot be combined, number of columns differs.")
      }

      ##combine them using rbind or data.table::rbindlist (depends on the data type)
      if(is(objects[[1]]@data[[i]], "numeric")){
        objects[[1]]@data[[i]] <- unlist(temp.list)

      }else if(is(objects[[1]]@data[[i]], "matrix")){
        objects[[1]]@data[[i]] <- do.call("rbind", temp.list)

      }else{
        objects[[1]]@data[[i]] <- as.data.frame(data.table::rbindlist(temp.list))
      }

    }else{
      ##++++++++++++++++++++++++++++++++++++++++++++++++++++++++
      ##all other elements

      ##grep elements and write them into a list
      objects[[1]]@data[[i]] <- lapply(1:length(objects), function(x){
        objects[[x]]@data[[i]]
      })

      ##unlist to flatten list if necessary for the elements
      if(is(objects[[1]]@data[[i]][[1]])[1] == "list"){
        objects[[1]]@data[[i]] <- unlist(objects[[1]]@data[[i]],
                                         recursive = FALSE)
      }
    }

  }##end loop

  #return by setting a new RLum.Results (for the .uid)
  #the originator is not reset
  objects_merged <- set_RLum(
    class = "RLum.Results",
    originator = objects[[1]]@originator,
    data = objects[[1]]@data,
    info = unlist(lapply(objects, function(x) {
      x@info
    }), recursive = FALSE),
    .pid = unlist(lapply(objects, function(x) {
      x@.uid
    }))
  )

  return(objects_merged)
}
Luminescence/R/plot_RLum.Data.Spectrum.R0000644000176200001440000007074113571743147017613 0ustar liggesusers#' Plot function for an RLum.Data.Spectrum S4 class object
#'
#' The function provides a standardised plot output for spectrum data of an
#' RLum.Data.Spectrum S4 class object
#'
#' **Matrix structure** \cr (cf.
[RLum.Data.Spectrum-class]) #' #' - `rows` (x-values): wavelengths/channels (xlim, xlab) #' - `columns` (y-values): time/temperature (ylim, ylab) #' - `cells` (z-values): count values (zlim, zlab) #' #' *Note: This nomenclature is valid for all plot types of this function!* #' #' **Nomenclature for value limiting** #' #' - `xlim`: Limits values along the wavelength axis #' - `ylim`: Limits values along the time/temperature axis #' - `zlim`: Limits values along the count value axis #' #' **Details on the plot functions** #' #' Spectrum is visualised as 3D or 2D plot. Both plot types are based on #' internal R plot functions. #' #'**`plot.type = "persp"`** #' #' Arguments that will be passed to [persp]: #' #' - `shade`: default is `0.4` #' - `phi`: default is `15` #' - `theta`: default is `-30` #' - `expand`: default is `1` #' - `ticktype`: default is `detailed`, `r`: default is `10` #' #' **Note:** Further parameters can be adjusted via `par`. For example #' to set the background transparent and reduce the thickness of the lines use: #' `par(bg = NA, lwd = 0.7)` previous the function call. #' #'**`plot.type = "single"`** #' #' Per frame a single curve is returned. Frames are time or temperature #' steps. #' #'**`plot.type = "multiple.lines"`** #' #' All frames plotted in one frame. #' #'**`plot.type = "transect"`** #' #' Depending on the selected wavelength/channel range a transect over the #' time/temperature (y-axis) will be plotted along the wavelength/channels #' (x-axis). If the range contains more than one channel, values (z-values) are #' summed up. To select a transect use the `xlim` argument, e.g. #' `xlim = c(300,310)` plot along the summed up count values of channel #' 300 to 310. 
#' #' **Further arguments that will be passed (depending on the plot type)** #' #' `xlab`, `ylab`, `zlab`, `xlim`, `ylim`, #' `zlim`, `main`, `mtext`, `pch`, `type` ("single", "multiple.lines", "interactive"), #' `col`, `border`, `box` `lwd`, `bty`, `showscale` ("interactive") #' #' @param object [RLum.Data.Spectrum-class] or [matrix] (**required**): #' S4 object of class `RLum.Data.Spectrum` or a `matrix` containing count #' values of the spectrum.\cr #' Please note that in case of a matrix rownames and colnames are set #' automatically if not provided. #' #' @param par.local [logical] (*with default*): #' use local graphical parameters for plotting, e.g. the plot is shown in one column and one row. #' If `par.local = FALSE` global parameters are inherited. #' @param plot.type [character] (*with default*): plot type, for #' 3D-plot use `persp`, or `interactive`, for a 2D-plot `contour`, #' `single` or `multiple.lines` (along the time or temperature axis) #' or `transect` (along the wavelength axis) \cr #' #' @param optical.wavelength.colours [logical] (*with default*): #' use optical wavelength colour palette. Note: For this, the spectrum range is #' limited: `c(350,750)`. Own colours can be set with the argument `col`. If you provide already #' binned spectra, the colour assignment is likely to be wrong, since the colour gradients are calculated #' using the bin number. #' #' @param bg.spectrum [RLum.Data.Spectrum-class] or [matrix] (*optional*): Spectrum #' used for the background subtraction. By definition, the background spectrum should have been #' measured with the same setting as the signal spectrum. If a spectrum is provided, the #' argument `bg.channels` works only on the provided background spectrum. #' #' @param bg.channels [vector] (*optional*): #' defines channel for background subtraction If a vector is provided the mean #' of the channels is used for subtraction. 
If a spectrum is provided via `bg.spectrum`, this #' argument only works on the `bg.spectrum`. #' #' **Note:** Background subtraction is applied prior to channel binning #' #' @param bin.rows [integer] (*with default*): #' allow summing-up wavelength channels (horizontal binning), #' e.g. `bin.rows = 2` two channels are summed up. #' Binning is applied after the background subtraction. #' #' @param bin.cols [integer] (*with default*): #' allow summing-up channel counts (vertical binning) for plotting, #' e.g. `bin.cols = 2` two channels are summed up. #' Binning is applied after the background subtraction. #' #' @param norm [character] (*optional*): Normalise data to the maximum (`norm = "max"`) or #' minimum (`norm = "min"`) count values. The normalisation is applied after the binning. #' #' @param rug [logical] (*with default*): #' enables or disables colour rug. Currently only implemented for plot #' type `multiple.lines` and `single` #' #' @param limit_counts [numeric] (*optional*): #' value to limit all count values to this value, i.e. all count values above #' this threshold will be replaced by this threshold. This is helpful #' especially in case of TL-spectra. #' #' @param xaxis.energy [logical] (*with default*): enables or disables energy instead of #' wavelength axis. For the conversion the function [convert_Wavelength2Energy] is used. #' #' **Note:** This option means not only simnply redrawing the axis, #' instead the spectrum in terms of intensity is recalculated, s. details. #' #' @param legend.text [character] (*with default*): #' possiblity to provide own legend text. This argument is only considered for #' plot types providing a legend, e.g. `plot.type="transect"` #' #' @param ... further arguments and graphical parameters that will be passed #' to the `plot` function. #' #' @return Returns a plot. #' #' @note Not all additional arguments (`...`) will be passed similarly! 
#' #' @section Function version: 0.6.2 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) #' #' @seealso [RLum.Data.Spectrum-class], [convert_Wavelength2Energy], [plot], [plot_RLum], [persp], #' [plotly::plot_ly], [contour] #' #' @keywords aplot #' #' @examples #' #' ##load example data #' data(ExampleData.XSYG, envir = environment()) #' #' ##(1)plot simple spectrum (2D) - contour #' plot_RLum.Data.Spectrum(TL.Spectrum, #' plot.type="contour", #' xlim = c(310,750), #' ylim = c(0,300), #' bin.rows=10, #' bin.cols = 1) #' #' ##(2) plot spectrum (3D) #' plot_RLum.Data.Spectrum( #' TL.Spectrum, #' plot.type="persp", #' xlim = c(310,750), #' ylim = c(0,100), #' bin.rows=10, #' bin.cols = 1) #' #'##(3) plot spectrum on energy axis #'##please note the background subtraction #'plot_RLum.Data.Spectrum(TL.Spectrum, #' plot.type="persp", #' ylim = c(0,200), #' bin.rows=10, #' bg.channels = 10, #' bin.cols = 1, #' xaxis.energy = TRUE) #' #' ##(4) plot multiple lines (2D) - multiple.lines (with ylim) #' plot_RLum.Data.Spectrum( #' TL.Spectrum, #' plot.type="multiple.lines", #' xlim = c(310,750), #' ylim = c(0,100), #' bin.rows=10, #' bin.cols = 1) #' #' \dontrun{ #' ##(4) interactive plot using the package plotly ("surface") #' plot_RLum.Data.Spectrum(TL.Spectrum, plot.type="interactive", #' xlim = c(310,750), ylim = c(0,300), bin.rows=10, #' bin.cols = 1) #' #' ##(5) interactive plot using the package plotly ("contour") #' plot_RLum.Data.Spectrum(TL.Spectrum, plot.type="interactive", #' xlim = c(310,750), ylim = c(0,300), bin.rows=10, #' bin.cols = 1, #' type = "contour", #' showscale = TRUE) #' #' ##(6) interactive plot using the package plotly ("heatmap") #' plot_RLum.Data.Spectrum(TL.Spectrum, plot.type="interactive", #' xlim = c(310,750), ylim = c(0,300), bin.rows=10, #' bin.cols = 1, #' type = "heatmap", #' showscale = TRUE) #' #' ##(7) alternative using the package fields #' fields::image.plot(get_RLum(TL.Spectrum)) 
#' contour(get_RLum(TL.Spectrum), add = TRUE) #' #' } #' #' @md #' @export plot_RLum.Data.Spectrum <- function( object, par.local = TRUE, plot.type = "contour", optical.wavelength.colours = TRUE, bg.spectrum = NULL, bg.channels = NULL, bin.rows = 1, bin.cols = 1, norm = NULL, rug = TRUE, limit_counts = NULL, xaxis.energy = FALSE, legend.text, ... ){ # Integrity check ----------------------------------------------------------- ##check if object is of class RLum.Data.Spectrum if(class(object)[1] != "RLum.Data.Spectrum"){ if(class(object)[1] == "matrix"){ if(is.null(colnames(object))){ colnames(object) <- 1:ncol(object) } if(is.null(rownames(object))){ rownames(object) <- 1:nrow(object) } object <- set_RLum(class = "RLum.Data.Spectrum", data = object) message("[plot_RLum.Data.Spectrum()] Input has been converted to a RLum.Data.Spectrum object using set_RLum()") }else{ stop("[plot_RLum.Data.Spectrum()] Input object neither of class 'RLum.Data.Spectrum' nor 'matrix'.", call. = FALSE) } } ##XSYG ##check for curveDescripter if("curveDescripter" %in% names(object@info) == TRUE){ temp.lab <- strsplit(object@info$curveDescripter, split = ";")[[1]] xlab <- if(xaxis.energy == FALSE){ temp.lab[2]}else{"Energy [eV]"} ylab <- temp.lab[1] zlab <- temp.lab[3] }else{ xlab <- if(xaxis.energy == FALSE){ "Row values [a.u.]"}else{"Energy [eV]"} ylab <- "Column values [a.u.]" zlab <- "Cell values [a.u.]" } # Do energy axis conversion ------------------------------------------------------------------- if (xaxis.energy){ ##conversion object <- convert_Wavelength2Energy(object, digits = 5) ##modify row order (otherwise subsequent functions, like persp, have a problem) object@data[] <- object@data[order(as.numeric(rownames(object@data))),] rownames(object@data) <- sort(as.numeric(rownames(object@data))) } ##deal with addition arguments extraArgs <- list(...) 
main <- if("main" %in% names(extraArgs)) {extraArgs$main} else {"RLum.Data.Spectrum"} zlab <- if("zlab" %in% names(extraArgs)) {extraArgs$zlab} else {ifelse(plot.type == "multiple.lines", ylab, zlab)} xlab <- if("xlab" %in% names(extraArgs)) {extraArgs$xlab} else {xlab} ylab <- if("ylab" %in% names(extraArgs)) {extraArgs$ylab} else {ifelse(plot.type == "single" | plot.type == "multiple.lines", "Luminescence [cts/channel]", ylab)} xlim <- if("xlim" %in% names(extraArgs)) {extraArgs$xlim} else {c(min(as.numeric(rownames(object@data))), max(as.numeric(rownames(object@data))))} ylim <- if("ylim" %in% names(extraArgs)) {extraArgs$ylim} else {c(min(as.numeric(colnames(object@data))), max(as.numeric(colnames(object@data))))} #for zlim see below mtext <- if("mtext" %in% names(extraArgs)) {extraArgs$mtext} else {""} cex <- if("cex" %in% names(extraArgs)) {extraArgs$cex} else {1} phi <- if("phi" %in% names(extraArgs)) {extraArgs$phi} else {15} theta <- if("theta" %in% names(extraArgs)) {extraArgs$theta} else {-30} r <- if("r" %in% names(extraArgs)) {extraArgs$r} else {10} shade <- if("shade" %in% names(extraArgs)) {extraArgs$shade} else {0.4} expand <- if("expand" %in% names(extraArgs)) {extraArgs$expand} else {0.6} border <- if("border" %in% names(extraArgs)) {extraArgs$border} else {NULL} box <- if("box" %in% names(extraArgs)) {extraArgs$box} else {TRUE} ticktype <- if("ticktype" %in% names(extraArgs)) {extraArgs$ticktype} else {"detailed"} log<- if("log" %in% names(extraArgs)) {extraArgs$log} else {""} type<- if("type" %in% names(extraArgs)) {extraArgs$type} else { if (plot.type == "interactive") { "surface" } else{ "l" } } pch<- if("pch" %in% names(extraArgs)) {extraArgs$pch} else {1} lwd<- if("lwd" %in% names(extraArgs)) {extraArgs$lwd} else {1} bty <- if("bty" %in% names(extraArgs)) {extraArgs$bty} else {NULL} sub<- if("sub" %in% names(extraArgs)) {extraArgs$sub} else {""} #for plotly::plot_ly showscale<- if("showscale" %in% names(extraArgs)) {extraArgs$showscale} else 
{FALSE} # prepare values for plot --------------------------------------------------- ##copy data temp.xyz <- object@data ##check for NULL column names if(is.null(colnames(temp.xyz))){ colnames(temp.xyz) <- 1:ncol(temp.xyz) } if(is.null(rownames(temp.xyz))){ rownames(temp.xyz) <- 1:nrow(temp.xyz) } ##check for the case of a single column matrix if(ncol(temp.xyz)>1){ ##reduce for xlim temp.xyz <- temp.xyz[as.numeric(rownames(temp.xyz)) >= xlim[1] & as.numeric(rownames(temp.xyz)) <= xlim[2],] ##reduce for ylim temp.xyz <- temp.xyz[, as.numeric(colnames(temp.xyz)) >= ylim[1] & as.numeric(colnames(temp.xyz)) <= ylim[2]] } ## wavelength x <- as.numeric(rownames(temp.xyz)) ## time/temp y <- as.numeric(colnames(temp.xyz)) # Background spectrum ------------------------------------------------------------------------- if(!is.null(bg.spectrum)){ if(class(bg.spectrum)[1] == "RLum.Data.Spectrum" || class(bg.spectrum)[1] == "matrix"){ ##case RLum if(class(bg.spectrum)[1] == "RLum.Data.Spectrum") bg.xyz <- bg.spectrum@data ##case matrix if(class(bg.spectrum)[1] == "matrix") bg.xyz <- bg.spectrum ##take care of channel settings, otherwise set bg.channels if(is.null(bg.channels)) bg.channels <- c(1:ncol(bg.xyz)) ##set rownames if(is.null(rownames(bg.xyz))) rownames(bg.xyz) <- 1:nrow(bg.xyz) ##convert to energy scale if needed if(xaxis.energy){ #conversion bg.xyz <- convert_Wavelength2Energy(cbind(as.numeric(rownames(bg.xyz)), bg.xyz), digits = 5) rownames(bg.xyz) <- bg.xyz[,1] bg.xyz <- bg.xyz[,-1, drop = FALSE] ##modify row order (otherwise subsequent functions, like persp, have a problem) bg.xyz <- bg.xyz[order(as.numeric(rownames(bg.xyz))),,drop = FALSE] rownames(bg.xyz) <- sort(as.numeric(rownames(bg.xyz))) } ##reduce for xlim bg.xyz <- bg.xyz[as.numeric(rownames(bg.xyz)) >= xlim[1] & as.numeric(rownames(bg.xyz)) <= xlim[2],,drop = FALSE] }else{ stop("[plot_RLum.Data.Spectrum()] Input for 'bg.spectrum' not supported, please check manual!", call. 
= FALSE) } } # Background subtraction --------------------------------------------------- if(!is.null(bg.channels)){ ##set background object if not available if(is.null(bg.spectrum)) bg.xyz <- temp.xyz if(max(bg.channels) > ncol(bg.xyz) || bg.channels <= 0){ ##correct the mess bg.channels <- sort(unique(bg.channels)) bg.channels[bg.channels <= 0] <- 1 bg.channels[bg.channels >= ncol(bg.xyz)] <- ncol(bg.xyz) warning( paste0( "[plot_RLum.Data.Spectrum()] 'bg.channels' out of range, corrected to: ", min(bg.channels), ":", max(bg.channels) ), call. = FALSE) } if(length(bg.channels) > 1){ temp.bg.signal <- rowMeans(bg.xyz[,bg.channels]) temp.xyz <- temp.xyz - temp.bg.signal }else{ temp.xyz <- temp.xyz - bg.xyz[,bg.channels] } ##set values < 0 to 0 temp.xyz[temp.xyz < 0] <- 0 ##check worst case if(sum(temp.xyz) == 0){ message("[plot_RLum.Data.Spectrum()] After background subtraction all counts < 0. Nothing plotted, NULL returned!") return(NULL) } } # Channel binning --------------------------------------------------------- ##rewrite arguments; makes things easier bin.cols <- bin.cols[1] bin.rows <- bin.rows[1] ##fatal check (not needed anymore, but never change running code) if(bin.cols < 1 | bin.rows < 1) stop("[plot_RLum.Data.Spectrum()] 'bin.cols' and 'bin.rows' have to be > 1!", call. = FALSE) if(bin.rows > 1){ temp.xyz <- .matrix_binning(temp.xyz, bin_size = bin.rows, bin_col = FALSE, names = "mean") x <- as.numeric(rownames(temp.xyz)) ##remove last channel (this is the channel that included less data) if(length(x)%%bin.rows != 0){ ##return warning warning( paste0("[plot_RLum.Data.Spectrum()] ",length(x)%%bin.rows, " channel(s) removed due to row (wavelength) binning."), call. 
= FALSE) ##do it temp.xyz <- temp.xyz[-length(x),] x <- x[-length(x)] } } if(bin.cols > 1){ temp.xyz <- .matrix_binning(temp.xyz, bin_size = bin.cols, bin_col = TRUE, names = "groups") y <- as.numeric(colnames(temp.xyz)) ##remove last channel (this is the channel that included less data) if(length(y)%%bin.cols != 0){ ##return warning warning( paste0("[plot_RLum.Data.Spectrum()] ",length(y)%%bin.cols, " channel(s) removed due to column (frame) binning."), call. = FALSE) ##do it temp.xyz <- temp.xyz[,-length(y)] y <- y[-length(y)] } } ##limit z-values if requested, this idea was taken from the Diss. by Thomas Schilles, 2002 if(!is.null(limit_counts)){ temp.xyz[temp.xyz[]>limit_counts] <- limit_counts } # Normalise if wanted ------------------------------------------------------------------------- if(!is.null(norm)){ if(norm == "min") temp.xyz <- temp.xyz/min(temp.xyz) if(norm == "max") temp.xyz <- temp.xyz/max(temp.xyz) } ##check for zlim zlim <- if("zlim" %in% names(extraArgs)) {extraArgs$zlim} else {range(temp.xyz)} # set color values -------------------------------------------------------- if("col" %in% names(extraArgs) == FALSE | plot.type == "single" | plot.type == "multiple.lines"){ if(optical.wavelength.colours == TRUE | (rug == TRUE & (plot.type != "persp" & plot.type != "interactive"))){ ##make different colour palette for energy values if (xaxis.energy) { col.violet <- c(2.76, ifelse(max(xlim) <= 4.13, max(xlim), 4.13)) col.blue <- c(2.52, 2.76) col.green <- c(2.18, 2.52) col.yellow <- c(2.10, 2.18) col.orange <- c(2.00, 2.10) col.red <- c(1.57, 2.00) col.infrared <- c(1.55, ifelse(min(xlim) >= 1.55, min(xlim), 1.57)) #set colour palette col <- unlist(sapply(1:length(x), function(i){ if(x[i] >= col.violet[1] & x[i] < col.violet[2]){"#EE82EE"} else if(x[i] >= col.blue[1] & x[i] < col.blue[2]){"#0000FF"} else if(x[i] >= col.green[1] & x[i] < col.green[2]){"#00FF00"} else if(x[i] >= col.yellow[1] & x[i] < col.yellow[2]){"#FFFF00"} else if(x[i] >= col.orange[1] 
& x[i] < col.orange[2]){"#FFA500"} else if(x[i] >= col.red[1] & x[i] < col.red[2]){"#FF0000"} else if(x[i] <= col.infrared[2]){"#BEBEBE"} })) }else{ ##wavelength colours for wavelength axis col.violet <- c(ifelse(min(xlim) <= 300, min(xlim), 300),450) col.blue <- c(450,495) col.green <- c(495,570) col.yellow <- c(570,590) col.orange <- c(590,620) col.red <- c(620,790) col.infrared <- c(790, ifelse(max(xlim) >= 800, max(xlim), 800)) #set colour palette col <- unlist(sapply(1:length(x), function(i){ if(x[i] >= col.violet[1] & x[i] < col.violet[2]){"#EE82EE"} else if(x[i] >= col.blue[1] & x[i] < col.blue[2]){"#0000FF"} else if(x[i] >= col.green[1] & x[i] < col.green[2]){"#00FF00"} else if(x[i] >= col.yellow[1] & x[i] < col.yellow[2]){"#FFFF00"} else if(x[i] >= col.orange[1] & x[i] < col.orange[2]){"#FFA500"} else if(x[i] >= col.red[1] & x[i] < col.red[2]){"#FF0000"} else if(x[i] >= col.infrared[1]){"#BEBEBE"} })) } ##find unique colours col.unique <- unique(col) ##if only one colour value, then skip gradient calculation as it causes ##an error if(length(col.unique) > 1){ ##set colour function for replacement colfunc <- colorRampPalette(col.unique) ##get index for colour values to be cut from the current palette col.unique.index <- vapply(col.unique, function(i) { max(which(col == i)) }, numeric(1)) ##remove last index (no colour gradient needed), for energy axis use the first value col.unique.index <- col.unique.index[-length(col.unique.index)] ##set borders for colour gradient recalculation col.unique.index.min <- col.unique.index - (50/bin.rows) col.unique.index.max <- col.unique.index + (50/bin.rows) ##set negative values to the lowest index col.unique.index.min[col.unique.index.min<=0] <- 1 ##build up new index sequence (might be better) col.gradient.index <- as.vector(unlist(( sapply(1:length(col.unique.index.min), function(j){ seq(col.unique.index.min[j],col.unique.index.max[j], by = 1) })))) ##generate colour ramp and replace values col.new <- 
colfunc(length(col.gradient.index)) col[col.gradient.index] <- col.new ##correct for overcharged colour values (causes zebra colour pattern) if (diff(c(length(col), nrow(temp.xyz))) < 0) { col <- col[1:c(length(col) - diff(c(length(col), nrow(temp.xyz))))] }else if(diff(c(length(col), nrow(temp.xyz))) > 0){ col <- col[1:c(length(col) + diff(c(length(col), nrow(temp.xyz))))] } } }else{ col <- "black" } }else{ col <- extraArgs$col } # Do log scaling if needed ------------------------------------------------- ##x if(grepl("x", log)==TRUE){x <- log10(x)} ##y if(grepl("y", log)==TRUE){y <- log10(y)} ##z if(grepl("z", log)==TRUE){temp.xyz <- log10(temp.xyz)} # PLOT -------------------------------------------------------------------- ##par setting for possible combination with plot method for RLum.Analysis objects if(par.local == TRUE){par(mfrow=c(1,1), cex = cex)} ##rest plot type for 1 column matrix if(ncol(temp.xyz) == 1 && plot.type != "single"){ plot.type <- "single" warning("[plot_RLum.Data.Spectrum()] Single column matrix: plot.type has been automatically reset to 'single'", call. = FALSE) } ##do not let old code break down ... if(plot.type == "persp3d"){ plot.type <- "interactive" warning("[plot_RLum.Data.Spectrum()] 'plot.type' has been automatically reset to interactive!", call. 
= FALSE) } if(plot.type == "persp" && ncol(temp.xyz) > 1){ ## ==========================================================================# ##perspective plot ## ==========================================================================# persp(x, y, temp.xyz, shade = shade, phi = phi, theta = theta, xlab = xlab, ylab = ylab, zlab = zlab, zlim = zlim, scale = TRUE, col = col[1:(length(x)-1)], ##needed due to recycling of the colours main = main, expand = expand, border = border, box = box, r = r, ticktype = ticktype) ##plot additional mtext mtext(mtext, side = 3, cex = cex*0.8) }else if(plot.type == "interactive" && ncol(temp.xyz) > 1) { ## ==========================================================================# ##interactive plot and former persp3d ## ==========================================================================# ## Plot: interactive ---- ##http://r-pkgs.had.co.nz/description.html if (!requireNamespace("plotly", quietly = TRUE)) { stop("[plot_RLum.Data.Spectrum()] Package 'plotly' needed for this plot type. Please install it.", call. 
= FALSE) } ##set up plot p <- plotly::plot_ly( z = temp.xyz, x = as.numeric(colnames(temp.xyz)), y = as.numeric(rownames(temp.xyz)), type = type, showscale = showscale #colors = col[1:(length(col)-1)], ) ##change graphical parameters p <- plotly::layout( p = p, scene = list( xaxis = list( title = ylab ), yaxis = list( title = xlab ), zaxis = list(title = zlab) ), title = main ) print(p) on.exit(return(p)) }else if(plot.type == "contour" && ncol(temp.xyz) > 1) { ## ==========================================================================# ##contour plot ## ==========================================================================# contour(x,y,temp.xyz, xlab = xlab, ylab = ylab, main = main, col = "black" ) ##plot additional mtext mtext(mtext, side = 3, cex = cex*0.8) } else if(plot.type == "single") { ## ==========================================================================# ## single plot ## ==========================================================================# col.rug <- col col<- if("col" %in% names(extraArgs)) {extraArgs$col} else {"black"} for(i in 1:length(y)){ if("zlim" %in% names(extraArgs) == FALSE){zlim <- range(temp.xyz[,i])} plot(x, temp.xyz[,i], xlab = xlab, ylab = ylab, main = main, xlim = xlim, ylim = zlim, col = col, sub = paste( "(frame ",i, " | ", ifelse(i==1, paste("0.0 :", round(y[i], digits = 1)), paste(round(y[i-1], digits = 1),":", round(y[i], digits =1))),")", sep = ""), type = type, pch = pch) if(rug == TRUE){ ##rug als continous polygons for(i in 1:length(x)){ polygon(x = c(x[i],x[i+1],x[i+1],x[i]), y = c(min(zlim),min(zlim), par("usr")[3], par("usr")[3]), border = col.rug[i], col = col.rug[i]) } } } ##plot additional mtext mtext(mtext, side = 3, cex = cex*0.8) }else if(plot.type == "multiple.lines" && ncol(temp.xyz) > 1) { ## ========================================================================# ## multiple.lines plot ## ========================================================================# col.rug <- col col<- if("col" 
%in% names(extraArgs)) {extraArgs$col} else {"black"} ##change graphic settings par.default <- par()[c("mfrow", "mar", "xpd")] par(mfrow = c(1,1), mar=c(5.1, 4.1, 4.1, 8.1), xpd = TRUE) ##grep zlim if("zlim" %in% names(extraArgs) == FALSE){zlim <- range(temp.xyz)} ##open plot area plot(NA, NA, xlab = xlab, ylab = ylab, main = main, xlim = xlim, ylim = zlim, sub = sub, bty = bty) if(rug == TRUE){ ##rug als continous polygons for(i in 1:length(x)){ polygon(x = c(x[i],x[i+1],x[i+1],x[i]), y = c(min(zlim),min(zlim), par("usr")[3], par("usr")[3]), border = col.rug[i], col = col.rug[i]) } } ##add lines for(i in 1:length(y)){ lines(x, temp.xyz[,i], lty = i, lwd = lwd, type = type, col = col) } ##for missing values - legend.text if(missing(legend.text)){ legend.text <- as.character(paste(round(y,digits=1), zlab)) } ##legend legend(x = par()$usr[2], y = par()$usr[4], legend = legend.text, lwd= lwd, lty = 1:length(y), bty = "n", cex = 0.6*cex) ##plot additional mtext mtext(mtext, side = 3, cex = cex*0.8) ##reset graphic settings par(par.default) rm(par.default) }else if(plot.type == "transect" && ncol(temp.xyz) > 1) { ## ========================================================================# ## transect plot ## ========================================================================# ##sum up rows (column sum) temp.xyz <- colSums(temp.xyz) ##consider differences within the arguments #check for zlim zlim <- if("zlim" %in% names(extraArgs)) {extraArgs$zlim} else {c(0,max(temp.xyz))} #check for zlim zlab <- if("ylab" %in% names(extraArgs)) {extraArgs$ylab} else {paste("Counts [1/summed channels]")} plot(y, temp.xyz, xlab = ylab, ylab = zlab, main = main, xlim = ylim, ylim = zlim, col = col, sub = paste("(channel range: ", min(xlim), " : ", max(xlim), ")", sep=""), type = type, pch = pch) ##plot additional mtext mtext(mtext, side = 3, cex = cex*0.8) }else{ stop("[plot_RLum.Data.Spectrum()] Unknown plot type.", call. 
= FALSE) } } Luminescence/R/plot_KDE.R0000644000176200001440000012133713540751607014670 0ustar liggesusers#' Plot kernel density estimate with statistics #' #' Plot a kernel density estimate of measurement values in combination with the #' actual values and associated error bars in ascending order. If enabled, the #' boxplot will show the usual distribution parameters (median as #' bold line, box delimited by the first and third quartile, whiskers defined #' by the extremes and outliers shown as points) and also the mean and #' standard deviation as pale bold line and pale polygon, respectively. #' #' The function allows passing several plot arguments, such as `main`, #' `xlab`, `cex`. However, as the figure is an overlay of two #' separate plots, `ylim` must be specified in the order: c(ymin_axis1, #' ymax_axis1, ymin_axis2, ymax_axis2) when using the cumulative values plot #' option. See examples for some further explanations. For details on the #' calculation of the bin-width (parameter `bw`) see #' [density]. #' #' #' A statistic summary, i.e. a collection of statistic measures of #' centrality and dispersion (and further measures) can be added by specifying #' one or more of the following keywords: #' - `"n"` (number of samples) #' - `"mean"` (mean De value) #' - `"median"` (median of the De values) #' - `"sd.rel"` (relative standard deviation in percent) #' - `"sd.abs"` (absolute standard deviation) #' - `"se.rel"` (relative standard error) #' - `"se.abs"` (absolute standard error) #' - `"in.2s"` (percent of samples in 2-sigma range) #' - `"kurtosis"` (kurtosis) #' - `"skewness"` (skewness) #' #' #' **Note** that the input data for the statistic summary is sent to the function #' `calc_Statistics()` depending on the log-option for the z-scale. If #' `"log.z = TRUE"`, the summary is based on the logarithms of the input #' data. If `"log.z = FALSE"` the linearly scaled data is used. 
#' #' **Note** as well, that `"calc_Statistics()"` calculates these statistic #' measures in three different ways: `unweighted`, `weighted` and #' `MCM-based` (i.e., based on Monte Carlo Methods). By default, the #' MCM-based version is used. If you wish to use another method, indicate this #' with the appropriate keyword using the argument `summary.method`. #' #' #' @param data [data.frame] or [RLum.Results-class] object (**required**): #' for `data.frame`: two columns: De (`values[,1]`) and De error (`values[,2]`). #' For plotting multiple data sets, these must be provided as #' `list` (e.g. `list(dataset1, dataset2)`). #' #' @param na.rm [logical] (*with default*): #' exclude NA values from the data set prior to any further operation. #' #' @param values.cumulative [logical] (*with default*): #' show cumulative individual data. #' #' @param order [logical]: #' Order data in ascending order. #' #' @param boxplot [logical] (*with default*): #' optionally show a boxplot (depicting median as thick central line, #' first and third quartile as box limits, whiskers denoting +/- 1.5 #' interquartile ranges and dots further outliers). #' #' @param rug [logical] (*with default*): #' optionally add rug. #' #' @param summary [character] (*optional*): #' add statistic measures of centrality and dispersion to the plot. Can be one #' or more of several keywords. See details for available keywords. #' #' @param summary.pos [numeric] or [character] (*with default*): #' optional position coordinates or keyword (e.g. `"topright"`) #' for the statistical summary. Alternatively, the keyword `"sub"` may be #' specified to place the summary below the plot header. However, this latter #' option is only possible if `mtext` is not used. In case of coordinate #' specification, y-coordinate refers to the right y-axis. #' #' @param summary.method [character] (*with default*): #' keyword indicating the method used to calculate the statistic summary. 
#' One out of `"unweighted"`, `"weighted"` and `"MCM"`. #' See [calc_Statistics] for details. #' #' @param bw [character] (*with default*): #' bin-width, choose a numeric value for manual setting. #' #' @param output [logical]: #' Optional output of numerical plot parameters. These can be useful to #' reproduce similar plots. Default is `TRUE`. #' #' @param ... further arguments and graphical parameters passed to [plot]. #' #' @note #' The plot output is not a 'probability density' plot (cf. the discussion #' of Berger and Galbraith in Ancient TL; see references)! #' #' @section Function version: 3.6.0 #' #' @author #' Michael Dietze, GFZ Potsdam (Germany)\cr #' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS-Université Bordeaux Montaigne, France #' #' @seealso [density], [plot] #' #' @examples #' #' ## read example data set #' data(ExampleData.DeValues, envir = environment()) #' ExampleData.DeValues <- #' Second2Gray(ExampleData.DeValues$BT998, c(0.0438,0.0019)) #' #' ## create plot straightforward #' plot_KDE(data = ExampleData.DeValues) #' #' ## create plot with logarithmic x-axis #' plot_KDE(data = ExampleData.DeValues, #' log = "x") #' #' ## create plot with user-defined labels and axes limits #' plot_KDE(data = ExampleData.DeValues, #' main = "Dose distribution", #' xlab = "Dose (s)", #' ylab = c("KDE estimate", "Cumulative dose value"), #' xlim = c(100, 250), #' ylim = c(0, 0.08, 0, 30)) #' #' ## create plot with boxplot option #' plot_KDE(data = ExampleData.DeValues, #' boxplot = TRUE) #' #' ## create plot with statistical summary below header #' plot_KDE(data = ExampleData.DeValues, #' summary = c("n", "median", "skewness", "in.2s")) #' #' ## create plot with statistical summary as legend #' plot_KDE(data = ExampleData.DeValues, #' summary = c("n", "mean", "sd.rel", "se.abs"), #' summary.pos = "topleft") #' #' ## split data set into sub-groups, one is manipulated, and merge again #' data.1 <- ExampleData.DeValues[1:15,] #' data.2 <- 
ExampleData.DeValues[16:25,] * 1.3 #' data.3 <- list(data.1, data.2) #' #' ## create plot with two subsets straightforward #' plot_KDE(data = data.3) #' #' ## create plot with two subsets and summary legend at user coordinates #' plot_KDE(data = data.3, #' summary = c("n", "median", "skewness"), #' summary.pos = c(110, 0.07), #' col = c("blue", "orange")) #' #' ## example of how to use the numerical output of the function #' ## return plot output to draw a thicker KDE line #' KDE_out <- plot_KDE(data = ExampleData.DeValues, #' output = TRUE) #' #' @md #' @export plot_KDE <- function( data, na.rm = TRUE, values.cumulative = TRUE, order = TRUE, boxplot = TRUE, rug = TRUE, summary, summary.pos, summary.method = "MCM", bw = "nrd0", output = TRUE, ... ) { ## check data and parameter consistency ------------------------------------- ## account for depreciated arguments if("centrality" %in% names(list(...))) { boxplot <- TRUE warning(paste("[plot_KDE()] Argument 'centrality' no longer supported. ", "Replaced by 'boxplot = TRUE'.")) } if("dispersion" %in% names(list(...))) { boxplot <- TRUE warning(paste("[plot_KDE()] Argument 'dispersion' no longer supported. ", "Replaced by 'boxplot = TRUE'.")) } if("polygon.col" %in% names(list(...))) { boxplot <- TRUE warning(paste("[plot_KDE()] Argument 'polygon.col' no longer supported. ", "Replaced by 'boxplot = TRUE'.")) } if("weights" %in% names(list(...))) { warning(paste("[plot_KDE()] Argument 'weights' no longer supported. ", "Weights are omitted.")) } ## Homogenise input data format if(is(data, "list") == FALSE) { data <- list(data) } ## check/adjust input data structure for(i in 1:length(data)) { if(is(data[[i]], "RLum.Results") == FALSE & is(data[[i]], "data.frame") == FALSE & is.numeric(data[[i]]) == FALSE) { stop(paste("[plot_KDE()] Input data format is neither", "'data.frame', 'RLum.Results' nor 'numeric'"), call. 
= FALSE) } else { ##extract RLum.Results if(is(data[[i]], "RLum.Results") == TRUE) { data[[i]] <- get_RLum(data[[i]], "data")[,1:2] } ##make sure we only take the first two columns data[[i]] <- data[[i]][,1:2] ##account for very short datasets if(length(data[[i]]) < 2) { data[[i]] <- cbind(data[[i]], rep(NA, length(data[[i]]))) } } ##check for Inf values and remove them if need if(any(is.infinite(unlist(data[[i]])))){ Inf_id <- which(is.infinite(unlist(data[[i]]))[1:nrow(data[[i]])/ncol(data[[i]])]) warning(paste("[plot_KDE()] Inf values removed in row(s):", paste(Inf_id, collapse = ", "), "in data.frame", i), call. = FALSE) data[[i]] <- data[[i]][-Inf_id,] rm(Inf_id) ##check if empty if(nrow(data[[i]]) == 0){ data[i] <- NULL } } } ##check if list is empty if(length(data) == 0) stop("[plot_KDE()] Your input is empty, intentionally or maybe after Inf removal? Nothing plotted!", call. = FALSE) ## check/set function parameters if(missing(summary) == TRUE) { summary <- "" } if(missing(summary.pos) == TRUE) { summary.pos <- "sub" } ## set mtext output if("mtext" %in% names(list(...))) { mtext <- list(...)$mtext } else { mtext <- "" } ## check/set layout definitions if("layout" %in% names(list(...))) { layout <- get_Layout(layout = list(...)$layout) } else { layout <- get_Layout(layout = "default") } ## data preparation steps --------------------------------------------------- ## optionally, count and exclude NA values and print result if(na.rm == TRUE) { for(i in 1:length(data)) { n.NA <- sum(is.na(data[[i]][,1])) if(n.NA == 1) { message(paste("1 NA value excluded from data set", i, ".")) } else if(n.NA > 1) { message(paste(n.NA, "NA values excluded from data set", i, ".")) } data[[i]] <- na.exclude(data[[i]]) } } ## optionally, order data set if(order == TRUE) { for(i in 1:length(data)) { data[[i]] <- data[[i]][order(data[[i]][,1]),] } } ## calculate and paste statistical summary De.stats <- matrix(nrow = length(data), ncol = 12) colnames(De.stats) <- c("n", "mean", 
"median", "kde.max", "sd.abs", "sd.rel", "se.abs", "se.rel", "q.25", "q.75", "skewness", "kurtosis") De.density <- list(NA) ## loop through all data sets for(i in 1:length(data)) { statistics <- calc_Statistics(data[[i]], na.rm = na.rm)[[summary.method]] De.stats[i,1] <- statistics$n De.stats[i,2] <- statistics$mean De.stats[i,3] <- statistics$median De.stats[i,5] <- statistics$sd.abs De.stats[i,6] <- statistics$sd.rel De.stats[i,7] <- statistics$se.abs De.stats[i,8] <- statistics$se.rel De.stats[i,9] <- quantile(data[[i]][,1], 0.25) De.stats[i,10] <- quantile(data[[i]][,1], 0.75) De.stats[i,11] <- statistics$skewness De.stats[i,12] <- statistics$kurtosis if(nrow(data[[i]]) >= 2){ De.density[[length(De.density) + 1]] <- density(data[[i]][,1], kernel = "gaussian", bw = bw) }else{ De.density[[length(De.density) + 1]] <- NA warning("[plot_KDE()] Less than 2 points provided, no density plotted.", call. = FALSE) } } ## remove dummy list element De.density[[1]] <- NULL ## create global data set De.global <- data[[1]][,1] De.error.global <- data[[1]][,2] De.density.range <- matrix(nrow = length(data), ncol = 4) for(i in 1:length(data)) { ##global De and De.error vector De.global <- c(De.global, data[[i]][,1]) De.error.global <- c(De.error.global, data[[i]][,2]) ## density range if(!all(is.na(De.density[[i]]))){ De.density.range[i,1] <- min(De.density[[i]]$x) De.density.range[i,2] <- max(De.density[[i]]$x) De.density.range[i,3] <- min(De.density[[i]]$y) De.density.range[i,4] <- max(De.density[[i]]$y) ## position of maximum KDE value De.stats[i,4] <- De.density[[i]]$x[which.max(De.density[[i]]$y)] }else{ De.density.range[i,1:4] <- NA De.stats[i,4] <- NA } } ## Get global range of densities De.density.range <- c(min(De.density.range[,1]), max(De.density.range[,2]), min(De.density.range[,3]), max(De.density.range[,4])) label.text = list(NA) if(summary.pos[1] != "sub") { n.rows <- length(summary) for(i in 1:length(data)) { stops <- paste(rep("\n", (i - 1) * n.rows), collapse = 
"") summary.text <- character(0) for(j in 1:length(summary)) { summary.text <- c(summary.text, paste( "", ifelse("n" %in% summary[j] == TRUE, paste("n = ", De.stats[i,1], "\n", sep = ""), ""), ifelse("mean" %in% summary[j] == TRUE, paste("mean = ", round(De.stats[i,2], 2), "\n", sep = ""), ""), ifelse("median" %in% summary[j] == TRUE, paste("median = ", round(De.stats[i,3], 2), "\n", sep = ""), ""), ifelse("kde.max" %in% summary[j] == TRUE, paste("kdemax = ", round(De.stats[i,4], 2), " \n ", sep = ""), ""), ifelse("sd.abs" %in% summary[j] == TRUE, paste("sd = ", round(De.stats[i,5], 2), "\n", sep = ""), ""), ifelse("sd.rel" %in% summary[j] == TRUE, paste("rel. sd = ", round(De.stats[i,6], 2), " %", "\n", sep = ""), ""), ifelse("se.abs" %in% summary[j] == TRUE, paste("se = ", round(De.stats[i,7], 2), "\n", sep = ""), ""), ifelse("se.rel" %in% summary[j] == TRUE, paste("rel. se = ", round(De.stats[i,8], 2), " %", "\n", sep = ""), ""), ifelse("skewness" %in% summary[j] == TRUE, paste("skewness = ", round(De.stats[i,11], 2), "\n", sep = ""), ""), ifelse("kurtosis" %in% summary[j] == TRUE, paste("kurtosis = ", round(De.stats[i,12], 2), "\n", sep = ""), ""), ifelse("in.2s" %in% summary[j] == TRUE, paste("in 2 sigma = ", round(sum(data[[i]][,1] > (De.stats[i,2] - 2 * De.stats[i,5]) & data[[i]][,1] < (De.stats[i,2] + 2 * De.stats[i,5])) / nrow(data[[i]]) * 100 , 1), " %", sep = ""), ""), sep = "")) } summary.text <- paste(summary.text, collapse = "") label.text[[length(label.text) + 1]] <- paste(stops, summary.text, stops, sep = "") } } else { for(i in 1:length(data)) { summary.text <- character(0) for(j in 1:length(summary)) { summary.text <- c(summary.text, ifelse("n" %in% summary[j] == TRUE, paste("n = ", De.stats[i,1], " | ", sep = ""), ""), ifelse("mean" %in% summary[j] == TRUE, paste("mean = ", round(De.stats[i,2], 2), " | ", sep = ""), ""), ifelse("median" %in% summary[j] == TRUE, paste("median = ", round(De.stats[i,3], 2), " | ", sep = ""), ""), ifelse("kde.max" 
%in% summary[j] == TRUE, paste("kdemax = ", round(De.stats[i,4], 2), " | ", sep = ""), ""), ifelse("sd.rel" %in% summary[j] == TRUE, paste("rel. sd = ", round(De.stats[i,6], 2), " %", " | ", sep = ""), ""), ifelse("sd.abs" %in% summary[j] == TRUE, paste("abs. sd = ", round(De.stats[i,5], 2), " | ", sep = ""), ""), ifelse("se.rel" %in% summary[j] == TRUE, paste("rel. se = ", round(De.stats[i,8], 2), " %", " | ", sep = ""), ""), ifelse("se.abs" %in% summary[j] == TRUE, paste("abs. se = ", round(De.stats[i,7], 2), " | ", sep = ""), ""), ifelse("skewness" %in% summary[j] == TRUE, paste("skewness = ", round(De.stats[i,11], 2), " | ", sep = ""), ""), ifelse("kurtosis" %in% summary[j] == TRUE, paste("kurtosis = ", round(De.stats[i,12], 2), " | ", sep = ""), ""), ifelse("in.2s" %in% summary[j] == TRUE, paste("in 2 sigma = ", round(sum(data[[i]][,1] > (De.stats[i,2] - 2 * De.stats[i,5]) & data[[i]][,1] < (De.stats[i,2] + 2 * De.stats[i,5])) / nrow(data[[i]]) * 100 , 1), " % ", sep = ""), "") ) } summary.text <- paste(summary.text, collapse = "") label.text[[length(label.text) + 1]] <- paste( " ", summary.text, sep = "") } ## remove outer vertical lines from string for(i in 2:length(label.text)) { label.text[[i]] <- substr(x = label.text[[i]], start = 3, stop = nchar(label.text[[i]]) - 3) } } ## remove dummy list element label.text[[1]] <- NULL ## read out additional parameters ------------------------------------------- if("main" %in% names(list(...))) { main <- list(...)$main } else { main <- expression(bold(paste(D[e], " distribution"))) } if("sub" %in% names(list(...))) { sub <- list(...)$sub } else { sub <- NULL } if("xlab" %in% names(list(...))) { xlab <- list(...)$xlab } else { xlab <- expression(paste(D[e], " [Gy]")) } if("ylab" %in% names(list(...))) { ylab <- list(...)$ylab } else { ylab <- c("Density", "Cumulative frequency") } if("xlim" %in% names(list(...))) { xlim.plot <- list(...)$xlim } else { xlim.plot <- c(min(c(De.global - De.error.global), 
De.density.range[1], na.rm = TRUE), max(c(De.global + De.error.global), De.density.range[2], na.rm = TRUE)) } if("ylim" %in% names(list(...))) { ylim.plot <- list(...)$ylim } else { if(!is.na(De.density.range[1])){ ylim.plot <- c(De.density.range[3], De.density.range[4], 0, max(De.stats[,1])) }else{ ylim.plot <- c(0, max(De.stats[,1]), 0, max(De.stats[,1])) } } if("log" %in% names(list(...))) { log.option <- list(...)$log } else { log.option <- "" } if("col" %in% names(list(...))) { col.main <- list(...)$col col.xlab <- 1 col.ylab1 <- 1 col.ylab2 <- 1 col.xtck <- 1 col.ytck1 <- 1 col.ytck2 <- 1 col.box <- 1 col.mtext <- 1 col.stats <- list(...)$col col.kde.line <- list(...)$col col.kde.fill <- NA col.value.dot <- list(...)$col col.value.bar <- list(...)$col col.value.rug <- list(...)$col col.boxplot <- list(...)$col col.boxplot.line <- list(...)$col col.boxplot.fill <- NA col.mean.line <- adjustcolor(col = list(...)$col, alpha.f = 0.4) col.sd.bar <- adjustcolor(col = list(...)$col, alpha.f = 0.4) col.background <- NA } else { if(length(layout$kde$colour$main) == 1) { col.main <- c(layout$kde$colour$main, 2:length(data)) } else { col.main <- layout$kde$colour$main } if(length(layout$kde$colour$xlab) == 1) { col.xlab <- c(layout$kde$colour$xlab, 2:length(data)) } else { col.xlab <- layout$kde$colour$xlab } if(length(layout$kde$colour$ylab1) == 1) { col.ylab1 <- c(layout$kde$colour$ylab1, 2:length(data)) } else { col.ylab1 <- layout$kde$colour$ylab1 } if(length(layout$kde$colour$ylab2) == 1) { col.ylab2 <- c(layout$kde$colour$ylab2, 2:length(data)) } else { col.ylab2 <- layout$kde$colour$ylab2 } if(length(layout$kde$colour$xtck) == 1) { col.xtck <- c(layout$kde$colour$xtck, 2:length(data)) } else { col.xtck <- layout$kde$colour$xtck } if(length(layout$kde$colour$ytck1) == 1) { col.ytck1 <- c(layout$kde$colour$ytck1, 2:length(data)) } else { col.ytck1 <- layout$kde$colour$ytck1 } if(length(layout$kde$colour$ytck2) == 1) { col.ytck2 <- c(layout$kde$colour$ytck2, 
2:length(data)) } else { col.ytck2 <- layout$kde$colour$ytck2 } if(length(layout$kde$colour$box) == 1) { col.box <- c(layout$kde$colour$box, 2:length(data)) } else { col.box <- layout$kde$colour$box } if(length(layout$kde$colour$mtext) == 1) { col.mtext <- c(layout$kde$colour$mtext, 2:length(data)) } else { col.mtext <- layout$kde$colour$mtext } if(length(layout$kde$colour$stats) == 1) { col.stats <- c(layout$kde$colour$stats, 2:length(data)) } else { col.stats <- layout$kde$colour$stats } if(length(layout$kde$colour$kde.line) == 1) { col.kde.line <- c(layout$kde$colour$kde.line, 2:length(data)) } else { col.kde.line <- layout$kde$colour$kde.line } if(length(layout$kde$colour$kde.fill) == 1) { col.kde.fill <- c(layout$kde$colour$kde.fill, 2:length(data)) } else { col.kde.fill <- layout$kde$colour$kde.fill } if(length(layout$kde$colour$value.dot) == 1) { col.value.dot <- c(layout$kde$colour$value.dot, 2:length(data)) } else { col.value.dot <- layout$kde$colour$value.dot } if(length(layout$kde$colour$value.bar) == 1) { col.value.bar <- c(layout$kde$colour$value.bar, 2:length(data)) } else { col.value.bar <- layout$kde$colour$value.bar } if(length(layout$kde$colour$value.rug) == 1) { col.value.rug <- c(layout$kde$colour$value.rug, 2:length(data)) } else { col.value.rug <- layout$kde$colour$value.rug } if(length(layout$kde$colour$boxplot.line) == 1) { col.boxplot.line <- c(layout$kde$colour$boxplot.line, 2:length(data)) } else { col.boxplot.line <- layout$kde$colour$boxplot.line } if(length(layout$kde$colour$boxplot.fill) == 1) { col.boxplot.fill <- c(layout$kde$colour$boxplot.fill, 2:length(data)) } else { col.boxplot.fill <- layout$kde$colour$boxplot.fill } if(length(layout$kde$colour$mean.line) == 1) { col.mean.line <- adjustcolor(col = 1:length(data), alpha.f = 0.4) } else { col.mean.line <- layout$kde$colour$mean.point } if(length(layout$kde$colour$sd.bar) == 1) { col.sd.bar <- c(layout$kde$colour$sd.bar, 2:length(data)) } else { col.sd.bar <- 
layout$kde$colour$sd.line } if(length(layout$kde$colour$background) == 1) { col.background <- c(layout$kde$colour$background, 2:length(data)) } else { col.background <- layout$kde$colour$background } } if("lty" %in% names(list(...))) { lty <- list(...)$lty } else { lty <- rep(1, length(data)) } if("lwd" %in% names(list(...))) { lwd <- list(...)$lwd } else { lwd <- rep(1, length(data)) } if("cex" %in% names(list(...))) { cex <- list(...)$cex } else { cex <- 1 } if("fun" %in% names(list(...))) { fun <- list(...)$fun } else { fun <- FALSE } ## convert keywords into summary placement coordinates if(missing(summary.pos) == TRUE) { summary.pos <- c(xlim.plot[1], ylim.plot[2]) summary.adj <- c(0, 1) } else if(length(summary.pos) == 2) { summary.pos <- summary.pos summary.adj <- c(0, 1) } else if(summary.pos[1] == "topleft") { summary.pos <- c(xlim.plot[1], ylim.plot[2]) summary.adj <- c(0, 1) } else if(summary.pos[1] == "top") { summary.pos <- c(mean(xlim.plot), ylim.plot[2]) summary.adj <- c(0.5, 1) } else if(summary.pos[1] == "topright") { summary.pos <- c(xlim.plot[2], ylim.plot[2]) summary.adj <- c(1, 1) } else if(summary.pos[1] == "left") { summary.pos <- c(xlim.plot[1], mean(ylim.plot[1:2])) summary.adj <- c(0, 0.5) } else if(summary.pos[1] == "center") { summary.pos <- c(mean(xlim.plot), mean(ylim.plot[1:2])) summary.adj <- c(0.5, 0.5) } else if(summary.pos[1] == "right") { summary.pos <- c(xlim.plot[2], mean(ylim.plot[1:2])) summary.adj <- c(1, 0.5) }else if(summary.pos[1] == "bottomleft") { summary.pos <- c(xlim.plot[1], ylim.plot[1]) summary.adj <- c(0, 0) } else if(summary.pos[1] == "bottom") { summary.pos <- c(mean(xlim.plot), ylim.plot[1]) summary.adj <- c(0.5, 0) } else if(summary.pos[1] == "bottomright") { summary.pos <- c(xlim.plot[2], ylim.plot[1]) summary.adj <- c(1, 0) } ## plot data sets ----------------------------------------------------------- ## setup plot area if(length(summary) >= 1 & summary.pos[1] == "sub") { toplines <- length(data) } else { 
toplines <- 1 } ## extract original plot parameters par(bg = layout$kde$colour$background) bg.original <- par()$bg par(mar = c(5, 5.5, 2.5 + toplines, 4.5), xpd = FALSE, cex = cex) if(layout$kde$dimension$figure.width != "auto" | layout$kde$dimension$figure.height != "auto") { par(mai = layout$kde$dimension$margin / 25.4, pin = c(layout$kde$dimension$figure.width / 25.4 - layout$kde$dimension$margin[2] / 25.4 - layout$kde$dimension$margin[4] / 25.4, layout$kde$dimension$figure.height / 25.4 - layout$kde$dimension$margin[1] / 25.4 - layout$kde$dimension$margin[3]/25.4)) } ## create empty plot to get plot dimensions plot(NA, xlim = xlim.plot, ylim = ylim.plot[1:2], sub = sub, log = log.option, axes = FALSE, ann = FALSE) ## get line height in xy coordinates l_height <- par()$cxy[2] ## optionally update ylim if(boxplot == TRUE) { ylim.plot[1] <- ylim.plot[1] - 1.4 * l_height } ## create empty plot to set adjusted plot dimensions par(new = TRUE) plot(NA, xlim = xlim.plot, ylim = ylim.plot[1:2], log = log.option, cex = cex, axes = FALSE, ann = FALSE) ## add box box(which = "plot", col = layout$kde$colour$box) ## add x-axis axis(side = 1, col = layout$kde$colour$xtck, col.axis = layout$kde$colour$xtck, labels = NA, tcl = -layout$kde$dimension$xtcl / 200, cex = cex) axis(side = 1, line = 2 * layout$kde$dimension$xtck.line / 100 - 2, lwd = 0, col = layout$kde$colour$xtck, family = layout$kde$font.type$xtck, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$kde$font.deco$xtck], col.axis = layout$kde$colour$xtck, cex.axis = layout$kde$font.size$xlab/12) mtext(text = xlab, side = 1, line = 3 * layout$kde$dimension$xlab.line / 100, col = layout$kde$colour$xlab, family = layout$kde$font.type$xlab, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$kde$font.deco$xlab], cex = cex * layout$kde$font.size$xlab/12) ## add left y-axis axis(side = 2, at = pretty(x = range(De.density.range[3:4])), col = layout$kde$colour$ytck1, col.axis = 
layout$kde$colour$ytck1, labels = NA, tcl = -layout$kde$dimension$ytck1 / 200, cex = cex) axis(side = 2, at = pretty(x = range(De.density.range[3:4])), line = 2 * layout$kde$dimension$ytck1.line / 100 - 2, lwd = 0, col = layout$kde$colour$ytck1, family = layout$kde$font.type$ytck1, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$kde$font.deco$ytck1], col.axis = layout$kde$colour$ytck1, cex.axis = layout$kde$font.size$ylab1/12) mtext(text = ylab[1], side = 2, line = 3 * layout$kde$dimension$ylab1.line / 100, col = layout$kde$colour$ylab1, family = layout$kde$font.type$ylab1, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$kde$font.deco$ylab1], cex = cex * layout$kde$font.size$ylab1/12) for(i in 1:length(data)) { if(!all(is.na(De.density[[i]]))){ polygon(x = c(par()$usr[1], De.density[[i]]$x, par()$usr[2]), y = c(min(De.density[[i]]$y),De.density[[i]]$y, min(De.density[[i]]$y)), border = col.kde.line[i], col = col.kde.fill[i], lty = lty[i], lwd = lwd[i]) } } ## add plot title cex.old <- par()$cex par(cex = layout$kde$font.size$main / 12) title(main = main, family = layout$kde$font.type$main, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$kde$font.deco$main], col.main = layout$kde$colour$main, line = (toplines + 1.2) * layout$kde$dimension$main / 100) par(cex = cex.old) ## optionally add mtext line if(mtext != "") { mtext(text = mtext, side = 3, line = 0.5, family = layout$kde$font.type$mtext, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$kde$font.deco$mtext], col.main = layout$kde$colour$mtext, cex = layout$kde$font.size$mtext / 12) } ## add summary content for(i in 1:length(data)) { if(summary.pos[1] != "sub") { text(x = summary.pos[1], y = summary.pos[2], adj = summary.adj, labels = label.text[[i]], col = col.stats[i], cex = layout$kde$font.size$stats / 12) } else { if(mtext == "") { mtext(side = 3, line = (toplines + 0.3 - i) * layout$kde$dimension$stats.line / 100, text = 
label.text[[i]], col = col.stats[i], cex = layout$kde$font.size$stats / 12) } } } if(values.cumulative == TRUE) { ## create empty overlay plot par(new = TRUE) # adjust plot options ## add empty plot, scaled to preliminary secondary plot content plot(x = NA, xlim = xlim.plot, ylim = ylim.plot[3:4], log = log.option, ann = FALSE, axes = FALSE ) ## get line height in xy coordinates l_height <- par()$cxy[2] ## optionally update ylim if(boxplot == TRUE) { ylim.plot[3] <- ylim.plot[3] - 1.4 * l_height } ## create correctly scaled empty overlay plot par(new = TRUE) # adjust plot options ## add empty plot, scaled to secondary plot content plot(NA, xlim = xlim.plot, ylim = ylim.plot[3:4], log = log.option, ann = FALSE, axes = FALSE) ## optionally add boxplot if(boxplot == TRUE) { ## add zero line abline(h = 0) ## get extended boxplot data boxplot.data <- list(NA) for(i in 1:length(data)) { boxplot.i <- boxplot(x = data[[i]][,1], plot = FALSE) boxplot.i$group <- mean(x = data[[i]][,1], na.rm = TRUE) boxplot.i$names <- sd(x = data[[i]][,1], na.rm = TRUE) boxplot.data[[length(boxplot.data) + 1]] <- boxplot.i } ## remove dummy list object boxplot.data[[1]] <- NULL ## get new line hights l_height <- par()$cxy[2] for(i in 1:length(data)) { # ## draw sd line # lines(x = c(boxplot.data[[i]]$group[1] - boxplot.data[[i]]$names[1], # boxplot.data[[i]]$group[1] + boxplot.data[[i]]$names[1]), # y = c(-5/8 * l_height, # -5/8 * l_height), # col = col.mean.line[i]) # # ## draw mean line # points(x = boxplot.data[[i]]$group[1], # y = -5/8 * l_height, # pch = 18, # col = col.mean.line[i]) ## draw median line lines(x = c(boxplot.data[[i]]$stats[3,1], boxplot.data[[i]]$stats[3,1]), y = c(-11/8 * l_height, -7/8 * l_height), lwd = 2, col = col.boxplot.line[i]) ## draw q25-q75-polygon polygon(x = c(boxplot.data[[i]]$stats[2,1], boxplot.data[[i]]$stats[2,1], boxplot.data[[i]]$stats[4,1], boxplot.data[[i]]$stats[4,1]), y = c(-11/8 * l_height, -7/8 * l_height, -7/8 * l_height, -11/8 * l_height), col 
= col.boxplot.fill[i], border = col.boxplot.line[i]) ## draw whiskers lines(x = c(boxplot.data[[i]]$stats[2,1], boxplot.data[[i]]$stats[1,1]), y = c(-9/8 * l_height, -9/8 * l_height), col = col.boxplot.line[i]) lines(x = c(boxplot.data[[i]]$stats[1,1], boxplot.data[[i]]$stats[1,1]), y = c(-10/8 * l_height, -8/8 * l_height), col = col.boxplot.line[i]) lines(x = c(boxplot.data[[i]]$stats[4,1], boxplot.data[[i]]$stats[5,1]), y = c(-9/8 * l_height, -9/8 * l_height), col = col.boxplot.line[i]) lines(x = c(boxplot.data[[i]]$stats[5,1], boxplot.data[[i]]$stats[5,1]), y = c(-10/8 * l_height, -8/8 * l_height), col = col.boxplot.line[i]) ## draw outliers points(x = boxplot.data[[i]]$out, y = rep(-9/8 * l_height, length(boxplot.data[[i]]$out)), col = col.boxplot.line[i], cex = cex * 0.8) } } ## optionally add rug if(rug == TRUE) { for(i in 1:length(data)) { for(j in 1:nrow(data[[i]])) { lines(x = c(data[[i]][j,1], data[[i]][j,1]), y = c(0, -2/8 * l_height), col = col.value.rug[i]) } } } ## add secondary y-axis ticks_axis <- pretty(x = c(1, ylim.plot[4])) ticks_axis <- ifelse(test = ticks_axis == 0, yes = NA, no = ticks_axis) ## add right y-axis axis(side = 4, at = ticks_axis, col = layout$kde$colour$ytck2, col.axis = layout$kde$colour$ytck2, labels = NA, tcl = -layout$kde$dimension$ytck2 / 200, cex = cex) axis(side = 4, at = ticks_axis, line = 2 * layout$kde$dimension$ytck2.line / 100 - 2, lwd = 0, col = layout$kde$colour$ytck2, family = layout$kde$font.type$ytck2, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$kde$font.deco$ytck2], col.axis = layout$kde$colour$ytck2, cex.axis = layout$kde$font.size$ylab2/12) mtext(text = ylab[2], side = 4, line = 3 * layout$kde$dimension$ylab2.line / 100, col = layout$kde$colour$ylab2, family = layout$kde$font.type$ylab2, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$kde$font.deco$ylab2], cex = cex * layout$kde$font.size$ylab2/12) ## add De error bars for(i in 1:length(data)) { arrows(data[[i]][,1] - 
data[[i]][,2], 1:length(data[[i]][,1]), data[[i]][,1] + data[[i]][,2], 1:length(data[[i]][,1]), code = 3, angle = 90, length = 0.05, col = col.value.bar[i]) ## add De measurements points(data[[i]][,1], 1:De.stats[i,1], col = col.value.dot[i], pch = 20) } } ## add empty plot par(new = TRUE) plot(NA, ann = FALSE, axes = FALSE, xlim = xlim.plot, ylim = ylim.plot[1:2], log = log.option, cex = cex, cex.lab = cex, cex.main = cex, cex.axis = cex) ## FUN by R Luminescence Team if(fun==TRUE){sTeve()} if(output == TRUE) { return(invisible(list(De.stats = De.stats, summary.pos = summary.pos, De.density = De.density))) } } Luminescence/R/read_Daybreak2R.R0000644000176200001440000003562113231137116016136 0ustar liggesusers#' Import measurement data produced by a Daybreak TL/OSL reader into R #' #' Import a TXT-file (ASCII file) or a DAT-file (binary file) produced by a #' Daybreak reader into R. The import of the DAT-files is limited to the file #' format described for the software TLAPLLIC v.3.2 used for a Daybreak, model 1100. #' #' @param file [character] or [list] (**required**): #' path and file name of the file to be imported. Alternatively a list of file #' names can be provided or just the path a folder containing measurement data. #' Please note that the specific, common, file extension (txt) is likely #' leading to function failures during import when just a path is provided. #' #' @param raw [logical] (*with default*): #' if the input is a DAT-file (binary) a [data.table::data.table] instead of #' the [RLum.Analysis-class] object can be returned for debugging purposes. #' #' @param verbose [logical] (*with default*): #' enables or disables terminal feedback #' #' @param txtProgressBar [logical] (*with default*): #' enables or disables [txtProgressBar]. #' #' @return #' A list of [RLum.Analysis-class] objects (each per position) is provided. #' #' @note #' **`[BETA VERSION]`** #' This function still needs to be tested properly. 
In particular #' the function has underwent only very rough rests using a few files. #' #' @section Function version: 0.3.1 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr #' Anotine Zink, C2RMF, Palais du Louvre, Paris (France) #' #' The ASCII-file import is based on a suggestion by Willian Amidon and Andrew Louis Gorin #' #' @seealso [RLum.Analysis-class], [RLum.Data.Curve-class], [data.table::data.table] #' #' @keywords IO #' #' @examples #' #' \dontrun{ #' file <- file.choose() #' temp <- read_Daybreak2R(file) #' } #' #' @md #' @export read_Daybreak2R <- function( file, raw = FALSE, verbose = TRUE, txtProgressBar = TRUE ){ ##TODO ## - run tests ## - check where the warning messages are comming from ## - implement further integegrity tests (ASCII import) # Self Call ----------------------------------------------------------------------------------- # Option (a): Input is a list, every element in the list will be treated as file connection # with that many file can be read in at the same time # Option (b): The input is just a path, the function tries to grep ALL Daybreaks-txt files in the # directory and import them, if this is detected, we proceed as list if(is(file, "character")) { ##If this is not really a path we skip this here if (dir.exists(file) & length(dir(file)) > 0) { if(verbose){ cat("[read_Daybreak2R()] Directory detected, trying to extract '*.txt' files ...\n") } file <- as.list(paste0(file,dir( file, recursive = FALSE, pattern = ".txt" ))) } } ##if the input is already a list if (is(file, "list")) { temp.return <- lapply(1:length(file), function(x) { read_Daybreak2R( file = file[[x]], txtProgressBar = txtProgressBar ) }) ##return return(temp.return) } # Integrity checks ---------------------------------------------------------------------------- ##check if file exists if(!file.exists(file)){ stop("[read_Daybreak2R()] file name does not seem to exist.", call. = FALSE) } ##check for file extension ... 
distinguish between TXT and DAT if(substr(file, start = nchar(file) - 3, stop = nchar(file)) == ".DAT"){ # Read DAT-file ------------------------------------------------------------------------------ ##screen file to get information on the number of stored records con<-file(file,"rb") file.data <- file.info(file) max.pt<-readBin(con,what="int",6,size=2,endian="little")[6] file.size<-file.data$size n.length<-file.size/(190+8*(max.pt+1)) ##190 is is size of the header for each data set close(con) ##import data con <- file(file, "rb") ##pre-define data.table results.DATA <- data.table::data.table( ID = integer(length = n.length), MAXPT = integer(length = n.length), SPACING = integer(length = n.length), NDISK = integer(length = n.length), NRUN = integer(length = n.length), D1 = integer(length = n.length), NPT = integer(length = n.length), NATL = logical(length = n.length), TLRUN = logical(length = n.length), BEFORE_IRRAD = logical(length = n.length), SHIFT = double(length = n.length), RAMPRATE = double(length = n.length), GRATE = double(length = n.length), BRATE = double(length = n.length), ARATE = double(length = n.length), GAMMADOSE = double(length = n.length), BETADOSE = double(length = n.length), ALPHADOSE = double(length = n.length), BLEACHINGTIME = double(length = n.length), GRUNIT = character(length = n.length), BRUNIT = character(length = n.length), ARUNIT = character(length = n.length), BFILTER = character(length = n.length), GSOURCE = character(length = n.length), BSOURCE = character(length = n.length), ASOURCE = character(length = n.length), IRRAD_DATE = character(length = n.length), RUNREMARK = character(length = n.length), DATA = list() ) ##TERMINAL FEEDBACK if(verbose){ cat("\n[read_Daybreak2R()]") cat(paste("\n >> Importing:", file[1],"\n")) } ##PROGRESS BAR if(txtProgressBar & verbose){ pb <- txtProgressBar(min=0,max=n.length, char = "=", style=3) } ##LOOP over file i <- 1 while (i> Importing:", file[1],"\n")) } ##PROGRESS BAR if(txtProgressBar & 
verbose){ pb <- txtProgressBar(min=0,max=length(data.list), char = "=", style=3) } ##(2) ##Loop over the list to create RLum.Data.Curve objects RLum.Data.Curve.list <- lapply(1:length(data.list), function(x){ ##get length of record record.length <- length(data.list[[x]]) ##get header length until the argument 'Points' header.length <- grep(pattern = "Points", x = data.list[[x]]) if(length(header.length)>0){ temp.meta_data <- unlist(strsplit(data.list[[x]][2:header.length], split = "=", fixed = TRUE)) }else{ temp.meta_data <- unlist(strsplit(data.list[[x]][2:length(data.list[[x]])], split = "=", fixed = TRUE)) } ##get list names for the info element list info.names <- temp.meta_data[seq(1,length(temp.meta_data), by = 2)] ##info elements info <- as.list(temp.meta_data[seq(2,length(temp.meta_data), by = 2)]) names(info) <- info.names ##add position, which is 'Disk' info <- c(info, position = as.integer(info$Disk)) if(length(header.length)>0){ ##get measurement data temp.data <- unlist(strsplit(unlist(strsplit( data.list[[x]][12:length(data.list[[x]])], split = "=" )), split = ";")) ##grep only data of interest point.x <- suppressWarnings(as.numeric(gsub("^\\s+|\\s+$", "", temp.data[seq(2, length(temp.data), by = 4)]))) point.y <- suppressWarnings(as.numeric(gsub("^\\s+|\\s+$", "", temp.data[seq(3,length(temp.data), by = 4)]))) ##combine it into a matrix data <- matrix(c(point.x,point.y), ncol = 2) }else{ ##we presume this should be irradiation ... 
if ("IrradTime" %in% names(info)) { point.x <- 1:as.numeric(info$IrradTime) point.y <- rep(1, length(point.x)) data <- matrix(c(point.x,point.y), ncol = 2) } } ##update progress bar if (txtProgressBar & verbose) { setTxtProgressBar(pb, x) } ##return RLum object return( set_RLum( class = "RLum.Data.Curve", originator = "read_Daybreak2R", recordType = sub(" ", replacement = "_", x = info$DataType), curveType = "measured", data = data, info = info ) ) }) ##close ProgressBar if(txtProgressBar & verbose){close(pb)} ##(3) ##Now we have to find out how many aliquots we do have positions.id <- sapply(RLum.Data.Curve.list, function(x){ get_RLum(x, info.object = "position") }) ##(4) ##now combine everyting in an RLum.Analysis object in accordance to the position number RLum.Analysis.list <- lapply(unique(positions.id), function(x){ ##get list ids for position number n <- which(positions.id == x) ##make list temp.list <- lapply(n, function(x){ RLum.Data.Curve.list[[x]] }) ##put in RLum.Analysis object object <- set_RLum( class = "RLum.Analysis", originator = "read_Daybreak2R", protocol = "Custom", records = temp.list ) ##set parent id of records object <- .set_pid(object) return(object) }) ##TERMINAL FEEDBACK if(verbose){ cat(paste0("\n ",length(unlist(get_RLum(RLum.Analysis.list))), " records have been read sucessfully!\n")) } return(RLum.Analysis.list) } } Luminescence/R/convert_Daybreak2CSV.R0000644000176200001440000000522413231137116017131 0ustar liggesusers#' Export measurement data produced by a Daybreak luminescence reader to CSV-files #' #' This function is a wrapper function around the functions [read_Daybreak2R] and #' [write_RLum2CSV] and it imports an Daybreak-file (TXT-file, DAT-file) #' and directly exports its content to CSV-files. If nothing is set for the #' argument `path` ([write_RLum2CSV]) the input folder will become the output folder. 
#'
#' @param file [character] (**required**):
#' name of the Daybreak-file (TXT-file, DAT-file) to be converted to CSV-files
#'
#' @param ... further arguments that will be passed to the function
#' [read_Daybreak2R] and [write_RLum2CSV]
#'
#' @return
#' The function returns either a CSV-file (or many of them) or for the option `export = FALSE`
#' a list comprising objects of type [data.frame] and [matrix]
#'
#' @section Function version: 0.1.1
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @seealso [RLum.Analysis-class], [RLum.Data-class], [RLum.Results-class],
#' [utils::write.table], [write_RLum2CSV], [read_Daybreak2R]
#'
#' @keywords IO
#'
#' @examples
#'
#' \dontrun{
#' ##select your BIN-file
#' file <- file.choose()
#'
#' ##convert
#' convert_Daybreak2CSV(file)
#'
#' }
#'
#' @md
#' @export
convert_Daybreak2CSV <- function(
  file,
  ...
){

  # General tests -------------------------------------------------------------------------------

  ##file is missing?
  ##(error label fixed: previously reported the wrong function name 'convert_Daybreak2R()')
  if(missing(file)){
    stop("[convert_Daybreak2CSV()] file is missing!", call. = FALSE)
  }

  ##set input arguments; defaults mirror those of read_Daybreak2R() and write_RLum2CSV()
  convert_Daybreak2R_settings.default <- list(
    raw = FALSE,
    verbose = TRUE,
    txtProgressBar = TRUE,
    export = TRUE
  )

  ##modify list on demand, i.e. overwrite defaults with user-supplied values from '...'
  convert_Daybreak2R_settings <- modifyList(
    x = convert_Daybreak2R_settings.default,
    val = list(...))

  # Import file ---------------------------------------------------------------------------------
  if(!inherits(file, "RLum")){
    ##BUG FIX: 'verbose' and 'txtProgressBar' were previously (erroneously) both
    ##fed with the value of the 'raw' setting (copy-paste error), so user-supplied
    ##values for these two arguments were silently ignored; each argument now
    ##receives its own setting
    object <- read_Daybreak2R(
      file = file,
      raw = convert_Daybreak2R_settings$raw,
      verbose = convert_Daybreak2R_settings$verbose,
      txtProgressBar = convert_Daybreak2R_settings$txtProgressBar
    )

  }else{
    ##input is already an RLum object, skip the import step
    object <- file

  }

  # Export to CSV -------------------------------------------------------------------------------

  ##get all arguments we want to pass and remove the doubled one
  arguments <- c(
    list(object = object, export = convert_Daybreak2R_settings$export),
    list(...))
  arguments[duplicated(names(arguments))] <- NULL

  ##this if-condition prevents NULL in the terminal
  if(convert_Daybreak2R_settings$export == TRUE){
    invisible(do.call("write_RLum2CSV", arguments))

  }else{
    do.call("write_RLum2CSV", arguments)

  }

}
Luminescence/R/fit_LMCurve.R0000644000176200001440000011131413231137116015366 0ustar liggesusers#' Nonlinear Least Squares Fit for LM-OSL curves
#'
#' The function determines weighted nonlinear least-squares estimates of the
#' component parameters of an LM-OSL curve (Bulur 1996) for a given number of
#' components and returns various component parameters. The fitting procedure
#' uses the function [nls] with the `port` algorithm.
#'
#' **Fitting function**
#'
#' The function for the fitting has the general
#' form:
#'
#' \deqn{y = (exp(0.5)*Im_1*x/xm_1)*exp(-x^2/(2*xm_1^2)) + ,\ldots, + exp(0.5)*Im_i*x/xm_i)*exp(-x^2/(2*xm_i^2))}
#'
#' where \eqn{1 < i < 8}
#'
#' This function and the equations for the conversion to b (detrapping probability)
#' and n0 (proportional to initially trapped charge) have been taken from Kitis
#' et al.
(2008): #' #' \deqn{xm_i=\sqrt{max(t)/b_i}} #' \deqn{Im_i=exp(-0.5)n0/xm_i} #' #' **Background subtraction** #' #' Three methods for background subtraction #' are provided for a given background signal (`values.bg`). #' #' - `polynomial`: default method. A polynomial function is fitted using [glm] #' and the resulting function is used for background subtraction: #' \deqn{y = a*x^4 + b*x^3 + c*x^2 + d*x + e} #' #' - `linear`: a linear function is fitted using [glm] and the resulting function #' is used for background subtraction: #' \deqn{y = a*x + b} #' #' - `channel`: the measured #' background signal is subtracted channelwise from the measured signal. #' #' #' **Start values** #' #' The choice of the initial parameters for the `nls`-fitting is a crucial #' point and the fitting procedure may mainly fail due to ill chosen start #' parameters. Here, three options are provided: #' #' **(a)** #' If no start values (`start_values`) are provided by the user, a cheap guess is made #' by using the detrapping values found by Jain et al. (2003) for quartz for a #' maximum of 7 components. Based on these values, the pseudo start parameters #' xm and Im are recalculated for the given data set. In all cases, the fitting #' starts with the ultra-fast component and (depending on `n.components`) #' steps through the following values. If no fit could be achieved, an error #' plot (for `plot = TRUE`) with the pseudo curve (based on the #' pseudo start parameters) is provided. This may give the opportunity to #' identify appropriate start parameters visually. #' #' **(b)** #' If start values are provided, the function works like a simple [nls] #' fitting approach. #' #' **(c)** #' If no start parameters are provided and #' the option `fit.advanced = TRUE` is chosen, an advanced start paramter #' estimation is applied using a stochastical attempt. Therefore, the #' recalculated start parameters **(a)** are used to construct a normal #' distribution. 
The start parameters are then sampled randomly from this #' distribution. A maximum of 100 attempts will be made. **Note:** This #' process may be time consuming. #' #' **Goodness of fit** #' #' The goodness of the fit is given by a pseudoR^2 value (pseudo coefficient of #' determination). According to Lave (1970), the value is calculated as: #' #' \deqn{pseudoR^2 = 1 - RSS/TSS} #' #' where \eqn{RSS = Residual~Sum~of~Squares} #' and \eqn{TSS = Total~Sum~of~Squares} #' #' **Error of fitted component parameters** #' #' The 1-sigma error for the components is calculated using #' the function [confint]. Due to considerable calculation time, this #' option is deactived by default. In addition, the error for the components #' can be estimated by using internal R functions like [summary]. See the #' [nls] help page for more information. #' #' *For more details on the nonlinear regression in R, see Ritz & Streibig (2008).* #' #' @param values [RLum.Data.Curve-class] or [data.frame] (**required**): #' x,y data of measured values (time and counts). See examples. #' #' @param values.bg [RLum.Data.Curve-class] or [data.frame] (*optional*): #' x,y data of measured values (time and counts) for background subtraction. #' #' @param n.components [integer] (*with default*): #' fixed number of components that are to be recognised during fitting #' (min = 1, max = 7). #' #' @param start_values [data.frame] (*optional*): #' start parameters for lm and xm data for the fit. If no start values are given, #' an automatic start value estimation is attempted (see details). #' #' @param input.dataType [character] (*with default*): #' alter the plot output depending on the input data: "LM" or "pLM" (pseudo-LM). #' See: [CW2pLM] #' #' @param fit.method [character] (*with default*): #' select fit method, allowed values: `'port'` and `'LM'`. 
`'port'` uses the 'port' #' routine usint the funtion [nls] `'LM'` utilises the function `nlsLM` from #' the package `minpack.lm` and with that the Levenberg-Marquardt algorithm. #' #' @param sample_code [character] (*optional*): #' sample code used for the plot and the optional output table (mtext). #' #' @param sample_ID [character] (*optional*): #' additional identifier used as column header for the table output. #' #' @param LED.power [numeric] (*with default*): #' LED power (max.) used forintensity ramping in mW/cm^2. #' **Note:** This value is used for the calculation of the absolute #' photoionisation cross section. #' #' @param LED.wavelength [numeric] (*with default*): #' LED wavelength in nm used for stimulation. #' **Note:** This value is used for the calculation of the absolute #' photoionisation cross section. #' #' @param fit.trace [logical] (*with default*): #' traces the fitting process on the terminal. #' #' @param fit.advanced [logical] (*with default*): #' enables advanced fitting attempt for automatic start parameter recognition. #' Works only if no start parameters are provided. #' **Note:** It may take a while and it is not compatible with `fit.method = "LM"`. #' #' @param fit.calcError [logical] (*with default*): #' calculate 1-sigma error range of components using [confint]. #' #' @param bg.subtraction [character] (*with default*): #' specifies method for background subtraction (`polynomial`, `linear`, `channel`, #' see Details). **Note:** requires input for `values.bg`. #' #' @param verbose [logical] (*with default*): #' terminal output with fitting results. #' #' @param plot [logical] (*with default*): #' returns a plot of the fitted curves. #' #' @param plot.BG [logical] (*with default*): #' returns a plot of the background values with the fit used for the #' background subtraction. #' #' @param ... Further arguments that may be passed to the plot output, e.g. #' `xlab`, `xlab`, `main`, `log`. 
#' #' @return #' Various types of plots are returned. For details see above. Furthermore an #' `RLum.Results` object is returned with the following structure: #' #' **`@data:`** #' #' `.. $data` : [data.frame] with fitting results\cr #' `.. $fit` : nls ([nls] object)\cr #' `.. $component.contribution.matrix` : [list] component distribution matrix #' #' #' **`info:`** #' #' `.. $call` : [call] the original function call #' #' Matrix structure for the distribution matrix: #' #' Column 1 and 2: time and `rev(time)` values\cr #' Additional columns are used for the components, two for each component, #' containing I0 and n0. The last columns `cont.` provide information on #' the relative component contribution for each time interval including the row #' sum for this values. #' #' @note #' The pseudo-R^2 may not be the best parameter to describe the goodness #' of the fit. The trade off between the `n.components` and the pseudo-R^2 #' value currently remains unconsidered. #' #' The function **does not** ensure that the fitting procedure has reached a #' global minimum rather than a local minimum! In any case of doubt, the use of #' manual start values is highly recommended. #' #' @section Function version: 0.3.2 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #' @seealso [fit_CWCurve], [plot], [nls], [minpack.lm::nlsLM], [get_RLum] #' #' @references #' Bulur, E., 1996. An Alternative Technique For Optically #' Stimulated Luminescence (OSL) Experiment. Radiation Measurements, 26, 5, #' 701-709. #' #' Jain, M., Murray, A.S., Boetter-Jensen, L., 2003. Characterisation of #' blue-light stimulated luminescence components in different quartz samples: #' implications for dose measurement. Radiation Measurements, 37 (4-5), #' 441-449. #' #' Kitis, G. & Pagonis, V., 2008. Computerized curve deconvolution analysis for #' LM-OSL. Radiation Measurements, 43, 737-741. #' #' Lave, C.A.T., 1970. The Demand for Urban Mass Transportation. 
The Review of
#' Economics and Statistics, 52 (3), 320-323.
#'
#' Ritz, C. & Streibig, J.C., 2008. Nonlinear Regression with R. R. Gentleman,
#' K. Hornik, & G. Parmigiani, eds., Springer, p. 150.
#'
#' @keywords dplot models
#'
#' @examples
#'
#' ##(1) fit LM data without background subtraction
#' data(ExampleData.FittingLM, envir = environment())
#' fit_LMCurve(values = values.curve, n.components = 3, log = "x")
#'
#' ##(2) fit LM data with background subtraction and export as JPEG
#' ## -alter file path for your preferred system
#' ##jpeg(file = "~/Desktop/Fit_Output\%03d.jpg", quality = 100,
#' ## height = 3000, width = 3000, res = 300)
#' data(ExampleData.FittingLM, envir = environment())
#' fit_LMCurve(values = values.curve, values.bg = values.curveBG,
#' n.components = 2, log = "x", plot.BG = TRUE)
#' ##dev.off()
#'
#' ##(3) fit LM data with manual start parameters
#' data(ExampleData.FittingLM, envir = environment())
#' fit_LMCurve(values = values.curve,
#' values.bg = values.curveBG,
#' n.components = 3,
#' log = "x",
#' start_values = data.frame(Im = c(170,25,400), xm = c(56,200,1500)))
#'
#' @md
#' @export
fit_LMCurve<- function(
  values,
  values.bg,
  n.components = 3,
  start_values,
  input.dataType = "LM",
  fit.method = "port",
  sample_code = "",
  sample_ID = "",
  LED.power = 36,
  LED.wavelength = 470,
  fit.trace = FALSE,
  fit.advanced = FALSE,
  fit.calcError = FALSE,
  bg.subtraction = "polynomial",
  verbose = TRUE,
  plot = TRUE,
  plot.BG = FALSE,
  ...
){

  # (0) Integrity checks -------------------------------------------------------

  ##(1) data.frame or RLum.Data.Curve object?
  if(is(values, "data.frame") == FALSE & is(values, "RLum.Data.Curve") == FALSE){
    stop("[fit_LMCurve()] 'values' object has to be of type 'data.frame' or 'RLum.Data.Curve'!")
  }else{
    if(is(values, "RLum.Data.Curve") == TRUE && (
      values@recordType!="RBR" & values@recordType!="LM-OSL")){
      stop("[fit_LMCurve()] recordType should be 'RBR' or 'LM-OSL'!
           Consider as(object,'data.frame') if you had used the pseudo transformation functions.")
    }else if(is(values, "RLum.Data.Curve") == TRUE){
      values <- as(values,"data.frame")
    }
  }

  ##(2) data.frame or RLum.Data.Curve object?
  if(missing(values.bg)==FALSE){
    if(is(values.bg, "data.frame") == FALSE & is(values.bg, "RLum.Data.Curve") == FALSE){
      stop("[fit_LMCurve()] 'values.bg' object has to be of type 'data.frame' or 'RLum.Data.Curve'!")
    }else{
      ## FIX: the original tested 'values' here instead of 'values.bg',
      ## so the recordType of the background curve was never checked
      if(is(values.bg, "RLum.Data.Curve") == TRUE && values.bg@recordType!="RBR"){
        stop("[fit_LMCurve()] recordType should be 'RBR'!")
      }else if(is(values.bg, "RLum.Data.Curve") == TRUE){
        values.bg <- as(values.bg,"data.frame")
      }
    }
  }

  ## Set plot format parameters -----------------------------------------------
  extraArgs <- list(...) # read out additional arguments list

  log   <- if("log" %in% names(extraArgs)) {extraArgs$log} else {""}

  xlim  <- if("xlim" %in% names(extraArgs)) {extraArgs$xlim} else
    {c(min(values[,1]),max(values[,1]))}

  ylim  <- if("ylim" %in% names(extraArgs)) {extraArgs$ylim} else {
    if(input.dataType=="pLM"){
      c(0,max(values[,2]*1.1))
    }else{
      c(min(values[,2]),max(values[,2]*1.1))
    }
  }

  xlab  <- if("xlab" %in% names(extraArgs)) {extraArgs$xlab} else {
    if(input.dataType=="LM"){"Time [s]"}else{"u [s]"}
  }

  ylab  <- if("ylab" %in% names(extraArgs)) {extraArgs$ylab} else {
    if(input.dataType=="LM"){
      paste("LM-OSL [cts/",round(max(values[,1])/length(values[,1]),digits=2)," s]",sep="")
    }else{"pLM-OSL [a.u.]"}
  }

  main  <- if("main" %in% names(extraArgs)) {extraArgs$main} else {"Default"}
  cex   <- if("cex" %in% names(extraArgs)) {extraArgs$cex} else {0.8}
  fun   <- if("fun" %in% names(extraArgs)) {extraArgs$fun} else {FALSE}

  ##============================================================================##
  ## BACKGROUND SUBTRACTION
  ##============================================================================##

  ##perform background subtraction if background LM measurement exists
  if(missing(values.bg)==FALSE){

    #set graphical parameters
    par.default <- par(mfrow=c(1,1), cex=1.5*cex)

    ##check if length of bg and signal is consistent
    if(length(values[,2])!=length(values.bg[,2])){
      stop("[fit_LMCurve] Length of values and values.bg differs!")}

    if(bg.subtraction=="polynomial"){

      #fit polynom function to background
      glm.fit<-glm(values.bg[,2] ~ values.bg[,1]+I(values.bg[,1]^2)+I(values.bg[,1]^3))
      glm.coef<-coef(glm.fit)

      #subtract background with fitted function
      values[,2]<-values[,2]-
        (glm.coef[4]*values[,1]^3+glm.coef[3]*values[,1]^2+glm.coef[2]*values[,1]+glm.coef[1])
      writeLines("[fit_LMCurve] >> Background subtracted (method=\"polynomial\")!")

      ##plot Background measurement if needed
      if(plot.BG==TRUE){
        plot(values.bg, ylab="LM-OSL [a.u.]", xlab="Time [s]", main="Background")
        curve((glm.coef[4]*x^3+glm.coef[3]*x^2+glm.coef[2]*x+glm.coef[1]),add=TRUE,col="red",lwd=2)
        text(0,max(values.bg[,2]),paste("y = ",
                                        round(glm.coef[4],digits=2),
                                        "*x^3+",
                                        round(glm.coef[3],digits=2),
                                        "*x^2+",
                                        round(glm.coef[2],digits=2),
                                        "*x+",
                                        round(glm.coef[1],digits=2),
                                        sep=""),pos=4)
        mtext(side=3,sample_code,cex=.8*cex)
      }

    }else if(bg.subtraction=="linear"){

      #fit linear function to background
      glm.fit<-glm(values.bg[,2] ~ values.bg[,1])
      glm.coef<-coef(glm.fit)

      ##substract bg
      values[,2]<-values[,2]-(glm.coef[2]*values[,1]+glm.coef[1])
      writeLines("[fit_LMCurve.R] >> Background subtracted (method=\"linear\")!")

      ##plot Background measurement if needed
      if(plot.BG){
        plot(values.bg, ylab="LM-OSL [a.u.]", xlab="Time [s]", main="Background")
        curve((glm.coef[2]*x+glm.coef[1]),add=TRUE,col="red",lwd=1.5)
        text(0,max(values.bg[,2]),paste("y = ",
                                        round(glm.coef[2],digits=2),
                                        "*x+",
                                        round(glm.coef[1],digits=2),
                                        sep=""),pos=4)
        mtext(side=3,sample_code,cex=.8*cex)
      }#endif::plot BG

    }else if(bg.subtraction=="channel"){

      values[,2]<-values[,2]-values.bg[,2]
      writeLines("[fit_LMCurve.R] >> Background subtracted (method=\"channel\")!")

      if(plot.BG==TRUE){
        plot(values.bg, ylab="LM-OSL [a.u.]", xlab="Time [s]", main="Background")
        mtext(side=3,sample_code,cex=.8*cex)
      }

    }else{stop("Error: Invalid method for background subtraction")}

    ##reset par values
    par(par.default)
    rm(par.default)
  }

  ##============================================================================##
  ## FITTING
  ##============================================================================##

  ##------------------------------------------------------------------------##
  ##set function for fit equation (according Kitis and Pagonis, 2008)
  ##////equation used for fitting////(start)
  fit.equation<-function(Im.i,xm.i){
    equation<-parse(
      text=paste("exp(0.5)*Im[",Im.i,"]*(values[,1]/xm[",xm.i,"])*exp(-values[,1]^2/(2*xm[",xm.i,"]^2))",
                 collapse="+",sep=""))
    return(equation)
  }
  ##////equation used for fitting///(end)

  ##------------------------------------------------------------------------##
  ##set formula elements for fitting functions
  ## the upper two functions should be removed ... but changes are needed ... TODO
  ##////equation used for fitting////(start)
  fit.formula <- function(n.components){
    Im <- paste0("Im.",1:n.components)
    xm <- paste0("xm.",1:n.components)
    as.formula(paste0("y ~ ", paste("(exp(0.5) * ", Im, "* x/", xm, ") * exp(-x^2/(2 *",xm,"^2))",
                                    collapse=" + ")))
  }
  ##////equation used for fitting///(end)

  ##------------------------------------------------------------------------##
  ##automatic start parameter estimation

  ##set fit function
  fit.function<-fit.equation(Im.i=1:n.components,xm.i=1:n.components)

  if(missing(start_values)){

    ##set b (detrapping) values for a 7-component function taken from Jain et al. (2003)
    b.pseudo<-c(32,2.5,0.65,0.15,0.025,0.0025,0.00030)

    ##calculate xm parameters from values set based on the pseudo curves
    xm.pseudo<-sqrt(max(values[,1])/b.pseudo)

    ##the Im values obtained by calculating residuals
    xm.residual<-sapply(1:length(b.pseudo),function(x){abs(values[,1]-xm.pseudo[x])})
    xm.residual<-cbind(xm.residual,values[,1])
    Im.pseudo<-sapply(1:length(xm.pseudo),function(x){
      ## the last column holds the time values (was hard-coded as 8 before)
      min(xm.residual[which(xm.residual[,x]==min(xm.residual[,x])),ncol(xm.residual)])
    })

    ##set additional variables
    b.pseudo_start<-1
    b.pseudo_end<-0
    fit.trigger<-FALSE

    while(fit.trigger==FALSE){

      xm <- xm.pseudo[b.pseudo_start:(n.components + b.pseudo_end)]
      Im <- Im.pseudo[b.pseudo_start:(n.components + b.pseudo_end)]

      if(fit.advanced){
        ##---------------------------------------------------------------##
        ##MC for fitting parameter
        ##make the fitting more stable by small variations of the parameters

        ##sample input parameters values from a normal distribution
        xm.MC<-sapply(1:length(xm),function(x){
          xm.MC<-sample(rnorm(30,mean=xm[x],sd=xm[x]/10), replace=TRUE)
        })
        Im.MC<-sapply(1:length(xm),function(x){
          Im.MC<-sample(rnorm(30,mean=Im[x],sd=Im[x]/10), replace=TRUE)
        })
        ##---------------------------------------------------------------##

        for(i in 1:length(xm.MC[,1])){

          ##NLS ##try fit
          fit<-try(nls(y~eval(fit.function),
                       trace=fit.trace,
                       data=data.frame(x=values[,1],y=values[,2]),
                       algorithm="port",
                       start=list(Im=Im.MC[i,],xm=xm.MC[i,]),#end start values input
                       nls.control(
                         maxiter=500
                       ),#end nls control
                       lower=c(xm=min(values[,1]),Im=0),
                       upper=c(xm=max(values[,1]),Im=max(values[,2]*1.1))
          ),# nls
          silent=TRUE)# end try

          ##graphical output
          if(i==1){cat(paste("[fit_LMCurve()] >> advanced fitting attempt (#",
                             b.pseudo_start,"): ",sep=""))}
          cat("*")

          if(inherits(fit,"try-error") == FALSE){break}
        }#end::forloop
        cat("\n")

      }else{

        if(fit.method == "port") {
          fit <- try(nls(
            y ~ eval(fit.function),
            trace = fit.trace,
            data = data.frame(x = values[,1],y = values[,2]),
            algorithm = "port",
            start = list(Im = Im,xm = xm),#end start values input
            nls.control(maxiter = 500),#end nls control
            lower = c(xm = 0,Im = 0)
          ),# nls
          silent = TRUE) # end try

        }else if (fit.method == "LM") {

          ##re-name for method == "LM"
          names(Im) <- paste0("Im.", 1:n.components)
          names(xm) <- paste0("xm.", 1:n.components)
          start.list <- c(as.list(Im), as.list(xm))

          ## every parameter is bounded below by 0 (simplified from the
          ## original vapply construction, which produced the same vector)
          lower <- setNames(rep(0, length(start.list)), names(start.list))

          fit <- try(minpack.lm::nlsLM(
            fit.formula(n.components),
            data = data.frame(x = values[,1], y = values[,2]),
            start = start.list,
            lower = lower,
            trace = fit.trace,
            control = minpack.lm::nls.lm.control(maxiter = 500)
          ), silent = TRUE)

        }else{
          ## FIX: typo "unknow" -> "unknown"
          stop("[fit_LMCurve()] unknown method for 'fit.method'")
        }
      }#endifelse::fit.advanced

      if(inherits(fit,"try-error")==FALSE){fit.trigger<-TRUE}
      else{
        if((n.components+b.pseudo_end)==7){fit.trigger<-TRUE
        }else{
          b.pseudo_start<-b.pseudo_start+1
          b.pseudo_end<-b.pseudo_end+1
        }#endif::maximum loops
      }#endif::try-error
    }#end:whileloop fit trigger

  }else{#endif::missing start values
    ##------------------------------------------------------------------------##
    fit<-try(nls(y~eval(fit.function),
                 trace=fit.trace,
                 data.frame(x=values[,1],y=values[,2]),
                 algorithm="port",
                 start=list(Im=start_values[,1],xm=start_values[,2]),#end start values input
                 nls.control(maxiter=500),
                 lower=c(xm=0,Im=0),
                 #upper=c(xm=max(x),Im=max(y)*1.1)# set lower boundaries for components
    )# nls
    )# end try
  }#endif::startparameter

  ##------------------------------------------------------------------------##
  ##grep parameters
  if(inherits(fit,"try-error")==FALSE){
    parameters<-coef(fit)

    ##write parameters in vectors and order parameters
    Im<-parameters[1:(length(parameters)/2)]
    Im.names <- names(Im)
    xm<-parameters[(1+(length(parameters)/2)):length(parameters)]
    xm.names <- names(xm)

    ##order parameters
    o <- order(xm)
    xm <- xm[o]
    names(xm) <- xm.names
    Im <- Im[o]
    names(Im) <- Im.names

    if (verbose){
      ##print rough fitting information - use the nls() control for more information
      writeLines("\n[fit_LMCurve()]")
      writeLines(paste("\nFitting was done using a ",n.components, "-component function:\n",sep=""))

      ##print parameters
      print(c(xm, Im))

      #print some additional information
      writeLines("\n(equation used for fitting according Kitis & Pagonis, 2008)")
    }#end if

    ##============================================================================##
    ## Additional Calculations
    ##============================================================================##

    ##calculate stimulation intensity Schmidt (2008)
    ##Energy - E = h*v
    h<-6.62606957e-34 #in W*s^2 - Planck constant
    ny<-299792458/(LED.wavelength/10^9) #frequency of the light
    E<-h*ny

    ##transform LED.power in W/cm^2
    LED.power<-LED.power/1000

    stimulation_intensity<-LED.power/E

    ##calculate b and n from the equation of Bulur(1996) to compare results
    ##Using Equation 5 and 6 from Kitis (2008)
    b<-as.vector(max(values[,1])/xm^2) #detrapping probability
    n0<-as.vector((Im/exp(-0.5))*xm)

    ##CALCULATE 1- sigma CONFIDENCE INTERVAL
    ##------------------------------------------------------------------------##
    b.error<-rep(NA, n.components)
    n0.error<-rep(NA, n.components)

    if(fit.calcError==TRUE){
      ##option for confidence interval
      values.confint<-confint(fit, level=0.68)
      Im.confint<-values.confint[1:(length(values.confint[,1])/2),]
      xm.confint<-values.confint[((length(values.confint[,1])/2)+1):length(values.confint[,1]),]

      ##error calculation
      b.error<-as.vector(abs((max(values[,1])/xm.confint[,1]^2)-(max(values[,1])/xm.confint[,2]^2)))
      n0.error<-as.vector(abs(((Im.confint[,1]/exp(-0.5))*xm.confint[,1]) -
                                ((Im.confint[,2]/exp(-0.5))*xm.confint[,2])))
    }
    ##------------------------------------------------------------------------##

    ##calculate photoionisation cross section and print on terminal
    ##using EQ (5) in Kitis
    cs<-as.vector((max(values[,1])/xm^2)/stimulation_intensity)
    rel_cs<-round(cs/cs[1],digits=4)

    ##coefficient of determination after law
    RSS <- sum(residuals(fit)^2) #residual sum of squares
    TSS <- sum((values[,2] - mean(values[,2]))^2) #total sum of squares
    pR<-round(1-RSS/TSS,digits=4)

    ##============================================================================##
    ## COMPONENT TO SUM CONTRIBUTION MATRIX
    ##============================================================================##

    ##+++++++++++++++++++++++++++++++
    ##set matrix
    ##set polygon matrix for optional plot output
    component.contribution.matrix <- matrix(NA,
                                            nrow = length(values[,1]),
                                            ncol = (2*length(xm)) + 2)

    ##set x-values
    component.contribution.matrix[,1] <- values[,1]
    component.contribution.matrix[,2] <- rev(values[,1])

    ##+++++++++++++++++++++++++++++++
    ##set 1st polygon
    ##1st polygon (calculation)
    y.contribution_first <- (exp(0.5)*Im[1]*values[,1]/
                               xm[1]*exp(-values[,1]^2/(2*xm[1]^2))/
                               (eval(fit.function))*100)

    ##avoid NaN values (might happen with synthetic curves)
    y.contribution_first[is.nan(y.contribution_first)==TRUE] <- 0

    ##set values in matrix
    component.contribution.matrix[,3] <- 100
    component.contribution.matrix[,4] <- 100-rev(y.contribution_first)

    ##+++++++++++++++++++++++++++++++
    ##set polygons in between
    ##polygons in between (calculate and plot)
    if (length(xm)>2){

      y.contribution_prev <- y.contribution_first
      i<-2

      ##matrix stepping
      k <- seq(3, ncol(component.contribution.matrix), by=2)

      while (i<=length(xm)-1) {
        y.contribution_next<-(exp(0.5)*Im[i]*values[,1]/
                                xm[i]*exp(-values[,1]^2/(2*xm[i]^2))/
                                (eval(fit.function))*100)

        ##avoid NaN values
        y.contribution_next[is.nan(y.contribution_next)==TRUE] <- 0

        ##set values in matrix
        component.contribution.matrix[, k[i]] <- 100-y.contribution_prev
        component.contribution.matrix[, k[i]+1] <- rev(100-y.contribution_prev-
                                                         y.contribution_next)

        y.contribution_prev <- y.contribution_prev + y.contribution_next

        i<-i+1
      }#end while loop
    }#end if

    ##+++++++++++++++++++++++++++++++
    ##set last polygon
    ##last polygon (calculation)
    y.contribution_last<-(exp(0.5)*Im[length(xm)]*values[,1]/
                            xm[length(xm)]*exp(-values[,1]^2/
                                                 (2*xm[length(xm)]^2))/
                            (eval(fit.function))*100)

    ##avoid NaN values
    y.contribution_last[is.nan(y.contribution_last)==TRUE]<-0

    component.contribution.matrix[,((2*length(xm))+1)] <- y.contribution_last
    component.contribution.matrix[,((2*length(xm))+2)] <- 0

    ##change names of matrix to make more easy to understand
    component.contribution.matrix.names <- c("x", "rev.x",
                                             paste(c("y.c","rev.y.c"),rep(1:n.components,each=2), sep=""))

    ##calculate area for each component, for each time interval
    component.contribution.matrix.area <- sapply(
      seq(3,ncol(component.contribution.matrix),by=2),
      function(x){
        matrixStats::rowDiffs(cbind(rev(component.contribution.matrix[,(x+1)]),
                                    component.contribution.matrix[,x]))
      })

    ##append to existing matrix
    component.contribution.matrix <- cbind(
      component.contribution.matrix,
      component.contribution.matrix.area,
      rowSums(component.contribution.matrix.area)
    )

    ##set final column names
    colnames(component.contribution.matrix) <- c(
      component.contribution.matrix.names,
      paste(c("cont.c"),rep(1:n.components,each=1), sep=""),
      "cont.sum")

    ##============================================================================##
    ## Terminal Output (advanced)
    ##============================================================================##
    if (verbose){
      ##write fill lines
      writeLines("------------------------------------------------------------------------------")
      writeLines("(1) Corresponding values according the equation in Bulur, 1996 for b and n0:\n")
      for (i in 1:length(b)){
        writeLines(paste("b",i," = ",format(b[i],scientific=TRUE)," +/- ",format(b.error[i],scientific=TRUE),sep=""))
        writeLines(paste("n0",i," = ",format(n0[i],scientific=TRUE)," +/- ",format(n0.error[i],scientific=TRUE),"\n",sep=""))
      }#end for loop

      ##write photoionisation cross section on terminal
      for (i in 1:length(cs)){
        writeLines(paste("cs from component.",i," = ",format(cs[i],scientific=TRUE, digits=4),
                         " cm^2", "\t >> relative: ",round(cs[i]/cs[1],digits=4),sep=""))
      }#end for loop

      writeLines(paste(
        "\n(stimulation intensity value used for calculation: ",format(stimulation_intensity,scientific=TRUE)," 1/s 1/cm^2)",sep=""))
      writeLines("(errors quoted as 1-sigma uncertainties)")
      writeLines("------------------------------------------------------------------------------\n")

      #sum of squares
      writeLines(paste("pseudo-R^2 = ",pR,sep=""))
    }#end if

    ##============================================================================##
    ## COMPOSE RETURN VALUES (data.frame)
    ##============================================================================##

    ##write output table if values exists
    if (exists("fit")){

      ##set data.frame for a max value of 7 components
      output.table <- data.frame(NA,NA,NA,NA,NA,NA,NA,NA,
                                 NA,NA,NA,NA,NA,NA,NA,NA,
                                 NA,NA,NA,NA,NA,NA,NA,NA,
                                 NA,NA,NA,NA,NA,NA,NA,NA,
                                 NA,NA,NA,NA,NA,NA,NA,NA,
                                 NA,NA,NA,NA,NA,NA,NA,NA,
                                 NA,NA,NA,NA,NA,NA,NA,NA)

      output.tableColNames<-c("Im1","xm1",
                              "b1","b1.error","n01","n01.error",
                              "cs1","rel_cs1",
                              "Im2","xm2",
                              "b2","b2.error","n02","n02.error",
                              "cs2","rel_cs2",
                              "Im3","xm3",
                              "b3","b3.error","n03","n03.error",
                              "cs3","rel_cs3",
                              "Im4","xm4",
                              "b4","b4.error","n04","n04.error",
                              "cs4","rel_cs4",
                              "Im5","xm5",
                              "b5","b5.error","n05","n05.error",
                              "cs5","rel_cs5",
                              "Im6","xm6",
                              "b6","b6.error","n06","n06.error",
                              "cs6","rel_cs6",
                              "Im7","xm7",
                              "b7","b7.error","n07","n07.error",
                              "cs7","rel_cs7")

      ##write components in output table
      ## FIX: the original while(i<=n.components*8) loop ran one iteration too
      ## many; for n.components = 7 it extended the data.frame beyond its 56
      ## columns and made the later colnames() assignment fail
      for (k in 1:n.components) {
        i <- (k - 1) * 8
        output.table[1,i+1]<-Im[k]
        output.table[1,i+2]<-xm[k]
        output.table[1,i+3]<-b[k]
        output.table[1,i+4]<-b.error[k]
        output.table[1,i+5]<-n0[k]
        output.table[1,i+6]<-n0.error[k]
        output.table[1,i+7]<-cs[k]
        output.table[1,i+8]<-rel_cs[k]
      }

      ##add pR and n.components
      output.table<-cbind(sample_ID,sample_code,n.components,output.table,pR)

      ###alter column names
      colnames(output.table)<-c("ID","sample_code","n.components",output.tableColNames,"pseudo-R^2")

      ##----------------------------------------------------------------------------##
    }#endif::exists fit

  }else{
    output.table <- NA
    component.contribution.matrix <- NA
    writeLines("[fit_LMCurve] Fitting Error: Plot without fit produced!")
  }

  ##============================================================================##
  ## PLOTTING
  ##============================================================================##
  if(plot){

    ##cheat the R check routine
    x <- NULL; rm(x)

    ##grep package colour gallery
    col <- get("col", pos = .LuminescenceEnv)

    ##change xlim values in case of the log plot the avoid problems
    if((log == "x" | log == "xy") && xlim[1] == 0){
      warning("[fit_LMCurve()] x-axis limitation change to avoid 0 values for log-scale!", call. = FALSE)
      xlim <- c(2^0.5/2 * max(values[,1])/length(values[,1]), xlim[2])
    }

    ##set plot frame
    par.default <- par(no.readonly = TRUE)
    layout(matrix(c(1,2,3),3,1,byrow=TRUE),c(1.6,1,1), c(1,0.3,0.4),TRUE)
    par(oma=c(1,1,1,1),mar=c(0,4,3,0), cex=cex)

    ##==uppper plot==##
    ##open plot area
    plot(
      NA,
      NA,
      xlim = xlim,
      ylim = ylim,
      xlab = "",
      xaxt = "n",
      main = main,
      log = log,
      ylab = ylab
    )#endplot

    mtext(side=3,sample_code,cex=0.8*cex)

    ##plotting measured signal
    points(values[, 1], values[, 2], pch = 20, col = rgb(0.4, 0.4, 0.4, 0.5))

    ##==pseudo curve==##------------------------------------------------------#
    ##curve for used pseudo values
    if(inherits(fit,"try-error")==TRUE & missing(start_values)==TRUE){

      fit.function<-fit.equation(Im.i=1:n.components,xm.i=1:n.components)
      Im<-Im.pseudo[1:n.components]
      xm<-xm.pseudo[1:n.components]

      ##draw pseudo curve
      lines(values[,1],eval(fit.function), lwd=2, col="red", lty=2)

      axis(side=1)
      mtext(side=1,xlab, cex=.9*cex,line=2)

      ## FIX: typo "pseduo" -> "pseudo" in the user-facing label
      mtext(side=4,paste(n.components, " component pseudo function is shown",sep=""),cex=0.7, col="blue")

      ##draw information text on plot
      text(min(values[,1]),max(values[,2]),"FITTING ERROR!",pos=4)

      ##additional legend
      legend("topright",c("pseudo sum function"),lty=2,lwd=2,col="red",bty="n")
    }
    ##==pseudo curve==##------------------------------------------------------##

    ##plot sum function
    if(inherits(fit,"try-error")==FALSE){
      lines(values[,1],eval(fit.function), lwd=2, col="black")
      legend.caption<-"sum curve"
      curve.col<-1

      ##plot signal curves
      ##plot curve for additional parameters
      for (i in 1:length(xm)) {
        curve(exp(0.5)*Im[i]*x/xm[i]*exp(-x^2/(2*xm[i]^2)),col=col[i+1], lwd=2,add=TRUE)
        legend.caption<-c(legend.caption,paste("component ",i,sep=""))
        curve.col<-c(curve.col,i+1)
      }

      ##plot legend
      legend(if(log=="x"| log=="xy"){
        if(input.dataType=="pLM"){"topright"}else{"topleft"}}else{"topright"},
        legend.caption,lty=1,lwd=2,col=col[curve.col], bty="n")

      ##==lower plot==##
      ##plot residuals
      par(mar=c(4.2,4,0,0))
      plot(values[,1],residuals(fit),
           xlim=xlim,
           xlab=xlab,
           type="l",
           col="grey",
           ylab="Residual",
           lwd=2,
           log=log)

      ##ad 0 line
      abline(h=0)

      ##------------------------------------------------------------------------#
      ##++component to sum contribution plot ++##
      ##------------------------------------------------------------------------#

      ##plot component contribution to the whole signal
      #open plot area
      par(mar=c(4,4,3.2,0))
      plot(NA,NA,
           xlim=xlim,
           ylim=c(0,100),
           ylab="Contribution [%]",
           xlab=xlab,
           main="Component contribution to sum curve",
           log=if(log=="xy"){"x"}else{log})

      ## FIX: use ncol() instead of length() (length() counted all matrix
      ## elements; only the first length(xm) entries were ever used, so the
      ## result is unchanged, but ncol() states the intent)
      stepping <- seq(3,ncol(component.contribution.matrix),2)

      for(i in 1:length(xm)){
        polygon(c(component.contribution.matrix[,1],
                  component.contribution.matrix[,2]),
                c(component.contribution.matrix[,stepping[i]],
                  component.contribution.matrix[,stepping[i]+1]),
                col = col[i+1])
      }
      rm(stepping)

      ##reset par
      par(par.default)
      rm(par.default)

      ##------------------------------------------------------------------------##
    }#end if try-error for fit

    if(fun==TRUE){sTeve()}
  }

  ##-----------------------------------------------------------------------------
  ##remove objects (the original used try(unlist("parameters")), which is a no-op)
  if (exists("parameters", inherits = FALSE)) rm(parameters)

  ##============================================================================#
  ## Return Values
  ##============================================================================#
  newRLumResults.fit_LMCurve <- set_RLum(
    class = "RLum.Results",
    data = list(
      data = output.table,
      fit = fit,
      component.contribution.matrix = list(component.contribution.matrix)
    ),
    info = list(call = sys.call())
  )

  invisible(newRLumResults.fit_LMCurve)
}
Luminescence/R/internal_as.latex.table.R0000644000176200001440000002552013604172511017715 0ustar liggesusers#' Create LaTex tables from data.frames and RLum objects
#'
#' This function takes a data.frame and returns a table in LaTex code that
#' can be copied in any tex document.
#'
#' @param x [data.frame] or `RLum` object (**required**)
#'
#' @param row.names currently unused
#'
#' @param col.names currently unused
#'
#' @param comments [logical] (*with default*):
#' insert LaTex comments
#'
#' @param pos [character] (*with default*):
#' `character` of length one specifying the alignment of each column, e.g.,
#' pos'clr' for a three column data frame and center, left
#' and right alignment
#'
#' @param digits [numeric] (*with default*):
#' number of digits (numeric fields)
#'
#' @param rm.zero [logical] (*with default*): remove columns containing
#' only zeros, however this might not be wanted in all cases
#'
#' @param select [character] (*optional*):
#' a [character] vector passed to [subset]
#'
#' @param split [integer] (*optional*):
#' an [integer] specifying the number of individual tables
#' the data frame is split into. Useful for wide tables. Currently unused.
#'
#' @param tabular_only [logical] (*with default*): if `TRUE` on the tabular but not the
#' table environment is returned. This gives a lot of additional flexibility at hand
#'
#' @param ... options: `verbose`
#'
#' @section TODO:
#' - Improve by using RegEx to dynamically find error fields, eg.
( "([ ]err)|(^err)" )
#' -
#'
#' @return
#' Returns LaTex code
#'
#' @examples
#' df <- data.frame(x = 1:10, y = letters[1:10])
#' .as.latex.table(df)
#' .as.latex.table(df, pos = "lr")
#' .as.latex.table(df, select = "y", pos = "r")
#'
#' @md
#' @noRd
.as.latex.table <- function(x,
                            row.names = NULL,
                            col.names = NULL,
                            comments = TRUE,
                            pos = "c",
                            digits = 3,
                            rm.zero = TRUE,
                            select,
                            split = NULL,
                            tabular_only = FALSE,
                            ...) {

  ## collect all arguments once and dispatch on the class of 'x'
  args <- list(
    x = x,
    row.names = row.names,
    col.names = col.names,
    comments = comments,
    pos = pos,
    digits = digits,
    rm.zero = rm.zero,
    split = split,
    tabular_only = tabular_only,
    ... = ...)

  if (!missing(select))
    args$select <- select

  switch(class(x)[1],
         data.frame = do.call(".as.latex.table.data.frame", args),
         DRAC.highlights = do.call(".as.latex.table.data.frame", args),
         RLum.Results = do.call(".as.latex.table.RLum.Results", args))
}

################################################################################
## "Method"                     RLum.Results                                  ##
##----------------------------------------------------------------------------##
.as.latex.table.RLum.Results <- function(x,
                                         row.names = NULL,
                                         col.names = NULL,
                                         comments = TRUE,
                                         pos = "c",
                                         digits = 3,
                                         rm.zero = TRUE,
                                         select,
                                         split = NULL,
                                         ...) {

  ## Object: DRAC.highlights
  if (x@originator == "use_DRAC") {
    x <- get_RLum(x)$highlights
    x <- .digits(x, digits)

    ##remove columns containing zero values ... they add no information
    if(rm.zero){
      x <- x[sapply(x, function(y){
        y <- suppressWarnings(as.numeric(y))
        if(anyNA(y) || sum(y, na.rm = TRUE) != 0){
          TRUE
        }else{
          FALSE
        }
      })]
    }

    ##add +/- symbol and remove the columns we don't need
    fields.w.error <- (grep(names(x), pattern = "err", fixed = TRUE) - 1)
    for(i in fields.w.error)
      x[ ,i] <- paste0(x[ ,i], "\\,$\\pm$\\,", trimws(x[ ,i + 1]))

    ## FIX: guard against the no-error-column case; the original
    ## x[-c(numeric(0) + 1)] dropped *all* columns
    if (length(fields.w.error) > 0)
      x <- x[-c(fields.w.error + 1)]

    ##create latex table
    text <- .as.latex.table(x, comments = comments, pos = pos, split = split, ...)

    ##split table
    text <- strsplit(text[[1]], split = "\n", fixed = TRUE)

    ##exchange columns ... or delete them at all (2nd step)
    ##Mineral
    for(i in 1:length(text)){
      text[[i]][grepl(pattern = "Mineral", x = text[[i]], fixed = TRUE)] <-
        "\t\\multicolumn{1}{p{0.5cm}}{\\centering \\textbf{M.}} & "
    }

    ##put things again together (single character)
    text <- paste(text[[1]], collapse = "\n")

    ##replace some latex stuff
    text <- gsub(pattern = "p{2cm}", replacement = "p{1.5cm}", x = text, fixed = TRUE)
    text <- gsub(pattern = "Gy.ka-1", replacement = "Gy~ka$^{-1}$", x = text, fixed = TRUE)
    text <- gsub(pattern = "De", replacement = "$D_{E}$", x = text, fixed = TRUE)
    text <- gsub(pattern = "alphadoserate", replacement = "$\\dot{D}_{\\alpha}$", x = text, fixed = TRUE)
    text <- gsub(pattern = "betadoserate", replacement = "$\\dot{D}_{\\beta}$", x = text, fixed = TRUE)
    text <- gsub(pattern = "gammadoserate", replacement = "$\\dot{D}_{\\gamma}$", x = text, fixed = TRUE)
    text <- gsub(pattern = "Cosmicdoserate", replacement = "$\\dot{D}_{cosm.}$", x = text, fixed = TRUE)
    text <- gsub(pattern = "External \\\\ doserate", replacement = "$\\dot{D}_{ext.}$", x = text, fixed = TRUE)
    text <- gsub(pattern = "Internal \\\\ doserate", replacement = "$\\dot{D}_{int.}$", x = text, fixed = TRUE)
    text <- gsub(pattern = "Environmental \\\\ Dose \\\\ Rate", replacement = "$\\dot{D}_{env.}$", x = text, fixed = TRUE)

    ##return result
    return(text)
  }# EndOf::use_DRAC
}

################################################################################
## "Method"                     data.frame                                    ##
##----------------------------------------------------------------------------##
.as.latex.table.data.frame <- function(x,
                                       row.names = NULL,
                                       col.names = NULL,
                                       comments = TRUE,
                                       pos = "c",
                                       digits = 3,
                                       select,
                                       split = NULL,
                                       tabular_only = FALSE,
                                       ...) {

  ## Integrity checks ----
  if (!is.data.frame(x))
    stop("x must be a data frame", call. = FALSE)
  if (!is.null(col.names) && length(col.names) != ncol(x))
    stop("length of col.names does not match the number of columns",
         call. = FALSE)
  if (!is.null(row.names) && length(row.names) != nrow(x))
    stop("length of row.names does not match the number of rows",
         call. = FALSE)
  if (length(pos) != 1)
    stop("length of pos does not match the number of columns",
         call. = FALSE)

  ## Default settings ----
  options <- list(verbose = TRUE)

  ## Override settings ----
  options <- modifyList(options, list(...))

  ## Subset data frame ----
  if (!missing(select)) {
    is.name <- select %in% names(x)
    if (any(!is.name))
      stop("Undefined columns selected. Please check provided column names in 'select'.",
           call. = FALSE)
    x <- subset(x, select = select)
  }

  ## Format numeric fields ----
  x <- .digits(x, digits)

  ## Split the table
  if (is.null(split))
    split <- 1
  chunks <- ceiling(ncol(x) / split)
  chunks.start <- seq(1, ncol(x), chunks)
  chunks.end <- chunks.start + chunks - 1
  chunks.end[length(chunks.end)] <- ncol(x)

  tex.table.list <- vector("list", split)

  for (i in 1:length(tex.table.list)) {

    ## FIX: drop = FALSE keeps a data.frame (with the right column name) even
    ## for single-column chunks; the original only special-cased ncol(x) == 1
    ## and broke for multi-column tables split into 1-column chunks
    x.chunk <- x[, chunks.start[i]:chunks.end[i], drop = FALSE]

    ## Comments ----
    tex.comment.usePackage <- ifelse(comments,
                                     "% add usepackage{adjustbox} to latex preamble \n",
                                     "")

    ## Header ----
    col.names <- tex.table.header <- gsub(pattern = " ",
                                          x = names(x.chunk),
                                          replacement = " \\\\\\\\ ")

    tex.table.header <- paste0("\t",
                               paste("\\multicolumn{1}{p{2cm}}{\\centering",
                                     col.names,
                                     "}", collapse = " & \n\t"),
                               "\\\\ \n")

    ## Rows ----
    tex.table.rows <- ""
    for (j in 1:nrow(x.chunk)) {
      tex.table.rows <- paste0(tex.table.rows,
                               paste(paste(x.chunk[j, ], collapse = " & "), "\\\\ \n"))
    }

    ## catch potential latex problems with underscores - after all are numbers, in can be only
    ## on the ID
    tex.table.rows <- gsub("_", "\\_", tex.table.rows, fixed = TRUE)

    ## Tex table ----
    if (nchar(pos) != 1 && nchar(pos) != ncol(x))
      pos <- "c"
    ## FIX: require *all* characters to be valid alignments (the original
    ## used !any(), which accepted strings such as "lxr")
    if (!all(strsplit(pos, split = "")[[1]] %in% c("l", "c", "r")))
      pos <- "c"
    if (nchar(pos) == 1)
      pos <- paste0(rep(pos, ncol(x)), collapse = "")

    if(tabular_only){
      tex.table.begin <- paste0(paste(" \\begin{tabular}{", pos, "}\n"),
                                " \\hline \n")
      tex.table.end <- paste0(" \\hline \n",
                              " \\end{tabular}")
    }else{
      tex.table.begin <- paste0("\\begin{table}[ht] \n",
                                " \\centering \n",
                                " \\begin{adjustbox}{max width=\\textwidth} \n",
                                paste(" \\begin{tabular}{", pos, "}\n"),
                                " \\hline \n")
      tex.table.end <- paste0(" \\hline \n",
                              " \\end{tabular} \n",
                              " \\end{adjustbox} \n",
                              "\\end{table}")
    }

    tex.table <- paste0(tex.comment.usePackage,
                        tex.table.begin,
                        tex.table.header,
                        "\\hline \n",
                        tex.table.rows,
                        tex.table.end)

    if (options$verbose)
      cat(tex.table)

    tex.table.list[[i]] <- tex.table
  }

  invisible(tex.table.list)
}

# This function takes a data.frame, checks each column and tries to
# force the specified amount of digits if numeric or coercable to numeric
.digits <- function(x, digits) {
  for (i in 1:ncol(x)) {
    if (is.factor(x[ ,i]))
      x[ ,i] <- as.character(x[ ,i])
    test.numeric <- suppressWarnings(as.numeric(x[ ,i]))
    if (!is.na(test.numeric[1]))
      x[ ,i] <- format(round(test.numeric, digits), nsmall = digits, digits = digits)
  }
  return(x)
}
Luminescence/R/addins_RLum.R0000644000176200001440000000472313231137116015415 0ustar liggesusers##################################################################################
## Luminescence - RStudio Add Ins ##
##################################################################################
##<>
##
## - Add-ins should support more experienced users. For all others we have the package 'RLumShiny'
##
## - Add-ins should be provided as non-exported function only, having the a name with a leading dot, e.g., .addin.
## This prevents further chaos in the manual.
##
## - Interactive add-ins are not desired, except they are implemented in the package 'RLumShiny' or they
## are only available if the package 'RLumShiny' is available.
##
##
##<>
##
## Q. Why are the add-ins non-interactive ... had you been too lazy?
## A.
No, but interactivity would require the installation of 'shiny" by default, which is not
##    desired.
##
## Q. The add-ins are not shown in the 'Addin' menu?
## A. Well, if you read this information you are an advanced user, so please install the
##    packages 'rstudioapi', 'devtools' and get happy.

#'Install package development version
#'
#'Non-interactive add-in: installs the development version of the package
#'via the GitHub API connection provided by Christoph Burow
#'
#'@author Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne, France
#'
#'@noRd
.installDevelopmentVersion <- function(){
  install_DevelopmentVersion(force_install = TRUE)
}

#'Search for TODOs in the source code and list them in the terminal
#'
#'This add-in is a tool developers may want to use to briefly list all open
#'issues in the terminal, instead of using search and stepping through the results.
#'
#'@author Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne, France
#'
#'@noRd
.listTODO <- function(){

  ##check if the package 'rstudioapi' is installed; if not, offer to install
  ##it interactively instead of failing
  if(!requireNamespace("rstudioapi", quietly = TRUE)){
    message("Package 'rstudioapi' is not installed but needed to search for TODOs, do you want to install it?\n\n",
            " [n/N]: No (default)\n",
            " [y/Y]: Yes\n")

    ##parse answer (anything but 'y'/'Y' is treated as 'No')
    answer <- readline()
    if(tolower(answer) == "y"){
      utils::install.packages("rstudioapi", dependencies = TRUE)
    }

  }else{
    ##parse code of the document currently open in the RStudio editor
    code <- rstudioapi::getActiveDocumentContext()$contents

    ##get lines with ##TODO (the regex also matches '#TODO', '# TODO', ...)
    id <- grep(pattern = "#\\s*TODO", x = code, fixed = FALSE)

    ##list lines with their line numbers in the terminal
    cat("\n", "[", length(id), " issue(s)]\n", sep = "")
    for(i in id){
      cat(" line ", i, ": ->", code[i], "\n", sep = "")
    }
  }
}
Luminescence/R/plot_RLum.Data.Image.R0000644000176200001440000001420013231137116017013 0ustar liggesusers#' Plot function for an `RLum.Data.Image` S4 class object
#'
#' The function provides a standardised plot output for image data of an
#' `RLum.Data.Image`S4 class object, mainly using the plot functions
#' provided by the [raster] package.
#' #' **Details on the plot functions** #' #' Image is visualised as 2D plot usinng generic plot types provided by other #' packages. #' #' Supported plot types: #' #' **`plot.type = "plot.raster"`** #' #' Uses the standard plot function for raster data from the package #' [raster::raster]: [raster::plot]. For each raster layer in a #' raster brick one plot is produced. #' #' Arguments that are passed through the function call: #' #' `main`,`axes`, `xlab`, `ylab`, `xlim`, `ylim`, #' `col` #' #' **`plot.type = "plotRGB"`** #' #' Uses the function [raster::plotRGB] from the #' [raster::raster] package. Only one image plot is produced as all layers #' in a brick a combined. This plot type is useful to see whether any signal #' is recorded by the camera.\cr #' Arguments that are passed through the function call: #' #' `main`,`axes`, `xlab`, `ylab`, `ext`, `interpolate`, `maxpixels`, #' `alpha`, `colNA`, `stretch` #' #' **`plot.type = "contour"`** #' #' Uses the function contour plot function from the [raster] #' function ([raster::contour]). For each raster layer one contour #' plot is produced. Arguments that are passed through the function call:\cr #' #' `main`,`axes`, `xlab`, `ylab`, `xlim`, `ylim`, #' `col` #' #' @param object [RLum.Data.Image-class] (**required**): S4 #' object of class `RLum.Data.Image` #' #' @param par.local [logical] (*with default*): use local graphical #' parameters for plotting, e.g. the plot is shown in one column and one row. #' If `par.local = FALSE` global parameters are inherited. #' #' @param plot.type [character] (*with default*): plot types. #' Supported types are `plot.raster`, `plotRGB` or `contour` #' #' @param ... further arguments and graphical parameters that will be passed #' to the specific plot functions. #' #' @return Returns a plot. #' #' @note #' This function has been created to faciliate the plotting of image data #' imported by the function [read_SPE2R]. 
#' However, so far the
#' function is not optimized to handle image data > ca. 200 MByte and thus
#' plotting of such data is extremely slow.
#'
#' @section Function version: 0.1
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @seealso [RLum.Data.Image-class], [plot], [plot_RLum], [raster::raster]
#'
#' @keywords aplot
#'
#' @examples
#'
#' ##load data
#' data(ExampleData.RLum.Data.Image, envir = environment())
#'
#' ##plot data
#' plot_RLum.Data.Image(ExampleData.RLum.Data.Image)
#'
#' @md
#' @export
plot_RLum.Data.Image <- function(
  object,
  par.local = TRUE,
  plot.type = "plot.raster",
  ...
){

  # Integrity check -----------------------------------------------------------

  ##check if object is of class RLum.Data.Image (is() handles S4 inheritance)
  if(!is(object, "RLum.Data.Image")){
    stop("[plot_RLum.Data.Image()] Input object is not of type RLum.Data.Image")
  }

  ##extract the raster object once; it is needed repeatedly below
  object_raster <- get_RLum(object)

  ##deal with additional arguments; anything not supplied by the user falls
  ##back to the defaults defined here
  extraArgs <- list(...)

  ##TODO
  main <- if("main" %in% names(extraArgs)) extraArgs$main else "RLum.Data.Image"
  axes <- if("axes" %in% names(extraArgs)) extraArgs$axes else TRUE
  xlab <- if("xlab" %in% names(extraArgs)) extraArgs$xlab else "Length [px]"
  ylab <- if("ylab" %in% names(extraArgs)) extraArgs$ylab else "Height [px]"
  xlim <- if("xlim" %in% names(extraArgs)) extraArgs$xlim else c(0, dim(object_raster)[2])
  ylim <- if("ylim" %in% names(extraArgs)) extraArgs$ylim else c(0, dim(object_raster)[1])

  ##plotRGB::ext
  ext <- if("ext" %in% names(extraArgs)) extraArgs$ext else NULL

  ##plotRGB::interpolate
  interpolate <- if("interpolate" %in% names(extraArgs)) extraArgs$interpolate else FALSE

  ##plotRGB::stretch
  stretch <- if("stretch" %in% names(extraArgs)) extraArgs$stretch else "hist"

  ##plotRGB::maxpixels (default: plot every pixel of the image)
  maxpixels <- if("maxpixels" %in% names(extraArgs)) extraArgs$maxpixels else
    dim(object_raster)[1] * dim(object_raster)[2]

  ##plotRGB::alpha
  alpha <- if("alpha" %in% names(extraArgs)) extraArgs$alpha else 255

  ##plotRGB::colNA
  colNA <- if("colNA" %in% names(extraArgs)) extraArgs$colNA else "white"

  col <- if("col" %in% names(extraArgs)) extraArgs$col else topo.colors(255)
  cex <- if("cex" %in% names(extraArgs)) extraArgs$cex else 1

  ##par setting for possible combination with plot method for RLum.Analysis objects
  if(par.local == TRUE){
    par(mfrow = c(1,1), cex = cex)
  }

  if(plot.type == "plotRGB"){
    ## ==========================================================================#
    ## standard raster plotRGB (package raster)
    ## ==========================================================================#
    raster::plotRGB(
      object_raster,
      main = main,
      axes = axes,   ##was hard-coded to TRUE before; now respects the user setting
      xlab = xlab,
      ylab = ylab,
      ext = ext,
      interpolate = interpolate,
      maxpixels = maxpixels,
      alpha = alpha,
      colNA = colNA,
      stretch = stretch)

  }else if(plot.type == "plot.raster"){
    ## ==========================================================================#
    ## standard raster plot (package raster)
    ## ==========================================================================#
    plot(object_raster,
         main = main,
         xlim = xlim,
         ylim = ylim,
         xlab = xlab,
         ylab = ylab,
         col = col)

  }else if(plot.type == "contour"){
    ## ==========================================================================#
    ## standard contour (package raster); one plot per raster layer
    ## ==========================================================================#
    for(i in 1:raster::nlayers(object_raster)){
      raster::contour(raster::raster(object_raster, layer = i),
                      main = main,
                      xlim = xlim,
                      ylim = ylim,
                      xlab = xlab,
                      ylab = ylab,
                      col = col)
    }

  }else{
    stop("[plot_RLum.Data.Image()] Unknown plot type.")
  }
}
Luminescence/R/calc_FiniteMixture.R0000644000176200001440000005405513420131716016775 0ustar liggesusers#' Apply the finite mixture model (FMM) after Galbraith (2005) to a given De
#' distribution
#'
#' This function fits a k-component mixture to a De distribution with differing
#' known standard errors.
Parameters (doses and mixing proportions) are #' estimated by maximum likelihood assuming that the log dose estimates are #' from a mixture of normal distributions. #' #' This model uses the maximum likelihood and Bayesian Information Criterion #' (BIC) approaches. #' #' Indications of overfitting are: #' #' - increasing BIC #' - repeated dose estimates #' - covariance matrix not positive definite #' - covariance matrix produces NaNs #' - convergence problems #' #' **Plot** #' #' If a vector (`c(k.min:k.max)`) is provided #' for `n.components` a plot is generated showing the the k components #' equivalent doses as normal distributions. By default `pdf.weight` is #' set to `FALSE`, so that the area under each normal distribution is #' always 1. If `TRUE`, the probability density functions are weighted by #' the components proportion for each iteration of k components, so the sum of #' areas of each component equals 1. While the density values are on the same #' scale when no weights are used, the y-axis are individually scaled if the #' probability density are weighted by the components proportion.\cr #' The standard deviation (sigma) of the normal distributions is by default #' determined by a common `sigmab` (see `pdf.sigma`). For #' `pdf.sigma = "se"` the standard error of each component is taken #' instead.\cr #' The stacked barplot shows the proportion of each component (in #' per cent) calculated by the FFM. The last plot shows the achieved BIC scores #' and maximum log-likelihood estimates for each iteration of k. #' #' @param data [RLum.Results-class] or [data.frame] (**required**): #' for [data.frame]: two columns with De `(data[,1])` and De error `(values[,2])` #' #' @param sigmab [numeric] (**required**): #' spread in De values given as a fraction (e.g. 0.2). This value represents the expected #' overdispersion in the data should the sample be well-bleached #' (Cunningham & Wallinga 2012, p. 100). 
#' #' @param n.components [numeric] (**required**): #' number of components to be fitted. If a vector is provided (e.g. `c(2:8)`) the #' finite mixtures for 2, 3 ... 8 components are calculated and a plot and a #' statistical evaluation of the model performance (BIC score and maximum #' log-likelihood) is provided. #' #' @param grain.probability [logical] (*with default*): #' prints the estimated probabilities of which component each grain is in #' #' @param dose.scale [numeric]: #' manually set the scaling of the y-axis of the first plot with a vector #' in the form of `c(min, max)` #' #' @param pdf.weight [logical] (*with default*): #' weight the probability density functions by the components proportion (applies only #' when a vector is provided for `n.components`) #' #' @param pdf.sigma [character] (*with default*): #' if `"sigmab"` the components normal distributions are plotted with a common standard #' deviation (i.e. `sigmab`) as assumed by the FFM. Alternatively, #' `"se"` takes the standard error of each component for the sigma #' parameter of the normal distribution #' #' @param pdf.colors [character] (*with default*): #' color coding of the components in the the plot. #' Possible options are `"gray"`, `"colors"` and `"none"` #' #' @param pdf.scale [numeric]: #' manually set the max density value for proper scaling of the x-axis of the first plot #' #' @param plot.proportions [logical] (*with default*): #' plot barplot showing the proportions of components #' #' @param plot [logical] (*with default*): #' plot output #' #' @param ... further arguments to pass. See details for their usage. #' #' @return #' Returns a plot (*optional*) and terminal output. 
In addition an #' [RLum.Results-class] object is returned containing the #' following elements: #' #' \item{.$summary}{[data.frame] summary of all relevant model results.} #' \item{.$data}{[data.frame] original input data} #' \item{.$args}{[list] used arguments} #' \item{.$call}{[call] the function call} #' \item{.$mle}{ covariance matrices of the log likelhoods} #' \item{.$BIC}{ BIC score} #' \item{.$llik}{ maximum log likelihood} #' \item{.$grain.probability}{ probabilities of a grain belonging to a component} #' \item{.$components}{[matrix] estimates of the de, de error and proportion for each component} #' \item{.$single.comp}{[data.frame] single componente FFM estimate} #' #' If a vector for `n.components` is provided (e.g. `c(2:8)`), #' `mle` and `grain.probability` are lists containing matrices of the #' results for each iteration of the model. #' #' The output should be accessed using the function [get_RLum] #' #' @section Function version: 0.4.1 #' #' @author #' Christoph Burow, University of Cologne (Germany) \cr #' Based on a rewritten S script of Rex Galbraith, 2006. #' #' @seealso [calc_CentralDose], [calc_CommonDose], #' [calc_FuchsLang2001], [calc_MinDose] #' #' @references #' Galbraith, R.F. & Green, P.F., 1990. Estimating the component #' ages in a finite mixture. Nuclear Tracks and Radiation Measurements 17, #' 197-206. #' #' Galbraith, R.F. & Laslett, G.M., 1993. Statistical models #' for mixed fission track ages. Nuclear Tracks Radiation Measurements 4, #' 459-470. #' #' Galbraith, R.F. & Roberts, R.G., 2012. Statistical aspects of #' equivalent dose and error calculation and display in OSL dating: An overview #' and some recommendations. Quaternary Geochronology 11, 1-27. #' #' Roberts, R.G., Galbraith, R.F., Yoshida, H., Laslett, G.M. & Olley, J.M., 2000. #' Distinguishing dose populations in sediment mixtures: a test of single-grain #' optical dating procedures using mixtures of laboratory-dosed quartz. #' Radiation Measurements 32, 459-465. 
#' #' Galbraith, R.F., 2005. Statistics for Fission Track Analysis, Chapman & Hall/CRC, Boca Raton. #' #' **Further reading** #' #' Arnold, L.J. & Roberts, R.G., 2009. Stochastic #' modelling of multi-grain equivalent dose (De) distributions: Implications #' for OSL dating of sediment mixtures. Quaternary Geochronology 4, #' 204-230. #' #' Cunningham, A.C. & Wallinga, J., 2012. Realizing the #' potential of fluvial archives using robust OSL chronologies. Quaternary #' Geochronology 12, 98-106. #' #' Rodnight, H., Duller, G.A.T., Wintle, A.G. & #' Tooth, S., 2006. Assessing the reproducibility and accuracy of optical #' dating of fluvial deposits. Quaternary Geochronology 1, 109-120. #' #' Rodnight, H. 2008. How many equivalent dose values are needed to obtain a #' reproducible distribution?. Ancient TL 26, 3-10. #' #' #' @examples #' #' ## load example data #' data(ExampleData.DeValues, envir = environment()) #' #' ## (1) apply the finite mixture model #' ## NOTE: the data set is not suitable for the finite mixture model, #' ## which is why a very small sigmab is necessary #' calc_FiniteMixture(ExampleData.DeValues$CA1, #' sigmab = 0.2, n.components = 2, #' grain.probability = TRUE) #' #' ## (2) repeat the finite mixture model for 2, 3 and 4 maximum number of fitted #' ## components and save results #' ## NOTE: The following example is computationally intensive. Please un-comment #' ## the following lines to make the example work. 
#' FMM<- calc_FiniteMixture(ExampleData.DeValues$CA1,
#'                          sigmab = 0.2, n.components = c(2:4),
#'                          pdf.weight = TRUE, dose.scale = c(0, 100))
#'
#' ## show structure of the results
#' FMM
#'
#' ## show the results on equivalent dose, standard error and proportion of
#' ## fitted components
#' get_RLum(object = FMM, data.object = "components")
#'
#' @md
#' @export
calc_FiniteMixture <- function(
  data,
  sigmab,
  n.components,
  grain.probability = FALSE,
  dose.scale,
  pdf.weight = TRUE,
  pdf.sigma = "sigmab",
  pdf.colors = "gray",
  pdf.scale,
  plot.proportions = TRUE,
  plot=TRUE,
  ...
){

  ##============================================================================##
  ## CONSISTENCY CHECK OF INPUT DATA
  ##============================================================================##
  ## NOTE: errors are now raised via stop(msg, call. = FALSE) so that the
  ## message is part of the condition object (previously the message was only
  ## cat()'ed and the raised condition was empty, invisible to tryCatch()).
  if(!missing(data)){
    if(!is(data, "data.frame") && !is(data, "RLum.Results")){
      stop("[calc_FiniteMixture] Error: 'data' object has to be of type 'data.frame' or 'RLum.Results'!")
    } else if(is(data, "RLum.Results")){
      data <- get_RLum(data, "data")
    }
  }

  ##enforce the column names expected by the code below
  try(colnames(data)<- c("ED","ED_Error"), silent=TRUE)
  if(colnames(data[1])!="ED"||colnames(data[2])!="ED_Error")
    stop("Columns must be named 'ED' and 'ED_Error'", call. = FALSE)

  if(sigmab < 0 | sigmab > 1)
    stop("sigmab needs to be given as a fraction between 0 and 1 (e.g. 0.2)",
         call. = FALSE)

  if(any(n.components < 2))
    stop("At least two components need to be fitted", call. = FALSE)

  if(!pdf.sigma %in% c("se", "sigmab"))
    stop("Only 'se' or 'sigmab' allowed for the pdf.sigma argument", call. = FALSE)

  ##============================================================================##
  ## ... ARGUMENTS
  ##============================================================================##
  extraArgs <- list(...)

  ## console output
  verbose <- if("verbose" %in% names(extraArgs)) extraArgs$verbose else TRUE

  ## trace calculations
  trace <- if("trace" %in% names(extraArgs)) extraArgs$trace else FALSE

  ## plot title
  main <- if("main" %in% names(extraArgs)) extraArgs$main else "Finite Mixture Model"

  ##============================================================================##
  ## CALCULATIONS
  ##============================================================================##

  ## create storage variables if more than one k is provided
  if(length(n.components)>1) {

    # counter needed for various purposes
    cnt<- 1

    # create summary matrix containing DE, standard error (se) and proportion
    # for each component
    comp.n<- matrix(data = NA, ncol = length(n.components),
                    nrow = n.components[length(n.components)] * 3,
                    byrow = TRUE)

    # create empty vector as storage for BIC and LLIK scores
    BIC.n<- vector(mode = "double")
    LLIK.n<- vector(mode = "double")

    # create empty vectors of type "lists" as storage for mle matrices and
    # grain probabilities
    vmat.n<- vector(mode = "list", length = length(n.components))
    grain.probability.n<- vector(mode = "list", length = length(n.components))
  }

  ## start actual calculation (loop) for each provided maximum components to
  ## be fitted.
  for(i in 1:length(n.components)) {
    k<- n.components[i]

    # calculate yu = log(ED), su = se(logED), n = number of grains
    yu<- log(data$ED)
    su<- data$ED_Error/data$ED
    n<- length(yu)

    # compute starting values
    fui<- matrix(0,n,k)
    pui<- matrix(0,n,k)
    nui<- matrix(0,n,k)
    pii<- rep(1/k,k)
    mu<- min(yu) + (max(yu)-min(yu))*(1:k)/(k+1)

    # remove the # in the line below to get alternative starting values
    # (useful to check that the algorithm converges to the same values)
    # mu<- quantile(yu,(1:k)/(k+1))

    # compute maximum log likelihood estimates (fixed-iteration EM scheme)
    nit<- 499L
    wu<- 1/(sigmab^2 + su^2)
    rwu<- sqrt(wu)

    for(j in 1:nit){
      for(i in 1:k) {
        fui[,i]<- rwu*exp(-0.5*wu*(yu-mu[i])^2)
        nui[,i]<- pii[i]*fui[,i]
      }
      pui<- nui/apply(nui,1,sum)
      mu<- apply(wu*yu*pui,2,sum)/apply(wu*pui,2,sum)
      pii<- apply(pui,2,mean)
    }

    # calculate the log likelihood and BIC
    llik<- sum( log( (1/sqrt(2*pi))*apply(nui,1,sum) ))
    bic<- -2*llik + (2*k - 1)*log(n)

    # calculate the covariance matrix and standard errors of the estimates
    # i.e., the dose estimates in Gy and relative standard errors, and
    # the mixing proportions and standard errors.
    aui<- matrix(0,n,k)
    bui<- matrix(0,n,k)
    for(i in 1:k) {
      aui[,i]<- wu*(yu-mu[i])
      bui[,i]<- -wu + (wu*(yu-mu[i]))^2
    }
    delta<- diag(rep(1,k))

    Au<- matrix(0,k-1,k-1)
    Bu<- matrix(0,k-1,k)
    Cu<- matrix(0,k,k)

    for(i in 1:(k-1)){ for(j in 1:(k-1)){
      Au[i,j]<- sum( (pui[,i]/pii[i] - pui[,k]/pii[k])*(pui[,j]/pii[j] -
                                                          pui[,k]/pii[k]) )}}

    for(i in 1:(k-1)){ for(j in 1:k){
      Bu[i,j]<- sum( pui[,j]*aui[,j]*(pui[,i]/pii[i] - pui[,k]/pii[k] -
                                        delta[i,j]/pii[i] + delta[k,j]/pii[k] ) )}}

    for(i in 1:k){ for(j in 1:k){
      Cu[i,j]<- sum( pui[,i]*pui[,j]*aui[,i]*aui[,j] - delta[i,j]*bui[,i]*
                       pui[,i] ) }}

    invvmat<- rbind(cbind(Au,Bu),cbind(t(Bu),Cu))
    vmat<- solve(invvmat, tol=.Machine$double.xmin)
    rek<- sqrt(sum(vmat[1:(k-1),1:(k-1)]))

    # calculate DE, relative standard error, standard error
    dose<- exp(mu)
    re<- sqrt(diag(vmat))[-c(1:(k-1))]
    if (any(is.nan(re))) re[is.nan(re)] <- NA
    sed<- dose*re
    estd<- rbind(dose,re,sed)

    # rename proportion
    prop<- pii

    # this calculates the proportional standard error of the proportion of grains
    # in the fitted components. However, the calculation is most likely erroneous.
    # sep<- c(sqrt(diag(vmat))[c(1:(k-1))],rek)

    # rename proportion
    estp<- prop

    # merge results to a data frame
    blk<- rep(" ",k)
    comp<- rbind(blk,round(estd,4),blk,round(estp,4))
    comp<- data.frame(comp,row.names=c("","dose (Gy) ","rse(dose) ",
                                       "se(dose)(Gy)"," ","proportion "))

    # label results data frame
    cp<- rep("comp",k)
    cn<- c(1:k)
    names(comp)<- paste(cp,cn,sep="")

    # calculate the log likelihood and BIC for a single component -- can
    # be useful to see if there is evidence of more than one component
    mu0<- sum(wu*yu)/sum(wu)
    fu0<- rwu*exp(-0.5*wu*(yu-mu0)^2)
    L0<- sum( log((1/sqrt(2*pi))*fu0 ) )
    bic0<- -2*L0 + log(n)
    comp0<- round(c(exp(mu0),sigmab,L0,bic0),4)

    ## save results for k components in storage variables
    if(length(n.components)>1) {

      # vector of indices needed for finding the dose rows of the summary
      # matrix - position 1,4,7...n
      pos.n<- seq(from = 1, to = n.components[cnt]*3, by = 3)

      # save results of each iteration to summary matrix
      for(i in 1:n.components[cnt]) {
        comp.n[pos.n[i], cnt]<- round(dose[i], 2)   #De
        comp.n[pos.n[i]+1, cnt]<- round(sed[i], 2)  #SE
        comp.n[pos.n[i]+2, cnt]<- round(estp[i], 2) #Proportion
      }

      # save BIC and llik of each iteration to corresponding vector
      BIC.n[cnt]<- bic
      LLIK.n[cnt]<- llik

      # merge BIC and llik scores to a single data frame
      results.n<- rbind(BIC = round(BIC.n, 3), llik = round(LLIK.n, 3))

      # save mle matrix and grain probabilities to corresponding vector
      vmat.n[[cnt]]<- vmat
      grain.probability.n[[cnt]]<- as.data.frame(pui)

      # increase counter by one for next iteration
      cnt<- cnt+1
    }#EndOf::save intermediate results
  }##EndOf::calculation loop

  ##============================================================================##
  ## STATISTICAL CHECK
  ##============================================================================##
  if(length(n.components)>1) {

    ## Evaluate maximum log likelihood estimates
    LLIK.significant<- vector(mode = "logical")

    # check if llik is at least three times greater when adding a further
    # component
    for(i in 1:c(length(LLIK.n)-1)) {
      LLIK.significant[i]<- (LLIK.n[i+1]/LLIK.n[i])>3
    }

    ## Find lowest BIC score
    BIC.lowest<- n.components[which.min(BIC.n)]
  }

  ##============================================================================##
  ## OUTPUT
  ##============================================================================##
  if(verbose==TRUE) {

    ## HEADER (always printed)
    cat("\n [calc_FiniteMixture]")

    ##--------------------------------------------------------------------------
    ## OUTPUT WHEN ONLY ONE VALUE FOR n.components IS PROVIDED
    if(length(n.components) == 1) {

      # covariance matrix
      cat(paste("\n\n--- covariance matrix of mle's ---\n\n"))
      print(round(vmat,6))

      # general information on sample and model performance
      cat(paste("\n----------- meta data ------------"))
      cat(paste("\n n: ",n))
      cat(paste("\n sigmab: ",sigmab))
      cat(paste("\n number of components: ",k))
      cat(paste("\n llik: ",round(llik,4)))
      cat(paste("\n BIC: ",round(bic,3)))

      # fitted components
      cat(paste("\n\n----------- components -----------\n\n"))
      print(comp)

      # print (to 2 decimal places) the estimated probabilities of which
      # component each grain is in -- sometimes useful for diagnostic purposes
      if(grain.probability==TRUE) {
        cat(paste("\n-------- grain probability -------\n\n"))
        print(round(pui,2))
      }

      # output for single component
      cat(paste("\n-------- single component --------"))
      cat(paste("\n mu: ", comp0[1]))
      cat(paste("\n sigmab: ", comp0[2]))
      cat(paste("\n llik: ", comp0[3]))
      cat(paste("\n BIC: ", comp0[4]))
      cat(paste("\n----------------------------------\n\n"))
    }#EndOf::Output for length(n.components) == 1

    ##--------------------------------------------------------------------------
    ## OUTPUT WHEN >1 VALUE FOR n.components IS PROVIDED
    if(length(n.components) > 1) {

      ## final labeling of component and BIC/llik matrices
      # create labels
      dose.lab<- paste("c", 1:n.components[length(n.components)],"_dose", sep="")
      se.lab<- paste("c", 1:n.components[length(n.components)],"_se", sep="")
      prop.lab<- paste("c", 1:n.components[length(n.components)],"_prop", sep="")

      # empty vector which stores the labels in correct order (dose, se, prop)
      n.lab<- vector(mode = "expression", n.components[length(n.components)]*3)

      # loop to store the labels in correct order (dose, se, prop)
      cnt<- 1
      for(i in pos.n) {
        n.lab[i]<- dose.lab[cnt]
        n.lab[i+1]<- se.lab[cnt]
        n.lab[i+2]<- prop.lab[cnt]
        cnt<- cnt+1
      }

      # label columns and rows of summary matrix and BIC/LLIK data frame
      colnames(comp.n)<- n.components[1]:n.components[length(n.components)]
      rownames(comp.n)<- n.lab
      colnames(results.n)<- n.components[1]:n.components[length(n.components)]

      ## CONSOLE OUTPUT
      # general information on sample and model performance
      cat(paste("\n\n----------- meta data ------------"))
      cat(paste("\n n: ",n))
      cat(paste("\n sigmab: ",sigmab))
      cat(paste("\n number of components: ",n.components[1],"-",
                n.components[length(n.components)], sep=""))

      # output for single component
      cat(paste("\n\n-------- single component --------"))
      cat(paste("\n mu: ", comp0[1]))
      cat(paste("\n sigmab: ", comp0[2]))
      cat(paste("\n llik: ", comp0[3]))
      cat(paste("\n BIC: ", comp0[4]))

      # print component matrix
      cat(paste("\n\n----------- k components -----------\n"))
      print(comp.n, na.print="")

      # print BIC scores and LLIK estimates
      cat(paste("\n----------- statistical criteria -----------\n"))
      print(results.n)

      ## print evaluation of statistical criteria
      # lowest BIC score
      cat(paste("\n Lowest BIC score for k =", BIC.lowest))

      # first significant increase in LLIK estimates
      if(!any(LLIK.significant, na.rm = TRUE)) {
        cat(paste("\n No significant increase in maximum log",
                  "likelihood estimates. \n"))
      } else {
        cat(paste("\n First significant increase in maximum log likelihood for",
                  "k =", which(LLIK.significant==TRUE)[1], "\n\n"))
      }
      cat(paste("\n"))
    }#EndOf::Output for length(n.components) > 1
  }

  ##============================================================================##
  ## RETURN VALUES
  ##============================================================================##

  # .@data$meta
  BIC<- data.frame(n.components=k, BIC=bic)
  llik<- data.frame(n.components=k, llik=llik)

  if(length(n.components)>1) {
    BIC.n<- data.frame(n.components=n.components, BIC=BIC.n)
    llik.n<- data.frame(n.components=n.components, llik=LLIK.n)
  }

  # .@data$single.comp
  single.comp<- data.frame(mu=comp0[1],sigmab=comp0[2],
                           llik=comp0[3],BIC=comp0[4])

  # .@data$components
  comp.re<- t(rbind(round(estd,4),round(estp,4)))
  colnames(comp.re)<- c("de","rel_de_err","de_err","proportion")
  comp.re<- comp.re[,-2] # remove the relative error column

  # .@data$grain.probability
  grain.probability<- round(pui, 2)

  summary<- data.frame(comp.re)
  call<- sys.call()
  args<- list(sigmab = sigmab, n.components = n.components)

  # create S4 object
  newRLumResults.calc_FiniteMixture <- set_RLum(
    class = "RLum.Results",
    data = list(
      summary=summary,
      data=data,
      args=args,
      call=call,
      mle=if(length(n.components)==1){vmat}else{vmat.n},
      BIC=if(length(n.components)==1){BIC}else{BIC.n},
      llik=if(length(n.components)==1){llik}else{llik.n},
      grain.probability=if(length(n.components)==1){grain.probability}else{grain.probability.n},
      components=if(length(n.components)==1){comp.re}else{comp.n},
      single.comp=single.comp))

  if (anyNA(unlist(summary)) && verbose)
    warning("\n[calc_FiniteMixture] The model produced NA values. Either the input data are inapplicable for the model",
            " or the model parameters need to be adjusted (e.g. 'sigmab')",
            call. = FALSE)

  ##=========##
  ## PLOTTING
  if(plot && !anyNA(unlist(summary)))
    try(do.call(plot_RLum.Results,
                c(list(newRLumResults.calc_FiniteMixture),
                  as.list(sys.call())[-c(1,2)])))

  # Return values
  invisible(newRLumResults.calc_FiniteMixture)
}
Luminescence/R/calc_Lamothe2003.R0000644000176200001440000003023313463647055016106 0ustar liggesusers#' Apply fading correction after Lamothe et al., 2003
#'
#' This function applies the fading correction for the prediction of long-term fading as suggested
#' by Lamothe et al., 2003. The function basically adjusts the Ln/Tn values and fit a new dose-response
#' curve using the function [plot_GrowthCurve].
#'
#'
#' @param object [RLum.Results-class] [data.frame] (**required**): Input data for applying the
#' fading correction. Allowed are (1) [data.frame] with three columns (dose, LxTx, LxTx error), (2)
#' [RLum.Results-class] object created by the function [analyse_SAR.CWOSL] or [analyse_pIRIRSequence]
#'
#' @param dose_rate.envir [numeric] vector of length 2 (**required**): Environmental dose rate in mGy/a
#'
#' @param dose_rate.source [numeric] vector of length 2 (**required**): Irradiation source dose rate in Gy/s
#'
#' @param g_value [numeric] vector of length 2 (**required**): g_value in \%/decade *recalculated at the moment*
#' the equivalent dose was calculated, i.e. tc is either similar for the g-value measurement **and** the De measurement or
#' needs to be recalculated (cf. [calc_FadingCorr]). Inserting a normalised g-value, e.g., normalised to 2-days, will
#' lead to wrong results
#'
#' @param tc [numeric] (optional): time in seconds between irradiation and the prompt measurement used in the De estimation (cf. Huntley & Lamothe 2001).
#' If set to `NULL` it is assumed that tc is similar for the equivalent dose estimation and the g-value estimation
#'
#' @param tc.g_value [numeric] (with default): the time in seconds between irradiation and the prompt measurement used for estimating the g-value.
#' If the g-value was normalised to, e.g., 2 days, this time in seconds (i.e., 172800) should be given here along with the time used for the #' De estimation. If nothing is provided the time is set to tc, which is usual case for g-values obtained using the #' SAR method and g-values that had been not normalised to 2 days. Note: If this value is not `NULL` the functions expects a [numeric] value for `tc`. #' #' @param plot [logical] (with default): Enables/disables plot output #' #' @param verbose [logical] (with default): Enables/disables terminal verbose mode #' #' @param ... further arguments passed to the function [plot_GrowthCurve] #' #' @return The function returns are graphical output produced by the function [plot_GrowthCurve] and #' an [RLum.Results-class]. #' #' -----------------------------------\cr #' `[ NUMERICAL OUTPUT ]`\cr #' -----------------------------------\cr #' #' **`RLum.Results`**-object #' #' **slot:** **`@data`** #' #' \tabular{lll}{ #' **Element** \tab **Type** \tab **Description**\cr #' `$data` \tab `data.frame` \tab the fading corrected values \cr #' `$fit` \tab `nls` \tab the object returned by the dose response curve fitting \cr #' } #' #' '**slot:** **`@info`** #' #' The original function call #' #' @references #' #' Huntley, D.J., Lamothe, M., 2001. Ubiquity of anomalous fading in K-feldspars and the measurement #' and correction for it in optical dating. Canadian Journal of Earth Sciences 38, 1093-1106. #' #' Lamothe, M., Auclair, M., Hamzaoui, C., Huot, S., 2003. #' Towards a prediction of long-term anomalous fadingof feldspar IRSL. Radiation Measurements 37, #' 493-498. 
#' #' @section Function version: 0.1.0 #' #' @author Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France), Norbert Mercier, #' IRAMAT-CRP2A, Université Bordeaux Montaigne (France) #' #' @keywords datagen #' #' @seealso [plot_GrowthCurve], [calc_FadingCorr], [analyse_SAR.CWOSL], [analyse_pIRIRSequence] #' #' @examples #' #'##load data #'##ExampleData.BINfileData contains two BINfileData objects #'##CWOSL.SAR.Data and TL.SAR.Data #'data(ExampleData.BINfileData, envir = environment()) #' #'##transform the values from the first position in a RLum.Analysis object #'object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) #' #'##perform SAR analysis and set rejection criteria #'results <- analyse_SAR.CWOSL( #' object = object, #' signal.integral.min = 1, #' signal.integral.max = 2, #' background.integral.min = 900, #' background.integral.max = 1000, #' verbose = FALSE, #' plot = FALSE, #' onlyLxTxTable = TRUE #' ) #' #' ##run fading correction #' results_corr <- calc_Lamothe2003( #' object = results, #' dose_rate.envir = c(1.676 , 0.180), #' dose_rate.source = c(0.184, 0.003), #' g_value = c(2.36, 0.6), #' plot = TRUE, #' fit.method = "EXP") #' #' #'@md #'@export calc_Lamothe2003 <- function( object, dose_rate.envir, dose_rate.source, g_value, tc = NULL, tc.g_value = tc, verbose = TRUE, plot = TRUE, ... ){ # Input parameter test ------------------------------------------------------------------------ ##object if(missing(object)){ stop("[calc_Lamothe2003()] Input for 'object' missing but required!", call. = FALSE) } ##dose_rate.envir if(missing(dose_rate.envir)){ stop("[calc_Lamothe2003()] Input for 'dose_rate.envir' missing but required!", call. = FALSE) } ##dose_rate.source if(missing(dose_rate.source)){ stop("[calc_Lamothe2003()] Input for 'dose_rate.source' missing but required!", call. = FALSE) } ##g_value if(missing(g_value)){ stop("[calc_Lamothe2003()] Input for 'g_value' missing but required!", call. 
= FALSE) } ##check input type and length ##dose_rate.envir if(class(dose_rate.envir) != "numeric" || length(dose_rate.envir) < 2){ stop("[calc_Lamothe2003()] Input for 'dose_rate.envir' is not of type 'numeric' and/or of length < 2!", call. = FALSE) }else{ if(length(dose_rate.envir) > 2){ warning("[calc_Lamothe2003()] 'dose_rate.envir' has length > 2. Take only the first two entries.",call. = FALSE, immediate. = TRUE) dose_rate.envir <- dose_rate.envir[1:2] } } ##dose_rate.source if(class(dose_rate.source) != "numeric" || length(dose_rate.source) < 2){ stop("[calc_Lamothe2003()] Input for 'dose_rate.source' is not of type 'numeric' and/or of length < 2!", call. = FALSE) }else{ if(length(dose_rate.source) > 2){ warning("[calc_Lamothe2003()] 'dose_rate.source' has length > 2. Take only the first two entries.",call. = FALSE, immediate. = TRUE) dose_rate.source <- dose_rate.source[1:2] } } ##tc if(is.null(tc) && !is.null(tc.g_value)) stop("[calc_Lamothe2003()] If you set 'tc.g_value' you have to provide a value for 'tc' too!", call. = FALSE) # Input assignment ----------------------------------------------------------------------------- ## We allow input as data.frame() and RLum.Results objects ... the output from functions listed ## below .. 
if we allow a data.frame it should have at least Dose, Lx/Tx, Lx/Tx Error if(class(object) == "data.frame"){ data <- object[,1:3] ##add signal information if(any(grepl(pattern = "Signal", x = colnames(object), fixed = TRUE))){ SIGNAL <- object[[which(grepl(pattern = "Signal", colnames(object), fixed = TRUE))[1]]] }else{ SIGNAL <- NA } }else if(class(object) == "RLum.Results"){ if(object@originator == "analyse_SAR.CWOSL" || object@originator == "analyse_pIRIRSequence"){ ##now we do crazy stuff, we make a self-call here since this file can contain a lot of information ##get number of datasets; we have to search for the word natural, everthing else is not safe enough full_table <- object@data$LnLxTnTx.table set_start <- which(grepl(full_table$Name, pattern = "Natural", fixed = TRUE)) set_end <- c(set_start[-1] - 1, nrow(full_table)) ##signal column if available if(object@originator == "analyse_pIRIRSequence"){ object <- full_table[,c("Dose", "LxTx", "LxTx.Error", "Signal")] }else{ object <- full_table[,c("Dose", "LxTx", "LxTx.Error")] } ##now run the function results <- lapply(1:length(set_start), function(x){ calc_Lamothe2003( object = object[set_start[x]:set_end[x], ], dose_rate.envir = dose_rate.envir, dose_rate.source = dose_rate.source, g_value = g_value, tc = tc, tc.g_value = tc.g_value, verbose = verbose, plot = plot, ... ) }) ##merge output return(merge_RLum(results)) }else{ stop(paste0("[calc_Lamothe2003()] Input for 'object' created by function ",object@originator, "() not supported!"), call. = FALSE) } }else{ stop("[calc_Lamothe2003()] Unsupported data type for 'object'!", call. = FALSE) } # Apply correction---------------------------------------------------------------------------- ##recalculate the g-value to the given tc ... 
##re-calulation thanks to the help by Sebastien Huot, e-mail: 2016-07-19 if(!is.null(tc)){ k0 <- g_value / 100 / log(10) k1 <- k0 / (1 - k0 * log(tc[1]/tc.g_value[1])) g_value <- 100 * k1 * log(10) } # transform irradiation times to dose values data[[1]] <- data[[1]] * dose_rate.source[1] # fading correction rr <- 31.5576 * 10^9 * dose_rate.source[1] / (exp(1) * dose_rate.envir[1]) s_rr <- (sqrt ((100*dose_rate.source[2]/dose_rate.source[1])^2 + (100*dose_rate.envir[2]/dose_rate.envir[1])^2)) * rr / 100 Fading_C <- 1 - (g_value[1])/100 * log10(rr) sFading_C <- sqrt ((log10(rr) )^2 * ((g_value[2])/100)^2 + (g_value[1]/(100*rr))^2 * (s_rr)^2 ) #apply to input data LnTn_BEFORE <- data[[2]][1] LnTn_BEFORE.ERROR <- data[[3]][1] data[[2]][1] <- data[[2]][1] / Fading_C data[[3]][1] <- (sqrt( (100*data[[3]][1]/data[[2]][1])^2 + ((1/Fading_C - 1)*100*sFading_C/Fading_C)^2 )) * data[[2]][1] / 100 # Fitting --------------------------------------------------------------------------------- ##set arguments argument_list <- list( sample = data, verbose = FALSE, main = "Corrected Dose Response Curve", xlab = "Dose [Gy]", txtProgressBar = verbose, output.plotExtended = FALSE, output.plot = plot ) ##filter doubled arguments argument_list <- modifyList(x = argument_list, val = list(...)) ##run plot function fit_results <- do.call(what = plot_GrowthCurve,args = argument_list) # Age calculation ----------------------------------------------------------------------------- Age <- get_RLum(fit_results)[["De"]] / dose_rate.envir[1] s_Age <- sqrt ( (100*get_RLum(fit_results)[["De.Error"]]/get_RLum(fit_results)[["De"]])^2 + (100*dose_rate.envir[2]/dose_rate.envir[1])^2 ) *Age/100 # Terminal output ----------------------------------------------------------------------------- if(verbose){ cat("\n[calc_Lamothe2003()] \n\n") cat(" Used g_value:\t\t", round(g_value[1],3)," \u00b1 ",round(g_value[2],3),"%/decade \n") if(!is.null(tc)){ cat(" tc for g_value:\t", tc.g_value, " s\n") } cat("\n") cat(" 
Fading_C:\t\t", round(Fading_C,3), " \u00b1 ", round(sFading_C,3),"\n") cat(" Corrected Ln/Tn:\t", round(data[[2]][1],3), " \u00b1 ", round(data[[3]][1],3),"\n") cat(" Corrected De:\t\t", round(get_RLum(fit_results)[["De"]],2), " \u00b1 ", round(get_RLum(fit_results)[["De.Error"]],2)," Gy \n") cat("--------------------------------------------------------\n") cat(" Corrected Age:\t\t", round(Age,2), " \u00b1 ", round(s_Age,2)," ka \n") cat("--------------------------------------------------------\n") } # Compile output ------------------------------------------------------------------------------ return( set_RLum( class = "RLum.Results", data = list( data = data.frame( g_value = g_value[1], g_value.ERROR = g_value[2], tc = ifelse(is.null(tc), NA, tc), tc.g_value = ifelse(is.null(tc.g_value), NA, tc.g_value), FADING_C = Fading_C, FADING_C.ERROR = sFading_C, LnTn_BEFORE = LnTn_BEFORE, LnTn_BEFORE.ERROR = LnTn_BEFORE.ERROR, LnTn_AFTER = data[[2]][1], LnTn_AFTER.ERROR = data[[3]][1], DE = get_RLum(fit_results)[["De"]], DE = get_RLum(fit_results)[["De"]], DE.ERROR = get_RLum(fit_results)[["De.Error"]], AGE = Age, AGE.ERROR = s_Age, SIGNAL = SIGNAL ), fit = get_RLum(fit_results, data.object = "Fit") ), info = list( call = sys.call() ) ) ) } Luminescence/R/fit_CWCurve.R0000644000176200001440000007346413571743147015422 0ustar liggesusers#' Nonlinear Least Squares Fit for CW-OSL curves -beta version- #' #' The function determines the weighted least-squares estimates of the #' component parameters of a CW-OSL signal for a given maximum number of #' components and returns various component parameters. The fitting procedure #' uses the [nls] function with the `port` algorithm. 
#' #' **Fitting function** #' #' The function for the CW-OSL fitting has the general form: #' #' \deqn{y = I0_{1}*\lambda_{1}*exp(-\lambda_1*x) + ,\ldots, + I0_{i}*\lambda_{i}*exp(-\lambda_i*x) } #' #' where \eqn{0 < i < 8} #' #' and \eqn{\lambda} is the decay constant \cr #' and \eqn{I0} the intial number of trapped electrons. #' #' *(for the used equation cf. Boetter-Jensen et al., 2003, Eq. 2.31)* #' #' **Start values** #' #' Start values are estimated automatically by fitting a linear function to the #' logarithmized input data set. Currently, there is no option to manually #' provide start parameters. #' #' **Goodness of fit** #' #' The goodness of the fit is given as pseudoR^2 value (pseudo coefficient of #' determination). According to Lave (1970), the value is calculated as: #' #' \deqn{pseudoR^2 = 1 - RSS/TSS} #' #' where \eqn{RSS = Residual~Sum~of~Squares} \cr #' and \eqn{TSS = Total~Sum~of~Squares} #' #' #' #' **Error of fitted component parameters** #' #' The 1-sigma error for the #' components is calculated using the function [confint]. Due to #' considerable calculation time, this option is deactived by default. In #' addition, the error for the components can be estimated by using internal R #' functions like [summary]. See the [nls] help page #' for more information. #' #' *For details on the nonlinear regression in R, see Ritz & Streibig (2008).* #' #' @param values [RLum.Data.Curve-class] or [data.frame] (**required**): #' x, y data of measured values (time and counts). See examples. #' #' @param n.components.max [vector] (*optional*): #' maximum number of components that are to be used for fitting. #' The upper limit is 7. #' #' @param fit.failure_threshold [vector] (*with default*): #' limits the failed fitting attempts. #' #' @param fit.method [character] (*with default*): #' select fit method, allowed values: `'port'` and `'LM'`. 
`'port'` uses the 'port' #' routine usint the funtion [nls] `'LM'` utilises the function `nlsLM` from #' the package `minpack.lm` and with that the Levenberg-Marquardt algorithm. #' #' @param fit.trace [logical] (*with default*): #' traces the fitting process on the terminal. #' #' @param fit.calcError [logical] (*with default*): #' calculate 1-sigma error range of components using [confint] #' #' @param LED.power [numeric] (*with default*): #' LED power (max.) used for intensity ramping in mW/cm^2. #' **Note:** The value is used for the calculation of the absolute #' photoionisation cross section. #' #' @param LED.wavelength [numeric] (*with default*): #' LED wavelength used for stimulation in nm. #' **Note:** The value is used for the calculation of the absolute #' photoionisation cross section. #' #' @param cex.global [numeric] (*with default*): #' global scaling factor. #' #' @param sample_code [character] (*optional*): #' sample code used for the plot and the optional output table (mtext). #' #' @param output.path [character] (*optional*): #' output path for table output containing the results of the fit. The file #' name is set automatically. If the file already exists in the directory, #' the values are appended. #' #' @param output.terminal [logical] (*with default*): #' terminal ouput with fitting results. #' #' @param output.terminalAdvanced [logical] (*with default*): #' enhanced terminal output. Requires `output.terminal = TRUE`. #' If `output.terminal = FALSE` no advanced output is possible. #' #' @param plot [logical] (*with default*): #' returns a plot of the fitted curves. #' #' @param ... further arguments and graphical parameters passed to [plot]. #' #' @return #' **plot (*optional*)** #' #' the fitted CW-OSL curves are returned as plot. #' #' **table (*optional*)** #' #' an output table (*.csv) with parameters of the fitted components is #' provided if the `output.path` is set. 
#' #' #' **RLum.Results** #' #' Beside the plot and table output options, an [RLum.Results-class] object is #' returned. #' #' `fit`: #' an `nls` object (`$fit`) for which generic R functions are #' provided, e.g. [summary], [confint], [profile]. For more #' details, see [nls]. #' #' `output.table`: #' a [data.frame] containing the summarised parameters including the error #' #' `component.contribution.matrix`: #' [matrix] containing the values for the component to sum contribution plot #' (`$component.contribution.matrix`). #' #' Matrix structure:\cr #' Column 1 and 2: time and `rev(time)` values \cr #' Additional columns are used for the components, two for each component, #' containing I0 and n0. The last columns `cont.` provide information on #' the relative component contribution for each time interval including the row #' sum for this values. #' #' **object** #' #' beside the plot and table output options, an [RLum.Results-class] object #' is returned. #' #' `fit`: #' an `nls` object (`$fit`) for which generic R functions #' are provided, e.g. [summary], [confint], [profile]. For more #' details, see [nls]. #' #' `output.table`: #' a [data.frame] containing the summarised parameters including the error\cr #' `component.contribution.matrix`: [matrix] containing the values #' for the component to sum contribution plot (`$component.contribution.matrix`).\cr #' #' Matrix structure:\cr #' Column 1 and 2: time and `rev(time)` values\cr #' Additional columns are used for the components, two for each component, #' containing I0 and n0. The last columns `cont.` provide information on #' the relative component contribution for each time interval including the row #' sum for this values. #' #' #' @note #' #' **Beta version - This function has not been properly tested yet and** #' **should therefore not be used for publication purposes!** #' #' The pseudo-R^2 may not be the best parameter to describe the goodness of the #' fit. 
The trade off between the `n.components` and the pseudo-R^2 value #' is currently not considered. #' #' The function **does not** ensure that the fitting procedure has reached a #' global minimum rather than a local minimum! #' #' @section Function version: 0.5.2 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #' @seealso [fit_LMCurve], [plot],[nls], [RLum.Data.Curve-class], #' [RLum.Results-class], [get_RLum], [minpack.lm::nlsLM] #' #' @references #' Boetter-Jensen, L., McKeever, S.W.S., Wintle, A.G., 2003. #' Optically Stimulated Luminescence Dosimetry. Elsevier Science B.V. #' #' Lave, C.A.T., 1970. The Demand for Urban Mass Transportation. The Review of #' Economics and Statistics, 52 (3), 320-323. #' #' Ritz, C. & Streibig, J.C., 2008. Nonlinear Regression with R. In: R. #' Gentleman, K. Hornik, G. Parmigiani, eds., Springer, p. 150. #' #' @keywords dplot models #' #' @examples #' #' ##load data #' data(ExampleData.CW_OSL_Curve, envir = environment()) #' #' ##fit data #' fit <- fit_CWCurve(values = ExampleData.CW_OSL_Curve, #' main = "CW Curve Fit", #' n.components.max = 4, #' log = "x") #' #' @md #' @export fit_CWCurve<- function( values, n.components.max, fit.failure_threshold = 5, fit.method = "port", fit.trace = FALSE, fit.calcError = FALSE, LED.power = 36, LED.wavelength = 470, cex.global = 0.6, sample_code = "Default", output.path, output.terminal = TRUE, output.terminalAdvanced = TRUE, plot = TRUE, ... ){ ##TODO ##remove output.path # INTEGRITY CHECKS -------------------------------------------------------- ##INPUT OBJECTS if(is(values, "RLum.Data.Curve") == FALSE & is(values, "data.frame") == FALSE){ stop("[fit_CWCurve()] Input object is not of type 'RLum.Data.Curve' or 'data.frame'!", call. 
= FALSE) } if(is(values, "RLum.Data.Curve") == TRUE){ x <- values@data[,1] y <- values@data[,2] ##needed due to inconsistencies in the R code below values <- data.frame(x,y) }else{ ##set x and y values x<-values[,1] y<-values[,2] } # Deal with extra arguments ----------------------------------------------- ##deal with addition arguments extraArgs <- list(...) main <- if("main" %in% names(extraArgs)) {extraArgs$main} else {"CW-OSL Curve Fit"} log <- if("log" %in% names(extraArgs)) {extraArgs$log} else {""} xlab <- if("xlab" %in% names(extraArgs)) {extraArgs$xlab} else {"Time [s]"} ylab <- if("ylab" %in% names(extraArgs)) {extraArgs$ylab} else {paste("OSL [cts/",round(max(x)/length(x), digits = 2)," s]",sep="")} ##============================================================================## ## FITTING ##============================================================================## ## ##////equation used for fitting////(start) fit.equation <- function(I0.i,lambda.i){ equation<-parse( text=paste("I0[",I0.i,"]*lambda[",lambda.i,"]*exp(-lambda[",lambda.i,"]*x)", collapse="+",sep="")) return(equation) } ##////equation used for fitting///(end) ##////equation used for fitting////(start) fit.equation.simple <- function(I0.i,lambda.i){ equation<-parse( text=paste("I0[",I0.i,"]*exp(-lambda[",lambda.i,"]*x)", collapse="+",sep="")) return(equation) } ##////equation used for fitting///(end) ##set formula elements for fitting functions ## the upper two funtions should be removed ... but chances are needed ... 
TODO ##////equation used for fitting////(start) fit.formula <- function(n.components){ I0 <- paste0("I0.",1:n.components) lambda <- paste0("lambda.",1:n.components) as.formula(paste0("y ~ ", paste(I0," * ", lambda, "* exp(-",lambda," * x)", collapse=" + "))) } ##////equation used for fitting///(end) ##////equation used for fitting////(start) fit.formula.simple <- function(n.components){ I0 <- paste0("I0.",1:n.components) lambda <- paste0("lambda.",1:n.components) as.formula(paste0("y ~ ", paste(I0," * exp(-",lambda," * x)", collapse=" + "))) } ##////equation used for fitting///(end) ##set variables fit.trigger <- TRUE #triggers if the fitting should stopped n.components <- 1 #number of components used for fitting - start with 1 fit.failure_counter <- 0 #counts the failed fitting attempts ##if n.components_max is missing, then it is Inf if(missing(n.components.max)==TRUE){n.components.max<-Inf} ## ##++++Fitting loop++++(start) while(fit.trigger==TRUE & n.components <= n.components.max){ ##(0) START PARAMETER ESTIMATION ##rough automatic start parameter estimation ##I0 I0<-rep(values[1,2]/3,n.components) names(I0) <- paste0("I0.",1:n.components) ##lambda ##ensure that no values <=0 are included remove them for start parameter ##estimation and fit an linear function a first guess if(min(y)<=0){ temp.values<-data.frame(x[-which(y<=0)], log(y[-which(y<=0)])) }else{ temp.values<-data.frame(x, log(y)) } temp<-lm(temp.values) lambda<-abs(temp$coefficient[2])/nrow(values) k<-2 while(k<=n.components){ lambda[k]<-lambda[k-1]/100 k<-k+1 } names(lambda) <- paste0("lambda.",1:n.components) ##(1) FIRST FIT WITH A SIMPLE FUNCTION if(fit.method == "LM"){ ##try fit simple fit.try<-suppressWarnings(try(minpack.lm::nlsLM(fit.formula.simple(n.components), data=values, start=c(I0,lambda), na.action = "na.exclude", trace = fit.trace, control = minpack.lm::nls.lm.control( maxiter = 500 )), silent = TRUE ))#end try }else if(fit.method == "port"){ ##try fit simple 
fit.try<-suppressWarnings(try(nls(fit.formula.simple(n.components), data=values, trace = fit.trace, algorithm="port", na.action = "na.exclude", start=c(I0,lambda), nls.control( tol = 1, maxiter=100, warnOnly=FALSE, minFactor=1/1024 ), lower=rep(0,n.components * 2)# set lower boundaries for components ), silent=TRUE# nls ))#end try }else{ stop("[fit_CWCurve()] fit.method unknown.", call. = FALSE) } ##(3) FIT WITH THE FULL FUNCTION if(inherits(fit.try,"try-error") == FALSE){ ##grep parameters from simple fit to further work with them parameters <- coef(fit.try) ##grep parameters an set new starting parameters, here just lambda is choosen as ##it seems to be the most valuable parameter lambda <- parameters[(n.components+1):length(parameters)] if(fit.method == "LM"){ ##try fit simple fit.try<-suppressWarnings(try(minpack.lm::nlsLM(fit.formula(n.components), data=values, start=c(I0,lambda), trace = fit.trace, na.action = "na.exclude", lower = rep(0,n.components * 2), control = minpack.lm::nls.lm.control( maxiter = 500 )), silent = TRUE)) ## HACK: # minpack.lm::nlsLM() stores the 'lower' argument as class "call" rather # than "numeric" as nls() does. Before running confint() on this object # we overwrite the "lower" slot with the numeric values again. 
if (!inherits(fit.try, "try-error")) { fit.try$call$lower <- rep(0,n.components * 2) } }else{ ##try fit fit.try<-suppressWarnings(try(nls(fit.formula(n.components), trace=fit.trace, data=values, algorithm="port", na.action = "na.exclude", start=c(I0,lambda), nls.control( maxiter = 500, warnOnly = FALSE, minFactor = 1/4096 ), lower=rep(0,n.components * 2)# set lower boundaries for components ), silent=TRUE# nls ))#end try }#fit.method } ##count failed attempts for fitting if(inherits(fit.try,"try-error")==FALSE){ fit <- fit.try n.components <- n.components + 1 }else{ n.components<-n.components+1 fit.failure_counter <- fit.failure_counter+1 if(n.components==fit.failure_counter & exists("fit")==FALSE){fit<-fit.try}} ##stop fitting after a given number of wrong attempts if(fit.failure_counter>=fit.failure_threshold){ fit.trigger <- FALSE if(!exists("fit")){fit <- fit.try} }else if(n.components == n.components.max & exists("fit") == FALSE){ fit <- fit.try } }##end while ##++++Fitting loop++++(end) ##============================================================================## ## FITTING OUTPUT ##============================================================================## ##grep parameters if(inherits(fit,"try-error")==FALSE){ parameters <- coef(fit) ##correct fit equation for the de facto used number of components I0.i<-1:(length(parameters)/2) lambda.i<-1:(length(parameters)/2) fit.function<-fit.equation(I0.i=I0.i,lambda.i=lambda.i) n.components<-length(I0.i) ##write parameters in vectors and order by decreasing lambda value I0<-parameters[1:(length(parameters)/2)] lambda<-parameters[(1+(length(parameters)/2)):length(parameters)] o<-order(lambda,decreasing=TRUE) I0<-I0[o] lambda<-lambda[o] ##============================================================================## ## Additional Calculation ##============================================================================## ## --------------------------------------------- ##calculate stimulation intensity Schmidt 
(2008) ##Energy - E = h*v h<-6.62606957e-34 #in W*s^2 - Planck constant ny<-299792458/(LED.wavelength/10^9) #frequency of light E<-h*ny ##transform LED.power in W/cm^2 LED.power<-LED.power/1000 ##gets stimulation intensity stimulation_intensity<-LED.power/E ## --------------------------------------------- ##calculate photoionisation cross section and print on terminal ##using EQ (5) in Kitis cs<-as.vector(lambda/stimulation_intensity) cs.rel<-round(cs/cs[1],digits=4) ## --------------------------------------------- ##coefficient of determination after law RSS <- sum(residuals(fit)^2) #residual sum of squares TSS <- sum((y - mean(y))^2) #total sum of squares pR<-round(1-RSS/TSS,digits=4) if(pR<0){ warning("pseudo-R^2 < 0!") } ## --------------------------------------------- ##calculate 1- sigma CONFIDENCE INTERVALL lambda.error<-rep(NA, n.components) I0.error<-rep(NA, n.components) if(fit.calcError==TRUE){ ##option for confidence interval values.confint<-confint(fit, level=0.68) I0.confint<-values.confint[1:(length(values.confint[,1])/2),] lambda.confint<-values.confint[((length(values.confint[,1])/2)+1):length(values.confint[,1]),] ##error calculation I0.error<-as.vector(abs(I0.confint[,1]-I0.confint[,2])) lambda.error<-as.vector(abs(lambda.confint[,1]-lambda.confint[,2])) }#endif::fit.calcError ##============================================================================## ## Terminal Output ##============================================================================## if (output.terminal==TRUE){ ##print rough fitting information - use the nls() control for more information writeLines("\n[fit_CWCurve()]") writeLines(paste("\nFitting was finally done using a ",n.components, "-component function (max=",n.components.max,"):",sep="")) writeLines("------------------------------------------------------------------------------") writeLines(paste0("y ~ ", as.character(fit.formula(n.components))[3], "\n")) ##combine values and change rows names 
fit.results<-cbind(I0,I0.error,lambda,lambda.error,cs, cs.rel) row.names(fit.results)<-paste("c", 1:(length(parameters)/2), sep="") ##print parameters print(fit.results) #print some additional information if(fit.calcError==TRUE){writeLines("(errors quoted as 1-sigma values)")} writeLines("------------------------------------------------------------------------------") }#end if ##============================================================================## ## Terminal Output (advanced) ##============================================================================## if (output.terminalAdvanced==TRUE && output.terminal==TRUE){ ##sum of squares writeLines(paste("pseudo-R^2 = ",pR,sep="")) }#end if ##============================================================================## ## Table Output ##============================================================================## ##write output table if values exists if (exists("fit")){ ##set data.frame for a max value of 7 components output.table<-data.frame(NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA, NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA, NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA) output.tableColNames<-c("I01","I01.error","lambda1", "lambda1.error", "cs1","cs1.rel", "I02","I02.error","lambda2", "lambda2.error", "cs2","cs2.rel", "I03","I03.error","lambda3", "lambda3.error", "cs3","cs3.rel", "I04","I04.error","lambda4", "lambda4.error", "cs4","cs4.rel", "I05","I05.error","lambda5", "lambda5.error", "cs5","cs5.rel", "I06","I06.error","lambda6", "lambda6.error", "cs6","cs6.rel", "I07","I07.error","lambda7", "lambda7.error", "cs7","cs7.rel" ) ##write components in output table i<-0 k<-1 while(i<=n.components*6){ output.table[1,i+1]<-I0[k] output.table[1,i+2]<-I0.error[k] output.table[1,i+3]<-lambda[k] output.table[1,i+4]<-lambda.error[k] output.table[1,i+5]<-cs[k] output.table[1,i+6]<-cs.rel[k] i<-i+6 k<-k+1 } ##add pR and n.components output.table<-cbind(sample_code,n.components,output.table,pR) ##alter column names 
colnames(output.table)<-c("sample_code","n.components", output.tableColNames,"pseudo-R^2") if(missing(output.path)==FALSE){ ##write file with just the header if the file not exists if(file.exists(paste(output.path,"fit_CWCurve_Output_",sample_code,".csv",sep=""))==FALSE){ write.table(output.table,file=paste(output.path,"fit_CWCurve_Output_", sample_code,".csv",sep=""), sep=";" ,row.names=FALSE) }else{ write.table(output.table,file=paste(output.path,"fit_CWCurve_Output_", sample_code,".csv",sep=""), sep=";" ,row.names=FALSE, append=TRUE, col.names=FALSE) }#endif::for write option }#endif::table output ##============================================================================## ## COMPONENT TO SUM CONTRIBUTION PLOT ##============================================================================## ##+++++++++++++++++++++++++++++++ ##set matrix ##set polygon matrix for optional plot output component.contribution.matrix <- matrix(NA, nrow = length(values[,1]), ncol = (2*length(I0)) + 2) ##set x-values component.contribution.matrix[,1] <- values[,1] component.contribution.matrix[,2] <- rev(values[,1]) ##+++++++++++++++++++++++++++++++ ##set 1st polygon ##1st polygon (calculation) y.contribution_first<-(I0[1]*lambda[1]*exp(-lambda[1]*x))/(eval(fit.function))*100 ##avoid NaN values (might happen with synthetic curves) y.contribution_first[is.nan(y.contribution_first)==TRUE] <- 0 ##set values in matrix component.contribution.matrix[,3] <- 100 component.contribution.matrix[,4] <- 100 - rev(y.contribution_first) ##+++++++++++++++++++++++++++++++ ##set polygons in between ##polygons in between (calculate and plot) if (length(I0)>2){ y.contribution_prev <- y.contribution_first i<-2 ##matrix stepping k <- seq(3, ncol(component.contribution.matrix), by=2) while (i<=length(I0)-1) { y.contribution_next<-I0[i]*lambda[i]*exp(-lambda[i]*x)/(eval(fit.function))*100 ##avoid NaN values y.contribution_next[is.nan(y.contribution_next)==TRUE] <- 0 ##set values in matrix 
component.contribution.matrix[,k[i]] <- 100 - y.contribution_prev component.contribution.matrix[, k[i]+1] <- rev(100-y.contribution_prev- y.contribution_next) y.contribution_prev <- y.contribution_prev + y.contribution_next i <- i+1 }#end while loop }#end if ##+++++++++++++++++++++++++++++++ ##set last polygon ##last polygon (calculation) y.contribution_last <- I0[length(I0)]*lambda[length(lambda)]*exp(-lambda[length(lambda)]*x)/ (eval(fit.function))*100 ##avoid NaN values y.contribution_last[is.nan(y.contribution_last)==TRUE]<-0 component.contribution.matrix[,((2*length(I0))+1)] <- y.contribution_last component.contribution.matrix[,((2*length(I0))+2)] <- 0 ##change names of matrix to make more easy to understand component.contribution.matrix.names <- c( "x", "rev.x", paste(c("y.c","rev.y.c"),rep(1:n.components,each=2), sep="")) ##calculate area for each component, for each time interval component.contribution.matrix.area <- sapply( seq(3,ncol(component.contribution.matrix),by=2), function(x){ matrixStats::rowDiffs(cbind(rev(component.contribution.matrix[,(x+1)]), component.contribution.matrix[,x])) }) ##append to existing matrix component.contribution.matrix <- cbind( component.contribution.matrix, component.contribution.matrix.area, rowSums(component.contribution.matrix.area) ) ##set final column names colnames(component.contribution.matrix) <- c( component.contribution.matrix.names, paste(c("cont.c"),rep(1:n.components,each=1), sep=""), "cont.sum") }#endif :: (exists("fit")) }else{ if (output.terminal==TRUE) writeLines("[fit_CWCurve()] Fitting Error >> Plot without fit produced!") output.table<-NA component.contribution.matrix <- NA } ##============================================================================## ## PLOTTING ##============================================================================## if(plot==TRUE){ ##grep par parameters par.default <- par(no.readonly = TRUE) ##set colors gallery to provide more colors col <- get("col", pos = 
.LuminescenceEnv) ##set plot frame if(!inherits(fit, "try-error")){ layout(matrix(c(1,2,3),3,1,byrow=TRUE),c(1.6,1,1), c(1,0.3,0.4),TRUE) par(oma=c(1,1,1,1),mar=c(0,4,3,0),cex=cex.global) }else{ par(cex=cex.global) } ##==uppper plot==## ##open plot area plot(NA,NA, xlim=c(min(x),max(x)), ylim=if(log=="xy"){c(1,max(y))}else{c(0,max(y))}, xlab=if(!inherits(fit, "try-error")){""}else{xlab}, xaxt=if(!inherits(fit, "try-error")){"n"}else{"s"}, ylab=ylab, main=main, log=log) ##plotting measured signal points(x,y,pch=20, col="grey") ##add additional labeling (fitted function) mtext(side=3, sample_code, cex=0.7*cex.global) ##plot sum function if(inherits(fit,"try-error")==FALSE){ lines(x,eval(fit.function), lwd=2, col="black") legend.caption<-"sum curve" curve.col <- 1 ##plot signal curves ##plot curve for additional parameters if(length(I0)>1){ for (i in 1:length(I0)) { curve(I0[i]*lambda[i]*exp(-lambda[i]*x),col=col[i+1], lwd = 2, add = TRUE) legend.caption<-c(legend.caption,paste("component ",i,sep="")) curve.col<-c(curve.col,i+1) } }#end if ##plot legend #legend(y=max(y)*1,"measured values",pch=20, col="gray", bty="n") legend("topright",legend.caption,lty=rep(1,n.components+1,NA),lwd=2,col=col[curve.col], bty="n") ##==lower plot==## ##plot residuals par(mar=c(4.2,4,0,0)) plot(x,residuals(fit), xlim=c(min(x),max(x)), xlab=xlab, type="l", col="grey", ylab="Residual [a.u.]", lwd=2, log=if(log=="x" | log=="xy"){log="x"}else{""} ) ##add 0 line abline(h=0) ##------------------------------------------------------------------------## ##++component to sum contribution plot ++## ##------------------------------------------------------------------------## ##plot component contribution to the whole signal #open plot area par(mar=c(4,4,3.2,0)) plot(NA,NA, xlim=c(min(x),max(x)), ylim=c(0,100), ylab="Contribution [%]", xlab=xlab, main="Component contribution to sum curve", log=if(log=="x" | log=="xy"){log="x"}else{""}) stepping <- seq(3,length(component.contribution.matrix[1,]),2) 
for(i in 1:length(I0)){ polygon(c(component.contribution.matrix[,1], component.contribution.matrix[,2]), c(component.contribution.matrix[,stepping[i]], component.contribution.matrix[,stepping[i]+1]), col = col[i+1]) } rm(stepping) }#end if try-error for fit par(par.default) rm(par.default) } ##============================================================================## ## Return Values ##============================================================================## newRLumResults.fit_CWCurve <- set_RLum( class = "RLum.Results", data = list( data = output.table, fit = fit, component.contribution.matrix = list(component.contribution.matrix) ), info = list(call = sys.call()) ) rm(fit) rm(output.table) rm(component.contribution.matrix) invisible(newRLumResults.fit_CWCurve) } Luminescence/R/plot_FilterCombinations.R0000644000176200001440000003066213231137116020046 0ustar liggesusers#' Plot filter combinations along with the (optional) net transmission window #' #' The function allows to plot transmission windows for different filters. Missing data for specific #' wavelenghts are automatically interpolated for the given filter data using the function [approx]. #' With that a standardised output is reached and a net transmission window can be shown. #' #' **Calculations** #' #' **Net transmission window** #' #' The net transmission window of two filters is approximated by #' #' \deqn{T_{final} = T_{1} * T_{2}} #' #' **Optical density** #' #' \deqn{OD = -log(T)} #' #' **Total optical density** #' #' \deqn{OD_{total} = OD_{1} + OD_{2}} #' #' Please consider using own calculations for more precise values. #' #' **How to provide input data?** #' #' *CASE 1* #' #' The function expects that all filter values are either of type `matrix` or `data.frame` #' with two columns. The first columens contains the wavelength, the second the relative transmission #' (but not in percentage, i.e. the maximum transmission can be only become 1). 
#' #' In this case only the transmission window is show as provided. Changes in filter thickness and #' relection factor are not considered. #' #' *CASE 2* #' #' The filter data itself are provided as list element containing a `matrix` or #' `data.frame` and additional information on the thickness of the filter, e.g., #' `list(filter1 = list(filter_matrix, d = 2))`. #' The given filter data are always considered as standard input and the filter thickness value #' is taken into account by #' #' \deqn{Transmission = Transmission^(d)} #' #' with d given in the same dimension as the original filter data. #' #' *CASE 3* #' #' Same as CASE 2 but additionally a reflection factor P is provided, e.g., #' `list(filter1 = list(filter_matrix, d = 2, P = 0.9))`. #' The final transmission becomes: #' #' \deqn{Transmission = Transmission^(d) * P} #' #' **Advanced plotting parameters** #' #' The following further non-common plotting parameters can be passed to the function: #' #' \tabular{lll}{ #' **`Argument`** \tab **`Datatype`** \tab **`Description`**\cr #' `legend` \tab `logical` \tab enable/disable legend \cr #' `legend.pos` \tab `character` \tab change legend position ([graphics::legend]) \cr #' `legend.text` \tab `character` \tab same as the argument `legend` in ([graphics::legend]) \cr #' `net_transmission.col` \tab `col` \tab colour of net transmission window polygon \cr #' `net_transmission.col_lines` \tab `col` \tab colour of net transmission window polygon lines \cr #' `net_transmission.density` \tab `numeric` \tab specify line density in the transmission polygon \cr #' `grid` \tab `list` \tab full list of arguments that can be passd to the function [graphics::grid] #' } #' #' For further modifications standard additional R plot functions are recommend, e.g., the legend #' can be fully customised by disabling the standard legend and use the function [graphics::legend] #' instead. 
#' #' #' @param filters [list] (**required**): #' a named list of filter data for each filter to be shown. #' The filter data itself should be either provided as [data.frame] or [matrix]. #' (for more options s. Details) #' #' @param wavelength_range [numeric] (*with default*): #' wavelength range used for the interpolation #' #' @param show_net_transmission [logical] (*with default*): #' show net transmission window as polygon. #' #' @param interactive [logical] (*with default*): #' enable/disable interactive plot #' #' @param plot [logical] (*with default*): #' enables or disables the plot output #' #' @param ... further arguments that can be passed to control the plot output. #' Suppored are `main`, `xlab`, `ylab`, `xlim`, `ylim`, `type`, `lty`, `lwd`. #' For non common plotting parameters see the details section. #' #' @return Returns an S4 object of type [RLum.Results-class]. #' #' **@data** #' #' \tabular{lll}{ #' **`Object`** \tab **`Type`** **`Description`** \cr #' `net_transmission_window` \tab `matrix` \tab the resulting net transmission window \cr #' `OD_total` \tab `matrix` \tab the total optical density\cr #' `filter_matrix` \tab `matrix` \tab the filter matrix used for plotting #' } #' #' **@info** #' #' \tabular{lll}{ #' **Object** \tab **Type** **Description** \cr #' `call` \tab [call] \tab the original function call #' } #' #' @section Function version: 0.3.1 #' #' @author Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montagine (France) #' #' @seealso [RLum.Results-class], [approx] #' #' @keywords datagen aplot #' #' @examples #' #' ## (For legal reasons no real filter data are provided) #' #' ## Create filter sets #' filter1 <- density(rnorm(100, mean = 450, sd = 20)) #' filter1 <- matrix(c(filter1$x, filter1$y/max(filter1$y)), ncol = 2) #' filter2 <- matrix(c(200:799,rep(c(0,0.8,0),each = 200)), ncol = 2) #' #' ## Example 1 (standard) #' plot_FilterCombinations(filters = list(filter1, filter2)) #' #' ## Example 2 (with d and P value and 
#' name for filter 2)
#' results <- plot_FilterCombinations(
#'   filters = list(filter_1 = filter1, Rectangle = list(filter2, d = 2, P = 0.6)))
#' results
#'
#' ## Example 3 show optical density
#' plot(results$OD_total)
#'
#' \dontrun{
#' ##Example 4
#' ##show the filters using the interactive mode
#' plot_FilterCombinations(filters = list(filter1, filter2), interactive = TRUE)
#'
#' }
#'
#'
#' @md
#' @export
plot_FilterCombinations <- function(
  filters,
  wavelength_range = 200:1000,
  show_net_transmission = TRUE,
  interactive = FALSE,
  plot = TRUE,
  ...) {

  # Integrity tests -----------------------------------------------------------------------------

  ##'filters' must be a list (optionally named)
  if (!is(filters, "list")) {
    stop("[plot_FilterCombinations()] 'filters' should be of type 'list'")
  }

  ##robustness: an empty list would later hit 1:length() = c(1, 0) and fail cryptically
  if (length(filters) == 0) {
    stop("[plot_FilterCombinations()] 'filters' is empty, nothing to combine!")
  }

  ##each element must be a data.frame, matrix or list
  ##(bug fix: the previous message pasted the whole filter object into the text;
  ##now the offending element is reported by its position)
  for (i in seq_along(filters)) {
    if (!is(filters[[i]], "data.frame") &&
        !is(filters[[i]], "matrix") &&
        !is(filters[[i]], "list")) {
      stop(
        paste0(
          "[plot_FilterCombinations()] input for filter #", i,
          " is not of type 'matrix', 'data.frame' or 'list'!"
        )
      )
    }
  }

  ##check for named list, if not set names
  ##(bug fix: paste() with its default separator produced "Filter  1" with a double space)
  if (is.null(names(filters))) {
    names(filters) <- paste0("Filter ", seq_along(filters))
  }

  # Data Preparation ----------------------------------------------------------------------------

  ##check if filters are provided with their thickness; if so, correct the
  ##transmission for it ... relevant for glass filters.
  ##A list element has the layout list(filter_matrix, d, P): the provided
  ##transmission data are assumed to refer to a thickness of 1, hence
  ##Transmission = Transmission^d, and optionally multiplied by the
  ##reflection factor P.
  filters <- lapply(filters, function(x) {
    if (!is(x, "list")) {
      return(x)
    }

    m <- x[[1]]

    ##correction for the filter thickness (CASE 2)
    if (length(x) > 1) {
      m[, 2] <- m[, 2]^(x[[2]])
    }

    ##account for a potentially provided transmission reflection factor (CASE 3)
    if (length(x) > 2) {
      m[, 2] <- m[, 2] * x[[3]]
    }

    return(m)
  })

  ##check if there are transmission values greater than one, this is not possible
  lapply(filters, function(x) {
    if (max(x[, 2], na.rm = TRUE) > 1.01) {
      stop("[plot_FilterCombinations()] transmission values > 1 found. Check your data.")
    }
  })

  ##combine everything in a matrix using approx for interpolation on the
  ##common wavelength grid (vapply with a length > 1 FUN.VALUE always returns a matrix)
  filter_matrix <- vapply(filters, function(x) {
    approx(x = x[, 1], y = x[, 2], xout = wavelength_range)$y
  }, FUN.VALUE = vector(mode = "numeric", length = length(wavelength_range)))

  ##calculate net transmission window: product of all filter transmissions
  net_transmission_window <- matrix(
    c(wavelength_range, matrixStats::rowProds(filter_matrix)),
    ncol = 2)

  ##calculate OD
  ##NOTE(review): optical density is computed with the natural logarithm here;
  ##confirm against the intended OD definition (often -log10(T))
  OD <- -log(filter_matrix)

  ##calculate total OD
  OD_total <- cbind(wavelength_range, matrixStats::rowSums2(OD))

  ##add OD columns to the filter matrix and label rows/columns
  filter_matrix <- cbind(filter_matrix, OD)
  rownames(filter_matrix) <- wavelength_range
  colnames(filter_matrix) <- c(names(filters), paste0(names(filters), "_OD"))

  # Plotting ------------------------------------------------------------------------------------
  if (plot) {
    ##(1) ... select transmission values only (drop the *_OD columns)
    ##(fix: local variable name was misspelled 'transmisison')
    filter_matrix_transmission <-
      filter_matrix[, !grepl(pattern = "OD", x = colnames(filter_matrix)), drop = FALSE]

    ##set plot settings; every entry can be overwritten via ...
    plot_settings <- list(
      main = "Filter Combination",
      xlab = "Wavelength [nm]",
      ylab = "Transmission [a.u.]",
      xlim = range(wavelength_range),
      ylim = c(0, 1),
      legend.pos = "topleft",
      lty = 1,
      lwd = 1,
      col = seq_along(filters),
      grid = expression(nx = 10, ny = 10),
      legend = TRUE,
      legend.text = colnames(filter_matrix_transmission),
      net_transmission.col = rgb(0, 0.7, 0, .2),
      net_transmission.col_lines = "grey",
      net_transmission.density = 20
    )

    ##modify settings on request
    plot_settings <- modifyList(plot_settings, list(...))

    if (interactive) {
      ##check for plotly (suggested package, not a hard dependency)
      if (!requireNamespace("plotly", quietly = TRUE)) {
        stop("[plot_FilterCombinations()] Package 'plotly' needed for interactive plot functionality. Please install it.",
             call. = FALSE)
      }

      ##create basic plot with the first filter
      p <- plotly::plot_ly(
        x = wavelength_range,
        y = filter_matrix[, 1],
        type = "scatter",
        name = colnames(filter_matrix_transmission)[1],
        mode = "lines")

      ##add further filters
      if (ncol(filter_matrix_transmission) > 1) {
        for (i in 2:ncol(filter_matrix_transmission)) {
          p <- plotly::add_trace(p,
            y = filter_matrix[, i],
            name = colnames(filter_matrix_transmission)[i],
            mode = 'lines')
        }
      }

      ##add polygon
      ##replace all NA values with 0, otherwise it looks odd
      net_transmission_window[is.na(net_transmission_window)] <- 0
      p <- plotly::add_polygons(p,
        x = c(wavelength_range, rev(wavelength_range)),
        y = c(net_transmission_window[, 2], rep(0, length(wavelength_range))),
        name = "net transmission"
      )

      ##change graphical parameters
      p <- plotly::layout(
        p = p,
        xaxis = list(
          title = plot_settings$xlab
        ),
        yaxis = list(
          title = plot_settings$ylab
        ),
        title = plot_settings$main
      )

      print(p)

      ##NOTE: in interactive mode the plotly object is returned
      ##(instead of the RLum.Results object) via on.exit()
      on.exit(return(p))

    } else {
      ##plot individual filters
      graphics::matplot(
        x = wavelength_range,
        y = filter_matrix_transmission,
        type = "l",
        main = plot_settings$main,
        xlab = plot_settings$xlab,
        ylab = plot_settings$ylab,
        xlim = plot_settings$xlim,
        ylim = plot_settings$ylim,
        lty = plot_settings$lty,
        lwd = plot_settings$lwd,
        col = plot_settings$col
      )

      if (!is.null(plot_settings$grid)) {
        graphics::grid(eval(plot_settings$grid))
      }

      ##show effective transmission, i.e. the net transmission window
      if (show_net_transmission) {
        ##replace all NA values with 0, otherwise it looks odd
        net_transmission_window[is.na(net_transmission_window)] <- 0

        ##filled polygon (bug fix: removed trailing comma in the call)
        polygon(
          x = c(wavelength_range, rev(wavelength_range)),
          y = c(net_transmission_window[, 2], rep(0, length(wavelength_range))),
          col = plot_settings$net_transmission.col,
          border = NA
        )

        ##line-hatched polygon drawn on top
        polygon(
          x = c(wavelength_range, rev(wavelength_range)),
          y = c(net_transmission_window[, 2], rep(0, length(wavelength_range))),
          col = plot_settings$net_transmission.col_lines,
          border = NA,
          density = plot_settings$net_transmission.density
        )
      }

      #legend
      if (plot_settings$legend) {
        legend(
          plot_settings$legend.pos,
          legend = plot_settings$legend.text,
          col = plot_settings$col,
          lty = plot_settings$lty,
          bty = "n"
        )
      }
    }
  }

  # Produce output object -----------------------------------------------------------------------
  return(set_RLum(
    class = "RLum.Results",
    data = list(
      net_transmission_window = net_transmission_window,
      OD_total = OD_total,
      filter_matrix = filter_matrix
    ),
    info = list(call = sys.call())
  ))
}
Luminescence/R/analyse_FadingMeasurement.R0000644000176200001440000007675413431444475020346 0ustar liggesusers#' Analyse fading measurements and returns the fading rate per decade (g-value)
#'
#' The function analyses fading measurements and returns a fading rate including an error estimation.
#' The function is not limited to standard fading measurements, as can be seen, e.g., Huntley and
#' Lamothe 2001. Additionally, the density of recombination centres (rho') is estimated after
#' Kars et al. 2008.
#'
#' All provided output corresponds to the \eqn{tc} value obtained by this analysis. Additionally
#' in the output object the g-value normalised to 2-days is provided.
#' The output of this function
#' can be passed to the function [calc_FadingCorr].
#'
#' **Fitting and error estimation**
#'
#' For the fitting the function [stats::lm] is used without applying weights. For the
#' error estimation all input values, except tc, as the precision can be considered
#' sufficiently high with regard to the underlying problem, are sampled assuming a normal distribution
#' for each value with the value as the mean and the provided uncertainty as standard deviation.
#'
#' **Density of recombination centres**
#'
#' The density of recombination centres, expressed by the dimensionless variable rho', is estimated
#' by fitting equation 5 in Kars et al. 2008 to the data. For the fitting the function
#' [stats::nls] is used without applying weights. For the error estimation the same
#' procedure as for the g-value is applied (see above).
#'
#' **Multiple aliquots & Lx/Tx normalisation**
#'
#' Be aware that this function will always normalise all Lx/Tx values by the Lx/Tx value of the
#' prompt measurement of the first aliquot. This implicitly assumes that there are no systematic
#' inter-aliquot variations in Lx/Tx values. If deemed necessary to normalise the Lx/Tx values
#' of each aliquot by its individual prompt measurement please do so **before** running
#' [analyse_FadingMeasurement] and provide the already normalised values for `object` instead.
#'
#' @param object [RLum.Analysis-class] (**required**):
#' input object with the measurement data. Alternatively, a [list] containing [RLum.Analysis-class]
#' objects or a [data.frame] with three columns
#' (x = LxTx, y = LxTx error, z = time since irradiation) can be provided.
#' Can also be a wide table, i.e. a [data.frame] with a number of columns divisible by 3
#' and where each triplet has the before mentioned column structure.
#' **Please note: The input object should solely consist of the curve needed for the data analysis, i.e.
#' only IRSL curves representing Lx (and Tx)**
#'
#' If data from multiple aliquots are provided please **see the details below** with regard to
#' Lx/Tx normalisation. **The function assumes that all your measurements are related to
#' one (comparable) sample. If you want to treat independent samples, you have to use this function
#' in a loop.**
#'
#' @param structure [character] (*with default*):
#' sets the structure of the measurement data. Allowed are `'Lx'` or `c('Lx','Tx')`.
#' Other input is ignored
#'
#' @param signal.integral [vector] (**required**):
#' vector with the limits for the signal integral.
#' Not required if a `data.frame` with LxTx values are provided.
#'
#' @param background.integral [vector] (**required**):
#' vector with the bounds for the background integral.
#' Not required if a `data.frame` with LxTx values are provided.
#'
#' @param t_star [character] (*with default*):
#' method for calculating the time elapsed since irradiation. Options are:
#' `'half'`, which is \eqn{t_star := t_1 + (t_2 - t_1)/2} (Auclair et al., 2003)
#' and `'end'`, which takes the time between irradiation and the measurement step.
#' Default is `'half'`
#'
#' @param n.MC [integer] (*with default*):
#' number for Monte Carlo runs for the error estimation
#'
#' @param verbose [logical] (*with default*):
#' enables/disables verbose mode
#'
#' @param plot [logical] (*with default*):
#' enables/disables plot output
#'
#' @param plot.single [logical] (*with default*):
#' enables/disables single plot mode, i.e. one plot window per plot.
#' Alternatively a vector specifying the plot to be drawn, e.g.,
#' `plot.single = c(3,4)` draws only the last two plots
#'
#' @param ...
#' (*optional*) further arguments that can be passed to internally used functions (see details)
#'
#' @return
#' An [RLum.Results-class] object is returned:
#'
#' Slot: **@data**
#'
#' \tabular{lll}{
#' **OBJECT** \tab **TYPE** \tab **COMMENT**\cr
#' `fading_results` \tab `data.frame` \tab results of the fading measurement in a table \cr
#' `fit` \tab `lm` \tab object returned by the used linear fitting function [stats::lm]\cr
#' `rho_prime` \tab `data.frame` \tab results of rho' estimation after Kars et al. 2008 \cr
#' `LxTx_table` \tab `data.frame` \tab Lx/Tx table, if curve data had been provided \cr
#' `irr.times` \tab `integer` \tab vector with the irradiation times in seconds \cr
#' }
#'
#' Slot: **@info**
#'
#' \tabular{lll}{
#' **OBJECT** \tab `TYPE` \tab `COMMENT`\cr
#' `call` \tab `call` \tab the original function call\cr
#' }
#'
#'
#' @section Function version: 0.1.11
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) \cr
#' Christoph Burow, University of Cologne (Germany)
#'
#'
#' @keywords datagen
#'
#' @references
#'
#' Auclair, M., Lamothe, M., Huot, S., 2003. Measurement of anomalous fading for feldspar IRSL using
#' SAR. Radiation Measurements 37, 487-492. doi:10.1016/S1350-4487(03)00018-0
#'
#' Huntley, D.J., Lamothe, M., 2001. Ubiquity of anomalous fading in K-feldspars and the measurement
#' and correction for it in optical dating. Canadian Journal of Earth Sciences 38,
#' 1093-1106. doi:10.1139/cjes-38-7-1093
#'
#' Kars, R.H., Wallinga, J., Cohen, K.M., 2008. A new approach towards anomalous fading correction for feldspar
#' IRSL dating-tests on samples in field saturation. Radiation Measurements 43, 786-790.
#' doi:10.1016/j.radmeas.2008.01.021
#'
#' @seealso [calc_OSLLxTxRatio], [read_BIN2R], [read_XSYG2R],
#' [extract_IrradiationTimes]
#'
#' @examples
#'
#' ## load example data (sample UNIL/NB123, see ?ExampleData.Fading)
#' data("ExampleData.Fading", envir = environment())
#'
#' ##(1) get fading measurement data (here a three column data.frame)
#' fading_data <- ExampleData.Fading$fading.data$IR50
#'
#' ##(2) run analysis
#' g_value <- analyse_FadingMeasurement(
#' fading_data,
#' plot = TRUE,
#' verbose = TRUE,
#' n.MC = 10)
#'
#' ##(3) this can be further used in the function
#' ## to correct the age according to Huntley & Lamothe, 2001
#' results <- calc_FadingCorr(
#' age.faded = c(100,2),
#' g_value = g_value,
#' n.MC = 10)
#'
#'
#' @md
#' @export
analyse_FadingMeasurement <- function(
  object,
  structure = c("Lx", "Tx"),
  signal.integral,
  background.integral,
  t_star = 'half',
  n.MC = 100,
  verbose = TRUE,
  plot = TRUE,
  plot.single = FALSE,
  ...
){

# Integrity Tests -----------------------------------------------------------------------------
  ##Three input routes are supported:
  ##(a) a list of RLum.Analysis objects (non-RLum.Analysis entries dropped with a warning),
  ##(b) a single RLum.Analysis object (wrapped into a list),
  ##(c) a data.frame (or wide table) with ready-made LxTx/error/time triplets;
  ##    in that case 'object' is set to NULL so the curve-processing branches are skipped.
  if (is(object, "list")) {
    if (any(sapply(object, class) != "RLum.Analysis")) {
      ##warning
      warning(paste("[analyse_FadingMeasurement()]",
                    length(which(sapply(object, class) != "RLum.Analysis")),
                    "non-supported records removed!"), call. = FALSE)

      ##remove unwanted stuff
      object[sapply(object, class) != "RLum.Analysis"] <- NULL

      ##check whether this is empty now
      if(length(object) == 0)
        stop(
          "[analyse_FadingMeasurement()] 'object' expects an 'RLum.Analysis' object or a 'list' of such objects!",
          call. = FALSE
        )
    }

  } else if (class(object) == "RLum.Analysis") {
    object <- list(object)

  } else if(class(object) == "data.frame"){
    if (ncol(object) %% 3 != 0) {
      stop("[analyse_FadingMeasurement()] 'object': if you provide a data.frame as input, the number of columns must be a multiple of 3.")
    } else {
      ##stack the wide table into one long three-column data.frame
      object <- do.call(rbind,
                        lapply(seq(1, ncol(object), 3), function(col) {
                          setNames(object[ , col:c(col+2)], c("LxTx", "LxTxError", "timeSinceIrr"))
                        })
      )
      object <- object[complete.cases(object), ]
    }

    ##set table and object
    LxTx_table <- data.frame(LxTx = object[[1]], LxTx.Error = object[[2]])
    TIMESINCEIRR <- object[[3]]
    irradiation_times <- TIMESINCEIRR

    ##NULL marks "values already provided, no curve processing needed" below
    object <- NULL

  }else{
    stop(
      "[analyse_FadingMeasurement()] 'object' needs to be of type 'RLum.Analysis' or a 'list' of such objects!",
      call. = FALSE
    )
  }

# Prepare data --------------------------------------------------------------------------------
  ##Only entered for curve input; derives the 'time since irradiation' vector
  ##depending on which reader produced the records.
  if(!is.null(object)){

    ##support read_XSYG2R()
    if(length(unique(unlist(lapply(object, slot, name = "originator")))) == 1 &&
       unique(unlist(lapply(object, slot, name = "originator"))) == "read_XSYG2R"){

      irradiation_times <- extract_IrradiationTimes(object = object)

      ##reduce irradiation times ... extract curve data
      TIMESINCEIRR <- unlist(lapply(irradiation_times, function(x) {
        ##get time since irradiation
        temp_TIMESINCEIRR <- x$irr.times[["TIMESINCEIRR"]][!grepl(pattern = "irradiation", x = x$irr.times[["STEP"]], fixed = TRUE)]

        ##subtract half irradiation time
        temp_IRR_TIME <- x$irr.times[["IRR_TIME"]][!grepl(pattern = "irradiation", x = x$irr.times[["STEP"]], fixed = TRUE)]

        ##in accordance with Auclair et al., 2003, p. 488
        ##but here we have no t1 ... this needs to be calculated

        ##set variables
        t1 <- temp_TIMESINCEIRR
        t2 <- temp_TIMESINCEIRR + temp_IRR_TIME

        ##note: 't_star' is re-bound locally inside this closure (character in,
        ##numeric out); the outer argument is unchanged for the next iteration
        if(t_star == "half"){
          ##calculate t_star
          t_star <- t1 + (t2 - t1)/2
        }else if (t_star == "end"){
          ##set t_start as t_1 (so after the end of irradiation)
          t_star <- t1
        }else{
          stop("[analyse_FadingMeasurement()] Invalid value for t_star.", call. = FALSE)
        }
        return(t_star)
      }))

      ##clean object by removing the irradiation step ... and yes, we drop!
      object_clean <- unlist(get_RLum(object, curveType = "measured"))

    ##support read_BIN2R()
    }else if (length(unique(unlist(lapply(object, slot, name = "originator")))) == 1 &&
              unique(unlist(lapply(object, slot, name = "originator"))) %in% c("read_BIN2R","Risoe.BINfileData2RLum.Analysis")){

      ##assign object, unlist and drop it
      object_clean <- unlist(get_RLum(object))

      ##set TIMESINCEIRR vector
      TIMESINCEIRR <- vapply(object_clean, function(o){
        o@info$TIMESINCEIRR
      }, numeric(1))

      ##check whether we have negative irradiation times, sort out such values
      if(any(TIMESINCEIRR < 0)){
        #count affected records
        rm_records <- length(which(TIMESINCEIRR < 0))

        ##now we have a problem and we first have to make sure that we understand
        ##the data structure and remove also the corresponding values
        if(all(structure == c("Lx", "Tx"))){
          ##Lx/Tx pairs: if either member of a pair is negative, drop the whole pair
          rm_id <- matrix(TIMESINCEIRR, ncol = 2, byrow = TRUE)
          rm_id[apply(rm_id < 0, MARGIN = 1, any),] <- NA
          rm_id <- which(is.na(as.numeric(t(rm_id))))

          object_clean[rm_id] <- NULL
          TIMESINCEIRR <- TIMESINCEIRR[-rm_id]
          rm_records <- length(rm_id)
          rm(rm_id)

        }else{
          object_clean[TIMESINCEIRR < 0] <- NULL
          TIMESINCEIRR <- TIMESINCEIRR[!TIMESINCEIRR < 0]
        }

        ##return warning
        warning(
          paste0("[analyse_FadingMeasurement()] ", rm_records, " records 'time since irradiation' value removed from the dataset!"),
          call. = FALSE)
        rm(rm_records)
      }

      ##set irradiation times
      irradiation_times <- vapply(object_clean, function(o){
        o@info$IRR_TIME
      }, numeric(1))

    ##not support
    }else{
      try(stop("[analyse_FadingMeasurement()] Unknown or unsupported originator!", call. = FALSE))
      return(NULL)
    }

# Calculation ---------------------------------------------------------------------------------
    ##calculate Lx/Tx or ... just Lx, it depends on the pattern ... set IRR_TIME
    if(length(structure) == 2){
      ##records alternate Lx, Tx, Lx, Tx, ...
      Lx_data <- object_clean[seq(1,length(object_clean), by = 2)]
      Tx_data <- object_clean[seq(2,length(object_clean), by = 2)]

      ##we need only every 2nd irradiation time, the one from the Tx should be the same ... all the time
      TIMESINCEIRR <- TIMESINCEIRR[seq(1,length(TIMESINCEIRR), by = 2)]

    }else if(length(structure) == 1){
      Lx_data <- object_clean
      Tx_data <- NULL

    }else{
      try(stop("[analyse_FadingMeasurement()] I have no idea what your structure means!", call. = FALSE))
      return(NULL)
    }

    ##calculate Lx/Tx table; optional integral/error settings are forwarded from ...
    LxTx_table <- merge_RLum(lapply(1:length(Lx_data), function(x) {
      calc_OSLLxTxRatio(
        Lx.data = Lx_data[[x]],
        Tx.data = Tx_data[[x]],
        signal.integral = signal.integral,
        background.integral = background.integral,
        signal.integral.Tx = list(...)$signal.integral.Tx,
        background.integral.Tx = list(...)$background.integral.Tx,
        sigmab = list(...)$sigmab,
        sig0 = if( is.null(list(...)$sig0)){
          formals(calc_OSLLxTxRatio)$sig0
        }else{
          list(...)$sig0
        },
        background.count.distribution = if( is.null(list(...)$background.count.distribution)){
          formals(calc_OSLLxTxRatio)$background.count.distribution
        }else{
          list(...)$background.count.distribution
        }
      )
    }))$LxTx.table

  }

  ##create unique identifier
  uid <- create_UID()

  ##normalise data to prompt measurement; tc is the shortest delay in the dataset
  tc <- min(TIMESINCEIRR)[1]

  ##remove NA values in LxTx table (infinite ratios, e.g. from division by zero)
  if(any(is.infinite(LxTx_table[["LxTx"]]))){
    rm_id <- which(is.infinite(LxTx_table[["LxTx"]]))
    LxTx_table <- LxTx_table[-rm_id,]
    TIMESINCEIRR <- TIMESINCEIRR[-rm_id]
    rm(rm_id)
  }

  ##normalise to the value measured at tc (first match)
  if(length(structure) == 2 | is.null(object)){
    LxTx_NORM <- LxTx_table[["LxTx"]] / LxTx_table[["LxTx"]][which(TIMESINCEIRR== tc)[1]]
    LxTx_NORM.ERROR <- LxTx_table[["LxTx.Error"]] / LxTx_table[["LxTx"]][which(TIMESINCEIRR == tc)[1]]

  }else{
    LxTx_NORM <- LxTx_table[["Net_LnLx"]] / LxTx_table[["Net_LnLx"]][which(TIMESINCEIRR== tc)[1]]
    LxTx_NORM.ERROR <- LxTx_table[["Net_LnLx.Error"]] / LxTx_table[["Net_LnLx"]][which(TIMESINCEIRR == tc)[1]]
  }

  ##normalise time since irradiation
  TIMESINCEIRR_NORM <- TIMESINCEIRR/tc

  ##add dose and time since irradiation
  LxTx_table <- cbind(
    LxTx_table,
    TIMESINCEIRR = TIMESINCEIRR,
    TIMESINCEIRR_NORM = TIMESINCEIRR_NORM,
    TIMESINCEIRR_NORM.LOG = log10(TIMESINCEIRR_NORM),
    LxTx_NORM = LxTx_NORM,
    LxTx_NORM.ERROR = LxTx_NORM.ERROR,
    UID = uid
  )

# Fitting -------------------------------------------------------------------------------------
  ##we need to fit the data to get the g_value

  ##sample for monte carlo runs: column 1 holds log10(t/tc), columns 2..n.MC+1
  ##hold resampled LxTx values drawn from N(mean, sd)
  MC_matrix <- suppressWarnings(cbind(LxTx_table[["TIMESINCEIRR_NORM.LOG"]],
                                      matrix(rnorm(
                                        n = n.MC * nrow(LxTx_table),
                                        mean = LxTx_table[["LxTx_NORM"]],
                                        sd = abs(LxTx_table[["LxTx_NORM.ERROR"]])
                                      ), ncol = n.MC)))

  ##apply the fit
  fit_matrix <- vapply(X = 2:(n.MC+1), FUN = function(x){
    ##fit
    fit <- try(stats::lm(y~x, data = data.frame(
      x = MC_matrix[,1], y = MC_matrix[,x]))$coefficients, silent = TRUE)

    if(class(fit) == "try-error"){
      return(c(NA_real_, NA_real_))
    }else{
      return(fit)
    }
  }, FUN.VALUE = vector("numeric", length = 2))

  ##calculate g-values from matrix (slope/intercept corrects the offset from one)
  g_value.MC <- -fit_matrix[2, ] * 1 / fit_matrix[1, ] * 100

  ##calculate rho prime (Kars et al. 2008; proposed by Georgina E. King)
  ##s value after Huntley (2006) J. Phys. D.
  Hs <- 3e15

  ##sample for monte carlo runs
  MC_matrix_rhop <- suppressWarnings(matrix(rnorm(
    n = n.MC * nrow(LxTx_table),
    mean = LxTx_table[["LxTx_NORM"]],
    sd = abs(LxTx_table[["LxTx_NORM.ERROR"]])
  ), ncol = n.MC))

  ## calculate rho prime for all MC samples (eq. 5 in Kars et al. 2008)
  fit_vector_rhop <- suppressWarnings(apply(MC_matrix_rhop, MARGIN = 2, FUN = function(x) {
    tryCatch({
      coef(minpack.lm::nlsLM(x ~ c * exp(-rhop * (log(1.8 * Hs * LxTx_table$TIMESINCEIRR))^3),
                             start = list(c = x[1], rhop = 10^-5.5)))[["rhop"]]
    }, error = function(e) {
      return(NA)
    })
  }))

  ## discard all NA values produced in MC runs
  fit_vector_rhop <- fit_vector_rhop[!is.na(fit_vector_rhop)]

  ## calculate mean and standard deviation of rho prime (in log10 space)
  rhoPrime <- data.frame(
    MEAN = mean(fit_vector_rhop),
    SD = sd(fit_vector_rhop),
    Q_0.025 = quantile(x = fit_vector_rhop, probs = 0.025, na.rm = TRUE),
    Q_0.16 = quantile(x = fit_vector_rhop, probs = 0.16, na.rm = TRUE),
    Q_0.84 = quantile(x = fit_vector_rhop, probs = 0.84, na.rm = TRUE),
    Q_0.975 = quantile(x = fit_vector_rhop, probs = 0.975, na.rm = TRUE),
    row.names = NULL
  )

  ##master fit on the measured (not resampled) values
  fit <- try(stats::lm(y ~ x,
                       data = data.frame(x = LxTx_table[["TIMESINCEIRR_NORM.LOG"]],
                                         y = LxTx_table[["LxTx_NORM"]])), silent = TRUE)

  ##cubic fit used only as a visual trend line in the plot
  fit_power <- try(stats::lm(y ~ I(x^3) + I(x^2) + I(x) ,
                             data = data.frame(x = LxTx_table[["TIMESINCEIRR_NORM.LOG"]],
                                               y = LxTx_table[["LxTx_NORM"]])), silent = TRUE)

  ##for predicting (x and y swapped: predict time from intensity)
  fit_predict <- try(stats::lm(y ~ x,
                               data = data.frame(y = LxTx_table[["TIMESINCEIRR_NORM.LOG"]],
                                                 x = LxTx_table[["LxTx_NORM"]])), silent = TRUE)

  ##calculate final g_value
  ##the 2nd term corrects for the (potential) offset from one
  if(class(fit) == "try-error"){
    g_value_fit <- NA
  }else{
    g_value_fit <- -fit$coefficient[2] * 1 / fit$coefficient[1] * 100
  }

  ##construct output data.frame
  g_value <- data.frame(
    FIT = g_value_fit,
    MEAN = mean(g_value.MC),
    SD = sd(g_value.MC),
    Q_0.025 = quantile(x = g_value.MC, probs = 0.025, na.rm = TRUE),
    Q_0.16 = quantile(x = g_value.MC, probs = 0.16, na.rm = TRUE),
    Q_0.84 = quantile(x = g_value.MC, probs = 0.84, na.rm = TRUE),
    Q_0.975 = quantile(x = g_value.MC, probs = 0.975, na.rm = TRUE)
  )

  ##normalise the g-value to 2-days using the equation provided by Sebastien Huot via e-mail
  ##this means the data is extended (172800 s = 2 days)
  k0 <- g_value[,c("FIT", "SD")] / 100 / log(10)
  k1 <- k0 / (1 - k0 * log(172800/tc))
  g_value_2days <- 100 * k1 * log(10)
  names(g_value_2days) <- c("G_VALUE_2DAYS", "G_VALUE_2DAYS.ERROR")

# Approximation -------------------------------------------------------------------------------
  ##T_0.5: time at which the normalised signal has decayed to 0.5
  T_0.5.interpolated <- try(approx(x = LxTx_table[["LxTx_NORM"]],
                                   y = LxTx_table[["TIMESINCEIRR_NORM"]],
                                   ties = mean,
                                   xout = 0.5), silent = TRUE)

  if(class(T_0.5.interpolated) =='try-error'){
    ##NOTE(review): with both set to NULL, the data.frame below is built from
    ##NULL columns — verify the intended behaviour for this error path
    T_0.5.predict <- NULL
    T_0.5.interpolated <- NULL
  }else{
    T_0.5.predict <- stats::predict.lm(fit_predict,newdata = data.frame(x = 0.5), interval = "predict")
  }

  ##NOTE(review): PREDICTED.UPPER reuses prediction column 2 (the lower bound);
  ##the upper bound would be T_0.5.predict[,3] — confirm and fix upstream
  T_0.5 <- data.frame(
    T_0.5_INTERPOLATED = T_0.5.interpolated$y,
    T_0.5_PREDICTED = (10^T_0.5.predict[,1])*tc,
    T_0.5_PREDICTED.LOWER = (10^T_0.5.predict[,2])*tc,
    T_0.5_PREDICTED.UPPER = (10^T_0.5.predict[,2])*tc
  )

# Plotting ------------------------------------------------------------------------------------
  ##Up to four panels: (1) Lx curves, (2) Tx curves, (3) fading fit, (4) g-value density.
  ##'plot.single' as numeric vector selects which panels are drawn.
  if(plot) {
    if (!plot.single[1]) {
      par.default <- par()$mfrow
      on.exit(par(mfrow = par.default))
      par(mfrow = c(2, 2))
    }

    ##get package colour palette
    col <- get("col", pos = .LuminescenceEnv)

    ##set some plot settings
    plot_settings <- list(
      xlab = "Stimulation time [s]",
      log = "",
      mtext = ""
    )

    ##modify on request
    plot_settings <- modifyList(x = plot_settings, val = list(...))

    ##get unique irradiation times ... for plotting
    irradiation_times.unique <- unique(TIMESINCEIRR)

    ##limit to max 5 (only the first five get distinct colours/legend entries)
    if(length(irradiation_times.unique) >= 5){
      irradiation_times.unique <-
        irradiation_times.unique[seq(1, length(irradiation_times.unique), length.out = 5)]
    }

    if (!is.null(object)) {
      if (length(structure) == 2) {
        if (is(plot.single, "logical") || (is(plot.single, "numeric") & 1 %in% plot.single)) {
          plot_RLum(
            set_RLum(class = "RLum.Analysis",
                     records = object_clean[seq(1, length(object_clean), by = 2)]),
            combine = TRUE,
            col = c(col[1:5], rep(
              rgb(0, 0, 0, 0.3), length(TIMESINCEIRR) - 5
            )),
            plot.single = TRUE,
            legend.text = c(paste(irradiation_times.unique, "s"), "others"),
            legend.col = c(col[1:length(irradiation_times.unique)], rgb(0, 0, 0, 0.3)),
            xlab = plot_settings$xlab,
            log = plot_settings$log,
            legend.pos = "outside",
            main = expression(paste(L[x], " - curves")),
            mtext = plot_settings$mtext
          )

          ##add integration limits
          abline(
            v = range(signal.integral) * max(as.matrix(object_clean[[1]][, 1])) /
              nrow(as.matrix(object_clean[[1]])),
            lty = 2,
            col = "green"
          )
          abline(
            v = range(background.integral) * max(as.matrix(object_clean[[1]][, 1])) /
              nrow(as.matrix(object_clean[[1]])),
            lty = 2,
            col = "red"
          )
        }

        if (is(plot.single, "logical") || (is(plot.single, "numeric") & 2 %in% plot.single)) {
          plot_RLum(
            set_RLum(class = "RLum.Analysis",
                     records = object_clean[seq(2, length(object_clean), by = 2)]),
            combine = TRUE,
            col = c(col[1:5], rep(
              rgb(0, 0, 0, 0.3), length(TIMESINCEIRR) - 5
            )),
            plot.single = TRUE,
            legend.text = c(paste(irradiation_times.unique, "s"), "others"),
            legend.col = c(col[1:length(irradiation_times.unique)], rgb(0, 0, 0, 0.3)),
            xlab = plot_settings$xlab,
            log = plot_settings$log,
            legend.pos = "outside",
            main = expression(paste(T[x], " - curves")),
            mtext = plot_settings$mtext
          )

          if (is.null(list(...)$signal.integral.Tx)) {
            ##add integration limits
            abline(
              v = range(signal.integral) * max(as.matrix(object_clean[[1]][, 1])) /
                nrow(as.matrix(object_clean[[1]])),
              lty = 2,
              col = "green"
            )
            abline(
              v = range(background.integral) * max(as.matrix(object_clean[[1]][, 1])) /
                nrow(as.matrix(object_clean[[1]])),
              lty = 2,
              col = "red"
            )

          } else{
            ##add integration limits
            abline(
              v = range(list(...)$signal.integral.Tx) * max(as.matrix(object_clean[[1]][, 1])) /
                nrow(as.matrix(object_clean[[1]])),
              lty = 2,
              col = "green"
            )
            abline(
              v = range(list(...)$background.integral.Tx) * max(as.matrix(object_clean[[1]][, 1])) /
                nrow(as.matrix(object_clean[[1]])),
              lty = 2,
              col = "red"
            )
          }
        }

      } else{
        if (is(plot.single, "logical") || (is(plot.single, "numeric") & 1 %in% plot.single)) {
          plot_RLum(
            set_RLum(class = "RLum.Analysis", records = object_clean),
            combine = TRUE,
            col = c(col[1:5], rep(
              rgb(0, 0, 0, 0.3), length(TIMESINCEIRR) - 5
            )),
            plot.single = TRUE,
            legend.text = c(paste(irradiation_times.unique, "s"), "others"),
            legend.col = c(col[1:length(irradiation_times.unique)], rgb(0, 0, 0, 0.3)),
            legend.pos = "outside",
            xlab = plot_settings$xlab,
            log = plot_settings$log,
            main = expression(paste(L[x], " - curves")),
            mtext = plot_settings$mtext
          )

          ##add integration limits
          abline(
            v = range(signal.integral) * max(as.matrix(object_clean[[1]][, 1])) /
              nrow(as.matrix(object_clean[[1]])),
            lty = 2,
            col = "green"
          )
          abline(
            v = range(background.integral) * max(as.matrix(object_clean[[1]][, 1])) /
              nrow(as.matrix(object_clean[[1]])),
            lty = 2,
            col = "red"
          )
        }

        ##empty Tx plot
        if (is(plot.single, "logical") || (is(plot.single, "numeric") & 2 %in% plot.single)) {
          plot(
            NA,
            NA,
            xlim = c(0, 1),
            ylim = c(0, 1),
            xlab = "",
            ylab = "",
            axes = FALSE
          )
          text(x = 0.5, y = 0.5, labels = expression(paste("No ", T[x], " curves detected")))
        }
      }

    }else{
      ##data.frame input: no curves available, draw placeholder panels
      if (is(plot.single, "logical") || (is(plot.single, "numeric") & 1 %in% plot.single)) {
        ##empty Lx plot
        plot(
          NA,
          NA,
          xlim = c(0, 1),
          ylim = c(0, 1),
          xlab = "",
          ylab = "",
          axes = FALSE
        )
        text(x = 0.5, y = 0.5, labels = expression(paste("No ", L[x], " curves detected")))
      }

      if (is(plot.single, "logical") || (is(plot.single, "numeric") & 2 %in% plot.single)) {
        ##empty Tx plot
        plot(
          NA,
          NA,
          xlim = c(0, 1),
          ylim = c(0, 1),
          xlab = "",
          ylab = "",
          axes = FALSE
        )
        text(x = 0.5, y = 0.5, labels = expression(paste("No ", T[x], " curves detected")))
      }
    }

    ##(2) Fading plot
    if (is(plot.single, "logical") || (is(plot.single, "numeric") & 3 %in% plot.single)) {
      if(all(is.na(LxTx_table[["LxTx_NORM"]]))){
        shape::emptyplot()
        text(x = .5, y = .5, labels = "All NA values!")

      }else{
        plot(
          NA,
          NA,
          ylab = "Normalised intensity [a.u.]",
          xaxt = "n",
          xlab = "Time since irradition [s]",
          sub = expression(paste("[", log[10](t / t[c]), "]")),
          ylim = if (max(LxTx_table[["LxTx_NORM"]]) > 1.1) {
            c(0.1, max(LxTx_table[["LxTx_NORM"]]) + max(LxTx_table[["LxTx_NORM.ERROR"]]))
          } else{
            c(0.1, 1.1)
          },
          xlim = range(LxTx_table[["TIMESINCEIRR_NORM.LOG"]], na.rm = TRUE),
          main = "Signal Fading"
        )

        ##add axis (tick labels back-transformed from log10(t/tc) to seconds)
        axis(side = 1, at = axTicks(side = 1),
             labels = suppressWarnings(format((10 ^ (axTicks(side = 1)) * tc),
                                              digits = 0,
                                              decimal.mark = "",
                                              scientific = TRUE
             )))

        mtext(
          side = 3,
          paste0(
            "g-value: ", round(g_value$FIT, digits = 2),
            " \u00b1 ",
            round(g_value$SD, digits = 2),
            " (%/decade) | tc = ", format(tc, digits = 4, scientific = TRUE)
          ),
          cex = par()$cex * 0.9
        )

        ##add curves
        x <- NA
        for (i in 1:n.MC) {
          curve(fit_matrix[2, i] * x + fit_matrix[1, i],
                col = rgb(0, 0.2, 0.4, 0.2), add = TRUE)
        }

        ##add master curve in red
        curve(
          fit$coefficient[2] * x + fit$coefficient[1],
          col = "red",
          add = TRUE,
          lwd = 1.5
        )

        ##add power law curve
        curve(
          x ^ 3 * fit_power$coefficient[2] + x ^ 2 * fit_power$coefficient[3] +
            x * fit_power$coefficient[4] + fit_power$coefficient[1],
          add = TRUE,
          col = "blue",
          lty = 2
        )

        ##add points
        points(x = LxTx_table[["TIMESINCEIRR_NORM.LOG"]],
               y = LxTx_table[["LxTx_NORM"]],
               pch = 21,
               bg = "grey")

        ##error bars
        segments(
          x0 = LxTx_table[["TIMESINCEIRR_NORM.LOG"]],
          x1 = LxTx_table[["TIMESINCEIRR_NORM.LOG"]],
          y0 = LxTx_table[["LxTx_NORM"]] + LxTx_table[["LxTx_NORM.ERROR"]],
          y1 = LxTx_table[["LxTx_NORM"]] - LxTx_table[["LxTx_NORM.ERROR"]],
          col = "grey"
        )

        ##add legend
        legend(
          "bottom",
          legend = c("fit", "fit MC", "trend"),
          col = c("red", "grey", "blue"),
          lty = c(1, 1, 2),
          bty = "n",
          horiz = TRUE
        )

      }#end if a
    }#

    ##(4) density of the MC g-values
    if (is(plot.single, "logical") || (is(plot.single, "numeric") & 4 %in% plot.single)) {
      if(all(is.na(g_value.MC))){
        shape::emptyplot()
        text(x = .5, y = .5, labels = "All NA values!")

      }else{
        plot(density(g_value.MC), main = "Density: g-values (%/decade)")
        rug(x = g_value.MC)
        abline(v = c(g_value[["Q_0.16"]], g_value[["Q_0.84"]]), lty = 2, col = "darkgreen")
        abline(v = c(g_value[["Q_0.025"]], g_value[["Q_0.975"]]), lty = 2, col = "red")

        legend(
          "topleft",
          legend = c("HPD - 68 %", "HPD - 95 %"),
          lty = 2,
          col = c("darkgreen", "red"),
          bty = "n"
        )
      }
    }
  }

# Terminal ------------------------------------------------------------------------------------
  if (verbose){
    cat("\n[analyse_FadingMeasurement()]\n")
    cat(paste0("\n n.MC:\t",n.MC))
    cat(paste0("\n tc:\t",format(tc, digits = 4, scientific = TRUE), " s"))
    cat("\n---------------------------------------------------")
    cat(paste0("\nT_0.5 interpolated:\t",T_0.5$T_0.5_INTERPOLATED))
    cat(paste0("\nT_0.5 predicted:\t",format(T_0.5$T_0.5_PREDICTED, digits = 2, scientific = TRUE)))
    cat(paste0("\ng-value:\t\t", round(g_value$FIT, digits = 2),
               " \u00b1 ", round(g_value$SD, digits = 2), " (%/decade)"))
    cat(paste0("\ng-value (norm. 2 days):\t", round(g_value_2days[1], digits = 2),
               " \u00b1 ", round(g_value_2days[2], digits = 2), " (%/decade)"))
    cat("\n---------------------------------------------------")
    cat(paste0("\nrho':\t\t\t", format(rhoPrime$MEAN, digits = 3),
               " \u00b1 ", format(rhoPrime$SD, digits = 3)))
    cat(paste0("\nlog10(rho'):\t\t", suppressWarnings(round(log10(rhoPrime$MEAN), 2)),
               " \u00b1 ", round(rhoPrime$SD / (rhoPrime$MEAN * log(10, base = exp(1))), 2)))
    cat("\n---------------------------------------------------")
  }

# Return --------------------------------------------------------------------------------------

  ##set data.frame
  if(all(is.na(g_value))){
    fading_results <- data.frame(
      g_value = NA,
      TC = NA,
      G_VALUE_2DAYS = NA,
      G_VALUE_2DAYS.ERROR = NA,
      T_0.5 = NA,
      UID = uid,
      stringsAsFactors = FALSE
    )

  }else{
    fading_results <- data.frame(
      g_value,
      TC = tc,
      G_VALUE_2DAYS = g_value_2days[1],
      G_VALUE_2DAYS.ERROR = g_value_2days[2],
      T_0.5,
      UID = uid,
      stringsAsFactors = FALSE
    )
  }

  ##return
  return(set_RLum(
    class = "RLum.Results",
    data = list(
      fading_results = fading_results,
      fit = fit,
      rho_prime = rhoPrime,
      LxTx_table = LxTx_table,
      irr.times = irradiation_times
    ),
    info = list(call = sys.call())
  ))
}
Luminescence/R/Second2Gray.R0000644000176200001440000001476113231137116015337 0ustar liggesusers#' Converting equivalent dose values from seconds (s) to gray (Gy)
#'
#' Conversion of absorbed radiation dose in seconds (s) to the SI unit gray
#' (Gy) including error propagation. Normally used for equivalent dose data.
#'
#' Calculation of De values from seconds (s) to gray (Gy)
#'
#' \deqn{De [Gy] = De [s] * Dose Rate [Gy/s])}
#'
#' Provided calculation error propagation methods for error calculation
#' (with 'se' as the standard error and 'DR' of the dose rate of the beta-source):
#'
#' **(1) `omit`** (default)
#'
#' \deqn{se(De) [Gy] = se(De) [s] * DR [Gy/s]}
#'
#' In this case the standard error of the dose rate of the beta-source is
#' treated as systematic (i.e.
non-random), it error propagation is omitted. #' However, the error must be considered during calculation of the final age. #' (cf. Aitken, 1985, pp. 242). This approach can be seen as method (2) (gaussian) #' for the case the (random) standard error of the beta-source calibration is #' 0. Which particular method is requested depends on the situation and cannot #' be prescriptive. #' #' **(2) `gaussian`** error propagation #' #' \deqn{se(De) [Gy] = \sqrt((DR [Gy/s] * se(De) [s])^2 + (De [s] * se(DR) [Gy/s])^2)} #' #' Applicable under the assumption that errors of De and se are uncorrelated. #' #' **(3) `absolute`** error propagation #' #' \deqn{se(De) [Gy]= abs(DR [Gy/s] * se(De) [s]) + abs(De [s] * se(DR) [Gy/s])} #' #' Applicable under the assumption that errors of De and se are not uncorrelated. #' #' #' @param data [data.frame] (**required**): #' input values, structure: data (`values[,1]`) and data error (`values [,2]`) #' are required #' #' @param dose.rate [RLum.Results-class], [data.frame] or [numeric] (**required**): #' `RLum.Results` needs to be orginated from the function [calc_SourceDoseRate], #' for `vector` dose rate in Gy/s and dose rate error in Gy/s #' #' @param error.propagation [character] (*with default*): #' error propagation method used for error calculation (`omit`, `gaussian` or #' `absolute`), see details for further information #' #' @return #' Returns a [data.frame] with converted values. #' #' @note #' If no or a wrong error propagation method is given, the execution of the function is #' stopped. Furthermore, if a `data.frame` is provided for the dose rate values is has to #' be of the same length as the data frame provided with the argument `data` #' #' @section Function version: 0.6.0 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr #' Michael Dietze, GFZ Potsdam (Germany)\cr #' Margret C. 
#' Fuchs, HZDR, Helmholtz-Institute Freiberg for Resource Technology (Germany)
#'
#' @seealso [calc_SourceDoseRate]
#'
#' @references
#' Aitken, M.J., 1985. Thermoluminescence dating. Academic Press.
#'
#' @keywords manip
#'
#' @examples
#'
#' ##(A) for known source dose rate at date of measurement
#' data(ExampleData.DeValues, envir = environment())
#' Second2Gray(ExampleData.DeValues$BT998, c(0.0438,0.0019))
#'
#' ##(B) for source dose rate calibration data
#' dose.rate <- calc_SourceDoseRate(measurement.date = "2012-01-27",
#'                                  calib.date = "2014-12-19",
#'                                  calib.dose.rate = 0.0438,
#'                                  calib.error = 0.0019)
#' data(ExampleData.DeValues, envir = environment())
#' Second2Gray(ExampleData.DeValues$BT998, dose.rate)
#'
#' @md
#' @export
Second2Gray <- function(
  data,
  dose.rate,
  error.propagation = "omit"
){

  # Integrity tests -----------------------------------------------------------------------------
  ##(1) data.frame or RLum.Data.Curve object?
  if (!is(data, "data.frame")) {
    stop("[Second2Gray()] 'data' object has to be of type 'data.frame'!")
  }

  ##(2) numeric, data.frame or RLum.Results object?
  if (!is(dose.rate, "numeric") & !is(dose.rate, "RLum.Results") & !is(dose.rate, "data.frame")) {
    stop("[Second2Gray()] 'dose.rate' object has to be of type 'numeric', 'data.frame' or 'RLum.Results'!")
  }

  ##(3) a data.frame dose rate must match the input data row-wise
  if (is(dose.rate, "data.frame")) {
    if (nrow(dose.rate) != nrow(data)) {
      stop("[Second2Gray()] the data frames in 'data' and 'dose.rate' need to be of similar length!")
    }
  }

  ##(4) check for right originator
  if (is(dose.rate, "RLum.Results")) {
    if (dose.rate@originator != "calc_SourceDoseRate") {
      stop("[Second2Gray()] Wrong originator for dose.rate 'RLum.Results' object.")
    } else {
      ## extract the dose rate table; bug fix: the original used `<-` instead
      ## of `=` inside data.frame(), which creates stray objects in the
      ## calling frame and mangles the column names
      dr <- get_RLum(dose.rate, data.object = "dose.rate")
      if (!is(dr, "data.frame")) {
        dose.rate <- data.frame(
          dose.rate = as.numeric(dr[1]),
          dose.rate.error = as.numeric(dr[2])
        )
      } else {
        dose.rate <- dr
      }
    }
  }

  # Calculation ---------------------------------------------------------------------------------
  De.seconds <- data[, 1]
  De.error.seconds <- data[, 2]

  ## normalise the dose rate input to two vectors so the error propagation
  ## branches below need not distinguish data.frame and numeric input;
  ## DR.error is NA for a length-1 numeric input and only used where defined
  if (is(dose.rate, "data.frame")) {
    DR <- dose.rate[, 1]
    DR.error <- dose.rate[, 2]
  } else {
    DR <- dose.rate[1]
    DR.error <- dose.rate[2]
  }

  De.gray <- round(De.seconds * DR, digits = 2)

  if (error.propagation == "omit") {
    ## source calibration error treated as systematic and omitted here
    De.error.gray <- round(DR * De.error.seconds, digits = 3)

  } else if (error.propagation == "gaussian") {
    ## assumes uncorrelated errors of De and dose rate
    De.error.gray <- round(sqrt((De.seconds * DR.error)^2 + (DR * De.error.seconds)^2), digits = 3)

  } else if (error.propagation == "absolute") {
    ## absolute (worst-case) error propagation
    De.error.gray <- round(abs(DR * De.error.seconds) + abs(De.seconds * DR.error), digits = 3)

  } else {
    stop("[Second2Gray()] unsupported error propagation method!" )
  }

  # Return --------------------------------------------------------------------------------------
  data <- data.frame(De = De.gray, De.error = De.error.gray)
  return(data)
}
#'
#' @section Class version: 0.4.2
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France)
#'
#' @seealso [RLum-class], [RLum.Data-class], [plot_RLum], [read_SPE2R]
#'
#' @keywords classes
#'
#' @examples
#'
#' showClass("RLum.Data.Image")
#'
#' ##create empty RLum.Data.Image object
#' set_RLum(class = "RLum.Data.Image")
#'
#' @importClassesFrom raster RasterBrick
#'
#' @md
#' @export
setClass(
  "RLum.Data.Image",
  slots = list(
    recordType = "character",
    curveType = "character",
    data = "RasterBrick",
    info = "list"
  ),
  contains = "RLum.Data",
  prototype = list(
    recordType = character(),
    curveType = character(),
    data = raster::brick(raster::raster(matrix())),
    info = list()
  )
)

# as() ----------------------------------------------------------------------------------------
## coercion support in both directions for data.frame and matrix
#' as()
#'
#' for `[RLum.Data.Image-class]`
#'
#' **[RLum.Data.Image-class]**
#'
#' \tabular{ll}{
#'  **from** \tab **to**\cr
#'  `data.frame` \tab `data.frame`\cr
#'  `matrix` \tab `matrix`
#' }
#'
#' @md
#' @name as
setAs("data.frame", "RLum.Data.Image", function(from, to) {
  ## NOTE: recordType string "unkown curve type" (sic) kept as-is for
  ## backwards compatibility with existing objects
  new(
    to,
    recordType = "unkown curve type",
    curveType = "NA",
    data = raster::brick(raster::raster(as.matrix(from))),
    info = list()
  )
})

setAs("RLum.Data.Image", "data.frame", function(from) {
  ## first raster layer only, reshaped to the original column count
  as.data.frame(matrix(from@data@data@values[, 1], ncol = from@data@ncols))
})

setAs("matrix", "RLum.Data.Image", function(from, to) {
  new(
    to,
    recordType = "unkown curve type",
    curveType = "NA",
    data = raster::brick(raster::raster(as.matrix(from))),
    info = list()
  )
})

setAs("RLum.Data.Image", "matrix", function(from) {
  matrix(from@data@data@values[, 1], ncol = from@data@ncols)
})

# show() --------------------------------------------------------------------------------------
#' @describeIn RLum.Data.Image
#' Show structure of `RLum.Data.Image` object
#'
#' @keywords internal
#'
#' @md
#' @export
setMethod("show", signature(object = "RLum.Data.Image"),
  function(object) {
    ## frame geometry and the overall pixel value range of the brick
    n.x <- object@data@ncols
    n.y <- object@data@nrows
    range.z <- paste(min(object@data@data@min), ":", max(object@data@data@max))

    ## print a compact structural summary
    cat("\n [RLum.Data.Image-class]")
    cat("\n\t recordType:", object@recordType)
    cat("\n\t curveType:", object@curveType)
    cat("\n\t .. recorded frames:", length(object@data@data@names))
    cat("\n\t .. .. pixel per frame:", n.x * n.y)
    cat("\n\t .. .. x dimension [px]:", n.x)
    cat("\n\t .. .. y dimension [px]:", n.y)
    cat("\n\t .. .. full pixel value range:", range.z)
    cat("\n\t additional info elements:", length(object@info))
  }
)
# set_RLum() ----------------------------------------------------------------------------------
#' @describeIn RLum.Data.Image
#' Construction method for RLum.Data.Image object. The slot info is optional
#' and predefined as empty list by default. If `data` is itself an
#' `RLum.Data.Image` object, the object is reconstructed from it; `.uid`
#' and `.pid` are then always inherited from that template.
#'
#' @md
#' @export
setMethod(
  "set_RLum",
  signature = signature("RLum.Data.Image"),
  definition = function(
    class,
    originator,
    .uid,
    .pid,
    recordType = "Image",
    curveType = NA_character_,
    data = raster::brick(raster::raster(matrix())),
    info = list())
  {
    if (is(data, "RLum.Data.Image")) {
      ## reconstruction mode: fall back to the template object for every
      ## argument the caller did not provide explicitly
      if (missing(curveType)) curveType <- data@curveType
      if (missing(recordType)) recordType <- data@recordType
      if (missing(info)) info <- data@info

      ## filling slots after new() is faster than passing them to new()
      newRLumDataImage <- new("RLum.Data.Image")
      newRLumDataImage@originator <- data@originator
      newRLumDataImage@recordType <- recordType
      newRLumDataImage@curveType <- curveType
      newRLumDataImage@data <- data@data
      newRLumDataImage@info <- info
      ## .uid/.pid cannot be modified here: they would be reset by the
      ## arguments passed down from the set_RLum() generic
      newRLumDataImage@.uid <- data@.uid
      newRLumDataImage@.pid <- data@.pid

    } else {
      newRLumDataImage <- new("RLum.Data.Image")
      newRLumDataImage@originator <- originator
      newRLumDataImage@recordType <- recordType
      newRLumDataImage@curveType <- curveType
      newRLumDataImage@data <- data
      newRLumDataImage@info <- info
      newRLumDataImage@.uid <- .uid
      newRLumDataImage@.pid <- .pid
    }

    return(newRLumDataImage)
  }
)

# get_RLum() ----------------------------------------------------------------------------------
#' @describeIn RLum.Data.Image
#' Accessor method for RLum.Data.Image object. The argument `info.object` is
#' optional to directly access the info elements. If no info element name is
#' provided, the raw image data (`RasterBrick`) will be returned.
#'
#' @md
#' @export
setMethod("get_RLum",
  signature("RLum.Data.Image"),
  definition = function(object, info.object) {
    if (!missing(info.object)) {
      ## bug fix: use is.character() instead of class(x) != "character";
      ## the latter yields a length > 1 condition (an error in R >= 4.2)
      ## for objects carrying more than one class attribute
      if (!is.character(info.object))
        stop("[get_RLum] 'info.object' has to be a character!", call. = FALSE)

      if (info.object %in% names(object@info)) {
        unlist(object@info[info.object])

      } else {
        stop(paste0(
          "[get_RLum] Invalid element name. Valid names are: ",
          paste(names(object@info), collapse = ", ")
        ), call. = FALSE)
      }

    } else {
      ## no info element requested: return the raw image data
      object@data
    }
  })

# names_RLum() --------------------------------------------------------------------------------
#' @describeIn RLum.Data.Image
#' Returns the names info elements coming along with this curve object
#'
#' @md
#' @export
setMethod("names_RLum", "RLum.Data.Image",
  function(object) {
    names(object@info)
  })
#' Calculation of the source dose rate via the date of measurement
#'
#' Calculates the dose rate of the irradiation source at the date of
#' measurement from the source calibration date, the calibration dose rate
#' and its error, assuming exponential decay of the source nuclide:
#'
#' \deqn{dose.rate = D0 * exp(-log(2) / T.1/2 * t)}
#'
#' with `D0` the calibration dose rate, `T.1/2` the half-life of the source
#' nuclide (in days) and `t` the time since source calibration (in days);
#' `log(2) / T.1/2` equals the decay constant lambda. The returned
#' [RLum.Results-class] object can be passed directly to [Second2Gray].
#'
#' **Allowed source types and related values** (half-lives after NNDC,
#' Brookhaven National Laboratory): `Sr-90` (28.90 y), `Am-241` (432.6 y),
#' `Co-60` (5.274 y), `Cs-137` (30.08 y).
#'
#' @param measurement.date [character] or [Date] (*with default*):
#' date of measurement in "YYYY-MM-DD"; defaults to today. May be a vector.
#'
#' @param calib.date [character] or [Date] (**required**):
#' date of source calibration in "YYYY-MM-DD"
#'
#' @param calib.dose.rate [numeric] (**required**):
#' dose rate at date of calibration in Gy/s or Gy/min
#'
#' @param calib.error [numeric] (**required**):
#' error of dose rate at date of calibration in Gy/s or Gy/min
#'
#' @param source.type [character] (*with default*):
#' specify irradiation source (`Sr-90`, `Co-60`, `Cs-137`, `Am-241`)
#'
#' @param dose.rate.unit [character] (*with default*):
#' unit of the input dose rate (`Gy/min` or `Gy/s`); the output is always
#' given in Gy/s as valid for [Second2Gray]
#'
#' @param predict [integer] (*with default*):
#' if set, predict the source dose rate for this number of days, starting
#' from the last value of `measurement.date`. Not recommended with multiple
#' calibration dates.
#'
#' @return
#' Returns an S4 object of type [RLum.Results-class] with `$dose.rate`
#' (data.frame with columns `dose.rate`, `dose.rate.error`, `date`),
#' `$parameters` (list with `source.type`, `halflife`, `dose.rate.unit`)
#' and `$call`. Access via [get_RLum]; a plot method is provided via
#' [plot_RLum].
#'
#' @section Function version: 0.3.2
#'
#' @author
#' Margret C. Fuchs, HZDR, Helmholtz-Institute Freiberg for Resource Technology (Germany) \cr
#' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France)
#'
#' @seealso [Second2Gray], [get_RLum], [plot_RLum]
#'
#' @references
#' NNDC, Brookhaven National Laboratory [http://www.nndc.bnl.gov/]()
#'
#' @keywords manip
#'
#' @examples
#'
#' dose.rate <- calc_SourceDoseRate(measurement.date = "2012-01-27",
#'                                  calib.date = "2014-12-19",
#'                                  calib.dose.rate = 0.0438,
#'                                  calib.error = 0.0019)
#' get_RLum(dose.rate)
#'
#' @md
#' @export
calc_SourceDoseRate <- function(
  measurement.date = Sys.Date(),
  calib.date,
  calib.dose.rate,
  calib.error,
  source.type = "Sr-90",
  dose.rate.unit = "Gy/s",
  predict = NULL
){

  ## convert character input to Date objects
  if (is(measurement.date, "character")) {
    measurement.date <- as.Date(measurement.date)
  }

  if (is(calib.date, "character")) {
    calib.date <- as.Date(calib.date)
  }

  # --- if predict is set
  ## bug fix: tail(x) returns up to SIX elements (default n = 6L), which
  ## breaks seq(from = ...) for vector input; we want only the last date
  if (!is.null(predict) && predict > 1) {
    measurement.date <- seq(tail(measurement.date, n = 1), by = 1, length = predict)
  }

  # -- calc days since source calibration
  decay.days <- measurement.date - calib.date

  # -- calc dose rate of source at date of measurement for the chosen source type
  halflife.years <- switch(
    source.type,
    "Sr-90" = 28.90,
    "Am-241" = 432.6,
    "Co-60" = 5.274,
    "Cs-137" = 30.08
  )

  ## switch() returns NULL for an unmatched source type
  if (is.null(halflife.years))
    stop("[calc_SourceDoseRate()] Source type unknown or currently not supported!",
         call. = FALSE)

  halflife.days <- halflife.years * 365

  # N(t) = N(0) * exp(-lambda * t) with lambda = log(2)/T1.2
  decay.factor <- exp((-log(2) / halflife.days) * as.numeric(decay.days))
  measurement.dose.rate <- calib.dose.rate * decay.factor
  measurement.dose.rate.error <- calib.error * decay.factor

  # -- convert input unit to [Gy/s]
  if (dose.rate.unit == "Gy/min") {
    source.dose.rate <- measurement.dose.rate / 60
    ## scale the error by the same relative amount
    source.dose.rate.error <- source.dose.rate *
      (measurement.dose.rate.error / measurement.dose.rate)

  } else if (dose.rate.unit == "Gy/s") {
    source.dose.rate <- measurement.dose.rate
    source.dose.rate.error <- measurement.dose.rate.error
  }

  # Output --------------------------------------------------------------------------------------
  dose_rate <- data.frame(
    dose.rate = source.dose.rate,
    dose.rate.error = source.dose.rate.error,
    date = measurement.date,
    stringsAsFactors = TRUE
  )

  return(set_RLum(
    class = "RLum.Results",
    data = list(
      dose.rate = dose_rate,
      parameters = list(source.type = source.type,
                        halflife = halflife.years,
                        dose.rate.unit = dose.rate.unit),
      call = sys.call()
    )))
}
#' Apply the Kars et al. (2008) model (deprecated)
#'
#' Deprecated wrapper: the approach of Kars et al. (2008), developed from
#' the model of Huntley (2006), is now implemented in [calc_Huntley2006].
#' Calling this function raises a deprecation notice and forwards all
#' arguments unchanged. The sample specific \eqn{\rho}' (`rhop`) value must
#' be determined separately, e.g. via [analyse_FadingMeasurement].
#'
#' @param fit.method [character] (*with default*):
#' Fit function of the dose response curve, either `EXP` (the default,
#' the original single saturating exponential of the Huntley 2006 /
#' Kars et al. 2008 model) or `GOK` (general-order kinetics, an
#' experimental adaption to be used with great care).
#'
#' @param ... Parameters passed to [calc_Huntley2006].
#'
#' @return An [RLum.Results-class] object is returned.
#'
#' @section Function version: 0.4.0
#'
#' @author
#' Georgina E. King, University of Bern (Switzerland) \cr
#' Christoph Burow, University of Cologne (Germany)
#'
#' @note **This function is deprecated and will eventually be removed from the package.**
#' **Please use the function [calc_Huntley2006()] instead**
#' **(use `fit.method = "EXP"` to apply the model after Kars et al., 2008).**
#'
#' @keywords datagen
#'
#' @references
#' Kars, R.H., Wallinga, J., Cohen, K.M., 2008. A new approach towards anomalous
#' fading correction for feldspar IRSL dating-tests on samples in field
#' saturation. Radiation Measurements 43, 786-790. doi:10.1016/j.radmeas.2008.01.021
#'
#' Huntley, D.J., 2006. An explanation of the power-law decay of luminescence.
#' Journal of Physics: Condensed Matter 18, 1359-1365. doi:10.1088/0953-8984/18/4/020
#'
#' @md
#' @export
calc_Kars2008 <- function(fit.method = "EXP", ...) {
  ## emit the standard deprecation notice, then delegate everything
  .Deprecated("calc_Huntley2006")
  calc_Huntley2006(fit.method = fit.method, ...)
}
#' Apply the maximum age model to a given De distribution
#'
#' Wrapper around [calc_MinDose] that fits the maximum age model following
#' the approach of Olley et al. (2006): the De values are mirrored (natural
#' log transform, sign flip, x-axis offset), the minimum age model is fitted
#' to the mirrored distribution and the result is transformed back. See
#' [calc_MinDose] for the full documentation of the underlying model and of
#' all shared arguments.
#'
#' @param data [RLum.Results-class] or [data.frame] (**required**):
#' for [data.frame]: two columns with De `(data[ ,1])` and De error `(data[ ,2])`.
#'
#' @param sigmab [numeric] (**required**):
#' additional spread in De values (expected overdispersion of a well-bleached
#' sample); a fraction for the logged model, absolute units otherwise.
#'
#' @param log [logical] (*with default*):
#' fit the (un-)logged three parameter minimum dose model to De data
#'
#' @param par [numeric] (*with default*):
#' apply the 3- or 4-parameter minimum age model (`par=3` or `par=4`).
#'
#' @param bootstrap [logical] (*with default*):
#' apply the recycled bootstrap approach of Cunningham & Wallinga (2012).
#'
#' @param init.values [numeric] (*with default*):
#' starting values for gamma, sigma, p0 and mu.
#'
#' @param plot [logical] (*with default*):
#' plot output (`TRUE`/`FALSE`)
#'
#' @param ... further arguments for bootstrapping (`bs.M, bs.N, bs.h, sigmab.sd`),
#' see [calc_MinDose].
#'
#' @return Please see [calc_MinDose].
#'
#' @section Function version: 0.3.1
#'
#' @author
#' Christoph Burow, University of Cologne (Germany) \cr
#' Based on a rewritten S script of Rex Galbraith, 2010
#'
#' @seealso [calc_CentralDose], [calc_CommonDose], [calc_FiniteMixture],
#' [calc_FuchsLang2001], [calc_MinDose]
#'
#' @references
#' Olley, J.M., Roberts, R.G., Yoshida, H., Bowler, J.M., 2006. Single-grain
#' optical dating of grave-infill associated with human burials at Lake Mungo,
#' Australia. Quaternary Science Reviews 25, 2469-2474
#'
#' @examples
#'
#' ## load example data
#' data(ExampleData.DeValues, envir = environment())
#'
#' # apply the maximum dose model
#' calc_MaxDose(ExampleData.DeValues$CA1, sigmab = 0.2, par = 3)
#'
#' @md
#' @export
calc_MaxDose <- function(
  data,
  sigmab,
  log = TRUE,
  par = 3,
  bootstrap = FALSE,
  init.values,
  plot = TRUE,
  ...
){
  ## delegate to the minimum dose model with inverted (mirrored) data
  res <- calc_MinDose(data, sigmab, log, par, bootstrap, init.values,
                      plot = FALSE, invert = TRUE, ...)

  ## re-label the output so downstream code can distinguish MAM and its inverse
  res@originator <- "calc_MaxDose"

  ## plotting may fail for some inputs; never abort the analysis because of it
  if (plot)
    try(plot_RLum.Results(res, ...))

  invisible(res)
}
#'
#' @section Function version: 0.2.1
#'
#' @author
#' Christoph Burow, University of Cologne (Germany) \cr
#' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @seealso [plot], [plot_RLum]
#'
#' @keywords aplot
#'
#' @examples
#'
#'
#' ###load data
#' data(ExampleData.DeValues, envir = environment())
#'
#' # apply the un-logged minimum age model
#' mam <- calc_MinDose(data = ExampleData.DeValues$CA1, sigmab = 0.2, log = TRUE, plot = FALSE)
#'
#' ##plot
#' plot_RLum.Results(mam)
#'
#' # estimate the number of grains on an aliquot
#' grains<- calc_AliquotSize(grain.size = c(100,150), sample.diameter = 1, plot = FALSE, MC.iter = 100)
#'
#' ##plot
#' plot_RLum.Results(grains)
#'
#'
#' @md
#' @export
plot_RLum.Results<- function(
  object,
  single = TRUE,
  ...
){

  ##============================================================================##
  ## CONSISTENCY CHECK OF INPUT DATA
  ##============================================================================##

  ## check if object is of class RLum.Results; the plot routine chosen below
  ## is dispatched on the originator slot, so anything else is rejected early
  if(!is(object,"RLum.Results")){
    stop("[plot_RLum.Results()] Input object is not of type 'RLum.Results'")
  }

  ##============================================================================##
  ## SAFE AND RESTORE PLOT PARAMETERS ON EXIT
  ##============================================================================##

  ## the case branches change par()/layout() heavily; restore on exit so the
  ## caller's graphics device is left untouched
  par.old <- par(no.readonly = TRUE)
  on.exit(par(par.old))

  ##============================================================================##
  ## ... ARGUMENTS
  ##============================================================================##

  ## deal with additional arguments; NOTE: these locals deliberately shadow
  ## base/graphics names (mtext, log, col, ...) -- R still resolves the
  ## function calls of the same name correctly
  extraArgs <- list(...)

  ##main
  main <- if("main" %in% names(extraArgs)) {extraArgs$main} else {""}

  ##mtext
  mtext <- if("mtext" %in% names(extraArgs)) {extraArgs$mtext} else {""}

  ##log
  log <- if("log" %in% names(extraArgs)) {extraArgs$log} else {""}

  ##lwd
  lwd <- if("lwd" %in% names(extraArgs)) {extraArgs$lwd} else {1}

  ##lty
  lty <- if("lty" %in% names(extraArgs)) {extraArgs$lty} else {1}

  ##type
  type <- if("type" %in% names(extraArgs)) {extraArgs$type} else {"l"}

  ##pch
  pch <- if("pch" %in% names(extraArgs)) {extraArgs$pch} else {1}

  ##col
  col <- if("col" %in% names(extraArgs)) {extraArgs$col} else {"black"}

  ##============================================================================##
  ## PLOTTING
  ##============================================================================##

  #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
  ## CASE 1: Minimum Age Model / Maximum Age Model
  if(object@originator=="calc_MinDose" || object@originator=="calc_MaxDose") {

    ## single MAM estimate
    # plot profile log likelhood
    profiles <- object@data$profile

    # parameters were fitted in log space -- back-transform for display
    if (object@data$args$log) {
      profiles@profile$gamma$par.vals[ ,"gamma"] <- exp(profiles@profile$gamma$par.vals[ ,"gamma"])
      profiles@profile$sigma$par.vals[ ,"sigma"] <- exp(profiles@profile$sigma$par.vals[ ,"sigma"])
      if (object@data$args$par == 4)
        profiles@profile$mu$par.vals[ ,"mu"] <- exp(profiles@profile$mu$par.vals[ ,"mu"])
    }

    if (single)
      par(mfrow=c(2, 2))

    param <- c("gamma", "sigma", "p0", "mu")

    for (i in param) {
      # the 3-parameter model has no 'mu' -- stop after the first three
      if (object@data$summary$par == 3 && i == "mu")
        break

      tryCatch({
        xvals <- as.data.frame(profiles@profile[[i]]$par.vals)[[i]]
        xlim <- range(xvals[xvals > 0])
        suppressWarnings(
          bbmle::plot(profiles, which = i, xlab = "", xaxt = "n", xlim = xlim)
        )
        axis(1, mgp = c(3, 0.5, 0))
        title(xlab = i, line = 1.2)

        # additional axis with log-scale tick labels if requested
        if (i %in% c("gamma", "sigma", "mu") && object@data$args$log && object@data$args$log.output) {
          axis(1, at = axTicks(1), labels = format(round(log(axTicks(1)), 2), nsmall = 2), line = 2.5, mgp = c(3, 0.5, 0))
          title(xlab = paste0("log(", i, ")"), line = 4)
        }
      }, error = function(e) {
        message(paste("Unable to plot the Likelihood profile for:", i))
      })
    }

    par(mfrow=c(1,1))
    # })

    ## bootstrap MAM estimates
    if(object@data$args$bootstrap==TRUE) {
      # save previous plot parameter and set new ones
      .pardefault<- par(no.readonly = TRUE)

      # get De-llik pairs
      pairs<- object@data$bootstrap$pairs$gamma

      # get polynomial fit objects
      poly.lines<- list(poly.three=object@data$bootstrap$poly.fits$poly.three,
                        poly.four=object@data$bootstrap$poly.fits$poly.four,
                        poly.five=object@data$bootstrap$poly.fits$poly.five,
                        poly.six=object@data$bootstrap$poly.fits$poly.six)

      # define polynomial curve functions for plotting (coefficients are
      # stored lowest order first, hence the reversed indexing)
      poly.curves<- list(poly.three.curve=function(x) {
        poly.lines$poly.three$coefficient[4]*x^3 + poly.lines$poly.three$coefficient[3]*x^2 + poly.lines$poly.three$coefficient[2]*x + poly.lines$poly.three$coefficient[1]
      },
      poly.four.curve=function(x) {
        poly.lines$poly.four$coefficient[5]*x^4 + poly.lines$poly.four$coefficient[4]*x^3 + poly.lines$poly.four$coefficient[3]*x^2 + poly.lines$poly.four$coefficient[2]*x + poly.lines$poly.four$coefficient[1]
      },
      poly.five.curve=function(x) {
        poly.lines$poly.five$coefficient[6]*x^5 + poly.lines$poly.five$coefficient[5]*x^4 + poly.lines$poly.five$coefficient[4]*x^3 + poly.lines$poly.five$coefficient[3]*x^2 + poly.lines$poly.five$coefficient[2]*x + poly.lines$poly.five$coefficient[1]
      },
      poly.six.curve=function(x) {
        poly.lines$poly.six$coefficient[7]*x^6 + poly.lines$poly.six$coefficient[6]*x^5 + poly.lines$poly.six$coefficient[5]*x^4 + poly.lines$poly.six$coefficient[4]*x^3 + poly.lines$poly.six$coefficient[3]*x^2 + poly.lines$poly.six$coefficient[2]*x + poly.lines$poly.six$coefficient[1]
      })

      ## --------- PLOT "RECYCLE" BOOTSTRAP RESULTS ------------ ##
      if(single==TRUE) {
        layout(cbind(c(1,1,2, 5,5,6), c(3,3,4, 7,7,8)))
        par(cex = 0.6)
      } else {
        layout(matrix(c(1,1,2)),2,1)
        par(cex = 0.8)
      }

      # one likelihood+residual panel pair per polynomial degree (3..6)
      for(i in 1:4) {
        ## ----- LIKELIHOODS

        # set margins (bottom, left, top, right)
        par(mar=c(0,5,5,3))

        # sort De and likelihoods by De (increasing)
        pairs<- pairs[order(pairs[,1]),]

        # remove invalid NA values
        pairs<- na.omit(pairs)

        plot(x=pairs[,1],
             y=pairs[,2],
             xlab="Equivalent Dose [Gy]",
             ylab="Likelihood",
             xlim=range(pretty(pairs[,1])),
             ylim=range(pretty(c(0, as.double(quantile(pairs[,2],probs=0.98))))),
             xaxt = "n",
             xaxs = "i",
             yaxs = "i",
             bty = "l",
             main="Recycled bootstrap MAM-3")

        axis(side = 1, labels = FALSE, tick = FALSE)

        # add subtitle with the bootstrap settings
        mtext(as.expression(bquote(italic(M) == .(object@data$args$bs.M) ~ "|" ~
                                     italic(N) == .(object@data$args$bs.N) ~ "|" ~
                                     italic(sigma[b]) == .(object@data$args$sigmab) ~
                                     "\u00B1" ~ .(object@data$args$sigmab.sd) ~ "|" ~
                                     italic(h) == .(round(object@data$args$bs.h,1))
        )
        ),
        side = 3, line = 0.3, adj = 0.5,
        cex = if(single){0.5}else{0.8})

        # add points
        points(x=pairs[,1], y=pairs[,2], pch=1, col = "grey80")

        # get polynomial function
        poly.curve<- poly.curves[[i]]

        # add curve to plot
        curve(poly.curve, from = min(pairs[,1]), to = (max(pairs[,1])),
              col = "black", add = TRUE, type = "l")

        # add legend (the local 'legend' vector shadows graphics::legend,
        # which is still found as a function in the call below)
        legend<- c("Third degree", "Fourth degree", "Fifth degree", "Sixth degree")

        legend("topright", xjust = 0,
               legend = legend[i],
               y.intersp = 1.2,
               bty = "n",
               title = "Polynomial Fit",
               lty = 1,
               lwd= 1.5)

        ## ----- RESIDUALS

        # set margins (bottom, left, top, right)
        par(mar=c(5,5,0,3))

        plot(x = pairs[,1],
             y = residuals(poly.lines[[i]]),
             ylim = c(min(residuals(poly.lines[[i]]))*1.2,
                      as.double(quantile(residuals(poly.lines[[i]]),probs=0.99))),
             xlim=range(pretty(pairs[,1])),
             xaxt = "n",
             bty = "l",
             xaxs = "i",
             col = "grey80",
             ylab = "Fit residual",
             xlab = "Equivalent dose [Gy]")

        axis(side = 1, labels = TRUE, tick = TRUE)

        # add horizontal line
        abline(h = 0, lty=2)

        # calculate residual sum of squares (RSS) and add to plot
        rss<- sum(residuals(poly.lines[[i]])^2)
        mtext(text = paste("RSS =",round(rss,3)), adj = 1,
              side = 3, line = -2,
              cex = if(single){0.6}else{0.8})

        ## ----- PROPORTIONS

      }##EndOf::Plot_loop

      # restore previous plot parameters
      par(.pardefault)

      ### TODO: plotting of the LOESS fit needs to be fleshed out
      ### possibly integrate this in the prior polynomial plot loop

      ### LOESS PLOT
      pairs<- object@data$bootstrap$pairs$gamma
      pred<- predict(object@data$bootstrap$loess.fit)
      loess<- cbind(pairs[,1], pred)
      loess<- loess[order(loess[,1]),]

      # plot gamma-llik pairs
      plot(pairs,
           ylim = c(0, as.double(quantile(pairs[,2],probs=0.99))),
           ylab = "Likelihood",
           xlab = "Equivalent dose [Gy]",
           col = "gray80")

      # add LOESS line
      lines(loess, type = "l", col = "black")

      ### ------ PLOT BOOTSTRAP LIKELIHOOD FIT

      par(mar=c(5,4,4,4))

      # widen the x-range by the error of the smallest/largest De value
      xlim<- range(pretty(object@data$data[,1]))
      xlim[1]<- xlim[1]-object@data$data[which.min(object@data$data[,1]),2]
      xlim[2]<- xlim[2]+object@data$data[which.max(object@data$data[,1]),2]
      xlim<- range(pretty(xlim))

      # empty plot
      plot(NA,NA,
           xlim=xlim,
           ylim=c(0,2),
           xlab="Equivalent dose [Gy]",
           ylab="",
           bty="l",
           axes=FALSE,
           xaxs="i",
           yaxs="i",
           yaxt="n")

      axis(side = 1)
      axis(side = 2, at = c(0,0.5,1))

      mtext(text = "Normalised likelihood / density", side = 2, line = 2.5, adj = 0)

      # set the polynomial to plot
      poly.curve<- poly.curves[[1]] # three degree poly

      # plot a nice grey polygon as in the publication
      step<- 0.1
      x<- seq(min(pairs[,1]), max(pairs[,1]), step)
      y<- poly.curve(x)

      # normalise y-values
      y<- y/max(y)

      # close the polygon at both ends
      x<- c(min(pairs[,1]), x, max(pairs[,1]))
      y<- c(0, y, 0)

      # cutoff negative y values
      neg<- which(y<0)
      if (length(neg) != 0) {
        y<- y[-neg]
        x<- x[-neg]
      }

      # add bootstrap likelihood polygon to plot
      polygon(x, y, col = "grey80", border = NA)

      if (all(x > max(xlim)) || all(x < min(xlim)))
        warning("Bootstrap estimates out of x-axis range.", call. = FALSE)

      ### ----- PLOT MAM SINGLE ESTIMATE

      # symmetric errors, might not be appropriate
      mean<- object@data$summary$de
      sd<- object@data$summary$de_err

      if (any(is.na(c(mean, sd)))) {
        warning("Unable to plot the MAM single estimate (NA value).", call. = FALSE)
      } else {
        x<- seq(mean-5*sd, mean+5*sd, 0.001)
        y<- dnorm(seq(mean-5*sd, mean+5*sd, 0.001), mean, sd)

        # normalise y-values
        y<- y/max(y)

        points(x, y, type="l", col="red")

        ## asymmetric errors
        x<- unlist(object@data$profile@profile$gamma$par.vals[,1])
        y<- abs(unlist(object@data$profile@profile$gamma$z))

        if(object@data$args$log == TRUE) {
          x<- exp(x)
        }

        # now invert the data by shifting
        y<- -y
        y<- y-min(y)
        y<- y/max(y)

        # fit a smoothing spline
        l<- spline(x = x, y = y, method = "n", n = 1000)

        # make the endpoints zero
        l$y[1]<- l$y[length(l$y)]<- 0

        # add profile log likelihood curve to plot
        lines(l, col="blue", lwd=1)

        # add vertical lines of the mean values
        #points(x = 80, y = 100,type = "l")
      }

      #### ------ PLOT DE

      par(new = TRUE)

      # sort the data in ascending order
      dat<- object@data$data[order(object@data$data[,1]),]

      x<- dat[,1]
      y<- 1:length(object@data$data[,1])

      plot(x = x, y = y,
           xlim=xlim,
           ylim=c(0, max(y)+1),
           axes = FALSE,
           pch = 16,
           xlab = "",
           ylab="",
           xaxs="i",
           yaxs="i")

      axis(side = 4)
      mtext(text = "# Grain / aliquot", side = 4, line = 2.5)

      # get sorted errors
      err<- object@data$data[order(object@data$data[,1]),2]

      # fancy error bars
      arrows(x0 = x-err, y0 = y,
             x1 = x+err, y1 = y,
             code = 3, angle = 90, length = 0.05)

      ### ---- AUXILLARY

      # add legend
      legend("bottomright",
             bty = "n",
             col = c("grey80", "red", "blue", "black"),
             pch = c(NA,NA,NA,16),
             lty = c(1,1,1,1),
             lwd=c(10,2,2,2),
             legend = c("Bootstrap likelihood", "Profile likelihood (gaussian fit)","Profile likelihood", "Grain / aliquot"),
      )

    }##EndOf::Bootstrap_plotting
  }#EndOf::CASE1_MinimumAgeModel-3

  #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
  ## CASE 2: Central Age Model
  if(object@originator=="calc_CentralDose") {

    # get profile log likelihood data (sig is converted to percent)
    sig<- object@data$profile$sig*100
    llik<- object@data$profile$llik

    # save previous plot parameter and set new ones
    .pardefault<- par(no.readonly = TRUE)

    # plot the profile log likeihood
    par(oma=c(2,1,2,1),las=1,cex.axis=1.2, cex.lab=1.2)
    plot(sig,llik,type="l",xlab=as.expression(bquote(sigma[OD]~"[%]")),ylab="Log likelihood",lwd=1.5)
    abline(h=0,lty=3)
    # -1.92 = half the chi-squared(1) 95% quantile -> likelihood-ratio CI
    abline(h=-1.92,lty=3)
    title(as.expression(bquote("Profile log likelihood for" ~ sigma[OD])))

    # find upper and lower confidence limits for sigma
    sigmax<- sig[which.max(llik)]
    tf<- abs(llik+1.92) < 0.05
    sig95<- sig[tf]
    ntf<- length(sig95)
    sigL<- sig95[1]
    sigU<- sig95[ntf]

    # put them on the graph
    abline(v=sigL)
    abline(v=sigmax)
    abline(v=sigU)
    dx<- 0.006
    dy<- 0.2
    ytext<- min(llik) + dy
    res<- c(sigL,sigmax,sigU)
    text(res+dx,rep(ytext,3),round(res,2),adj=0)

    # restore previous plot parameters
    par(.pardefault)
    rm(.pardefault)
  }##EndOf::Case 2 - calc_CentralDose()

  #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
  ## CASE 3: Fuchs & Lang 2001
  if(object@originator=="calc_FuchsLang2001") {

    ##deal with addition arguments
    extraArgs <- list(...)

    main <- if("main" %in% names(extraArgs)) {extraArgs$main} else {"Fuchs & Lang (2001)"}
    xlab <- if("xlab" %in% names(extraArgs)) {extraArgs$xlab} else {expression(paste(D[e]," [s]"))}
    ylab <- if("ylab" %in% names(extraArgs)) {extraArgs$ylab} else {"# Aliquots"}
    sub <- if("sub" %in% names(extraArgs)) {extraArgs$sub} else {""}
    cex <- if("cex" %in% names(extraArgs)) {extraArgs$cex} else {1}
    lwd <- if("lwd" %in% names(extraArgs)) {extraArgs$lwd} else {1}
    pch <- if("pch" %in% names(extraArgs)) {extraArgs$pch} else {19}
    ylim <- if("ylim" %in% names(extraArgs)) {extraArgs$ylim} else {c(1,length(object@data$data[,1])+3)}
    xlim <- if("xlim" %in% names(extraArgs)) {extraArgs$xlim} else {c(min(object@data$data[,1])-max(object@data$data[,2]), max(object@data$data[,1])+max(object@data$data[,2]))}
    mtext <- if("mtext" %in% names(extraArgs)) {extraArgs$mtext} else {"unknown sample"}

    # extract relevant plotting parameters
    o<- order(object@data$data[1])
    data_ordered<- object@data$data[o,]
    usedDeValues<- object@data$usedDeValues
    n.usedDeValues<- object@data$summary$n.usedDeValues

    par(cex = cex, mfrow=c(1,1))

    ##PLOT
    counter<-seq(1,max(o))

    plot(NA,NA,
         ylim = ylim,
         xlim = xlim,
         xlab = xlab,
         ylab = ylab,
         main = main,
         sub = sub)

    ##SEGMENTS (error bars for each De value)
    segments(data_ordered[,1]-data_ordered[,2],1:length(data_ordered[,1]),
             data_ordered[,1]+data_ordered[,2],1:length(data_ordered[,1]),
             col="gray")

    ##POINTS
    points(data_ordered[,1], counter,pch=pch)

    ##LINES
    ##BOUNDARY INFORMATION
    ##lower boundary
    lines(c(
      usedDeValues[length(usedDeValues[,1])-n.usedDeValues+1,1], #boundary_counter for incorporate skipped values
      usedDeValues[length(usedDeValues[,1])-n.usedDeValues+1,1]),
      c(min(o)-0.5,max(o)+0.5),
      col="red",
      lty="dashed", lwd = lwd)

    #upper boundary
    lines(c(max(usedDeValues[,1]),max(usedDeValues[,1])),c(min(o)-0.5,max(o)+0.5),
          col="red",lty="dashed", lwd = lwd)

    #plot some further informations into the grafik
    arrows(
      usedDeValues[length(usedDeValues[,1])-n.usedDeValues+1,1]+usedDeValues[length(usedDeValues[,1])-n.usedDeValues+1,1]*0.02, #x1
      max(o)+0.5, #y1
      max(usedDeValues[,1]-usedDeValues[,1]*0.02), #x2
      max(o)+0.5, #y2,
      code=3,
      length=0.03)

    text(
      c(
        usedDeValues[length(usedDeValues[,1])-n.usedDeValues+1,1],
        usedDeValues[length(usedDeValues[,1])-n.usedDeValues+1,1]),
      c(max(o)+2,max(o)+2),
      labels=paste("used values = ", n.usedDeValues),
      cex=0.6*cex,
      adj=0)

    ##MTEXT
    mtext(side=3,mtext,cex=cex)
  }

  #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
  ## CASE 4: Finite Mixture Model
  if(object@originator == "calc_FiniteMixture") {

    # plotting is only possible when several numbers of components were fitted
    if(length(object@data$args$n.components) > 1L) {

      ##deal with addition arguments
      extraArgs <- list(...)

      main <- if("main" %in% names(extraArgs)) {extraArgs$main} else {"Finite Mixture Model"}
      plot.proportions<- if("plot.proportions" %in% names(extraArgs)) {extraArgs$plot.proportions} else {TRUE}
      pdf.colors<- if("pdf.colors" %in% names(extraArgs)) {extraArgs$pdf.colors} else {"gray"}
      pdf.weight<- if("pdf.weight" %in% names(extraArgs)) {extraArgs$pdf.weight} else {TRUE}
      pdf.sigma<- if("pdf.sigma" %in% names(extraArgs)) {extraArgs$pdf.sigma} else {"sigmab"}

      # extract relevant data from object
      n.components<- object@data$args$n.components
      comp.n<- object@data$components
      sigmab<- object@data$args$sigmab
      BIC.n<- object@data$BIC$BIC
      LLIK.n<- object@data$llik$llik

      # save previous plot parameter and set new ones
      .pardefault<- par(no.readonly = TRUE)

      ## DEVICE AND PLOT LAYOUT
      n.plots<- length(n.components) #number of PDF plots in plotarea #1
      seq.vertical.plots<- seq(from = 1, to = n.plots, by = 1) #indices
      ID.plot.two<- n.plots+if(plot.proportions==TRUE){1}else{0} #ID of second plot area
      ID.plot.three<- n.plots+if(plot.proportions==TRUE){2}else{1} #ID of third plot area

      #empty vector for plot indices
      seq.matrix<- vector(mode="integer", length=4*n.plots)

      #fill vector with plot indices in correct order
      cnt<- 1
      seq<- seq(1,length(seq.matrix),4)
      for(i in seq) {
        seq.matrix[i]<- cnt
        seq.matrix[i+1]<- cnt
        seq.matrix[i+2]<- if(plot.proportions==TRUE){ID.plot.two}else{cnt}
        seq.matrix[i+3]<- ID.plot.three
        cnt<- cnt+1
      }

      # create device layout
      layout(matrix(c(seq.matrix),4,n.plots))

      # outer margins (bottom, left, top, right)
      par(oma=c(2.5,5,3,5))

      # general plot parameters (global scaling, allow overplotting)
      par(cex = 0.8, xpd = NA)

      # define color palette for prettier output
      if(pdf.colors == "colors") {
        col.n<- c("red3", "slateblue3", "seagreen", "tan3", "yellow3",
                  "burlywood4", "magenta4", "mediumpurple3", "brown4","grey",
                  "aquamarine")
        poly.border<- FALSE
      }
      if(pdf.colors == "gray" || pdf.colors == "grey") {
        col.n<- gray.colors(length(n.components)*2)
        poly.border<- FALSE
      }
      if(pdf.colors == "none") {
        col.n<- NULL
        poly.border<- TRUE
      }

      ##--------------------------------------------------------------------------
      ## PLOT 1: EQUIVALENT DOSES OF COMPONENTS

      ## create empty plot without x-axis
      for(i in 1:n.plots) {

        # row indices of the mean values (components are stored as
        # mean/sd/proportion triplets)
        pos.n<- seq(from = 1, to = n.components[i]*3, by = 3)

        # set margins (bottom, left, top, right)
        par(mar=c(1,0,2,0))

        # empty plot area
        plot(NA, NA,
             xlim=c(min(n.components)-0.2, max(n.components)+0.2),
             ylim=c(min(comp.n[pos.n,]-comp.n[pos.n+1,], na.rm = TRUE),
                    max((comp.n[pos.n,]+comp.n[pos.n+1,])*1.1, na.rm = TRUE)),
             ylab="",
             xaxt="n",
             yaxt="n",
             xlab="")

        # add text in upper part of the plot ("k = 1,2..n")
        mtext(bquote(italic(k) == .(n.components[i])),
              side = 3, line = -2, cex=0.8)

        # add y-axis label (only for the first plot)
        if(i==1) {
          mtext(expression(paste("D"[e]," [Gy]")), side=2,line=2.7, cex=1)
        }

        # empty list to store normal distribution densities
        sapply.storage<- list()

        ## NORMAL DISTR. OF EACH COMPONENT
        options(warn=-1) #supress warnings for NA values

        # LOOP - iterate over number of components
        for(j in 1:max(n.components)) {

          # draw random values of the ND to check for NA values
          comp.nd.n<- sort(rnorm(n = length(object@data$data[,1]),
                                 mean = comp.n[pos.n[j],i],
                                 sd = comp.n[pos.n[j]+1,i]))

          # proceed if no NA values occured
          if(length(comp.nd.n)!=0) {

            # weight - proportion of the component
            wi<- comp.n[pos.n[j]+2,i]

            # calculate density values with(out) weights
            fooX<- function(x) {
              dnorm(x, mean = comp.n[pos.n[j],i],
                    sd = if(pdf.sigma=="se"){comp.n[pos.n[j]+1,i]}
                    else{if(pdf.sigma=="sigmab"){comp.n[pos.n[j],i]*sigmab}}
              )*
                if(pdf.weight==TRUE){wi}else{1}
            }

            # x-axis scaling - determine highest dose in first cycle
            if(i==1 && j==1){
              max.dose<- max(object@data$data[,1])+sd(object@data$data[,1])/2
              min.dose<- min(object@data$data[,1])-sd(object@data$data[,1])/2

              # density function to determine y-scaling if no weights are used
              fooY<- function(x) {
                dnorm(x, mean = na.exclude(comp.n[pos.n,]),
                      sd = na.exclude(comp.n[pos.n+1,]))
              }

              # set y-axis scaling
              dens.max<-max(sapply(0:max.dose, fooY))
            }##EndOfIf::first cycle settings

            # override y-axis scaling if weights are used
            if(pdf.weight==TRUE){
              sapply.temp<- list()
              for(b in 1:max(n.components)){

                # draw random values of the ND to check for NA values
                comp.nd.n<- sort(rnorm(n = length(object@data$data[,1]),
                                       mean = comp.n[pos.n[b],i],
                                       sd = comp.n[pos.n[b]+1,i]))

                # proceed if no NA values occured
                if(length(comp.nd.n)!=0) {

                  # weight - proportion of the component
                  wi.temp<- comp.n[pos.n[b]+2,i]

                  fooT<- function(x) {
                    dnorm(x, mean = comp.n[pos.n[b],i],
                          sd = if(pdf.sigma=="se"){comp.n[pos.n[b]+1,i]}
                          else{if(pdf.sigma=="sigmab"){comp.n[pos.n[b],i]*sigmab}}
                    )*wi.temp
                  }
                  sapply.temp[[b]]<- sapply(0:max.dose, fooT)
                }
              }
              # maximum of the summed (weighted) densities
              dens.max<- max(Reduce('+', sapply.temp))
            }

            # calculate density values for 0 to maximum dose
            # (NOTE: this local 'sapply' shadows base::sapply as a value)
            sapply<- sapply(0:max.dose, fooX)

            # save density values in list for sum curve of gaussians
            sapply.storage[[j]]<- sapply

            ## determine axis scaling
            # x-axis (dose)
            if("dose.scale" %in% names(extraArgs)) {
              y.scale<- extraArgs$dose.scale
            } else {
              y.scale<- c(min.dose,max.dose)
            }
            # y-axis (density)
            if("pdf.scale" %in% names(extraArgs)) {
              x.scale<- extraArgs$pdf.scale
            } else {
              x.scale<- dens.max*1.1
            }

            ## PLOT Normal Distributions (axes are swapped: density on x)
            par(new=TRUE)
            plot(sapply, 1:length(sapply)-1,
                 type="l", yaxt="n", xaxt="n", col=col.n[j], lwd=1,
                 ylim=y.scale,
                 xlim=c(0,x.scale),
                 xaxs="i", yaxs="i",
                 ann=FALSE, xpd = FALSE)

            # draw colored polygons under curve
            polygon(x=c(min(sapply), sapply, min(sapply)),
                    y=c(0, 0:max.dose, 0),
                    col = adjustcolor(col.n[j], alpha.f = 0.66),
                    yaxt="n", border=poly.border, xpd = FALSE, lty = 2, lwd = 1.5)
          }
        }##EndOf::Component loop

        #turn warnings on again
        options(warn=0)

        # Add sum of gaussians curve
        par(new = TRUE)
        plot(Reduce('+', sapply.storage),1:length(sapply)-1,
             type="l", yaxt="n", xaxt="n", col="black",
             lwd=1.5, lty = 1,
             ylim=y.scale,
             xlim=c(0,x.scale),
             xaxs="i", yaxs="i", ann=FALSE, xpd = FALSE)

        # draw additional info during first k-cycle
        if(i == 1) {

          # plot title
          mtext("Normal distributions",
                side = 3, font = 2, line = 0, adj = 0, cex = 0.8)

          # main title
          mtext(main,
                side = 3, font = 2, line = 3.5, adj = 0.5,
                at = grconvertX(0.5, from = "ndc", to = "user"))

          # subtitle
          mtext(as.expression(bquote(italic(sigma[b]) == .(sigmab) ~ "|" ~
                                       n == .(length(object@data$data[,1])))),
                side = 3, font = 1, line = 2.2, adj = 0.5,
                at = grconvertX(0.5, from = "ndc", to = "user"), cex = 0.9)

          # x-axis label
          mtext("Density [a.u.]",
                side = 1, line = 0.5, adj = 0.5,
                at = grconvertX(0.5, from = "ndc", to = "user"))

          # draw y-axis with proper labels
          axis(side=2, labels = TRUE)
        }

        if(pdf.colors == "colors") {
          # create legend labels
          dose.lab.legend<- paste("c", 1:n.components[length(n.components)], sep="")

          if(max(n.components)>8) {
            ncol.temp<- 8
            yadj<- 1.025
          } else {
            ncol.temp<- max(n.components)
            yadj<- 0.93
          }

          # add legend
          if(i==n.plots) {
            legend(grconvertX(0.55, from = "ndc", to = "user"),
                   grconvertY(yadj, from = "ndc", to = "user"),
                   legend = dose.lab.legend,
                   col = col.n[1:max(n.components)],
                   pch = 15, adj = c(0,0.2), pt.cex=1.4,
                   bty = "n", ncol=ncol.temp, x.intersp=0.4)

            mtext("Components: ", cex = 0.8,
                  at = grconvertX(0.5, from = "ndc", to = "user"))
          }
        }

      }##EndOf::k-loop and Plot 1

      ##--------------------------------------------------------------------------
      ## PLOT 2: PROPORTION OF COMPONENTS
      if(plot.proportions==TRUE) {
        # margins for second plot
        par(mar=c(2,0,2,0))

        # create matrix with proportions from a subset of the summary matrix
        # (pos.n here carries over from the last iteration of the loop above)
        prop.matrix<- comp.n[pos.n+2,]*100

        # stacked barplot of proportions without x-axis
        barplot(prop.matrix,
                width=1,
                xlim=c(0.2, length(n.components)-0.2),
                ylim=c(0,100),
                axes=TRUE,
                space=0,
                col=col.n,
                xpd=FALSE,
                xaxt="n")

        # y-axis label
        mtext("Proportion [%]",
              side=2,line=3, cex=1)

        # add x-axis with corrected tick positions
        axis(side = 1, labels = n.components, at = n.components+0.5-n.components[1])

        # draw a box (not possible with barplot())
        box(lty=1, col="black")

        # add subtitle
        mtext("Proportion of components",
              side = 3, font = 2, line = 0, adj = 0, cex = 0.8)
      }

      ##--------------------------------------------------------------------------
      ## PLOT 3: BIC & LLIK

      # margins for third plot
      par(mar=c(2,0,2,0))

      # prepare scaling for both y-axes (sign-aware padding)
      BIC.scale<- c(min(BIC.n)*if(min(BIC.n)<0){1.2}else{0.8},
                    max(BIC.n)*if(max(BIC.n)<0){0.8}else{1.2})
      LLIK.scale<- c(min(LLIK.n)*if(min(LLIK.n)<0){1.2}else{0.8},
                     max(LLIK.n)*if(max(LLIK.n)<0){0.8}else{1.2})

      # plot BIC scores
      plot(n.components, BIC.n,
           main= "",
           type="b",
           pch=22,
           cex=1.5,
           xlim=c(min(n.components)-0.2, max(n.components)+0.2),
           ylim=BIC.scale,
           xaxp=c(min(n.components), max(n.components), length(n.components)-1),
           xlab=expression(paste(italic(k), " Components")),
           ylab=expression(paste("BIC")),
           cex.lab=1.25)

      # following plot should be added to previous
      par(new = TRUE)

      # plot LLIK estimates
      plot(n.components, LLIK.n,
           xlim=c(min(n.components)-0.2, max(n.components)+0.2),
           xaxp=c(min(n.components), max(n.components), length(n.components)-1),
           ylim=LLIK.scale,
           yaxt="n", type="b", pch=16, xlab="", ylab="", lty=2, cex = 1.5)

      # subtitle
      mtext("Statistical criteria",
            side = 3, font = 2, line = 0, adj = 0, cex = 0.8)

      # second y-axis with proper scaling
      axis(side = 4, ylim=c(0,100))

      # LLIK axis label
      mtext(bquote(italic(L)[max]),
            side=4,line=3, cex=1.3)

      # legend
      legend(grconvertX(0.75, from = "nfc", to = "user"),
             grconvertY(0.96, from = "nfc", to = "user"),
             legend = c("BIC", as.expression(bquote(italic(L)[max]))),
             pch = c(22,16), pt.bg=c("white","black"),
             adj = 0, pt.cex=1.3, lty=c(1,2), bty = "n",
             horiz = TRUE, x.intersp=0.5)

      ## restore previous plot parameters
      par(.pardefault)
    }
  }##EndOf::Case 4 - Finite Mixture Model

  #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
  ## CASE 5: Aliquot Size
  if(object@originator=="calc_AliquotSize") {

    # only plot when a Monte Carlo simulation was run
    if(!is.null(object@data$MC$estimates)) {
      extraArgs <- list(...)

      main <- if("main" %in% names(extraArgs)) { extraArgs$main } else { "Monte Carlo Simulation" }
      xlab <- if("xlab" %in% names(extraArgs)) { extraArgs$xlab } else { "Amount of grains on aliquot" }

      # extract relevant data
      MC.n<- object@data$MC$estimates
      MC.n.kde<- object@data$MC$kde
      MC.stats<- object@data$MC$statistics
      MC.q<- object@data$MC$quantile
      MC.iter<- object@data$args$MC.iter

      # set layout of plotting device
      layout(matrix(c(1,1,2)),2,1)
      par(cex = 0.8)

      ## plot MC estimate distribution

      # set margins (bottom, left, top, right)
      par(mar=c(2,5,5,3))

      # plot histogram
      hist(MC.n, freq=FALSE, col = "gray90",
           main="", xlab=xlab,
           xlim = c(min(MC.n)*0.95, max(MC.n)*1.05),
           ylim = c(0, max(MC.n.kde$y)*1.1))

      # add rugs to histogram
      rug(MC.n)

      # add KDE curve
      lines(MC.n.kde, col = "black", lwd = 1)

      # add mean, median and quantils (0.05,0.95)
      abline(v=c(MC.stats$mean, MC.stats$median, MC.q),
             lty=c(2, 4, 3,3), lwd = 1)

      # add main- and subtitle
      mtext(main, side = 3, adj = 0.5,
            line = 3, cex = 1)
      mtext(as.expression(bquote(italic(n) == .(MC.iter) ~ "|" ~
                                   italic(hat(mu)) == .(round(MC.stats$mean)) ~ "|" ~
                                   italic(hat(sigma)) == .(round(MC.stats$sd.abs)) ~ "|" ~
                                   italic(frac(hat(sigma),sqrt(n))) == .(round(MC.stats$se.abs)) ~ "|" ~
                                   italic(v) == .(round(MC.stats$skewness, 2))
      )
      ),
      side = 3, line = 0.3, adj = 0.5, cex = 0.9)

      # add legend
      legend("topright", legend = c("mean","median", "0.05 / 0.95 quantile"),
             lty = c(2, 4, 3), bg = "white", box.col = "white", cex = 0.9)

      ## BOXPLOT
      # set margins (bottom, left, top, right)
      par(mar=c(5,5,0,3))

      plot(NA, type="n", xlim=c(min(MC.n)*0.95, max(MC.n)*1.05),
           xlab=xlab, ylim=c(0.5,1.5),
           xaxt="n", yaxt="n", ylab="")
      par(bty="n")
      boxplot(MC.n, horizontal = TRUE, add = TRUE, bty="n")
    } else {
      # no MC data: cancel the scheduled par() restore as nothing was plotted
      on.exit(NULL)
    }
  }#EndOf::Case 5 - calc_AliqoutSize()

  #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
  ## CASE 6: calc_SourceDoseRate()
  if(object@originator=="calc_SourceDoseRate") {

    ##prepare data
    ##get data
    df <- get_RLum(object = object, data.object = "dose.rate")

    ##reduce the size for plotting, more than 100 points makes no sense
    if(nrow(df)>100) {
      df <- df[seq(1,nrow(df), length = 100),]
    }

    ##plot settings
    plot.settings <- list(
      main = "Source Dose Rate Prediction",
      xlab = "Date",
      ylab = paste0("Dose rate/(",get_RLum(object = object, data.object = "parameters")$dose.rate.unit,")"),
      log = "",
      cex = 1,
      xlim = NULL,
      ylim = c(min(df[,1]) - max(df[,2]), max(df[,1]) + max(df[,2])),
      pch = 1,
      mtext = paste0("source type: ", get_RLum(object = object, data.object = "parameters")$source.type,
                     " | ",
                     "half-life: ", get_RLum(object = object, data.object = "parameters")$halflife, " a"),
      grid = expression(nx = 10, ny = 10),
      col = 1,
      type = "b",
      lty = 1,
      lwd = 1,
      segments = ""
    )

    ##modify list if something was set
    plot.settings <- modifyList(plot.settings, list(...))

    ##plot
    plot(
      df[,3], df[,1],
      main = plot.settings$main,
      xlab = plot.settings$xlab,
      ylab = plot.settings$ylab,
      xlim = plot.settings$xlim,
      ylim = plot.settings$ylim,
      log = plot.settings$log,
      pch = plot.settings$pch,
      # NOTE(review): 'col' is taken from plot.settings$pch here -- this looks
      # like a typo for plot.settings$col; confirm before changing behaviour
      col = plot.settings$pch,
      type = plot.settings$type,
      lty = plot.settings$lty,
      lwd = plot.settings$lwd
    )

    # vertical error bars, unless segments was explicitly set to NULL
    if(!is.null(plot.settings$segments)){
      segments(
        x0 = df[,3], y0 = df[,1] + df[,2],
        x1 = df[,3], y1 = df[,1] - df[,2]
      )
    }

    mtext(side = 3, plot.settings$mtext)

    if(!is.null(plot.settings$grid)){
      grid(eval(plot.settings$grid))
    }

  }#EndOf::Case 6 - calc_SourceDoseRate()

  #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
  ## CASE 7: Fast Ratio
  if (object@originator=="calc_FastRatio") {

    # graphical settings
    settings <- list(main = "Fast Ratio",
                     xlab = "t/s",
                     ylab = "Signal/cts",
                     type = "b",
                     log = "",
                     pch = 16,
                     cex = 1.0,
                     col = "black")
    settings <- modifyList(settings, list(...))

    par(cex = settings$cex)

    # fetch data from RLum.Results object
    curve <- get_RLum(object, "data")
    if (inherits(curve, "RLum.Data.Curve"))
      curve <- get_RLum(curve)
    res <- get_RLum(object, "summary")
    fit <- get_RLum(object, "fit")

    # calculate the dead channel time offset
    offset <- res$dead.channels.start * res$channel.width

    # plot the OSL curve (empty frame; points/lines are added below)
    plot(curve, type = "n",
         main = settings$main,
         xlab = settings$xlab,
         ylab = settings$ylab,
         log = settings$log)

    # plot points to show measured data points (i.e., the channels)
    if (settings$type == "p" || settings$type == "b")
      points(curve[(res$dead.channels.start + 1):(nrow(curve) - res$dead.channels.end), ],
             pch = settings$pch, col = settings$col)

    # plot dead channels as empty circles
    if (res$dead.channels.start > 0)
      points(curve[1:res$dead.channels.start,])
    if (res$dead.channels.end > 0)
      points(curve[(nrow(curve) - res$dead.channels.end):nrow(curve), ])

    if (settings$type == "l" || settings$type == "b")
      lines(curve, col = settings$col)

    # optional: plot fitted CW curve
    if (!is.null(fit)) {
      nls.fit <- get_RLum(fit, "fit")
      if (!inherits(fit, "try-error") & "fitCW.curve" %in% names(object@data$args)) {
        if (object@data$args$fitCW.curve == "T" | object@data$args$fitCW.curve == TRUE) {
          lines(curve[(res$dead.channels.start + 1):(nrow(curve) - res$dead.channels.end), 1],
                predict(nls.fit), col = "red", lty = 1)

          ##plot curve for additional parameters (one per fitted component)
          col_components <- c("red", "green", "blue")
          for (i in 1:3) {
            if (!is.na(fit@data$data[[paste0("I0", i)]]))
              curve(fit@data$data[[paste0("I0", i)]] * fit@data$data[[paste0("lambda", i)]] * exp(-fit@data$data[[paste0("lambda", i)]] * x),
                    lwd = 1, lty = 4, add = TRUE, col = col_components[i])
          }
        }
      }
    }

    # add vertical lines and labels for L1, L2, L3
    L_times <- c(curve[res$Ch_L1, 1],
                 curve[res$Ch_L2, 1],
                 curve[res$Ch_L3_start, 1],
                 curve[res$Ch_L3_end, 1]) + offset
    abline(v = L_times, lty = 2)
    text(L_times, max(curve[ ,2]) * 0.95, pos = c(4,4,2,2),
         labels = expression('L'[1], 'L'[2], 'L'[3['start']], 'L'[3['end']]))

  }#EndOf::Case7 - calc_FastRatio()
}
Luminescence/R/github.R0000644000176200001440000001470513417364147014513 0ustar liggesusers# ------------------------------------------------------------------------
# Author: Christoph Burow
#
Affiliation: University of Cologne # Date: 15/01/2019 # API version: v3 # Reference: https://developer.github.com/v3/ # ------------------------------------------------------------------------ #' GitHub API #' #' R Interface to the GitHub API v3. #' #' These functions can be used to query a specific repository hosted on GitHub. \cr #' #' #' @param user [character] (*with default*): #' GitHub user name (defaults to 'r-lum'). #' #' @param repo [character] (*with default*): #' name of a GitHub repository (defaults to 'luminescence'). #' #' @param branch [character] (*with default*): #' branch of a GitHub repository (defaults to 'master'). #' #' @param n [integer] (*with default*): #' number of commits returned (defaults to 5). #' #' @param verbose [logical] (*with default*): #' print the output to the console (defaults to `TRUE`). #' #' @author Christoph Burow, University of Cologne (Germany) #' #' @section Function version: 0.1.0 #' #' @references #' #' GitHub Developer API v3. [https://developer.github.com/v3/](), last accessed: 10/01/2017. #' #' @examples #' #' \dontrun{ #' github_branches(user = "r-lum", repo = "luminescence") #' github_issues(user = "r-lum", repo = "luminescence") #' github_commits(user = "r-lum", repo = "luminescence", branch = "master", n = 10) #' } #' #' @md #' @name GitHub-API NULL # COMMITS ----------------------------------------------------------------- #' @rdname GitHub-API #' #' @details #' `github_commits` lists the most recent `n` commits of a specific branch of a repository. 
#'
#' @return
#' `github_commits`: [data.frame] with columns:
#'
#' \tabular{ll}{
#'  `[ ,1]` \tab SHA \cr
#'  `[ ,2]` \tab AUTHOR \cr
#'  `[ ,3]` \tab DATE \cr
#'  `[ ,4]` \tab MESSAGE \cr
#' }
#'
#' @md
#' @export
github_commits <- function(user = "r-lum", repo = "luminescence",
                           branch = "master", n = 5) {

  # fetch available branches and check if provided branch exists;
  # use exact (literal) matching: the previous grepl() check treated
  # 'branch' as a regular expression and accepted partial matches
  # (e.g. "mast"), after which the anchored SHA lookup came back empty
  # and a malformed URL was queried
  branches <- github_branches(user, repo)
  idx <- match(branch, branches$BRANCH)
  if (is.na(idx))
    stop("Branch ", branch, " does not exist.", call. = FALSE)

  # build URL and retrieve content
  sha <- branches$SHA[idx]
  url <- paste0("https://api.github.com/repos/", user, "/", repo, "/commits?",
                "per_page=", n, "&sha=", sha)

  content <- github_getContent(url)

  # format output as data.frame
  output <- do.call(rbind, lapply(content, function(x) {
    data.frame(SHA = x$sha,
               AUTHOR = x$commit$author$name,
               DATE = x$commit$author$date,
               MESSAGE = x$commit$message,
               stringsAsFactors = FALSE)
  }))

  return(output)
}

# BRANCHES ----------------------------------------------------------------

#' @rdname GitHub-API
#'
#' @details
#' `github_branches` can be used to list all current branches of a
#' repository and returns the corresponding SHA hash as well as an installation
#' command to install the branch in R via the 'devtools' package.
#'
#' @return
#' `github_branches`: [data.frame] with columns:
#'
#' \tabular{ll}{
#'  `[ ,1]` \tab BRANCH \cr
#'  `[ ,2]` \tab SHA \cr
#'  `[ ,3]` \tab INSTALL \cr
#' }
#'
#' @md
#' @export
github_branches <- function(user = "r-lum", repo = "luminescence") {

  ## build URL and retrieve content
  url <- paste0("https://api.github.com/repos/", user, "/", repo, "/branches")
  content <- github_getContent(url)

  ## extract relevant information from server response
  branches <- sapply(content, function(x) x$name)
  sha <- sapply(content, function(x) x$commit$sha)

  ## format output as data.frame.
  ## NOTE: the install command must be built from the 'user' and 'repo'
  ## arguments; it was previously hardcoded to 'r-lum/luminescence', which
  ## produced a wrong command for any other repository
  output <- data.frame(
    BRANCH = branches,
    SHA = sha,
    INSTALL = paste0("devtools::install_github('", user, "/", repo, "@", branches, "')"),
    stringsAsFactors = FALSE
  )

  return(output)
}

# ISSUES ------------------------------------------------------------------

#' @rdname GitHub-API
#'
#' @details
#' `github_issues` lists all open issues for a repository in valid YAML.
#'
#' @return
#' `github_issues`: Nested [list] with `n` elements.
#' Each issue element is a list with elements:
#'
#' \tabular{ll}{
#'  `[[1]]` \tab NUMBER \cr
#'  `[[2]]` \tab TITLE \cr
#'  `[[3]]` \tab BODY \cr
#'  `[[4]]` \tab CREATED \cr
#'  `[[5]]` \tab UPDATED \cr
#'  `[[6]]` \tab CREATOR \cr
#'  `[[7]]` \tab URL \cr
#'  `[[8]]` \tab STATUS \cr
#'  `[[9]]` \tab MILESTONE \cr
#' }
#'
#' @md
#' @export
github_issues <- function(user = "r-lum", repo = "luminescence", verbose = TRUE) {

  ## build URL and retrieve content
  url <- paste0("https://api.github.com/repos/", user,"/", repo, "/issues")
  content <- github_getContent(url)

  ## format output as nested list.
  ## NOTE: the issue body may be NULL (empty issue description); guard it,
  ## otherwise gsub() returns character(0) and the description wrapping
  ## below fails. Newlines are replaced by a space (not removed), so that
  ## words on adjacent lines are not fused together.
  issues <- lapply(content, function(x) {
    list(
      NUMBER = x$number,
      TITLE = x$title,
      BODY = if (is.null(x$body)) "" else gsub("\n", " ", x$body),
      CREATED = x$created_at,
      UPDATED = x$updated_at,
      CREATOR = x$user$login,
      URL = x$url,
      STATUS = x$state,
      MILESTONE = x$milestone$title)
  })

  ## custom printing of the issues-list as print.list produces unreadable
  ## console output
  if (verbose) {
    tmp <- lapply(issues, function(x) {

      ## limit width of description text to 100 characters per output line
      DESCRIPTION <- ""
      for (i in seq_len(ceiling(nchar(x$BODY) / 100)))
        DESCRIPTION <- paste(DESCRIPTION, " ", substr(x$BODY, i*100-99, i*100), "\n")

      ## print to console in valid YAML
      cat(paste0("---\n",
                 'title: "', x$TITLE, '"', "\n",
                 "number: ", x$NUMBER, "\n",
                 'url: "', x$URL, '"', "\n",
                 "created: ", x$CREATED, "\n",
                 "updated: ", x$UPDATED, "\n",
                 "creator: ", x$CREATOR, "\n",
                 "status: ", x$STATUS, "\n",
                 'milestone: "', x$MILESTONE, '"', "\n",
                 "description: >\n", DESCRIPTION, "\n\n\n"))
    })
  }

  ## return invisible as we explicitly print the output
  invisible(issues)
}

# HELPER ------------------------------------------------------------------

# This function queries the URL, checks the server response and returns
# the content.
github_getContent <- function(url) {
  response <- GET(url, accept_json())
  if (status_code(response) != 200)
    stop("Contacting ", url, " had status code ", status_code(response), call.
= FALSE) content <- content(response) return(content) } Luminescence/R/analyse_SAR.CWOSL.R0000644000176200001440000016134613571743147016266 0ustar liggesusers#' Analyse SAR CW-OSL measurements #' #' The function performs a SAR CW-OSL analysis on an #' [RLum.Analysis-class] object including growth curve fitting. #' #' The function performs an analysis for a standard SAR protocol measurements #' introduced by Murray and Wintle (2000) with CW-OSL curves. For the #' calculation of the Lx/Tx value the function [calc_OSLLxTxRatio] is #' used. For **changing the way the Lx/Tx error is calculated** use the argument #' `background.count.distribution` and `sigmab`, which will be passed to the function #' [calc_OSLLxTxRatio]. #' #' **Argument `object` is of type `list`** #' #' If the argument `object` is of type [list] containing **only** #' [RLum.Analysis-class] objects, the function re-calls itself as often as elements #' are in the list. This is usefull if an entire measurement wanted to be analysed without #' writing separate for-loops. To gain in full control of the parameters (e.g., `dose.points`) for #' every aliquot (corresponding to one [RLum.Analysis-class] object in the list), in #' this case the arguments can be provided as [list]. This `list` should #' be of similar length as the `list` provided with the argument `object`, #' otherwise the function will create an own list of the requested lenght. #' Function output will be just one single [RLum.Results-class] object. #' #' Please be careful when using this option. It may allow a fast an efficient data analysis, but #' the function may also break with an unclear error message, due to wrong input data. #' #' **Working with IRSL data** #' #' The function was originally designed to work just for 'OSL' curves, #' following the principles of the SAR protocol. An IRSL measurement protocol #' may follow this procedure, e.g., post-IR IRSL protocol (Thomsen et al., #' 2008). 
Therefore this functions has been enhanced to work with IRSL data, #' however, the function is only capable of analysing curves that follow the #' SAR protocol structure, i.e., to analyse a post-IR IRSL protocol, curve data #' have to be pre-selected by the user to fit the standards of the SAR #' protocol, i.e., Lx,Tx,Lx,Tx and so on. #' #' Example: Imagine the measurement contains pIRIR50 and pIRIR225 IRSL curves. #' Only one curve type can be analysed at the same time: The pIRIR50 curves or #' the pIRIR225 curves. #' #' **Supported rejection criteria** #' #' `[recycling.ratio]`: calculated for every repeated regeneration dose point. #' #' `[recuperation.rate]`: recuperation rate calculated by comparing the #' Lx/Tx values of the zero regeneration point with the Ln/Tn value (the Lx/Tx #' ratio of the natural signal). For methodological background see Aitken and #' Smith (1988). #' #' `[testdose.error]`: set the allowed error for the testdose, which per #' default should not exceed 10\%. The testdose error is calculated as Tx_net.error/Tx_net. #' #' `[palaeodose.error]`: set the allowed error for the De value, which per #' default should not exceed 10\%. #' #' @param object [RLum.Analysis-class] (**required**): #' input object containing data for analysis, alternatively a [list] of #' [RLum.Analysis-class] objects can be provided. #' #' @param signal.integral.min [integer] (**required**): #' lower bound of the signal integral. Can be a [list] of [integer]s, if `object` is #' of type [list]. If the input is vector (e.g., `c(1,2)`) the 2nd value will be interpreted #' as the minimum signal integral for the Tx curve. #' #' @param signal.integral.max [integer] (**required**): #' upper bound of the signal integral. Can be a [list] of [integer]s, if `object` is #' of type [list]. If the input is vector (e.g., `c(1,2)`) the 2nd value will be interpreted #' as the maximum signal integral for the Tx curve. 
#' #' @param background.integral.min [integer] (**required**): #' lower bound of the background integral. Can be a [list] of [integer]s, if `object` is #' of type [list]. If the input is vector (e.g., `c(1,2)`) the 2nd value will be interpreted #' as the minimum background integral for the Tx curve. #' #' @param background.integral.max [integer] (**required**): #' upper bound of the background integral. Can be a [list] of [integer]s, if `object` is #' of type [list]. If the input is vector (e.g., `c(1,2)`) the 2nd value will be interpreted #' as the maximum background integral for the Tx curve. #' #' @param rejection.criteria [list] (*with default*): #' provide a named list and set rejection criteria in **percentage** for further calculation. Can be a [list] in #' a [list], if `object` is of type [list] #' #' Allowed arguments are `recycling.ratio`, `recuperation.rate`, #' `palaeodose.error`, `testdose.error` and `exceed.max.regpoint = TRUE/FALSE`. #' Example: `rejection.criteria = list(recycling.ratio = 10)`. #' Per default all numerical values are set to 10, `exceed.max.regpoint = TRUE`. #' Every criterium can be set to `NA`. In this value are calculated, but not considered, i.e. #' the RC.Status becomes always `'OK'` #' #' @param dose.points [numeric] (*optional*): #' a numeric vector containg the dose points values Using this argument #' overwrites dose point values in the signal curves. Can be a [list] of #' [numeric] vectors, if `object` is of type [list] #' #' @param mtext.outer [character] (*optional*): #' option to provide an outer margin mtext. Can be a [list] of [character]s, #' if `object` is of type [list] #' #' @param plot [logical] (*with default*): #' enables or disables plot output. #' #' @param plot.single [logical] (*with default*) or [numeric] (*optional*): #' single plot output (`TRUE/FALSE`) to allow for plotting the results in single plot windows. #' If a numerice vector is provided the plots can be selected individually, i.e. 
#' `plot.single = c(1,2,3,4)` will plot the TL and Lx, Tx curves but not the legend (5) or the #' growth curve (6), (7) and (8) belong to rejection criteria plots. Requires #' `plot = TRUE`. #' #' @param onlyLxTxTable [logical] (with default): If `TRUE` the dose response curve fitting and plotting #' is skipped. This allows to get hands on the Lx/Tx table for large datasets without the need for #' a curve fitting. #' #' @param ... further arguments that will be passed to the function #' [plot_GrowthCurve] or [calc_OSLLxTxRatio] #' (supported: `background.count.distribution`, `sigmab`, `sig0`). #' **Please note** that if you consider to use the early light subtraction #' method you should provide your own `sigmab` value! #' #' #' @return #' A plot (*optional*) and an [RLum.Results-class] object is #' returned containing the following elements: #' #' \item{data}{[data.frame] containing De-values, De-error and further parameters} #' \item{LnLxTnTx.values}{[data.frame] of all calculated Lx/Tx values including signal, background counts and the dose points} #' \item{rejection.criteria}{[data.frame] with values that might by used as rejection criteria. NA is produced if no R0 dose point exists.} #' \item{Formula}{[formula] formula that have been used for the growth curve fitting } #' #' The output should be accessed using the function [get_RLum]. #' #' #' @note #' This function must not be mixed up with the function #' [Analyse_SAR.OSLdata], which works with #' [Risoe.BINfileData-class] objects. #' #' **The function currently does support only 'OSL', 'IRSL' and 'POSL' data!** #' #' @section Function version: 0.8.8 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS-Université Bordeaux Montaigne (France) #' #' #' @seealso [calc_OSLLxTxRatio], [plot_GrowthCurve], [RLum.Analysis-class], #' [RLum.Results-class], [get_RLum] #' #' #' @references #' Aitken, M.J. and Smith, B.W., 1988. Optical dating: recuperation #' after bleaching. 
Quaternary Science Reviews 7, 387-393. #' #' Duller, G., 2003. Distinguishing quartz and feldspar in single grain #' luminescence measurements. Radiation Measurements, 37 (2), 161-165. #' #' Murray, A.S. and Wintle, A.G., 2000. Luminescence dating of quartz using an #' improved single-aliquot regenerative-dose protocol. Radiation Measurements #' 32, 57-73. #' #' Thomsen, K.J., Murray, A.S., Jain, M., Boetter-Jensen, L., 2008. Laboratory #' fading rates of various luminescence signals from feldspar-rich sediment #' extracts. Radiation Measurements 43, 1474-1486. #' doi:10.1016/j.radmeas.2008.06.002 #' #' @keywords datagen plot #' #' @examples #' #' ##load data #' ##ExampleData.BINfileData contains two BINfileData objects #' ##CWOSL.SAR.Data and TL.SAR.Data #' data(ExampleData.BINfileData, envir = environment()) #' #' ##transform the values from the first position in a RLum.Analysis object #' object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) #' #' ##perform SAR analysis and set rejection criteria #' results <- analyse_SAR.CWOSL( #' object = object, #' signal.integral.min = 1, #' signal.integral.max = 2, #' background.integral.min = 900, #' background.integral.max = 1000, #' log = "x", #' fit.method = "EXP", #' rejection.criteria = list( #' recycling.ratio = 10, #' recuperation.rate = 10, #' testdose.error = 10, #' palaeodose.error = 10, #' exceed.max.regpoint = TRUE) #') #' #' ##show De results #' get_RLum(results) #' #' ##show LnTnLxTx table #' get_RLum(results, data.object = "LnLxTnTx.table") #' #' @md #' @export analyse_SAR.CWOSL<- function( object, signal.integral.min, signal.integral.max, background.integral.min, background.integral.max, rejection.criteria = NULL, dose.points = NULL, mtext.outer, plot = TRUE, plot.single = FALSE, onlyLxTxTable = FALSE, ... 
) { # SELF CALL ----------------------------------------------------------------------------------- if(is.list(object)){ ##make life easy if(missing("signal.integral.min")){ signal.integral.min <- 1 warning("[analyse_SAR.CWOSL()] 'signal.integral.min' missing, set to 1", call. = FALSE) } if(missing("signal.integral.max")){ signal.integral.max <- 2 warning("[analyse_SAR.CWOSL()] 'signal.integral.max' missing, set to 2", call. = FALSE) } ##now we have to extend everything to allow list of arguments ... this is just consequent signal.integral.min <- rep(as.list(signal.integral.min), length = length(object)) signal.integral.max <- rep(as.list(signal.integral.max), length = length(object)) background.integral.min <- rep(as.list(background.integral.min), length = length(object)) background.integral.max <- rep(as.list(background.integral.max), length = length(object)) ##it is a little bit more complex, as we have a list in a list if(is(rejection.criteria[[1]], "list")){ rejection.criteria <- rep(rejection.criteria, length = length(object)) }else{ rejection.criteria <- rep(list(rejection.criteria), length = length(object)) } if(!is.null(dose.points)){ if(is(dose.points, "list")){ dose.points <- rep(dose.points, length = length(object)) }else{ dose.points <- rep(list(dose.points), length = length(object)) } }else{ dose.points <- rep(list(NULL), length(object)) } if(!missing(mtext.outer)){ mtext.outer <- rep(as.list(mtext.outer), length = length(object)) }else{ mtext.outer <- rep(list(""), length = length(object)) } ##handle main if("main"%in% names(list(...))){ if(class(list(...)$main) == "list"){ main <- rep(list(...)$main,length = length(object)) }else{ main <- rep(as.list(list(...)$main),length = length(object)) } }else{ main <- as.list(paste0("ALQ #",1:length(object))) } ##run analysis temp <- lapply(1:length(object), function(x){ analyse_SAR.CWOSL(object[[x]], signal.integral.min = signal.integral.min[[x]], signal.integral.max = signal.integral.max[[x]], 
background.integral.min = background.integral.min[[x]], background.integral.max = background.integral.max[[x]] , dose.points = dose.points[[x]], mtext.outer = mtext.outer[[x]], plot = plot, rejection.criteria = rejection.criteria[[x]], plot.single = plot.single, onlyLxTxTable = onlyLxTxTable, main = main[[x]], ...) }) ##combine everything to one RLum.Results object as this as what was written ... only ##one object ##merge results and check if the output became NULL results <- merge_RLum(temp) ##DO NOT use invisible here, this will prevent the function from stopping if(length(results) == 0){ return(NULL) }else{ return(results) } } # CONFIG ----------------------------------------------------------------- ##set error list, this allows to set error messages without breaking the function error.list <- list() # General Integrity Checks --------------------------------------------------- ##GENERAL ##MISSING INPUT if(missing("object")){ stop("[analyse_SAR.CWOSL()] No value set for 'object'!") } ##INPUT OBJECTS if(!is(object, "RLum.Analysis")){ stop("[analyse_SAR.CWOSL()] Input object is not of type 'RLum.Analyis'!") } if(missing("signal.integral.min") & !is.list(object)){ signal.integral.min <- 1 warning("[analyse_SAR.CWOSL()] 'signal.integral.min' missing, set to 1", call. = FALSE) } if(missing("signal.integral.max") & !is.list(object)){ signal.integral.max <- 2 warning("[analyse_SAR.CWOSL()] 'signal.integral.max' missing, set to 2", call. 
= FALSE) } if(missing("background.integral.min")){ stop("[analyse_SAR.CWOSL()] No value set for 'background.integral.min'!") } if(missing("background.integral.max")){ stop("[analyse_SAR.CWOSL()] No value set for 'background.integral.max'!") } ##build signal and background integrals signal.integral <- c(signal.integral.min[1]:signal.integral.max[1]) background.integral <- c(background.integral.min[1]:background.integral.max[1]) ##account for the case that Lx and Tx integral differ if (length(signal.integral.min) == 2 & length(signal.integral.max) == 2) { signal.integral.Tx <- c(signal.integral.min[2]:signal.integral.max[2]) }else{ signal.integral.Tx <- NULL } if (length(background.integral.min) == 2 & length(background.integral.max) == 2) { background.integral.Tx <- c(background.integral.min[2]:background.integral.max[2]) }else{ background.integral.Tx <- NULL } ##Account for the case that the use did not provide everything ... if(is.null(signal.integral.Tx) & !is.null(background.integral.Tx)){ signal.integral.Tx <- signal.integral warning("[analyse_SAR.CWOSL()] background integral for Tx curves set, but not for the signal integral; signal integral for Tx automatically set.") } if(!is.null(signal.integral.Tx) & is.null(background.integral.Tx)){ background.integral.Tx <- background.integral warning("[analyse_SAR.CWOSL()] signal integral for Tx curves set, but not for the background integral; background integral for Tx automatically set.") } ##INTEGRAL LIMITS if(!is(signal.integral, "integer") | !is(background.integral, "integer")){ stop("[analyse_SAR.CWOSL()] 'signal.integral' or 'background.integral' is not of type integer!", call. = FALSE) } ##CHECK IF DATA SET CONTAINS ANY OSL or IRSL curve if (!any(c(grepl("OSL", names(object), fixed = TRUE), grepl("IRSL", names(object), fixed = TRUE)))){ stop("[analyse_SAR.CWOSL()] No record of type 'OSL', 'IRSL', 'POSL' detected! NULL returned.", call. 
= FALSE) return(NULL) } ## try to extract the correct curves for the sequence based on allowed curve types and ## the curve type used most frequently ## now remove all non-allowed curves CWcurve.type <- regmatches(names(object), m = regexpr("(OSL|IRSL|POSL)(?!\\))", names(object), perl = TRUE)) ## now get the type which is used most CWcurve.type <- names(which.max(table(CWcurve.type))) # Rejection criteria ------------------------------------------------------ ##set list rejection.criteria.default <- list( recycling.ratio = 10, recuperation.rate = 10, palaeodose.error = 10, testdose.error = 10, exceed.max.regpoint = TRUE ) ##modify list on the request if(!is.null(rejection.criteria)){ ##check if the provided values are valid at all if(!all(names(rejection.criteria)%in%names(rejection.criteria.default))){ try(stop( paste0("[analyse_SAR.CWOSL()] Rejection criteria '", paste( names( rejection.criteria)[ !names(rejection.criteria)%in%names(rejection.criteria.default)], collapse = ", ") ,"' unknown! Input ignored!"), call. = FALSE)) } ##modify list rejection.criteria <- modifyList(rejection.criteria.default, rejection.criteria) }else{ rejection.criteria <- rejection.criteria.default } # Deal with extra arguments ---------------------------------------------------- ##deal with addition arguments extraArgs <- list(...) 
main <- if("main" %in% names(extraArgs)) {extraArgs$main} else {""} log <- if("log" %in% names(extraArgs)) {extraArgs$log} else {""} cex <- if("cex" %in% names(extraArgs)) {extraArgs$cex} else {1} background.count.distribution <- if ("background.count.distribution" %in% names(extraArgs)) { extraArgs$background.count.distribution } else { "non-poisson" } sigmab <- if("sigmab" %in% names(extraArgs)) {extraArgs$sigmab} else {NULL} sig0 <- if("sig0" %in% names(extraArgs)) {extraArgs$sig0} else {0} # Protocol Integrity Checks -------------------------------------------------- ##check overall structur of the object ##every SAR protocol has to have equal number of curves ##grep curve types from analysis value and remove unwanted information temp.ltype <- sapply(1:length(object@records), function(x) { ##export as global variable object@records[[x]]@recordType <<- gsub(" .*", "", object@records[[x]]@recordType) object@records[[x]]@recordType }) ##problem: FI lexsyg devices provide irradiation information in a separate curve if("irradiation"%in%temp.ltype){ ##grep irraditation times temp.irradiation <- structure_RLum(object) temp.irradiation <- temp.irradiation[temp.irradiation$recordType == "irradiation", "x.max"] ##remove every 2nd entry (test dose) and add "0" dose for natural signal temp.Dose <- c(0,temp.irradiation) ##remove irradiation entries from file object <- set_RLum( class = "RLum.Analysis", records = get_RLum(object, recordType = c(CWcurve.type, "TL")), protocol = "SAR") } ##check if the wanted curves are a multiple of two ##gsub removes unwanted information from the curves if(table(temp.ltype)[CWcurve.type]%%2!=0){ error.list[[1]] <- "[analyse_SAR.CWOSL()] Input OSL/IRSL curves are not a multiple of two." 
} ##check if the curve lengths differ temp.matrix.length <- unlist(sapply(1:length(object@records), function(x) { if(object@records[[x]]@recordType==CWcurve.type){ length(object@records[[x]]@data[,1]) } })) if(length(unique(temp.matrix.length))!=1){ error.list[[2]] <- "[analyse_SAR.CWOSL()] Input curves lengths differ." } ##just proceed if error list is empty if (length(error.list) == 0) { ##check background integral if (max(signal.integral) == min(signal.integral)) { signal.integral <- c(min(signal.integral) : (max(signal.integral) + 1)) warning("[analyse_SAR.CWOSL()] integral signal limits cannot be equal, reset automatically!") } ##background integral should not longer than curve channel length if (max(background.integral) == min(background.integral)) { background.integral <- c((min(background.integral) - 1) : max(background.integral)) } if (max(background.integral) > temp.matrix.length[1]) { background.integral <- c((temp.matrix.length[1] - length(background.integral)):temp.matrix.length[1]) ##prevent that the background integral becomes negative if(min(background.integral) < max(signal.integral)){ background.integral <- c((max(signal.integral) + 1):max(background.integral)) } warning( "[analyse_SAR.CWOSL()] Background integral out of bounds. 
Set to: c(", min(background.integral),":", max(background.integral),")" ) } ##Do the same for the Tx-if set if (!is.null(background.integral.Tx)) { if (max(background.integral.Tx) == min(background.integral.Tx)) { background.integral.Tx <- c((min(background.integral.Tx) - 1) : max(background.integral.Tx)) } if (max(background.integral.Tx) > temp.matrix.length[2]) { background.integral.Tx <- c((temp.matrix.length[2] - length(background.integral.Tx)):temp.matrix.length[2]) ##prevent that the background integral becomes negative if (min(background.integral.Tx) < max(signal.integral.Tx)) { background.integral.Tx <- c((max(signal.integral.Tx) + 1):max(background.integral.Tx)) } warning( "Background integral for Tx out of bounds. Set to: c(", min(background.integral.Tx), ":", max(background.integral.Tx), ")" ) } } # Grep Curves ------------------------------------------------------------- ##grep relevant curves from RLum.Analyis object OSL.Curves.ID <- get_RLum(object, recordType = CWcurve.type, get.index = TRUE) ##separate curves by Lx and Tx (it makes it much easier) OSL.Curves.ID.Lx <- OSL.Curves.ID[seq(1,length(OSL.Curves.ID),by = 2)] OSL.Curves.ID.Tx <- OSL.Curves.ID[seq(2,length(OSL.Curves.ID),by = 2)] ##get index of TL curves TL.Curves.ID <- suppressWarnings(get_RLum(object, recordType = "TL$", get.index = TRUE)) ##separate TL curves TL.Curves.ID.Lx <- lapply(1:length(OSL.Curves.ID.Lx), function(x) { TL.Curves.ID[which(TL.Curves.ID == (OSL.Curves.ID.Lx[x] - 1))] }) TL.Curves.ID.Tx <- lapply(1:length(OSL.Curves.ID.Tx), function(x) { TL.Curves.ID[which(TL.Curves.ID == (OSL.Curves.ID.Tx[x] - 1))] }) # COMPONENT FITTING ------------------------------------------------------- # for(x in seq(1,length(OSL.Curves.ID),by=2)){ # # # temp.fit.output <- fit_CWCurve(object@records[[OSL.Curves.ID[x]]], # n.components.max=3, # output.terminal = FALSE, # output.terminalAdvanced = FALSE, # plot = FALSE # # ) # if(exists("fit.output") == FALSE){ # # fit.output <- 
get_RLum(temp.fit.output) # # }else{ # # fit.output <- rbind(fit.output, get_RLum(temp.fit.output)) # # } # # } ##TODO # Calculate LnLxTnTx values -------------------------------------------------- ##calculate LxTx values using external function LnLxTnTx <- lapply(seq(1,length(OSL.Curves.ID),by = 2), function(x){ temp.LnLxTnTx <- get_RLum( calc_OSLLxTxRatio( Lx.data = object@records[[OSL.Curves.ID[x]]]@data, Tx.data = object@records[[OSL.Curves.ID[x + 1]]]@data, signal.integral = signal.integral, signal.integral.Tx = signal.integral.Tx, background.integral = background.integral, background.integral.Tx = background.integral.Tx, background.count.distribution = background.count.distribution, sigmab = sigmab, sig0 = sig0 ) ) ##grep dose if (exists("temp.irradiation") == FALSE) { temp.Dose <- object@records[[OSL.Curves.ID[x]]]@info$IRR_TIME ##for the case that no information on the dose can be found if (is.null(temp.Dose)) { temp.Dose <- NA } temp.LnLxTnTx <- cbind(Dose = temp.Dose, temp.LnLxTnTx) }else{ temp.LnLxTnTx <- cbind(Dose = temp.Dose[x], temp.LnLxTnTx) } }) ##combine LnLxTnTx <- data.table::rbindlist(LnLxTnTx) # Set regeneration points ------------------------------------------------- ##overwrite dose point manually if (!is.null(dose.points)) { if (length(dose.points) != length(LnLxTnTx$Dose)) { stop("[analyse_SAR.CWOSL()] length 'dose.points' differs from number of curves.") } LnLxTnTx$Dose <- dose.points } ##check whether we have dose points at all if (is.null(dose.points) & anyNA(LnLxTnTx$Dose)) { stop("[analyse_SAR.CWOSL()] 'dose.points' contains NA values or have not been set!") } ##check whether the first OSL/IRSL curve (i.e., the Natural) has 0 dose. If not ##not, it is probably a Dose Recovery Test with the given dose that is treated as the ##unknown dose. We overwrite this value and warn the user. 
if (LnLxTnTx$Dose[1] != 0) { warning("[analyse_SAR.CWOSL()] The natural signal has a dose of ", LnLxTnTx$Dose[1], " s, which is indicative of a dose recovery test. The natural dose was set to 0.", call. = FALSE) LnLxTnTx$Dose[1] <- 0 } #generate unique dose id - this are also the # for the generated points temp.DoseID <- c(0:(length(LnLxTnTx$Dose) - 1)) temp.DoseName <- paste("R",temp.DoseID,sep = "") temp.DoseName <- cbind(Name = temp.DoseName,Dose = LnLxTnTx$Dose) ##set natural temp.DoseName[temp.DoseName[,"Name"] == "R0","Name"] <- "Natural" ##set R0 temp.DoseName[temp.DoseName[,"Name"] != "Natural" & temp.DoseName[,"Dose"] == 0,"Name"] <- "R0" ##correct numeration numeration of other dose points ##how many dose points do we have with 0? non.temp.zero.dose.number <- nrow(temp.DoseName[temp.DoseName[, "Dose"] != 0,]) temp.DoseName[temp.DoseName[,"Name"] != "Natural" & temp.DoseName[,"Name"] != "R0","Name"] <- paste("R",c(1:non.temp.zero.dose.number),sep = "") ##find duplicated doses (including 0 dose - which means the Natural) temp.DoseDuplicated <- duplicated(temp.DoseName[,"Dose"]) ##combine temp.DoseName temp.DoseName <- cbind(temp.DoseName,Repeated = temp.DoseDuplicated) ##correct value for R0 (it is not really repeated) temp.DoseName[temp.DoseName[,"Dose"] == 0,"Repeated"] <- FALSE ##combine in the data frame temp.LnLxTnTx <- data.frame(Name = temp.DoseName[,"Name"], Repeated = as.logical(temp.DoseName[,"Repeated"])) LnLxTnTx <- cbind(temp.LnLxTnTx,LnLxTnTx) LnLxTnTx[,"Name"] <- as.character(LnLxTnTx[,"Name"]) # Calculate Recycling Ratio ----------------------------------------------- ##Calculate Recycling Ratio if (length(LnLxTnTx[LnLxTnTx[,"Repeated"] == TRUE,"Repeated"]) > 0) { ##identify repeated doses temp.Repeated <- LnLxTnTx[LnLxTnTx[,"Repeated"] == TRUE,c("Name","Dose","LxTx")] ##find concering previous dose for the repeated dose temp.Previous <- t(sapply(1:length(temp.Repeated[,1]),function(x) { LnLxTnTx[LnLxTnTx[,"Dose"] == temp.Repeated[x,"Dose"] 
& LnLxTnTx[,"Repeated"] == FALSE,c("Name","Dose","LxTx")] })) ##convert to data.frame temp.Previous <- as.data.frame(temp.Previous) ##set column names temp.ColNames <- unlist(lapply(1:length(temp.Repeated[,1]),function(x) { temp <- paste("Recycling ratio (", temp.Repeated[x,"Name"],"/", temp.Previous[temp.Previous[,"Dose"] == temp.Repeated[x,"Dose"],"Name"], ")", sep = "") return(temp[1]) })) ##Calculate Recycling Ratio RecyclingRatio <- round(as.numeric(temp.Repeated[,"LxTx"]) / as.numeric(temp.Previous[,"LxTx"]), digits = 4) ##Just transform the matrix and add column names RecyclingRatio <- t(RecyclingRatio) colnames(RecyclingRatio) <- temp.ColNames }else{ RecyclingRatio <- NA } # Calculate Recuperation Rate --------------------------------------------- ##Recuperation Rate (capable to handle multiple type of recuperation values) if (length(LnLxTnTx[LnLxTnTx[,"Name"] == "R0","Name"]) > 0) { Recuperation <- sapply(1:length(LnLxTnTx[LnLxTnTx[,"Name"] == "R0","Name"]), function(x) { round(LnLxTnTx[LnLxTnTx[,"Name"] == "R0","LxTx"][x] / LnLxTnTx[LnLxTnTx[,"Name"] == "Natural","LxTx"], digits = 4) }) ##Just transform the matrix and add column names Recuperation <- t(Recuperation) colnames(Recuperation) <- unlist(strsplit(paste( "Recuperation rate", 1:length(LnLxTnTx[LnLxTnTx[,"Name"] == "R0","Name"]), collapse = ";" ), ";")) }else{ Recuperation <- NA } # Evaluate and Combine Rejection Criteria --------------------------------- temp.criteria <- c( if(!is.null(colnames(RecyclingRatio))){ colnames(RecyclingRatio)}else{NA}, if(!is.null(colnames(Recuperation))){ colnames(Recuperation)}else{NA}) temp.value <- c(RecyclingRatio,Recuperation) temp.threshold <- c(rep( rejection.criteria$recycling.ratio / 100, length(RecyclingRatio) ), rep( rejection.criteria$recuperation.rate / 100, length(Recuperation) )) ##RecyclingRatio if (!is.na(RecyclingRatio)[1] & !is.na(rejection.criteria$recycling.ratio)) { temp.status.RecyclingRatio <- sapply(1:length(RecyclingRatio), function(x) { if 
(abs(1 - RecyclingRatio[x]) > (rejection.criteria$recycling.ratio / 100)) { "FAILED" }else{ "OK" } }) }else{ temp.status.RecyclingRatio <- rep("OK", length(RecyclingRatio)) } ##Recuperation if (!is.na(Recuperation)[1] & !is.na(rejection.criteria$recuperation.rate)) { temp.status.Recuperation <- sapply(1:length(Recuperation), function(x) { if (Recuperation[x] > rejection.criteria$recuperation.rate / 100) { "FAILED" } else{ "OK" } }) } else{ temp.status.Recuperation <- "OK" } # Provide Rejection Criteria for Testdose error -------------------------- testdose.error.calculated <- (LnLxTnTx$Net_TnTx.Error/LnLxTnTx$Net_TnTx)[1] testdose.error.threshold <- rejection.criteria$testdose.error / 100 if (is.na(testdose.error.calculated)) { testdose.error.status <- "FAILED" }else{ if(!is.na(testdose.error.threshold)){ testdose.error.status <- ifelse( testdose.error.calculated <= testdose.error.threshold, "OK", "FAILED" ) }else{ testdose.error.status <- "OK" } } testdose.error.data.frame <- data.frame( Criteria = "Testdose error", Value = testdose.error.calculated, Threshold = testdose.error.threshold, Status = testdose.error.status, stringsAsFactors = FALSE ) RejectionCriteria <- data.frame( Criteria = temp.criteria, Value = temp.value, Threshold = temp.threshold, Status = c(temp.status.RecyclingRatio,temp.status.Recuperation), stringsAsFactors = FALSE ) RejectionCriteria <- rbind(RejectionCriteria, testdose.error.data.frame) ##============================================================================## ##PLOTTING ##============================================================================## if (plot == TRUE) { # Plotting - Config ------------------------------------------------------- ##colours and double for plotting col <- get("col", pos = .LuminescenceEnv) if (plot.single[1] == FALSE) { ## read par settings par.default <- par(no.readonly = TRUE) layout(matrix( c(1,1,3,3, 1,1,3,3, 2,2,4,4, 2,2,4,4, 5,5,5,5),5,4,byrow = TRUE )) par( oma = c(0,0,0,0), mar = c(4,4,3,3), cex 
= cex * 0.6 ) ## 1 -> TL previous LnLx ## 2 -> LnLx ## 3 -> TL previous TnTx ## 4 -> TnTx ## 5 -> Legend ## set selected curves to allow plotting of all curves plot.single.sel <- c(1,2,3,4,5,6,7,8) }else{ ##check for values in the single output of the function and convert if (!is(plot.single, "logical")) { if (!is(plot.single, "numeric")) { stop("[analyse_SAR.CWOSL()] Invalid data type for 'plot.single'.") } plot.single.sel <- plot.single }else{ plot.single.sel <- c(1,2,3,4,5,6,7,8) } } ##warning if number of curves exceed colour values if (length(col) < length(OSL.Curves.ID) / 2) { temp.message <- paste( "\n[analyse_SAR.CWOSL()] To many curves! Only the first", length(col),"curves are plotted!" ) warning(temp.message) } ##legend text legend.text <- paste(LnLxTnTx$Name,"\n(",LnLxTnTx$Dose,")", sep = "") ##get channel resolution (should be equal for all curves) resolution.OSLCurves <- round(object@records[[OSL.Curves.ID[1]]]@data[2,1] - object@records[[OSL.Curves.ID[1]]]@data[1,1], digits = 2) # Plotting TL Curves previous LnLx ---------------------------------------- ##overall plot option selection for plot.single.sel if (1 %in% plot.single.sel) { ##check if TL curves are available if (length(TL.Curves.ID.Lx[[1]] > 0)) { ##It is just an approximation taken from the data resolution.TLCurves <- round(mean(diff( round(object@records[[TL.Curves.ID.Lx[[1]]]]@data[,1], digits = 1) )), digits = 1) ylim.range <- sapply(seq(1,length(TL.Curves.ID.Lx),by = 1) ,function(x) { range(object@records[[TL.Curves.ID.Lx[[x]]]]@data[,2]) }) plot( NA,NA, xlab = "T [\u00B0C]", ylab = paste("TL [cts/",resolution.TLCurves," \u00B0C]",sep = ""), xlim = c(object@records[[TL.Curves.ID.Lx[[1]]]]@data[1,1], max(object@records[[TL.Curves.ID.Lx[[1]]]]@data[,1])), ylim = c(1,max(ylim.range)), main = main, log = if (log == "y" | log == "xy") { "y" }else{ "" } ) #provide curve information as mtext, to keep the space for the header mtext(side = 3, expression(paste( "TL previous ", L[n],",",L[x]," 
curves",sep = "" )), cex = cex * 0.7) ##plot TL curves sapply(1:length(TL.Curves.ID.Lx) ,function(x) { lines(object@records[[TL.Curves.ID.Lx[[x]]]]@data,col = col[x]) }) }else{ plot( NA,NA,xlim = c(0,1), ylim = c(0,1), main = "", axes = FALSE, ylab = "", xlab = "" ) text(0.5,0.5, "No TL curve detected") } }#plot.single.sel # Plotting LnLx Curves ---------------------------------------------------- ##overall plot option selection for plot.single.sel if (2 %in% plot.single.sel) { ylim.range <- sapply(1:length(OSL.Curves.ID.Lx) ,function(x) { range(object@records[[OSL.Curves.ID.Lx[x]]]@data[,2]) }) if((log == "x" | log == "xy") & object@records[[OSL.Curves.ID.Lx[[1]]]]@data[1,1] == 0){ xlim <- c(object@records[[OSL.Curves.ID.Lx[1]]]@data[2,1], max(object@records[[OSL.Curves.ID.Lx[1]]]@data[,1]) + object@records[[OSL.Curves.ID.Lx[1]]]@data[2,1]) }else{ xlim <- c(object@records[[OSL.Curves.ID.Lx[1]]]@data[1,1], max(object@records[[OSL.Curves.ID.Lx[1]]]@data[,1])) } #open plot area LnLx plot( NA,NA, xlab = "Time [s]", ylab = paste(CWcurve.type," [cts/",resolution.OSLCurves," s]",sep = ""), xlim = xlim, ylim = range(ylim.range), main = main, log = log ) #provide curve information as mtext, to keep the space for the header mtext(side = 3, expression(paste(L[n],",",L[x]," curves",sep = "")), cex = cex * 0.7) ##plot curves sapply(1:length(OSL.Curves.ID.Lx), function(x) { if((log == "x" | log == "xy") & object@records[[OSL.Curves.ID.Lx[[x]]]]@data[1,1] == 0){ object@records[[OSL.Curves.ID.Lx[[x]]]]@data[1,] <- object@records[[OSL.Curves.ID.Lx[[x]]]]@data[1,] + diff(c(object@records[[OSL.Curves.ID.Lx[[x]]]]@data[1,1], object@records[[OSL.Curves.ID.Lx[[x]]]]@data[2,1])) warnings("[analyse_SAR.CWOSL()] curves shifted by one chanel for log-plot.") } lines(object@records[[OSL.Curves.ID.Lx[[x]]]]@data,col = col[x]) }) ##mark integration limit Lx curves abline( v = (object@records[[OSL.Curves.ID.Lx[1]]]@data[min(signal.integral),1]), lty = 2, col = "gray" ) abline( v = 
(object@records[[OSL.Curves.ID.Lx[1]]]@data[max(signal.integral),1]), lty = 2, col = "gray" ) abline( v = (object@records[[OSL.Curves.ID.Lx[1]]]@data[min(background.integral),1]), lty = 2, col = "gray" ) abline( v = (object@records[[OSL.Curves.ID.Lx[1]]]@data[max(background.integral),1]), lty = 2, col = "gray" ) ##mtext, implemented here, as a plot window has to be called first if (missing(mtext.outer)) { mtext.outer <- "" } mtext( mtext.outer, side = 4, outer = TRUE, line = -1.7, cex = cex, col = "blue" ) }# plot.single.sel # Plotting TL Curves previous TnTx ---------------------------------------- ##overall plot option selection for plot.single.sel if (3 %in% plot.single.sel) { ##check if TL curves are available if (length(TL.Curves.ID.Tx[[1]] > 0)) { ##It is just an approximation taken from the data resolution.TLCurves <- round(mean(diff( round(object@records[[TL.Curves.ID.Tx[[1]]]]@data[,1], digits = 1) )), digits = 1) ylim.range <- sapply(1:length(TL.Curves.ID.Tx) ,function(x) { range(object@records[[TL.Curves.ID.Tx[[x]]]]@data[,2]) }) plot( NA,NA, xlab = "T [\u00B0C]", ylab = paste("TL [cts/",resolution.TLCurves," \u00B0C]",sep = ""), xlim = c(object@records[[TL.Curves.ID.Tx[[1]]]]@data[1,1], max(object@records[[TL.Curves.ID.Tx[[1]]]]@data[,1])), ylim = c(1,max(ylim.range)), main = main, log = if (log == "y" | log == "xy") { "y" }else{ "" } ) #provide curve information as mtext, to keep the space for the header mtext(side = 3, expression(paste( "TL previous ", T[n],",",T[x]," curves",sep = "" )), cex = cex * 0.7) ##plot TL curves sapply(1:length(TL.Curves.ID.Tx) ,function(x) { lines(object@records[[TL.Curves.ID.Tx[[x]]]]@data,col = col[x]) }) }else{ plot( NA,NA,xlim = c(0,1), ylim = c(0,1), main = "", axes = FALSE, ylab = "", xlab = "" ) text(0.5,0.5, "No TL curve detected") } }#plot.single.sel # Plotting TnTx Curves ---------------------------------------------------- ##overall plot option selection for plot.single.sel if (4 %in% plot.single.sel) { 
ylim.range <- sapply(1:length(OSL.Curves.ID.Tx) ,function(x) { range(object@records[[OSL.Curves.ID.Tx[x]]]@data[,2]) }) if((log == "x" | log == "xy") & object@records[[OSL.Curves.ID.Tx[[1]]]]@data[1,1] == 0){ xlim <- c(object@records[[OSL.Curves.ID.Tx[1]]]@data[2,1], max(object@records[[OSL.Curves.ID.Tx[1]]]@data[,1]) + object@records[[OSL.Curves.ID.Tx[1]]]@data[2,1]) }else{ xlim <- c(object@records[[OSL.Curves.ID.Tx[1]]]@data[1,1], max(object@records[[OSL.Curves.ID.Tx[1]]]@data[,1])) } #open plot area LnLx plot( NA,NA, xlab = "Time [s]", ylab = paste(CWcurve.type ," [cts/",resolution.OSLCurves," s]",sep = ""), xlim = xlim, ylim = range(ylim.range), main = main, log = log ) #provide curve information as mtext, to keep the space for the header mtext(side = 3, expression(paste(T[n],",",T[x]," curves",sep = "")), cex = cex * 0.7) ##plot curves and get legend values sapply(1:length(OSL.Curves.ID.Tx) ,function(x) { ##account for log-scale and 0 values if((log == "x" | log == "xy") & object@records[[OSL.Curves.ID.Tx[[x]]]]@data[1,1] == 0){ object@records[[OSL.Curves.ID.Tx[[x]]]]@data[1,] <- object@records[[OSL.Curves.ID.Tx[[x]]]]@data[1,] + diff(c(object@records[[OSL.Curves.ID.Tx[[x]]]]@data[1,1], object@records[[OSL.Curves.ID.Tx[[x]]]]@data[2,1])) warnings("[analyse_SAR.CWOSL()] curves shifted by one chanel for log-plot.") } lines(object@records[[OSL.Curves.ID.Tx[[x]]]]@data,col = col[x]) }) ##mark integration limit Tx curves abline( v = (object@records[[OSL.Curves.ID.Tx[1]]]@data[min(signal.integral),1]), lty = 2, col = "gray" ) abline( v = (object@records[[OSL.Curves.ID.Tx[1]]]@data[max(signal.integral),1]), lty = 2, col = "gray" ) abline( v = (object@records[[OSL.Curves.ID.Tx[1]]]@data[min(background.integral),1]), lty = 2, col = "gray" ) abline( v = (object@records[[OSL.Curves.ID.Tx[1]]]@data[max(background.integral),1]), lty = 2, col = "gray" ) }# plot.single.sel # Plotting Legend ---------------------------------------- ##overall plot option selection for 
plot.single.sel if (5 %in% plot.single.sel) { par.margin <- par()$mar par.mai <- par()$mai par(mar = c(1,1,1,1), mai = c(0,0,0,0)) plot( c(1:(length( OSL.Curves.ID ) / 2)), rep(7,length(OSL.Curves.ID) / 2), type = "p", axes = FALSE, xlab = "", ylab = "", pch = 20, col = unique(col[1:length(OSL.Curves.ID)]), cex = 4 * cex, ylim = c(0,10) ) ##add text text(c(1:(length( OSL.Curves.ID ) / 2)), rep(7,length(OSL.Curves.ID) / 2), legend.text, offset = 1, pos = 1) ##add line abline(h = 10,lwd = 0.5) #reset margin par(mar = par.margin, mai = par.mai) }#plot.single.sel if (exists("par.default")) { par(par.default) } }##end plot == TRUE # Plotting GC ---------------------------------------- ##create data.frame temp.sample <- data.frame( Dose = LnLxTnTx$Dose, LxTx = LnLxTnTx$LxTx, LxTx.Error = LnLxTnTx$LxTx.Error, TnTx = LnLxTnTx$Net_TnTx ) ##overall plot option selection for plot.single.sel if (plot == TRUE && 6 %in% plot.single.sel) { plot <- TRUE }else { plot <- FALSE } ##Fit and plot growth curve if(!onlyLxTxTable){ temp.GC <- plot_GrowthCurve(temp.sample, output.plot = plot, ...) ##if null if(is.null(temp.GC)){ temp.GC <- data.frame( De = NA, De.Error = NA, D01 = NA, D01.ERROR = NA, D02 = NA, D02.ERROR = NA, De.MC = NA, Fit = NA, RC.Status = NA, stringsAsFactors = FALSE ) temp.GC.fit.Formula <- NA ##create empty plots if needed, otherwise subsequent functions may crash if(plot){ if("output.plotExtended" %in% list(...) 
&& list(...)$output.plotExtended == FALSE){ shape::emptyplot() }else{ shape::emptyplot() shape::emptyplot() shape::emptyplot() } } }else{ ##grep information on the fit object temp.GC.fit.Formula <- get_RLum(temp.GC, "Formula") ##grep results temp.GC <- get_RLum(temp.GC) # Provide Rejection Criteria for Palaedose error -------------------------- if(is.na(temp.GC[,1])){ palaeodose.error.calculated <- NA }else{ palaeodose.error.calculated <- round(temp.GC[,2] / temp.GC[,1], digits = 5) } palaeodose.error.threshold <- rejection.criteria$palaeodose.error / 100 if (is.na(palaeodose.error.calculated)) { palaeodose.error.status <- "FAILED" }else{ if(!is.na(palaeodose.error.threshold)){ palaeodose.error.status <- ifelse( palaeodose.error.calculated <= palaeodose.error.threshold, "OK", "FAILED" ) }else{ palaeodose.error.status <- "OK" } } palaeodose.error.data.frame <- data.frame( Criteria = "Palaeodose error", Value = palaeodose.error.calculated, Threshold = palaeodose.error.threshold, Status = palaeodose.error.status, stringsAsFactors = FALSE ) ##add exceed.max.regpoint if (!is.na(temp.GC[,1]) & !is.na(rejection.criteria$exceed.max.regpoint) && rejection.criteria$exceed.max.regpoint) { status.exceed.max.regpoint <- ifelse(max(LnLxTnTx$Dose) < temp.GC[,1], "FAILED", "OK") }else{ status.exceed.max.regpoint <- "OK" } exceed.max.regpoint.data.frame <- data.frame( Criteria = "De > max. 
dose point", Value = as.numeric(temp.GC[,1]), Threshold = if(is.na(rejection.criteria$exceed.max.regpoint)){ NA }else if(!rejection.criteria$exceed.max.regpoint){ Inf }else{ as.numeric(max(LnLxTnTx$Dose)) }, Status = status.exceed.max.regpoint ) ##add to RejectionCriteria data.frame RejectionCriteria <- rbind(RejectionCriteria, palaeodose.error.data.frame, exceed.max.regpoint.data.frame) ##add recjection status if (length(grep("FAILED",RejectionCriteria$Status)) > 0) { temp.GC <- data.frame(temp.GC, RC.Status = "FAILED", stringsAsFactors = FALSE) }else{ temp.GC <- data.frame(temp.GC, RC.Status = "OK", stringsAsFactors = FALSE) } }#endif for is.null ##end onlyLxTxTable }else{ temp.GC <- data.frame(De = NA, De.Error = NA, D01 = NA, D01.ERROR = NA, D02 = NA, D02.ERROR = NA, De.MC = NA, Fit = NA) temp.GC.fit.Formula <- NULL } ##add information on the integration limits temp.GC.extended <- data.frame( signal.range = paste(min(signal.integral),":", max(signal.integral)), background.range = paste(min(background.integral),":", max(background.integral)), signal.range.Tx = paste(min(ifelse(is.null(signal.integral.Tx),NA,signal.integral.Tx)),":", max(ifelse(is.null(signal.integral.Tx),NA,signal.integral.Tx))), background.range.Tx = paste(min(ifelse(is.null(background.integral.Tx), NA,background.integral.Tx)) ,":", max(ifelse(is.null(background.integral.Tx), NA,background.integral.Tx))), stringsAsFactors = FALSE ) # Set return Values ----------------------------------------------------------- ##generate unique identifier UID <- create_UID() temp.results.final <- set_RLum( class = "RLum.Results", data = list( data = as.data.frame(c(temp.GC, temp.GC.extended, UID = UID), stringsAsFactors = FALSE), LnLxTnTx.table = cbind(LnLxTnTx, UID = UID, stringsAsFactors = FALSE), rejection.criteria = cbind(RejectionCriteria, UID, stringsAsFactors = FALSE), Formula = temp.GC.fit.Formula ), info = list(call = sys.call()) ) # Plot graphical interpretation of rejection criteria 
----------------------------------------- if (plot && 7 %in% plot.single.sel) { ##set graphical parameter if (!plot.single[1]) par(mfrow = c(1,2)) ##Rejection criteria temp.rejection.criteria <- get_RLum(temp.results.final, data.object = "rejection.criteria") temp.rc.reycling.ratio <- temp.rejection.criteria[grep("Recycling ratio",temp.rejection.criteria[,"Criteria"]),] temp.rc.recuperation.rate <- temp.rejection.criteria[grep("Recuperation rate",temp.rejection.criteria[,"Criteria"]),] temp.rc.palaedose.error <- temp.rejection.criteria[grep("Palaeodose error",temp.rejection.criteria[,"Criteria"]),] temp.rc.testdose.error <- temp.rejection.criteria[grep("Testdose error",temp.rejection.criteria[,"Criteria"]),] plot( NA,NA, xlim = c(-0.5,0.5), ylim = c(0,40), yaxt = "n", ylab = "", xaxt = "n", xlab = "", bty = "n", main = "Rejection criteria" ) axis( side = 1, at = c(-0.2,-0.1,0,0.1,0.2), labels = c("- 0.2", "- 0.1","0/1","+ 0.1", "+ 0.2") ) ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++## ##polygon for recycling ratio text( x = -0.35, y = 35, "Recycling R.", pos = 3, srt = 90, cex = 0.8*cex, offset = 0, ) polygon( x = c( -as.numeric(as.character(temp.rc.reycling.ratio$Threshold))[1],-as.numeric(as.character(temp.rc.reycling.ratio$Threshold))[1], as.numeric(as.character(temp.rc.reycling.ratio$Threshold))[1], as.numeric(as.character(temp.rc.reycling.ratio$Threshold))[1] ), y = c(31,39,39,31), col = "gray", border = NA ) polygon( x = c(-0.3, -0.3, 0.3, 0.3) , y = c(31, 39, 39, 31), border = ifelse(any( grepl(pattern = "FAILED", temp.rc.reycling.ratio$Status) ), "red", "black") ) ##consider possibility of multiple pIRIR signals and multiple recycling ratios if (nrow(temp.rc.recuperation.rate) > 0) { col.id <- 1 for (i in seq(1,nrow(temp.rc.recuperation.rate), length(unique(temp.rc.recuperation.rate[,"Criteria"])))) { for (j in 0:length(unique(temp.rc.recuperation.rate[,"Criteria"]))) { points( temp.rc.reycling.ratio[i + j, 
"Value"] - 1, y = 35, pch = col.id, col = col.id, cex = 1.3 * cex ) } col.id <- col.id + 1 } rm(col.id) ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++## ##polygon for recuperation rate text( x = -0.35, y = 25, "Recuperation", pos = 3, srt = 90, cex = 0.8*cex, offset = 0, ) polygon( x = c( 0, 0, as.numeric(as.character( temp.rc.recuperation.rate$Threshold ))[1], as.numeric(as.character( temp.rc.recuperation.rate$Threshold ))[1] ), y = c(21,29,29,21), col = "gray", border = NA ) polygon( x = c(-0.3, -0.3, 0.3, 0.3) , y = c(21, 29, 29, 21), border = ifelse(any( grepl(pattern = "FAILED", temp.rc.recuperation.rate$Status) ), "red", "black") ) polygon( x = c(-0.3,-0.3,0,0) , y = c(21,29,29,21), border = NA, density = 10, angle = 45 ) for (i in 1:nrow(temp.rc.recuperation.rate)) { points( temp.rc.recuperation.rate[i, "Value"], y = 25, pch = i, col = i, cex = 1.3 * cex ) } } ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++## ##polygon for testdose error text( x = -0.35, y = 15, "Testdose Err.", pos = 3, srt = 90, cex = 0.8*cex, offset = 0, ) polygon( x = c( 0, 0, as.numeric(as.character(temp.rc.testdose.error$Threshold))[1], as.numeric(as.character(temp.rc.testdose.error$Threshold))[1] ), y = c(11,19,19,11), col = "gray", border = NA ) polygon( x = c(-0.3, -0.3, 0.3, 0.3) , y = c(11, 19, 19, 11), border = ifelse(any( grepl(pattern = "FAILED", temp.rc.testdose.error$Status) ), "red", "black") ) polygon( x = c(-0.3,-0.3,0,0) , y = c(11,19,19,11), border = NA, density = 10, angle = 45 ) for (i in 1:nrow(temp.rc.testdose.error)) { points( temp.rc.testdose.error[i, "Value"], y = 15, pch = i, col = i, cex = 1.3 * cex ) } ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++## ##polygon for palaeodose error text( x = -0.35, y = 5, "Palaeodose Err.", pos = 3, srt = 90, cex = 0.8*cex, offset = 0, ) polygon( x = c( 0, 0, 
as.numeric(as.character(temp.rc.palaedose.error$Threshold))[1], as.numeric(as.character(temp.rc.palaedose.error$Threshold))[1] ), y = c(1,9,9,1), col = "gray", border = NA ) polygon( x = c(-0.3, -0.3, 0.3, 0.3) , y = c(1, 9, 9, 1), border = ifelse(any( grepl(pattern = "FAILED", temp.rc.palaedose.error$Status) ), "red", "black") ) polygon( x = c(-0.3,-0.3,0,0) , y = c(1,9,9,1), border = NA, density = 10, angle = 45 ) if(nrow(temp.rc.palaedose.error) != 0){ for (i in 1:nrow(temp.rc.palaedose.error)) { if(!is.na(temp.rc.palaedose.error[i, "Value"])){ points( temp.rc.palaedose.error[i, "Value"], y = 5, pch = i, col = i, cex = 1.3 * cex ) } } } } if (plot == TRUE && 8 %in% plot.single.sel) { ##graphical represenation of IR-curve temp.IRSL <- suppressWarnings(get_RLum(object, recordType = "IRSL")) if(length(temp.IRSL) != 0){ if(class(temp.IRSL) == "RLum.Data.Curve"){ plot_RLum.Data.Curve(temp.IRSL, par.local = FALSE) }else if(class(temp.IRSL) == "list"){ plot_RLum.Data.Curve(temp.IRSL[[length(temp.IRSL)]], par.local = FALSE) warning( "[analyse_SAR.CWOSL()] Multiple IRSL curves detected (IRSL test), show only the last one.", immediate. = TRUE, call. = FALSE ) }else{ shape::emptyplot() } }else{ plot(1, type="n", axes=F, xlab="", ylab="") text(x = c(1,1), y = c(1, 1), labels = "No IRSL curve detected!") } } ##It is doubled in this function, but the par settings need some more careful considerations ... if (exists("par.default")) { par(par.default) rm(par.default) } # Return -------------------------------------------------------------------------------------- invisible(temp.results.final) }else{ warning(paste0( "\n", paste(unlist(error.list), collapse = "\n"),"\n... >> nothing was done here!" ), call. 
= FALSE) invisible(NULL) } } Luminescence/R/convert_BIN2CSV.R0000644000176200001440000000706513231137116016024 0ustar liggesusers#' Export Risoe BIN-file(s) to CSV-files #' #' This function is a wrapper function around the functions [read_BIN2R] and #' [write_RLum2CSV] and it imports a Risoe BIN-file and directly exports its #' content to CSV-files. If nothing is set for the argument `path` #' ([write_RLum2CSV]) the input folder will become the output folder. #' #' @param file [character] (**required**): #' name of the BIN-file to be converted to CSV-files #' #' @param ... further arguments that will be passed to the function #' [read_BIN2R] and [write_RLum2CSV] #' #' @return #' The function returns either a CSV-file (or many of them) or for the #' option `export == FALSE` a list comprising objects of type [data.frame] and [matrix] #' #' @section Function version: 0.1.0 #' #' @author Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #' @seealso [RLum.Analysis-class], [RLum.Data-class], [RLum.Results-class], #' [utils::write.table], [write_RLum2CSV], [read_BIN2R] #' #' @keywords IO #' #' @examples #' #' ##transform Risoe.BINfileData values to a list #' data(ExampleData.BINfileData, envir = environment()) #' convert_BIN2CSV(subset(CWOSL.SAR.Data, POSITION == 1), export = FALSE) #' #' \dontrun{ #' ##select your BIN-file #' file <- file.choose() #' #' ##convert #' convert_BIN2CSV(file) #' #' } #' #' @md #' @export convert_BIN2CSV <- function( file, ... ){ # General tests ------------------------------------------------------------------------------- ##file is missing? if(missing(file)){ stop("[convert_BIN2CSV()] file is missing!", call. 
= FALSE) } ##set input arguments convert_BIN2CSV_settings.default <- list( path = if(!is(file, "Risoe.BINfileData")){dirname(file)}else{NULL}, show.raw.values = FALSE, position = NULL, n.records = NULL, zero_data.rm = TRUE, duplicated.rm = FALSE, show.record.number = FALSE, txtProgressBar = TRUE, forced.VersionNumber = NULL, ignore.RECTYPE = FALSE, pattern = NULL, verbose = TRUE, export = TRUE ) ##modify list on demand convert_BIN2CSV_settings <- modifyList(x = convert_BIN2CSV_settings.default, val = list(...)) # Import file --------------------------------------------------------------------------------- if(!is(file, "Risoe.BINfileData")){ object <- read_BIN2R( file = file, show.raw.values = convert_BIN2CSV_settings$show.raw.values, position = convert_BIN2CSV_settings$position, n.records = convert_BIN2CSV_settings$n.records, zero_data.rm = convert_BIN2CSV_settings$zero_data.rm, duplicated.rm = convert_BIN2CSV_settings$duplicated.rm, fastForward = TRUE, show.record.number = convert_BIN2CSV_settings$show.record.number, txtProgressBar = convert_BIN2CSV_settings$txtProgressBar, forced.VersionNumber = convert_BIN2CSV_settings$forced.VersionNumber, ignore.RECTYPE = convert_BIN2CSV_settings$ignore.RECTYPE, pattern = convert_BIN2CSV_settings$pattern, verbose = convert_BIN2CSV_settings$verbose ) }else{ object <- Risoe.BINfileData2RLum.Analysis(file) } # Export to CSV ------------------------------------------------------------------------------- ##get all arguments we want to pass and remove the doubled one arguments <- c(list(object = object, export = convert_BIN2CSV_settings$export), list(...)) arguments[duplicated(names(arguments))] <- NULL ##this if-condition prevents NULL in the terminal if(convert_BIN2CSV_settings$export == TRUE){ invisible(do.call("write_RLum2CSV", arguments)) }else{ do.call("write_RLum2CSV", arguments) } } Luminescence/R/plot_DetPlot.R0000644000176200001440000002734413571743147015647 0ustar liggesusers#' Create De(t) plot #' #' Plots the equivalent 
dose (De) in dependency of the chosen signal integral #' (cf. Bailey et al., 2003). The function is simply passing several arguments #' to the function [plot] and the used analysis functions and runs it in a loop. #' Example: `legend.pos` for legend position, `legend` for legend text. #' #' **method** #' #' The original method presented by Baiely et al., 2003 shifted the signal integrals and slightly #' extended them accounting for changes in the counting statistics. Example: `c(1:3, 3:5, 5:7)`. #' However, here also another method is provided allowing to expand the signal integral by #' consectutively expaning the integral by its chosen length. Example: `c(1:3, 1:5, 1:7)` #' #' Note that in both cases the integral limits are overlap. The finally applied limits are part #' of the function output. #' #' **analyse_function.control** #' #' The argument `analyse_function.control` currently supports the following arguments #' `sequence.structure`, `dose.points`, `mtext.outer`, `fit.method`, `fit.force_through_origin`, `plot`, `plot.single` #' #' @param object [RLum.Analysis-class] (**required**): #' input object containing data for analysis #' #' @param signal.integral.min [integer] (**required**): #' lower bound of the signal integral. #' #' @param signal.integral.max [integer] (**required**): #' upper bound of the signal integral. #' #' @param background.integral.min [integer] (**required**): #' lower bound of the background integral. #' #' @param background.integral.max [integer] (**required**): #' upper bound of the background integral. #' #' @param method [character] (*with default*): #' method applied for constructing the De(t) plot. 
#' - `shift` (*the default*): the chosen signal integral is shifted the shine down curve, #' - `expansion`: the chosen signal integral is expanded each time by its length #' #' @param signal_integral.seq [numeric] (*optional*): #' argument to provide an own signal integral sequence for constructing the De(t) plot #' #' @param analyse_function [character] (*with default*): #' name of the analyse function to be called. Supported functions are: #' `'analyse_SAR.CWOSL'`, `'analyse_pIRIRSequence'` #' #' @param analyse_function.control [list] (*optional*): #' selected arguments to be passed to the supported analyse functions #' (`'analyse_SAR.CWOSL'`, `'analyse_pIRIRSequence'`) #' #' @param n.channels [integer] (*optional*): #' number of channels used for the De(t) plot. If nothing is provided all #' De-values are calculated and plotted until the start of the background #' integral. #' #' @param show_ShineDownCurve [logical] (*with default*): #' enables or disables shine down curve in the plot output #' #' @param respect_RC.Status [logical] (*with default*): #' remove De-values with 'FAILED' RC.Status from the plot #' (cf. [analyse_SAR.CWOSL] and [analyse_pIRIRSequence]) #' #' @param verbose [logical] (*with default*): #' enables or disables terminal feedback #' #' @param ... further arguments and graphical parameters passed to #' [plot.default], [analyse_SAR.CWOSL] and [analyse_pIRIRSequence] (see details for further information). 
#' Plot control parameters are: `ylim`, `xlim`, `ylab`, `xlab`, `main`, `pch`, `mtext`, `cex`, `legend`, #' `legend.text`, `legend.pos` #' #' @return #' A plot and an [RLum.Results-class] object with the produced De values #' #' `@data`: #' #' \tabular{lll}{ #' **Object** \tab **Type** \tab **Description**\cr #' De.values \tab `data.frame` \tab table with De values \cr #' signal_integral.seq \tab `numeric` \tab integral sequence used for the calculation #' } #' #' `@info`: #' #' \tabular{lll}{ #' **Object** \tab **Type** \tab **Description**\cr #' call \tab `call` \tab the original function call #' } #' #' #' @note #' The entire analysis is based on the used analysis functions, namely #' [analyse_SAR.CWOSL] and [analyse_pIRIRSequence]. However, the integrity #' checks of this function are not that thoughtful as in these functions itself. #' It means, that every sequence should be checked carefully before running long #' calculations using serveral hundreds of channels. #' #' @section Function version: 0.1.3 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) #' #' @references #' Bailey, R.M., Singarayer, J.S., Ward, S., Stokes, S., 2003. Identification of partial resetting #' using De as a function of illumination time. Radiation Measurements 37, 511-518. 
#' doi:10.1016/S1350-4487(03)00063-5 #' #' @seealso [plot], [analyse_SAR.CWOSL], [analyse_pIRIRSequence] #' #' @examples #' #' \dontrun{ #' ##load data #' ##ExampleData.BINfileData contains two BINfileData objects #' ##CWOSL.SAR.Data and TL.SAR.Data #' data(ExampleData.BINfileData, envir = environment()) #' #' ##transform the values from the first position in a RLum.Analysis object #' object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) #' #' plot_DetPlot(object, #' signal.integral.min = 1, #' signal.integral.max = 3, #' background.integral.min = 900, #' background.integral.max = 1000, #' n.channels = 5, #' ) #' } #' #' @md #' @export plot_DetPlot <- function( object, signal.integral.min, signal.integral.max, background.integral.min, background.integral.max, method = "shift", signal_integral.seq = NULL, analyse_function = "analyse_SAR.CWOSL", analyse_function.control = list(), n.channels = NULL, show_ShineDownCurve = TRUE, respect_RC.Status = FALSE, verbose = TRUE, ... ) { # Integrity Tests ----------------------------------------------------------------------------- ##get structure object.structure <- structure_RLum(object) # Set parameters ------------------------------------------------------------------------------ ##set n.channels if(is.null(n.channels)){ n.channels <- ceiling( (background.integral.min - 1 - signal.integral.max) / (signal.integral.max - signal.integral.min) ) } analyse_function.settings <- list( sequence.structure = c("TL", "IR50", "pIRIR225"), dose.points = NULL, mtext.outer = "", fit.method = "EXP", fit.force_through_origin = FALSE, plot = FALSE, plot.single = FALSE ) analyse_function.settings <- modifyList(analyse_function.settings, analyse_function.control) # Analyse ------------------------------------------------------------------------------------- ##set integral sequence if (is.null(signal_integral.seq)) { if(signal.integral.min == signal.integral.max){ signal_integral.seq <- signal.integral.min:(background.integral.min - 
1) }else{ signal_integral.seq <- seq(signal.integral.min, background.integral.min - 1, by = signal.integral.max - signal.integral.min) } } if(analyse_function == "analyse_SAR.CWOSL"){ results <- merge_RLum(lapply(1:n.channels, function(x){ analyse_SAR.CWOSL( object = object, signal.integral.min = if(method == "shift"){signal_integral.seq[x]}else{signal_integral.seq[1]}, signal.integral.max = signal_integral.seq[x+1], background.integral.min = background.integral.min, background.integral.max = background.integral.max, dose.points = analyse_function.settings$dose.points, mtext.outer = analyse_function.settings$mtext.outer, fit.force_through_origin = analyse_function.settings$fit.force_through_origin, fit.method = analyse_function.settings$fit.method, plot = analyse_function.settings$plot, plot.single = analyse_function.settings$plot.single, verbose = verbose ) })) } else if(analyse_function == "analyse_pIRIRSequence"){ results <- merge_RLum(lapply(1:n.channels, function(x){ analyse_pIRIRSequence( object = object, signal.integral.min = if(method == "shift"){signal_integral.seq[x]}else{signal_integral.seq[1]}, signal.integral.max = signal_integral.seq[x+1], background.integral.min = background.integral.min, background.integral.max = background.integral.max, dose.points = analyse_function.settings$dose.points, mtext.outer = analyse_function.settings$mtext.outer, plot = analyse_function.settings$plot, plot.single = analyse_function.settings$plot.single, sequence.structure = analyse_function.settings$sequence.structure, verbose = verbose ) })) } else{ stop("[plot_DetPlot()] 'analyse_function' unknown!", call. 
= FALSE) } # Plot ---------------------------------------------------------------------------------------- ##get De results if(analyse_function == "analyse_pIRIRSequence"){ pIRIR_signals <- unique(get_RLum(results)$Signal) }else{ pIRIR_signals <- NA } ##run this in a loop to account for pIRIR data df_final <- lapply(1:length(pIRIR_signals), function(i){ ##get data.frame df <- get_RLum(results) ##further limit if(!is.na(pIRIR_signals[1])){ df <- df[df$Signal == pIRIR_signals[i],] } ##add shine down curve, which is by definition the first IRSL/OSL curve ##and normalise on the highest De value OSL_curve <- as(get_RLum(object, recordType = "SL")[[i]], "matrix") ##limit to what we see OSL_curve <- OSL_curve[1:signal_integral.seq[n.channels + 1],] m <- ((min(df$De - df$De.Error, na.rm = TRUE)) - (max(df$De, na.rm = TRUE) + max(df$De.Error, na.rm = TRUE))) / (min(OSL_curve[, 2], na.rm = TRUE) - max(OSL_curve[, 2], na.rm = TRUE)) n <- (max(df$De, na.rm = TRUE) + max(df$De.Error, na.rm = TRUE)) - m * max(OSL_curve[, 2]) OSL_curve[, 2] <- m * OSL_curve[, 2] + n rm(n, m) ##set plot settings plot.settings <- list( ylim = c(min(df$De - df$De.Error, na.rm = TRUE), (max(df$De, na.rm = TRUE) + max(df$De.Error, na.rm = TRUE))), xlim = c(min(OSL_curve[, 1]), max(OSL_curve[, 1])), ylab = expression(paste(D[e] / s, " and ", L[n]/(a.u.))), xlab = "Stimulation time [s]", main = "De(t) plot", pch = 1, mtext = ifelse(is.na(pIRIR_signals[1]), "", paste0("Signal: ",pIRIR_signals[i])), cex = 1, legend = TRUE, legend.text = c(expression(L[n]-signal), expression(D[e])), legend.pos = "bottomleft" ) plot.settings <- modifyList(plot.settings, list(...)) ##general settings par(cex = plot.settings$cex) ##open plot area plot( NA, NA, xlim = plot.settings$xlim, ylim = plot.settings$ylim, xlab = plot.settings$xlab, ylab = plot.settings$ylab, main = plot.settings$main ) if (show_ShineDownCurve) { lines(OSL_curve, type = "b", pch = 20) } ##set x-axis df_x <- OSL_curve[seq(signal.integral.max, 
signal_integral.seq[n.channels+1], length.out = nrow(df)),1] #combine everything to allow excluding unwanted values df_final <- cbind(df, df_x) if (respect_RC.Status) { df_final <- df_final[df_final$RC.Status != "FAILED", ] } ##TodDo:color failed points red ##plot points and error bars points(df_final[, c("df_x", "De")], pch = plot.settings$pch) segments( x0 = df_final$df_x, y0 = df_final$De + df_final$De.Error, x1 = df_final$df_x, y1 = df_final$De - df_final$De.Error ) ##set mtext mtext(side = 3, plot.settings$mtext) ##legend if(plot.settings$legend){ legend( plot.settings$legend.pos, legend = plot.settings$legend.text, pch = c(plot.settings$pch, 20), bty = "n" ) } ##set return return(df_final) }) ##merge results return(set_RLum( class = "RLum.Results", data = list( De.values = as.data.frame(data.table::rbindlist(df_final)), signal_integral.seq = signal_integral.seq ), info = list(call = sys.call()) )) } Luminescence/R/length_RLum.R0000644000176200001440000000172013231137116015426 0ustar liggesusers#' General accessor function for RLum S4 class objects #' #' Function calls object-specific get functions for RLum S4 class objects. #' #' The function provides a generalised access point for specific #' [RLum-class] objects.\cr #' Depending on the input object, the corresponding get function will be selected. #' Allowed arguments can be found in the documentations of the corresponding #' [RLum-class] class. #' #' @param object [RLum-class] (**required**): #' S4 object of class `RLum` #' #' @return Return is the same as input objects as provided in the list. 
#'
#' @section Function version: 0.1.0
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne
#' (France)
#' @seealso
#' [RLum.Data.Curve-class],
#' [RLum.Data.Image-class],
#' [RLum.Data.Spectrum-class],
#' [RLum.Analysis-class],
#' [RLum.Results-class]
#' @keywords utilities
#'
#' @md
#' @export
setGeneric("length_RLum", function(object) {
  standardGeneric("length_RLum")
})

####################################################################################################
##                                 INTERNAL HELPER FUNCTIONS                                      ##
####################################################################################################

#+++++++++++++++++++++
#+ .set_pid()        +
#+++++++++++++++++++++

#' Set unique id of the RLum.Analysis object as parent id for each RLum.Data object in the record list
#'
#' This function only applies on RLum.Analysis objects and was written for performance not
#' usability, means the function runs without any checks and is for internal usage only.
#'
#' @param object [RLum.Analysis-class] (**required**):
#' input object where the function should be applied on
#'
#' @return
#' Returns the same object as the input
#'
#' @section Function version: 0.1.0
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @examples
#'
#' ##example using self created data
#' object <- set_RLum(
#' "RLum.Analysis",
#' records = list(
#'  set_RLum("RLum.Data.Curve"),
#'  set_RLum("RLum.Data.Curve")))
#'
#' object <- .set_pid(object)
#'
#' @md
#' @noRd
.set_pid <- function(object){
  ##stamp every child record with the parent's unique id (.uid)
  object@records <- lapply(object@records, function(x) {
    x@.pid <- object@.uid
    return(x)
  })
  return(object)
}

#+++++++++++++++++++++
#+ .warningCatcher() +
#+++++++++++++++++++++

#' Catches warnings returned by a function and merges them.
#' The original return of the function is returned. This function is in particular
#' helpful if a function returns a lot of warnings with the same content.
#'
#' @param expr [expression] (**required**):
#' the R expression, usually a function
#'
#' @return
#' Returns the same object as the input; identical warning messages are re-emitted
#' once with their occurrence count
#'
#' @section Function version: 0.1.1
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @examples
#'
#' f <- function() {
#'  warning("warning 1")
#'  warning("warning 1")
#'  warning("warning 2")
#'  1:10
#' }
#' print(.warningCatcher(f()))
#'
#' @md
#' @noRd
.warningCatcher <- function(expr) {
  ##set variables; the collector must ACCUMULATE all warning messages
  ##(the previous implementation overwrote it with the latest condition,
  ##so the reported counts were always wrong)
  warning_collector <- list()
  env <- environment()

  ##run the expression and catch warnings; each one is appended to the
  ##collector in this function's frame and then muffled
  results <- withCallingHandlers(
    expr = expr,
    warning = function(c) {
      wc <- get("warning_collector", envir = env)
      wc[[length(wc) + 1L]] <- conditionMessage(c)
      assign(x = "warning_collector", value = wc, envir = env)
      invokeRestart("muffleWarning")
    }
  )

  ##re-emit one warning per unique message, annotated with its count
  if (length(warning_collector) > 0) {
    w_table <- table(unlist(warning_collector))
    w_table_names <- names(w_table)

    for (w in seq_along(w_table)) {
      warning(paste(
        w_table_names[w],
        "This warning occurred",
        w_table[w],
        "times!"
      ),
      call. = FALSE)
    }
  }
  return(results)
}
#+++++++++++++++++++++
#+ .smoothing()      +
#+++++++++++++++++++++

#' Allows smoothing of data based on the function zoo::rollmean
#'
#' The function just allows a direct and meaningful access to the functionality of the
#' zoo::rollmean() function. Arguments of the function are only partly valid.
#'
#' @param x [numeric] (**required**):
#' the object for which the smoothing should be applied.
#'
#' @param k [integer] (*with default*):
#' window for the rolling mean; must be odd for rollmedian.
#' If nothing is set k is set automatically
#'
#' @param fill [numeric] (*with default*):
#' a vector defining the left and the right hand data
#'
#' @param align [character] (*with default*):
#' specifying whether the index of the result should be
#' left- or right-aligned or centered (default) compared to the rolling window of observations,
#' allowed `"right"`, `"center"` and `"left"`
#'
#' @param method [character] (*with default*):
#' defines which method should be applied for the smoothing: `"mean"` or `"median"`
#'
#' @return
#' Returns the smoothed numeric vector
#'
#' @section Function version: 0.1.2
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @examples
#'
#' v <- 1:100
#' .smoothing(v)
#'
#' @md
#' @noRd
.smoothing <- function(
  x,
  k = NULL,
  fill = NA,
  align = "right",
  method = "mean") {

  ##set k automatically: roughly 1 % of the data points, but never below 1
  ##(ceiling(length(x)/100) is 0 for empty input, which zoo rejects)
  if (is.null(k)){
    k <- max(1L, ceiling(length(x) / 100))

    ##zoo::rollmedian() requires an odd window size
    if(method == "median" && k %% 2 == 0)
      k <- k + 1
  }

  ##smooth data
  if(method == "mean"){
    zoo::rollmean(x, k = k, fill = fill, align = align)

  }else if(method == "median"){
    zoo::rollmedian(x, k = k, fill = fill, align = align)

  }else{
    stop("[Luminescence:::.smoothing()] Invalid input for 'method'!")
  }
}
#++++++++++++++++++++++++++++++
#+ Scientific axis annotation +
#++++++++++++++++++++++++++++++

#' Bored of the 1e10 notation of large numbers in R? Already tried to force
#' R to produce more fancy labels? Worry not, fancy_scientific() (written by
#' Jack Aidley) is at your help!
#'
#' Source:
#' [http://stackoverflow.com/questions/11610377/how-do-i-change-the-formatting-of-numbers-on-an-axis-with-ggplot]()
#'
#' @param l [numeric] (**required**):
#' a numeric vector, i.e. the labels that you want to add to your plot
#'
#' @return
#' Returns an expression
#'
#' @section Function version: 0.1.0
#'
#' @author Jack Aidley
#'
#' @examples
#'
#' plot(seq(1e10, 1e20, length.out = 10),
#'  1:10,
#'  xaxt = "n")
#'
#' axis(1, at = axTicks(1),
#'  labels = fancy_scientific(axTicks(1)))
#'
#' @md
#' @noRd
fancy_scientific <- function(l) {
  ## render the numbers in scientific notation first
  sci <- format(l, scientific = TRUE)

  ## quote the mantissa so that all of its digits survive parsing
  sci <- gsub("^(.*)e", "'\\1'e", sci)

  ## rewrite the exponent marker in plotmath syntax and drop any plus sign
  sci <- gsub("\\+", "", gsub("e", "%*%10^", sci))

  ## return the labels as a plotmath expression
  parse(text = sci)
}
#'
#' @param keywords [character] (*with default*): keywords supported by the function `calc_Statistics()`
#'
#' @param digits [numeric] (*with default*): modify the digits independently for the plot output
#'
#' @param sep [character] (*with default*): a separator used for the creation of the output of the plot
#'
#' @param prefix [character] (*with default*): allows to add a leading prefix to the string
#'
#' @param suffix [character] (*with default*): allows to add a suffix to the entire string
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @section Version: 0.1.0
#'
#' @md
#' @noRd
.create_StatisticalSummaryText <- function(
  x = NULL, #insert the output of calc_Statistics
  keywords = NULL,
  digits = 2, #allow for different digits
  sep = " \n ",
  prefix = "",
  suffix = ""
){

  # Grep keyword information --------------------------------------------------------------------
  ##without input, run calc_Statistics() on dummy data only to enumerate
  ##the supported prefix/keyword combinations
  if (is.null(x)) {
    summary <- calc_Statistics(data.frame(x = 1:2, y = 1:2))
  } else {
    summary <- x
  }

  #all allowed combinations, e.g. "unweighted$mean"
  keywords_allowed <- unlist(lapply(names(summary), function(x){
    paste0(x, "$", names(summary[[x]]))
  }))

  ##return keyword list for x == NULL
  if(is.null(x)) return(keywords_allowed)

  # Create call ---------------------------------------------------------------------------------
  #create list
  l <- lapply(keywords, function(k) {
    ##strip keyword if necessary, e.g. "weighted$mean" -> prefix "weighted", keyword "mean"
    if (grepl(pattern = "$", x = k, fixed = TRUE)[1]) {
      strip <- strsplit(k, split = "$", fixed = TRUE)[[1]]
      keywords_prefix <- strip[1]
      k_strip <- strip[2]

    } else{
      ##keywords without a prefix default to the unweighted statistics
      keywords_prefix <- "unweighted"
      k_strip <- k
    }

    ##construct string; the "unweighted" prefix is omitted from the label
    if(!is.null(summary[[keywords_prefix]][[k_strip]])){
      if(keywords_prefix == "unweighted"){
        paste0(k_strip, " = ", round(summary[[keywords_prefix]][[k_strip]], digits))

      }else{
        paste0(k, " = ", round(summary[[keywords_prefix]][[k_strip]], digits))
      }

    }else{
      return(NULL)
    }
  })

  ##remove NULL entries (unknown keywords)
  l <- l[!sapply(l, is.null)]

  ##construct final call
  return(paste0(prefix, paste(unlist(l), collapse = sep), suffix))
}

#++++++++++++++++++++++++++++++
#+ Unlist RLum                +
#++++++++++++++++++++++++++++++

#'
#' Recursive unlisting of lists until the first element in the list
#' is something, but not a list. This function helps
#' to get rid of nested lists. The function stops running if a single
#' level list is reached.
#'
#' @param x [list] (**required**): list with lists
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France)
#'
#' @examples
#' a <- list(b = list(c = list("test")))
#' .unlist_RLum(a)
#'
#' @return [list] with only one level left
#' @md
#' @noRd
.unlist_RLum <- function(x){
  ##class(x)[1] instead of class(x): a plain comparison fails with
  ##"condition has length > 1" (error since R 4.2) for objects carrying
  ##more than one class
  stopifnot(class(x)[1] == "list")

  ##unwrap one level and recurse while the first element is still a list
  if(length(x) > 0 && class(x[[1]])[1] == "list"){
    x <- unlist(x, recursive = FALSE)
    .unlist_RLum(x)

  }else{
    return(x)
  }
}
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France)
#'
#' @section Function version: 0.1.2
#'
#' @note Row and column names are transformed to numeric and also summed up; this is not a bug
#' but a feature!
#'
#' @return [matrix]
#'
#' @examples
#' m <- matrix(data = c(rep(1:20,each = 20)), ncol = 10, nrow = 20)
#' rownames(m) <- 1:nrow(m)
#' colnames(m) <- 1:ncol(m)
#'
#' .matrix_binning(m, bin_size = 4)
#'
#' @md
#' @noRd
.matrix_binning <- function(
  m,
  bin_size = 1,
  bin_col = FALSE,
  names = NULL) {

  # The only check ------------------------------------------------------------------------------
  ## only plain matrices are supported
  if (class(m)[1] != "matrix")
    stop("[.matrix_binning()] Input is not of class 'matrix'!", call. = FALSE)

  # transpose in column mode --------------------------------------------------------------------
  ## column binning is realised by transposing, binning rows and transposing back
  if (bin_col)
    m <- t(m)

  # Binning calculation -------------------------------------------------------------------------
  ## assign each row to its bin; truncating to nrow(m) keeps the grouping
  ## valid when nrow(m) is not a multiple of bin_size
  groups <- rep(1:nrow(m), each = bin_size)[1:nrow(m)]

  ## the actual binning: row-wise sums per group
  binned <- rowsum(m, group = groups)

  # Correct names -------------------------------------------------------------------------------
  if (!is.null(names[1])) {
    if (names[1] == "groups") {
      ## use the last row name of each bin ...
      new_names <- rownames(m)[which(diff(groups) != 0)]

      ## ... and append the final row name if it was not picked up
      if (length(new_names) < nrow(m))
        new_names <- c(new_names, rownames(m)[nrow(m)])

    } else if (names[1] == "mean") {
      ## per-bin mean of the (numeric) row names
      groups <- rep(1:nrow(m), each = bin_size)[1:nrow(m)]
      new_names <- tapply(X = as.numeric(rownames(m)), INDEX = groups, FUN = mean)

    } else if (names[1] == "sum") {
      ## per-bin sum of the (numeric) row names
      new_names <- rowsum(as.numeric(rownames(m)), group = groups)

    } else {
      ## any other input: taken literally and recycled below
      new_names <- names
    }

    ## reset the row names, recycled to the number of bins
    rownames(binned) <- rep(new_names, length.out = nrow(binned))

  } else {
    ## default: drop the row names entirely
    rownames(binned) <- NULL
  }

  # re-transpose in column mode -----------------------------------------------------------------
  if (bin_col)
    binned <- t(binned)

  # Return --------------------------------------------------------------------------------------
  return(binned)
}
Luminescence/R/app_RLum.R0000644000176200001440000000151313231137116014735 0ustar liggesusers#' Run Luminescence shiny apps (wrapper)
#'
#' Wrapper for the function [RLumShiny::app_RLum] from the package
#' [RLumShiny::RLumShiny-package]. For further details and examples please
#' see the manual of this package.
#'
#' @param app [character] (**required**):
#' name of the application to start. See details for a list of available apps.
#'
#' @param ... further arguments passed [shiny::runApp]
#'
#' @author Christoph Burow, University of Cologne (Germany)
#'
#' @section Function version: 0.1.1
#'
#' @md
#' @export
app_RLum <- function(app = NULL, ...) {
  ## RLumShiny is only suggested, not imported: fail with an instructive
  ## message when it is not installed
  has_shiny <- requireNamespace("RLumShiny", quietly = TRUE)

  if (!has_shiny)
    stop("Shiny applications require the 'RLumShiny' package. To install",
         " this package run 'install.packages('RLumShiny')' in your R console.",
         call. = FALSE)

  ## delegate everything to the shiny wrapper
  RLumShiny::app_RLum(app, ...)
}
every IRSL or OSL curve is transformed, without considering whether it
#' is measured with the PMT or not! However, for a fast look it might be
#' helpful.
#'
#'
#' @param object [RLum.Analysis-class] (**required**):
#' S4 object of class `RLum.Analysis`
#'
#' @param subset named [list] (*optional*):
#' subsets elements for plotting. The arguments in the named [list] will be
#' directly passed to the function [get_RLum]
#' (e.g., `subset = list(curveType = "measured")`)
#'
#' @param nrows [integer] (*optional*):
#' sets number of rows for plot output, if nothing is set the function
#' tries to find a value.
#'
#' @param ncols [integer] (*optional*):
#' sets number of columns for plot output, if nothing is set the function
#' tries to find a value.
#'
#' @param abline [list] (*optional*):
#' allows to add ablines to the plot. Arguments are provided
#' in a list and will be forwarded to the function [abline],
#' e.g., `list(v = c(10, 100))` adds two vertical lines at 10 and 100 to all
#' plots. In contrast `list(v = c(10), v = c(100))` adds a vertical line at 10 to
#' the first and a vertical line at 100 to the 2nd plot.
#'
#' @param combine [logical] (*with default*):
#' allows to combine all [RLum.Data.Curve-class] objects in one single plot.
#'
#' @param curve.transformation [character] (*optional*):
#' allows transforming CW-OSL and CW-IRSL curves to pseudo-LM curves via
#' transformation functions. Allowed values are: `CW2pLM`, `CW2pLMi`,
#' `CW2pHMi` and `CW2pPMi`. See details.
#'
#' @param plot.single [logical] (*with default*):
#' global par settings are considered, normally this should end in one plot per page
#'
#' @param ... further arguments and graphical parameters will be passed to
#' the `plot` function.
#'
#' Supported arguments: `main`, `mtext`, `log`, `lwd`, `lty` `type`, `pch`, `col`,
#' `norm`, `xlim`,`ylim`, `xlab`, `ylab`...
#'
#' and for `combine = TRUE` also: `sub`, `legend`, `legend.text`, `legend.pos`
#' (typical plus 'outside'), `legend.col`, `smooth`.
#'
#' All arguments can be provided as `vector` or `list` to gain in full control
#' of all plot settings.
#'
#' @return Returns multiple plots.
#'
#' @note
#' Not all arguments available for [plot] will be passed and they partly do not behave in the
#' way you might expect them to work. This function was designed to serve as an overview
#' plot, if you want to have more control, extract the objects and plot them individually.
#'
#' @section Function version: 0.3.11
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France)
#'
#' @seealso [plot], [plot_RLum], [plot_RLum.Data.Curve]
#'
#' @keywords aplot
#'
#' @examples
#'
#'##load data
#'data(ExampleData.BINfileData, envir = environment())
#'
#'##convert values for position 1
#'temp <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1)
#'
#'##(1) plot (combine) TL curves in one plot
#'plot_RLum.Analysis(
#' temp,
#' subset = list(recordType = "TL"),
#' combine = TRUE,
#' norm = TRUE,
#' abline = list(v = c(110))
#' )
#'
#'##(2) same as example (1) but using
#'## the argument smooth = TRUE
#'plot_RLum.Analysis(
#' temp,
#' subset = list(recordType = "TL"),
#' combine = TRUE,
#' norm = TRUE,
#' smooth = TRUE,
#' abline = list(v = c(110))
#' )
#'
#' @md
#' @export
plot_RLum.Analysis <- function(
  object,
  subset = NULL,
  nrows,
  ncols,
  abline = NULL,
  combine = FALSE,
  curve.transformation,
  plot.single = FALSE,
  ...
){

  # Integrity check ----------------------------------------------------------------------------

  ##check if object is of class RLum.Analysis (lists are handled via plot_RLum())
  if (!is(object, "RLum.Analysis")) {
    stop("[plot_RLum.Analysis()] Input object is not of type 'RLum.Analysis'")
  }

  # Make selection if wanted -------------------------------------------------------------------
  if(!is.null(subset)){
    ##check whether the user set the drop option and remove it, as we cannot work with it
    subset <- subset[!sapply(names(subset), function(x){"drop" %in% x})]
    object <- do.call(get_RLum, c(object = object, subset, drop = FALSE))
  }

  # Deal with additional arguments. ------------------------------------------------------------

  ##create plot settings list (defaults; user values from ... override below)
  plot.settings <- list(
    main = NULL,
    mtext = NULL,
    log = "",
    lwd = 1,
    lty = 1,
    type = "l",
    xlab = NULL,
    ylab = NULL,
    xlim = NULL,
    ylim = NULL,
    pch = 1,
    col = "auto",
    norm = FALSE,
    sub = NULL,
    cex = 1,
    legend = TRUE,
    legend.text = NULL,
    legend.pos = NULL,
    legend.col = NULL,
    smooth = FALSE
  )

  plot.settings <- modifyList(x = plot.settings, val = list(...), keep.null = TRUE)

  ##try to find optimal parameters, this is however, a little bit stupid, but
  ##better than without any presetting
  if(combine){
    n.plots <- length(unique(as.character(structure_RLum(object)$recordType)))
  }else{
    n.plots <- length_RLum(object)
  }

  ##derive a sensible mfrow layout when nrows/ncols were not both supplied
  if (missing(ncols) | missing(nrows)) {
    if (missing(ncols) & !missing(nrows)) {
      if (n.plots == 1) {
        ncols <- 1
      } else{
        ncols <- 2
      }
    } else if (!missing(ncols) & missing(nrows)) {
      if (n.plots == 1) {
        nrows <- 1
      } else if (n.plots > 1 & n.plots <= 4) {
        nrows <- 2
      } else{
        nrows <- 3
      }
    } else{
      if (n.plots == 1) {
        nrows <- 1
        ncols <- 1
      } else if (n.plots > 1 & n.plots <= 2) {
        nrows <- 1
        ncols <- 2
      } else if (n.plots > 2 & n.plots <= 4) {
        nrows <- 2
        ncols <- 2
      } else{
        nrows <- 3
        ncols <- 2
      }
    }
  }

  # Plotting ------------------------------------------------------------------

  ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
  ##(1) NORMAL (combine == FALSE)
  ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
  if(!combine || length(object@records) == 1){

    ##show warning message
    if(combine & length(object@records) == 1){
      warning("Nothing to combine, object contains a single curve.")
    }

    ##grep RLum.Data.Curve or RLum.Data.Spectrum objects
    temp <- lapply(1:length(object@records), function(x){
      if(is(object@records[[x]], "RLum.Data.Curve") ||
         is(object@records[[x]], "RLum.Data.Spectrum")){
        object@records[[x]]
      }})

    ##calculate number of pages for mtext
    if (length(temp) %% (nrows * ncols) > 0) {
      n.pages <- round(length(temp) / (nrows * ncols), digits = 0) + 1
    } else{
      n.pages <- length(temp) / (nrows * ncols)
    }

    ##set par; restored on exit unless the user asked for single plots
    par.default <- par("mfrow")
    if(!plot.single){on.exit(par(mfrow = par.default))}
    if(!plot.single) {
      par(mfrow = c(nrows, ncols))
    }

    ##expand plot settings list so every curve has its own entry
    plot.settings <- lapply(setNames(1:length(plot.settings), names(plot.settings)),
                            function(x) {
                              if (!is.null(plot.settings[[x]])) {
                                if(length(plot.settings[[x]]) > 1){
                                  if(is(plot.settings[[x]], "list")){
                                    rep_len(plot.settings[[x]], length.out = length(temp))
                                  }else{
                                    rep_len(list(plot.settings[[x]]), length.out = length(temp))
                                  }
                                }else{
                                  rep_len(plot.settings[[x]], length.out = length(temp))
                                }
                              } else{
                                plot.settings[[x]]
                              }
                            })

    ##expand abline (keep the original names after recycling)
    if(!is.null(abline)){
      abline.names <- rep_len(names(abline), length.out = length(temp))
      abline <- rep_len(abline, length.out = length(temp))
      names(abline) <- abline.names
    }

    ##apply curve transformation
    for(i in 1:length(temp)){
      if(is(temp[[i]], "RLum.Data.Curve") == TRUE){

        ##set curve transformation if wanted
        if((grepl("IRSL", temp[[i]]@recordType) |
            grepl("OSL", temp[[i]]@recordType)) &
           !missing(curve.transformation)){

          if(curve.transformation=="CW2pLM"){
            temp[[i]] <- CW2pLM(temp[[i]])

          }else if(curve.transformation=="CW2pLMi"){
            temp[[i]] <- CW2pLMi(temp[[i]])

          }else if(curve.transformation=="CW2pHMi"){
            temp[[i]]<- CW2pHMi(temp[[i]])

          }else if(curve.transformation=="CW2pPMi"){
            temp[[i]] <- CW2pPMi(temp[[i]])

          }else{
            warning("Function for 'curve.transformation' is unknown. No transformation is performed.")
          }
        }

        ##check plot settings and adjust
        ##xlim (clamp to the curve's x-range with a warning)
        if (!is.null(plot.settings$xlim)) {
          xlim.set <- plot.settings$xlim[[i]]
          if (plot.settings$xlim[[i]][1] < min(temp[[i]]@data[,1])) {
            warning(paste0("[plot_RLum.Analysis()] min('xlim') < x-value range for curve #",i,"; reset to minimum."), call. = FALSE)
            xlim.set[1] <- min(temp[[i]]@data[,1])
          }
          if (plot.settings$xlim[[i]][2] > max(temp[[i]]@data[,1])) {
            warning(paste0("[plot_RLum.Analysis()] max('xlim') > x-value range for curve #",i,"; reset to maximum."), call. = FALSE)
            xlim.set[2] <- max(temp[[i]]@data[,1])
          }

        }else{
          xlim.set <- plot.settings$xlim[[i]]
        }

        ##ylim (clamp to the curve's y-range with a warning)
        if (!is.null(plot.settings$ylim)) {
          ylim.set <- plot.settings$ylim[[i]]
          if (plot.settings$ylim[[i]][1] < min(temp[[i]]@data[,2])) {
            warning(paste0("[plot_RLum.Analysis()] min('ylim') < y-value range for curve #",i,"; reset to minimum."), call. = FALSE)
            ylim.set[1] <- min(temp[[i]]@data[,2])
          }
          if (plot.settings$ylim[[i]][2] > max(temp[[i]]@data[,2])) {
            warning(paste0("[plot_RLum.Analysis()] max('ylim') > y-value range for curve #",i,"; reset to maximum."), call. = FALSE)
            ylim.set[2] <- max(temp[[i]]@data[,2])
          }

        }else{
          ylim.set <- plot.settings$ylim[[i]]
        }

        ##col ("auto" picks a colour by stimulation type)
        if (unique(plot.settings$col) != "auto") {
          col <- plot.settings$col[i]

        } else{
          if (grepl("IRSL", temp[[i]]@recordType)) {
            col <- "red"
          } else if (grepl("OSL", temp[[i]]@recordType)) {
            col <- "blue"
          } else {
            col <- "black"
          }
        }

        ##main
        main <- if (is.null(plot.settings$main[[i]])) {
          temp[[i]]@recordType
        } else{
          plot.settings$main[[i]]
        }

        ##++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
        ##PLOT
        ##++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

        ##plot RLum.Data.Curve curve
        ##we have to do this via this way, otherwise we run into a duplicated arguments
        ##problem

        ##check and remove duplicated arguments (explicit settings win over ...)
        arguments <- c(
          list(
            object = temp[[i]],
            col = col,
            mtext = if (!is.null(plot.settings$mtext[[i]])) {
              plot.settings$mtext[[i]]
            } else{
              paste("#", i, sep = "")
            },
            par.local = FALSE,
            main = main,
            log = plot.settings$log[[i]],
            lwd = plot.settings$lwd[[i]],
            type = plot.settings$type[[i]],
            lty = plot.settings$lty[[i]],
            xlim = xlim.set,
            ylim = ylim.set,
            pch = plot.settings$pch[[i]],
            cex = plot.settings$cex[[i]],
            smooth = plot.settings$smooth[[i]]
          ),
          list(...)
        )

        arguments[duplicated(names(arguments))] <- NULL

        ##call the function plot_RLum.Data.Curve
        do.call(what = "plot_RLum.Data.Curve", args = arguments)
        rm(arguments)

        ##add abline
        if(!is.null(abline[[i]])){
          do.call(what = "abline", args = abline[i])
        }

      } else if(is(temp[[i]], "RLum.Data.Spectrum")) {
        plot_RLum.Data.Spectrum(temp[[i]],
                                mtext = if(!is.null(plot.settings$mtext[[i]])){
                                  plot.settings$mtext[[i]]
                                }else{
                                  paste("#", i, sep = "")
                                },
                                par.local = FALSE,
                                main = if(!is.null(plot.settings$main)){
                                  plot.settings$main
                                }else{
                                  temp[[i]]@recordType
                                })
      }
    }#end for loop

  }else{

    ##++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    ##(2) NORMAL (combine == TRUE)
    ##++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

    ##(1) check RLum objects in the set
    object.list <- object@records
    sapply(object.list, function(o){
      if(class(o)[1] != "RLum.Data.Curve"){
        stop("[plot_RLum.Analysis()] Using 'combine' is limited to 'RLum.Data.Curve' objects.",
             call. = FALSE)
      }
    })

    ##account for different curve types, combine similar
    temp.object.structure  <- structure_RLum(object)
    temp.recordType <- as.character(unique(temp.object.structure$recordType))

    ##change graphic settings
    if(!plot.single){
      par.default <- par()[c("cex", "mfrow")]

      if(!missing(ncols) & !missing(nrows)){
        par(mfrow = c(nrows, ncols))
      }

      ##this 2nd par request is needed as setting mfrow resets the par settings ... this might
      ##not be wanted
      par(cex = plot.settings$cex[1])

    }else{
      par.default <- par()[c("cex")]
      par(cex = plot.settings$cex)
    }

    ##expand plot settings list
    ##expand list (one entry per record type this time)
    plot.settings <- lapply(setNames(1:length(plot.settings), names(plot.settings)),
                            function(x) {
                              if (!is.null(plot.settings[[x]])) {
                                if(is.list(plot.settings[[x]])){
                                  rep_len(plot.settings[[x]], length.out = length(temp.recordType))
                                }else{
                                  rep_len(list(plot.settings[[x]]), length.out = length(temp.recordType))
                                }
                              } else{
                                plot.settings[[x]]
                              }
                            })

    ##expand abline
    if(!is.null(abline)){
      abline.names <- rep_len(names(abline), length.out = length(temp.recordType))
      abline <- rep_len(abline, length.out = length(temp.recordType))
      names(abline) <- abline.names
    }

    ##(2) PLOT values
    for(k in 1:length(temp.recordType)) {

      ###get type of curves
      temp.object <- get_RLum(object, recordType = temp.recordType[k], drop = FALSE)

      ##get structure
      object.structure  <- structure_RLum(temp.object)

      ##now get the real list object (note the argument recursive = FALSE)
      object.list <- get_RLum(object, recordType = temp.recordType[k], recursive = FALSE)

      ##prevent problems for non set argument
      if (missing(curve.transformation)) {
        curve.transformation <- "None"
      }

      ##transform values to data.frame and norm values
      temp.data.list <- lapply(1:length(object.list), function(x) {

        ##set curve transformation if wanted
        if (grepl("IRSL", object.list[[x]]@recordType) |
            grepl("OSL", object.list[[x]]@recordType)) {
          if (curve.transformation == "CW2pLM") {
            object.list[[x]] <- CW2pLM(object.list[[x]])

          }else if (curve.transformation == "CW2pLMi") {
            object.list[[x]] <- CW2pLMi(object.list[[x]])

          }else if (curve.transformation == "CW2pHMi") {
            object.list[[x]] <- CW2pHMi(object.list[[x]])

          }else if (curve.transformation == "CW2pPMi") {
            object.list[[x]] <- CW2pPMi(object.list[[x]])
          }
        }

        temp.data <- as(object.list[[x]], "data.frame")

        ##normalise curves if argument has been set
        if (plot.settings$norm[[k]]) {
          temp.data[,2] <- temp.data[,2] / max(temp.data[,2])
        }

        return(temp.data)
      })

      ##set plot parameters
      ##main
      main <- if (!is.null(plot.settings$main[[k]])) {
        plot.settings$main[[k]]
      } else{
        paste0(temp.recordType[[k]], " combined")
      }

      ##xlab (derived from the record type when not user-supplied)
      xlab <- if(!is.null(plot.settings$xlab[[k]])){
        plot.settings$xlab[[k]]
      }else{
        switch(temp.recordType[[k]],
               "TL" = "Temperature [\u00B0C]",
               "IRSL" = "Time [s]",
               "OSL" = "Time [s]",
               "RF" = "Time [s]",
               "RBR" = "Time [s]",
               "LM-OSL" = "Time [s]"
        )
      }

      ##ylab
      ylab <- if(!is.null(plot.settings$ylab[[k]])){
        plot.settings$ylab[[k]]
      }else{
        paste0(temp.recordType[[k]], " [a.u.]")
      }

      ##xlim
      xlim <- if (!is.null(plot.settings$xlim[[k]]) & length(plot.settings$xlim[[k]]) > 1) {
        plot.settings$xlim[[k]]
      } else {
        c(min(object.structure$x.min), max(object.structure$x.max))
      }

      ##a log-axis cannot contain 0, replace by 1
      if (grepl("x", plot.settings$log[[k]], ignore.case = TRUE))
        xlim[which(xlim == 0)] <- 1

      ##ylim
      ylim <- if (!is.null(plot.settings$ylim[[k]]) & length(plot.settings$ylim[[k]]) > 1) {
        plot.settings$ylim[[k]]
      } else {
        range(unlist(lapply(X = temp.data.list, FUN = function(x){
          range(x[,2])
        })))
      }

      if (grepl("y", plot.settings$log[[k]], ignore.case = TRUE))
        ylim[which(ylim == 0)] <- 1

      ##col (again)
      col <- if(length(plot.settings$col[[k]]) > 1 || plot.settings$col[[k]][1] != "auto"){
        plot.settings$col[[k]]
      }else{
        col <- get("col", pos = .LuminescenceEnv)
      }

      ##if length of provided colours is < the number of objects, just one colour is supported
      if (length(col) < length(object.list)) {
        col <- rep_len(col, length(object.list))
      }

      ##lty
      if (length(plot.settings$lty[[k]]) < length(object.list)) {
        lty <- rep(plot.settings$lty[[k]], times = length(object.list))
      }else{
        lty <- plot.settings$lty[[k]]
      }

      ##pch
      if (length(plot.settings$pch[[k]]) < length(object.list)) {
        pch <- rep(plot.settings$pch[[k]], times = length(object.list))
      }else{
        pch <- plot.settings$pch[[k]]
      }

      ##legend.text
      legend.text <- if(!is.null(plot.settings$legend.text[[k]])){
        plot.settings$legend.text[[k]]
      }else{
        paste("Curve", 1:length(object.list))
      }

      ##legend.col
      legend.col <- if(!is.null(plot.settings$legend.col[[k]])){
        plot.settings$legend.col[[k]]
      }else{
        NULL
      }

      ##legend.pos
      legend.pos <- if(!is.null(plot.settings$legend.pos[[k]])){
        plot.settings$legend.pos[[k]]
      }else{
        "topright"
      }

      ##'outside' needs a wider right margin; previous margins are stored for the reset below
      if (legend.pos == "outside") {
        par.default.outside <- par()[c("mar", "xpd")]
        par(mar = c(5.1, 4.1, 4.1, 8.1))
      }

      ##open plot area
      plot(
        NA,NA,
        xlim = xlim,
        ylim = ylim,
        main = main,
        xlab = xlab,
        ylab = ylab,
        log = plot.settings$log[[k]],
        sub = plot.settings$sub[[k]]
      )

      ##plot single curve values
      ## ...?Why using matplot is a bad idea: The channel resolution might be different
      for (n in 1:length(temp.data.list)) {

        ##smooth
        ##Why here again ... because the call differs from the one before, where the argument
        ##is passed to plot_RLum.Data.Curve()
        if(plot.settings$smooth[[k]]){
          k_factor <- ceiling(length(temp.data.list[[n]][, 2])/100)
          temp.data.list[[n]][, 2] <- zoo::rollmean(temp.data.list[[n]][, 2],
                                                    k = k_factor, fill = NA)
        }

        ##remove 0 values if plotted on a log-scale
        # y-Axis
        if (grepl("y", plot.settings$log[[k]], ignore.case = TRUE))
          temp.data.list[[n]] <- temp.data.list[[n]][which(temp.data.list[[n]]$y > 0), ]

        # x-Axis
        if (grepl("x", plot.settings$log[[k]], ignore.case = TRUE))
          temp.data.list[[n]] <- temp.data.list[[n]][which(temp.data.list[[n]]$x > 0), ]

        ##print lines
        if (plot.settings$type[[k]] == "l" | plot.settings$type[[k]] == "b" ) {
          lines(
            temp.data.list[[n]],
            col = col[n],
            lty = lty[n],
            lwd = plot.settings$lwd[[k]]
          )
        }

        ##add points if requested
        if (plot.settings$type[[k]] == "p" | plot.settings$type[[k]] == "b" ) {
          points(
            temp.data.list[[n]],
            col = col[n],
            pch = pch[n],
          )
        }
      }

      ##add abline
      if(!is.null(abline[[k]])){
        do.call(what = "abline", args = abline[k])
      }

      ##mtext
      mtext(plot.settings$mtext[[k]], side = 3, cex = .8 * plot.settings$cex[[k]])

      ##if legend is outside of the plotting area we need to allow overplotting
      ##AFTER all lines have been drawn
      if (legend.pos == "outside") {
        par(xpd = TRUE)

        # determine legend position on log(y) scale
        if (grepl("y", plot.settings$log[[k]], ignore.case = TRUE))
          ypos <- 10^par()$usr[4]
        else
          ypos <- par()$usr[4]

        # determine position on log(x) scale
        if (grepl("x", plot.settings$log[[k]], ignore.case = TRUE))
          xpos <- 10^par()$usr[2]
        else
          xpos <- par()$usr[2]
      }

      ##legend
      if (plot.settings$legend[[k]]) {
        legend(
          x = ifelse(legend.pos == "outside", xpos, legend.pos),
          y = ifelse(legend.pos == "outside", ypos, NULL),
          legend = legend.text,
          lwd = plot.settings$lwd[[k]],
          lty = plot.settings$lty[[k]],
          col = if (is.null(legend.col)) {
            col[1:length(object.list)]
          } else{
            legend.col
          },
          bty = "n",
          cex = 0.8 * plot.settings$cex[[k]]
        )

        # revert the overplotting
        if (legend.pos == "outside")
          par(xpd = FALSE)
      }
    }

    ##reset graphic settings
    if (exists("par.default.outside")) {
      par(par.default.outside)
      rm(par.default.outside)
    }

    par(par.default)
    rm(par.default)
  }
}
Luminescence/R/calc_FuchsLang2001.R0000644000176200001440000002100013231137116016346 0ustar liggesusers#' Apply the model after Fuchs & Lang (2001) to a given De distribution.
#'
#' This function applies the method according to Fuchs & Lang (2001) for
#' heterogeneously bleached samples with a given coefficient of variation
#' threshold.
#'
#' **Used values**
#'
#' If the coefficient of variation (`c[v]`) of the first
#' two values is larger than the threshold `c[v_threshold]`, the first value is
#' skipped. Use the `startDeValue` argument to define a start value for
#' calculation (e.g. 2nd or 3rd value).
#'
#' **Basic steps of the approach**
#'
#' 1. Estimate natural relative variation of the sample using a dose recovery test
#' 2. Sort the input values ascendingly
#' 3. Calculate a running mean, starting with the lowermost two values and add values iteratively.
#' 4.
Stop if the calculated `c[v]` exceeds the specified `cvThreshold` #' #' @param data [RLum.Results-class] or [data.frame] (**required**): #' for [data.frame]: two columns with De `(data[,1])` and De error `(data[,2])` #' #' @param cvThreshold [numeric] (*with default*): #' coefficient of variation in percent, as threshold for the method, #' e.g. `cvThreshold = 3`. See details. #' #' @param startDeValue [numeric] (*with default*): #' number of the first aliquot that is used for the calculations #' #' @param plot [logical] (*with default*): #' plot output `TRUE`/`FALSE` #' #' @param ... further arguments and graphical parameters passed to [plot] #' #' @return #' Returns a plot (*optional*) and terminal output. In addition an #' [RLum.Results-class] object is returned containing the #' following elements: #' #' \item{summary}{[data.frame] summary of all relevant model results.} #' \item{data}{[data.frame] original input data} #' \item{args}{[list] used arguments} #' \item{call}{[call] the function call} #' \item{usedDeValues}{[data.frame] containing the used values for the calculation} #' #' @note Please consider the requirements and the constraints of this method #' (see Fuchs & Lang, 2001) #' #' @section Function version: 0.4.1 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) \cr #' Christoph Burow, University of Cologne (Germany) #' #' @seealso [plot], [calc_MinDose], [calc_FiniteMixture], [calc_CentralDose], #' [calc_CommonDose], [RLum.Results-class] #' #' @references #' Fuchs, M. & Lang, A., 2001. OSL dating of coarse-grain fluvial #' quartz using single-aliquot protocols on sediments from NE Peloponnese, #' Greece. In: Quaternary Science Reviews 20, 783-787. #' #' Fuchs, M. & Wagner, G.A., 2003. Recognition of insufficient bleaching by #' small aliquots of quartz for reconstructing soil erosion in Greece. #' Quaternary Science Reviews 22, 1161-1167.
#' #' @keywords dplot #' #' #' @examples #' ## load example data #' data(ExampleData.DeValues, envir = environment()) #' #' ## calculate De according to Fuchs & Lang (2001) #' temp<- calc_FuchsLang2001(ExampleData.DeValues$BT998, cvThreshold = 5) #' #' @md #' @export calc_FuchsLang2001 <- function( data, cvThreshold = 5, startDeValue = 1, plot = TRUE, ... ){ # Integrity Tests --------------------------------------------------------- if(missing(data)==FALSE){ if(is(data, "data.frame") == FALSE & is(data,"RLum.Results") == FALSE){ stop("[calc_FuchsLang2001] 'data' has to be of type 'data.frame' or 'RLum.Results'!") } else { if(is(data, "RLum.Results") == TRUE){ data <- get_RLum(data, "data") } } } # Deal with extra arguments ----------------------------------------------- ##deal with addition arguments extraArgs <- list(...) verbose <- if("verbose" %in% names(extraArgs)) {extraArgs$verbose} else {TRUE} ##============================================================================## ##PREPARE DATA ##============================================================================## ##1. order values in acending order write used D[e] values in data.frame o <- order(data[1]) # o is only an order parameter data_ordered <- data[o,] # sort values after o and write them into a new variable ##2. 
estimate D[e]

# set up the result table and the index of the last value currently in the window
usedDeValues<-data.frame(De=NA,De_Error=NA,cv=NA)
endDeValue<-startDeValue

# if the first D[e] values are not used (startDeValue > 1), record them as "skipped"
if (startDeValue!=1) {
  n <- abs(1-startDeValue)

  # write the skipped D[e] values in the data.frame
  usedDeValues[1:n,1]<-data_ordered[1:n,1]
  usedDeValues[1:n,2]<-data_ordered[1:n,2]
  usedDeValues[1:n,3]<-"skipped"
}

##==========================================================================##
##LOOP FOR MODEL
##==========================================================================##

# repeat loop (runs at least one time); grows the window of ordered D[e]
# values until the coefficient of variation exceeds cvThreshold
repeat {

  #calculate mean from the current window of ordered D[e] values
  mean<-round(mean(data_ordered[startDeValue:endDeValue,1]),digits=2)
  #calculate sd from the current window of ordered D[e] values
  sd<-round(sd(data_ordered[startDeValue:endDeValue,1]),digits=2)
  #calculate coefficient of variation in percent
  cv<-round(sd/mean*100, digits=2)

  # break if cv > cvThreshold
  if (cv>cvThreshold & endDeValue>startDeValue){

    # if already the first two D[e] values give a cv > cvThreshold, skip the first D[e] value
    if (endDeValue-startDeValue<2) {

      # write used D[e] values in data.frame
      usedDeValues[endDeValue,1]<-data_ordered[endDeValue,1]
      usedDeValues[endDeValue,2]<-data_ordered[endDeValue,2]
      usedDeValues[endDeValue-1,3]<-"not used"

      # go to the next D[e] value
      startDeValue<-startDeValue+1

    } else {
      # window complete: record the terminating cv (marked with "#") and stop
      usedDeValues[endDeValue,1]<-data_ordered[endDeValue,1]
      usedDeValues[endDeValue,2]<-data_ordered[endDeValue,2]
      usedDeValues[endDeValue,3]<-paste("# ",cv," %",sep="")

      break #break loop
    }

  }#EndIf
  else {

    # write used D[e] values in data.frame
    usedDeValues[endDeValue,1]<-data_ordered[endDeValue,1]
    usedDeValues[endDeValue,2]<-data_ordered[endDeValue,2]

    # the first cv value always contains NA; test to ensure that "NA %" is not printed
    if(is.na(cv)==TRUE) {
      usedDeValues[endDeValue,3]<-cv
    } else {
      usedDeValues[endDeValue,3]<-paste(cv," %",sep="")
    }

  }#EndElse

  # go to the next D[e] value until the
maximum number is reached if (endDeValue> data.frame AND data.frame >> RLum.Data.Spectrum #' as() #' #' for `[RLum.Data.Spectrum-class]` #' #' #' **[RLum.Data.Spectrum-class]** #' #' \tabular{ll}{ #' **from** \tab **to**\cr #' `data.frame` \tab `data.frame`\cr #' `matrix` \tab `matrix` #' } #' #' #' @md #' @name as setAs("data.frame", "RLum.Data.Spectrum", function(from,to){ new(to, recordType = NA_character_, curveType = NA_character_, data = as.matrix(from), info = list()) }) setAs("RLum.Data.Spectrum", "data.frame", function(from){ as.data.frame(from@data) }) ##MATRIX ##COERCE RLum.Data.Spectrum >> matrix AND matrix >> RLum.Data.Spectrum setAs("matrix", "RLum.Data.Spectrum", function(from,to){ new(to, recordType = NA_character_, curveType = NA_character_, data = from, info = list()) }) setAs("RLum.Data.Spectrum", "matrix", function(from){ from@data }) # show() ------------------------------------------------------------------------------------- #' @describeIn RLum.Data.Spectrum #' Show structure of `RLum.Data.Spectrum` object #' #' @keywords internal #' #' @md #' @export setMethod("show", signature(object = "RLum.Data.Spectrum"), function(object){ x.range <- suppressWarnings(range(as.numeric(rownames(object@data)))) y.range <- suppressWarnings(range(as.numeric(colnames(object@data)))) z.range <- range(object@data) ##print information cat("\n [RLum.Data.Spectrum-class]") cat("\n\t recordType:", object@recordType) cat("\n\t curveType:", object@curveType) cat("\n\t .. recorded frames:", length(object@data[1,])) cat("\n\t .. .. measured values per frame:", length(object@data[,1])) cat("\n\t .. .. range wavelength/pixel:", x.range) cat("\n\t .. .. range time/temp.:", y.range) cat("\n\t .. .. 
range count values:", z.range) cat("\n\t additional info elements:", length(object@info)) #cat("\n\t\t >> names:", names(object@info)) } ) # set_RLum() ---------------------------------------------------------------------------------- #' @describeIn RLum.Data.Spectrum #' Construction method for RLum.Data.Spectrum object. The slot info is optional #' and predefined as empty list by default #' #' @param class [`set_RLum`]; [character] (*automatic*): #' name of the `RLum` class to create. #' #' @param originator [character] (*automatic*): #' contains the name of the calling function (the function that produces this object); #' can be set manually. #' #' @param .uid [`set_RLum`]; [character] (*automatic*): #' sets an unique ID for this object using the internal C++ function `create_UID`. #' #' @param .pid [`set_RLum`]; [character] (*with default*): #' option to provide a parent id for nesting at will. #' #' @param recordType [`set_RLum`]; [character]: #' record type (e.g. "OSL") #' #' @param curveType [`set_RLum`]; [character]: #' curve type (e.g. "predefined" or "measured") #' #' @param data [`set_RLum`]; [matrix]: #' raw curve data. If data is of type `RLum.Data.Spectrum`, this can be used #' to re-construct the object. 
If the object is reconstructed, `.uid`, `.pid` and `orginator` #' are always taken from the input object #' #' @param info [`set_RLum`] [list]: #' info elements #' #' @return #' #' **`[set_RLum]`** #' #' An object from the class `RLum.Data.Spectrum` #' #' @md #' @export setMethod( "set_RLum", signature = signature("RLum.Data.Spectrum"), definition = function( class, originator, .uid, .pid, recordType = "Spectrum", curveType = NA_character_, data = matrix(), info = list()) { ##The case where an RLum.Data.Spectrum object can be provided ##with this RLum.Data.Spectrum objects can be provided to be reconstructed if (is(data, "RLum.Data.Spectrum")) { ##check for missing curveType if (missing(curveType)) curveType <- data@curveType ##check for missing recordType if (missing(recordType)) recordType <- data@recordType ##check for missing data ... not possible as data is the object itself ##check for missing info if (missing(info)) info <- data@info ##check for missing .uid and .pid >> this are always taken from the ##original dataset ##set empty clas form object newRLumDataSpectrum <- new("RLum.Data.Spectrum") ##fill - this is the faster way, filling in new() costs ... newRLumDataSpectrum@originator = data@originator newRLumDataSpectrum@recordType = recordType newRLumDataSpectrum@curveType = curveType newRLumDataSpectrum@data = data@data newRLumDataSpectrum@info = info newRLumDataSpectrum@.uid = data@.uid newRLumDataSpectrum@.pid = data@.pid } else { ##set empty class from object newRLumDataSpectrum <- new("RLum.Data.Spectrum") ##fill - this is the faster way, filling in new() costs ... 
newRLumDataSpectrum@originator = originator newRLumDataSpectrum@recordType = recordType newRLumDataSpectrum@curveType = curveType newRLumDataSpectrum@data = data newRLumDataSpectrum@info = info newRLumDataSpectrum@.uid = .uid newRLumDataSpectrum@.pid = .pid } return(newRLumDataSpectrum) } ) # get_RLum() ---------------------------------------------------------------------------------- #' @describeIn RLum.Data.Spectrum #' Accessor method for RLum.Data.Spectrum object. The argument info.object #' is optional to directly access the info elements. If no info element name #' is provided, the raw curve data (matrix) will be returned #' #' @param object [`get_RLum`], [`names_RLum`] (**required**): #' an object of class [RLum.Data.Spectrum-class] #' #' @param info.object [`get_RLum`]; [character] (*optional*): #' the name of the info object to be called #' #' @return #' #' **`[get_RLum]`** #' #' 1. A [matrix] with the spectrum values or #' 2. only the info object if `info.object` was set. #' #' @md #' @export setMethod("get_RLum", signature("RLum.Data.Spectrum"), definition = function(object, info.object) { ##if missing info.object just show the curve values if (missing(info.object) == FALSE){ if(is(info.object, "character") == FALSE) stop("[get_RLum] 'info.object' has to be a character!", call. = FALSE) if (info.object %in% names(object@info) == TRUE){ unlist(object@info[info.object]) } else { stop(paste0( "[get_RLum] Invalid element name. Valid names are: ", paste(names(object@info), collapse = ", ") ), call. 
= FALSE) } } else { object@data } }) # names() ------------------------------------------------------------------------------------- #' @describeIn RLum.Data.Spectrum #' Returns the names info elements coming along with this curve object #' #' @return #' #' **`[names_RLum]`** #' #' The names of the info objects #' #' @md #' @export setMethod("names_RLum", "RLum.Data.Spectrum", function(object){ names(object@info) }) # bin_RLum() ----------------------------------------------------------------------------------# #' @describeIn RLum.Data.Spectrum #' Allows binning of RLum.Data.Spectrum data. Count values and values on the x-axis are summed-up; #' for wavalength/energy values the mean is calculated. #' #' @param bin_size.col [integer] (*with default*): #' set number of channels used for each bin, e.g. `bin_size.col = 2` means that #' two channels are binned. Note: The function does not check the input, very large values #' mean a full column binning (a single sum) #' #' @param bin_size.row [integer] (*with default*): #' set number of channels used for each bin, e.g. `bin_size.row = 2` means that #' two channels are binned. Note: The function does not check the input, very large values #' mean a full row binning (a single sum) #' #' @return #' #' **`[bin_RLum.Data]`** #' #' Same object as input, after applying the binning. #' #' @md #' @export setMethod(f = "bin_RLum.Data", signature = "RLum.Data.Spectrum", function(object, bin_size.col = 1, bin_size.row = 1) { ##makee sure that we have no input problems if (class(bin_size.col) != "numeric" || class(bin_size.row) != "numeric"){ stop("[bin_RLum.Data()] 'bin_size.row' and 'bin_size.col' must be of class 'numeric'!", call. 
= FALSE) } ##make sure that we do not get in trouble with negative values bin_size.col <- abs(bin_size.col) bin_size.row <- abs(bin_size.row) ##perform binning ##we want to be efficient, so we start ##with the larger object if(bin_size.row > bin_size.col){ ##row binning first m <- .matrix_binning(object@data, bin_size = bin_size.row, bin_col = FALSE, names = "mean") m <- .matrix_binning(m, bin_size = bin_size.col, bin_col = TRUE, names = "groups") } else { ##column binning first m <- .matrix_binning(object@data, bin_size = bin_size.col, bin_col = TRUE, names = "groups") m <- .matrix_binning(m, bin_size = bin_size.row, bin_col = FALSE, names = "mean") } ##write back to object object@data <- m ##return object return(object) }) Luminescence/R/set_Risoe.BINfileData.R0000644000176200001440000000175713231137116017215 0ustar liggesusers#' General accessor function for RLum S4 class objects #' #' Function calls object-specific get functions for RisoeBINfileData S4 class objects. #' #' The function provides a generalised access point for specific #' [Risoe.BINfileData-class] objects.\cr #' Depending on the input object, the corresponding get function will be selected. #' Allowed arguments can be found in the documentations of the corresponding #' [Risoe.BINfileData-class] class. #' #' @param METADATA x #' #' @param DATA x #' #' @param .RESERVED x #' #' @return Return is the same as input objects as provided in the list. 
#' #' @section Function version: 0.1 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #' @seealso [Risoe.BINfileData-class] #' #' @keywords utilities #' #' @md #' @export setGeneric("set_Risoe.BINfileData", function(METADATA = data.frame(), DATA = list(), .RESERVED = list()) { standardGeneric("set_Risoe.BINfileData") }, package = "Luminescence" ) Luminescence/R/merge_RLum.Analysis.R0000644000176200001440000001017213231137116017027 0ustar liggesusers#' Merge function for RLum.Analysis S4 class objects #' #' Function allows merging of RLum.Analysis objects and adding of allowed #' objects to an RLum.Analysis. #' #' This function simply allowing to merge [RLum.Analysis-class] #' objects. Additionally other [RLum-class] objects can be added #' to an existing [RLum.Analysis-class] object. Supported objects #' to be added are: [RLum.Data.Curve-class], #' [RLum.Data.Spectrum-class] and #' [RLum.Data.Image-class]. #' #' The order in the new [RLum.Analysis-class] object is the object #' order provided with the input list. #' #' @param objects [list] of [RLum.Analysis-class] (**required**): #' list of S4 objects of class `RLum.Analysis`. Furthermore other objects of #' class [RLum-class] can be added, see details. #' #' @return Return an [RLum.Analysis-class] object. #' #' @note #' The information for the slot 'protocol' is taken from the first #' [RLum.Analysis-class] object in the input list. Therefore at #' least one object of type [RLum.Analysis-class] has to be provided. 
#'
#' @section Function version: 0.2.0
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @seealso [merge_RLum], [RLum.Analysis-class], [RLum.Data.Curve-class],
#' [RLum.Data.Spectrum-class], [RLum.Data.Image-class], [RLum-class]
#'
#'
#' @keywords utilities internal
#'
#' @examples
#'
#'
#' ##merge different RLum objects from the example data
#' data(ExampleData.RLum.Analysis, envir = environment())
#' data(ExampleData.BINfileData, envir = environment())
#'
#' object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1)
#' curve <- get_RLum(object)[[2]]
#'
#' temp.merged <- merge_RLum.Analysis(list(curve, IRSAR.RF.Data, IRSAR.RF.Data))
#'
#' @md
#' @export
merge_RLum.Analysis<- function(
  objects
){

  # Integrity checks ----------------------------------------------------------------------------

  ##check that every list element is of class 'RLum' (or a derivative class);
  ##as a side effect collect the concrete class name of each element
  temp.class.test <- sapply(1:length(objects), function(x){

    if(is(objects[[x]], "RLum") == FALSE){
      temp.text <- paste(
        "[merge_RLum.Analysis()]: At least element", x,
        "is not of class 'RLum' or a derivative class!")
      stop(temp.text)
    }

    ##provide class of objects
    is(objects[[x]])[1]

  })

  ##check if at least one object of RLum.Analysis is provided
  if(!"RLum.Analysis"%in%temp.class.test){
    stop("[merge_RLum.Analysis()] At least one input object in the list has to be of class 'RLum.Analysis'!")
  }

  # Merge objects -------------------------------------------------------------------------------

  ##(0) get the current environment; used below to assign temp.meta.data.first from
  ##    inside the lapply() callback
  temp.environment <- environment()
  temp.meta.data.first <- NA; rm(temp.meta.data.first) #to avoid problems with the R check routine

  ##(1) collect all elements in a list
  temp.element.list <- unlist(lapply(1:length(objects), function(x){

    ##Depending on the element the right function is used
    if(is(objects[[x]])[1] == "RLum.Analysis"){

      ##grep export meta data from the first RLum.Analysis object and write
      if(!exists("temp.meta.data.first")){
assign("temp.meta.data.first", objects[[x]]@protocol, envir = temp.environment) } ##return to list get_RLum(objects[[x]]) }else if((is(objects[[x]])[1] == "RLum.Data.Curve") | (is(objects[[x]])[1] == "RLum.Data.Image") | (is(objects[[x]])[1] == "RLum.Data.Spectrum")){ ##return to list objects[[x]] }else{ stop("[merge_RLum.Anlysis()] What ever was provided, this 'RLum' object is not supported!") } })) # Build new RLum.Analysis object -------------------------------------------------------------- temp.new.RLum.Analysis <- set_RLum( class = "RLum.Analysis", originator = "merge_RLum.Analysis", records = temp.element.list, protocol = temp.meta.data.first, info = unlist(lapply(objects, function(x) { x@info }), recursive = FALSE), .pid = unlist(lapply(objects, function(x) { x@.uid })) ) # Return object ------------------------------------------------------------------------------- return( temp.new.RLum.Analysis) } Luminescence/R/RcppExports.R0000644000176200001440000000144713604173202015505 0ustar liggesusers# Generated by using Rcpp::compileAttributes() -> do not edit by hand # Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393 create_UID <- function() { .Call(`_Luminescence_create_UID`) } src_analyse_IRSARRF_SRS <- function(values_regenerated_limited, values_natural_limited, vslide_range, n_MC, trace = FALSE) { .Call(`_Luminescence_analyse_IRSARRF_SRS`, values_regenerated_limited, values_natural_limited, vslide_range, n_MC, trace) } src_create_RLumDataCurve_matrix <- function(DATA, VERSION, NPOINTS, LTYPE, LOW, HIGH, AN_TEMP, TOLDELAY, TOLON, TOLOFF) { .Call(`_Luminescence_create_RLumDataCurve_matrix`, DATA, VERSION, NPOINTS, LTYPE, LOW, HIGH, AN_TEMP, TOLDELAY, TOLON, TOLOFF) } src_get_XSYG_curve_values <- function(s) { .Call(`_Luminescence_src_get_XSYG_curve_values`, s) } Luminescence/R/plot_Risoe.BINfileData.R0000644000176200001440000002112513231137116017367 0ustar liggesusers#' Plot single luminescence curves from a BIN file object #' #' Plots single luminescence 
curves from an object returned by the #' [read_BIN2R] function. #' #' **Nomenclature** #' #' See [Risoe.BINfileData-class] #' #' **curve.transformation** #' #' This argument allows transforming continuous wave (CW) curves to pseudo #' (linear) modulated curves. For the transformation, the functions of the #' package are used. Currently, it is not possible to pass further arguments #' to the transformation functions. The argument works only for `ltype` #' `OSL` and `IRSL`. #' #' **Irradiation time** #' #' Plotting the irradiation time (s) or the given dose (Gy) requires that the #' variable `IRR_TIME` has been set within the BIN-file. This is normally #' done by using the 'Run Info' option within the Sequence Editor or by editing #' in R. #' #' @param BINfileData [Risoe.BINfileData-class] (**required**): #' requires an S4 object returned by the [read_BIN2R] function. #' #' @param position [vector] (*optional*): #' option to limit the plotted curves by position #' (e.g. `position = 1`, `position = c(1,3,5)`). #' #' @param run [vector] (*optional*): #' option to limit the plotted curves by run #' (e.g., `run = 1`, `run = c(1,3,5)`). #' #' @param set [vector] (*optional*): #' option to limit the plotted curves by set #' (e.g., `set = 1`, `set = c(1,3,5)`). #' #' @param sorter [character] (*with default*): #' the plot output can be ordered by "POSITION","SET" or "RUN". #' POSITION, SET and RUN are options defined in the Risoe Sequence Editor. #' #' @param ltype [character] (*with default*): #' option to limit the plotted curves by the type of luminescence stimulation. #' Allowed values: `"IRSL"`, `"OSL"`,`"TL"`, `"RIR"`, `"RBR"` #' (corresponds to LM-OSL), `"RL"`. All type of curves are plotted by #' default. #' #' @param curve.transformation [character] (*optional*): #' allows transforming CW-OSL and CW-IRSL curves to pseudo-LM curves via #' transformation functions. Allowed values are: `CW2pLM`, `CW2pLMi`, `CW2pHMi` and #' `CW2pPMi`. See details. 
#' #' @param dose_rate [numeric] (*optional*): #' dose rate of the irradition source at the measurement date. #' If set, the given irradiation dose will be shown in Gy. See details. #' #' @param temp.lab [character] (*optional*): #' option to allow for different temperature units. If no value is set deg. C is chosen. #' #' @param cex.global [numeric] (*with default*): #' global scaling factor. #' #' @param ... further undocumented plot arguments. #' #' @return Returns a plot. #' #' @note #' The function has been successfully tested for the Sequence Editor file #' output version 3 and 4. #' #' @section Function version: 0.4.1 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr #' Michael Dietze, GFZ Potsdam (Germany) #' #' @seealso [Risoe.BINfileData-class],[read_BIN2R], [CW2pLM], [CW2pLMi], #' [CW2pPMi], [CW2pHMi] #' #' @references #' Duller, G., 2007. Analyst. pp. 1-45. #' #' @keywords dplot #' #' @examples #' #' ##load data #' data(ExampleData.BINfileData, envir = environment()) #' #' ##plot all curves from the first position to the desktop #' #pdf(file = "~/Desktop/CurveOutput.pdf", paper = "a4", height = 11, onefile = TRUE) #' #' ##example - load from *.bin file #' #BINfile<- file.choose() #' #BINfileData<-read_BIN2R(BINfile) #' #' #par(mfrow = c(4,3), oma = c(0.5,1,0.5,1)) #' #plot_Risoe.BINfileData(CWOSL.SAR.Data,position = 1) #' #mtext(side = 4, BINfile, outer = TRUE, col = "blue", cex = .7) #' #dev.off() #' #' @md #' @export plot_Risoe.BINfileData<- function( BINfileData, position, run, set, sorter = "POSITION", ltype = c("IRSL","OSL","TL","RIR","RBR","RL"), curve.transformation, dose_rate, temp.lab, cex.global = 1, ... ){ ##check if the object is of type Risoe.BINfileData if(class(BINfileData)!="Risoe.BINfileData"){stop("Wrong object! 
Object of type Risoe.BINfileData needed.")} temp<-BINfileData # Missing check ---------------------------------------------------------------- ##set plot position if missing if(missing(position)==TRUE){position<-c(min(temp@METADATA[,"POSITION"]):max(temp@METADATA[,"POSITION"]))} if(missing(run)==TRUE){run<-c(min(temp@METADATA[,"RUN"]):max(temp@METADATA[,"RUN"]))} if(missing(set)==TRUE){set<-c(min(temp@METADATA[,"SET"]):max(temp@METADATA[,"SET"]))} ##temp.lab if(missing(temp.lab) == TRUE){temp.lab <- "\u00B0C"} ##fun extraArgs <- list(...) # read out additional arguments list fun <- if("fun" %in% names(extraArgs)) {extraArgs$fun} else {FALSE} # Ordering -------------------------------------------------------------------- ##(1) order by RUN, SET OR BY POSITION if(sorter=="RUN"){ temp@METADATA<-temp@METADATA[order(temp@METADATA[,"RUN"]),] }else if(sorter=="SET"){ temp@METADATA<-temp@METADATA[order(temp@METADATA[,"SET"]),] }else { temp@METADATA<-temp@METADATA[order(temp@METADATA[,"POSITION"]),] } # Select values for plotting ------------------------------------------------------------------ ##(2) set SEL for selected position ##set all to FALSE temp@METADATA[,"SEL"]<-FALSE ##set TRUE temp@METADATA[(temp@METADATA[,"POSITION"] %in% position)==TRUE & (temp@METADATA[,"RUN"] %in% run)==TRUE & (temp@METADATA[,"SET"] %in% set)==TRUE & (temp@METADATA[,"LTYPE"] %in% ltype)==TRUE,"SEL"]<-TRUE ##------------------------------------------------------------------------## ##PLOTTING ##------------------------------------------------------------------------## ##(3) plot curves for(i in 1:length(temp@METADATA[,"ID"])){ ##print only if SEL == TRUE if(temp@METADATA[i,"SEL"]==TRUE) { ##find measured unit measured_unit<-if(temp@METADATA[i,"LTYPE"]=="TL"){" \u00B0C"}else{"s"} ##set x and y values values.x <- seq(temp@METADATA[i,"HIGH"]/temp@METADATA[i,"NPOINTS"], temp@METADATA[i,"HIGH"],by=temp@METADATA[i,"HIGH"]/temp@METADATA[i,"NPOINTS"]) values.y <- 
unlist(temp@DATA[temp@METADATA[i,"ID"]]) values.xy <- data.frame(values.x, values.y) ##set curve transformation if wanted if((temp@METADATA[i,"LTYPE"] == "OSL" | temp@METADATA[i,"LTYPE"] == "IRSL") & missing(curve.transformation) == FALSE){ if(curve.transformation=="CW2pLM"){ values.xy <- CW2pLM(values.xy) }else if(curve.transformation=="CW2pLMi"){ values.xy <- CW2pLMi(values.xy)[,1:2] }else if(curve.transformation=="CW2pHMi"){ values.xy <- CW2pHMi(values.xy)[,1:2] }else if(curve.transformation=="CW2pPMi"){ values.xy <- CW2pPMi(values.xy)[,1:2] }else{ warning("Function for curve.transformation is unknown. No transformation is performed.") } } ##plot graph plot(values.xy, main=paste("pos=", temp@METADATA[i,"POSITION"],", run=", temp@METADATA[i,"RUN"], ", set=", temp@METADATA[i,"SET"],sep="" ), type="l", ylab=paste(temp@METADATA[i,"LTYPE"]," [cts/",round(temp@METADATA[i,"HIGH"]/temp@METADATA[i,"NPOINTS"],digits=3)," ", measured_unit,"]",sep=""), xlab=if(measured_unit=="\u00B0C"){paste("temp. 
[",temp.lab,"]",sep="")}else{"time [s]"}, col=if(temp@METADATA[i,"LTYPE"]=="IRSL" | temp@METADATA[i,"LTYPE"]=="RIR"){"red"} else if(temp@METADATA[i,"LTYPE"]=="OSL" | temp@METADATA[i,"LTYPE"]=="RBR"){"blue"} else{"black"}, sub=if(temp@METADATA[i,"LTYPE"]=="TL"){paste("(",temp@METADATA[i,"RATE"]," K/s)",sep="")}else{}, lwd=1.2*cex.global, cex=0.9*cex.global ) ##add mtext for temperature ##grep temperature (different for different verions) temperature<-if(temp@METADATA[i,"VERSION"]=="03"){temp@METADATA[i,"AN_TEMP"]} else{temp@METADATA[i,"TEMPERATURE"]} ##mtext mtext(side=3, if(temp@METADATA[i,"LTYPE"]=="TL"){paste("TL to ",temp@METADATA[i,"HIGH"], " ",temp.lab,sep="")} else{paste(temp@METADATA[i,"LTYPE"],"@",temperature," ",temp.lab ,sep="")}, cex=0.9*cex.global) ##add mtext for irradiation mtext(side=4,cex=0.8*cex.global, line=0.5, if(temp@METADATA[i, "IRR_TIME"]!=0){ if(missing("dose_rate")==TRUE){ paste("dose = ",temp@METADATA[i, "IRR_TIME"], " s", sep="") }else{ paste("dose = ",temp@METADATA[i, "IRR_TIME"]*dose_rate, " Gy", sep="") } } )#end mtext }#endif::selection }#endforloop if(fun==TRUE){sTeve()} } Luminescence/R/Risoe.BINfileData2RLum.Data.Curve.R0000644000176200001440000000726213540654503021163 0ustar liggesusers#' Convert an element from a Risoe.BINfileData object to an RLum.Data.Curve #' object #' #' The function converts one specified single record from a Risoe.BINfileData #' object to an RLum.Data.Curve object. #' #' The function extracts all `METADATA` from the `Risoe.BINfileData` #' object and stores them in the `RLum.Data.Curve` object. This function #' can be used stand-alone, but is the base function for [Risoe.BINfileData2RLum.Analysis]. #' #' @param object [Risoe.BINfileData-class] (**required**): #' `Risoe.BINfileData` object #' #' @param id [integer] (**required**): #' record id in the `Risoe.BINfileData` object of the curve that is to be #' stored in the `RLum.Data.Curve` object. 
#' If no value for id is provided, #' the record has to be specified by `pos`, `set` and `run`. #' #' @param pos [integer] (*optional*): #' record position number in the `Risoe.BINfileData` object of the curve that #' is to be stored in the `RLum.Data.Curve` object. If a value for `id` is #' provided, this argument is ignored. #' #' @param run [integer] (*optional*): #' record run number in the `Risoe.BINfileData` object of the curve that is #' to be stored in the `RLum.Data.Curve` object. If a value for `id` is #' provided, this argument is ignored. #' #' @param set [integer] (*optional*): #' record set number in the `Risoe.BINfileData` object of the curve that is #' to be stored in the `RLum.Data.Curve` object. If a value for `id` is #' provided, this argument is ignored. #' #' @return Returns an [RLum.Data.Curve-class] object. #' #' @note #' Due to changes in the BIN-file format (version 3 to version 4) the recalculation of TL-curves might not be #' entirely correct for cases where the TL measurement is combined with a preheat. #' #' @section Function version: 0.5.0 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr #' Christoph Burow, University of Cologne (Germany) #' #' @seealso [Risoe.BINfileData2RLum.Analysis], [set_RLum], #' [RLum.Data.Curve-class], [RLum.Analysis-class], [Risoe.BINfileData-class], #' [plot_RLum] #' #' @keywords manip #' #' @examples #' #' ##get package example data #' data(ExampleData.BINfileData, envir = environment()) #' #' ##convert one record #' Risoe.BINfileData2RLum.Data.Curve(CWOSL.SAR.Data, id = 1) #' #' @md #' @noRd .Risoe.BINfileData2RLum.Data.Curve <- function( object, id, pos, run, set ){ ##disaggregate object ...
this makes it much faster below ##we could also access via index, not number, but this is far to risky, as ##every update in the BIN-file version will break the code here METADATA <- as.list(object@METADATA) DATA <- object@DATA # grep id of record ------------------------------------------------------- ##if id is set, no input for pos and rund is nescessary if (missing(id)) { id <- METADATA[METADATA[["POSITION"]] == pos & METADATA[["SET"]] == set & METADATA[["RUN"]] == run, "ID"] } ##grep info elements info <- lapply(1:length(names(METADATA)), function(x){METADATA[[x]][id]}) names(info) <- names(METADATA) # Build object ------------------------------------------------------------ set_RLum( class = "RLum.Data.Curve", recordType = METADATA[["LTYPE"]][id], data = src_create_RLumDataCurve_matrix( DATA = DATA[[id]], NPOINTS = METADATA[["NPOINTS"]][id], VERSION = METADATA[["VERSION"]][id], LTYPE = METADATA[["LTYPE"]][id], LOW = METADATA[["LOW"]][id], HIGH = METADATA[["HIGH"]][id], AN_TEMP = METADATA[["AN_TEMP"]][id], TOLDELAY = METADATA[["TOLDELAY"]][id], TOLON = METADATA[["TOLON"]][id], TOLOFF = METADATA[["TOLOFF"]][id] ), info = info ) } Luminescence/R/scale_GammaDose.R0000644000176200001440000007504613437452273016242 0ustar liggesusers#' Calculate the gamma dose deposited within a sample taking layer-to-layer #' variations in radioactivity into account (according to Aitken, 1985) #' #' This function calculates the gamma dose deposited in a luminescence sample #' taking into account layer-to-layer variations in sediment radioactivity . #' The function scales user inputs of Uranium, Thorium and Potassium based on #' input parameters for sediment density, water content and given layer #' thicknesses and distances to the sample. #' #' **User Input** #' #' To calculate the gamma dose which is deposited in a sample, the user needs #' to provide information on those samples influencing the luminescence sample. 
#' As a rule of thumb, all sediment layers within at least a 30 cm radius of
#' the luminescence sample should be taken into account when calculating
#' the gamma dose rate.
#' inert and positioned in between the infinitely thick and equally active
#' The conversion factors used to calculate the dose rate from sediment
#' nuclide contents.
#' #' @return #' #' After performing the calculations the user is provided with different outputs. #' 1. The total gamma dose rate received by the sample (+/- uncertainties) as a #' print in the console. #' 2. A plot showing the sediment sequence, the user input sample information #' and the contribution to total gamma dose rate. #' 3. RLum Results. If the user wishes to save these results, writing a script #' to run the function and to save the results would look like this: #' #' ``` #' mydata <- read.table("c:/path/to/input/file.txt") #' results <- scale_GammaDose(mydata) #' table <- get_RLum(results) #' write.csv(table, "c:/path/to/results.csv") #' ``` #' #' -----------------------------------\cr #' `[ NUMERICAL OUTPUT ]`\cr #' -----------------------------------\cr #' #' **`RLum.Results`**-object #' #' **slot:** **`@data`** #' #' \tabular{lll}{ #' **Element** \tab **Type** \tab **Description**\cr #' `$summary` \tab `data.frame` \tab summary of the model results \cr #' `$data` \tab `data.frame` \tab the original input data \cr #' `$dose_rates` \tab `list` \tab two `data.frames` for the scaled and infinite matrix dose rates \cr #' `$tables` \tab `list` \tab several `data.frames` containing intermediate results \cr #' `$args` \tab `character` \tab arguments of the call \cr #' `$call` \tab `call` \tab the original function call \cr #' } #' #' **slot:** **`@info`** #' #' Currently unused. #' #' ------------------------\cr #' `[ PLOT OUTPUT ]`\cr #' ------------------------\cr #' #' Three plots are produced: #' #' - A visualisation of the provided sediment layer structure to quickly #' assess whether the data was provided and interpreted correctly. #' - A scatter plot of the nuclide contents per layer (K, Th, U) as well as the #' water content. This may help to correlate the dose rate contribution of #' specific layers to the layer of interest. 
#' - A barplot visualising the contribution of each layer to the total dose rate #' received by the sample in the target layer. #' #' @section Function version: 0.1.1 #' #' @keywords datagen #' #' @note #' **This function has BETA status. If possible, results should be** #' **cross-checked.** #' #' @author Svenja Riedesel, Aberystwyth University (United Kingdom) \cr #' Martin Autzen, DTU NUTECH Center for Nuclear Technologies (Denmark) \cr #' Christoph Burow, University of Cologne (Germany) \cr #' Based on an excel spreadsheet and accompanying macro written by Ian Bailiff. #' #' @seealso [ExampleData.ScaleGammaDose], [approx], [barplot] #' #' @references #' #' Aitken, M.J., 1985. Thermoluminescence Dating. Academic Press, London. #' #' Adamiec, G., Aitken, M.J., 1998. Dose-rate conversion factors: update. #' Ancient TL 16, 37-46. #' #' Cresswell, A. J., Carter, J., Sanderson, D. C. W., in press. Dose rate #' conversion parameters: Assessment of nuclear data. Radiation Measurements. #' #' Guerin, G., Mercier, N., Adamiec, G., 2011. Dose-rate conversion #' factors: update. Ancient TL, 29, 5-8. #' #' Liritzis, I., Stamoulis, K., Papachristodoulou, C., Ioannides, K., 2013. #' A re-evaluation of radiation dose-rate conversion factors. Mediterranean #' Archaeology and Archaeometry 13, 1-15. #' #' #' @section Acknowledgements: #' #' We thank Dr Ian Bailiff for the provision of an excel spreadsheet, which has #' been very helpful when writing this function. 
#'
#' @examples
#'
#' # Load example data
#' data("ExampleData.ScaleGammaDose", envir = environment())
#' x <- ExampleData.ScaleGammaDose
#'
#' # Scale gamma dose rate
#' results <- scale_GammaDose(data = x,
#'                            conversion_factors = "Cresswelletal2019",
#'                            fractional_gamma_dose = "Aitken1985",
#'                            verbose = TRUE,
#'                            plot = TRUE)
#'
#' get_RLum(results)
#'
#' @md
#' @export
scale_GammaDose <- function(
  data,
  conversion_factors = c("Cresswelletal2019", "Guerinetal2011",
                         "AdamiecAitken1998", "Liritzisetal2013")[1],
  fractional_gamma_dose = c("Aitken1985")[1],
  verbose = TRUE,
  plot = TRUE,
  plot_single = TRUE,
  ...)
{

  ## HELPER FUNCTION ----
  # Wrapper for formatC to enforce precise digit printing
  f <- function(x, d = 3) formatC(x, digits = d, format = "f")

  ## ------------------------------------------------------------------------ ##
  ## LOAD TABLES
  ## ------------------------------------------------------------------------ ##
  # To satisfy CRAN check ('no visible global binding')
  BaseDataSet.ConversionFactors <- BaseDataSet.FractionalGammaDose <- NA
  load(system.file("data", "BaseDataSet.ConversionFactors.rda", package = "Luminescence"))
  load(system.file("data", "BaseDataSet.FractionalGammaDose.rda", package = "Luminescence"))

  ## ------------------------------------------------------------------------ ##
  ## DEFAULT SETTINGS
  ## ------------------------------------------------------------------------ ##
  settings <- list(
    main = "Contributions of each layer to the total \n gamma dose rate received by the sample",
    xlab = "Contribution to total gamma dose rate (%) received by the sample",
    cex = 1.0,
    col = "grey",
    info = list()
  )
  # overwrite and append default values
  settings <- modifyList(settings, list(...))

  ## ------------------------------------------------------------------------ ##
  ## CHECK INPUT
  ## ------------------------------------------------------------------------ ##

  ## Input data
  # basic class and length check
  if (!is.data.frame(data))
    stop("'data' must be a data frame.", call. = FALSE)
  if (ncol(data) != 12)
    stop("'data' must have 12 columns (currently ", ncol(data), ").", call. = FALSE)

  # make sure that it has the correct column names
  colnames_expected <- c("id","thickness","sample_offset","K","K_se","Th","Th_se","U","U_se",
                         "water_content","water_content_se", "density")

  if (is.null(names(data)) || any(names(data) != colnames_expected)) {
    if (verbose)
      warning("Unexpected column names for 'data'. New names were automatically assigned. ",
              "Please make sure that columns are in proper order. See documentation.",
              call. = FALSE)

    colnames(data) <- colnames_expected
  }

  # check if there is only one target layer
  # NOTE: error message grammar fixed ("must be contain" -> "must contain")
  if (sum(!is.na(data$sample_offset)) != 1)
    stop("Only one layer must contain a numeric value in column 'sample_offset', all other rows must be `NA`.",
         call. = FALSE)
  # NOTE: error message fixed (duplicated "the")
  if (!is.numeric(data$sample_offset[which(!is.na(data$sample_offset))]))
    stop("Non-numeric value in the row of the target layer.", call. = FALSE)
  if (data$sample_offset[which(!is.na(data$sample_offset))] < 0)
    stop("The numeric value in 'sample_offset' must be positive.", call. = FALSE)
  if (data$sample_offset[which(!is.na(data$sample_offset))] > data$thickness[which(!is.na(data$sample_offset))])
    stop("Impossible! Sample offset larger than the target-layer's thickness!", call. = FALSE)

  # conversion factors
  if (length(conversion_factors) != 1 || !is.character(conversion_factors))
    stop("'conversion_factors' must be an object of length 1 and of class 'character'.", call. = FALSE)
  if (!conversion_factors %in% names(BaseDataSet.ConversionFactors))
    stop("Invalid 'conversion_factors'. Valid options: ",
         paste(names(BaseDataSet.ConversionFactors), collapse = ", "), ".", call. = FALSE)

  # tables for gamma dose fractions
  if (length(fractional_gamma_dose) != 1 || !is.character(fractional_gamma_dose))
    stop("'fractional_gamma_dose' must be an object of length 1 and of class 'character'.", call. = FALSE)
  if (!fractional_gamma_dose %in% names(BaseDataSet.FractionalGammaDose))
    stop("Invalid 'fractional_gamma_dose'. Valid options: ",
         paste(names(BaseDataSet.FractionalGammaDose), collapse = ", "), ".", call. = FALSE)

  ## ------------------------------------------------------------------------ ##
  ## Select tables
  ## ------------------------------------------------------------------------ ##
  conv_fac <- BaseDataSet.ConversionFactors[[conversion_factors]]$gamma
  frac_dose <- BaseDataSet.FractionalGammaDose[[fractional_gamma_dose]]

  ## ------------------------------------------------------------------------ ##
  ## CALCULATION
  ## ------------------------------------------------------------------------ ##

  ## infinite matrix dose rates per nuclide, with relative errors combining
  ## the measurement error and the conversion-factor error in quadrature
  dose_rate <- data.frame(
    K = data$K * conv_fac$K[1],
    K_re = sqrt( (data$K_se / data$K)^2 + conv_fac$K[2]^2 ),
    Th = data$Th * conv_fac$Th[1],
    Th_re = sqrt( (data$Th_se / data$Th)^2 + conv_fac$Th[2]^2 ),
    U = data$U * conv_fac$U[1],
    U_re = sqrt( (data$U_se / data$U)^2 + conv_fac$U[2]^2 )
  )

  dose_rate$sum <- dose_rate$K + dose_rate$Th + dose_rate$U
  dose_rate$sum_re <- sqrt(dose_rate$K_re^2 + dose_rate$Th_re^2 + dose_rate$U_re^2)

  dose_rate$K_frac <- dose_rate$K / dose_rate$sum
  dose_rate$K_frac_re <- sqrt(dose_rate$K_re^2 + dose_rate$sum_re^2 )
  dose_rate$Th_frac <- dose_rate$Th / dose_rate$sum
  dose_rate$Th_frac_re <- sqrt(dose_rate$Th_re^2 + dose_rate$sum_re^2 )
  dose_rate$U_frac <- dose_rate$U / dose_rate$sum
  dose_rate$U_frac_re <- sqrt(dose_rate$U_re^2 + dose_rate$sum_re^2 )

  ## weighted fractional dose:
  ## rescale the tabulated z-values by each layer's wet bulk density
  z_scale <- do.call(cbind, Map(function(d, wc) {
    (frac_dose$z * 2) / (d + ( (wc / 100) * d))
  }, data$density, data$water_content))

  layer_fracDoseRate <- do.call(cbind, Map(function(K, Th, U, K_re, Th_re, U_re) {
    data.frame(
      val = frac_dose$K * K + frac_dose$Th * Th + frac_dose$U * U,
      err = sqrt( K_re^2 + Th_re^2 + U_re^2 )
    )
  }, dose_rate$K_frac, dose_rate$Th_frac, dose_rate$U_frac,
  dose_rate$K_frac_re, dose_rate$Th_frac_re, dose_rate$U_frac_re))

  ## TODO: LEGACY CODE
  target <- which(!is.na(data$sample_offset))
  distance <- data.frame(upper = c(rev(cumsum(data$thickness[target:1])[-1]) - data$sample_offset[target],
                                   abs(data$sample_offset[target]),
                                   cumsum(data$thickness[(target+1):nrow(data)]) + data$sample_offset[target]))
  distance$lower <- abs(distance$upper - data$thickness)

  ## Calculate infinite matrix dose rate and dose received by the sample
  ## MAP: iterate over LAYERS
  Inf_frac <- as.data.frame(do.call(rbind, Map(function(z, n) {

    interpol <- Map(function(x) {
      approx(z, x, n = 1000, method = "linear")
    }, frac_dose[, c("K", "Th", "U")])

    x1 = data$thickness[n]
    x2 = 0
    C1 = which.min(abs(interpol$K$x - x1))
    C2 = which.min(abs(interpol$K$x - x2))

    ## MAP: iterate over NUCLIDE
    do.call(cbind, Map(function(x) {

      y1 = interpol[[x]]$y[C1]
      y2 = interpol[[x]]$y[C2]

      ### ----
      # layer other than the target: propagate the fractional dose layer by
      # layer towards the target, re-interpolating on each layer's z-scale
      if (n != target) {
        if (n < target) {
          k <- n + 1
          seq <- k:target
        } else if (n > target) {
          k <- n - 1
          seq <- target:k
        }

        for (j in seq) {
          fit <- approx(z_scale[ ,j], frac_dose[ , x], n = 1000, method = "linear")
          x1_temp <- which.min(abs(fit$y - y1))
          x2_temp <- which.min(abs(fit$y - y2))

          if (j != target) {
            x1 <- fit$x[x1_temp] + data$thickness[j]
            x2 <- fit$x[x2_temp] + data$thickness[j]
          }
          if (j == target) {
            if (n < target) {
              x1 <- fit$x[x1_temp] + data$thickness[target] - data$sample_offset[target]
              x2 <- fit$x[x2_temp] + data$thickness[target] - data$sample_offset[target]
            }
            if (n > target) {
              x1 <- fit$x[x1_temp] + data$sample_offset[target]
              x2 <- fit$x[x2_temp] + data$sample_offset[target]
            }
          }

          C1_temp <- which.min(abs(fit$x - x1))
          C2_temp <- which.min(abs(fit$x - x2))
          y1 <- fit$y[C1_temp]
          y2 <- fit$y[C2_temp]
        }
        r <- y1 - y2
      }

      ### ----
      # target layer itself: fractional dose from both layer boundaries
      if (n == target) {
        x1 <- data$sample_offset[target]
        x2 <- abs(data$thickness[target] - data$sample_offset[target])
        C1_temp <- which.min(abs(interpol[[x]]$x - x1))
        C2_temp <- which.min(abs(interpol[[x]]$x - x2))
        r <- interpol[[x]]$y[C1_temp] + interpol[[x]]$y[C2_temp] - 1
      }

      return(r)
    }, c("K", "Th", "U")))

  }, as.data.frame(z_scale), 1:nrow(data))))

  ## Generate output object
  op <- setNames(vector(mode = "list", length = 17),
                 nm = c("K","K_se","Th","Th_se","U","U_se","sum","sum_se",
                        "K_inf","K_inf_se","Th_inf","Th_inf_se","U_inf","U_inf_se","sum_inf","sum_inf_se",
                        "contrib"))

  # fractional dose rate (water attenuation factor 1.14 for gamma radiation)
  op$K <- Inf_frac$K * dose_rate$K / (1 + 1.14 * data$water_content / 100)
  op$K_se <- op$K * sqrt(dose_rate$K_re^2 + (data$water_content_se / data$water_content)^2)
  op$Th <- Inf_frac$Th * dose_rate$Th / (1 + 1.14 * data$water_content / 100)
  op$Th_se <- op$Th * sqrt(dose_rate$Th_re^2 + (data$water_content_se / data$water_content)^2)
  op$U <- Inf_frac$U * dose_rate$U / (1 + 1.14 * data$water_content / 100)
  op$U_se <- op$U * sqrt(dose_rate$U_re^2 + (data$water_content_se / data$water_content)^2)
  op$sum <- op$K + op$Th + op$U
  op$sum_se <- sqrt(op$K_se^2 + op$Th_se^2 + op$U_se^2)

  # infinite matrix dose rate
  op$K_inf <- op$K / Inf_frac$K
  op$K_inf_se <- op$K_inf * sqrt(dose_rate$K_re^2 + (data$water_content_se / data$water_content)^2)
  op$Th_inf <- op$Th / Inf_frac$Th
  op$Th_inf_se <- op$Th_inf * sqrt(dose_rate$Th_re^2 + (data$water_content_se / data$water_content)^2)
  op$U_inf <- op$U / Inf_frac$U
  op$U_inf_se <- op$U_inf * sqrt(dose_rate$U_re^2 + (data$water_content_se / data$water_content)^2)
  op$sum_inf <- op$K_inf + op$Th_inf + op$U_inf
  op$sum_inf_se <- sqrt(op$K_inf_se^2 + op$Th_inf_se^2 + op$U_inf_se^2)

  ## Calculate the total dose rates
  # value/error columns alternate, hence the step of 2
  for(i in seq(1, length(op)-1, 2)) {
    op[[i]] <- c(op[[i]], sum(op[[i]]))
    op[[i+1]] <- c(op[[i+1]], sqrt(sum(op[[i+1]]^2)))
  }

  ## Calculate contributions for each layer
  op$contrib <- op$sum[1:nrow(data)] / op$sum[nrow(data)+1] * 100
  op$contrib <- c(op$contrib, sum(op$contrib))

  # Cast to data.frame
  op <- as.data.frame(do.call(cbind, op))

  ## ------------------------------------------------------------------------ ##
  ## CONSOLE OUTPUT
  ## ------------------------------------------------------------------------ ##
  if (verbose) {
    cat(paste0("\n [scale_GammaDose()]\n\n"))
    cat(" ----\n")
    cat(" Conversion factors:", conversion_factors, "\n")
    cat(" Gamma dose fractions:", fractional_gamma_dose, "\n")
    cat(" Target layer:", data$id[target], "\n\n")

    cat(" ---- Infinite matrix gamma dose rate per layer ----\n\n")
    print(data.frame(ID = data$id,
                     `K (Gy/ka)` = paste0(f(op$K_inf[-(nrow(data)+1)]), "\u00b1", f(op$K_inf_se[-(nrow(data)+1)])),
                     `Th (Gy/ka)` = paste0(f(op$Th_inf[-(nrow(data)+1)]), "\u00b1", f(op$Th_inf_se[-(nrow(data)+1)])),
                     `U (Gy/ka)` = paste0(f(op$U_inf[-(nrow(data)+1)]), "\u00b1", f(op$U_inf_se[-(nrow(data)+1)])),
                     `Total (Gy/ka)` = f(op$sum_inf[-(nrow(data)+1)]),
                     check.names = FALSE
    ))
    cat("\n")

    cat(sprintf(" ---- Scaled gamma dose rate for target layer: %s ----\n\n", data$id[target]))
    print(data.frame(ID = c(data$id, "TOTAL"),
                     `K (Gy/ka)` = paste0(f(op$K), "\u00b1", f(op$K_se)),
                     `Th (Gy/ka)` = paste0(f(op$Th), "\u00b1", f(op$Th_se)),
                     `U (Gy/ka)` = paste0(f(op$U), "\u00b1", f(op$U_se)),
                     `Contribution (%)` = round(op$contrib, 1),
                     check.names = FALSE
    ))

    cat("\n ----\n")
    cat(" Infinite matrix gamma dose rate:\t", f(op$sum_inf[target]), "\u00b1", f(op$sum_inf_se[target]), "Gy/ka \n")
    cat(" Scaled gamma dose rate:\t\t", f(op$sum[length(op$sum)]), "\u00b1", f(op$sum_se[length(op$sum_se)]), "Gy/ka")
    cat("\n\n")
  }

  ## ------------------------------------------------------------------------ ##
  ## PLOT
  ## ------------------------------------------------------------------------ ##
  if (plot) {

    # save and recover plot parameters
    par.old <- par(no.readonly = TRUE)
    on.exit(par(par.old))

    if (plot_single)
      layout(matrix(c(1,1, 2, 3, 4, 5,
                      1,1, 2, 3, 4, 5,
                      1,1, 6, 6, 6, 6,
                      1,1, 6, 6, 6, 6),
                    ncol = 6, byrow = TRUE))

    ## Plot 1 - Layer setup
    ## --------------------------------------------------------------

    ## Global plot settings
    if (plot_single)
      par(mar = c(2, 5, 1, 4) + 0.1)
    else
      par(mar = c(2, 5, 4, 4) + 0.1)

    plot(NA, NA,
         main = ifelse(plot_single, "", "Profile structure"),
         xlim = c(0, 1),
         ylim = rev(range(pretty(c(sum(data$thickness), 0)))),
         xaxt = "n",
         xlab = "",
         ylab = "Depth below surface of uppermost layer (cm)",
         bty = "n",
         xaxs = "i")

    # x-axis label
    title(xlab = "Horizontal extent (a.u.)", line = 0)

    # horizontal layer lines
    abline(h = c(0, cumsum(data$thickness), sum(data$thickness)),
           lty = 1, col = "grey50", xpd = FALSE)

    # layer names
    mtext(side = 2,
          at = c(0, cumsum(data$thickness) - data$thickness / 2, sum(data$thickness)),
          text = c("", data$id, ""),
          las = 1, line = -5, cex = 0.75, padj = 0.3, col = "#428bca")

    # right y-axis
    axis(side = 4, at = c(0, cumsum(data$thickness), sum(data$thickness)),
         labels = FALSE, tck = -0.01)

    # right y-axis labels
    mtext(side = 4,
          at = c(0, cumsum(data$thickness) - data$thickness / 2, sum(data$thickness)),
          text = c("", paste(data$thickness, "cm"), ""),
          las = 1, line = ifelse(plot_single, -4, 0.5), cex = 0.8, col = "#b22222")

    # fill gap between lowermost layer and max range of pretty xlim
    polygon(x = c(0, 1, 1, 0),
            y = c(sum(data$thickness), sum(data$thickness),
                  max(range(pretty(c(sum(data$thickness), 0)))),
                  max(range(pretty(c(sum(data$thickness), 0))))),
            density = 10, angle = 45
    )

    # add sample
    points(x = 0.5, y = sum(data$thickness[1:target]) - data$sample_offset[target],
           pch = 13, col = "#b22222", cex = 3, lwd = 2)

    ## PLOT 2 - Nuclide contents
    ## --------------------------------------------------------------

    # global plot settings
    if (plot_single) {
      par(
        mar = c(4, 2, 3, 0.5) + 0.1,
        cex = 0.6,
        # oma = c(1, 1, 1, 1) + 0.1,
        pch = 16)
    } else {
      par(par.old)
      par(
        mfrow = c(1, 4),
        mar = c(4, 5, 0, 0) + 0.1,
        oma = c(1, 1, 1, 1) + 0.1,
        pch = 16)
    }

    # calculate fancy x-axis limits by considering the error bars
    calc_xlim <- function(x, se) {
      range(pretty(c(x - se, x + se)))
    }
    # horizontal error bars
    plot_error <- function(x, se) {
      segments(x - se, length(x):1,
               x + se, length(x):1)
      epsilon <- 0.1
      segments(x - se, length(x):1 - epsilon, x - se, length(x):1 + epsilon)
      segments(x + se, length(x):1 - epsilon, x + se, length(x):1 + epsilon)
    }

    # plot labels
    cols <- c("K", "Th", "U", "water_content")
    xlabs <- c("K content (%)", "Th content (ppm)", "U content (ppm)", "Water content (%)")

    # main plot
    for (i in 1:length(cols)) {
      # main
      plot(NA, NA,
           xlim = calc_xlim(data[[cols[i]]], data[[paste0(cols[i], "_se")]]),
           ylim = c(1, nrow(data)),
           ylab = "",
           xlab = xlabs[i],
           yaxt = "n")

      # vertical orientation lines
      abline(v = axTicks(side = 1), col = "grey", lty = 3, xpd = FALSE)

      # data points
      points(data[[cols[i]]], nrow(data):1, cex = 1.5)

      # errors
      plot_error(data[[cols[i]]], data[[paste0(cols[i], "_se")]])

      # y-axis label for the first plot
      if (i == 1)
        axis(2, at = nrow(data):1, labels = data$id, las = 1)
    }

    ## PLOT 3 - Contribution
    ## --------------------------------------------------------------

    ## Global plot settings
    # recover standard plot settings first
    if (plot_single) {
      par(mar = c(5, 5, 1, 6) + 0.1, cex = 0.7)
    } else {
      par(par.old)
      par(mar = c(5, 8, 4, 4) + 0.1, cex = settings$cex)
    }

    ## Set colors: target layer is blue, all other grey
    cols <- c("grey", "#428bca")
    pos <- rev(as.numeric(!is.na(data$sample_offset)) + 1)

    ## Contributions of each layer
    bp <- barplot(height = op$contrib[(nrow(op)-1):1],
                  horiz = TRUE,
                  main = ifelse(plot_single, "", settings$main),
                  xlab = settings$xlab,
                  xlim = range(pretty(op$contrib[1:(nrow(op)-1)])),
                  col = cols[pos])

    # layer names
    mtext(side = 2, at = bp, line = ifelse(plot_single, 3.5, 3),
          las = 1, cex = ifelse(plot_single, 0.7, 0.8),
          text = rev(data$id))

    # contribution percentage
    mtext(side = 2, at = rev(bp),
          text = paste(signif(op$contrib[1:(nrow(op) - 1)], 2), "%"),
          col = "#b22222", las = 1, line = 0.5, cex = 0.7)

    # absolute dose rate values (right side)
    mtext(side = 4, at = rev(bp),
          text = paste(c(" ", rep("+", nrow(op) - 2)), f(op$sum[1:(nrow(op) - 1)]), "Gy/ka"),
          col = "black", las = 1, line = -0.5, cex = 0.7)

    # sum of absolute dose rate values (-> scaled total gamma dose rate)
    mtext(side = 4, at = min(bp) - diff(bp)[1] / 2,
          text = paste("=", f(op$sum[nrow(op)]), "Gy/ka"),
          col = "#b22222", las = 1, line = ifelse(plot_single, -0.5, -1),
          cex = ifelse(plot_single, 0.7, 0.8))

    # recover old plot parameters
    par(par.old)
  }

  ## ------------------------------------------------------------------------ ##
  ## RETURN VALUE
  ## ------------------------------------------------------------------------ ##

  ## Infinite matrix dose rate table
  infinite_matrix <- data.frame(
    ID = data$id,
    K = op$K_inf[-(nrow(data)+1)],
    K_err = op$K_inf_se[-(nrow(data)+1)],
    Th = op$Th_inf[-(nrow(data)+1)],
    Th_err = op$Th_inf_se[-(nrow(data)+1)],
    U = op$U_inf[-(nrow(data)+1)],
    U_err = op$U_inf_se[-(nrow(data)+1)],
    Total = op$sum_inf[-(nrow(data)+1)]
  )

  ## Scaled dose rate table
  scaled_dose_rate <- data.frame(
    ID = c(data$id, "TOTAL"),
    K = op$K,
    K_err = op$K_se,
    Th = op$Th,
    Th_err = op$Th_se,
    U = op$U,
    U_err = op$U_se,
    Contribution = op$contrib
  )

  ## Summary table with the most important results
  summary <- data.frame(
    id = data$id[target],
    dose_rate_K = op$K[nrow(op)],
    dose_rate_K_err = op$K_se[nrow(op)],
    dose_rate_Th = op$Th[nrow(op)],
    dose_rate_Th_err = op$Th_se[nrow(op)],
    dose_rate_U = op$U[nrow(op)],
    dose_rate_U_err = op$U_se[nrow(op)],
    dose_rate_total = op$sum[length(op$sum)],
    dose_rate_total_err = op$sum_se[length(op$sum_se)]
  )

  ## Create RLum.Results object (return object)
  # NOTE: the misspelled element name 'infnite_matrix_dose_fractions' is kept
  # on purpose; renaming it would break downstream code accessing this slot
  results <- set_RLum(class = "RLum.Results",
                      originator = "scale_GammaDose",
                      data = list(summary = summary,
                                  data = data,
                                  dose_rates = list(
                                    infinite_matrix = infinite_matrix,
                                    scaled_dose_rate = scaled_dose_rate
                                  ),
                                  tables = list(
                                    conversion_factors = conv_fac,
                                    distances = distance,
                                    layer_fractional_dose_rate = layer_fracDoseRate,
                                    dose_rates = dose_rate,
                                    infnite_matrix_dose_fractions = Inf_frac,
                                    z_scale = z_scale
                                  ),
                                  args = as.list(sys.call()[-1]),
                                  call = sys.call()),
                      info = settings$info
  )

  return(results)
}
Luminescence/R/plot_AbanicoPlot.R0000644000176200001440000040346313604172511016453 0ustar liggesusers#' Function to create an Abanico Plot.
#' thought-provoking figure in this article.
#' The proportion of the polar part and the cartesian part of the Abanico Plot
#' can be modified for display reasons (`plot.ratio = 0.75`).
for the layout type `"journal"` #' `get_Layout("journal")`. A layout type can be modified by the user by #' assigning new values to the list object. #' #' It is possible for the z-scale to specify where ticks are to be drawn #' by using the parameter `at`, e.g. `at = seq(80, 200, 20)`, cf. function #' documentation of `axis`. Specifying tick positions manually overrides a #' `zlim`-definition. #' #' @param data [data.frame] or [RLum.Results-class] object (**required**): #' for `data.frame` two columns: De (`data[,1]`) and De error (`data[,2]`). #' To plot several data sets in one plot the data sets must be provided as #' `list`, e.g. `list(data.1, data.2)`. #' #' @param na.rm [logical] (*with default*): #' exclude NA values from the data set prior to any further operations. #' #' @param log.z [logical] (*with default*): #' Option to display the z-axis in logarithmic scale. Default is `TRUE`. #' #' @param z.0 [character] or [numeric]: #' User-defined central value, used for centering of data. One out of `"mean"`, #' `"mean.weighted"` and `"median"` or a numeric value (not its logarithm). #' Default is `"mean.weighted"`. #' #' @param dispersion [character] (*with default*): #' measure of dispersion, used for drawing the scatter polygon. One out of #' - `"qr"` (quartile range), #' - `"pnn"` (symmetric percentile range with nn the lower percentile, e.g. #' - `"p05"` depicting the range between 5 and 95 %), #' - `"sd"` (standard deviation) and #' - `"2sd"` (2 standard deviations), #' #' The default is `"qr"`. Note that `"sd"` and `"2sd"` are only meaningful in #' combination with `"z.0 = 'mean'"` because the unweighted mean is used to #' center the polygon. #' #' @param plot.ratio [numeric]: #' Relative space, given to the radial versus the cartesian plot part, #' default is `0.75`. #' #' @param rotate [logical]: #' Option to turn the plot by 90 degrees. #' #' @param mtext [character]: #' additional text below the plot title. 
#' #' @param summary [character] (*optional*): #' add statistic measures of centrality and dispersion to the plot. #' Can be one or more of several keywords. See details for available keywords. #' Results differ depending on the log-option for the z-scale (see details). #' #' @param summary.pos [numeric] or [character] (*with default*): #' optional position coordinates or keyword (e.g. `"topright"`) for the #' statistical summary. Alternatively, the keyword `"sub"` may be #' specified to place the summary below the plot header. However, this latter #' option is only possible if `mtext` is not used. #' #' @param summary.method [character] (*with default*): #' keyword indicating the method used to calculate the statistic summary. #' One out of #' - `"unweighted"`, #' - `"weighted"` and #' - `"MCM"`. #' #' See [calc_Statistics] for details. #' #' @param legend [character] vector (*optional*): #' legend content to be added to the plot. #' #' @param legend.pos [numeric] or [character] (*with default*): #' optional position coordinates or keyword (e.g. `"topright"`) #' for the legend to be plotted. #' #' @param stats [character]: #' additional labels of statistically important values in the plot. #' One or more out of the following: #' - `"min"`, #' - `"max"`, #' - `"median"`. #' #' @param rug [logical]: #' Option to add a rug to the KDE part, to indicate the location of individual values. #' #' @param kde [logical]: #' Option to add a KDE plot to the dispersion part, default is `TRUE`. #' #' @param hist [logical]: #' Option to add a histogram to the dispersion part. Only meaningful when not #' more than one data set is plotted. #' #' @param dots [logical]: #' Option to add a dot plot to the dispersion part. If number of dots exceeds #' space in the dispersion part, a square indicates this. #' #' @param boxplot [logical]: #' Option to add a boxplot to the dispersion part, default is `FALSE`. #' #' @param y.axis [logical]: #' Option to hide y-axis labels. 
Useful for data with small scatter. #' #' @param error.bars [logical]: #' Option to show De-errors as error bars on De-points. Useful in combination #' with `y.axis = FALSE, bar.col = "none"`. #' #' @param bar [numeric] (*with default*): #' option to add one or more dispersion bars (i.e., bar showing the 2-sigma range) #' centered at the defined values. By default a bar is drawn according to `"z.0"`. #' To omit the bar set `"bar = FALSE"`. #' #' @param bar.col [character] or [numeric] (*with default*): #' colour of the dispersion bar. Default is `"grey60"`. #' #' @param polygon.col [character] or [numeric] (*with default*): #' colour of the polygon showing the data scatter. Sometimes this #' polygon may be omitted for clarity. To disable it use `FALSE` or #' `polygon = FALSE`. Default is `"grey80"`. #' #' @param line [numeric]: #' numeric values of the additional lines to be added. #' #' @param line.col [character] or [numeric]: #' colour of the additional lines. #' #' @param line.lty [integer]: #' line type of additional lines #' #' @param line.label [character]: #' labels for the additional lines. #' #' @param grid.col [character] or [numeric] (*with default*): #' colour of the grid lines (originating at `[0,0]` and stretching to #' the z-scale). To disable grid lines use `FALSE`. Default is `"grey"`. #' #' @param frame [numeric] (*with default*): #' option to modify the plot frame type. Can be one out of #' - `0` (no frame), #' - `1` (frame originates at 0,0 and runs along min/max isochrons), #' - `2` (frame embraces the 2-sigma bar), #' - `3` (frame embraces the entire plot as a rectangle). #' #' Default is `1`. #' #' @param bw [character] (*with default*): #' bin-width for KDE, choose a numeric value for manual setting. #' #' @param output [logical]: #' Optional output of numerical plot parameters. These can be useful to #' reproduce similar plots. Default is `TRUE`. 
#' #' @param interactive [logical] (*with default*): #' create an interactive abanico plot (requires the 'plotly' package) #' #' @param ... Further plot arguments to pass. `xlab` must be a vector of #' length 2, specifying the upper and lower x-axes labels. #' #' @return #' returns a plot object and, optionally, a list with plot calculus data. #' #' @section Function version: 0.1.11 #' #' @author #' Michael Dietze, GFZ Potsdam (Germany)\cr #' Sebastian Kreutzer, RAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr #' Inspired by a plot introduced by Galbraith & Green (1990) #' #' @seealso [plot_RadialPlot], [plot_KDE], [plot_Histogram] #' #' @references #' Galbraith, R. & Green, P., 1990. Estimating the component ages #' in a finite mixture. International Journal of Radiation Applications and #' Instrumentation. Part D. Nuclear Tracks and Radiation Measurements, 17 (3), #' 197-206. #' #' Dietze, M., Kreutzer, S., Burow, C., Fuchs, M.C., Fischer, M., Schmidt, C., 2015. #' The abanico plot: visualising chronometric data with individual standard errors. #' Quaternary Geochronology. 
doi:10.1016/j.quageo.2015.09.003 #' #' @examples #' #' ## load example data and recalculate to Gray #' data(ExampleData.DeValues, envir = environment()) #' ExampleData.DeValues <- ExampleData.DeValues$CA1 #' #' ## plot the example data straightforward #' plot_AbanicoPlot(data = ExampleData.DeValues) #' #' ## now with linear z-scale #' plot_AbanicoPlot(data = ExampleData.DeValues, #' log.z = FALSE) #' #' ## now with output of the plot parameters #' plot1 <- plot_AbanicoPlot(data = ExampleData.DeValues, #' output = TRUE) #' str(plot1) #' plot1$zlim #' #' ## now with adjusted z-scale limits #' plot_AbanicoPlot(data = ExampleData.DeValues, #' zlim = c(10, 200)) #' #' ## now with adjusted x-scale limits #' plot_AbanicoPlot(data = ExampleData.DeValues, #' xlim = c(0, 20)) #' #' ## now with rug to indicate individual values in KDE part #' plot_AbanicoPlot(data = ExampleData.DeValues, #' rug = TRUE) #' #' ## now with a smaller bandwidth for the KDE plot #' plot_AbanicoPlot(data = ExampleData.DeValues, #' bw = 0.04) #' #' ## now with a histogram instead of the KDE plot #' plot_AbanicoPlot(data = ExampleData.DeValues, #' hist = TRUE, #' kde = FALSE) #' #' ## now with a KDE plot and histogram with manual number of bins #' plot_AbanicoPlot(data = ExampleData.DeValues, #' hist = TRUE, #' breaks = 20) #' #' ## now with a KDE plot and a dot plot #' plot_AbanicoPlot(data = ExampleData.DeValues, #' dots = TRUE) #' #' ## now with user-defined plot ratio #' plot_AbanicoPlot(data = ExampleData.DeValues, #' plot.ratio = 0.5) #' ## now with user-defined central value #' plot_AbanicoPlot(data = ExampleData.DeValues, #' z.0 = 70) #' #' ## now with median as central value #' plot_AbanicoPlot(data = ExampleData.DeValues, #' z.0 = "median") #' #' ## now with the 17-83 percentile range as definition of scatter #' plot_AbanicoPlot(data = ExampleData.DeValues, #' z.0 = "median", #' dispersion = "p17") #' #' ## now with user-defined green line for minimum age model #' CAM <- 
calc_CentralDose(ExampleData.DeValues, #' plot = FALSE) #' #' plot_AbanicoPlot(data = ExampleData.DeValues, #' line = CAM, #' line.col = "darkgreen", #' line.label = "CAM") #' #' ## now create plot with legend, colour, different points and smaller scale #' plot_AbanicoPlot(data = ExampleData.DeValues, #' legend = "Sample 1", #' col = "tomato4", #' bar.col = "peachpuff", #' pch = "R", #' cex = 0.8) #' #' ## now without 2-sigma bar, polygon, grid lines and central value line #' plot_AbanicoPlot(data = ExampleData.DeValues, #' bar.col = FALSE, #' polygon.col = FALSE, #' grid.col = FALSE, #' y.axis = FALSE, #' lwd = 0) #' #' ## now with direct display of De errors, without 2-sigma bar #' plot_AbanicoPlot(data = ExampleData.DeValues, #' bar.col = FALSE, #' ylab = "", #' y.axis = FALSE, #' error.bars = TRUE) #' #' ## now with user-defined axes labels #' plot_AbanicoPlot(data = ExampleData.DeValues, #' xlab = c("Data error (%)", #' "Data precision"), #' ylab = "Scatter", #' zlab = "Equivalent dose [Gy]") #' #' ## now with minimum, maximum and median value indicated #' plot_AbanicoPlot(data = ExampleData.DeValues, #' stats = c("min", "max", "median")) #' #' ## now with a brief statistical summary as subheader #' plot_AbanicoPlot(data = ExampleData.DeValues, #' summary = c("n", "in.2s")) #' #' ## now with another statistical summary #' plot_AbanicoPlot(data = ExampleData.DeValues, #' summary = c("mean.weighted", "median"), #' summary.pos = "topleft") #' #' ## now a plot with two 2-sigma bars for one data set #' plot_AbanicoPlot(data = ExampleData.DeValues, #' bar = c(30, 100)) #' #' ## now the data set is split into sub-groups, one is manipulated #' data.1 <- ExampleData.DeValues[1:30,] #' data.2 <- ExampleData.DeValues[31:62,] * 1.3 #' #' ## now a common dataset is created from the two subgroups #' data.3 <- list(data.1, data.2) #' #' ## now the two data sets are plotted in one plot #' plot_AbanicoPlot(data = data.3) #' #' ## now with some graphical modification #' 
plot_AbanicoPlot(data = data.3, #' z.0 = "median", #' col = c("steelblue4", "orange4"), #' bar.col = c("steelblue3", "orange3"), #' polygon.col = c("steelblue1", "orange1"), #' pch = c(2, 6), #' angle = c(30, 50), #' summary = c("n", "in.2s", "median")) #' #' ## create Abanico plot with predefined layout definition #' plot_AbanicoPlot(data = ExampleData.DeValues, #' layout = "journal") #' #' ## now with predefined layout definition and further modifications #' plot_AbanicoPlot(data = data.3, #' z.0 = "median", #' layout = "journal", #' col = c("steelblue4", "orange4"), #' bar.col = adjustcolor(c("steelblue3", "orange3"), #' alpha.f = 0.5), #' polygon.col = c("steelblue3", "orange3")) #' #' ## for further information on layout definitions see documentation #' ## of function get_Layout() #' #' ## now with manually added plot content #' ## create empty plot with numeric output #' AP <- plot_AbanicoPlot(data = ExampleData.DeValues, #' pch = NA, #' output = TRUE) #' #' ## identify data in 2 sigma range #' in_2sigma <- AP$data[[1]]$data.in.2s #' #' ## restore function-internal plot parameters #' par(AP$par) #' #' ## add points inside 2-sigma range #' points(x = AP$data[[1]]$precision[in_2sigma], #' y = AP$data[[1]]$std.estimate.plot[in_2sigma], #' pch = 16) #' #' ## add points outside 2-sigma range #' points(x = AP$data[[1]]$precision[!in_2sigma], #' y = AP$data[[1]]$std.estimate.plot[!in_2sigma], #' pch = 1) #' #' @md #' @export plot_AbanicoPlot <- function( data, na.rm = TRUE, log.z = TRUE, z.0 = "mean.weighted", dispersion = "qr", plot.ratio = 0.75, rotate = FALSE, mtext, summary, summary.pos, summary.method = "MCM", legend, legend.pos, stats, rug = FALSE, kde = TRUE, hist = FALSE, dots = FALSE, boxplot = FALSE, y.axis = TRUE, error.bars = FALSE, bar, bar.col, polygon.col, line, line.col, line.lty, line.label, grid.col, frame = 1, bw = "SJ", output = TRUE, interactive = FALSE, ... 
) { ## check data and parameter consistency-------------------------------------- ## Homogenise input data format if(is(data, "list") == FALSE) { data <- list(data) } ## Check input data for(i in 1:length(data)) { if(is(data[[i]], "RLum.Results") == FALSE & is(data[[i]], "data.frame") == FALSE) { stop(paste("[plot_AbanicoPlot()] Input data format is neither", "'data.frame' nor 'RLum.Results'")) } else { if(is(data[[i]], "RLum.Results") == TRUE) { data[[i]] <- get_RLum(data[[i]], "data")[,c(1:2)] } } } ## optionally, remove NA-values if(na.rm == TRUE) { for(i in 1:length(data)) { n.NA <- sum(!complete.cases(data[[i]])) if(n.NA == 1) {message(paste0("[plot_AbanicoPlot()] data set (", i, "): 1 NA value excluded.")) } else if(n.NA > 1) { message(paste0("[plot_AbanicoPlot()] data set (", i,"): ", n.NA, " NA values excluded.")) } data[[i]] <- na.exclude(data[[i]]) } } ##AFTER NA removal, we should check the data set carefully again ... ##(1) ##check if there is still data left in the entire set if(all(sapply(data, nrow) == 0)){ try(stop("[plot_AbanicoPlot()] Nothing plotted, your data set is empty!", call. = FALSE)) return(NULL) } ##(2) ##check for sets with only 1 row or 0 rows at all else if(any(sapply(data, nrow) <= 1)){ ##select problematic sets and remove the entries from the list NArm.id <- which(sapply(data, nrow) <= 1) data[NArm.id] <- NULL warning(paste0("[plot_AbanicoPlot()] Data sets ", paste(NArm.id, collapse = ", "), " are found to be empty or consisting of only 1 row. Sets removed!")) rm(NArm.id) ##unfortunately, the data set might become now empty at all if(length(data) == 0){ try(stop("[plot_AbanicoPlot()] After removing invalid entries, nothing is plotted!", call. 
= FALSE)) return(NULL) } } ## check for zero-error values for(i in 1:length(data)) { if(length(data[[i]]) < 2) { stop("Data without errors cannot be displayed!") } if(sum(data[[i]][,2] == 0) > 0) { data[[i]] <- data[[i]][data[[i]][,2] > 0,] if(nrow(data[[i]]) < 1) { stop("[plot_AbanicoPlot()] Data set contains only values with zero errors.", call. = FALSE) } warning("[plot_AbanicoPlot()] values with zero errors cannot be displayed and were removed!",call. = FALSE) } } ## save original plot parameters and restore them upon end or stop par.old.full <- par(no.readonly = TRUE) cex_old <- par()$cex ## this ensures par() is respected for several plots on one page if(sum(par()$mfrow) == 2 & sum(par()$mfcol) == 2){ on.exit(par(par.old.full)) } ## check/set layout definitions if("layout" %in% names(list(...))) { layout = get_Layout(layout = list(...)$layout) } else { layout <- get_Layout(layout = "default") } if(missing(stats) == TRUE) { stats <- numeric(0) } if(missing(bar) == TRUE) { bar <- rep(TRUE, length(data)) } if(missing(bar.col) == TRUE) { bar.fill <- rep(x = rep(x = layout$abanico$colour$bar.fill, length.out = length(data)), length(bar)) bar.line <- rep(rep(layout$abanico$colour$bar.line, length.out = length(data)), length(bar)) } else { bar.fill <- bar.col bar.line <- NA } if(missing(polygon.col) == TRUE) { polygon.fill <- rep(layout$abanico$colour$poly.fill, length.out = length(data)) polygon.line <- rep(layout$abanico$colour$poly.line, length.out = length(data)) } else { polygon.fill <- polygon.col polygon.line <- NA } if(missing(grid.col) == TRUE) { grid.major <- layout$abanico$colour$grid.major grid.minor <- layout$abanico$colour$grid.minor } else { if(length(grid.col) == 1) { grid.major <- grid.col[1] grid.minor <- grid.col[1] } else { grid.major <- grid.col[1] grid.minor <- grid.col[2] } } if(missing(summary) == TRUE) { summary <- c("n", "in.2s") } if(missing(summary.pos) == TRUE) { summary.pos <- "sub" } if(missing(mtext) == TRUE) { mtext <- "" } ## create 
preliminary global data set De.global <- data[[1]][,1] if(length(data) > 1) { for(i in 2:length(data)) { De.global <- c(De.global, data[[i]][,1]) } } ## calculate major preliminary tick values and tick difference extraArgs <- list(...) if("zlim" %in% names(extraArgs)) { limits.z <- extraArgs$zlim } else { z.span <- (mean(De.global) * 0.5) / (sd(De.global) * 100) z.span <- ifelse(z.span > 1, 0.9, z.span) limits.z <- c((ifelse(min(De.global) <= 0, 1.1, 0.9) - z.span) * min(De.global), (1.1 + z.span) * max(De.global)) } if("at" %in% names(extraArgs)) { ticks <- extraArgs$at } else { ticks <- round(pretty(limits.z, n = 5), 3) } if("breaks" %in% names(extraArgs)) { breaks <- extraArgs$breaks } else { breaks <- "Sturges" } ## check/set bw-parameter for(i in 1:length(data)) { bw.test <- try(density(x = data[[i]][,1], bw = bw), silent = TRUE) if(grepl(pattern = "Error", x = bw.test[1]) == TRUE) { bw <- "nrd0" warning("[plot_AbanicoPlot()] Option for bw not possible. Set to nrd0!", call. = FALSE) } } if ("fun" %in% names(extraArgs)) { fun <- list(...)$fun } else { fun <- FALSE } ## check for negative values, stop function, but do not stop if(min(De.global) < 0) { if("zlim" %in% names(extraArgs)) { De.add <- abs(extraArgs$zlim[1]) } else { ## estimate delta De to add to all data De.add <- min(10^ceiling(log10(abs(De.global))) * 10) ## optionally readjust delta De for extreme values if(De.add <= abs(min(De.global))) { De.add <- De.add * 10 } } } else { De.add <- 0 } ## optionally add correction dose to data set and adjust error if(log.z == TRUE) { for(i in 1:length(data)) { data[[i]][,1] <- data[[i]][,1] + De.add } De.global <- De.global + De.add } ## calculate and append statistical measures -------------------------------- ## z-values based on log-option z <- lapply(1:length(data), function(x){ if(log.z == TRUE) { log(data[[x]][,1]) } else { data[[x]][,1] } }) if(is(z, "list") == FALSE) { z <- list(z) } data <- lapply(1:length(data), function(x) { cbind(data[[x]], z[[x]]) 
}) rm(z) ## calculate dispersion based on log-option se <- lapply(1:length(data), function(x, De.add){ if(log.z == TRUE) { if(De.add != 0) { data[[x]][,2] <- data[[x]][,2] / (data[[x]][,1] + De.add) } else { data[[x]][,2] / data[[x]][,1] } } else { data[[x]][,2] }}, De.add = De.add) if(is(se, "list") == FALSE) { se <- list(se) } data <- lapply(1:length(data), function(x) { cbind(data[[x]], se[[x]]) }) rm(se) ## calculate initial data statistics stats.init <- list(NA) for(i in 1:length(data)) { stats.init[[length(stats.init) + 1]] <- calc_Statistics(data = data[[i]][,3:4]) } stats.init[[1]] <- NULL ## calculate central values if(z.0 == "mean") { z.central <- lapply(1:length(data), function(x){ rep(stats.init[[x]]$unweighted$mean, length(data[[x]][,3]))}) } else if(z.0 == "median") { z.central <- lapply(1:length(data), function(x){ rep(stats.init[[x]]$unweighted$median, length(data[[x]][,3]))}) } else if(z.0 == "mean.weighted") { z.central <- lapply(1:length(data), function(x){ rep(stats.init[[x]]$weighted$mean, length(data[[x]][,3]))}) } else if(is.numeric(z.0) == TRUE) { z.central <- lapply(1:length(data), function(x){ rep(ifelse(log.z == TRUE, log(z.0), z.0), length(data[[x]][,3]))}) } else { stop("Value for z.0 not supported!") } data <- lapply(1:length(data), function(x) { cbind(data[[x]], z.central[[x]])}) rm(z.central) ## calculate precision precision <- lapply(1:length(data), function(x){ 1 / data[[x]][,4]}) if(is(precision, "list") == FALSE) {precision <- list(precision)} data <- lapply(1:length(data), function(x) { cbind(data[[x]], precision[[x]])}) rm(precision) ## calculate standardised estimate std.estimate <- lapply(1:length(data), function(x){ (data[[x]][,3] - data[[x]][,5]) / data[[x]][,4]}) if(is(std.estimate, "list") == FALSE) {std.estimate <- list(std.estimate)} data <- lapply(1:length(data), function(x) { cbind(data[[x]], std.estimate[[x]])}) ## append empty standard estimate for plotting data <- lapply(1:length(data), function(x) { 
cbind(data[[x]], std.estimate[[x]])}) rm(std.estimate) ## append optional weights for KDE curve if("weights" %in% names(extraArgs)) { if(extraArgs$weights == TRUE) { wgt <- lapply(1:length(data), function(x){ (1 / data[[x]][,2]) / sum(1 / data[[x]][,2]^2) }) if(is(wgt, "list") == FALSE) { wgt <- list(wgt) } data <- lapply(1:length(data), function(x) { cbind(data[[x]], wgt[[x]])}) rm(wgt) } else { wgt <- lapply(1:length(data), function(x){ rep(x = 1, times = nrow(data[[x]])) / sum(rep(x = 1, times = nrow(data[[x]]))) }) if(is(wgt, "list") == FALSE) { wgt <- list(wgt) } data <- lapply(1:length(data), function(x) { cbind(data[[x]], wgt[[x]])}) rm(wgt) } } else { wgt <- lapply(1:length(data), function(x){ rep(x = 1, times = nrow(data[[x]])) / sum(rep(x = 1, times = nrow(data[[x]]))) }) if(is(wgt, "list") == FALSE) { wgt <- list(wgt) } data <- lapply(1:length(data), function(x) { cbind(data[[x]], wgt[[x]])}) rm(wgt) } ## generate global data set data.global <- cbind(data[[1]], rep(x = 1, times = nrow(data[[1]]))) colnames(data.global) <- rep("", 10) if(length(data) > 1) { for(i in 2:length(data)) { data.add <- cbind(data[[i]], rep(x = i, times = nrow(data[[i]]))) colnames(data.add) <- rep("", 10) data.global <- rbind(data.global, data.add) } } ## create column names colnames(data.global) <- c("De", "error", "z", "se", "z.central", "precision", "std.estimate", "std.estimate.plot", "weights", "data set") ## calculate global data statistics stats.global <- calc_Statistics(data = data.global[,3:4]) ## calculate global central value if(z.0 == "mean") { z.central.global <- stats.global$unweighted$mean } else if(z.0 == "median") { z.central.global <- stats.global$unweighted$median } else if(z.0 == "mean.weighted") { z.central.global <- stats.global$weighted$mean } else if(is.numeric(z.0) == TRUE) { z.central.global <- ifelse(log.z == TRUE, log(z.0), z.0) } else { stop("Value for z.0 not supported!") } ## create column names for(i in 1:length(data)) { colnames(data[[i]]) <- 
c("De", "error", "z", "se", "z.central", "precision", "std.estimate", "std.estimate.plot", "weights") } ## re-calculate standardised estimate for plotting for(i in 1:length(data)) { data[[i]][,8] <- (data[[i]][,3] - z.central.global) / data[[i]][,4] } data.global.plot <- data[[1]][,8] if(length(data) > 1) { for(i in 2:length(data)) { data.global.plot <- c(data.global.plot, data[[i]][,8]) } } data.global[,8] <- data.global.plot ## print message for too small scatter if(max(abs(1 / data.global[6])) < 0.02) { small.sigma <- TRUE message("[plot_AbanicoPlot()] Attention, small standardised estimate scatter. Toggle off y.axis?") } ## read out additional arguments--------------------------------------------- extraArgs <- list(...) main <- if("main" %in% names(extraArgs)) { extraArgs$main } else { expression(paste(D[e], " distribution")) } sub <- if("sub" %in% names(extraArgs)) { extraArgs$sub } else { "" } if("xlab" %in% names(extraArgs)) { if(length(extraArgs$xlab) != 2) { if (length(extraArgs$xlab) == 3) { xlab <- c(extraArgs$xlab[1:2], "Density") } else { stop("Argmuent xlab is not of length 2!") } } else {xlab <- c(extraArgs$xlab, "Density")} } else { xlab <- c(if(log.z == TRUE) { "Relative standard error (%)" } else { "Standard error" }, "Precision", "Density") } ylab <- if("ylab" %in% names(extraArgs)) { extraArgs$ylab } else { "Standardised estimate" } zlab <- if("zlab" %in% names(extraArgs)) { extraArgs$zlab } else { expression(paste(D[e], " [Gy]")) } if("zlim" %in% names(extraArgs)) { limits.z <- extraArgs$zlim } else { z.span <- (mean(data.global[,1]) * 0.5) / (sd(data.global[,1]) * 100) z.span <- ifelse(z.span > 1, 0.9, z.span) limits.z <- c((0.9 - z.span) * min(data.global[[1]]), (1.1 + z.span) * max(data.global[[1]])) } if("xlim" %in% names(extraArgs)) { limits.x <- extraArgs$xlim } else { limits.x <- c(0, max(data.global[,6]) * 1.05) } if(limits.x[1] != 0) { limits.x[1] <- 0 warning("Lower x-axis limit not set to zero, issue corrected!") } if("ylim" %in% 
names(extraArgs)) { limits.y <- extraArgs$ylim } else { y.span <- (mean(data.global[,1]) * 10) / (sd(data.global[,1]) * 100) y.span <- ifelse(y.span > 1, 0.98, y.span) limits.y <- c(-(1 + y.span) * max(abs(data.global[,7])), (1 + y.span) * max(abs(data.global[,7]))) } cex <- if("cex" %in% names(extraArgs)) { extraArgs$cex } else { 1 } lty <- if("lty" %in% names(extraArgs)) { extraArgs$lty } else { rep(rep(2, length(data)), length(bar)) } lwd <- if("lwd" %in% names(extraArgs)) { extraArgs$lwd } else { rep(rep(1, length(data)), length(bar)) } pch <- if("pch" %in% names(extraArgs)) { extraArgs$pch } else { rep(20, length(data)) } if("col" %in% names(extraArgs)) { bar.col <- extraArgs$col kde.line <- extraArgs$col kde.fill <- NA value.dot <- extraArgs$col value.bar <- extraArgs$col value.rug <- extraArgs$col summary.col <- extraArgs$col centrality.col <- extraArgs$col } else { if(length(layout$abanico$colour$bar) == 1) { bar.col <- 1:length(data) } else { bar.col <- layout$abanico$colour$bar.col } if(length(layout$abanico$colour$kde.line) == 1) { kde.line <- 1:length(data) } else { kde.line <- layout$abanico$colour$kde.line } if(length(layout$abanico$colour$kde.fill) == 1) { kde.fill <- rep(layout$abanico$colour$kde.fill, length(data)) } else { kde.fill <- layout$abanico$colour$kde.fill } if(length(layout$abanico$colour$value.dot) == 1) { value.dot <- 1:length(data) } else { value.dot <- layout$abanico$colour$value.dot } if(length(layout$abanico$colour$value.bar) == 1) { value.bar <- 1:length(data) } else { value.bar <- layout$abanico$colour$value.bar } if(length(layout$abanico$colour$value.rug) == 1) { value.rug <- 1:length(data) } else { value.rug <- layout$abanico$colour$value.rug } if(length(layout$abanico$colour$summary) == 1) { summary.col <- 1:length(data) } else { summary.col <- layout$abanico$colour$summary } if(length(layout$abanico$colour$centrality) == 1) { centrality.col <- rep(x = 1:length(data), times = length(bar)) } else { centrality.col <- rep(x = 
layout$abanico$colour$centrality, times = length(bar)) } } ## update central line colour centrality.col <- rep(centrality.col, length(bar)) tck <- if("tck" %in% names(extraArgs)) { extraArgs$tck } else { NA } tcl <- if("tcl" %in% names(extraArgs)) { extraArgs$tcl } else { -0.5 } ## define auxiliary plot parameters ----------------------------------------- ## set space between z-axis and baseline of cartesian part if(boxplot == TRUE) { lostintranslation <- 1.03 } else { lostintranslation <- 1.03 plot.ratio <- plot.ratio * 1.05 } ## create empty plot to update plot parameters if(rotate == FALSE) { plot(NA, xlim = c(limits.x[1], limits.x[2] * (1 / plot.ratio)), ylim = limits.y, main = "", sub = "", xlab = "", ylab = "", xaxs = "i", yaxs = "i", frame.plot = FALSE, axes = FALSE) } else { plot(NA, xlim = limits.y, ylim = c(limits.x[1], limits.x[2] * (1 / plot.ratio)), main = "", sub = "", xlab = "", ylab = "", xaxs = "i", yaxs = "i", frame.plot = FALSE, axes = FALSE) } ## calculate conversion factor for plot coordinates f <- 0 ## calculate major and minor z-tick values if("at" %in% names(extraArgs)) { tick.values.major <- extraArgs$at tick.values.minor <- extraArgs$at } else { tick.values.major <- signif(pretty(limits.z, n = 5), 3) tick.values.minor <- signif(pretty(limits.z, n = 25), 3) } tick.values.major <- tick.values.major[tick.values.major >= min(tick.values.minor)] tick.values.major <- tick.values.major[tick.values.major <= max(tick.values.minor)] tick.values.major <- tick.values.major[tick.values.major >= limits.z[1]] tick.values.major <- tick.values.major[tick.values.major <= limits.z[2]] tick.values.minor <- tick.values.minor[tick.values.minor >= limits.z[1]] tick.values.minor <- tick.values.minor[tick.values.minor <= limits.z[2]] if(log.z == TRUE) { tick.values.major[which(tick.values.major==0)] <- 1 tick.values.minor[which(tick.values.minor==0)] <- 1 tick.values.major <- log(tick.values.major) tick.values.minor <- log(tick.values.minor) } ## calculate z-axis 
radius r <- max(sqrt((limits.x[2])^2 + (data.global[,7] * f)^2)) ## create z-axes labels if(log.z == TRUE) { label.z.text <- signif(exp(tick.values.major), 3) } else { label.z.text <- signif(tick.values.major, 3) } ## calculate node coordinates for semi-circle ellipse.values <- c(min(ifelse(log.z == TRUE, log(limits.z[1]), limits.z[1]), tick.values.major, tick.values.minor), max(ifelse(log.z == TRUE, log(limits.z[2]), limits.z[2]), tick.values.major, tick.values.minor)) ## correct for unpleasant value ellipse.values[ellipse.values == -Inf] <- 0 if(rotate == FALSE) { ellipse.x <- r / sqrt(1 + f^2 * (ellipse.values - z.central.global)^2) ellipse.y <- (ellipse.values - z.central.global) * ellipse.x } else { ellipse.y <- r / sqrt(1 + f^2 * (ellipse.values - z.central.global)^2) ellipse.x <- (ellipse.values - z.central.global) * ellipse.y } ellipse <- cbind(ellipse.x, ellipse.y) ## calculate statistical labels if(length(stats == 1)) {stats <- rep(stats, 2)} stats.data <- matrix(nrow = 3, ncol = 3) data.stats <- as.numeric(data.global[,1]) if("min" %in% stats == TRUE) { stats.data[1, 3] <- data.stats[data.stats == min(data.stats)][1] stats.data[1, 1] <- data.global[data.stats == stats.data[1, 3], 6][1] stats.data[1, 2] <- data.global[data.stats == stats.data[1, 3], 8][1] } if("max" %in% stats == TRUE) { stats.data[2, 3] <- data.stats[data.stats == max(data.stats)][1] stats.data[2, 1] <- data.global[data.stats == stats.data[2, 3], 6][1] stats.data[2, 2] <- data.global[data.stats == stats.data[2, 3], 8][1] } if("median" %in% stats == TRUE) { stats.data[3, 3] <- data.stats[data.stats == quantile(data.stats, 0.5, type = 3)] stats.data[3, 1] <- data.global[data.stats == stats.data[3, 3], 6][1] stats.data[3, 2] <- data.global[data.stats == stats.data[3, 3], 8][1] } ## re-calculate axes limits if necessary if(rotate == FALSE) { limits.z.x <- range(ellipse[,1]) limits.z.y <- range(ellipse[,2]) } else { limits.z.x <- range(ellipse[,2]) limits.z.y <- range(ellipse[,1]) } 
if(!("ylim" %in% names(extraArgs))) { if(limits.z.y[1] < 0.66 * limits.y[1]) { limits.y[1] <- 1.8 * limits.z.y[1] } if(limits.z.y[2] > 0.77 * limits.y[2]) { limits.y[2] <- 1.3 * limits.z.y[2] } if(rotate == TRUE) { limits.y <- c(-max(abs(limits.y)), max(abs(limits.y))) } } if(!("xlim" %in% names(extraArgs))) { if(limits.z.x[2] > 1.1 * limits.x[2]) { limits.x[2] <- limits.z.x[2] } } ## calculate and paste statistical summary De.stats <- matrix(nrow = length(data), ncol = 12) colnames(De.stats) <- c("n", "mean", "median", "kde.max", "sd.abs", "sd.rel", "se.abs", "se.rel", "q.25", "q.75", "skewness", "kurtosis") for(i in 1:length(data)) { statistics <- calc_Statistics(data[[i]])[[summary.method]] statistics.2 <- calc_Statistics(data[[i]][,3:4])[[summary.method]] De.stats[i,1] <- statistics$n De.stats[i,2] <- statistics.2$mean De.stats[i,3] <- statistics.2$median De.stats[i,5] <- statistics$sd.abs De.stats[i,6] <- statistics$sd.rel De.stats[i,7] <- statistics$se.abs De.stats[i,8] <- statistics$se.rel De.stats[i,9] <- quantile(data[[i]][,1], 0.25) De.stats[i,10] <- quantile(data[[i]][,1], 0.75) De.stats[i,11] <- statistics$skewness De.stats[i,12] <- statistics$kurtosis ## account for log.z-option if(log.z == TRUE) { De.stats[i,2:4] <- exp(De.stats[i,2:4]) } ## kdemax - here a little doubled as it appears below again De.density <- try(density(x = data[[i]][,1], kernel = "gaussian", bw = bw, from = limits.z[1], to = limits.z[2]), silent = TRUE) if(class(De.density) == "try-error") { De.stats[i,4] <- NA } else { De.stats[i,4] <- De.density$x[which.max(De.density$y)] } } label.text = list(NA) if(summary.pos[1] != "sub") { n.rows <- length(summary) for(i in 1:length(data)) { stops <- paste(rep("\n", (i - 1) * n.rows), collapse = "") summary.text <- character(0) for(j in 1:length(summary)) { summary.text <- c(summary.text, paste( "", ifelse("n" %in% summary[j] == TRUE, paste("n = ", De.stats[i,1], "\n", sep = ""), ""), ifelse("mean" %in% summary[j] == TRUE, paste("mean = ", 
round(De.stats[i,2], 2), "\n", sep = ""), ""), ifelse("median" %in% summary[j] == TRUE, paste("median = ", round(De.stats[i,3], 2), "\n", sep = ""), ""), ifelse("kde.max" %in% summary[j] == TRUE, paste("kdemax = ", round(De.stats[i,4], 2), " \n ", sep = ""), ""), ifelse("sd.abs" %in% summary[j] == TRUE, paste("abs. sd = ", round(De.stats[i,5], 2), "\n", sep = ""), ""), ifelse("sd.rel" %in% summary[j] == TRUE, paste("rel. sd = ", round(De.stats[i,6], 2), " %", "\n", sep = ""), ""), ifelse("se.abs" %in% summary[j] == TRUE, paste("se = ", round(De.stats[i,7], 2), "\n", sep = ""), ""), ifelse("se.rel" %in% summary[j] == TRUE, paste("rel. se = ", round(De.stats[i,8], 2), " %", "\n", sep = ""), ""), ifelse("skewness" %in% summary[j] == TRUE, paste("skewness = ", round(De.stats[i,11], 2), "\n", sep = ""), ""), ifelse("kurtosis" %in% summary[j] == TRUE, paste("kurtosis = ", round(De.stats[i,12], 2), "\n", sep = ""), ""), ifelse("in.2s" %in% summary[j] == TRUE, paste("in 2 sigma = ", round(sum(data[[i]][,7] > -2 & data[[i]][,7] < 2) / nrow(data[[i]]) * 100 , 1), " %", sep = ""), ""), sep = "")) } summary.text <- paste(summary.text, collapse = "") label.text[[length(label.text) + 1]] <- paste(stops, summary.text, stops, sep = "") } } else { for(i in 1:length(data)) { summary.text <- character(0) for(j in 1:length(summary)) { summary.text <- c(summary.text, ifelse("n" %in% summary[j] == TRUE, paste("n = ", De.stats[i,1], " | ", sep = ""), ""), ifelse("mean" %in% summary[j] == TRUE, paste("mean = ", round(De.stats[i,2], 2), " | ", sep = ""), ""), ifelse("median" %in% summary[j] == TRUE, paste("median = ", round(De.stats[i,3], 2), " | ", sep = ""), ""), ifelse("kde.max" %in% summary[j] == TRUE, paste("kdemax = ", round(De.stats[i,4], 2), " | ", sep = ""), ""), ifelse("sd.abs" %in% summary[j] == TRUE, paste("abs. sd = ", round(De.stats[i,5], 2), " | ", sep = ""), ""), ifelse("sd.rel" %in% summary[j] == TRUE, paste("rel. 
sd = ", round(De.stats[i,6], 2), " %", " | ", sep = ""), ""), ifelse("se.abs" %in% summary[j] == TRUE, paste("abs. se = ", round(De.stats[i,7], 2), " | ", sep = ""), ""), ifelse("se.rel" %in% summary[j] == TRUE, paste("rel. se = ", round(De.stats[i,8], 2), " %", " | ", sep = ""), ""), ifelse("skewness" %in% summary[j] == TRUE, paste("skewness = ", round(De.stats[i,11], 2), " | ", sep = ""), ""), ifelse("kurtosis" %in% summary[j] == TRUE, paste("kurtosis = ", round(De.stats[i,12], 2), " | ", sep = ""), ""), ifelse("in.2s" %in% summary[j] == TRUE, paste("in 2 sigma = ", round(sum(data[[i]][,7] > -2 & data[[i]][,7] < 2) / nrow(data[[i]]) * 100 , 1), " % | ", sep = ""), "") ) } summary.text <- paste(summary.text, collapse = "") label.text[[length(label.text) + 1]] <- paste( " ", summary.text, sep = "") } ## remove outer vertical lines from string for(i in 2:length(label.text)) { label.text[[i]] <- substr(x = label.text[[i]], start = 3, stop = nchar(label.text[[i]]) - 3) } } ## remove dummy list element label.text[[1]] <- NULL if(rotate == FALSE) { ## convert keywords into summary placement coordinates if(missing(summary.pos) == TRUE) { summary.pos <- c(limits.x[1], limits.y[2]) summary.adj <- c(0, 1) } else if(length(summary.pos) == 2) { summary.pos <- summary.pos summary.adj <- c(0, 1) } else if(summary.pos[1] == "topleft") { summary.pos <- c(limits.x[1], limits.y[2] - par()$cxy[2] * 1) summary.adj <- c(0, 1) } else if(summary.pos[1] == "top") { summary.pos <- c(mean(limits.x), limits.y[2] - par()$cxy[2] * 1) summary.adj <- c(0.5, 1) } else if(summary.pos[1] == "topright") { summary.pos <- c(limits.x[2], limits.y[2] - par()$cxy[2] * 1) summary.adj <- c(1, 1) } else if(summary.pos[1] == "left") { summary.pos <- c(limits.x[1], mean(limits.y)) summary.adj <- c(0, 0.5) } else if(summary.pos[1] == "center") { summary.pos <- c(mean(limits.x), mean(limits.y)) summary.adj <- c(0.5, 0.5) } else if(summary.pos[1] == "right") { summary.pos <- c(limits.x[2], mean(limits.y)) 
summary.adj <- c(1, 0.5) }else if(summary.pos[1] == "bottomleft") { summary.pos <- c(limits.x[1], limits.y[1] + par()$cxy[2] * 3.5) summary.adj <- c(0, 0) } else if(summary.pos[1] == "bottom") { summary.pos <- c(mean(limits.x), limits.y[1] + par()$cxy[2] * 3.5) summary.adj <- c(0.5, 0) } else if(summary.pos[1] == "bottomright") { summary.pos <- c(limits.x[2], limits.y[1] + par()$cxy[2] * 3.5) summary.adj <- c(1, 0) } ## convert keywords into legend placement coordinates if(missing(legend.pos) == TRUE) { legend.pos <- c(limits.x[1], limits.y[2]) legend.adj <- c(0, 1) } else if(length(legend.pos) == 2) { legend.pos <- legend.pos legend.adj <- c(0, 1) } else if(legend.pos[1] == "topleft") { legend.pos <- c(limits.x[1], limits.y[2]) legend.adj <- c(0, 1) } else if(legend.pos[1] == "top") { legend.pos <- c(mean(limits.x), limits.y[2]) legend.adj <- c(0.5, 1) } else if(legend.pos[1] == "topright") { legend.pos <- c(limits.x[2], limits.y[2]) legend.adj <- c(1, 1) } else if(legend.pos[1] == "left") { legend.pos <- c(limits.x[1], mean(limits.y)) legend.adj <- c(0, 0.5) } else if(legend.pos[1] == "center") { legend.pos <- c(mean(limits.x), mean(limits.y)) legend.adj <- c(0.5, 0.5) } else if(legend.pos[1] == "right") { legend.pos <- c(limits.x[2], mean(limits.y)) legend.adj <- c(1, 0.5) } else if(legend.pos[1] == "bottomleft") { legend.pos <- c(limits.x[1], limits.y[1]) legend.adj <- c(0, 0) } else if(legend.pos[1] == "bottom") { legend.pos <- c(mean(limits.x), limits.y[1]) legend.adj <- c(0.5, 0) } else if(legend.pos[1] == "bottomright") { legend.pos <- c(limits.x[2], limits.y[1]) legend.adj <- c(1, 0) } } else { ## convert keywords into summary placement coordinates if(missing(summary.pos) == TRUE) { summary.pos <- c(limits.y[1] + par()$cxy[1] * 7.5, limits.x[1]) summary.adj <- c(0, 0) } else if(length(summary.pos) == 2) { summary.pos <- summary.pos summary.adj <- c(0, 1) } else if(summary.pos[1] == "topleft") { summary.pos <- c(limits.y[1] + par()$cxy[1] * 7.5, 
limits.x[2]) summary.adj <- c(0, 1) } else if(summary.pos[1] == "top") { summary.pos <- c(mean(limits.y), limits.x[2]) summary.adj <- c(0.5, 1) } else if(summary.pos[1] == "topright") { summary.pos <- c(limits.y[2], limits.x[2]) summary.adj <- c(1, 1) } else if(summary.pos[1] == "left") { summary.pos <- c(limits.y[1] + par()$cxy[1] * 7.5, mean(limits.x)) summary.adj <- c(0, 0.5) } else if(summary.pos[1] == "center") { summary.pos <- c(mean(limits.y), mean(limits.x)) summary.adj <- c(0.5, 0.5) } else if(summary.pos[1] == "right") { summary.pos <- c(limits.y[2], mean(limits.x)) summary.adj <- c(1, 0.5) }else if(summary.pos[1] == "bottomleft") { summary.pos <- c(limits.y[1] + par()$cxy[1] * 7.5, limits.x[1]) summary.adj <- c(0, 0) } else if(summary.pos[1] == "bottom") { summary.pos <- c(mean(limits.y), limits.x[1]) summary.adj <- c(0.5, 0) } else if(summary.pos[1] == "bottomright") { summary.pos <- c(limits.y[2], limits.x[1]) summary.adj <- c(1, 0) } ## convert keywords into legend placement coordinates if(missing(legend.pos) == TRUE) { legend.pos <- c(limits.y[1] + par()$cxy[1] * 7.5, limits.x[1]) legend.adj <- c(0, 0) } else if(length(legend.pos) == 2) { legend.pos <- legend.pos legend.adj <- c(1, 0) } else if(legend.pos[1] == "topleft") { legend.pos <- c(limits.y[1] + par()$cxy[1] * 11, limits.x[2]) legend.adj <- c(1, 0) } else if(legend.pos[1] == "top") { legend.pos <- c(mean(limits.y), limits.x[2]) legend.adj <- c(1, 0.5) } else if(legend.pos[1] == "topright") { legend.pos <- c(limits.y[2], limits.x[2]) legend.adj <- c(1, 1) } else if(legend.pos[1] == "left") { legend.pos <- c(limits.y[1] + par()$cxy[1] * 7.5, mean(limits.x)) legend.adj <- c(0.5, 0) } else if(legend.pos[1] == "center") { legend.pos <- c(mean(limits.y), mean(limits.x)) legend.adj <- c(0.5, 0.5) } else if(legend.pos[1] == "right") { legend.pos <- c(limits.y[2], mean(limits.x)) legend.adj <- c(0.5, 1) } else if(legend.pos[1] == "bottomleft") { legend.pos <- c(limits.y[1] + par()$cxy[1] * 7.5, 
limits.x[1]) legend.adj <- c(0, 0) } else if(legend.pos[1] == "bottom") { legend.pos <- c(mean(limits.y), limits.x[1]) legend.adj <- c(0, 0.5) } else if(legend.pos[1] == "bottomright") { legend.pos <- c(limits.y[2], limits.x[1]) legend.adj <- c(0, 1) } } ## define cartesian plot origins if(rotate == FALSE) { xy.0 <- c(min(ellipse[,1]) * lostintranslation, min(ellipse[,2])) } else { xy.0 <- c(min(ellipse[,1]), min(ellipse[,2]) * lostintranslation) } ## calculate coordinates for dispersion polygon overlay y.max.x <- 2 * limits.x[2] / max(data.global[6]) polygons <- matrix(nrow = length(data), ncol = 14) for(i in 1:length(data)) { if(dispersion == "qr") { ci.lower <- quantile(data[[i]][,1], 0.25) ci.upper <- quantile(data[[i]][,1], 0.75) } else if(grepl(x = dispersion, pattern = "p") == TRUE) { ci.plot <- as.numeric(strsplit(x = dispersion, split = "p")[[1]][2]) ci.plot <- (100 - ci.plot) / 100 ci.lower <- quantile(data[[i]][,1], ci.plot) ci.upper <- quantile(data[[i]][,1], 1 - ci.plot) } else if(dispersion == "sd") { if(log.z == TRUE) { ci.lower <- exp(mean(log(data[[i]][,1])) - sd(log(data[[i]][,1]))) ci.upper <- exp(mean(log(data[[i]][,1])) + sd(log(data[[i]][,1]))) } else { ci.lower <- mean(data[[i]][,1]) - sd(data[[i]][,1]) ci.upper <- mean(data[[i]][,1]) + sd(data[[i]][,1]) } } else if(dispersion == "2sd") { if(log.z == TRUE) { ci.lower <- exp(mean(log(data[[i]][,1])) - 2 * sd(log(data[[i]][,1]))) ci.upper <- exp(mean(log(data[[i]][,1])) + 2 * sd(log(data[[i]][,1]))) } else { ci.lower <- mean(data[[i]][,1]) - 2 * sd(data[[i]][,1]) ci.upper <- mean(data[[i]][,1]) + 2 * sd(data[[i]][,1]) } } else { stop("Measure of dispersion not supported.") } if(log.z == TRUE) { ci.lower[which(ci.lower < 0)] <- 1 y.lower <- log(ci.lower) y.upper <- log(ci.upper) } else { y.lower <- ci.lower y.upper <- ci.upper } if(rotate == FALSE) { polygons[i,1:7] <- c(limits.x[1], limits.x[2], xy.0[1], par()$usr[2], par()$usr[2], xy.0[1], limits.x[2]) polygons[i,8:14] <- c(0, (y.upper - 
z.central.global) * limits.x[2], (y.upper - z.central.global) * xy.0[1], (y.upper - z.central.global) * xy.0[1], (y.lower - z.central.global) * xy.0[1], (y.lower - z.central.global) * xy.0[1], (y.lower - z.central.global) * limits.x[2] ) } else { y.max <- par()$usr[4] polygons[i,1:7] <- c(limits.x[1], limits.x[2], xy.0[2], y.max, y.max, xy.0[2], limits.x[2]) polygons[i,8:14] <- c(0, (y.upper - z.central.global) * limits.x[2], (y.upper - z.central.global) * xy.0[2], (y.upper - z.central.global) * xy.0[2], (y.lower - z.central.global) * xy.0[2], (y.lower - z.central.global) * xy.0[2], (y.lower - z.central.global) * limits.x[2] ) } } ## append information about data in confidence interval for(i in 1:length(data)) { data.in.2s <- rep(x = FALSE, times = nrow(data[[i]])) data.in.2s[data[[i]][,8] > -2 & data[[i]][,8] < 2] <- TRUE data[[i]] <- cbind(data[[i]], data.in.2s) } ## calculate coordinates for 2-sigma bar overlay if(bar[1] == TRUE) { bars <- matrix(nrow = length(data), ncol = 8) for(i in 1:length(data)) { bars[i,1:4] <- c(limits.x[1], limits.x[1], ifelse("xlim" %in% names(extraArgs), extraArgs$xlim[2] * 0.95, max(data.global$precision)), ifelse("xlim" %in% names(extraArgs), extraArgs$xlim[2] * 0.95, max(data.global$precision))) bars[i,5:8] <- c(-2, 2, (data[[i]][1,5] - z.central.global) * bars[i,3] + 2, (data[[i]][1,5] - z.central.global) * bars[i,3] - 2) } } else { bars <- matrix(nrow = length(bar), ncol = 8) if(is.numeric(bar) == TRUE & log.z == TRUE) { bar <- log(bar) } for(i in 1:length(bar)) { bars[i,1:4] <- c(limits.x[1], limits.x[1], ifelse("xlim" %in% names(extraArgs), extraArgs$xlim[2] * 0.95, max(data.global$precision)), ifelse("xlim" %in% names(extraArgs), extraArgs$xlim[2] * 0.95, max(data.global$precision))) bars[i,5:8] <- c(-2, 2, (bar[i] - z.central.global) * bars[i,3] + 2, (bar[i] - z.central.global) * bars[i,3] - 2) } } if (rotate == TRUE) { bars <- matrix(bars[, rev(seq_len(ncol(bars)))], ncol = 8) } ## calculate error bar coordinates 
if(error.bars == TRUE) { arrow.coords <- list(NA) for(i in 1:length(data)) { arrow.x1 <- data[[i]][,6] arrow.x2 <- data[[i]][,6] arrow.y1 <- data[[i]][,1] - data[[i]][,2] arrow.y2 <- data[[i]][,1] + data[[i]][,2] if(log.z == TRUE) { arrow.y1 <- log(arrow.y1) arrow.y2 <- log(arrow.y2) } arrow.coords[[length(arrow.coords) + 1]] <- cbind( arrow.x1, arrow.x2, (arrow.y1 - z.central.global) * arrow.x1, (arrow.y2 - z.central.global) * arrow.x1) } arrow.coords[[1]] <- NULL } ## calculate KDE KDE <- list(NA) KDE.ext <- 0 KDE.bw <- numeric(0) for(i in 1:length(data)) { KDE.i <- density(x = data[[i]][,3], kernel = "gaussian", bw = bw, from = ellipse.values[1], to = ellipse.values[2], weights = data[[i]]$weights) KDE.xy <- cbind(KDE.i$x, KDE.i$y) KDE.bw <- c(KDE.bw, KDE.i$bw) KDE.ext <- ifelse(max(KDE.xy[,2]) < KDE.ext, KDE.ext, max(KDE.xy[,2])) KDE.xy <- rbind(c(min(KDE.xy[,1]), 0), KDE.xy, c(max(KDE.xy[,1]), 0)) KDE[[length(KDE) + 1]] <- cbind(KDE.xy[,1], KDE.xy[,2]) } KDE[1] <- NULL ## calculate mean KDE bandwidth KDE.bw <- mean(KDE.bw, na.rm = TRUE) ## calculate max KDE value for labelling KDE.max.plot <- numeric(length(data)) for(i in 1:length(data)) { KDE.plot <- density(x = data[[i]][,1], kernel = "gaussian", bw = bw, from = limits.z[1], to = limits.z[2]) KDE.max.plot[i] <- max(KDE.plot$y) } KDE.max.plot <- max(KDE.max.plot, na.rm = TRUE) ## calculate histogram data without plotting ## create dummy list hist.data <- list(NA) for(i in 1:length(data)) { hist.i <- hist(x = data[[i]][,3], plot = FALSE, breaks = breaks) hist.data[[length(hist.data) + 1]] <- hist.i } ## remove dummy list object hist.data[[1]] <- NULL ## calculate maximum histogram bar height for normalisation hist.max.plot <- numeric(length(data)) for(i in 1:length(data)) { hist.max.plot <- ifelse(max(hist.data[[i]]$counts, na.rm = TRUE) > hist.max.plot, max(hist.data[[i]]$counts, na.rm = TRUE), hist.max.plot) } hist.max.plot <- max(hist.max.plot, na.rm = TRUE) ## normalise histogram bar height to KDE 
dimensions for(i in 1:length(data)) { hist.data[[i]]$density <- hist.data[[i]]$counts / hist.max.plot * KDE.max.plot } ## calculate boxplot data without plotting ## create dummy list boxplot.data <- list(NA) for(i in 1:length(data)) { boxplot.i <- boxplot(x = data[[i]][,3], plot = FALSE) boxplot.data[[length(boxplot.data) + 1]] <- boxplot.i } ## remove dummy list object boxplot.data[[1]] <- NULL ## calculate line coordinates and further parameters if(missing(line) == FALSE) { ## check if line parameters are R.Lum-objects for(i in 1:length(line)) { if(is.list(line) == TRUE) { if(is(line[[i]], "RLum.Results")) { line[[i]] <- as.numeric(get_RLum(object = line[[i]], data.object = "summary")$de) } } else if(is(object = line, class2 = "RLum.Results")) { line <- as.numeric(get_RLum(object = line, data.object = "summary")$de) } } ## convert list to vector if(is.list(line) == TRUE) { line <- unlist(line) } if(log.z == TRUE) { line <- log(line) } line.coords <- list(NA) if(rotate == FALSE) { for(i in 1:length(line)) { line.x <- c(limits.x[1], min(ellipse[,1]), par()$usr[2]) line.y <- c(0, (line[i] - z.central.global) * min(ellipse[,1]), (line[i] - z.central.global) * min(ellipse[,1])) line.coords[[length(line.coords) + 1]] <- rbind(line.x, line.y) } } else { for(i in 1:length(line)) { line.x <- c(limits.x[1], min(ellipse[,2]),y.max) line.y <- c(0, (line[i] - z.central.global) * min(ellipse[,2]), (line[i] - z.central.global) * min(ellipse[,2])) line.coords[[length(line.coords) + 1]] <- rbind(line.x, line.y) } } line.coords[1] <- NULL if(missing(line.col) == TRUE) { line.col <- seq(from = 1, to = length(line.coords)) } if(missing(line.lty) == TRUE) { line.lty <- rep(1, length(line.coords)) } if(missing(line.label) == TRUE) { line.label <- rep("", length(line.coords)) } } ## calculate rug coordinates if(missing(rug) == FALSE) { if(log.z == TRUE) { rug.values <- log(De.global) } else { rug.values <- De.global } rug.coords <- list(NA) if(rotate == FALSE) { for(i in 
1:length(rug.values)) { rug.x <- c(xy.0[1] * (1 - 0.013 * (layout$abanico$dimension$rugl / 100)), xy.0[1]) rug.y <- c((rug.values[i] - z.central.global) * min(ellipse[,1]), (rug.values[i] - z.central.global) * min(ellipse[,1])) rug.coords[[length(rug.coords) + 1]] <- rbind(rug.x, rug.y) } } else { for(i in 1:length(rug.values)) { rug.x <- c(xy.0[2] * (1 - 0.013 * (layout$abanico$dimension$rugl / 100)), xy.0[2]) rug.y <- c((rug.values[i] - z.central.global) * min(ellipse[,2]), (rug.values[i] - z.central.global) * min(ellipse[,2])) rug.coords[[length(rug.coords) + 1]] <- rbind(rug.x, rug.y) } } rug.coords[1] <- NULL } ## Generate plot ------------------------------------------------------------ ## determine number of subheader lines to shift the plot if(length(summary) > 0 & summary.pos[1] == "sub") { shift.lines <- (length(data) + 1) * layout$abanico$dimension$summary.line/100 } else {shift.lines <- 1} ## extract original plot parameters par(bg = layout$abanico$colour$background) bg.original <- par()$bg if(rotate == FALSE) { ## setup plot area par(mar = c(4.5, 4.5, shift.lines + 1.5, 7), xpd = TRUE, cex = cex) if(layout$abanico$dimension$figure.width != "auto" | layout$abanico$dimension$figure.height != "auto") { par(mai = layout$abanico$dimension$margin / 25.4, pin = c(layout$abanico$dimension$figure.width / 25.4 - layout$abanico$dimension$margin[2] / 25.4 - layout$abanico$dimension$margin[4] / 25.4, layout$abanico$dimension$figure.height / 25.4 - layout$abanico$dimension$margin[1] / 25.4 - layout$abanico$dimension$margin[3]/25.4)) } ## create empty plot par(new = TRUE) plot(NA, xlim = c(limits.x[1], limits.x[2] * (1 / plot.ratio)), ylim = limits.y, main = "", sub = sub, xlab = "", ylab = "", xaxs = "i", yaxs = "i", frame.plot = FALSE, axes = FALSE) ## add y-axis label mtext(text = ylab, at = mean(x = c(min(ellipse[,2]), max(ellipse[,2])), na.rm = TRUE), # at = 0, ## BUG FROM VERSION 0.4.0, maybe removed in future adj = 0.5, side = 2, line = 3 * 
layout$abanico$dimension$ylab.line / 100, col = layout$abanico$colour$ylab, family = layout$abanico$font.type$ylab, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$ylab], cex = cex * layout$abanico$font.size$ylab/12) ## calculate upper x-axis label values label.x.upper <- if(log.z == TRUE) { as.character(round(1/axTicks(side = 1)[-1] * 100, 1)) } else { as.character(round(1/axTicks(side = 1)[-1], 1)) } # optionally, plot 2-sigma-bar if(bar[1] != FALSE) { for(i in 1:length(bar)) { polygon(x = bars[i,1:4], y = bars[i,5:8], col = bar.fill[i], border = bar.line[i]) } } ## remove unwanted parts polygon(x = c(par()$usr[2], par()$usr[2], par()$usr[2] * 2, par()$usr[2] * 2), y = c(min(ellipse[,2]) * 2, max(ellipse[,2]) * 2, max(ellipse[,2]) * 2, min(ellipse[,2]) * 2), col = bg.original, lty = 0) ## optionally, plot dispersion polygon if(polygon.fill[1] != "none") { for(i in 1:length(data)) { polygon(x = polygons[i,1:7], y = polygons[i,8:14], col = polygon.fill[i], border = polygon.line[i]) } } ## optionally, add minor grid lines if(grid.minor != "none") { for(i in 1:length(tick.values.minor)) { lines(x = c(limits.x[1], min(ellipse[,1])), y = c(0, (tick.values.minor[i] - z.central.global) * min(ellipse[,1])), col = grid.minor, lwd = 1) } for(i in 1:length(tick.values.minor)) { lines(x = c(xy.0[1], par()$usr[2]), y = c((tick.values.minor[i] - z.central.global) * min(ellipse[,1]), (tick.values.minor[i] - z.central.global) * min(ellipse[,1])), col = grid.minor, lwd = 1) } } ## optionally, add major grid lines if(grid.major != "none") { for(i in 1:length(tick.values.major)) { lines(x = c(limits.x[1], min(ellipse[,1])), y = c(0, (tick.values.major[i] - z.central.global) * min(ellipse[,1])), col = grid.major, lwd = 1) } for(i in 1:length(tick.values.major)) { lines(x = c(xy.0[1], par()$usr[2]), y = c((tick.values.major[i] - z.central.global) * min(ellipse[,1]), (tick.values.major[i] - z.central.global) * min(ellipse[,1])), col = grid.major, 
lwd = 1) } } ## optionally, plot lines for each bar if(lwd[1] > 0 & lty[1] > 0 & bar[1] != FALSE & length(data) == 1) { if(bar[1] == TRUE & length(bar) == 1) { bar[1] <- z.central.global } for(i in 1:length(bar)) { x2 <- r / sqrt(1 + f^2 * ( bar[i] - z.central.global)^2) y2 <- (bar[i] - z.central.global) * x2 lines(x = c(limits.x[1], x2, xy.0[1], par()$usr[2]), y = c(0, y2, y2, y2), lty = lty[i], lwd = lwd[i], col = centrality.col[i]) } } else if(lwd[1] > 0 & lty[1] > 0 & bar[1] != FALSE) { for(i in 1:length(data)) { z.line <- ifelse(test = is.numeric(bar[i]) == TRUE, yes = bar[i], no = data[[i]][1,5]) x2 <- r / sqrt(1 + f^2 * ( z.line - z.central.global)^2) y2 <- (z.line - z.central.global) * x2 lines(x = c(limits.x[1], x2, xy.0[1], par()$usr[2]), y = c(0, y2, y2, y2), lty = lty[i], lwd = lwd[i], col = centrality.col[i]) } } ## optionally add further lines if(missing(line) == FALSE) { for(i in 1:length(line)) { lines(x = line.coords[[i]][1,1:3], y = line.coords[[i]][2,1:3], col = line.col[i], lty = line.lty[i] ) text(x = line.coords[[i]][1,3], y = line.coords[[i]][2,3] + par()$cxy[2] * 0.3, labels = line.label[i], pos = 2, col = line.col[i], cex = cex * 0.9) } } ## add plot title cex.old <- par()$cex par(cex = layout$abanico$font.size$main / 12) title(main = main, family = layout$abanico$font.type$main, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$main], col.main = layout$abanico$colour$main, line = shift.lines * layout$abanico$dimension$main / 100) par(cex = cex.old) ## calculate lower x-axis (precision) x.axis.ticks <- axTicks(side = 1) x.axis.ticks <- x.axis.ticks[c(TRUE, x.axis.ticks <= limits.x[2])] x.axis.ticks <- x.axis.ticks[x.axis.ticks <= max(ellipse[,1])] ## x-axis with lables and ticks axis(side = 1, at = x.axis.ticks, col = layout$abanico$colour$xtck1, col.axis = layout$abanico$colour$xtck1, labels = NA, tcl = -layout$abanico$dimension$xtcl1 / 200, cex = cex) axis(side = 1, at = x.axis.ticks, line = 2 * 
layout$abanico$dimension$xtck1.line / 100 - 2, lwd = 0, col = layout$abanico$colour$xtck1, family = layout$abanico$font.type$xtck1, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$xtck1], col.axis = layout$abanico$colour$xtck1, cex.axis = layout$abanico$font.size$xlab1/12) ## extend axis line to right side of the plot lines(x = c(max(x.axis.ticks), max(ellipse[,1])), y = c(limits.y[1], limits.y[1]), col = layout$abanico$colour$xtck1) ## draw closing tick on right hand side axis(side = 1, tcl = -layout$abanico$dimension$xtcl1 / 200, lwd = 0, lwd.ticks = 1, at = limits.x[2], labels = FALSE, col = layout$abanico$colour$xtck1) axis(side = 1, tcl = layout$abanico$dimension$xtcl2 / 200, lwd = 0, lwd.ticks = 1, at = limits.x[2], labels = FALSE, col = layout$abanico$colour$xtck2) ## add lower axis label mtext(xlab[2], at = (limits.x[1] + max(ellipse[,1])) / 2, side = 1, line = 2.5 * layout$abanico$dimension$xlab1.line / 100, col = layout$abanico$colour$xlab1, family = layout$abanico$font.type$xlab1, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$xlab1], cex = cex * layout$abanico$font.size$xlab1/12) ## add upper axis label mtext(xlab[1], at = (limits.x[1] + max(ellipse[,1])) / 2, side = 1, line = -3.5 * layout$abanico$dimension$xlab2.line / 100, col = layout$abanico$colour$xlab2, family = layout$abanico$font.type$xlab2, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$xlab2], cex = cex * layout$abanico$font.size$xlab2/12) ## plot upper x-axis axis(side = 1, at = x.axis.ticks[-1], col = layout$abanico$colour$xtck2, col.axis = layout$abanico$colour$xtck2, labels = NA, tcl = layout$abanico$dimension$xtcl2 / 200, cex = cex) ## remove first tick label (infinity) label.x.upper <- label.x.upper[1:(length(x.axis.ticks) - 1)] axis(side = 1, at = x.axis.ticks[-1], labels = label.x.upper, line = -1 * layout$abanico$dimension$xtck2.line / 100 - 2, lwd = 0, col = 
layout$abanico$colour$xtck2, family = layout$abanico$font.type$xtck2, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$xtck2], col.axis = layout$abanico$colour$xtck2, cex.axis = layout$abanico$font.size$xlab2/12) ## plot y-axis if(y.axis == TRUE) { char.height <- par()$cxy[2] tick.space <- axisTicks(usr = limits.y, log = FALSE) tick.space <- (max(tick.space) - min(tick.space)) / length(tick.space) if(tick.space < char.height * 1.7) { axis(side = 2, tcl = -layout$abanico$dimension$ytcl / 200, lwd = 1, lwd.ticks = 1, at = c(-2, 2), labels = c("", ""), las = 1, col = layout$abanico$colour$ytck) axis(side = 2, at = 0, tcl = 0, line = 2 * layout$abanico$dimension$ytck.line / 100 - 2, labels = paste("\u00B1", "2"), las = 1, family = layout$abanico$font.type$ytck, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$ytck], col.axis = layout$abanico$colour$ytck, cex.axis = layout$abanico$font.size$ylab/12) } else { axis(side = 2, at = seq(-2, 2, by = 2), col = layout$abanico$colour$ytck, col.axis = layout$abanico$colour$ytck, labels = NA, las = 1, tcl = -layout$abanico$dimension$ytcl / 200, cex = cex) axis(side = 2, at = seq(-2, 2, by = 2), line = 2 * layout$abanico$dimension$ytck.line / 100 - 2, lwd = 0, las = 1, col = layout$abanico$colour$ytck, family = layout$abanico$font.type$ytck, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$ytck], col.axis = layout$abanico$colour$ytck, cex.axis = layout$abanico$font.size$ylab/12) } } else { axis(side = 2, at = 0, col = layout$abanico$colour$ytck, col.axis = layout$abanico$colour$ytck, labels = NA, las = 1, tcl = -layout$abanico$dimension$ytcl / 200, cex = cex) axis(side = 2, at = 0, line = 2 * layout$abanico$dimension$ytck.line / 100 - 2, lwd = 0, las = 1, col = layout$abanico$colour$ytck, family = layout$abanico$font.type$ytck, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$ytck], 
col.axis = layout$abanico$colour$ytck, cex.axis = layout$abanico$font.size$ylab/12) } ## plot minor z-ticks for(i in 1:length(tick.values.minor)) { lines(x = c(par()$usr[2], (1 + 0.007 * cex * layout$abanico$dimension$ztcl / 100) * par()$usr[2]), y = c((tick.values.minor[i] - z.central.global) * min(ellipse[,1]), (tick.values.minor[i] - z.central.global) * min(ellipse[,1])), col = layout$abanico$colour$ztck) } ## plot major z-ticks for(i in 1:length(tick.values.major)) { lines(x = c(par()$usr[2], (1 + 0.015 * cex * layout$abanico$dimension$ztcl / 100) * par()$usr[2]), y = c((tick.values.major[i] - z.central.global) * min(ellipse[,1]), (tick.values.major[i] - z.central.global) * min(ellipse[,1])), col = layout$abanico$colour$ztck) } ## plot z-axes lines(ellipse, col = layout$abanico$colour$border) lines(rep(par()$usr[2], nrow(ellipse)), ellipse[,2], col = layout$abanico$colour$ztck) ## plot z-axis text text(x = (1 + 0.04 * cex * layout$abanico$dimension$ztcl / 100) * par()$usr[2], y = (tick.values.major - z.central.global) * min(ellipse[,1]), labels = label.z.text, adj = 0, family = layout$abanico$font.type$ztck, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$ztck], cex = cex * layout$abanico$font.size$ztck/12) ## plot z-label mtext(text = zlab, at = mean(x = c(min(ellipse[,2]), max(ellipse[,2])), na.rm = TRUE), # at = 0, ## BUG from version 0.4.0, maybe removed in future side = 4, las = 3, adj = 0.5, line = 5 * layout$abanico$dimension$zlab.line / 100, col = layout$abanico$colour$zlab, family = layout$abanico$font.type$zlab, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$zlab], cex = cex * layout$abanico$font.size$zlab/12) ## plot values and optionally error bars if(error.bars == TRUE) { for(i in 1:length(data)) { arrows(x0 = arrow.coords[[i]][,1], x1 = arrow.coords[[i]][,2], y0 = arrow.coords[[i]][,3], y1 = arrow.coords[[i]][,4], length = 0, angle = 90, code = 3, col = value.bar[i]) } } 
for(i in 1:length(data)) { points(data[[i]][,6][data[[i]][,6] <= limits.x[2]], data[[i]][,8][data[[i]][,6] <= limits.x[2]], col = value.dot[i], pch = pch[i], cex = layout$abanico$dimension$pch / 100) } ## calculate KDE width KDE.max <- 0 for(i in 1:length(data)) { KDE.max <- ifelse(test = KDE.max < max(KDE[[i]][,2]), yes = max(KDE[[i]][,2]), no = KDE.max) } ## optionally adjust KDE width for boxplot option if(boxplot == TRUE) { KDE.max <- 1.25 * KDE.max } KDE.scale <- (par()$usr[2] - xy.0[1]) / (KDE.max * 1.05) ## optionally add KDE plot if(kde == TRUE) { ## plot KDE lines for(i in 1:length(data)) { polygon(x = xy.0[1] + KDE[[i]][,2] * KDE.scale, y = (KDE[[i]][,1] - z.central.global) * min(ellipse[,1]), col = kde.fill[i], border = kde.line[i], lwd = 1.7) } ## plot KDE x-axis axis(side = 1, at = c(xy.0[1], par()$usr[2]), col = layout$abanico$colour$xtck3, col.axis = layout$abanico$colour$xtck3, labels = NA, tcl = -layout$abanico$dimension$xtcl3 / 200, cex = cex) axis(side = 1, at = c(xy.0[1], par()$usr[2]), labels = as.character(round(c(0, KDE.max.plot), 3)), line = 2 * layout$abanico$dimension$xtck3.line / 100 - 2, lwd = 0, col = layout$abanico$colour$xtck3, family = layout$abanico$font.type$xtck3, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$xtck3], col.axis = layout$abanico$colour$xtck3, cex.axis = layout$abanico$font.size$xtck3/12) mtext(text = paste(xlab[3], " (bw ", round(x = KDE.bw, digits = 3), ")", sep = ""), at = (xy.0[1] + par()$usr[2]) / 2, side = 1, line = 2.5 * layout$abanico$dimension$xlab3.line / 100, col = layout$abanico$colour$xlab3, family = layout$abanico$font.type$xlab3, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$xlab3], cex = cex * layout$abanico$font.size$xlab3/12) } ## optionally add histogram or dot plot axis if(hist == TRUE) { axis(side = 1, at = c(xy.0[1], par()$usr[2]), labels = as.character(c(0, hist.max.plot)), line = -1 * 
layout$abanico$dimension$xtck3.line / 100 - 2, lwd = 0, col = layout$abanico$colour$xtck3, family = layout$abanico$font.type$xtck3, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$xtck3], col.axis = layout$abanico$colour$xtck3, cex.axis = layout$abanico$font.size$xtck3/12) ## add label mtext(text = "n", at = (xy.0[1] + par()$usr[2]) / 2, side = 1, line = -3.5 * layout$abanico$dimension$xlab2.line / 100, col = layout$abanico$colour$xlab2, family = layout$abanico$font.type$xlab2, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$xlab2], cex = cex * layout$abanico$font.size$xlab2/12) ## plot ticks axis(side = 1, at = c(xy.0[1], par()$usr[2]), col = layout$abanico$colour$xtck2, col.axis = layout$abanico$colour$xtck2, labels = NA, tcl = layout$abanico$dimension$xtcl2 / 200, cex = cex) ## calculate scaling factor for histogram bar heights hist.scale <- (par()$usr[2] - xy.0[1]) / (KDE.max.plot * 1.05) ## draw each bar for each data set for(i in 1:length(data)) { for(j in 1:length(hist.data[[i]]$density)) { ## calculate x-coordinates hist.x.i <- c(xy.0[1], xy.0[1], xy.0[1] + hist.data[[i]]$density[j] * hist.scale, xy.0[1] + hist.data[[i]]$density[j] * hist.scale) ## calculate y-coordinates hist.y.i <- c((hist.data[[i]]$breaks[j] - z.central.global) * min(ellipse[,1]), (hist.data[[i]]$breaks[j + 1] - z.central.global) * min(ellipse[,1]), (hist.data[[i]]$breaks[j + 1] - z.central.global) * min(ellipse[,1]), (hist.data[[i]]$breaks[j] - z.central.global) * min(ellipse[,1])) ## remove data out of z-axis range hist.y.i <- ifelse(hist.y.i < min(ellipse[,2]), min(ellipse[,2]), hist.y.i) hist.y.i <- ifelse(hist.y.i > max(ellipse[,2]), max(ellipse[,2]), hist.y.i) ## draw the bars polygon(x = hist.x.i, y = hist.y.i, col = kde.fill[i], border = kde.line[i]) } } } ## optionally add dot plot if(dots == TRUE) { for(i in 1:length(data)) { for(j in 1:length(hist.data[[i]]$counts)) { ## calculate scaling factor for 
histogram bar heights dots.distance <- (par()$usr[2] - (xy.0[1] + par()$cxy[1] * 0.4)) / hist.max.plot dots.x.i <- seq(from = xy.0[1] + par()$cxy[1] * 0.4, by = dots.distance, length.out = hist.data[[i]]$counts[j]) dots.y.i <- rep((hist.data[[i]]$mids[j] - z.central.global) * min(ellipse[,1]), length(dots.x.i)) ## remove data out of z-axis range dots.x.i <- dots.x.i[dots.y.i >= min(ellipse[,2]) & dots.y.i <= max(ellipse[,2])] dots.y.i <- dots.y.i[dots.y.i >= min(ellipse[,2]) & dots.y.i <= max(ellipse[,2])] if(max(c(0, dots.x.i), na.rm = TRUE) >= (par()$usr[2] - par()$cxy[1] * 0.4)) { dots.y.i <- dots.y.i[dots.x.i < (par()$usr[2] - par()$cxy[1] * 0.4)] dots.x.i <- dots.x.i[dots.x.i < (par()$usr[2] - par()$cxy[1] * 0.4)] pch.dots <- c(rep(20, length(dots.x.i) - 1), 15) } else { pch.dots <- rep(20, length(dots.x.i)) } ## plot points points(x = dots.x.i, y = dots.y.i, pch = "|", cex = 0.7 * cex, col = kde.line[i]) } } } ## optionally add box plot if(boxplot == TRUE) { for(i in 1:length(data)) { ## draw median line lines(x = c(xy.0[1] + KDE.max * 0.85, xy.0[1] + KDE.max * 0.95), y = c((boxplot.data[[i]]$stats[3,1] - z.central.global) * min(ellipse[,1]), (boxplot.data[[i]]$stats[3,1] - z.central.global) * min(ellipse[,1])), lwd = 2, col = kde.line[i]) ## draw p25-p75-polygon polygon(x = c(xy.0[1] + KDE.max * 0.85, xy.0[1] + KDE.max * 0.85, xy.0[1] + KDE.max * 0.95, xy.0[1] + KDE.max * 0.95), y = c((boxplot.data[[i]]$stats[2,1] - z.central.global) * min(ellipse[,1]), (boxplot.data[[i]]$stats[4,1] - z.central.global) * min(ellipse[,1]), (boxplot.data[[i]]$stats[4,1] - z.central.global) * min(ellipse[,1]), (boxplot.data[[i]]$stats[2,1] - z.central.global) * min(ellipse[,1])), border = kde.line[i]) ## draw whiskers lines(x = c(xy.0[1] + KDE.max * 0.9, xy.0[1] + KDE.max * 0.9), y = c((boxplot.data[[i]]$stats[2,1] - z.central.global) * min(ellipse[,1]), (boxplot.data[[i]]$stats[1,1] - z.central.global) * min(ellipse[,1])), col = kde.line[i]) lines(x = c(xy.0[1] + KDE.max * 
0.87, xy.0[1] + KDE.max * 0.93), y = rep((boxplot.data[[i]]$stats[1,1] - z.central.global) * min(ellipse[,1]), 2), col = kde.line[i]) lines(x = c(xy.0[1] + KDE.max * 0.9, xy.0[1] + KDE.max * 0.9), y = c((boxplot.data[[i]]$stats[4,1] - z.central.global) * min(ellipse[,1]), (boxplot.data[[i]]$stats[5,1] - z.central.global) * min(ellipse[,1])), col = kde.line[i]) lines(x = c(xy.0[1] + KDE.max * 0.87, xy.0[1] + KDE.max * 0.93), y = rep((boxplot.data[[i]]$stats[5,1] - z.central.global) * min(ellipse[,1]), 2), col = kde.line[i]) ## draw outlier points points(x = rep(xy.0[1] + KDE.max * 0.9, length(boxplot.data[[i]]$out)), y = (boxplot.data[[i]]$out - z.central.global) * min(ellipse[,1]), cex = cex * 0.8, col = kde.line[i]) } } ## optionally add stats, i.e. min, max, median sample text if(length(stats) > 0) { text(x = stats.data[,1], y = stats.data[,2], pos = 2, labels = round(stats.data[,3], 1), family = layout$abanico$font.type$stats, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$stats], cex = cex * layout$abanico$font.size$stats/12, col = layout$abanico$colour$stats) } ## optionally add rug if(rug == TRUE) { for(i in 1:length(rug.coords)) { lines(x = rug.coords[[i]][1,], y = rug.coords[[i]][2,], col = value.rug[data.global[i,10]]) } } ## plot KDE base line lines(x = c(xy.0[1], xy.0[1]), y = c(min(ellipse[,2]), max(ellipse[,2])), col = layout$abanico$colour$border) ## draw border around plot if(frame == 1) { polygon(x = c(limits.x[1], min(ellipse[,1]), par()$usr[2], par()$usr[2], min(ellipse[,1])), y = c(0, max(ellipse[,2]), max(ellipse[,2]), min(ellipse[,2]), min(ellipse[,2])), border = layout$abanico$colour$border, lwd = 0.8) } else if(frame == 2) { polygon(x = c(limits.x[1], min(ellipse[,1]), par()$usr[2], par()$usr[2], min(ellipse[,1]), limits.x[1]), y = c(2, max(ellipse[,2]), max(ellipse[,2]), min(ellipse[,2]), min(ellipse[,2]), -2), border = layout$abanico$colour$border, lwd = 0.8) } else if(frame == 3) { polygon(x = 
c(limits.x[1], par()$usr[2], par()$usr[2], limits.x[1]), y = c(max(ellipse[,2]), max(ellipse[,2]), min(ellipse[,2]), min(ellipse[,2])), border = layout$abanico$colour$border, lwd = 0.8) } ## optionally add legend content if(missing(legend) == FALSE) { ## store and change font familiy par.family <- par()$family par(family = layout$abanico$font.type$legend) legend(x = legend.pos[1], y = 0.8 * legend.pos[2], xjust = legend.adj[1], yjust = legend.adj[2], legend = legend, pch = pch, col = value.dot, text.col = value.dot, text.font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$legend], cex = cex * layout$abanico$font.size$legend/12, bty = "n") ## restore font family par(family = par.family) } ## optionally add subheader text mtext(text = mtext, side = 3, line = (shift.lines - 2) * layout$abanico$dimension$mtext / 100, col = layout$abanico$colour$mtext, family = layout$abanico$font.type$mtext, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$mtext], cex = cex * layout$abanico$font.size$mtext / 12) ## add summary content for(i in 1:length(data)) { if(summary.pos[1] != "sub") { text(x = summary.pos[1], y = summary.pos[2], adj = summary.adj, labels = label.text[[i]], col = summary.col[i], family = layout$abanico$font.type$summary, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$summary], cex = cex * layout$abanico$font.size$summary / 12) } else { if(mtext == "") { mtext(side = 3, line = (shift.lines- 1 - i) * layout$abanico$dimension$summary / 100 , text = label.text[[i]], col = summary.col[i], family = layout$abanico$font.type$summary, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$summary], cex = cex * layout$abanico$font.size$summary / 12) } } } } else { ## setup plot area par(mar = c(4, 4, shift.lines + 5, 4), xpd = TRUE, cex = cex) if(layout$abanico$dimension$figure.width != "auto" | layout$abanico$dimension$figure.height != 
"auto") { par(mai = layout$abanico$dimension$margin / 25.4, pin = c(layout$abanico$dimension$figure.width / 25.4 - layout$abanico$dimension$margin[2] / 25.4 - layout$abanico$dimension$margin[4] / 25.4, layout$abanico$dimension$figure.height / 25.4 - layout$abanico$dimension$margin[1] / 25.4 - layout$abanico$dimension$margin[3]/25.4)) } ## create empty plot par(new = TRUE) plot(NA, xlim = limits.y, ylim = c(limits.x[1], limits.x[2] * (1 / plot.ratio)), main = "", sub = sub, xlab = "", ylab = "", xaxs = "i", yaxs = "i", frame.plot = FALSE, axes = FALSE) ## add y-axis label mtext(text = ylab, at = 0, adj = 0.5, side = 1, line = 3 * layout$abanico$dimension$ylab.line / 100, col = layout$abanico$colour$ylab, family = layout$abanico$font.type$ylab, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$ylab], cex = cex * layout$abanico$font.size$ylab/12) ## calculate upper x-axis label values label.x.upper <- if(log.z == TRUE) { as.character(round(1/axTicks(side = 2)[-1] * 100, 1)) } else { as.character(round(1/axTicks(side = 2)[-1], 1)) } # optionally, plot 2-sigma-bar if(bar[1] != FALSE) { for(i in 1:length(bar)) { polygon(x = bars[i,1:4], y = bars[i,5:8], col = bar.fill[i], border = bar.line[i]) } } ## remove unwanted parts polygon(y = c(par()$usr[2], par()$usr[2], par()$usr[2] * 2, par()$usr[2] * 2), x = c(min(ellipse[,2]) * 2, max(ellipse[,2]) * 2, max(ellipse[,2]) * 2, min(ellipse[,2]) * 2), col = bg.original, lty = 0) ## optionally, plot dispersion polygon if(polygon.fill[1] != "none") { for(i in 1:length(data)) { polygon(x = polygons[i,8:14], y = polygons[i,1:7], col = polygon.fill[i], border = polygon.line[i]) } } ## optionally, add minor grid lines if(grid.minor != "none") { for(i in 1:length(tick.values.minor)) { lines(y = c(limits.x[1], min(ellipse[,1])), x = c(0, (tick.values.minor[i] - z.central.global) * min(ellipse[,1])), col = grid.minor, lwd = 1) } for(i in 1:length(tick.values.minor)) { lines(y = c(xy.0[2], par()$usr[2]), 
x = c((tick.values.minor[i] - z.central.global) * min(ellipse[,1]), (tick.values.minor[i] - z.central.global) * min(ellipse[,1])), col = grid.minor, lwd = 1) } } ## optionally, add major grid lines if(grid.major != "none") { for(i in 1:length(tick.values.major)) { lines(y = c(limits.x[1], min(ellipse[,2])), x = c(0, (tick.values.major[i] - z.central.global) * min(ellipse[,2])), col = grid.major, lwd = 1) } for(i in 1:length(tick.values.major)) { lines(y = c(xy.0[2],y.max), x = c((tick.values.major[i] - z.central.global) * min(ellipse[,2]), (tick.values.major[i] - z.central.global) * min(ellipse[,2])), col = grid.major, lwd = 1) } } ## optionally, plot lines for each bar if(lwd[1] > 0 & lty[1] > 0 & bar[1] != FALSE & length(data) == 1) { if(bar[1] == TRUE & length(bar) == 1) { bar[1] <- z.central.global } for(i in 1:length(bar)) { x2 <- r / sqrt(1 + f^2 * ( bar[i] - z.central.global)^2) y2 <- (bar[i] - z.central.global) * x2 lines(x = c(0, y2, y2, y2), y = c(limits.x[1], x2, xy.0[2], par()$usr[4]), lty = lty[i], lwd = lwd[i], col = centrality.col[i]) } } ## optionally add further lines if(missing(line) == FALSE) { for(i in 1:length(line)) { lines(y = line.coords[[i]][1,1:3], x = line.coords[[i]][2,1:3], col = line.col[i], lty = line.lty[i] ) text(y = line.coords[[i]][1,3], x = line.coords[[i]][2,3] + par()$cxy[2] * 0.3, labels = line.label[i], pos = 2, col = line.col[i], cex = cex * 0.9) } } ## add plot title cex.old <- par()$cex par(cex = layout$abanico$font.size$main / 12) title(main = main, family = layout$abanico$font.type$main, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$main], col.main = layout$abanico$colour$main, line = (shift.lines + 3.5) * layout$abanico$dimension$main / 100) par(cex = cex.old) ## calculate lower x-axis (precision) x.axis.ticks <- axTicks(side = 2) x.axis.ticks <- x.axis.ticks[c(TRUE, x.axis.ticks <= limits.x[2])] x.axis.ticks <- x.axis.ticks[x.axis.ticks <= max(ellipse[,2])] ## x-axis with lables 
and ticks axis(side = 2, at = x.axis.ticks, col = layout$abanico$colour$xtck1, col.axis = layout$abanico$colour$xtck1, labels = NA, tcl = -layout$abanico$dimension$xtcl1 / 200, cex = cex) axis(side = 2, at = x.axis.ticks, line = 2 * layout$abanico$dimension$xtck1.line / 100 - 2, lwd = 0, col = layout$abanico$colour$xtck1, family = layout$abanico$font.type$xtck1, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$xtck1], col.axis = layout$abanico$colour$xtck1, cex.axis = layout$abanico$font.size$xlab1/12) ## extend axis line to right side of the plot lines(y = c(max(x.axis.ticks), max(ellipse[,2])), x = c(limits.y[1], limits.y[1]), col = layout$abanico$colour$xtck1) ## draw closing tick on right hand side axis(side = 2, tcl = -layout$abanico$dimension$xtcl1 / 200, lwd = 0, lwd.ticks = 1, at = limits.x[2], labels = FALSE, col = layout$abanico$colour$xtck1) axis(side = 2, tcl = layout$abanico$dimension$xtcl2 / 200, lwd = 0, lwd.ticks = 1, at = limits.x[2], labels = FALSE, col = layout$abanico$colour$xtck2) ## add lower axis label mtext(xlab[2], at = (limits.x[1] + max(ellipse[,2])) / 2, side = 2, line = 2.5 * layout$abanico$dimension$xlab1.line / 100, col = layout$abanico$colour$xlab1, family = layout$abanico$font.type$xlab1, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$xlab1], cex = cex * layout$abanico$font.size$xlab1/12) ## add upper axis label mtext(xlab[1], at = (limits.x[1] + max(ellipse[,2])) / 2, side = 2, line = -3.5 * layout$abanico$dimension$xlab2.line / 100, col = layout$abanico$colour$xlab2, family = layout$abanico$font.type$xlab2, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$xlab2], cex = cex * layout$abanico$font.size$xlab2/12) ## plot upper x-axis axis(side = 2, at = x.axis.ticks[-1], col = layout$abanico$colour$xtck2, col.axis = layout$abanico$colour$xtck2, labels = NA, tcl = layout$abanico$dimension$xtcl2 / 200, cex = cex) ## remove first 
tick label (infinity) label.x.upper <- label.x.upper[1:(length(x.axis.ticks) - 1)] axis(side = 2, at = x.axis.ticks[-1], labels = label.x.upper, line = -1 * layout$abanico$dimension$xtck2.line / 100 - 2, lwd = 0, col = layout$abanico$colour$xtck2, family = layout$abanico$font.type$xtck2, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$xtck2], col.axis = layout$abanico$colour$xtck2, cex.axis = layout$abanico$font.size$xlab2/12) ## plot y-axis if(y.axis == TRUE) { char.height <- par()$cxy[2] tick.space <- axisTicks(usr = limits.y, log = FALSE) tick.space <- (max(tick.space) - min(tick.space)) / length(tick.space) if(tick.space < char.height * 1.7) { axis(side = 1, tcl = -layout$abanico$dimension$ytcl / 200, lwd = 1, lwd.ticks = 1, at = c(-2, 2), labels = c("", ""), las = 1, col = layout$abanico$colour$ytck) axis(side = 1, at = 0, tcl = 0, line = 2 * layout$abanico$dimension$ytck.line / 100 - 2, labels = paste("\u00B1", "2"), las = 1, family = layout$abanico$font.type$ytck, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$ytck], col.axis = layout$abanico$colour$ytck, cex.axis = layout$abanico$font.size$ylab/12) } else { axis(side = 1, at = seq(-2, 2, by = 2), col = layout$abanico$colour$ytck, col.axis = layout$abanico$colour$ytck, labels = NA, las = 1, tcl = -layout$abanico$dimension$ytcl / 200, cex = cex) axis(side = 1, at = seq(-2, 2, by = 2), line = 2 * layout$abanico$dimension$ytck.line / 100 - 2, lwd = 0, las = 1, col = layout$abanico$colour$ytck, family = layout$abanico$font.type$ytck, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$ytck], col.axis = layout$abanico$colour$ytck, cex.axis = layout$abanico$font.size$ylab/12) } } else { axis(side = 1, at = 0, col = layout$abanico$colour$ytck, col.axis = layout$abanico$colour$ytck, labels = NA, las = 1, tcl = -layout$abanico$dimension$ytcl / 200, cex = cex) axis(side = 1, at = 0, line = 2 * 
layout$abanico$dimension$ytck.line / 100 - 2, lwd = 0, las = 1, col = layout$abanico$colour$ytck, family = layout$abanico$font.type$ytck, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$ytck], col.axis = layout$abanico$colour$ytck, cex.axis = layout$abanico$font.size$ylab/12) } ## plot minor z-ticks for(i in 1:length(tick.values.minor)) { lines(y = c(par()$usr[4], (1 + 0.015 * cex * layout$abanico$dimension$ztcl / 100) * y.max), x = c((tick.values.minor[i] - z.central.global) * min(ellipse[,2]), (tick.values.minor[i] - z.central.global) * min(ellipse[,2])), col = layout$abanico$colour$ztck) } ## plot major z-ticks for(i in 1:length(tick.values.major)) { lines(y = c(par()$usr[4], (1 + 0.03 * cex * layout$abanico$dimension$ztcl / 100) * y.max), x = c((tick.values.major[i] - z.central.global) * min(ellipse[,2]), (tick.values.major[i] - z.central.global) * min(ellipse[,2])), col = layout$abanico$colour$ztck) } ## plot z-axes lines(ellipse, col = layout$abanico$colour$border) lines(y = rep(par()$usr[4], nrow(ellipse)), x = ellipse[,1], col = layout$abanico$colour$ztck) ## plot z-axis text text(y = (1 + 0.06 * cex * layout$abanico$dimension$ztcl / 100) * y.max, x = (tick.values.major - z.central.global) * min(ellipse[,2]), labels = label.z.text, adj = 0.5, family = layout$abanico$font.type$ztck, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$ztck], cex = cex * layout$abanico$font.size$ztck/12) ## plot z-label mtext(text = zlab, at = 0, side = 3, las = 1, adj = 0.5, line = 2.5 * layout$abanico$dimension$zlab.line / 100, col = layout$abanico$colour$zlab, family = layout$abanico$font.type$zlab, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$zlab], cex = cex * layout$abanico$font.size$zlab/12) ## plot values and optionally error bars if(error.bars == TRUE) { for(i in 1:length(data)) { arrows(y0 = arrow.coords[[i]][,1], y1 = arrow.coords[[i]][,2], x0 = 
arrow.coords[[i]][,3], x1 = arrow.coords[[i]][,4], length = 0, angle = 90, code = 3, col = value.bar[i]) } } for(i in 1:length(data)) { points(y = data[[i]][,6][data[[i]][,6] <= limits.x[2]], x = data[[i]][,8][data[[i]][,6] <= limits.x[2]], col = value.dot[i], pch = pch[i], cex = layout$abanico$dimension$pch / 100) } ## calculate KDE width KDE.max <- 0 for(i in 1:length(data)) { KDE.max <- ifelse(test = KDE.max < max(KDE[[i]][,2]), yes = max(KDE[[i]][,2]), no = KDE.max) } ## optionally adjust KDE width for boxplot option if(boxplot == TRUE) { KDE.max <- 1.3 * KDE.max } KDE.scale <- (par()$usr[4] - xy.0[2]) / (KDE.max * 1.05) ## optionally add KDE plot if(kde == TRUE) { ## plot KDE lines for(i in 1:length(data)) { polygon(y = xy.0[2] + KDE[[i]][,2] * KDE.scale, x = (KDE[[i]][,1] - z.central.global) * min(ellipse[,2]), col = kde.fill[i], border = kde.line[i], lwd = 1.7) } ## plot KDE x-axis axis(side = 2, at = c(xy.0[2], y.max), col = layout$abanico$colour$xtck3, col.axis = layout$abanico$colour$xtck3, labels = NA, tcl = -layout$abanico$dimension$xtcl3 / 200, cex = cex) axis(side = 2, at = c(xy.0[2], y.max), labels = as.character(round(c(0, KDE.max.plot), 3)), line = 2 * layout$abanico$dimension$xtck3.line / 100 - 2, lwd = 0, col = layout$abanico$colour$xtck3, family = layout$abanico$font.type$xtck3, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$xtck3], col.axis = layout$abanico$colour$xtck3, cex.axis = layout$abanico$font.size$xtck3/12) mtext(text = paste(xlab[3], " (bw ", round(x = KDE.bw, digits = 3), ")", sep = ""), at = (xy.0[2] + y.max) / 2, side = 2, line = 2.5 * layout$abanico$dimension$xlab3.line / 100, col = layout$abanico$colour$xlab3, family = layout$abanico$font.type$xlab3, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$xlab3], cex = cex * layout$abanico$font.size$xlab3/12) } ## optionally add histogram or dot plot axis if(hist == TRUE) { axis(side = 2, at = c(xy.0[2], y.max), 
labels = as.character(c(0, hist.max.plot)), line = -1 * layout$abanico$dimension$xtck3.line / 100 - 2, lwd = 0, col = layout$abanico$colour$xtck3, family = layout$abanico$font.type$xtck3, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$xtck3], col.axis = layout$abanico$colour$xtck3, cex.axis = layout$abanico$font.size$xtck3/12) ## add label mtext(text = "n", at = (xy.0[2] + y.max) / 2, side = 2, line = -3.5 * layout$abanico$dimension$xlab2.line / 100, col = layout$abanico$colour$xlab2, family = layout$abanico$font.type$xlab2, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$xlab2], cex = cex * layout$abanico$font.size$xlab2/12) ## plot ticks axis(side = 2, at = c(xy.0[2], y.max), col = layout$abanico$colour$xtck2, col.axis = layout$abanico$colour$xtck2, labels = NA, tcl = layout$abanico$dimension$xtcl2 / 200, cex = cex) ## calculate scaling factor for histogram bar heights hist.scale <- (par()$usr[4] - xy.0[2]) / (KDE.max.plot * 1.05) ## draw each bar for each data set for(i in 1:length(data)) { for(j in 1:length(hist.data[[i]]$density)) { ## calculate x-coordinates hist.x.i <- c(xy.0[2], xy.0[2], xy.0[2] + hist.data[[i]]$density[j] * hist.scale, xy.0[2] + hist.data[[i]]$density[j] * hist.scale) ## calculate y-coordinates hist.y.i <- c((hist.data[[i]]$breaks[j] - z.central.global) * min(ellipse[,2]), (hist.data[[i]]$breaks[j + 1] - z.central.global) * min(ellipse[,2]), (hist.data[[i]]$breaks[j + 1] - z.central.global) * min(ellipse[,2]), (hist.data[[i]]$breaks[j] - z.central.global) * min(ellipse[,2])) ## remove data out of z-axis range hist.y.i <- ifelse(hist.y.i < min(ellipse[,1]), min(ellipse[,1]), hist.y.i) hist.y.i <- ifelse(hist.y.i > max(ellipse[,1]), max(ellipse[,1]), hist.y.i) ## draw the bars polygon(y = hist.x.i, x = hist.y.i, col = kde.fill[i], border = kde.line[i]) } } } ## optionally add dot plot if(dots == TRUE) { for(i in 1:length(data)) { for(j in 
1:length(hist.data[[i]]$counts)) { ## calculate scaling factor for histogram bar heights dots.distance <- (par()$usr[4] - (xy.0[2] + par()$cxy[1] * 0.4)) / hist.max.plot dots.x.i <- seq(from = xy.0[2] + par()$cxy[2] * 0.4, by = dots.distance, length.out = hist.data[[i]]$counts[j]) dots.y.i <- rep((hist.data[[i]]$mids[j] - z.central.global) * min(ellipse[,2]), length(dots.x.i)) ## remove data out of z-axis range dots.x.i <- dots.x.i[dots.y.i >= min(ellipse[,1]) & dots.y.i <= max(ellipse[,1])] dots.y.i <- dots.y.i[dots.y.i >= min(ellipse[,1]) & dots.y.i <= max(ellipse[,1])] if(max(c(0, dots.x.i), na.rm = TRUE) >= (par()$usr[4] - par()$cxy[2] * 0.4)) { dots.y.i <- dots.y.i[dots.x.i < (par()$usr[4] - par()$cxy[2] * 0.4)] dots.x.i <- dots.x.i[dots.x.i < (par()$usr[4] - par()$cxy[2] * 0.4)] pch.dots <- c(rep(20, length(dots.x.i) - 1), 15) } else { pch.dots <- rep(20, length(dots.x.i)) } ## plot points points(y = dots.x.i, x = dots.y.i, pch = "-", cex = 0.7 * cex, col = kde.line[i]) } } } ## optionally add box plot if(boxplot == TRUE) { for(i in 1:length(data)) { ## draw median line lines(x = c((boxplot.data[[i]]$stats[3,1] - z.central.global) * min(ellipse[,2]), (boxplot.data[[i]]$stats[3,1] - z.central.global) * min(ellipse[,2])), y = c(min(ellipse[,2]) + KDE.max * 0.91, xy.0[2] + KDE.max * 0.96), lwd = 2, col = kde.line[i]) ## draw p25-p75-polygon polygon(y = c(min(ellipse[,2]) + KDE.max * 0.91, min(ellipse[,2]) + KDE.max * 0.91, xy.0[2] + KDE.max * 0.96, xy.0[2] + KDE.max * 0.96), x = c((boxplot.data[[i]]$stats[2,1] - z.central.global) * min(ellipse[,2]), (boxplot.data[[i]]$stats[4,1] - z.central.global) * min(ellipse[,2]), (boxplot.data[[i]]$stats[4,1] - z.central.global) * min(ellipse[,2]), (boxplot.data[[i]]$stats[2,1] - z.central.global) * min(ellipse[,2])), border = kde.line[i]) ## draw whiskers lines(y = rep(mean(c(min(ellipse[,2]) + KDE.max * 0.91, xy.0[2] + KDE.max * 0.96)), 2), x = c((boxplot.data[[i]]$stats[2,1] - z.central.global) * min(ellipse[,2]), 
(boxplot.data[[i]]$stats[1,1] - z.central.global) * min(ellipse[,2])), col = kde.line[i]) lines(y = c(min(ellipse[,2]) + KDE.max * 0.91, xy.0[2] + KDE.max * 0.96), x = rep((boxplot.data[[i]]$stats[1,1] - z.central.global) * min(ellipse[,2]), 2), col = kde.line[i]) lines(y = rep(mean(c(min(ellipse[,2]) + KDE.max * 0.91, xy.0[2] + KDE.max * 0.96)), 2), x = c((boxplot.data[[i]]$stats[4,1] - z.central.global) * min(ellipse[,2]), (boxplot.data[[i]]$stats[5,1] - z.central.global) * min(ellipse[,2])), col = kde.line[i]) lines(y = c(min(ellipse[,2]) + KDE.max * 0.91, xy.0[2] + KDE.max * 0.96), x = rep((boxplot.data[[i]]$stats[5,1] - z.central.global) * min(ellipse[,2]), 2), col = kde.line[i]) ## draw outlier points points(y = rep(mean(c(min(ellipse[,2]) + KDE.max * 0.91, xy.0[2] + KDE.max * 0.96)), length(boxplot.data[[i]]$out)), x = (boxplot.data[[i]]$out - z.central.global) * min(ellipse[,2]), cex = cex * 0.8, col = kde.line[i]) } } ## optionally add stats, i.e. min, max, median sample text if(length(stats) > 0) { text(y = stats.data[,1], x = stats.data[,2], pos = 2, labels = round(stats.data[,3], 1), family = layout$abanico$font.type$stats, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$stats], cex = cex * layout$abanico$font.size$stats/12, col = layout$abanico$colour$stats) } ## optionally add rug if(rug == TRUE) { for(i in 1:length(rug.coords)) { lines(y = rug.coords[[i]][1,], x = rug.coords[[i]][2,], col = value.rug[data.global[i,10]]) } } ## plot KDE base line lines(y = c(xy.0[2], xy.0[2]), x = c(min(ellipse[,1]), max(ellipse[,1])), col = layout$abanico$colour$border) ## draw border around plot polygon(y = c(limits.x[1], min(ellipse[,2]), y.max, y.max, min(ellipse[,2])), x = c(0, max(ellipse[,1]), max(ellipse[,1]), min(ellipse[,1]), min(ellipse[,1])), border = layout$abanico$colour$border, lwd = 0.8) ## optionally add legend content if(missing(legend) == FALSE) { ## store and change font familiy par.family <- par()$family 
par(family = layout$abanico$font.type$legend) legend(y = legend.pos[2], x = 0.8 * legend.pos[1], xjust = legend.adj[2], yjust = legend.adj[1], legend = legend, pch = pch, col = value.dot, text.col = value.dot, text.font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$legend], cex = cex * layout$abanico$font.size$legend/12, bty = "n") ## restore font family par(family = par.family) } ## optionally add subheader text mtext(text = mtext, side = 3, line = (shift.lines - 2 + 3.5) * layout$abanico$dimension$mtext / 100, col = layout$abanico$colour$mtext, family = layout$abanico$font.type$mtext, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$mtext], cex = cex * layout$abanico$font.size$mtext / 12) ## add summary content for(i in 1:length(data)) { if(summary.pos[1] != "sub") { text(x = summary.pos[1], y = summary.pos[2], adj = summary.adj, labels = label.text[[i]], col = summary.col[i], family = layout$abanico$font.type$summary, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$summary], cex = cex * layout$abanico$font.size$summary / 12) } else { if(mtext == "") { mtext(side = 3, line = (shift.lines - 1 + 3.5 - i) * layout$abanico$dimension$summary / 100 , text = label.text[[i]], col = summary.col[i], family = layout$abanico$font.type$summary, font = (1:4)[c("plain", "bold", "italic", "bold italic") == layout$abanico$font.deco$summary], cex = cex * layout$abanico$font.size$summary / 12) } } } } ##sTeve if(fun & !interactive){sTeve()} ## create numeric output plot.output <- list(xlim = limits.x, ylim = limits.y, zlim = limits.z, polar.box = c(limits.x[1], limits.x[2], min(ellipse[,2]), max(ellipse[,2])), cartesian.box = c(xy.0[1], par()$usr[2], xy.0[2], max(ellipse[,2])), plot.ratio = plot.ratio, data = data, data.global = data.global, KDE = KDE, par = par(no.readonly = TRUE)) ## INTERACTIVE PLOT ---------------------------------------------------------- if (interactive) 
{ if (!requireNamespace("plotly", quietly = TRUE)) stop("The interactive abanico plot requires the 'plotly' package. To install", " this package run 'install.packages('plotly')' in your R console.", call. = FALSE) ##cheat R check (global visible binding error) x <- NA y <- NA ## tidy data ---- data <- plot.output kde <- data.frame(x = data$KDE[[1]][ ,2], y = data$KDE[[1]][ ,1]) # radial scatter plot ---- point.text <- paste0("Measured value:
", data$data.global$De, " ± ", data$data.global$error,"
", "P(",format(data$data.global$precision, digits = 2, nsmall = 1),", ", format(data$data.global$std.estimate, digits = 2, nsmall = 1),")") IAP <- plotly::plot_ly(data = data$data.global, x = precision, y = std.estimate, type = "scatter", mode = "markers", hoverinfo = "text", text = point.text, name = "Points", yaxis = "y") ellipse <- as.data.frame(ellipse) IAP <- plotly::add_trace(IAP, data = ellipse, x = ellipse.x, y = ellipse.y, hoverinfo = "none", name = "z-axis (left)", type = "scatter", mode = "lines", line = list(color = "black", width = 1), yaxis = "y") ellipse.right <- ellipse ellipse.right$ellipse.x <- ellipse.right$ellipse.x * 1/0.75 IAP <- plotly::add_trace(IAP, data = ellipse.right, x = ellipse.x, y = ellipse.y, hoverinfo = "none", name = "z-axis (right)", type = "scatter", mode = "lines", line = list(color = "black", width = 1), yaxis = "y") # z-axis ticks major.ticks.x <- c(data$xlim[2] * 1/0.75, (1 + 0.015 * layout$abanico$dimension$ztcl / 100) * data$xlim[2] * 1/0.75) minor.ticks.x <- c(data$xlim[2] * 1/0.75, (1 + 0.01 * layout$abanico$dimension$ztcl / 100) * data$xlim[2] * 1/0.75) major.ticks.y <- (tick.values.major - z.central.global) * min(ellipse[ ,1]) minor.ticks.y <- (tick.values.minor - z.central.global) * min(ellipse[ ,1]) # major z-tick lines for (i in 1:length(major.ticks.y)) { major.tick <- data.frame(x = major.ticks.x, y = rep(major.ticks.y[i], 2)) IAP <- plotly::add_trace(IAP, data = major.tick, x = x, y = y, showlegend = FALSE, hoverinfo = "none", type = "scatter", mode = "lines", line = list(color = "black", width = 1), yaxis = "y") } # minor z-tick lines for (i in 1:length(minor.ticks.y)) { minor.tick <- data.frame(x = minor.ticks.x, y = rep(minor.ticks.y[i], 2)) IAP <- plotly::add_trace(IAP, data = minor.tick, hoverinfo = "none", x = x, y = y, showlegend = FALSE, type = "scatter", mode = "lines", line = list(color = "black", width = 1), yaxis = "y") } # z-tick label tick.text <- paste(" ", exp(tick.values.major)) tick.pos <- 
data.frame(x = major.ticks.x[2], y = major.ticks.y) IAP <- plotly::add_trace(IAP, data = tick.pos, x = x, y = y, showlegend = FALSE, text = tick.text, textposition = "right", hoverinfo = "none", type = "scatter", mode = "text", yaxis = "y") # Central Line ---- central.line <- data.frame(x = c(-100, data$xlim[2]*1/0.75), y = c(0, 0)) central.line.text <- paste0("Central value: ", format(exp(z.central.global), digits = 2, nsmall = 1)) IAP <- plotly::add_trace(IAP, data = central.line, x = x, y = y, name = "Central line", type = "scatter", mode = "lines", hoverinfo = "text", text = central.line.text, yaxis = "y", line = list(color = "black", width = 0.5, dash = 2)) # KDE plot ---- KDE.x <- xy.0[1] + KDE[[1]][ ,2] * KDE.scale KDE.y <- (KDE[[1]][ ,1] - z.central.global) * min(ellipse[,1]) KDE.curve <- data.frame(x = KDE.x, y = KDE.y) KDE.curve <- KDE.curve[KDE.curve$x != xy.0[1], ] KDE.text <- paste0("Value:", format(exp(KDE[[1]][ ,1]), digits = 2, nsmall = 1), "
", "Density:", format(KDE[[1]][ ,2], digits = 2, nsmall = 1)) IAP <- plotly::add_trace(IAP, data = KDE.curve, hoverinfo = "text", text = KDE.text, x = x, y = y, name = "KDE", type = "scatter", mode = "lines", line = list(color = "red"), yaxis = "y") # set layout ---- IAP <- plotly::layout(IAP, hovermode = "closest", dragmode = "pan", xaxis = list(range = c(data$xlim[1], data$xlim[2] * 1/0.65), zeroline = FALSE, showgrid = FALSE, tickmode = "array", tickvals = x.axis.ticks), yaxis = list(range = data$ylim, zeroline = FALSE, showline = FALSE, showgrid = FALSE, tickmode = "array", tickvals = c(-2, 0, 2)), shapes = list(list(type = "rect", # 2 sigma bar x0 = 0, y0 = -2, x1 = bars[1,3], y1 = 2, xref = "x", yref = "y", fillcolor = "grey", opacity = 0.2)) ) # show and return interactive plot ---- #print(plotly::subplot(IAP, IAP.kde)) print(IAP) return(IAP) } ## restore initial cex par(cex = cex_old) ## create and return numeric output if(output == TRUE) { return(invisible(plot.output)) } } Luminescence/R/plot_DRTResults.R0000644000176200001440000005634513501731627016303 0ustar liggesusers#' Visualise dose recovery test results #' #' The function provides a standardised plot output for dose recovery test #' measurements. #' #' Procedure to test the accuracy of a measurement protocol to reliably #' determine the dose of a specific sample. Here, the natural signal is erased #' and a known laboratory dose administered which is treated as unknown. Then #' the De measurement is carried out and the degree of congruence between #' administered and recovered dose is a measure of the protocol's accuracy for #' this sample.\cr #' In the plot the normalised De is shown on the y-axis, i.e. obtained De/Given Dose. #' #' @param values [RLum.Results-class] or [data.frame] (**required**): #' input values containing at least De and De error. To plot #' more than one data set in one figure, a `list` of the individual data #' sets must be provided (e.g. `list(dataset.1, dataset.2)`). 
#' #' @param given.dose [numeric] (*optional*): #' given dose used for the dose recovery test to normalise data. #' If only one given dose is provided this given dose is valid for all input #' data sets (i.e., `values` is a list). Otherwise a given dose for each input #' data set has to be provided (e.g., `given.dose = c(100,200)`). #' If `given.dose` is `NULL` the values are plotted without normalisation #' (might be useful for preheat plateau tests). #' **Note:** Unit has to be the same as from the input values (e.g., Seconds or #' Gray). #' #' @param error.range [numeric]: #' symmetric error range in percent will be shown as dashed lines in the plot. #' Set `error.range` to 0 to avoid plotting of error ranges. #' #' @param preheat [numeric]: #' optional vector of preheat temperatures to be used for grouping the De values. #' If specified, the temperatures are assigned to the x-axis. #' #' @param boxplot [logical]: #' optionally plot values that are grouped by preheat temperature as boxplots. #' Only possible when `preheat` vector is specified. #' #' @param mtext [character]: #' additional text below the plot title. #' #' @param summary [character] (*optional*): #' adds numerical output to the plot. Can be one or more out of: #' - `"n"` (number of samples), #' - `"mean"` (mean De value), #' - `"mean.weighted"` (error-weighted mean), #' - `"median"` (median of the De values), #' - `"sd.rel"` (relative standard deviation in percent), #' - `"sd.abs"` (absolute standard deviation), #' - `"se.rel"` (relative standard error) and #' - `"se.abs"` (absolute standard error) #' #' and all other measures returned by the function [calc_Statistics]. #' #' @param summary.pos [numeric] or [character] (*with default*): #' optional position coordinates or keyword (e.g. `"topright"`) #' for the statistical summary. Alternatively, the keyword `"sub"` may be #' specified to place the summary below the plot header. However, this latter #' option is only possible if `mtext` is not used. 
#' #' @param legend [character] vector (*optional*): #' legend content to be added to the plot. #' #' @param legend.pos [numeric] or [character] (*with default*): #' optional position coordinates or keyword (e.g. `"topright"`) for the #' legend to be plotted. #' #' @param par.local [logical] (*with default*): #' use local graphical parameters for plotting, e.g. the plot is shown in one #' column and one row. If `par.local = FALSE`, global parameters are inherited, #' i.e. parameters provided via `par()` work. #' #' @param na.rm [logical]: indicating whether `NA` values are #' removed before plotting from the input data set #' #' @param ... further arguments and graphical parameters passed to [plot], supported are: #' `xlab`, `ylab`, `xlim`, `ylim`, `main`, `cex`, `las` and `pch` #' #' @return A plot is returned. #' #' @note #' Further data and plot arguments can be added by using the appropriate R #' commands. #' #' @section Function version: 0.1.13 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060 - Université Bordeaux Montaigne (France)\cr #' Michael Dietze, GFZ Potsdam (Germany) #' #' @seealso [plot] #' #' @references #' Wintle, A.G., Murray, A.S., 2006. A review of quartz optically #' stimulated luminescence characteristics and their relevance in #' single-aliquot regeneration dating protocols. Radiation Measurements, 41, #' 369-391. 
#' #' @keywords dplot #' #' @examples #' #' ## read example data set and misapply them for this plot type #' data(ExampleData.DeValues, envir = environment()) #' #' ## plot values #' plot_DRTResults(values = ExampleData.DeValues$BT998[7:11,], #' given.dose = 2800, mtext = "Example data") #' #' ## plot values with legend #' plot_DRTResults(values = ExampleData.DeValues$BT998[7:11,], #' given.dose = 2800, #' legend = "Test data set") #' #' ## create and plot two subsets with randomised values #' x.1 <- ExampleData.DeValues$BT998[7:11,] #' x.2 <- ExampleData.DeValues$BT998[7:11,] * c(runif(5, 0.9, 1.1), 1) #' #' plot_DRTResults(values = list(x.1, x.2), #' given.dose = 2800) #' #' ## some more user-defined plot parameters #' plot_DRTResults(values = list(x.1, x.2), #' given.dose = 2800, #' pch = c(2, 5), #' col = c("orange", "blue"), #' xlim = c(0, 8), #' ylim = c(0.85, 1.15), #' xlab = "Sample aliquot") #' #' ## plot the data with user-defined statistical measures as legend #' plot_DRTResults(values = list(x.1, x.2), #' given.dose = 2800, #' summary = c("n", "mean.weighted", "sd")) #' #' ## plot the data with user-defined statistical measures as sub-header #' plot_DRTResults(values = list(x.1, x.2), #' given.dose = 2800, #' summary = c("n", "mean.weighted", "sd"), #' summary.pos = "sub") #' #' ## plot the data grouped by preheat temperatures #' plot_DRTResults(values = ExampleData.DeValues$BT998[7:11,], #' given.dose = 2800, #' preheat = c(200, 200, 200, 240, 240)) #' ## read example data set and misapply them for this plot type #' data(ExampleData.DeValues, envir = environment()) #' #' ## plot values #' plot_DRTResults(values = ExampleData.DeValues$BT998[7:11,], #' given.dose = 2800, mtext = "Example data") #' ## plot two data sets grouped by preheat temperatures #' plot_DRTResults(values = list(x.1, x.2), #' given.dose = 2800, #' preheat = c(200, 200, 200, 240, 240)) #' #' ## plot the data grouped by preheat temperatures as boxplots #' plot_DRTResults(values = 
ExampleData.DeValues$BT998[7:11,],
#'                 given.dose = 2800,
#'                 preheat = c(200, 200, 200, 240, 240),
#'                 boxplot = TRUE)
#'
#' @md
#' @export
plot_DRTResults <- function(
  values,
  given.dose = NULL,
  error.range = 10,
  preheat,
  boxplot = FALSE,
  mtext,
  summary,
  summary.pos,
  legend,
  legend.pos,
  par.local = TRUE,
  na.rm = FALSE,
  ...
){

  ## Validity checks ----------------------------------------------------------

  ##avoid crash for wrongly set boxplot argument: grouping as boxplots needs
  ##the preheat temperatures to define the groups
  if(missing(preheat) & boxplot == TRUE){
    warning("[plot_DRTResults()] Option 'boxplot' not valid without any value in 'preheat'. Reset to FALSE.")
    boxplot <- FALSE
  }

  ##fall-back defaults for all optional arguments without formal defaults
  if(missing(summary) == TRUE) {summary <- NULL}
  if(missing(summary.pos) == TRUE) {summary.pos <- "topleft"}
  if(missing(legend.pos) == TRUE) {legend.pos <- "topright"}
  if(missing(mtext) == TRUE) {mtext <- ""}

  ## Homogenise and check input data:
  ## a single data.frame/RLum.Results is wrapped into a one-element list so
  ## the remaining code can treat single and multiple data sets uniformly
  if(is(values, "list") == FALSE) {values <- list(values)}

  for(i in 1:length(values)) {
    if(is(values[[i]], "RLum.Results")==FALSE &
         is(values[[i]], "data.frame")==FALSE){
      stop(paste("[plot_DRTResults()] Wrong input data format",
                 "(!= 'data.frame' or 'RLum.Results')"))
    } else {
      ##extract the first two columns (De, De error) from RLum.Results objects
      if(is(values[[i]], "RLum.Results")==TRUE){
        values[[i]] <- get_RLum(values[[i]])[,1:2]
      }
    }
  }

  ## Check input arguments ----------------------------------------------------
  for(i in 1:length(values)) {

    ##check for preheat temperature values: one temperature per De value
    if(missing(preheat) == FALSE) {
      if(length(preheat) != nrow(values[[i]])){
        stop("[plot_DRTResults()] number of preheat temperatures != De values!")
      }
    }

    ##remove NA values; yes Micha, it is not that simple
    if(na.rm == TRUE){

      ##currently we assume that all input data sets comprise a similar set of
      ##data; the preheat vector is only trimmed once (on the last data set)
      if(!missing(preheat) & i == length(values)){

        ##find and mark NA value indices (NA in either De or De error column)
        temp.NA.values <- unique(c(which(is.na(values[[i]][,1])),
                                   which(is.na(values[[i]][,2]))))

        ##remove corresponding preheat entries
        preheat <- preheat[-temp.NA.values]
      }

      values[[i]] <- na.exclude(values[[i]])
    }
  }

  ## create global data set (row-bound values and per-set row counts)
  values.global <- NULL
  n.values <- NULL

  for(i in 1:length(values)) {
    values.global <- rbind(values.global, values[[i]])
    n.values <- c(n.values, nrow(values[[i]]))
  }

  ## Set plot format parameters -----------------------------------------------
  extraArgs <- list(...) # read out additional arguments list

  main <- if("main" %in% names(extraArgs)) {extraArgs$main} else
    {"Dose recovery test"}

  xlab <- if("xlab" %in% names(extraArgs)) {extraArgs$xlab} else {
    ifelse(missing(preheat) == TRUE, "# Aliquot", "Preheat temperature [\u00B0C]")
  }

  ylab <- if("ylab" %in% names(extraArgs)) {extraArgs$ylab} else
    {if(!is.null(given.dose)){
      expression(paste("Normalised ", D[e], sep=""))
    }else{expression(paste(D[e], " [s]"), sep = "")}}

  xlim <- if("xlim" %in% names(extraArgs)) {extraArgs$xlim} else
    {c(1, max(n.values) + 1)}

  ylim <- if("ylim" %in% names(extraArgs)) {extraArgs$ylim} else
    {c(0.75, 1.25)}
  #check below for further corrections if boundaries exceed set range

  cex <- if("cex" %in% names(extraArgs)) {extraArgs$cex} else {1}

  pch <- if("pch" %in% names(extraArgs)) {extraArgs$pch} else {
    abs(seq(from = 20, to = -100))
  }

  ##axis labels
  las <- if("las" %in% names(extraArgs)) extraArgs$las else 0

  fun <- if("fun" %in% names(extraArgs)) {extraArgs$fun} else {FALSE}

  ## calculations and settings-------------------------------------------------

  ## normalise data if given.dose is given
  if(!is.null(given.dose)){
    if(length(given.dose) > 1){

      ##NOTE(fix): previously only length(values) < length(given.dose) was
      ##caught; with fewer doses than data sets, given.dose[i] silently
      ##returned NA and all values were normalised to NA. Require a 1:1 match.
      if(length(values) != length(given.dose)){
        stop("[plot_DRTResults()] length of 'given.dose' != number of input data sets!",
             call. = FALSE)
      }

      for(i in 1:length(values)) {
        values[[i]] <- values[[i]]/given.dose[i]
      }

    }else{
      for(i in 1:length(values)) {
        values[[i]] <- values[[i]]/given.dose
      }
    }
  }

  ##correct ylim for data sets which exceed the default boundaries
  ##(only if the user did not set ylim explicitly)
  if((max(sapply(1:length(values), function(x){
      max(values[[x]][,1], na.rm = TRUE)}))>1.25 |
     min(sapply(1:length(values), function(x){
      min(values[[x]][,1], na.rm = TRUE)}))<0.75) &
     ("ylim" %in% names(extraArgs)) == FALSE){

    ylim <- c(
      min(sapply(1:length(values), function(x){
        min(values[[x]][,1], na.rm = TRUE) - max(values[[x]][,2], na.rm = TRUE)})),
      max(sapply(1:length(values), function(x){
        max(values[[x]][,1], na.rm = TRUE) + max(values[[x]][,2], na.rm = TRUE)})))
  }

  ## optionally group data by preheat temperature
  if(missing(preheat) == FALSE) {

    ##unique preheat temperatures define the groups
    modes <- as.numeric(rownames(as.matrix(table(preheat))))

    ##lists start with a dummy NA element that is dropped afterwards
    values.preheat <- list(NA)
    values.boxplot <- list(NA)

    for(i in 1:length(modes)) {
      for(j in 1:length(values)) {
        ##De/De error plus the matching preheat temperature as third column
        values.preheat[[length(values.preheat) + 1]] <-
          cbind(values[[j]][preheat == modes[i],],
                preheat[preheat == modes[i]])

        ##De values only, for the boxplot representation
        values.boxplot[[length(values.boxplot) + 1]] <-
          values[[j]][preheat == modes[i],1]
      }
    }

    ##drop the dummy first elements
    values.preheat[[1]] <- NULL
    values.boxplot[[1]] <- NULL

    modes.plot <- rep(modes, each = length(values))

  } else {modes <- 1}

  ## assign colour indices
  col <- if("col" %in% names(extraArgs)) {extraArgs$col} else {
    if(missing(preheat) == TRUE) {
      rep(seq(from = 1, to = length(values)), each = length(modes))
    } else {
      rep(seq(from = 1, to = length(values)), length(modes))
    }
  }

  ## calculate and paste statistical summary
  if(summary.pos[1] != "sub") {

    label.text <- lapply(1:length(values), function(i) {
      .create_StatisticalSummaryText(
        x = calc_Statistics(values[[i]]),
        keywords = summary,
        digits = 2,
        sep = " \n",
        ##prefix shifts each data set's label down by the previous labels
        prefix = paste(rep("\n", (i - 1) * length(summary)), collapse = "")
      )
    })

  }else{
    label.text <- lapply(1:length(values), function(i) {
      .create_StatisticalSummaryText(
        x = calc_Statistics(values[[i]]),
        keywords = summary,
        digits = 2,
        sep = " | "
      )
    })
  }

  ## convert keywords into summary placement coordinates
  ##NOTE: summary.pos was defaulted above when missing, so the missing()
  ##branch below is defensive only
  if(missing(summary.pos) == TRUE) {
    summary.pos <- c(xlim[1], ylim[2])
    summary.adj <- c(0, 1)
  } else if(length(summary.pos) == 2) {
    summary.pos <- summary.pos
    summary.adj <- c(0, 1)
  } else if(summary.pos[1] == "topleft") {
    summary.pos <- c(xlim[1], ylim[2])
    summary.adj <- c(0, 1)
  } else if(summary.pos[1] == "top") {
    summary.pos <- c(mean(xlim), ylim[2])
    summary.adj <- c(0.5, 1)
  } else if(summary.pos[1] == "topright") {
    summary.pos <- c(xlim[2], ylim[2])
    summary.adj <- c(1, 1)
  } else if(summary.pos[1] == "left") {
    summary.pos <- c(xlim[1], mean(ylim))
    summary.adj <- c(0, 0.5)
  } else if(summary.pos[1] == "center") {
    summary.pos <- c(mean(xlim), mean(ylim))
    summary.adj <- c(0.5, 0.5)
  } else if(summary.pos[1] == "right") {
    summary.pos <- c(xlim[2], mean(ylim))
    summary.adj <- c(1, 0.5)
  }else if(summary.pos[1] == "bottomleft") {
    summary.pos <- c(xlim[1], ylim[1])
    summary.adj <- c(0, 0)
  } else if(summary.pos[1] == "bottom") {
    summary.pos <- c(mean(xlim), ylim[1])
    summary.adj <- c(0.5, 0)
  } else if(summary.pos[1] == "bottomright") {
    summary.pos <- c(xlim[2], ylim[1])
    summary.adj <- c(1, 0)
  }

  ## convert keywords into legend placement coordinates
  ##NOTE: legend.pos was defaulted above when missing, see note for summary.pos
  if(missing(legend.pos) == TRUE) {
    legend.pos <- c(xlim[2], ylim[2])
    legend.adj <- c(1, 1)
  } else if(length(legend.pos) == 2) {
    legend.pos <- legend.pos
    legend.adj <- c(0, 1)
  } else if(legend.pos[1] == "topleft") {
    legend.pos <- c(xlim[1], ylim[2])
    legend.adj <- c(0, 1)
  } else if(legend.pos[1] == "top") {
    legend.pos <- c(mean(xlim), ylim[2])
    legend.adj <- c(0.5, 1)
  } else if(legend.pos[1] == "topright") {
    legend.pos <- c(xlim[2], ylim[2])
    legend.adj <- c(1, 1)
  } else if(legend.pos[1] == "left") {
    legend.pos <- c(xlim[1], mean(ylim))
    legend.adj <- c(0, 0.5)
  } else if(legend.pos[1] == "center") {
    legend.pos <- c(mean(xlim), mean(ylim))
    legend.adj <- c(0.5, 0.5)
  } else if(legend.pos[1] == "right") {
    legend.pos <- c(xlim[2], mean(ylim))
    legend.adj <- c(1, 0.5)
  } else if(legend.pos[1] == "bottomleft") {
    legend.pos <- c(xlim[1], ylim[1])
    legend.adj <- c(0, 0)
  } else if(legend.pos[1] == "bottom") {
    legend.pos <- c(mean(xlim), ylim[1])
    legend.adj <- c(0.5, 0)
  } else if(legend.pos[1] == "bottomright") {
    legend.pos <- c(xlim[2], ylim[1])
    legend.adj <- c(1, 0)
  }

  ## Plot output --------------------------------------------------------------

  ## determine number of subheader lines to shift the plot
  shift.lines <- if(summary.pos[1] == "sub" & mtext == "") {
    length(label.text) - 1
  } else {1}

  ## setup plot area
  if(par.local){
    if (shift.lines <= 0) shift.lines <- 1
    par.default <- par()[c("mfrow", "cex", "oma")]
    par(mfrow = c(1, 1), cex = cex, oma = c(0, 1, shift.lines - 1, 1))
  }

  ## optionally plot values and error bars
  if(boxplot == FALSE) {

    ## plot data and error
    if(missing(preheat) == TRUE) {

      ## create empty plot
      plot(NA,NA,
           xlim = xlim,
           ylim = ylim,
           xlab = xlab,
           ylab = ylab,
           xaxt = "n",
           las = las,
           main = "")

      ##add x-axis ... this avoids digits in the axis labelling
      axis(side = 1,
           at = 1:(nrow(values[[1]])+1),
           labels = 1:(nrow(values[[1]])+1),
           las = las)

      ## add title
      title(main = main, line = shift.lines + 2)

      ## add additional lines
      if (!is.null(given.dose)) {
        abline(h = 1)

        if (error.range > 0) {
          ## error range lines
          abline(h = 1 * (1 + error.range / 100), lty = 2)
          abline(h = 1 * (1 - error.range / 100), lty = 2)

          ## error range labels
          text(par()$usr[2], (1 + error.range / 100) + 0.02,
               paste("+", error.range , " %", sep = ""), pos = 2, cex = 0.8)
          text(par()$usr[2], (1 - error.range / 100) - 0.02,
               paste("-", error.range , "%", sep = ""), pos = 2, cex = 0.8)
        }
      }

      ## add data and error bars
      for(i in 1:length(values)) {
        points(x = c(1:nrow(values[[i]])),
               y = values[[i]][,1],
               pch = if(nrow(values[[i]]) == length(pch)){ pch } else { pch[i] },
               col = if(nrow(values[[i]]) == length(col)){ col } else { col[i] },
               cex = 1.2 * cex)

        arrows(c(1:nrow(values[[i]])),
               values[[i]][,1] + values[[i]][,2],
               c(1:nrow(values[[i]])),
               values[[i]][,1] - values[[i]][,2],
               angle = 90,
               length = 0.075,
               code = 3,
               col = if(nrow(values[[i]]) == length(col)){ col } else { col[i] })

        ## add summary content
        if(summary.pos[1] != "sub") {
          text(x = summary.pos[1],
               y = summary.pos[2],
               adj = summary.adj,
               labels = label.text[[i]],
               cex = 0.8 * cex,
               col = if(nrow(values[[i]]) == length(col)){ "black" } else { col[i] })
        } else {
          if(mtext == "") {
            mtext(side = 3,
                  line = - i + 2.5,
                  text = label.text[[i]],
                  col = if(nrow(values[[i]]) == length(col)){ "black" } else { col[i] },
                  cex = cex * 0.8)
          }
        }
      }

    } else {
      ## option for provided preheat data

      ## create empty plot
      plot(NA,NA,
           xlim = c(min(modes.plot) * 0.9, max(modes.plot) * 1.1),
           ylim = ylim,
           xlab = xlab,
           ylab = ylab,
           las = las,
           main = "",
           axes = FALSE,
           frame.plot = TRUE)

      ## add axes
      axis(1, at = modes.plot, labels = modes.plot)
      axis(2)

      ## add title
      title(main = main, line = shift.lines + 2)

      ## add additional lines
      if (!is.null(given.dose)) {
        abline(h = 1)

        if (error.range > 0) {
          ## error range lines
          abline(h = 1 * (1 + error.range / 100), lty = 2)
          abline(h = 1 * (1 - error.range / 100), lty = 2)

          ## error range labels
          text(par()$usr[2], (1 + error.range / 100) + 0.02,
               paste("+", error.range , " %", sep = ""), pos = 2, cex = 0.8)
          text(par()$usr[2], (1 - error.range / 100) - 0.02,
               paste("-", error.range , "%", sep = ""), pos = 2, cex = 0.8)
        }
      }

      ## plot values (third column holds the preheat temperature = x position)
      for(i in 1:length(values.preheat)) {
        points(x = values.preheat[[i]][,3],
               y = values.preheat[[i]][,1],
               pch = pch[i],
               col = col[i],
               cex = 1.2 * cex)

        arrows(values.preheat[[i]][,3],
               values.preheat[[i]][,1] + values.preheat[[i]][,2],
               values.preheat[[i]][,3],
               values.preheat[[i]][,1] - values.preheat[[i]][,2],
               angle = 90,
               length = 0.075,
               code = 3,
               col = col[i])
      }
    }
  }

  ## optionally, plot boxplot
  if(boxplot == TRUE) {

    ## create empty plot
    boxplot(values.boxplot,
            names = modes.plot,
            ylim = ylim,
            xlab = xlab,
            ylab = ylab,
            las = las,
            xaxt = "n",
            main = "",
            border = col)

    ## add axis label, if necessary
    if (length(modes.plot) == 1) {
      axis(side = 1, at = 1, labels = modes.plot, las = las)
    } else if (length(modes.plot) > length(unique(modes.plot))){

      ##place one tick in the middle of each temperature group
      ticks <- seq(
        from = 1 + ((length(values.boxplot)/length(unique(modes.plot)) - 1)/2),
        to = length(values.boxplot),
        by = length(values.boxplot)/length(unique(modes.plot)))

      axis(side = 1,
           at = ticks,
           las = las,
           labels = unique(modes.plot))

      ##polygon for a better graphical representation of the groups
      polygon.x <- seq(1, length(values.boxplot),
                       by = length(values.boxplot) / length(unique(modes.plot)))
      polygon.step <- unique(diff(polygon.x) - 1)

      for (x.plyg in polygon.x) {
        polygon(
          x = c(x.plyg, x.plyg, x.plyg + polygon.step, x.plyg + polygon.step),
          y = c(par()$usr[3],
                ylim[1] - (ylim[1] - par()$usr[3]) / 2,
                ylim[1] - (ylim[1] - par()$usr[3]) / 2,
                par()$usr[3]),
          col = "grey",
          border = "grey")
      }

    }else{
      axis(side = 1,
           at = 1:length(unique(modes.plot)),
           labels = unique(modes.plot),
           las = las)
    }

    ## add title
    title(main = main, line = shift.lines + 2)

    ## add additional lines
    if(!is.null(given.dose)){
      abline(h = 1)

      if(error.range > 0){
        ## error range lines
        abline(h = 1 * (1 + error.range / 100), lty = 2)
        abline(h = 1 * (1 - error.range / 100), lty = 2)

        ## error range labels
        text(par()$usr[2], (1 + error.range / 100) + 0.02,
             paste("+", error.range ," %", sep = ""), pos = 2, cex = 0.8)
        text(par()$usr[2], (1 - error.range / 100) - 0.02,
             paste("-", error.range ,"%", sep = ""), pos = 2, cex = 0.8)
      }
    }

    ## plot data and error
    for(i in 1:length(values)) {

      ## add summary content
      if(summary.pos[1] != "sub") {
        text(x = summary.pos[1],
             y = summary.pos[2],
             adj = summary.adj,
             labels = label.text[[i]],
             cex = 0.8 * cex,
             col = if(nrow(values[[i]]) == length(col)){ "black" } else { col[i] })
      } else {
        if(mtext == "") {
          mtext(side = 3,
                line = - i + 2.5,
                text = label.text[[i]],
                col = if(nrow(values[[i]]) == length(col)){ "black" } else { col[i] },
                cex = cex * 0.8)
        }
      }
    }
  }

  ## optionally add legend content
  if(missing(legend) == FALSE) {
    legend(x = legend.pos[1],
           y = legend.pos[2],
           xjust = legend.adj[1],
           yjust = legend.adj[2],
           legend = legend,
           col = unique(col),
           pch = unique(pch),
           lty = 1,
           cex = cex * 0.8)
  }

  ## optionally add subheader text
  mtext(side = 3,
        line = shift.lines,
        text = mtext,
        cex = 0.8 * cex)

  ##reset par()
  if(par.local){
    par(par.default)
    rm(par.default)
  }

  ##FUN by R Luminescence Team
  if(fun == TRUE) {sTeve()}
}
Luminescence/R/RLum.Analysis-class.R0000644000176200001440000006533613540732472017000 0ustar liggesusers#' @include get_RLum.R set_RLum.R length_RLum.R structure_RLum.R names_RLum.R smooth_RLum.R
NULL

#' Class `"RLum.Analysis"`
#'
#' Object class to represent analysis data for protocol analysis, i.e. all curves,
#' spectra etc. from one measurement. Objects from this class are produced,
#' by e.g. [read_XSYG2R], [read_Daybreak2R]
#'
#'
#' @name RLum.Analysis-class
#'
#' @docType class
#'
#' @slot protocol
#' Object of class [character] describing the applied measurement protocol
#'
#' @slot records
#' Object of class [list] containing objects of class [RLum.Data-class]
#'
#' @note
#' The method [structure_RLum] is currently just available for objects
#' containing [RLum.Data.Curve-class].
#'
#' @section Objects from the Class:
#' Objects can be created by calls of the form `set_RLum("RLum.Analysis", ...)`.
#' #' @section Class version: 0.4.15 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) #' #' @seealso [Risoe.BINfileData2RLum.Analysis], #' [Risoe.BINfileData-class], [RLum-class] #' #' @keywords classes methods #' #' @examples #' #' showClass("RLum.Analysis") #' #' ##set empty object #' set_RLum(class = "RLum.Analysis") #' #' ###use example data #' ##load data #' data(ExampleData.RLum.Analysis, envir = environment()) #' #' ##show curves in object #' get_RLum(IRSAR.RF.Data) #' #' ##show only the first object, but by keeping the object #' get_RLum(IRSAR.RF.Data, record.id = 1, drop = FALSE) #' #' @keywords internal #' #' @md #' @export setClass("RLum.Analysis", slots = list( protocol = "character", records = "list" ), contains = "RLum", prototype = list ( protocol = NA_character_, records = list() ) ) # as() ----------------------------------------------------------------------------------------- ##LIST ##COERCE RLum.Analyse >> list AND list >> RLum.Analysis #' as() - RLum-object coercion #' #' for `[RLum.Analysis-class]` #' #' **[RLum.Analysis-class]** #' #' \tabular{ll}{ #' **from** \tab **to**\cr #' `list` \tab `list`\cr #' } #' #' Given that the [list] consits of [RLum.Analysis-class] objects. 
#' #' @md #' @name as setAs("list", "RLum.Analysis", function(from,to){ new(to, protocol = NA_character_, records = from) }) setAs("RLum.Analysis", "list", function(from){ lapply(1:length(from@records), function(x){ from@records[[x]] }) }) # show() -------------------------------------------------------------------------------------- #' @describeIn RLum.Analysis #' Show structure of `RLum.Analysis` object #' #' @md #' @export setMethod("show", signature(object = "RLum.Analysis"), function(object){ ##print cat("\n [RLum.Analysis-class]") ##show slot originator, for compatibily reasons with old example data, here ##a check if(.hasSlot(object, "originator")){cat("\n\t originator:", paste0(object@originator,"()"))} cat("\n\t protocol:", object@protocol) cat("\n\t additional info elements: ", if(.hasSlot(object, "info")){length(object@info)}else{0}) cat("\n\t number of records:", length(object@records)) #skip this part if nothing is included in the object if(length(object@records) > 0){ ##get object class types temp <- vapply(object@records, function(x){ is(x)[1] }, FUN.VALUE = vector(mode = "character", length = 1)) ##print object class types lapply(1:length(table(temp)), function(x){ ##show RLum class type cat("\n\t .. :", names(table(temp)[x]),":",table(temp)[x]) ##show structure ##set width option ... 
just an implementation for the tutorial output ifelse(getOption("width")<=50, temp.width <- 4, temp.width <- 7) ##set linebreak variable linebreak <- FALSE env <- environment() ##create terminal output terminal_output <- vapply(1:length(object@records), function(i) { if (names(table(temp)[x]) == is(object@records[[i]])[1]) { if (i %% temp.width == 0 & i != length(object@records)) { assign(x = "linebreak", value = TRUE, envir = env) } ##FIRST first <- paste0("#", i, " ", object@records[[i]]@recordType) ##LAST if (i < length(object@records) && !is.null(object@records[[i]]@info[["parentID"]]) && !is.null(object@records[[i + 1]]@info[["parentID"]]) && (object@records[[i]]@info[["parentID"]] == object@records[[i+1]]@info[["parentID"]])) { last <- " <> " }else { if(i == length(object@records)){ last <- "" }else if (linebreak){ last <- "\n\t .. .. : " assign(x = "linebreak", value = FALSE, envir = env) }else{ last <- " | " } } return(paste0(first,last)) }else{ return("") } }, FUN.VALUE = vector(mode = "character", length = 1)) ##print on screen, differentiate between records with many curves or just one if(any(grepl(terminal_output, pattern = "<>", fixed = TRUE))){ cat("\n\t .. .. : ", gsub(pattern = "|", replacement = "\n\t .. .. :", x = terminal_output, fixed = TRUE), sep = "") } else{ cat("\n\t .. .. : ", terminal_output, sep = "") } }) }else{ cat("\n\t >> This is an empty object, which cannot be used for further analysis! <<") } } )##end show method # set_RLum() ---------------------------------------------------------------------------------- #' @describeIn RLum.Analysis #' Construction method for [RLum.Analysis-class] objects. #' #' @param class [`set_RLum`] [character] (**required**): #' name of the `RLum` class to be created #' #' @param originator [`set_RLum`] [character] (*automatic*): #' contains the name of the calling function (the function that produces this object); #' can be set manually. 
#' #' @param .uid [`set_RLum`] [character] (*automatic*): #' sets an unique ID for this object using the internal C++ function `create_UID`. #' #' @param .pid [`set_RLum`] [character] (*with default*): #' option to provide a parent id for nesting at will. #' #' @param protocol [`set_RLum`] [character] (*optional*): #' sets protocol type for analysis object. Value may be used by subsequent analysis functions. #' #' @param records [`set_RLum`] [list] (**required**): #' list of [RLum.Analysis-class] objects #' #' @param info [`set_RLum`] [list] (*optional*): #' a list containing additional info data for the object #' #' **`set_RLum`**: #' #' Returns an [RLum.Analysis-class] object. #' #' @md #' @export setMethod( "set_RLum", signature = "RLum.Analysis", definition = function( class, originator, .uid, .pid, protocol = NA_character_, records = list(), info = list()) { ##produce empty class object newRLumAnalysis <- new(Class = "RLum.Analysis") ##allow self set to reset an RLum.Analysis object if(class(records) == "RLum.Analysis"){ #fill slots (this is much faster than the old code!) newRLumAnalysis@protocol <- if(missing(protocol)){records@protocol}else{protocol} newRLumAnalysis@originator <- originator newRLumAnalysis@records <- records@records newRLumAnalysis@info <- if(missing(info)){records@info}else{c(records@info, info)} newRLumAnalysis@.uid <- .uid newRLumAnalysis@.pid <- if(missing(.pid)){records@.uid}else{.pid} }else{ #fill slots (this is much faster than the old code!) newRLumAnalysis@protocol <- protocol newRLumAnalysis@originator <- originator newRLumAnalysis@records <- records newRLumAnalysis@info <- info newRLumAnalysis@.uid <- .uid newRLumAnalysis@.pid <- .pid } return(newRLumAnalysis) } ) # get_RLum() ---------------------------------------------------------------------------------- #' @describeIn RLum.Analysis #' Accessor method for RLum.Analysis object. 
#' #' The slots record.id, recordType, curveType and RLum.type are optional to allow for records #' limited by their id (list index number), their record type (e.g. recordType = "OSL") #' or object type. #' #' Example: curve type (e.g. curveType = "predefined" or curveType ="measured") #' #' The selection of a specific RLum.type object superimposes the default selection. #' Currently supported objects are: RLum.Data.Curve and RLum.Data.Spectrum #' #' @param object [`get_RLum`]: [`names_RLum`], [`length_RLum`], [`structure_RLum`] (**required**): #' an object of class [RLum.Analysis-class] #' #' @param record.id [`get_RLum`]: [numeric] or [logical] (*optional*): #' IDs of specific records. If of type `logical` the entire id range is assumed #' and `TRUE` and `FALSE` indicates the selection. #' #' @param recordType [`get_RLum`]: [character] (*optional*): #' record type (e.g., "OSL"). Can be also a vector, for multiple matching, #' e.g., `recordType = c("OSL", "IRSL")` #' #' @param curveType [`get_RLum`]: [character] (*optional*): #' curve type (e.g. "predefined" or "measured") #' #' @param RLum.type [`get_RLum`]: [character] (*optional*): #' RLum object type. Defaults to "RLum.Data.Curve" and "RLum.Data.Spectrum". #' #' @param get.index [`get_RLum`]: [logical] (*optional*): #' return a numeric vector with the index of each element in the RLum.Analysis object. #' #' @param recursive [`get_RLum`]: [logical] (*with default*): #' if `TRUE` (the default) and the result of the 'get_RLum' request is a single #' object this object will be unlisted, means only the object itself and no #' list containing exactly one object is returned. Mostly this makes things #' easier, however, if this method is used within a loop this might be undesired. 
#' #' @param drop [`get_RLum`]: [logical] (*with default*): #' coerce to the next possible layer (which are `RLum.Data`-objects), #' `drop = FALSE` keeps the original `RLum.Analysis` #' #' @param info.object [`get_RLum`]: [character] (*optional*): #' name of the wanted info element #' #' @param subset [`get_RLum`]: [expression] (*optional*): #' logical expression indicating elements or rows to keep: missing values are #' taken as false. This argument takes precedence over all other arguments, #' meaning they are not considered when subsetting the object. #' #' @param env [`get_RLum`]: [environment] (*with default*): #' An environment passed to [eval] as the enclosure. This argument is only #' relevant when subsetting the object and should not be used manually. #' #' @return #' #' **`get_RLum`**: #' #' Returns: #' #' 1. [list] of [RLum.Data-class] objects or #' 2. Single [RLum.Data-class] object, if only one object is contained and `recursive = FALSE` or #' 3. [RLum.Analysis-class] ojects for `drop = FALSE` #' #' @md #' @export setMethod("get_RLum", signature = ("RLum.Analysis"), function(object, record.id = NULL, recordType = NULL, curveType = NULL, RLum.type = NULL, protocol = "UNKNOWN", get.index = NULL, drop = TRUE, recursive = TRUE, info.object = NULL, subset = NULL, env = parent.frame(2)) { if (!is.null(substitute(subset))) { # To account for different lengths and elements in the @info slot we first # check all unique elements (in all records) info_el <- unique(unlist(lapply(object@records, function(el) names(el@info)))) envir <- as.data.frame(do.call(rbind, lapply(object@records, function(el) { val <- c(curveType = el@curveType, recordType = el@recordType, unlist(el@info)) # add missing info elements and set NA if (any(!info_el %in% names(val))) { val_new <- setNames(rep(NA, length(info_el[!info_el %in% names(val)])), info_el[!info_el %in% names(val)]) val <- c(val, val_new) } # order the named char vector by its names so we don't mix up the columns val <- 
val[order(names(val))] return(val) }) ), stringsAsFactors = FALSE) ##check for a logical expression, to avoid problems afterwards # if(class(eval(substitute(subset), envir = envir, enclos = env)) != "logical") # stop("[get_RLum] The argument 'subset' does not contain a logical expression!", call. = FALSE) ##select relevant rows sel <- tryCatch(eval( expr = substitute(subset), envir = envir, enclos = env ), error = function(e) { stop("[get_RLum()] Invalid subset options. \nValid terms are: ", paste(names(envir), collapse = ", "), call. = FALSE) }) if (all(is.na(sel))) sel <- FALSE if (any(sel)) { object@records <- object@records[sel] return(object) } else { tmp <- mapply(function(name, op) { message(" ",name, ": ", paste(unique(op), collapse = ", ")) }, names(envir), envir) message("\n [get_RLum()] Invalid value, please refer to unique options given above.") return(NULL) } } ##if info.object is set, only the info objects are returned else if(!is.null(info.object)) { if(info.object %in% names(object@info)){ unlist(object@info[info.object]) }else{ ##check for entries if(length(object@info) == 0){ warning( "[get_RLum()] This RLum.Analysis object has no info objects! NULL returned!)", call. = FALSE) }else{ warning(paste0( "[get_RLum()] Invalid info.object name. Valid names are: ", paste(names(object@info), collapse = ", ") ), call. = FALSE) } return(NULL) } } else { ##check for records if (length(object@records) == 0) { warning("[get_RLum()] This RLum.Analysis object has no records! NULL returned!)") return(NULL) } ##record.id if (is.null(record.id)) { record.id <- c(1:length(object@records)) } else if (!is.numeric(record.id) & !is.logical(record.id)) { stop("[get_RLum()] 'record.id' has to be of type 'numeric' or 'logical'!", call. = FALSE) } ##logical needs a slightly different treatment ##Why do we need this? 
Because a lot of standard R functions work with logical ##values instead of numerical indicies if (is.logical(record.id)) { record.id <- c(1:length(object@records))[record.id] } ##check if record.id exists if (FALSE %in% (abs(record.id) %in% (1:length(object@records)))) { try(stop("[get_RLum()] At least one 'record.id' is invalid!", call. = FALSE)) return(NULL) } ##recordType if (is.null(recordType)) { recordType <- unique(unlist(lapply(1:length(object@records), function(x) { object@records[[x]]@recordType }))) } else if (class(recordType) != "character"){ stop("[get_RLum()] 'recordType' has to be of type 'character'!", call. = FALSE) } ##curveType if (is.null(curveType)) { curveType <- unique(unlist(lapply(1:length(object@records), function(x) { object@records[[x]]@curveType }))) } else if (!is(curveType, "character")) { stop("[get_RLum()] 'curveType' has to be of type 'character'!", call. = FALSE) } ##RLum.type if (is.null(RLum.type)) { RLum.type <- c("RLum.Data.Curve", "RLum.Data.Spectrum", "RLum.Data.Image") } else if (!is(RLum.type, "character")) { stop("[get_RLum()] 'RLum.type' has to be of type 'character'!", call. = FALSE) } ##get.index if (is.null(get.index)) { get.index <- FALSE } else if (!is(get.index, "logical")) { stop("[get_RLum()] 'get.index' has to be of type 'logical'!", call. 
= FALSE) } ##get originator if (.hasSlot(object, "originator")) { originator <- object@originator } else{ originator <- NA_character_ } ##-----------------------------------------------------------------## ##a pre-selection is necessary to support negative index selection object@records <- object@records[record.id] record.id <- 1:length(object@records) ##select curves according to the chosen parameter if (length(record.id) > 1) { temp <- lapply(record.id, function(x) { if (is(object@records[[x]])[1] %in% RLum.type == TRUE) { ##as input a vector is allowed temp <- lapply(1:length(recordType), function(k) { ##translate input to regular expression recordType[k] <- glob2rx(recordType[k]) recordType[k] <- substr(recordType[k], start = 2, stop = nchar(recordType[k]) - 1) if (grepl(recordType[k], object@records[[x]]@recordType) == TRUE & object@records[[x]]@curveType %in% curveType) { if (!get.index) { object@records[[x]] } else{ x } } }) ##remove empty entries and select just one to unlist temp <- temp[!sapply(temp, is.null)] ##if list has length 0 skip entry if (length(temp) != 0) { temp[[1]] } else{ temp <- NULL } } }) ##remove empty list element temp <- temp[!sapply(temp, is.null)] ##check if the produced object is empty and show warning message if (length(temp) == 0) { warning("[get_RLum()] This request produced an empty list of records!") } ##remove list for get.index if (get.index) { return(unlist(temp)) } else{ if (!drop) { temp <- set_RLum( class = "RLum.Analysis", originator = originator, records = temp, protocol = object@protocol, .pid = object@.pid ) return(temp) } else{ if (length(temp) == 1 & recursive == TRUE) { return(temp[[1]]) } else{ return(temp) } } } } else{ if (get.index == FALSE) { if (drop == FALSE) { ##needed to keep the argument drop == TRUE temp <- set_RLum( class = "RLum.Analysis", originator = originator, records = list(object@records[[record.id]]), protocol = object@protocol, .pid = object@.pid ) return(temp) } else{ 
return(object@records[[record.id]]) } } else{ return(record.id) } } } })

# structure_RLum() ----------------------------------------------------------------------------
###
#' @describeIn RLum.Analysis
#' Method to show the structure of an [RLum.Analysis-class] object.
#'
#' @param fullExtent [structure_RLum]; [logical] (*with default*):
#' extents the returned `data.frame` to its full extent, i.e. all info elements
#' are part of the return as well. The default value is `FALSE` as the data
#' frame might become rather big.
#'
#' @return
#'
#' **`structure_RLum`**:
#'
#' Returns [data.frame-class] showing the structure.
#'
#' @md
#' @export
setMethod("structure_RLum",
          signature= "RLum.Analysis",
          definition = function(object, fullExtent = FALSE) {

  ##check if the object containing other elements than allowed
  ##(only curve objects can be summarised in a rectangular structure table)
  if(!all(vapply(object@records, FUN = class, character(1)) == "RLum.Data.Curve"))
    stop("[structure_RLum()] Only 'RLum.Data.Curve' objects are allowed!", call. = FALSE)

  ##get length object
  temp.object.length <- length(object@records)

  ##ID (simple running number over the records)
  temp.id <- 1:temp.object.length

  ##recordType
  temp.recordType <- vapply(object@records, function(x) {
    x@recordType
  }, character(1))

  ##PROTOCOL STEP (not derivable here, hence filled with NA)
  temp.protocol.step <- c(NA)
  length(temp.protocol.step) <- temp.object.length

  ##n.channels (number of rows of the curve matrix)
  temp.n.channels <- vapply(object@records, function(x){length(x@data[,1])}, numeric(1))

  ##X.MIN
  temp.x.min <- vapply(object@records, function(x){min(x@data[,1])}, numeric(1))

  ##X.MAX
  temp.x.max <- vapply(object@records, function(x){max(x@data[,1])}, numeric(1))

  ##Y.MIN
  temp.y.min <- vapply(object@records, function(x){min(x@data[,2])}, numeric(1))

  ##Y.MAX
  temp.y.max <- vapply(object@records, function(x){max(x@data[,2])}, numeric(1))

  ##.uid
  temp.uid <- unlist(lapply(object@records, function(x){x@.uid}))

  ##.pid
  temp.pid <- unlist(lapply(object@records, function(x){x@.pid}))

  ##originator
  temp.originator <- unlist(lapply(object@records, function(x){x@originator}))

  ##curveType
  temp.curveType <- unlist(lapply(object@records, function(x){x@curveType}))

  ##info elements as character value
  if (fullExtent) {
    ##full extent: one column per info element; missing entries are filled with NA
    temp.info.elements <- as.data.frame(data.table::rbindlist(lapply(object@records, function(x) {
      x@info
    }), fill = TRUE))

  } else{
    ##compact form: only the info element names, pasted into a single string per record
    temp.info.elements <- unlist(sapply(1:temp.object.length, function(x) {
      if (length(object@records[[x]]@info) != 0) {
        do.call(paste, as.list(names(object@records[[x]]@info)))

      } else{
        NA
      }
    }))
  }

  ##combine output to a data.frame
  return(
    data.frame(
      id = temp.id,
      recordType = temp.recordType,
      curveType = temp.curveType,
      protocol.step = temp.protocol.step,
      n.channels = temp.n.channels,
      x.min = temp.x.min,
      x.max = temp.x.max,
      y.min = temp.y.min,
      y.max = temp.y.max,
      originator = temp.originator,
      .uid = temp.uid,
      .pid = temp.pid,
      info = temp.info.elements,
      stringsAsFactors = FALSE
    )
  )

})

# length_RLum() -------------------------------------------------------------------------------
#' @describeIn RLum.Analysis
#' Returns the length of the object, i.e., number of stored records.
#'
#' @return
#'
#' **`length_RLum`**
#'
#' Returns the number records in this object.
#'
#' @md
#' @export
setMethod("length_RLum",
          "RLum.Analysis",
          function(object){
            ##the object length is defined by the number of stored records
            length(object@records)
})

# names_RLum() --------------------------------------------------------------------------------
#' @describeIn RLum.Analysis
#' Returns the names of the [RLum.Data-class] objects objects (same as shown with the show method)
#'
#' @return
#'
#' **`names_RLum`**
#'
#' Returns the names of the record types (`recordType`) in this object.
#'
#' @md
#' @export
setMethod("names_RLum",
          "RLum.Analysis",
          function(object){
            ##one recordType name per stored record
            sapply(1:length(object@records), function(x){
              object@records[[x]]@recordType})
})

# smooth_RLum() -------------------------------------------------------------------------------
#' @describeIn RLum.Analysis
#'
#' Smoothing of `RLum.Data` objects contained in this `RLum.Analysis` object
#' [zoo::rollmean] or [zoo::rollmedian][zoo::rollmean].
In particular the internal
#' function `.smoothing` is used.
#'
#' @param ... further arguments passed to underlying methods
#'
#' @return
#'
#' **`smooth_RLum`**
#'
#' Same object as input, after smoothing
#'
#' @md
#' @export
setMethod(
  f = "smooth_RLum",
  signature = "RLum.Analysis",
  function(object, ...) {
    ##apply the smoothing record by record; dispatch happens on the record class
    object@records <- lapply(object@records, function(x){
      smooth_RLum(x, ...)
    })

    return(object)
  }
)
Luminescence/R/plot_ViolinPlot.R0000644000176200001440000001777513231137116016364 0ustar liggesusers#' Create a violin plot
#'
#' Draws a kernel density plot in combination with a boxplot in its middle. The shape of the violin
#' is constructed using a mirrored density curve. This plot is especially designed for cases
#' where the individual errors are zero or too small to be visualised. The idea for this plot is
#' based on the 'volcano plot' in the ggplot2 package by Hadley Wickham and Winston Chang.
#' The general idea for the Violin Plot seems to be introduced by Hintze and Nelson (1998).
#'
#' The function is passing several arguments to the function [plot],
#' [stats::density], [graphics::boxplot]:
#'
#' Supported arguments are:
#' `xlim`, `main`, `xlab`, `ylab`, `col.violin`, `col.boxplot`, `mtext`, `cex`, `mtext`
#'
#' **`Valid summary keywords`**
#'
#' `'n'`, `'mean'`, `'median'`, `'sd.abs'`, `'sd.rel'`, `'se.abs'`, `'se.rel'`.
#' `'skewness'`, `'kurtosis'`
#'
#' @param data [numeric] or [RLum.Results-class] (**required**):
#' input data for plotting. Alternatively a [data.frame] or a [matrix] can
#' be provided, but only the first column will be considered by the
#' function
#'
#' @param boxplot [logical] (*with default*):
#' enable or disable boxplot
#'
#' @param rug [logical] (*with default*):
#' enable or disable rug
#'
#' @param summary [character] (*optional*):
#' add statistic measures of centrality and dispersion to the plot.
#' Can be one or more of several keywords. See details for available keywords.
#'
#' @param summary.pos [numeric] or [character] (*with default*):
#' optional position keywords (cf., [legend]) for the statistical summary.
#' Alternatively, the keyword `"sub"` may be specified to place the summary
#' below the plot header. However, this latter option in only possible if
#' `mtext` is not used.
#'
#' @param na.rm [logical] (*with default*):
#' exclude NA values from the data set prior to any further operations.
#'
#' @param ... further arguments and graphical parameters passed to
#' [plot.default], [stats::density] and [boxplot]. See details for further information
#'
#' @note
#' Although the code for this function was developed independently and just the idea for the plot
#' was based on the 'ggplot2' package plot type 'volcano', it should be mentioned that, beyond this,
#' two other R packages exist providing a possibility to produces this kind of plot, namely:
#' 'vioplot' and 'violinmplot' (see References for details).
#'
#' @section Function version: 0.1.4
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @references
#' Daniel Adler (2005). vioplot: A violin plot is a combination of a box plot and a kernel density plot.
#' R package version 0.2 http://CRAN.R-project.org/package=vioplot
#'
#' Hintze, J.L., Nelson, R.D., 1998. A Box Plot-Density Trace Synergism. The American Statistician 52, 181-184.
#'
#' Raphael W. Majeed (2012). violinmplot: Combination of violin plot with mean and standard deviation.
#' R package version 0.2.1. http://CRAN.R-project.org/package=violinmplot
#'
#' Wickham. H (2009). ggplot2: elegant graphics for data analysis. Springer New York.
#'
#' @seealso [stats::density], [plot], [boxplot], [rug], [calc_Statistics]
#'
#' @examples
#'
#' ## read example data set
#' data(ExampleData.DeValues, envir = environment())
#' ExampleData.DeValues <- Second2Gray(ExampleData.DeValues$BT998, c(0.0438,0.0019))
#'
#' ## create plot straightforward
#' plot_ViolinPlot(data = ExampleData.DeValues)
#'
#' @md
#' @export
plot_ViolinPlot <- function(
  data,
  boxplot = TRUE,
  rug = TRUE,
  summary = NULL,
  summary.pos = "sub",
  na.rm = TRUE,
  ...
) {

  # Integrity tests and conversion --------------------------------------------------------------

  ##Prechecks
  if(missing(data)){
    stop("[plot_ViolinPlot()] I don't know what to do, data input needed." )

  }else{
    ##check for RLum.Results object
    if(is(data, "RLum.Results")){
      data <- get_RLum(data, "data")
    }

    ##if data.frame or matrix, only the first column is considered
    if(is(data, "data.frame") | is(data, "matrix")){
      data <- data[,1]
    }
  }

  ##Remove NA values
  if(na.rm){
    data <- na.exclude(data)

    if(length(attr(data, "na.action")) > 0){
      warning(paste("[plot_ViolinPlot()]", length(attr(data, "na.action")), "NA values removed!"),
              call. = FALSE)
    }
  }

  #Further checks
  if(!is(summary.pos, "character")){
    stop("[plot_ViolinPlot()] argument 'summary.pos' needs to be of type character!")
  }

  ##stop if only one or 0 values are left in data
  ##(fixed: the messages below previously used the misspelled prefix '[plot_ViolinePlot()]')
  if(length(data) == 0){
    warning("[plot_ViolinPlot()] Actually it is rather hard to plot 0 values. NULL returned",
            call. = FALSE)
    return()
  }

  # Pre-calculations ----------------------------------------------------------------------------

  ##density for the violin; a user-supplied 'bw' in ... overrides the default bandwidth
  if(length(data)>1){
    density <- density(x = data, bw = ifelse("bw" %in% names(list(...)),list(...)$bw,"nrd0"))

  }else{
    density <- NULL
    warning("[plot_ViolinPlot()] single data point found, no density calculated.", call. = FALSE)
  }

  ##some statistical parameter, get rid of the weighted statistics
  stat.summary <- list(suppressWarnings(calc_Statistics(as.data.frame(data), digits = 2)[["unweighted"]]))
  names(stat.summary) <- "unweighted"

  ##make valid summary string
  if(is.null(summary)){
    summary <- c("n","median")
  }

  ##at least show a warning for invalid keywords
  ##(fixed: the message previously claimed 'weighted' measures, but only the
  ##unweighted statistics are supported by this function)
  if(!all(summary %in% names(stat.summary[[1]]))){
    warning(paste0("[plot_ViolinPlot()] Only keywords for unweighted statistical measures are supported. Valid keywords are: ",
                   paste(names(stat.summary[[1]]), collapse = ", ")), call. = FALSE)
  }

  ##make sure that only valid keywords make it
  summary <- summary[(summary %in% names(stat.summary[[1]]))]

  stat.text <- .create_StatisticalSummaryText(stat.summary, keywords = summary, sep = " \n ")
  stat.mtext <- .create_StatisticalSummaryText(stat.summary, keywords = summary, sep = " | ")

  # Plot settings -------------------------------------------------------------------------------

  ##set default values
  plot.settings <- list(
    xlim = if(!is.null(density)){range(density$x)}else{c(data[1]*0.9, data[1]*1.1)},
    main = "Violin Plot",
    xlab = expression(paste(D[e], " [a.u.]")),
    ylab = if(!is.null(density)){"Density"}else{" "},
    col.violin = rgb(0,0,0,0.2),
    col.boxplot = NULL,
    mtext = ifelse(summary.pos != 'sub', "", stat.mtext),
    cex = 1
  )

  ##modify list accordingly
  plot.settings <- modifyList(plot.settings, val = list(...))

  # Plot ----------------------------------------------------------------------------------------

  ##open empty plot area
  plot(
    NA,NA,
    xlim = plot.settings$xlim,
    ylim = c(0.2,1.8),
    xlab = plot.settings$xlab,
    ylab = plot.settings$ylab,
    yaxt = "n",
    main = plot.settings$main,
    cex = plot.settings$cex
  )

  ##add polygon ... the violin (mirrored, normalised density around y = 1)
  if(!is.null(density)){
    polygon(
      x = c(density$x, rev(density$x)),
      y = c(1 + density$y / max(density$y) * 0.5, rev(1 - density$y / max(density$y) * 0.5)),
      col = plot.settings$col.violin,
      border = plot.settings$col.violin
    )
  }

  ##add the boxplot
  if(boxplot){
    boxplot(
      data,
      outline = TRUE,
      boxwex = 0.4,
      horizontal = TRUE,
      axes = FALSE,
      add = TRUE,
      col = plot.settings$col.boxplot
    )
  }

  ##add rug
  if(rug){
    rug(x = data)
  }

  ##add mtext
  if(!is.null(plot.settings$mtext)){
    mtext(side = 3, text = plot.settings$mtext)
  }

  ##add stat.text
  if (summary.pos != "sub") {
    valid_keywords <- c(
      "bottomright", "bottom", "bottomleft", "left",
      "topleft", "top", "topright", "right", "center"
    )

    if (any( summary.pos %in% valid_keywords )) {
      legend(summary.pos, legend = stat.text, bty = "n")

    }else{
      warning_text <- paste0("Value provided for 'summary.pos' is not a valid keyword, valid keywords are:",
                             paste(valid_keywords, collapse = ", "))
      warning(warning_text)
    }
  }
}
Luminescence/R/fit_ThermalQuenching.R0000644000176200001440000003010713456377773017337 0ustar liggesusers#' @title Fitting Thermal Quenching Data
#'
#' @description Applying a nls-fitting to thermal quenching data.
#'
#' @details
#'
#' **Used equation**\cr
#'
#' The equation used for the fitting is
#'
#' \deqn{y = (A / (1 + C * (exp(-W / (k * x))))) + c}
#'
#' *W* is the energy depth in eV and *C* is a dimensionless constant. *A* and *c* are used to
#' adjust the curve for the given signal. *k* is the Boltzmann constant in eV/K and *x* is the absolute
#' temperature in K.
#'
#' **Error estimation**\cr
#'
#' The error estimation is done by varying the input parameters using the given uncertainties in
#' a Monte Carlo simulation. Errors are assumed to follow a normal distribution.
#'
#' **start_param** \cr
#'
#' The function allows the injection of own start parameters via the argument `start_param`. The
#' parameters need to be provided as a named list. The names are the parameters to be optimised.
#' Examples: `start_param = list(A = 1, C = 1e+5, W = 0.5, c = 0)`
#'
#'
#' **method_control** \cr
#'
#' The following arguments can be provided via `method_control`. Please note that arguments provided
#' via `method_control` are not further tested, i.e., if the function crashes your input was probably
#' wrong.
#'
#' \tabular{lll}{
#' **ARGUMENT** \tab **TYPE** \tab **DESCRIPTION**\cr
#' `upper` \tab named [vector] \tab sets upper fitting boundaries, if provided boundaries for all arguments
#' are required, e.g., `c(A = 0, C = 0, W = 0, c = 0)` \cr
#' `lower` \tab named [vector] \tab sets lower fitting boundaries (see `upper` for details) \cr
#' `trace` \tab [logical] \tab enables/disables progression trace for [minpack.lm::nlsLM]\cr
#' `weights` \tab [numeric] \tab option to provide own weights for the fitting, the length of this
#' vector needs to be equal to the number of rows of the input `data.frame`. If set to `NULL` no weights
#' are applied. By default the weights are defined by the third column of the input `data.frame`.
#' }
#'
#' @param data [data.frame] (**required**): input data with three columns, the first column contains
#' temperature values in deg. C, columns 2 and 3 the dependent values with its error
#'
#' @param start_param [list] (*optional*): option to provide own start parameters for the fitting, see
#' details
#'
#' @param method_control [list] (*optional*): further options to fine tune the fitting, see details for
#' further information
#'
#' @param n.MC [numeric] (*with default*): number of Monte Carlo runs for the error estimation. If `n.MC` is
#' `NULL` or `<=1`, the error estimation is skipped
#'
#' @param verbose [logical] (*with default*): enables/disables terminal output
#'
#' @param plot [logical] (*with default*): enables/disables plot output
#'
#' @param ... further arguments that can be passed to control the plotting, support are `main`, `pch`,
#' `col_fit`, `col_points`, `lty`, `lwd`, `xlab`, `ylab`, `xlim`, `ylim`, `xaxt`
#'
#' @return
#'
#' The function returns numerical output and an (*optional*) plot.
#'
#' -----------------------------------\cr
#' `[ NUMERICAL OUTPUT ]`\cr
#' -----------------------------------\cr
#'
#' **`RLum.Results`**-object
#'
#' **slot:** **`@data`**
#'
#' `[.. $data : data.frame]`\cr
#'
#' A table with all fitting parameters and the number of Monte Carlo runs used for the error estimation.
#'
#' `[.. $fit : nls object]` \cr
#'
#' The nls [stats::nls] object returned by the function [minpack.lm::nlsLM]. This object
#' can be further passed to other functions supporting an nls object (cf. details section
#' in [stats::nls])
#'
#' **slot:** **`@info`**
#'
#' `[.. $call : call]`\cr
#'
#' The original function call.
#'
#' -----------------------------------\cr
#' `[ GRAPHICAL OUTPUT ]`\cr
#' -----------------------------------\cr
#'
#' Plotted are temperature against the signal and their uncertainties.
#' The fit is shown as dashed-line (can be modified). Please note that for the fitting the absolute
#' temperature values are used but are re-calculated to deg. C for the plot.
#'
#'
#' @section Function version: 0.1.0
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, UMR5060, CNRS - Université Bordeaux Montaigne (France)
#'
#' @references
#'
#' Wintle, A.G., 1975. Thermal Quenching of Thermoluminescence in Quartz. Geophys. J. R. astr. Soc. 41, 107–113.
#'
#' @seealso [minpack.lm::nlsLM]
#'
#' @examples
#'
#' ##create short example dataset
#' data <- data.frame(
#'  T = c(25, 40, 50, 60, 70, 80, 90, 100, 110),
#'  V = c(0.06, 0.058, 0.052, 0.051, 0.041, 0.034, 0.035, 0.033, 0.032),
#'  V_X = c(0.012, 0.009, 0.008, 0.008, 0.007, 0.006, 0.005, 0.005, 0.004))
#'
#' ##fit
#' fit_ThermalQuenching(
#'  data = data,
#'  n.MC = NULL)
#'
#' @md
#' @export
fit_ThermalQuenching <- function(
  data,
  start_param = list(),
  method_control = list(),
  n.MC = 100,
  verbose = TRUE,
  plot = TRUE,
  ...
){

  # Self-call -----------------------------------------------------------------------------------
  ##a list of data.frames is processed element-wise and the results are merged
  ##(inherits() is used instead of class() == ... to be safe with multi-class objects)
  if(inherits(data, "list")){

    ##get arguments
    args <- as.list(match.call())
    args[[1]] <- NULL
    args$data <- NULL

    ##run function
    results_list <- lapply(data, function(x){
      do.call(fit_ThermalQuenching, c(list(data = x),args))
    })

    ##combine and return
    return(merge_RLum(results_list))
  }

  # Integrity checks ----------------------------------------------------------------------------
  if(!inherits(data, 'data.frame')){
    stop("[fit_ThermalQuenching()] 'data' must by of type 'data.frame' or list of 'data.frames'!",
         call. = FALSE)

  }else{
    if(nrow(data) < 1 || ncol(data) < 3)
      stop("[fit_ThermalQuenching()] 'data' is empty or has less than three columns!",
           call. = FALSE)

    if(ncol(data) > 3)
      warning("[fit_ThermalQuenching()] 'data' has more than 3 columns, taking only the first three!",
              call. = FALSE)

    if(any(is.na(data)))
      warning("[fit_ThermalQuenching()] NA values in 'data' automatically removed!",
              call. = FALSE)

    ##this we do anyway, you never know
    data <- na.exclude(data[,1:3])
  }

  # Prepare data --------------------------------------------------------------------------------

  ##set formula for quenching according to Wintle 1973
  ##we here add a constant, otherwise the fit will not really work
  k <- 8.6173303e-05 # Boltzmann constant in eV/K
  f <- y ~ (A / (1 + C * (exp(-W / (k * x))))) + c

  ##translate values in data.frame to absolute temperature
  data_raw <- data
  data[[1]] <- data[[1]] + 273.15

  ##start parameter
  start_param <- modifyList(x = list(
    A = max(data[[2]]),
    C = max(data[[1]] * 10e+5),
    W = 0.5,
    c = 0),
    val = start_param)

  ##method control
  method_control <- modifyList(
    x = list(
      lower = c(A = 0, C = 0, W = 0, c = 0),
      upper = c(A = 10 * start_param$A, C = Inf, W = 10, c = start_param$A),
      trace = FALSE,
      weights = data[[3]]
    ), val = method_control)

  # Fitting -------------------------------------------------------------------------------------

  ##run main fitting
  ##(fixed: 'trace' was previously set to method_control$weights instead of method_control$trace)
  fit <- try(minpack.lm::nlsLM(
    formula = f,
    data = data.frame(x = data[[1]], y = data[[2]]),
    weights = if(is.null(method_control$weights)){
      rep(1, length(data[[2]]))
    } else {
      method_control$weights},
    control = list(
      maxiter = 500,
      maxfev = 1000,
      trace = method_control$trace
    ),
    start = start_param,
    lower = method_control$lower,
    upper = method_control$upper
  ), silent = TRUE)

  ##only continue if the first fitting worked out
  if(!inherits(fit, "try-error")){

    ##reset n.MC
    if(is.null(n.MC) || n.MC < 1) n.MC <- 1

    ##Prepare MC runs for the fitting: perturb y-values with their normal errors
    x_MC <- data[[1]]
    y_MC <- matrix(
      data = data[[2]] + rnorm(n.MC * length(x_MC)) * data[[3]],
      nrow = length(x_MC),
      ncol = n.MC)

    ##negative signal values are physically meaningless
    y_MC[y_MC < 0] <- 0

    ##run fitting
    fit_MC <- lapply(1:n.MC, function(x){
      temp <- try(minpack.lm::nlsLM(
        formula = f,
        data = data.frame(x = x_MC, y = y_MC[,x]),
        weights = if(is.null(method_control$weights)){
          rep(1, length(data[[2]]))
        } else {
          method_control$weights},
        control = list(
          maxiter = 500,
          maxfev = 1000
        ),
        start = start_param,
        lower = method_control$lower,
        upper = method_control$upper
      ), silent = TRUE)

      ##return value
      if(inherits(temp, 'try-error')) {
        return(NULL)
      } else{
        temp
      }
    })

  }else{
    try(stop("[fit_ThermalQuenching()] Fitting failed, NULL returned!", call. = FALSE),
        silent = FALSE)
    return(NULL)
  }

  ##remove NULL (the fit was not successful)
  fit_MC <- fit_MC[!sapply(X = fit_MC, is.null)]
  n.MC <- length(fit_MC)

  # Extract values ------------------------------------------------------------------------------

  ##(1) - extract parameters from main fit
  fit_coef <- coef(fit)

  A <- fit_coef[["A"]]
  C <- fit_coef[["C"]]
  W <- fit_coef[["W"]]
  c <- fit_coef[["c"]]

  ##(2) - extract values from MC run; the row standard deviations are the parameter errors
  fit_coef_MC_full <- vapply(X = fit_MC, FUN = coef, FUN.VALUE = numeric(4))
  fit_coef_MC <- round(matrixStats::rowSds(fit_coef_MC_full),3)

  A_MC_X <- fit_coef_MC[1]
  C_MC_X <- fit_coef_MC[2]
  W_MC_X <- fit_coef_MC[3]
  c_MC_X <- fit_coef_MC[4]

  # Terminal output -----------------------------------------------------------------------------
  if(verbose){
    cat("\n[fit_ThermalQuenching()]\n\n")
    cat(" A = ", A, " \u00b1 ",A_MC_X,"\n")
    cat(" C = ", C, " \u00b1 ",C_MC_X,"\n")
    cat(" W = ", W, " \u00b1 ",W_MC_X, " eV\n")
    cat(" c = ", c, " \u00b1 ",c_MC_X, "\n")
    cat(" --------------------------------\n")
  }

  # Plotting ------------------------------------------------------------------------------------
  if(plot) {

    ##plot settings
    plot_settings <- list(
      xlim = range(data[[1]]),
      ylim = c(min(data[[2]]) - data[[3]][which.min(data[[2]])],
               max(data[[2]]) + data[[3]][which.max(data[[2]])]),
      pch = 1,
      xaxt = "n",
      xlab = "Temperature [\u00b0C]",
      ylab = "Dependent [a.u.]",
      main = "Thermal quenching",
      lty = 2,
      col_points = "black",
      col_fit = "red",
      lwd = 1.3,
      mtext = if(n.MC == 1) "" else paste0("n.MC = ", n.MC)
    )

    ##overwrite settings
    plot_settings <- modifyList(x = plot_settings, val = list(...))

    ##create plot window
    plot(
      x = NA,
      y = NA,
      xlim = plot_settings$xlim,
      ylim = plot_settings$ylim,
      xaxt = plot_settings$xaxt,
      xlab = plot_settings$xlab,
      ylab = plot_settings$ylab,
      main = plot_settings$main
    )

    ##add axis with correct temperature (fit is in K, plot labels in deg. C)
    if(!is.null(plot_settings$xaxt) && plot_settings$xaxt == "n"){
      at <- pretty(round(axTicks(side = 1) - 273.15))
      axis(side = 1, at = at + 273.15, labels = at)
    }

    ##add MC curves (only if an error estimation was run)
    if(!is.null(n.MC) && n.MC > 1){
      for(i in 1:n.MC){
        A <- fit_coef_MC_full[1,i]
        C <- fit_coef_MC_full[2,i]
        W <- fit_coef_MC_full[3,i]
        c <- fit_coef_MC_full[4,i]
        x <- 0

        curve((A / (1 + C * (exp(-W / (k * x))))) + c,
              col = rgb(0,0,0,.1),
              add = TRUE)
      }
    }

    ##add points and uncertainties
    points(data[, 1:2], pch = plot_settings$pch, lwd = 2, col = plot_settings$col_points)
    segments(x0 = data[[1]], x1 = data[[1]],
             y0 = data[[2]] + data[[3]],
             y1 = data[[2]] - data[[3]],
             col = plot_settings$col_points
    )

    ##add central fit
    A <- fit_coef[["A"]]
    C <- fit_coef[["C"]]
    W <- fit_coef[["W"]]
    c <- fit_coef[["c"]]
    x <- 0

    curve((A / (1 + C * (exp( -W / (k * x) )))) + c,
          lty = plot_settings$lty,
          lwd = plot_settings$lwd,
          col = plot_settings$col_fit,
          add = TRUE
    )

    ##add mtext
    mtext(side = 3, text = plot_settings$mtext)
  }

  # Return --------------------------------------------------------------------------------------
  output_df <- data.frame(
    A = A,
    A_X = A_MC_X,
    C = C,
    C_X = C_MC_X,
    W = W,
    W_X = W_MC_X,
    c = c,
    c_X = c_MC_X,
    n.MC = n.MC
  )

  output <- set_RLum(
    class = "RLum.Results",
    data = list(
      data = output_df,
      fit = fit
    ),
    info = list(
      call = sys.call()
    )
  )

  return(output)
}
Luminescence/R/convert_XSYG2CSV.R0000644000176200001440000000540513231137116016202 0ustar liggesusers#' Export XSYG-file(s) to CSV-files
#'
#' This function is a wrapper function around the functions [read_XSYG2R] and
#' [write_RLum2CSV] and it imports an XSYG-file and directly exports its content
#' to CSV-files. If nothing is set for the argument `path` ([write_RLum2CSV])
#' the input folder will become the output folder.
#'
#' @param file [character] (**required**):
#' name of the XSYG-file to be converted to CSV-files
#'
#' @param ...
further arguments that will be passed to the function
#' [read_XSYG2R] and [write_RLum2CSV]
#'
#' @return
#' The function returns either a CSV-file (or many of them) or for the option `export = FALSE`
#' a list comprising objects of type [data.frame] and [matrix]
#'
#' @section Function version: 0.1.0
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @seealso [RLum.Analysis-class], [RLum.Data-class], [RLum.Results-class],
#' [utils::write.table], [write_RLum2CSV], [read_XSYG2R]
#'
#' @keywords IO
#'
#' @examples
#'
#' ##transform XSYG-file values to a list
#' data(ExampleData.XSYG, envir = environment())
#' convert_XSYG2CSV(OSL.SARMeasurement$Sequence.Object[1:10], export = FALSE)
#'
#' \dontrun{
#' ##select your BIN-file
#' file <- file.choose()
#'
#' ##convert
#' convert_XSYG2CSV(file)
#'
#' }
#'
#' @md
#' @export
convert_XSYG2CSV <- function(
  file,
  ...
){

  # General tests -------------------------------------------------------------------------------

  ##file is missing?
  ##(fixed: the message previously referred to the wrong function name '[convert_XSYG2R()]')
  if(missing(file)){
    stop("[convert_XSYG2CSV()] file is missing!", call. = FALSE)
  }

  ##set input arguments
  convert_XSYG2R_settings.default <- list(
    recalculate.TL.curves = TRUE,
    pattern = ".xsyg",
    txtProgressBar = TRUE,
    export = TRUE
  )

  ##modify list on demand
  convert_XSYG2R_settings <- modifyList(x = convert_XSYG2R_settings.default, val = list(...))

  # Import file ---------------------------------------------------------------------------------
  ##an already imported RLum object is passed through unchanged
  if(!inherits(file, "RLum")){
    object <- read_XSYG2R(
      file = file,
      fastForward = TRUE,
      recalculate.TL.curves = convert_XSYG2R_settings$recalculate.TL.curves,
      pattern = convert_XSYG2R_settings$pattern,
      txtProgressBar = convert_XSYG2R_settings$txtProgressBar
    )

  }else{
    object <- file
  }

  # Export to CSV -------------------------------------------------------------------------------

  ##get all arguments we want to pass and remove the doubled one
  arguments <- c(list(object = object, export = convert_XSYG2R_settings$export), list(...))
  arguments[duplicated(names(arguments))] <- NULL

  ##this if-condition prevents NULL in the terminal
  if(convert_XSYG2R_settings$export == TRUE){
    invisible(do.call("write_RLum2CSV", arguments))

  }else{
    do.call("write_RLum2CSV", arguments)
  }
}
Luminescence/R/plot_DRCSummary.R0000644000176200001440000002076513437452426016250 0ustar liggesusers#'Create a Dose-Response Curve Summary Plot
#'
#'While analysing OSL SAR or pIRIR data the view on the data is limited usually to one
#'dose-response curve (DRC) at the time for one aliquot. This function overcomes this limitation
#'by plotting all DRC from an [RLum.Results-class] object created by the function [analyse_SAR.CWOSL]
#'in one single plot.
#'
#'If you want plot your DRC on an energy scale (dose in Gy), you can either use the option `source_dose_rate` provided
#'below or your can SAR analysis with the dose points in Gy (better axis scaling).
#'
#'@param object [RLum.Results-class] object (**required**): input object created by the function [analyse_SAR.CWOSL]. The input object can be provided as [list].
#'
#'@param source_dose_rate [numeric] (*optional*): allows to modify the axis and show values in Gy, instead seconds.
#'Only a single numerical values is allowed.
#'
#'@param sel_curves [numeric] (optional): id of the curves to be plotting in its occurring order. A sequence can
#'be provided for selecting, e.g., only every 2nd curve from the input object
#'
#'@param show_dose_points [logical] (with default): enable or disable plot of dose points in the graph
#'
#'@param show_natural [logical] (with default): enable or disable the plot of the natural Lx/Tx values
#'
#'@param n [integer] (with default): the number of x-values used to evaluate one curve object. Large numbers slow
#'down the plotting process and are usually not needed
#'
#'@param ... Further arguments and graphical parameters to be passed.
#'
#'@section Function version: 0.2.1
#'
#' @return An [RLum.Results-class] object is returned:
#'
#' Slot: **@data**\cr
#'
#' \tabular{lll}{
#' **OBJECT** \tab **TYPE** \tab **COMMENT**\cr
#' `results` \tab [data.frame] \tab with dose and LxTx values \cr
#' `data` \tab [RLum.Results-class] \tab original input data \cr
#' }
#'
#' Slot: **@info**\cr
#'
#' \tabular{lll}{
#' **OBJECT** \tab **TYPE** \tab **COMMENT** \cr
#' `call` \tab `call` \tab the original function call \cr
#' `args` \tab `list` \tab arguments of the original function call \cr
#' }
#'
#'*Note: If the input object is a [list] a list of [RLum.Results-class] objects is returned.*
#'
#'@author Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France) \cr
#' Christoph Burow, University of Cologne
#'
#'@seealso [RLum.Results-class], [analyse_SAR.CWOSL]
#'
#'@examples
#'
#'#load data example data
#'data(ExampleData.BINfileData, envir = environment())
#'
#'#transform the values from the first position in a RLum.Analysis object
#'object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1)
#'
#' ##perform SAR analysis
#' results <- analyse_SAR.CWOSL(
#'   object = object,
#'   signal.integral.min = 1,
#'   signal.integral.max = 2,
#'   background.integral.min = 900,
#'   background.integral.max = 1000,
#'   plot = FALSE
#' )
#'
#'##plot only DRC
#'plot_DRCSummary(results)
#'
#'@md
#'@export
plot_DRCSummary <- function(
  object,
  source_dose_rate = NULL,
  sel_curves = NULL,
  show_dose_points = FALSE,
  show_natural = FALSE,
  n = 51L,
  ...
){

  # Self-call -----------------------------------------------------------------------------------
  ##a list input is processed element-wise with a recursive call
  ##(inherits() is used instead of class() == ... to be safe with multi-class objects)
  if(inherits(object, "list")){

    ##catch ... arguments
    plot_settings <- list(...)

    ##expand arguments
    if("main" %in% names(list(...))){
      main <- as.list(rep(list(...)[["main"]], length(object)))

      ##filter main from the ... argument list otherwise we will have a collusion
      ##(fixed: the previous single-logical index `"main" %in% names(plot_settings)`
      ##was recycled and dropped ALL settings, not only 'main')
      plot_settings[names(plot_settings) %in% "main"] <- NULL

    }else{
      main <- as.list(rep("DRC", length(object)))
    }

    results <- lapply(1:length(object), function(o){
      plot_DRCSummary(
        object = object[[o]],
        sel_curves = sel_curves,
        show_dose_points = show_dose_points,
        show_natural = show_natural,
        n = n,
        main = main[[o]],
        ## NOTE(review): the remaining settings are handed down as a single
        ## list matched to '...'; verify against the recursive call behaviour
        ... = plot_settings
      )
    })

    ##return merged object
    return(results)
  }

  # Check input ---------------------------------------------------------------------------------
  if(!inherits(object, "RLum.Results"))
    stop("[plot_DRCSummary()] The input is not of class 'RLum.Results'!",call. = FALSE)

  # Extract data from object --------------------------------------------------------------------

  ##get data from RLum.Results object
  if(object@originator %in% c("analyse_SAR.CWOSL", "analyse_pIRIRSequence")){

    ##set limit
    if(is.null(sel_curves)){
      sel_curves <- 1:length(object@data$Formula)

    }else{
      if(min(sel_curves) < 1 ||
         max(sel_curves) > length(object@data$Formula) ||
         length(sel_curves) > length(object@data$Formula)){

        warning("[plot_DRCSummary()] 'sel_curves' out of bounds, reset to full dataset.",
                call. = FALSE, immediate. = TRUE)
        sel_curves <- 1:length(object@data$Formula)
      }
    }

    ##get DRC
    DRC <- object@data$Formula[sel_curves]

    ##get limits for each set dataset; each aliquot starts at a 'Natural' row
    dataset_limits <- matrix(
      c(which(object@data$LnLxTnTx.table[["Name"]] == "Natural"),
        which(object@data$LnLxTnTx.table[["Name"]] == "Natural")[-1] - 1,
        nrow(object@data$LnLxTnTx.table)),
      ncol = 2)

    ##create list
    LxTx <- lapply(1:nrow(dataset_limits), function(x){
      object@data$LnLxTnTx.table[dataset_limits[x,1]:dataset_limits[x,2],]
    })[sel_curves]

  }else{
    stop(
      "[plot_DRCSummary()] 'object' was created by none supported function, cf. manual for allowed originators!",call. = FALSE)
  }

  # Plotting ------------------------------------------------------------------------------------

  ##set default
  plot_settings <- list(
    xlab = if(is.null(source_dose_rate)){"Dose [s]"}else{"Dose [Gy]"},
    ylab = expression(L[x]/T[x]),
    xlim = c(0,max(vapply(LxTx, function(x){max(x[["Dose"]])}, numeric(1)))),
    ylim = if(show_dose_points){
      c(0,max(vapply(LxTx, function(x){max(x[["LxTx"]] + x[["LxTx.Error"]])}, numeric(1)), na.rm = TRUE))
    }else{
      c(0,max(vapply(1:length(LxTx), function(y){
        x <- max(LxTx[[y]][["Dose"]], na.rm = TRUE)
        eval(DRC[[y]])
      },numeric(1)), na.rm = TRUE))
    },
    main = "DRC Summary",
    lty = rep(1,length(sel_curves)),
    lwd = 1,
    pch = rep(20,length(sel_curves)),
    col = rep(rgb(0,0,0,0.5), length(sel_curves))
  )

  ##modify on request
  plot_settings <- modifyList(x = plot_settings, val = list(...))

  ##create empty plot window
  plot(
    x = NA,
    y = NA,
    xlab = plot_settings$xlab,
    ylab = plot_settings$ylab,
    xlim = plot_settings$xlim,
    ylim = plot_settings$ylim,
    main = plot_settings$main,
    xaxt = "n"
  )

  #exchange xaxis if source dose rate is set
  if(!is.null(source_dose_rate)){
    axis(side = 1, at = axTicks(side = 1),
         labels = round(axTicks(side = 1) * source_dose_rate[1],0))

  }else{
    axis(side = 1)
  }

  for(i in 1:length(sel_curves)){

    ##plot natural (first row of each aliquot table)
    if(show_natural){
      segments(x0 = LxTx[[i]]$Dose[1], x1 = LxTx[[i]]$Dose[1],
               y0 = LxTx[[i]]$LxTx[1] - LxTx[[i]]$LxTx.Error[1],
               y1 = LxTx[[i]]$LxTx[1] + LxTx[[i]]$LxTx.Error[1],
               col = plot_settings$col[[i]])
      points(
        x = LxTx[[i]]$Dose[1],
        y = LxTx[[i]]$LxTx[1],
        col = plot_settings$col[[i]],
        pch = plot_settings$pch[[i]]
      )
    }

    ##plot dose points (all remaining rows)
    if(show_dose_points){
      segments(x0 = LxTx[[i]]$Dose[-1], x1 = LxTx[[i]]$Dose[-1],
               y0 = LxTx[[i]]$LxTx[-1] - LxTx[[i]]$LxTx.Error[-1],
               y1 = LxTx[[i]]$LxTx[-1] + LxTx[[i]]$LxTx.Error[-1],
               col = plot_settings$col[[i]])
      points(
        x = LxTx[[i]]$Dose[-1],
        y = LxTx[[i]]$LxTx[-1],
        col = plot_settings$col[[i]],
        pch = plot_settings$pch[[i]]
      )
    }

    ##plot lines; the stored formula is evaluated over 'x'
    x <- seq(min(plot_settings$xlim),max(plot_settings$xlim), length.out = n)
    y <- eval(DRC[[i]])

    if (any(is.na(y)) || any(is.nan(y))) {
      warning("[plot_DRCSummary()] Dose response curve ", i,
              " is NA/NaN and was removed before plotting.",
              call. = FALSE)
      next
    }

    lines(
      x = x,
      y = eval(DRC[[i]]),
      col = plot_settings$col[[i]],
      lwd = plot_settings$lwd,
      lty = plot_settings$lty[[i]]
    )
  }

  ## Results -------------------------------------------------------------------
  results <- set_RLum(
    class = "RLum.Results",
    data = list(
      results = data.frame(
        dose = x,
        sapply(DRC, function(d, n) {
          eval(d)
        }, n)
      ),
      data = object
    ),
    info = list(call = sys.call(), args = as.list(sys.call())[-1])
  )

  ## Return value --------------------------------------------------------------
  return(results)
}
Luminescence/R/apply_CosmicRayRemoval.R0000644000176200001440000002764413475754651017662 0ustar liggesusers#' Function to remove cosmic rays from an RLum.Data.Spectrum S4 class object
#'
#' The function provides several methods for cosmic-ray removal and spectrum
#' smoothing [RLum.Data.Spectrum-class] objects and such objects embedded in [list] or
#' [RLum.Analysis-class] objects.
#'
#' **`method = "Pych"`**
#'
#' This method applies the cosmic-ray removal algorithm described by Pych
#' (2003). Some aspects that are different to the publication:
#'
#' - For interpolation between neighbouring values the median and not the mean is used.
#' - The number of breaks to construct the histogram is set to: `length(number.of.input.values)/2` #' #' For further details see references below. #' #'**`method = "smooth"`** #' #' Method uses the function [smooth] to remove cosmic rays. #' #' Arguments that can be passed are: `kind`, `twiceit` #' #' **`method = "smooth.spline"`** #' #' Method uses the function [smooth.spline] to remove cosmic rays. #' #' Arguments that can be passed are: `spar` #' #' **How to combine methods?** #' #' Different methods can be combined by applying the method repeatedly to the #' dataset (see example). #' #' @param object [RLum.Data.Spectrum-class] or [RLum.Analysis-class] (**required**): input #' object to be treated. This can be also provided as [list]. If an [RLum.Analysis-class] object #' is provided, only the [RLum.Data.Spectrum-class] objects are treated. Please note: this mixing of #' objects do not work for a list of `RLum.Data` objects. #' #' @param method [character] (*with default*): #' Defines method that is applied for cosmic ray removal. Allowed methods are #' `smooth`, the default, ([smooth]), `smooth.spline` ([smooth.spline]) #' and `Pych`. See details for further information. #' #' @param method.Pych.smoothing [integer] (*with default*): #' Smoothing parameter for cosmic ray removal according to Pych (2003). #' The value defines how many neighboring values in each frame are used for smoothing #' (e.g., `2` means that the two previous and two following values are used). #' #' @param method.Pych.threshold_factor [numeric] (*with default*): #' Threshold for zero-bins in the histogram. Small values mean that more peaks #' are removed, but signal might be also affected by this removal. #' #' @param MARGIN [integer] (*with default*): #' on which part the function cosmic ray removal should be applied on: #' #' - 1 = along the time axis (line by line), #' - 2 = along the wavelength axis (column by column). 
#'
#' **Note:** This argument currently only affects the methods `smooth` and `smooth.spline`
#'
#' @param verbose [logical] (*with default*):
#' Option to suppress terminal output.
#'
#' @param plot [logical] (*with default*):
#' If `TRUE` the histograms used for the cosmic-ray removal are returned as plot
#' including the used threshold. Note: A separate plot is returned for each frame!
#' Currently only for `method = "Pych"` a graphical output is provided.
#'
#' @param ... further arguments and graphical parameters that will be passed
#' to the [smooth] function.
#'
#' @return Returns same object as input.
#'
#' @section Function version: 0.3.0
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne
#' (France)
#'
#' @seealso [RLum.Data.Spectrum-class], [RLum.Analysis-class], [smooth], [smooth.spline],
#' [apply_CosmicRayRemoval]
#'
#' @references
#' Pych, W., 2003. A Fast Algorithm for Cosmic-Ray Removal from
#' Single Images. Astrophysics 116, 148-153.
#' [http://arxiv.org/pdf/astro-ph/0311290.pdf?origin=publication_detail]()
#'
#' @keywords manip
#'
#' @examples
#'
#' ##(1) - use with your own data and combine (uncomment for usage)
#' ## run two times the default method and smooth with another method
#' ## your.spectrum <- apply_CosmicRayRemoval(your.spectrum, method = "Pych")
#' ## your.spectrum <- apply_CosmicRayRemoval(your.spectrum, method = "Pych")
#' ## your.spectrum <- apply_CosmicRayRemoval(your.spectrum, method = "smooth")
#'
#' @md
#' @export
apply_CosmicRayRemoval <- function(
  object,
  method = "smooth",
  method.Pych.smoothing = 2,
  method.Pych.threshold_factor = 3,
  MARGIN = 2,
  verbose = FALSE,
  plot = FALSE,
  ...
){

  # Self-call ----------------------------------------------------------------------------------
  ##The function recalls itself until all RLum.Data.Spectrum objects have been treated
  ##If you want to test the basics of the function please only use a single
  ##RLum.Data.Spectrum object

  ##if it comes in as an RLum.Analysis object ... make a list out of it
  ##fix: inherits() instead of class() ==, which breaks for multi-class objects
  if(inherits(object, "RLum.Analysis")){
    object <- list(object)
    class_original <- "RLum.Analysis"

  }else{
    class_original <- NULL
  }

  ##handle the list and recall
  if(inherits(object, "list")){
    results_list <- lapply(object, function(o){
      ##preset objects
      record_id.spectra <- NULL

      ##RLum.Analysis
      if(inherits(o, "RLum.Analysis")){
        ##get id of RLum.Data.Spectrum objects in this object
        record_id.spectra <- which(
          vapply(o@records, function(x) inherits(x, "RLum.Data.Spectrum"), logical(1)))

        ##rewrite o
        temp_o <- o@records[record_id.spectra]

      }else{
        temp_o <- o
      }

      ##call function
      ##fix: splice the extra arguments as genuine '...' arguments via do.call();
      ##the original passed them as one argument literally named '...'
      results <- do.call(apply_CosmicRayRemoval, c(
        list(
          object = temp_o,
          method = method,
          method.Pych.smoothing = method.Pych.smoothing,
          method.Pych.threshold_factor = method.Pych.threshold_factor,
          MARGIN = MARGIN,
          verbose = verbose,
          plot = plot
        ),
        list(...)))

      ##combine in RLum.Analysis object if needed
      if(!is.null(record_id.spectra)){
        o@records[record_id.spectra] <- results
        return(o)

      }else{
        return(results)
      }
    })

    ##final return, make sure that we return what we had as input
    if(!is.null(class_original)){
      return(results_list[[1]])

    }else{
      return(results_list)
    }
  }

  # Integrity check -----------------------------------------------------------
  ##check if object is of class RLum.Data.Spectrum
  if(!inherits(object, "RLum.Data.Spectrum")){
    stop(paste0("[apply_CosmicRayRemoval()] An object of class '",class(object)[1],
                "' is not supported as input; please read the manual!"),
         call. = FALSE)
  }

  ##deal with addition arguments
  extraArgs <- list(...)

  kind <- if("kind" %in% names(extraArgs)) {extraArgs$kind} else {"3RS3R"}
  twiceit <- if("twiceit" %in% names(extraArgs)) {extraArgs$twiceit} else {TRUE}
  spar <- if("spar" %in% names(extraArgs)) {extraArgs$spar} else {NULL}

  # Apply method ------------------------------------------------------------
  ## +++++++++++++++++++++++++++++++++++ (smooth) ++++++++++++++++++++++++++++##
  if(method == "smooth"){
    ##apply smooth
    object.data.temp.smooth <- apply(
      X = object@data,
      MARGIN = MARGIN,
      FUN = stats::smooth,
      kind = kind,
      twiceit = twiceit
    )

    ##rotate output matrix if necessary
    if(MARGIN == 1){
      object.data.temp.smooth <- t(object.data.temp.smooth)
    }

  ## +++++++++++++++++++++++++++++++++++ (smooth.spline) +++++++++++++++++++++##
  }else if(method == "smooth.spline"){
    ##wrap the call so that only the smoothed values are returned
    temp_smooth.spline <- function(x, spar){
      stats::smooth.spline(x, spar = spar)$y
    }

    ##apply smooth.spline
    object.data.temp.smooth <- apply(
      X = object@data,
      MARGIN = MARGIN,
      FUN = temp_smooth.spline,
      spar = spar
    )

    ##rotate output matrix if necessary
    if(MARGIN == 1){
      object.data.temp.smooth <- t(object.data.temp.smooth)
    }

  ## +++++++++++++++++++++++++++++++++++ (Pych) ++++++++++++++++++++++++++++++##
  }else if(method == "Pych"){
    ## grep data matrix
    object.data.temp <- object@data

    ## apply smoothing
    object.data.temp.smooth <- sapply(X = 1:ncol(object.data.temp), function(x){

      ##(1) - calculate sd for each subframe
      temp.sd <- sd(object.data.temp[,x])

      ##(2) - correct estimation of sd by 1-sigma clipping
      temp.sd.corr <- sd(object.data.temp[
        object.data.temp[,x] >= (mean(object.data.temp[,x]) - temp.sd) &
        object.data.temp[,x] <= (mean(object.data.temp[,x]) + temp.sd), x])

      ##(3) - construct histogram of count distribution
      temp.hist <- hist(object.data.temp[,x],
                        breaks = length(object.data.temp[,x])/2, plot = FALSE)

      ##(4) - find mode of the histogram (e.g. peak)
      temp.hist.max <- which.max(temp.hist$counts)

      ##(5) - find gaps in the histogram (bins with zero value)
      temp.hist.zerobin <- which(temp.hist$counts == 0)

      ##(5.1)
      ##select just values right from the peak
      temp.hist.zerobin <- temp.hist.zerobin[
        (temp.hist.max[1] + 1):length(temp.hist.zerobin)]

      ##(5.2)
      ##select non-zerobins
      temp.hist.nonzerobin <- which(temp.hist$counts != 0)
      temp.hist.nonzerobin <- temp.hist.nonzerobin[
        temp.hist.nonzerobin >= (temp.hist.zerobin[1]-1)]

      ##(6) - find the first gap which is wider than the threshold
      temp.hist.nonzerobin.diff <- diff(temp.hist$breaks[temp.hist.nonzerobin])

      ## select the first value where the threshold is reached
      ## factor 3 is defined by Pych (2003)
      temp.hist.thres <- which(
        temp.hist.nonzerobin.diff >= method.Pych.threshold_factor * temp.sd.corr)[1]

      ##(7) - use counts above the threshold and recalculate values
      ## on all further values
      if(!is.na(temp.hist.thres)){
        object.data.temp[,x] <- sapply(1:nrow(object.data.temp), function(n){
          ##fix: '&&' for the scalar condition (the original used vectorised '&')
          if(c(n + method.Pych.smoothing) <= nrow(object.data.temp) &&
             (n - method.Pych.smoothing) >= 0){
            ifelse(
              object.data.temp[n,x] >= temp.hist$breaks[temp.hist.thres],
              median(object.data.temp[(n - method.Pych.smoothing):
                                        (n + method.Pych.smoothing), x]),
              object.data.temp[n,x])

          }else{
            object.data.temp[n,x]
          }
        })
      }

      ##(8) - return histogram used for the removal as plot
      if(plot){
        plot(temp.hist,
             xlab = "Signal intensity [a.u.]",
             main = "Cosmic-ray removal histogram")
        abline(v = temp.hist$breaks[temp.hist.thres],
               col = "red")

        if(!is.na(temp.hist$breaks[temp.hist.thres])){
          legend("topright", "threshold" ,lty = 1, lwd = 1, col = "red", bty = "n")
          mtext(side = 3, paste0("Frame: ", x, " (",
                                 colnames(object.data.temp)[x], ")"))
        }else{
          mtext(side = 3, paste0("Frame: ", x, " (",
                                 colnames(object.data.temp)[x], ") - no threshold applied!"))
        }
      }

      ##(9) - return information on the amount of removed cosmic-rays
      if(verbose){
        #sum up removed counts values above the threshold
        sum.corrected.channels <- try(
          sum(temp.hist$counts[temp.hist.thres:length(temp.hist$counts)]),
          silent = TRUE)

        ##fix: inherits() instead of is(...)[1] == "try-error"
        if(inherits(sum.corrected.channels, "try-error")){sum.corrected.channels <- 0}

        cat("[apply_CosmicRayRemoval()] >> ")
        cat(paste(sum.corrected.channels, " channels corrected in frame ", x, "\n", sep = ""))
      }

      ##return object
      return(object.data.temp[,x])

    })#end loop

  }else{
    ##fix: typo 'Unkown' corrected in the error message
    stop("[apply_CosmicRayRemoval()] Unknown method for cosmic ray removal.")
  }

  # Correct row and column names --------------------------------------------
  colnames(object.data.temp.smooth) <- colnames(object@data)
  rownames(object.data.temp.smooth) <- rownames(object@data)

  # Return Output------------------------------------------------------------
  temp.output <- set_RLum(
    class = "RLum.Data.Spectrum",
    recordType = object@recordType,
    curveType = object@curveType,
    data = object.data.temp.smooth,
    info = object@info)

  invisible(temp.output)
}
#'
#' @seealso [RLum.Data.Curve-class], [RLum.Data.Spectrum-class]
#'
#' @examples
#'
#' ##load example data
#' data(ExampleData.CW_OSL_Curve, envir = environment())
#'
#' ##create RLum.Data.Curve object from this example
#' curve <-
#'   set_RLum(
#'     class = "RLum.Data.Curve",
#'     recordType = "OSL",
#'     data = as.matrix(ExampleData.CW_OSL_Curve)
#'   )
#'
#' ##plot data without and with 2 and 4 channel binning
#' plot_RLum(curve)
#' plot_RLum(bin_RLum.Data(curve, bin_size = 2))
#' plot_RLum(bin_RLum.Data(curve, bin_size = 4))
#'
#' @keywords utilities
#'
#' @md
#' @export
## S4 generic: dispatches to the class-specific binning implementation of the
## supplied RLum.Data object
setGeneric("bin_RLum.Data", function(object, ...) {
  standardGeneric("bin_RLum.Data")
})
#' #' \tabular{rllll}{ #' **#** \tab **Name** \tab **Data Type** \tab **V** \tab **Description** \cr #' `[,1]` \tab ID \tab `numeric` \tab RLum \tab Unique record ID (same ID as in slot `DATA`)\cr #' `[,2]` \tab SEL \tab `logic` \tab RLum \tab Record selection, not part official BIN-format, triggered by TAG\cr #' `[,3]` \tab VERSION \tab `raw` \tab 03-08 \tab BIN-file version number \cr #' `[,4]` \tab LENGTH \tab `integer` \tab 03-08 \tab Length of this record\cr #' `[,5]` \tab PREVIOUS \tab `integer` \tab 03-08 \tab Length of previous record\cr #' `[,6]` \tab NPOINTS \tab `integer` \tab 03-08 \tab Number of data points in the record\cr #' `[,7]` \tab RECTYPE \tab `integer` \tab 08 \tab Record type \cr #' `[,8]` \tab RUN \tab `integer` \tab 03-08 \tab Run number\cr #' `[,9]` \tab SET \tab `integer` \tab 03-08 \tab Set number\cr #' `[,10]` \tab POSITION \tab `integer` \tab 03-08 \tab Position number\cr #' `[,11]` \tab GRAIN \tab `integer` \tab 03-04 \tab Grain number\cr #' `[,12]` \tab GRAINNUMBER \tab `integer` \tab 05-08 \tab Grain number\cr #' `[,13]` \tab CURVENO \tab `integer` \tab 05-08 \tab Curve number\cr #' `[,14]` \tab XCOORD \tab `integer` \tab 03-08 \tab X position of a single grain\cr #' `[,15]` \tab YCOORD \tab `integer` \tab 03-08 \tab Y position of a single grain\cr #' `[,16]` \tab SAMPLE \tab `factor` \tab 03-08 \tab Sample name\cr #' `[,17]` \tab COMMENT \tab `factor` \tab 03-08 \tab Comment name\cr #' `[,18]` \tab SYSTEMID \tab `integer` \tab 03-08 \tab Risoe system id\cr #' `[,19]` \tab FNAME \tab `factor` \tab 05-08 \tab File name (*.bin/*.binx)\cr #' `[,20]` \tab USER \tab `facotr` \tab 03-08 \tab User name\cr #' `[,21]` \tab TIME \tab `character` \tab 03-08 \tab Data collection time (hh-mm-ss)\cr #' `[,22]` \tab DATE \tab `factor` \tab 03-08 \tab Data collection date (ddmmyy)\cr #' `[,23]` \tab DTYPE \tab `character` \tab 03-08 \tab Data type\cr #' `[,24]` \tab BL_TIME \tab `numeric` \tab 03-08 \tab Bleaching time\cr #' `[,25]` \tab BL_UNIT 
\tab `integer` \tab 03-08 \tab Bleaching unit (mJ, J, secs, mins, hrs)\cr #' `[,26]` \tab NORM1 \tab `numeric` \tab 03-08 \tab Normalisation factor (1)\cr #' `[,27]` \tab NORM2 \tab `numeric` \tab 03-08 \tab Normalisation factor (2)\cr #' `[,28]` \tab NORM3 \tab `numeric` \tab 03-08 \tab Normalisation factor (3)\cr #' `[,29]` \tab BG \tab `numeric` \tab 03-08 \tab Background level\cr #' `[,30]` \tab SHIFT \tab `integer` \tab 03-08 \tab Number of channels to shift data\cr #' `[,31]` \tab TAG \tab `integer` \tab 03-08 \tab Tag, triggers SEL\cr #' `[,32]` \tab LTYPE \tab `character` \tab 03-08 \tab Luminescence type\cr #' `[,33]` \tab LIGHTSOURCE \tab `character` \tab 03-08 \tab Light source\cr #' `[,34]` \tab LPOWER \tab `numeric` \tab 03-08 \tab Optical stimulation power\cr #' `[,35]` \tab LIGHTPOWER \tab `numeric` \tab 05-08 \tab Optical stimulation power\cr #' `[,36]` \tab LOW \tab `numeric` \tab 03-08 \tab Low (temperature, time, wavelength)\cr #' `[,37]` \tab HIGH \tab `numeric` \tab 03-08 \tab High (temperature, time, wavelength)\cr #' `[,38]` \tab RATE \tab `numeric` \tab 03-08 \tab Rate (heating rate, scan rate)\cr #' `[,39]` \tab TEMPERATURE \tab `integer` \tab 03-08 \tab Sample temperature\cr #' `[,40]` \tab MEASTEMP \tab `integer` \tab 05-08 \tab Measured temperature\cr #' `[,41]` \tab AN_TEMP \tab `numeric` \tab 03-08 \tab Annealing temperature\cr #' `[,42]` \tab AN_TIME \tab `numeric` \tab 03-08 \tab Annealing time\cr #' `[,43]` \tab TOLDELAY \tab `integer` \tab 03-08 \tab TOL 'delay' channels\cr #' `[,44]` \tab TOLON \tab `integer` \tab 03-08 \tab TOL 'on' channels\cr #' `[,45]` \tab TOLOFF \tab `integer` \tab 03-08 \tab TOL 'off' channels\cr #' `[,46]` \tab IRR_TIME \tab `numeric` \tab 03-08 \tab Irradiation time\cr #' `[,47]` \tab IRR_TYPE \tab `integer` \tab 03-08 \tab Irradiation type (alpha, beta or gamma)\cr #' `[,48]` \tab IRR_UNIT \tab `integer` \tab 03-04 \tab Irradiation unit (Gy, Rads, secs, mins, hrs)\cr #' `[,49]` \tab IRR_DOSERATE \tab 
`numeric` \tab 05-08 \tab Irradiation dose rate (Gy/s)\cr #' `[,50]` \tab IRR_DOSERATEERR \tab `numeric` \tab 06-08 \tab Irradiation dose rate error (Gy/s)\cr #' `[,51]` \tab TIMESINCEIRR \tab `integer` \tab 05-08 \tab Time since irradiation (s)\cr #' `[,52]` \tab TIMETICK \tab `numeric` \tab 05-08 \tab Time tick for pulsing (s)\cr #' `[,53]` \tab ONTIME \tab `integer` \tab 05-08 \tab On-time for pulsing (in time ticks)\cr #' `[,54]` \tab OFFTIME \tab `integer` \tab 03 \tab Off-time for pulsed stimulation (in s) \cr #' `[,55]` \tab STIMPERIOD \tab `integer` \tab 05-08 \tab Stimulation period (on+off in time ticks)\cr #' `[,56]` \tab GATE_ENABLED \tab `raw` \tab 05-08 \tab PMT signal gating enabled\cr #' `[,57]` \tab ENABLE_FLAGS \tab `raw` \tab 05-08 \tab PMT signal gating enabled\cr #' `[,58]` \tab GATE_START \tab `integer` \tab 05-08 \tab Start gating (in time ticks)\cr #' `[,59]` \tab GATE_STOP \tab `ingeter` \tab 05-08 \tab Stop gating (in time ticks), 'Gateend' for version 04, here only GATE_STOP is used\cr #' `[,60]` \tab PTENABLED \tab `raw` \tab 05-08 \tab Photon time enabled\cr #' `[,61]` \tab DTENABLED \tab `raw` \tab 05-08 \tab PMT dead time correction enabled\cr #' `[,62]` \tab DEADTIME \tab `numeric` \tab 05-08 \tab PMT dead time (s)\cr #' `[,63]` \tab MAXLPOWER \tab `numeric` \tab 05-08 \tab Stimulation power to 100 percent (mW/cm^2)\cr #' `[,64]` \tab XRF_ACQTIME \tab `numeric` \tab 05-08 \tab XRF acquisition time (s)\cr #' `[,65]` \tab XRF_HV \tab `numeric` \tab 05-08 \tab XRF X-ray high voltage (V)\cr #' `[,66]` \tab XRF_CURR \tab `integer` \tab 05-08 \tab XRF X-ray current (uA)\cr #' `[,67]` \tab XRF_DEADTIMEF \tab `numeric` \tab 05-08 \tab XRF dead time fraction\cr #' `[,68]` \tab DETECTOR_ID \tab `raw` \tab 07-08 \tab Detector ID\cr #' `[,69]` \tab LOWERFILTER_ID \tab `integer` \tab 07-08 \tab Lower filter ID in reader\cr #' `[,70]` \tab UPPERFILTER_ID \tab `integer` \tab 07-08 \tab Uper filter ID in reader\cr #' `[,71]` \tab ENOISEFACTOR \tab 
`numeric` \tab 07-08 \tab Excess noise filter, usage unknown \cr #' `[,72]` \tab MARKPOS_X1 \tab `numeric` \tab 08 \tab Coordinates marker position 1 \cr #' `[,73]` \tab MARKPOS_Y1 \tab `numeric` \tab 08 \tab Coordinates marker position 1 \cr #' `[,74]` \tab MARKPOS_X2 \tab `numeric` \tab 08 \tab Coordinates marker position 2 \cr #' `[,75]` \tab MARKPOS_Y2 \tab `numeric` \tab 08 \tab Coordinates marker position 2 \cr #' `[,76]` \tab MARKPOS_X3 \tab `numeric` \tab 08 \tab Coordinates marker position 3 \cr #' `[,77]` \tab MARKPOS_Y3 \tab `numeric` \tab 08 \tab Coordinates marker position 3 \cr #' `[,78]` \tab EXTR_START \tab `numeric` \tab 08 \tab usage unknown \cr #' `[,79]` \tab EXTR_END \tab `numeric` \tab 08 \tab usage unknown\cr #' `[,80]` \tab SEQUENCE \tab `character` \tab 03-04 \tab Sequence name #' } #' V = BIN-file version (RLum means that it does not depend on a specific BIN version) #' #' Note that the `Risoe.BINfileData` object combines all values from #' different versions from the BIN-file, reserved bits are skipped, however, #' the function [write_R2BIN] reset arbitrary reserved bits. Invalid #' values for a specific version are set to `NA`. Furthermore, the #' internal R data types do not necessarily match the required data types for #' the BIN-file data import! 
Data types are converted during data import.\cr #' #' **LTYPE** values #' #' \tabular{rll}{ #' `[,0]` \tab TL \tab: Thermoluminescence \cr #' `[,1]` \tab OSL \tab: Optically stimulated luminescence \cr #' `[,2]` \tab IRSL \tab: Infrared stimulated luminescence \cr #' `[,3]` \tab M-IR \tab: Infrared monochromator scan\cr #' `[,4]` \tab M-VIS \tab: Visible monochromator scan\cr #' `[,5]` \tab TOL \tab: Thermo-optical luminescence \cr #' `[,6]` \tab TRPOSL \tab: Time Resolved Pulsed OSL\cr #' `[,7]` \tab RIR \tab: Ramped IRSL\cr #' `[,8]` \tab RBR \tab: Ramped (Blue) LEDs\cr #' `[,9]` \tab USER \tab: User defined\cr #' `[,10]` \tab POSL \tab: Pulsed OSL \cr #' `[,11]` \tab SGOSL \tab: Single Grain OSL\cr #' `[,12]` \tab RL \tab: Radio Luminescence \cr #' `[,13]` \tab XRF \tab: X-ray Fluorescence #' } #' #' **DTYPE** values #' #' \tabular{rll}{ #' `[,0]` \tab 0 \tab Natural \cr #' `[,1]` \tab 1 \tab N+dose \cr #' `[,2]` \tab 2 \tab Bleach \cr #' `[,3]` \tab 3 \tab Bleach+dose \cr #' `[,4]` \tab 4 \tab Natural (Bleach) \cr #' `[,5]` \tab 5 \tab N+dose (Bleach) \cr #' `[,6]` \tab 6 \tab Dose \cr #' `[,7]` \tab 7 \tab Background #' } #' #' **LIGHTSOURCE** values #' #' \tabular{rll}{ #' `[,0]` \tab 0 \tab None \cr #' `[,1]` \tab 1 \tab Lamp \cr #' `[,2]` \tab 2 \tab IR diodes/IR Laser \cr #' `[,3]` \tab 3 \tab Calibration LED \cr #' `[,4]` \tab 4 \tab Blue Diodes \cr #' `[,5]` \tab 5 \tab White lite \cr #' `[,6]` \tab 6 \tab Green laser (single grain) \cr #' `[,7]` \tab 7 \tab IR laser (single grain) } #' #' (information on the BIN/BINX file format are kindly provided by Risoe, DTU Nutech) #' #' @section Objects from the Class: Objects can be created by calls of the form #' `new("Risoe.BINfileData", ...)`. 
#'
#' @section Function version: 0.4.0
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, Université Bordeaux Montaigne (France)\cr
#' based on information provided by Torben Lapp and Karsten Bracht Nielsen (Risø DTU, Denmark)
#'
#' @seealso [plot_Risoe.BINfileData], [read_BIN2R], [write_R2BIN],
#' [merge_Risoe.BINfileData], [Risoe.BINfileData2RLum.Analysis]
#'
#' @references
#' Risoe DTU, 2013. The Sequence Editor User Manual - Feb 2013 and Risoe DTU, 2016.
#'
#' The Sequence Editor User Manual - February 2016
#'
#' [http://www.nutech.dtu.dk/]()
#'
#' @keywords classes
#'
#' @examples
#'
#' showClass("Risoe.BINfileData")
#'
#' @md
#' @export
setClass(
  "Risoe.BINfileData",
  slots = list(
    METADATA = "data.frame",
    DATA = "list",
    .RESERVED = "list"
  ),
  ## empty prototype carrying one typed column per supported BIN/BINX metadata field
  prototype = prototype(
    METADATA = data.frame(
      ID = integer(),
      SEL = logical(),
      VERSION = integer(),
      LENGTH = integer(),
      PREVIOUS = integer(),
      NPOINTS = integer(),
      RECTYPE = integer(),
      RUN = integer(),
      SET = integer(),
      POSITION = integer(),
      GRAIN = integer(),
      GRAINNUMBER = integer(),
      CURVENO = integer(),
      XCOORD = integer(),
      YCOORD = integer(),
      SAMPLE = character(),
      COMMENT = character(),
      SYSTEMID = integer(),
      FNAME = character(),
      USER = character(),
      TIME = character(),
      DATE = character(),
      DTYPE = character(),
      BL_TIME = numeric(),
      BL_UNIT = integer(),
      NORM1 = numeric(),
      NORM2 = numeric(),
      NORM3 = numeric(),
      BG = numeric(),
      SHIFT = integer(),
      TAG = integer(),
      LTYPE = character(),
      LIGHTSOURCE = character(),
      LPOWER = numeric(),
      LIGHTPOWER = numeric(),
      LOW = numeric(),
      HIGH = numeric(),
      RATE = numeric(),
      TEMPERATURE = numeric(),
      MEASTEMP = numeric(),
      AN_TEMP = numeric(),
      AN_TIME = numeric(),
      TOLDELAY = integer(),
      TOLON = integer(),
      TOLOFF = integer(),
      IRR_TIME = numeric(),
      IRR_TYPE = integer(),
      IRR_UNIT = integer(),
      IRR_DOSERATE = numeric(),
      IRR_DOSERATEERR = numeric(),
      TIMESINCEIRR = numeric(),
      TIMETICK = numeric(),
      ONTIME = numeric(),
      OFFTIME = numeric(),
      STIMPERIOD = integer(),
      GATE_ENABLED = numeric(),
      ENABLE_FLAGS = numeric(),
      GATE_START = numeric(),
      GATE_STOP = numeric(),
      PTENABLED = numeric(),
      DTENABLED = numeric(),
      DEADTIME = numeric(),
      MAXLPOWER = numeric(),
      XRF_ACQTIME = numeric(),
      XRF_HV = numeric(),
      XRF_CURR = numeric(),
      XRF_DEADTIMEF = numeric(),
      DETECTOR_ID = integer(),
      LOWERFILTER_ID = integer(),
      UPPERFILTER_ID = integer(),
      ENOISEFACTOR = numeric(),
      MARKPOS_X1 = numeric(),
      MARKPOS_Y1 = numeric(),
      MARKPOS_X2 = numeric(),
      MARKPOS_Y2 = numeric(),
      MARKPOS_X3 = numeric(),
      MARKPOS_Y3 = numeric(),
      EXTR_START = numeric(),
      EXTR_END = numeric(),
      SEQUENCE = character(),
      stringsAsFactors=FALSE
    ),
    DATA = list(),
    .RESERVED = list()
  )
)

##set generic S4 function for object
#' @describeIn Risoe.BINfileData
#' Show structure of RLum and Risoe.BINfile class objects
#'
#' @md
#' @export
setMethod(
  f = "show",
  signature = signature(object = "Risoe.BINfileData"),
  definition = function(object){
    ## guard clause: nothing to summarise for an empty container
    if(nrow(object@METADATA) == 0){
      cat("\n[Risoe.BINfileData object]")
      cat("\n\n >> This object is empty!<<")
      return(invisible(NULL))
    }

    ## collapse the metadata columns into printable summaries
    bin.version <- paste(unique(object@METADATA[,"VERSION"]), collapse = ", ")
    system.id <- paste(unique(object@METADATA[,"SYSTEMID"]), collapse = ", ")
    file.name <- as.character(object@METADATA[1,"FNAME"])
    n.records <- length(object@DATA)
    type.table <- table(object@METADATA[,"LTYPE"])
    user.names <- paste(unique(as.character(object@METADATA[,"USER"])), collapse = ", ")
    record.dates <- paste(unique(as.character(object@METADATA[,"DATE"])), collapse = ", ")
    run.range <- range(object@METADATA[,"RUN"])
    set.range <- range(object@METADATA[,"SET"])
    grain.range <- range(object@METADATA[,"GRAIN"])
    pos.range <- range(object@METADATA[,"POSITION"])

    ## one line per luminescence type with its record count
    type.lines <- vapply(seq_along(type.table), function(i){
      paste(names(type.table)[i], "\t(n = ", type.table[i], ")", sep = "")
    }, character(1))
    type.lines <- paste(type.lines, collapse = "\n\t ")

    ##print
    cat("\n[Risoe.BINfileData object]")
    cat("\n\n\tBIN/BINX version ", bin.version)
    ## the file name field only exists from BIN version 6 onwards
    if(bin.version >= 6){
      cat("\n\tFile name: ", file.name)
    }
    cat("\n\tObject date: ", record.dates)
    cat("\n\tUser: ", user.names)
    cat("\n\tSystem ID: ", ifelse(system.id == 0,"0 (unknown)", system.id))
    cat("\n\tOverall records: ", n.records)
    cat("\n\tRecords type: ", type.lines)
    cat("\n\tPosition range: ",pos.range[1],":",pos.range[2])
    cat("\n\tGrain range: ",grain.range[1],":",grain.range[2])
    cat("\n\tRun range: ",run.range[1],":",run.range[2])
    cat("\n\tSet range: ",set.range[1],":",set.range[2])
  }#end function
)#end setMethod

# constructor (set) method for object class -----------------------------------
#' @describeIn Risoe.BINfileData
#' The Risoe.BINfileData is normally produced as output of the function read_BIN2R.
#' This construction method is intended for internal usage only.
#'
#' @param METADATA Object of class "data.frame" containing the meta information
#' for each curve.
#'
#' @param DATA Object of class "list" containing numeric vector with count data.
#'
#' @param .RESERVED Object of class "list" containing list of undocumented raw
#' values for internal use only.
#'
#' @md
#' @export
setMethod(
  f = "set_Risoe.BINfileData",
  signature = signature("ANY"),
  definition = function(METADATA, DATA, .RESERVED) {
    ## fall back to the class prototype when no metadata are supplied
    if(length(METADATA) == 0){
      new("Risoe.BINfileData")

    }else{
      new(
        "Risoe.BINfileData",
        METADATA = METADATA,
        DATA = DATA,
        .RESERVED = .RESERVED
      )
    }
  })

# accessor (get) method for object class -----------------------------------
#' @describeIn Risoe.BINfileData
#' Formal get-method for Risoe.BINfileData object. It does not allow accessing
#' the object directly, it is just showing a terminal message.
#'
#' @param object an object of class [Risoe.BINfileData-class]
#'
#' @param ... other arguments that might be passed
#'
#' @md
#' @export
setMethod(
  "get_Risoe.BINfileData",
  signature= "Risoe.BINfileData",
  definition = function(object, ...) {
    ## direct slot access is deliberately not offered; point the user to the coercion helper
    cat("[get_Risoe.BINfileData()] No direct access is provided for this object type.
Use the function 'Risoe.BINfileData2RLum.Analysis' for object coercing.")
  })##end setMethod

##-------------------------------------------------------------------------------------------------##
##=================================================================================================##
If a [list] is provided the function tries to iterate over the list.
Furthermore, the arguments `main` (headers), `log` (IRSL curves), `cex` (control #' the size) and `mtext.outer` (additional text on the plot area) can be passed to influence the plotting. If the input #' is list, `main` can be passed as [vector] or [list]. #' #' @return #' Plots (*optional*) and an [RLum.Results-class] object is #' returned containing the following elements: #' #' \tabular{lll}{ #' **DATA.OBJECT** \tab **TYPE** \tab **DESCRIPTION** \cr #' `..$data` : \tab `data.frame` \tab Table with De values \cr #' `..$LnLxTnTx.table` : \tab `data.frame` \tab with the LnLxTnTx values \cr #' `..$rejection.criteria` : \tab [data.frame] \tab rejection criteria \cr #' `..$Formula` : \tab [list] \tab Function used for fitting of the dose response curve \cr #' `..$call` : \tab [call] \tab the original function call #' } #' #' The output should be accessed using the function [get_RLum]. #' #' @note #' Best graphical output can be achieved by using the function `pdf` #' with the following options: #' #' `pdf(file = "", height = 15, width = 15)` #' #' @section Function version: 0.2.4 #' #' @author Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne #' (France) #' #' @seealso [analyse_SAR.CWOSL], [calc_OSLLxTxRatio], [plot_GrowthCurve], #' [RLum.Analysis-class], [RLum.Results-class] [get_RLum] #' #' @references #' Murray, A.S., Wintle, A.G., 2000. Luminescence dating of quartz #' using an improved single-aliquot regenerative-dose protocol. Radiation #' Measurements 32, 57-73. \doi{10.1016/S1350-4487(99)00253-X} #' #' Thomsen, K.J., Murray, A.S., Jain, M., Boetter-Jensen, L., 2008. Laboratory #' fading rates of various luminescence signals from feldspar-rich sediment #' extracts. Radiation Measurements 43, 1474-1486. #' \doi{10.1016/j.radmeas.2008.06.002} #' #' @keywords datagen plot #' #' @examples #' #' #' ### NOTE: For this example existing example data are used. These data are non pIRIR data. 
#' ### #' ##(1) Compile example data set based on existing example data (SAR quartz measurement) #' ##(a) Load example data #' data(ExampleData.BINfileData, envir = environment()) #' #' ##(b) Transform the values from the first position in a RLum.Analysis object #' object <- Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos=1) #' #' ##(c) Grep curves and exclude the last two (one TL and one IRSL) #' object <- get_RLum(object, record.id = c(-29,-30)) #' #' ##(d) Define new sequence structure and set new RLum.Analysis object #' sequence.structure <- c(1,2,2,3,4,4) #' sequence.structure <- as.vector(sapply(seq(0,length(object)-1,by = 4), #' function(x){sequence.structure + x})) #' #' object <- sapply(1:length(sequence.structure), function(x){ #' #' object[[sequence.structure[x]]] #' #' }) #' #' object <- set_RLum(class = "RLum.Analysis", records = object, protocol = "pIRIR") #' #' ##(2) Perform pIRIR analysis (for this example with quartz OSL data!) #' ## Note: output as single plots to avoid problems with this example #' results <- analyse_pIRIRSequence(object, #' signal.integral.min = 1, #' signal.integral.max = 2, #' background.integral.min = 900, #' background.integral.max = 1000, #' fit.method = "EXP", #' sequence.structure = c("TL", "pseudoIRSL1", "pseudoIRSL2"), #' main = "Pseudo pIRIR data set based on quartz OSL", #' plot.single = TRUE) #' #' #' ##(3) Perform pIRIR analysis (for this example with quartz OSL data!) 
#' ## Alternative for PDF output, uncomment and complete for usage
#' \dontrun{
#' tempfile <- tempfile(fileext = ".pdf")
#' pdf(file = tempfile, height = 15, width = 15)
#' results <- analyse_pIRIRSequence(object,
#' signal.integral.min = 1,
#' signal.integral.max = 2,
#' background.integral.min = 900,
#' background.integral.max = 1000,
#' fit.method = "EXP",
#' main = "Pseudo pIRIR data set based on quartz OSL")
#'
#' dev.off()
#' }
#'
#' @md
#' @export
analyse_pIRIRSequence <- function(
  object,
  signal.integral.min,
  signal.integral.max,
  background.integral.min,
  background.integral.max,
  dose.points = NULL,
  sequence.structure = c("TL", "IR50", "pIRIR225"),
  plot = TRUE,
  plot.single = FALSE,
  ...
){

# SELF CALL -----------------------------------------------------------------------------------
## If 'object' is a list of RLum.Analysis objects, the function calls itself
## on every list element and merges the results into one RLum.Results object.
if(is.list(object)){

  ## make life easy for the user: fall back to default integral limits
  ## (with a warning) instead of failing when they were not provided
  if(missing("signal.integral.min")){
    signal.integral.min <- 1
    warning("[analyse_pIRIRSequence()] 'signal.integral.min' missing, set to 1", call. = FALSE)
  }

  if(missing("signal.integral.max")){
    signal.integral.max <- 2
    warning("[analyse_pIRIRSequence()] 'signal.integral.max' missing, set to 2", call. = FALSE)
  }

  ##now we have to extend everything to allow list of arguments ...
this is just consequent signal.integral.min <- rep(list(signal.integral.min), length = length(object)) signal.integral.max <- rep(list(signal.integral.max), length = length(object)) background.integral.min <- rep(list(background.integral.min), length = length(object)) background.integral.max <- rep(list(background.integral.max), length = length(object)) sequence.structure <- rep(list(sequence.structure), length = length(object)) if(!is.null(dose.points)){ if(is(dose.points, "list")){ dose.points <- rep(dose.points, length = length(object)) }else{ dose.points <- rep(list(dose.points), length = length(object)) } }else{ dose.points <- rep(list(NULL), length(object)) } ##main if("main" %in% names(list(...))){ main_list <- rep(list(...)$main, length.out = length(object)) if(class(main_list) != "list"){ main_list <- as.list(main_list) } } ##run analysis temp <- lapply(1:length(object), function(x){ analyse_pIRIRSequence(object[[x]], signal.integral.min = signal.integral.min[[x]], signal.integral.max = signal.integral.max[[x]], background.integral.min = background.integral.min[[x]], background.integral.max = background.integral.max[[x]] , dose.points = dose.points[[x]], sequence.structure = sequence.structure[[x]], plot = plot, plot.single = plot.single, main = ifelse("main"%in% names(list(...)), main_list[[x]], paste0("ALQ #",x)), ...) }) ##combine everything to one RLum.Results object as this as what was written ... 
only ##one object ##merge results and check if the output became NULL results <- merge_RLum(temp) ##DO NOT use invisible here, this will stop the function from stopping if(length(results) == 0){ return(NULL) }else{ return(results) } } # General Integrity Checks --------------------------------------------------- ##GENERAL ##MISSING INPUT if(missing("object")==TRUE){ stop("[analyse_pIRIRSequence()] No value set for 'object'!") } ##INPUT OBJECTS if(is(object, "RLum.Analysis")==FALSE){ stop("[analyse_pIRIRSequence()] Input object is not of type 'RLum.Analyis'!") } ##CHECK ALLOWED VALUES IN SEQUENCE STRUCTURE temp.collect.invalid.terms <- paste(sequence.structure[ (!grepl("TL",sequence.structure)) & (!grepl("IR",sequence.structure)) & (!grepl("OSL",sequence.structure)) & (!grepl("EXCLUDE",sequence.structure))], collapse = ", ") if(temp.collect.invalid.terms != ""){ stop("[analyse_pIRIRSequence()] ", temp.collect.invalid.terms, " not allowed in 'sequence.structure'!") } # Deal with extra arguments ------------------------------------------------------------------- ##deal with addition arguments extraArgs <- list(...) 
## extract known plotting arguments from '...', falling back to defaults
mtext.outer <- if("mtext.outer" %in% names(extraArgs)) {extraArgs$mtext.outer} else {"MEASUREMENT INFO"}
main <- if("main" %in% names(extraArgs)) {extraArgs$main} else {""}
log <- if("log" %in% names(extraArgs)) {extraArgs$log} else {""}
cex <- if("cex" %in% names(extraArgs)) {extraArgs$cex} else {.7}

# Protocol Integrity Checks --------------------------------------------------

##(1) Check structure and remove curves that fit not the recordType criteria

##get sequence structure
temp.sequence.structure <- structure_RLum(object)

##remove data types that fit not to the allowed values
## i.e. keep only records whose recordType matches TL, OSL or IRSL
temp.sequence.rm.id <- temp.sequence.structure[
  (!grepl("TL",temp.sequence.structure[["recordType"]])) &
  (!grepl("OSL", temp.sequence.structure[["recordType"]])) &
  (!grepl("IRSL", temp.sequence.structure[["recordType"]]))
  ,"id"]

if(length(temp.sequence.rm.id)>0){

  ##removed record from data set
  object <- get_RLum(
    object,
    record.id = -temp.sequence.rm.id,
    drop = FALSE
  )

  ##compile warning message
  temp.sequence.rm.warning <- paste(
    temp.sequence.structure[temp.sequence.rm.id, "recordType"], collapse = ", ")

  temp.sequence.rm.warning <- paste(
    "Record types are unrecognised and have been removed:", temp.sequence.rm.warning)

  warning(temp.sequence.rm.warning, call. = FALSE)
}

##(2) Apply user sequence structure

##get sequence structure (again, as records may have been removed above)
temp.sequence.structure <- structure_RLum(object)

##try to account for a very common mistake: user requests "TL" steps,
##but the measurement contains none
if(any(grepl(sequence.structure, pattern = "TL", fixed = TRUE)) &&
   !any(grepl(temp.sequence.structure[["recordType"]], pattern = "TL", fixed = TRUE))){
  warning("[analyse_pIRIRSequence()] Your sequence does not contain 'TL' curves, trying to adapt 'sequence.structure' for you ...", call. = FALSE, immediate.
= TRUE) sequence.structure <- sequence.structure[!grepl(sequence.structure, pattern = "TL", fixed = TRUE)] } ##set values to structure data.frame ##but check first if(2 * length( rep(sequence.structure, nrow(temp.sequence.structure)/2/length(sequence.structure))) == length(temp.sequence.structure[["protocol.step"]])){ temp.sequence.structure[["protocol.step"]] <- rep( sequence.structure, nrow(temp.sequence.structure)/2/length(sequence.structure)) }else{ try(stop("[analyse_pIRIRSequence()] Number of records is not a multiple of the defined sequence structure! NULL returned!", call. = FALSE)) return(NULL) } ##remove values that have been excluded temp.sequence.rm.id <- temp.sequence.structure[ temp.sequence.structure[,"protocol.step"] == "EXCLUDE" ,"id"] if(length(temp.sequence.rm.id)>0){ ##remove from object object <- get_RLum( object, record.id = -temp.sequence.rm.id, drop = FALSE) ##remove from sequence structure sequence.structure <- sequence.structure[sequence.structure != "EXCLUDE"] ##set new structure temp.sequence.structure <- structure_RLum(object) temp.sequence.structure[, "protocol.step"] <- rep( sequence.structure, nrow(temp.sequence.structure)/2/length(temp.sequence.structure)) ##print warning message warning("[analyse_pIRIRSequence()] ", length(temp.sequence.rm.id), " records have been removed due to EXCLUDE!", call. 
= FALSE)
}

##============================================================================##
# Analyse data and plotting ----------------------------------------------------
##============================================================================##

##(1) find out how many runs are needed for the analysis by checking for "IR"
## now should be every signal except the TL curves
n.TL<- table(grepl("TL", sequence.structure))["TRUE"]
if(is.na(n.TL)) {n.TL<- 0}

## number of IR signals = number of protocol steps minus the TL steps
n.loops <- as.numeric(length(grepl("TL", sequence.structure)) - n.TL)

##grep ids of TL curves (we need them later on)
TL.curves.id <- temp.sequence.structure[
  temp.sequence.structure[,"protocol.step"] == "TL","id"]

##grep ids of all OSL curves (we need them later on)
IRSL.curves.id <- temp.sequence.structure[
  grepl("IR", temp.sequence.structure[,"protocol.step"]),"id"]

##grep information on the names of the IR curves, we need them later on
## NOTE(review): the nested indexing
## temp.sequence.structure[IRSL.curves.id,"id"] looks redundant (it
## re-indexes ids by ids); confirm it is equivalent to using
## IRSL.curves.id directly
pIRIR.curve.names <- unique(temp.sequence.structure[
  temp.sequence.structure[IRSL.curves.id,"id"],"protocol.step"])

##===========================================================================#
## set graphic layout using the layout option
## unfortunately a little bit more complicated than expected previously due
## the order of the produced plots by the previous functions

if(plot.single == FALSE & plot == TRUE){

  ##first (Tx,Tn, Lx,Ln)
  temp.IRSL.layout.vector.first <- c(3,5,6,7,3,5,6,8)

  ##middle (any other Lx,Ln)
  if(n.loops > 2){
    temp.IRSL.layout.vector.middle <- vapply(
      2:(n.loops - 1),
      FUN = function(x) {
        offset <- 5 * x - 1
        c((offset):(offset + 3), (offset):(offset + 2), offset + 4)
      },
      FUN.VALUE = vector(mode = "numeric", length = 8)
    )
  }

  ##last (Lx,Ln and legend)
  temp.IRSL.layout.vector.last <- c(
    ifelse(n.loops > 2,max(temp.IRSL.layout.vector.middle) + 1,
      max(temp.IRSL.layout.vector.first) + 1),
    ifelse(n.loops > 2,max(temp.IRSL.layout.vector.middle) + 2,
      max(temp.IRSL.layout.vector.first) + 2),
    ifelse(n.loops > 2,max(temp.IRSL.layout.vector.middle) + 4,
      max(temp.IRSL.layout.vector.first) + 4),
    ifelse(n.loops > 2,max(temp.IRSL.layout.vector.middle) + 5,
      max(temp.IRSL.layout.vector.first) + 5),
    ifelse(n.loops > 2,max(temp.IRSL.layout.vector.middle) + 1,
      max(temp.IRSL.layout.vector.first) + 1),
    ifelse(n.loops > 2,max(temp.IRSL.layout.vector.middle) + 2,
      max(temp.IRSL.layout.vector.first) + 2),
    ifelse(n.loops > 2,max(temp.IRSL.layout.vector.middle) + 4,
      max(temp.IRSL.layout.vector.first) + 4),
    ifelse(n.loops > 2,max(temp.IRSL.layout.vector.middle) + 6,
      max(temp.IRSL.layout.vector.first) + 6))

  ##options for different sets of curves
  if(n.loops > 2){
    temp.IRSL.layout.vector <- c(temp.IRSL.layout.vector.first,
                                 temp.IRSL.layout.vector.middle,
                                 temp.IRSL.layout.vector.last)
  }else{
    temp.IRSL.layout.vector <- c(temp.IRSL.layout.vector.first,
                                 temp.IRSL.layout.vector.last)
  }

  ##get layout information (saved so that par() can be restored at the end)
  def.par <- par(no.readonly = TRUE)

  ##set up layout matrix linked to the number of plot areas needed
  layout.matrix <- c(
    rep(c(2,4,1,1),2), #header row with TL curves and info window
    temp.IRSL.layout.vector, #IRSL curves,
    rep((max(temp.IRSL.layout.vector)-3),8), #legend,
    rep((max(temp.IRSL.layout.vector)+1),1), #GC
    rep((max(temp.IRSL.layout.vector)+2),1), #TnTc
    rep((max(temp.IRSL.layout.vector)+3),2), #Rejection criteria
    rep((max(temp.IRSL.layout.vector)+1),1), #GC
    rep((max(temp.IRSL.layout.vector)+2),1), #TnTc
    rep((max(temp.IRSL.layout.vector)+3),2)) #Rejection criteria

  ##set layout
  nf <- layout(
    matrix(layout.matrix,(max(layout.matrix)/2 + ifelse(n.loops > 2, 0,2)), 4, byrow = TRUE),
    widths = c(rep(c(1,1,1,.75),6),c(1,1,1,1)),
    heights = c(rep(c(1),(2+2*n.loops)),c(0.20, 0.20)))

  ## show the regions that have been allocated to each plot for debug
  #layout.show(nf)
}

##(1) INFO PLOT
## empty plot area used only to display the sequence structure as text
if (plot) {
  plot(NA,NA,
       ylim = c(0,1), xlab = "",
       xlim = c(0,1), ylab = "",
       axes = FALSE,
       main = main)

  text(0.5,0.5, paste(sequence.structure, collapse = "\n"), cex = cex *2)
}

##(2) set loop
## one iteration per IR signal; the TL curves are always carried along
for(i in 1:n.loops){

  ##compile record ids
  temp.id.sel <-
sort(c(TL.curves.id, IRSL.curves.id[seq(i,length(IRSL.curves.id),by=n.loops)])) ##(a) select data set (TL curves has to be considered for the data set) temp.curves <- get_RLum(object, record.id = temp.id.sel, drop = FALSE) ##(b) grep integral limits as they might be different for different curves if(length(signal.integral.min)>1){ temp.signal.integral.min <- signal.integral.min[i] temp.signal.integral.max <- signal.integral.max[i] temp.background.integral.min <- background.integral.min[i] temp.backbround.integral.max <- background.integral.max[i] }else{ temp.signal.integral.min <- signal.integral.min temp.signal.integral.max <- signal.integral.max temp.background.integral.min <- background.integral.min temp.background.integral.max <- background.integral.max } ##(c) call analysis sequence and plot ## call single plots if(i == 1){ temp.plot.single <- c(1,2,3,4,6) }else if(i == n.loops){ temp.plot.single <- c(2,4,5,6) }else{ temp.plot.single <- c(2,4,6) } ##start analysis temp.results <- analyse_SAR.CWOSL( temp.curves, signal.integral.min = temp.signal.integral.min, signal.integral.max = temp.signal.integral.max, background.integral.min = temp.background.integral.min, background.integral.max = temp.background.integral.max, plot = plot, dose.points = dose.points, plot.single = temp.plot.single, output.plotExtended.single = TRUE, cex.global = cex, ... ) ##TODO should be replaced be useful explicit arguments ##check whether NULL was return if (is.null(temp.results)) { try(stop("[plot_pIRIRSequence()] An error occurred, analysis skipped. Check your sequence!", call. 
      = FALSE))
    return(NULL)
  }

  ##add signal information to the protocol step
  ## each result table gets a 'Signal' column carrying the pIRIR curve name
  temp.results.pIRIR.De <- as.data.frame(c(
    get_RLum(temp.results, "data"),
    data.frame(Signal = pIRIR.curve.names[i])
  ))

  temp.results.pIRIR.LnLxTnTx <- as.data.frame(c(
    get_RLum(temp.results, "LnLxTnTx.table"),
    data.frame(Signal = pIRIR.curve.names[i])
  ))

  temp.results.pIRIR.rejection.criteria <- as.data.frame(c(
    get_RLum(temp.results, "rejection.criteria"),
    data.frame(Signal = pIRIR.curve.names[i])
  ))

  temp.results.pIRIR.formula <- list(get_RLum(temp.results, "Formula"))
  names(temp.results.pIRIR.formula) <- pIRIR.curve.names[i]

  ##create now object
  temp.results <- set_RLum(
    class = "RLum.Results",
    data = list(
      data = temp.results.pIRIR.De,
      LnLxTnTx.table = temp.results.pIRIR.LnLxTnTx,
      rejection.criteria = temp.results.pIRIR.rejection.criteria,
      Formula = temp.results.pIRIR.formula
    ),
    info = list(
      call = sys.call()
    )
  )

  ##merge results (accumulate over the signal loop)
  if (exists("temp.results.final")) {
    temp.results.final <- merge_RLum(list(temp.results.final, temp.results))
  } else{
    temp.results.final <- temp.results
  }
}

##============================================================================##
# Plotting additionals--------------------------------------------------------
##============================================================================##

if(plot){

  ##extract LnLnxTnTx.table
  LnLxTnTx.table <- get_RLum(temp.results.final, "LnLxTnTx.table")

  ##remove Inf (infinite ratios would break the axis-range computation below)
  if(any(is.infinite(LnLxTnTx.table[["LxTx"]])))
    LnLxTnTx.table[["LxTx"]][is.infinite(LnLxTnTx.table[["LxTx"]])] <- NA

  if(any(is.infinite(LnLxTnTx.table[["LxTx.Error"]])))
    LnLxTnTx.table[["LxTx.Error"]][is.infinite(LnLxTnTx.table[["LxTx.Error"]])] <- NA

  ##plot growth curves
  plot(NA, NA,
       xlim = range(get_RLum(temp.results.final, "LnLxTnTx.table")$Dose),
       ylim = c(
         if(min(LnLxTnTx.table$LxTx, na.rm = TRUE) - max(LnLxTnTx.table$LxTx.Error, na.rm = TRUE) < 0){
           min(LnLxTnTx.table$LxTx, na.rm = TRUE)- max(LnLxTnTx.table$LxTx.Error, na.rm = TRUE)
         }else{0},
         max(LnLxTnTx.table$LxTx, na.rm = TRUE)+ max(LnLxTnTx.table$LxTx.Error, na.rm = TRUE)),
       xlab = "Dose [s]",
       ylab = expression(L[x]/T[x]),
       main = "Summarised Dose Response Curves")

  ##set x for expression evaluation (dose axis grid for the fitted curves)
  x <- seq(0,max(LnLxTnTx.table$Dose)*1.05,length = 100)

  for(j in 1:length(pIRIR.curve.names)){

    ##dose points for the j-th signal; row 1 (the natural) is excluded
    ##from the point/error-bar plotting below via [-1, ]
    temp.curve.points <- LnLxTnTx.table[,c("Dose", "LxTx", "LxTx.Error", "Signal")]

    temp.curve.points <- temp.curve.points[
      temp.curve.points[,"Signal"] == pIRIR.curve.names[j],
      c("Dose", "LxTx", "LxTx.Error")]

    points(temp.curve.points[-1,c("Dose", "LxTx")], col = j, pch = j)

    ##error bars
    segments(x0 = temp.curve.points[-1,c("Dose")],
             y0 = temp.curve.points[-1,c("LxTx")] - temp.curve.points[-1,c("LxTx.Error")],
             x1 = temp.curve.points[-1,c("Dose")],
             y1 = temp.curve.points[-1,c("LxTx")] + temp.curve.points[-1,c("LxTx.Error")],
             col = j)

    ##De values (dashed cross-hairs from the natural to the dose axis)
    lines(c(0, get_RLum(temp.results.final, "data")[j,1]),
          c(temp.curve.points[1,c("LxTx")], temp.curve.points[1,c("LxTx")]), col = j, lty = 2)

    lines(c(rep(get_RLum(temp.results.final, "data")[j,1], 2)),
          c(temp.curve.points[1,c("LxTx")], 0), col = j, lty = 2)

    ##curve (evaluate the stored fit formula over x; failures are silent)
    temp.curve.formula <- get_RLum(
      temp.results.final, "Formula")[[pIRIR.curve.names[j]]]

    try(lines(x, eval(temp.curve.formula), col = j), silent = TRUE)
  }

  rm(x)

  ##plot legend
  legend("bottomright", legend = pIRIR.curve.names,
         lty = 1, col = c(1:length(pIRIR.curve.names)),
         bty = "n",
         pch = c(1:length(pIRIR.curve.names))
  )

  ##plot Tn/Tx curves
  ##select signal
  temp.curve.TnTx <- LnLxTnTx.table[, c("TnTx", "Signal")]

  ##one column per signal, one row per SAR cycle
  temp.curve.TnTx.matrix <- matrix(NA,
                                   nrow = nrow(temp.curve.TnTx)/
                                     length(pIRIR.curve.names),
                                   ncol = length(pIRIR.curve.names))

  ##calculate normalised values (relative to the first cycle of each signal)
  for(j in 1:length(pIRIR.curve.names)){
    temp.curve.TnTx.sel <- temp.curve.TnTx[
      temp.curve.TnTx[,"Signal"] == pIRIR.curve.names[j]
      , "TnTx"]

    temp.curve.TnTx.matrix[,j] <- temp.curve.TnTx.sel/temp.curve.TnTx.sel[1]
  }

  plot(NA, NA,
       xlim = c(0,nrow(LnLxTnTx.table)/
                  n.loops),
       ylim = range(temp.curve.TnTx.matrix),
       xlab = "# Cycle",
       ylab = expression(T[x]/T[n]),
       main = "Sensitivity change")

  ## horizontal grey guide lines at integer Tx/Tn values
  abline(h = 1:nrow(temp.curve.TnTx.matrix), col = "gray")

  for(j in 1:length(pIRIR.curve.names)){
    lines(1:nrow(temp.curve.TnTx.matrix),
          temp.curve.TnTx.matrix[,j],
          type = "b", col = j, pch = j)
  }

  ##plot legend
  legend("bottomleft", legend = pIRIR.curve.names,
         lty = 1, col = c(1:length(pIRIR.curve.names)),
         bty = "n",
         pch = c(1:length(pIRIR.curve.names))
  )

  ##Rejection criteria
  ## split the rejection-criteria table into the three criterion groups
  temp.rejection.criteria <- get_RLum(temp.results.final, data.object = "rejection.criteria")

  temp.rc.reycling.ratio <- temp.rejection.criteria[
    grep("Recycling ratio",temp.rejection.criteria[,"Criteria"]),]

  temp.rc.recuperation.rate <- temp.rejection.criteria[
    grep("Recuperation rate",temp.rejection.criteria[,"Criteria"]),]

  temp.rc.palaedose.error <- temp.rejection.criteria[
    grep("Palaeodose error",temp.rejection.criteria[,"Criteria"]),]

  ## empty canvas: three horizontal bands (y = 21-29, 11-19, 1-9) hold the
  ## three criteria; thresholds are drawn as grey polygons
  plot(NA,NA,
       xlim = c(-0.5,0.5),
       ylim = c(0,30),
       yaxt = "n", ylab = "",
       xaxt = "n", xlab = "",
       bty = "n",
       main = "Rejection criteria")

  axis(side = 1, at = c(-0.2,-0.1,0,0.1,0.2), labels = c("- 0.2", "- 0.1","0/1","+ 0.1", "+ 0.2"))

  ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++##
  ##polygon for recycling ratio
  text(x = -.4, y = 30, "Recycling ratio", pos = 1, srt = 0)

  polygon(x = c(-as.numeric(as.character(temp.rc.reycling.ratio$Threshold))[1],
                -as.numeric(as.character(temp.rc.reycling.ratio$Threshold))[1],
                as.numeric(as.character(temp.rc.reycling.ratio$Threshold))[1],
                as.numeric(as.character(temp.rc.reycling.ratio$Threshold))[1]),
          y = c(21,29,29,21), col = "gray", border = NA)
  polygon(x = c(-0.3,-0.3,0.3,0.3) , y = c(21,29,29,21))

  ##consider possibility of multiple pIRIR signals and multiple recycling ratios
  col.id <- 1

  ## this conditional guards the case where no rejection criteria could be
  ## calculated at all
  if(nrow(temp.rc.recuperation.rate)>0){
    for(i in seq(1,nrow(temp.rc.recuperation.rate),
length(unique(temp.rc.recuperation.rate[,"Criteria"])))){ for(j in 0:length(unique(temp.rc.recuperation.rate[,"Criteria"]))){ points(temp.rc.reycling.ratio[i+j, "Value"]-1, y = 25, pch = col.id, col = col.id) } col.id <- col.id + 1 } }#endif rm(col.id) ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++## ##polygon for recuperation rate text(x = -.4, y = 20, "Recuperation rate", pos = 1, srt = 0) if(length(as.character(temp.rc.recuperation.rate$Threshold))>0){ polygon(x = c(0, 0, as.numeric(as.character(temp.rc.recuperation.rate$Threshold))[1], as.numeric(as.character(temp.rc.recuperation.rate$Threshold))[1]), y = c(11,19,19,11), col = "gray", border = NA) polygon(x = c(-0.3,-0.3,0.3,0.3) , y = c(11,19,19,11)) polygon(x = c(-0.3,-0.3,0,0) , y = c(11,19,19,11), border = NA, density = 10, angle = 45) for(i in 1:nrow(temp.rc.recuperation.rate)){ points(temp.rc.palaedose.error[i, "Value"], y = 15, pch = i, col = i) } }#endif ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++## ##polygon for palaeodose error text(x = -.4, y = 10, "Palaeodose error", pos = 1, srt = 0) polygon(x = c(0, 0, as.numeric(as.character(temp.rc.palaedose.error$Threshold))[1], as.numeric(as.character(temp.rc.palaedose.error$Threshold))[1]), y = c(1,9,9,1), col = "gray", border = NA) polygon(x = c(-0.3,-0.3,0.3,0.3) , y = c(1,9,9,1)) polygon(x = c(-0.3,-0.3,0,0) , y = c(1,9,9,1), border = NA, density = 10, angle = 45) for(i in 1:nrow(temp.rc.palaedose.error)){ points(temp.rc.palaedose.error[i, "Value"], y = 5, pch = i, col = i) } ##add 0 value lines(x = c(0,0), y = c(0,19), lwd = 1.5*cex) lines(x = c(0,0), y = c(20,29), lwd = 1.5*cex) ##plot legend legend("bottomright", legend = pIRIR.curve.names, col = c(1:length(pIRIR.curve.names)), bty = "n", pch = c(1:length(pIRIR.curve.names))) ##reset graphic settings if(plot.single == FALSE){par(def.par)} }##end plot == TRUE 
##============================================================================## # Return Values ----------------------------------------------------------- ##============================================================================## return(temp.results.final) } Luminescence/R/Luminescence-package.R0000644000176200001440000011277113604172773017236 0ustar liggesusers#' Comprehensive Luminescence Dating Data Analysis #' #' \if{html}{ #' \figure{RL_Logo.png}{options: width="50px" alt="r-luminescence.org"}\cr #' \emph{R Luminescence Developer Team} #' } #' #' A collection of various R functions for the purpose of Luminescence dating #' data analysis. This includes, amongst others, data import, export, #' application of age models, curve deconvolution, sequence analysis and #' plotting of equivalent dose distributions. #' #' #' @name Luminescence-package #' #' @aliases Luminescence-package Luminescence #' #' @docType package #' #' @details **Full list of authors and contributors** (alphabetic order) #' #' \tabular{ll}{ #' Martin Autzen \tab *DTU NUTECH Center for Nuclear Technologies* \cr #' Christoph Burow \tab *University of Cologne, Germany** \cr #' Claire Christophe \tab *IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne, France* \cr #' Michael Dietze \tab *GFZ Helmholtz Centre Potsdam, Germany* \cr #' Julie Durcan \tab *University of Oxford, United Kingdom* \cr #' Pierre Guibert \tab *IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne, France* \cr #' Manfred Fischer\tab *University of Bayreuth, Germany* \cr #' Margret C. Fuchs \tab *Helmholtz-Zentrum Dresden-Rossendorf, Helmholtz-Institute Freiberg for Resource Technology, Freiberg, Germany* \cr #' Johannes Friedrich \tab *Chair of Geomorphology, University of Bayreuth, Germany* \cr #' Guillaume Guérin \tab *IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne, France* \cr #' Georgina E. 
King \tab *University of Lausanne, Switzerland* \cr #' Sebastian Kreutzer \tab *Department of Geography & Earth Sciences, Aberystwyth University, United Kingdom * \cr #' Norbert Mercier \tab *IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne, France* \cr #' Svenja Riedesel \tab *Aberystwyth University, United Kingdom* \cr #' Christoph Schmidt \tab *Chair of Geomorophology, University of Bayreuth, Germany* \cr #' Rachel K. Smedley \tab *Liverpool University, United Kingdom* \cr #' Anne Philippe \tab *Universite de Nantes and ANJA INRIA, Rennes, France* \cr #' Antoine Zink \tab *C2RMF, Palais du Louvre, Paris, France* #' } #' #' **Supervisor of the initial version in 2012** #' #' Markus Fuchs, Justus-Liebig-University Giessen, Germany #' #' **Support contact** #' #' \email{developers@@r-luminescence.org} #' #' We may further encourage the usage of our support forum. For this please #' visit our project website (link below). #' #' **Bug reporting** #' #' - \email{developers@@r-luminescence.org} or #' - [https://github.com/R-Lum/Luminescence/issues]() #' #' **Project website** #' #' - [http://www.r-luminescence.org]() #' #' **Project source code repository** #' #' - [https://github.com/R-Lum/Luminescence]() #' #' **Related package projects** #' #' - [https://cran.r-project.org/package=RLumShiny]() #' - [https://cran.r-project.org/package=RLumModel]() #' #' **Package maintainer** #' #' Sebastian Kreutzer, Department of of Geography & Earth Sciences, Aberystwyth University, United Kingdom,\cr #' \email{sebastian.kreutzer@@u-bordeaux-montaigne.fr} #' #' **Funding** #' #' Cooperation and personal exchange between the developers is gratefully #' funded by the DFG (SCHM 3051/3-1) in the framework of the program #' "Scientific Networks". 
Project title: "RLum.Network: Ein #' Wissenschaftsnetzwerk zur Analyse von Lumineszenzdaten mit R" (2014-2018) #' #' Between 2014--2019, the work of Sebastian Kreutzer as maintainer of the package was supported #' by LabEx LaScArBxSK (ANR - n. ANR-10-LABX-52). #' #' @references #' Dietze, M., Kreutzer, S., Fuchs, M.C., Burow, C., Fischer, M., #' Schmidt, C., 2013. A practical guide to the R package Luminescence. #' Ancient TL, 31 (1), 11-18. #' #' Dietze, M., Kreutzer, S., Burow, C., Fuchs, M.C., Fischer, M., Schmidt, C., 2016. The abanico plot: #' visualising chronometric data with individual standard errors. Quaternary Geochronology 31, 1-7. #' https://doi.org/10.1016/j.quageo.2015.09.003 #' #' Fuchs, M.C., Kreutzer, S., Burow, C., Dietze, M., Fischer, M., Schmidt, C., #' Fuchs, M., 2015. Data processing in luminescence dating analysis: An #' exemplary workflow using the R package 'Luminescence'. Quaternary #' International, 362,8-13. https://doi.org/10.1016/j.quaint.2014.06.034 #' #' Kreutzer, S., Schmidt, C., Fuchs, M.C., Dietze, M., Fischer, M., Fuchs, M., #' 2012. Introducing an R package for luminescence dating analysis. Ancient TL, #' 30 (1), 1-8. #' #' Mercier, N., Kreutzer, S., Christophe, C., Guérin, G., Guibert, P., Lahaye, C., Lanos, P., Philippe, A., #' Tribolo, C., 2016. Bayesian statistics in luminescence dating: The ’baSAR’-model and its #' implementation in the R package ’Luminescence’. Ancient TL 34 (2), 14-21. #' #' Smedley, R.K., 2015. A new R function for the Internal External Uncertainty (IEU) model. #' Ancient TL, 33 (1), 16-21. #' #' King, E.G., Burow, C., Roberts, H., Pearce, N.J.G., 2018. Age determination #' using feldspar: evaluating fading-correction model performance. Radiation Measurements 119, 58-73. 
#' https://doi.org/10.1016/j.radmeas.2018.07.013 #' #' @keywords package #' #' @import utils methods data.table magrittr #' #' @importFrom raster nlayers raster contour plot plotRGB brick #' @importFrom graphics plot plot.default frame abline mtext text lines par layout lines arrows axTicks axis barplot box boxplot contour curve grconvertX grconvertY hist legend persp points polygon rug segments title grid close.screen screen split.screen #' @importFrom grDevices adjustcolor axisTicks colorRampPalette gray.colors rgb topo.colors xy.coords dev.off #' @importFrom stats formula approx as.formula complete.cases density dnorm glm lm median na.exclude na.omit nls nls.control pchisq pnorm quantile rnorm runif sd smooth smooth.spline spline t.test uniroot var weighted.mean setNames coef confint predict update residuals fitted qf #' @importFrom parallel parLapply makeCluster stopCluster #' @importFrom httr GET accept_json status_code content #' #' @useDynLib Luminescence, .registration = TRUE #' #' @md NULL #' Base data set of dose-rate conversion factors #' #' Collection of published dose-rate conversion factors to convert concentrations #' of radioactive isotopes to dose rate values. #' #' @format #' #' A [`list`] with three elements with dose-rate conversion factors #' sorted by article and radiation type (alpha, beta, gamma): #' #' \tabular{ll}{ #' #' `AdamiecAitken1998`: \tab #' Conversion factors from Tables 5 and 6 \cr #' #' `Guerinetal2011`: \tab #' Conversion factors from Tables 1, 2 and 3 \cr #' #' `Liritzisetal2013`: \tab #' Conversion factors from Tables 1, 2 and 3 \cr #' } #' #' #' @section Version: 0.1 #' #' @references #' Guerin, G., Mercier, N., Adamiec, G., 2011. Dose-rate conversion #' factors: update. Ancient TL, 29, 5-8. #' #' Adamiec, G., Aitken, M.J., 1998. Dose-rate conversion factors: update. #' Ancient TL 16, 37-46. #' #' Liritzis, I., Stamoulis, K., Papachristodoulou, C., Ioannides, K., 2013. 
#' A re-evaluation of radiation dose-rate conversion factors. Mediterranean #' Archaeology and Archaeometry 13, 1-15. #' #' @source #' All gamma conversion factors were carefully read from the tables given in the #' references above. #' #' @keywords datasets #' #' @examples #' #' ## Load data #' data("BaseDataSet.ConversionFactors") #' #' @name BaseDataSet.ConversionFactors #' @md NULL #' Base data set of fractional gamma-dose values #' #' Collection of (un-)published fractional gamma dose-rate values to scale the #' gamma-dose rate considering layer-to-layer variations in soil radioactivity. #' #' @format #' #' A [`list`] with fractional gamma dose-rate values #' sorted by article: #' #' \tabular{ll}{ #' #' `Aitken1985`: \tab #' Fractional gamma-dose values from table H.1 #' } #' #' #' @section Version: 0.1 #' #' @references #' Aitken, M.J., 1985. Thermoluminescence Dating. Academic Press, London. #' #' @source #' Fractional gamma dose values were carefully read from the tables given in the #' references above. #' #' @keywords datasets #' #' @examples #' #' ## Load data #' data("BaseDataSet.FractionalGammaDose") #' #' @name BaseDataSet.FractionalGammaDose #' @md NULL #' Example data for scale_GammaDose() #' #' An example data set for the function `scale_GammaDose()` containing layer #' specific information to scale the gamma dose rate considering variations in #' soil radioactivity. #' #' @format #' #' A [`data.frame`]. Please see `?scale_GammaDose()` for a detailed description #' of its structure. 
#' #' #' @section Version: 0.1 #' #' @keywords datasets #' #' @examples #' #' ## Load data #' data("ExampleData.ScaleGammaDose") #' #' @name ExampleData.ScaleGammaDose #' @md NULL #' Base data set for cosmic dose rate calculation #' #' Collection of data from various sources needed for cosmic dose rate #' calculation #' #' #' @format #' #' \tabular{ll}{ #' #' `values.cosmic.Softcomp`: \tab #' data frame containing cosmic dose rates #' for shallow depths (< 167 g cm^-2) obtained using the "AGE" program by #' Rainer Gruen (cf. Gruen 2009). These data essentially reproduce the graph #' shown in Fig. 1 of Prescott & Hutton (1988). \cr #' #' `values.factor.Altitude`: \tab #' data frame containing altitude factors #' for adjusting geomagnetic field-change factors. Values were read from Fig. 1 #' in Prescott & Hutton (1994). \cr #' #' `values.par.FJH`: \tab #' data frame containing values for parameters F, J #' and H (read from Fig. 2 in Prescott & Hutton 1994) used in the expression \cr #' } #' #' \deqn{Dc = D0*(F+J*exp((altitude/1000)/H))} #' #' @section Version: 0.1 #' #' @references #' Gruen, R., 2009. The "AGE" program for the calculation of luminescence age estimates. #' Ancient TL, 27, pp. 45-46. #' #' Prescott, J.R., Hutton, J.T., 1988. Cosmic ray and gamma ray dosimetry for #' TL and ESR. Nuclear Tracks and Radiation Measurements, 14, pp. 223-227. #' #' Prescott, J.R., Hutton, J.T., 1994. Cosmic ray contributions to dose rates #' for luminescence and ESR dating: large depths and long-term time variations. #' Radiation Measurements, 23, pp. 497-500. #' #' @source #' The following data were carefully read from figures in mentioned #' sources and used for fitting procedures. The derived expressions are used in #' the function `calc_CosmicDoseRate`. 
#' #' **values.cosmic.Softcomp** #' #' \tabular{ll}{ #' Program: \tab "AGE"\cr #' Reference: \tab Gruen (2009) \cr #' Fit: \tab Polynomials in the form of #' } #' #' For depths between 40-167 g cm^-2: #' #' \deqn{y = 2*10^-6*x^2-0.0008*x+0.2535} #' #' (For depths <40 g cm^-2) #' #' \deqn{y = -6*10^-8*x^3+2*10^-5*x^2-0.0025*x+0.2969} #' #' **values.factor.Altitude** #' #' \tabular{ll}{ #' Reference: \tab Prescott & Hutton (1994) \cr #' Page: \tab 499 \cr #' Figure: \tab 1 \cr #' Fit: \tab 2-degree polynomial in the form of #' } #' #' \deqn{y = -0.026*x^2 + 0.6628*x + 1.0435} #' #' #' **values.par.FJH** #' #' \tabular{ll}{ #' Reference: \tab Prescott & Hutton (1994) \cr #' Page: \tab 500 \cr #' Figure: \tab 2 \cr #' Fits: \tab 3-degree polynomials and linear fits #' } #' #' F (non-linear part, \eqn{\lambda} < 36.5 deg.): #' #' \deqn{y = -7*10^-7*x^3-8*10^-5*x^2-0.0009*x+0.3988} #' #' F (linear part, \eqn{\lambda} > 36.5 deg.): #' #' \deqn{y = -0.0001*x + 0.2347} #' #' J (non-linear part, \eqn{\lambda} < 34 deg.): #' #' \deqn{y = 5*10^-6*x^3-5*10^-5*x^2+0.0026*x+0.5177} #' #' J (linear part, \eqn{\lambda} > 34 deg.): #' #' \deqn{y = 0.0005*x + 0.7388} #' #' H (non-linear part, \eqn{\lambda} < 36 deg.): #' #' \deqn{y = -3*10^-6*x^3-5*10^-5*x^2-0.0031*x+4.398} #' #' H (linear part, \eqn{\lambda} > 36 deg.): #' #' \deqn{y = 0.0002*x + 4.0914} #' #' @keywords datasets #' #' @examples #' #' ##load data #' data(BaseDataSet.CosmicDoseRate) #' #' @name BaseDataSet.CosmicDoseRate #' @aliases values.cosmic.Softcomp values.factor.Altitude values.par.FJH #' #' @md NULL #' @title Example data from a SAR OSL and SAR TL measurement for the package #' Luminescence #' #' @description Example data from a SAR OSL and TL measurement for package Luminescence #' directly extracted from a Risoe BIN-file and provided in an object of type #' [Risoe.BINfileData-class] #' @format #' #' `CWOSL.SAR.Data`: SAR OSL measurement data #' #' `TL.SAR.Data`: SAR TL measurement data #' #' Each class 
object contains two slots: (a) `METADATA` is a [data.frame] with #' all metadata stored in the BIN file of the measurements and (b) `DATA` #' contains a list of vectors of the measured data (usually count values). #' #' @section Version: 0.1 #' #' @references #' **CWOSL.SAR.Data**: unpublished data #' #' **TL.SAR.Data**: unpublished data #' #' @source **CWOSL.SAR.Data** #' #' \tabular{ll}{ #' Lab: \tab Luminescence Laboratory Bayreuth \cr #' Lab-Code: \tab BT607 \cr #' Location: \tab Saxony/Germany \cr #' Material: \tab Middle grain quartz measured on aluminum cups on a Risoe TL/OSL DA-15 reader\cr #' Reference: \tab unpublished #' } #' #' **TL.SAR.Data** #' #' \tabular{ll}{ #' Lab: \tab Luminescence Laboratory of Cologne\cr #' Lab-Code: \tab LP1_5\cr #' Location: \tab Spain\cr #' Material: \tab Flint \cr #' Setup: \tab Risoe TL/OSL DA-20 reader (Filter: Semrock Brightline, HC475/50, N2, unpolished steel discs) \cr #' Reference: \tab unpublished \cr #' Remarks: \tab dataset limited to one position #' } #' #' @note #' Please note that this example data cannot be exported to a BIN-file using the function #' `writeR2BIN` as it was generated and implemented in the package long time ago. In the meantime #' the BIN-file format changed. 
#' #' @docType data #' #' @keywords datasets #' #' @examples #' #' ## show first 5 elements of the METADATA and DATA elements in the terminal #' data(ExampleData.BINfileData, envir = environment()) #' CWOSL.SAR.Data@@METADATA[1:5,] #' CWOSL.SAR.Data@@DATA[1:5] #' #' @name ExampleData.BINfileData #' @aliases CWOSL.SAR.Data TL.SAR.Data #' @md NULL #' Example CW-OSL curve data for the package Luminescence #' #' `data.frame` containing CW-OSL curve data (time, counts) #' #' @name ExampleData.CW_OSL_Curve #' #' @docType data #' #' @format Data frame with 1000 observations on the following 2 variables: #' #' \describe{ #' \item{list("x")}{a numeric vector, time} #' \item{list("y")}{a numeric vector, counts} #' } #' #' @references #' Baartman, J.E.M., Veldkamp, A., Schoorl, J.M., Wallinga, J., #' Cammeraat, L.H., 2011. Unravelling Late Pleistocene and Holocene landscape #' dynamics: The Upper Guadalentin Basin, SE Spain. Geomorphology, 125, #' 172-185. #' #' Bos, A.J.J. & Wallinga, J., 2012. How to visualize quartz OSL signal #' components. Radiation Measurements, 47, 752-758. #' #' @source **ExampleData.CW_OSL_Curve** #' #' \tabular{ll}{ #' Lab: \tab Luminescence Laboratory Bayreuth\cr #' Lab-Code: \tab BT607\cr #' Location: \tab Saxony/Germany\cr #' Material: \tab Middle grain quartz measured on aluminum cups on a Risoe TL/OSL DA-15 reader.\cr #' Reference: \tab unpublished data } #' #' **CW_Curve.BosWallinga2012** #' #' \tabular{ll}{ #' Lab: \tab Netherlands Centre for Luminescence Dating (NCL)\cr #' Lab-Code: \tab NCL-2108077\cr #' Location: \tab Guadalentin Basin, Spain\cr #' Material: \tab Coarse grain quartz\cr #' Reference: \tab Bos & Wallinga (2012) and Baartman et al. 
#' Linearly modulated (LM) measurement data from a quartz sample from Norway
#' Lx and Tx data of continuous wave (CW-) OSL signal curves.
#' Measurement of Princeton Instruments camera imported with the function
#' #' @keywords datasets #' #' @examples #' #' ##load data #' data(ExampleData.RLum.Data.Image, envir = environment()) #' #' ##plot data #' plot_RLum(ExampleData.RLum.Data.Image) #' #' @name ExampleData.RLum.Data.Image #' @md NULL #' Example data for a SAR OSL measurement and a TL spectrum using a lexsyg #' reader #' #' Example data from a SAR OSL measurement and a TL spectrum for package #' Luminescence imported from a Freiberg Instruments XSYG file using the #' function [read_XSYG2R]. #' #' @format #' #' `OSL.SARMeasurement`: SAR OSL measurement data #' #' The data contain two elements: (a) `$Sequence.Header` is a #' [data.frame] with metadata from the measurement,(b) #' `Sequence.Object` contains an [RLum.Analysis-class] object #' for further analysis. #' #' `TL.Spectrum`: TL spectrum data #' #' [RLum.Data.Spectrum-class] object for further analysis. The #' spectrum was cleaned from cosmic-rays using the function #' #' `apply_CosmicRayRemoval`. Note that no quantum efficiency calibration #' was performed. #' #' @section Version: 0.1 #' #' @seealso [read_XSYG2R], [RLum.Analysis-class], [RLum.Data.Spectrum-class], #' [plot_RLum], [plot_RLum.Analysis], [plot_RLum.Data.Spectrum] #' #' @references #' Unpublished data measured to serve as example data for that #' package. Location origin of sample BT753 is given here: #' #' Fuchs, M., Kreutzer, S., Rousseau, D.D., Antoine, P., Hatte, C., Lagroix, #' F., Moine, O., Gauthier, C., Svoboda, J., Lisa, L., 2013. The loess sequence #' of Dolni Vestonice, Czech Republic: A new OSL-based chronology of the Last #' Climatic Cycle. Boreas, 42, 664--677. 
#' Material: \tab Fine grain polymineral on steel cups on lexsyg research reader\cr
#' #' #' @format A [list] with two elements, each containing a two column [data.frame]: #' #' \describe{ #' `$BT998`: De and De error values for a fine grain quartz #' sample from a loess section in Rottewitz. #' #' `$CA1`: Single grain De #' and De error values for a coarse grain quartz sample from a fluvial deposit #' in the rock shelter of Cueva Anton #' } #' #' @references #' **BT998** #' #' Unpublished data #' #' **CA1** #' #' Burow, C., Kehl, M., Hilgers, A., Weniger, G.-C., Angelucci, D., Villaverde, #' V., Zapata, J. and Zilhao, J. (2015). Luminescence dating of fluvial #' deposits in the rock shelter of Cueva Anton, Spain. Geochronometria 52, 107-125. #' #' **BT998** #' #' \tabular{ll}{ #' Lab: \tab Luminescence Laboratory Bayreuth\cr #' Lab-Code: \tab BT998\cr #' Location: \tab Rottewitz (Saxony/Germany)\cr #' Material: \tab Fine grain quartz measured on aluminum discs on a Risoe TL/OSL DA-15 reader\cr #' Units: \tab Values are given in seconds \cr #' Dose Rate: \tab Dose rate of the beta-source at measurement ca. 
0.0438 Gy/s +/- 0.0019 Gy/s\cr #' Measurement Date: \tab 2012-01-27 #' } #' #' **CA1** #' #' \tabular{ll}{ #' Lab: \tab Cologne Luminescence Laboratory (CLL)\cr #' Lab-Code: \tab C-L2941\cr #' Location: \tab Cueva Anton (Murcia/Spain)\cr #' Material: \tab Coarse grain quartz (200-250 microns) measured on single grain discs on a Risoe TL/OSL DA-20 reader\cr #' Units: \tab Values are given in Gray \cr #' Measurement Date: \tab 2012 #' } #' #' @keywords datasets #' #' @examples #' #' ##(1) plot values as histogram #' data(ExampleData.DeValues, envir = environment()) #' plot_Histogram(ExampleData.DeValues$BT998, xlab = "De [s]") #' #' ##(2) plot values as histogram (with second to gray conversion) #' data(ExampleData.DeValues, envir = environment()) #' #' De.values <- Second2Gray(ExampleData.DeValues$BT998, #' dose.rate = c(0.0438, 0.0019)) #' #' #' plot_Histogram(De.values, xlab = "De [Gy]") #' #' @name ExampleData.DeValues #' @md NULL #' Example data for feldspar fading measurements #' #' Example data set for fading measurements of the IR50, IR100, IR150 and #' IR225 feldspar signals of sample UNIL/NB123. It further contains regular equivalent dose #' measurement data of the same sample, which can be used to apply a #' fading correction to. 
#' #' #' @format A [list] with two elements, each containing a further [list] of #' [data.frame]s containing the data on the fading and equivalent dose measurements: #' #' \describe{ #' `$fading.data`: A named [list] of [data.frame]s, #' each having three named columns (`LxTx, LxTx.error, timeSinceIrradiation`).\cr #' `..$IR50`: Fading data of the IR50 signal.\cr #' `..$IR100`: Fading data of the IR100 signal.\cr #' `..$IR150`: Fading data of the IR150 signal.\cr #' `..$IR225`: Fading data of the IR225 signal.\cr #' #' #' `$equivalentDose.data`: A named of [data.frame]s, #' each having three named columns (`dose, LxTx, LxTx.error`).\cr #' `..$IR50`: Equivalent dose measurement data of the IR50 signal.\cr #' `..$IR100`: Equivalent dose measurement data of the IR100 signal.\cr #' `..$IR150`: Equivalent dose measurement data of the IR150 signal.\cr #' `..$IR225`: Equivalent dose measurement data of the IR225 signal.\cr #' } #' #' @source #' #' These data were kindly provided by Georgina E. King. Detailed information #' on the sample UNIL/NB123 can be found in the reference given below. The raw #' data can be found in the accompanying supplementary information. #' #' @references #' #' King, G.E., Herman, F., Lambert, R., Valla, P.G., Guralnik, B., 2016. #' Multi-OSL-thermochronometry of feldspar. Quaternary Geochronology 33, 76-87. #' doi:10.1016/j.quageo.2016.01.004 #' #' **Details** #' #' \tabular{ll}{ #' Lab: \tab University of Lausanne \cr #' Lab-Code: \tab UNIL/NB123 \cr #' Location: \tab Namche Barwa (eastern Himalaya)\cr #' Material: \tab Coarse grained (180-212 microns) potassium feldspar \cr #' Units: \tab Values are given in seconds \cr #' Lab Dose Rate: \tab Dose rate of the beta-source at measurement ca. 
0.1335 +/- 0.004 Gy/s \cr #' Environmental Dose Rate: \tab 7.00 +/- 0.92 Gy/ka (includes internal dose rate) #' } #' #' #' @keywords datasets #' #' @examples #' #' ## Load example data #' data("ExampleData.Fading", envir = environment()) #' #' ## Get fading measurement data of the IR50 signal #' IR50_fading <- ExampleData.Fading$fading.data$IR50 #' head(IR50_fading) #' #' ## Determine g-value and rho' for the IR50 signal #' IR50_fading.res <- analyse_FadingMeasurement(IR50_fading) #' #' ## Show g-value and rho' results #' gval <- get_RLum(IR50_fading.res) #' rhop <- get_RLum(IR50_fading.res, "rho_prime") #' #' gval #' rhop #' #' ## Get LxTx values of the IR50 DE measurement #' IR50_De.LxTx <- ExampleData.Fading$equivalentDose.data$IR50 #' #' ## Calculate the De of the IR50 signal #' IR50_De <- plot_GrowthCurve(IR50_De.LxTx, #' mode = "interpolation", #' fit.method = "EXP") #' #' ## Extract the calculated De and its error #' IR50_De.res <- get_RLum(IR50_De) #' De <- c(IR50_De.res$De, IR50_De.res$De.Error) #' #' ## Apply fading correction (age conversion greatly simplified) #' IR50_Age <- De / 7.00 #' IR50_Age.corr <- calc_FadingCorr(IR50_Age, g_value = IR50_fading.res) #' #' #' @name ExampleData.Fading #' @md NULL #' Example OSL surface exposure dating data #' #' A set of synthetic OSL surface exposure dating data to demonstrate the #' [fit_SurfaceExposure] functionality. See examples to reproduce the data #' interactively. 
#' #' @details #' #' **`$sample_1`** #' #' \tabular{ccc}{ #' **mu** \tab **sigmaphi** \tab **age** \cr #' 0.9 \tab 5e-10 \tab 10000 \cr #' } #' #' **`$sample_2`** #' #' \tabular{ccccc}{ #' **mu** \tab **sigmaphi** \tab **age** \tab **Dose rate** \tab **D0** \cr #' 0.9 \tab 5e-10 \tab 10000 \tab 2.5 \tab 40 \cr #' } #' #' **`$set_1`** #' #' \tabular{ccc}{ #' **mu** \tab **sigmaphi** \tab **ages** \cr #' 0.9 \tab 5e-10 \tab 1e3, 1e4, 1e5, 1e6 \cr #' } #' #' **`$set_2`** #' #' \tabular{ccccc}{ #' **mu** \tab **sigmaphi** \tab **ages** \tab **Dose rate** \tab **D0** \cr #' 0.9 \tab 5e-10 \tab 1e2, 1e3, 1e4, 1e5, 1e6 \tab 1.0 \tab 40 \cr #' } #' #' @format A [list] with 4 elements: #' #' \tabular{ll}{ #' **Element** \tab **Content** \cr #' `$sample_1` \tab A [data.frame] with 3 columns (depth, intensity, error) \cr #' `$sample_2` \tab A [data.frame] with 3 columns (depth, intensity, error) \cr #' `$set_1` \tab A [list] of 4 [data.frame]s, each representing a sample with different ages \cr #' `$set_2` \tab A [list] of 5 [data.frame]s, each representing a sample with different ages \cr #' } #' #' @references Unpublished synthetic data #' #' @source #' #' See examples for the code used to create the data sets. 
#' #' @examples #' #' ## ExampleData.SurfaceExposure$sample_1 #' sigmaphi <- 5e-10 #' age <- 10000 #' mu <- 0.9 #' x <- seq(0, 10, 0.1) #' fun <- exp(-sigmaphi * age * 365.25*24*3600 * exp(-mu * x)) #' #' set.seed(666) #' synth_1 <- data.frame(depth = x, #' intensity = jitter(fun, 1, 0.1), #' error = runif(length(x), 0.01, 0.2)) #' #' ## VALIDATE sample_1 #' fit_SurfaceExposure(synth_1, mu = mu, sigmaphi = sigmaphi) #' #' #' #' #' ## ExampleData.SurfaceExposure$sample_2 #' sigmaphi <- 5e-10 #' age <- 10000 #' mu <- 0.9 #' x <- seq(0, 10, 0.1) #' Ddot <- 2.5 / 1000 / 365.25 / 24 / 60 / 60 # 2.5 Gy/ka in Seconds #' D0 <- 40 #' fun <- (sigmaphi * exp(-mu * x) * #' exp(-(age * 365.25*24*3600) * #' (sigmaphi * exp(-mu * x) + Ddot/D0)) + Ddot/D0) / #' (sigmaphi * exp(-mu * x) + Ddot/D0) #' #' set.seed(666) #' synth_2 <- data.frame(depth = x, #' intensity = jitter(fun, 1, 0.1), #' error = runif(length(x), 0.01, 0.2)) #' #' ## VALIDATE sample_2 #' fit_SurfaceExposure(synth_2, mu = mu, sigmaphi = sigmaphi, Ddot = 2.5, D0 = D0) #' #' #' #' ## ExampleData.SurfaceExposure$set_1 #' sigmaphi <- 5e-10 #' mu <- 0.9 #' x <- seq(0, 15, 0.2) #' age <- c(1e3, 1e4, 1e5, 1e6) #' set.seed(666) #' #' synth_3 <- vector("list", length = length(age)) #' #' for (i in 1:length(age)) { #' fun <- exp(-sigmaphi * age[i] * 365.25*24*3600 * exp(-mu * x)) #' synth_3[[i]] <- data.frame(depth = x, #' intensity = jitter(fun, 1, 0.05)) #' } #' #' #' ## VALIDATE set_1 #' fit_SurfaceExposure(synth_3, age = age, sigmaphi = sigmaphi) #' #' #' #' ## ExampleData.SurfaceExposure$set_2 #' sigmaphi <- 5e-10 #' mu <- 0.9 #' x <- seq(0, 15, 0.2) #' age <- c(1e2, 1e3, 1e4, 1e5, 1e6) #' Ddot <- 1.0 / 1000 / 365.25 / 24 / 60 / 60 # 2.0 Gy/ka in Seconds #' D0 <- 40 #' set.seed(666) #' #' synth_4 <- vector("list", length = length(age)) #' #' for (i in 1:length(age)) { #' fun <- (sigmaphi * exp(-mu * x) * #' exp(-(age[i] * 365.25*24*3600) * #' (sigmaphi * exp(-mu * x) + Ddot/D0)) + Ddot/D0) / #' (sigmaphi * exp(-mu * x) 
#' Measurement data obtained from measuring Al2O3:C chips at the IRAMAT-CRP2A, Université Bordeaux
#' Montaigne in 2017 on a Freiberg Instruments lexsyg SMART reader.
#' Single TR-OSL curve obtained by Schmidt et al. (under review) for quartz sample BT729
#' (origin: Trebgast Valley, Germany, quartz, 90-200 µm, unpublished data).
#' Dormagen-Nievenheim (Germany) during a practical course on Luminescence dating in 2016. \cr
#' If the energy calibration differs between the two data sets, `NA` values are produced and
#' will be removed from the matrix.
#'
#' @section Function version: 0.2.1
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS-Université Bordeaux Montaigne (France)\cr
#' Johannes Friedrich, University of Bayreuth (Germany)
#'
#' @seealso [RLum.Data.Spectrum-class], [RLum.Analysis-class]
#'
#' @keywords manip
#'
#' @examples
#'
#' ##(1) - use with your own data (uncomment for usage)
#' ## spectral.efficiency <- read.csv("your data")
#' ##
#' ## your.spectrum <- apply_EfficiencyCorrection(your.spectrum, )
#'
#' @md
#' @export
apply_EfficiencyCorrection <- function(
  object,
  spectral.efficiency
){

  # self-call -----------------------------------------------------------------------------------
  ## NOTE: all class tests use inherits() instead of `class(x) == "..."`;
  ## class() may return a vector of length > 1, which makes the `==` form
  ## error ("condition has length > 1") or compare only the first element.

  ## case: input is a plain list -> recurse over supported elements,
  ## skip everything else with a warning
  if (inherits(object, "list")) {
    output_list <- lapply(object, function(o){
      if (inherits(o, "RLum.Data.Spectrum") || inherits(o, "RLum.Analysis")) {
        apply_EfficiencyCorrection(object = o, spectral.efficiency = spectral.efficiency)

      } else {
        warning(paste0("[apply_EfficiencyCorrection()] Skipping ", class(o)[1],
                       " object in input list."), call. = FALSE)
        return(o)
      }
    })

    return(output_list)
  }

  ## case: RLum.Analysis -> correct every contained spectrum record,
  ## pass other record types through with a warning
  if (inherits(object, "RLum.Analysis")) {
    object@records <- lapply(object@records, function(o){
      if (inherits(o, "RLum.Data.Spectrum")) {
        apply_EfficiencyCorrection(object = o, spectral.efficiency = spectral.efficiency)

      } else {
        warning(paste0("[apply_EfficiencyCorrection()] Skipping ", class(o)[1],
                       " object in input list."), call. = FALSE)
        return(o)
      }
    })

    return(object)
  }

  # Integrity check -----------------------------------------------------------

  ## from here on only a single RLum.Data.Spectrum object is handled
  if (!inherits(object, "RLum.Data.Spectrum"))
    stop("[apply_EfficiencyCorrection()] Input object is not of type RLum.Data.Spectrum",
         call. = FALSE)

  if (!inherits(spectral.efficiency, "data.frame"))
    stop("[apply_EfficiencyCorrection()] 'spectral.efficiency' is not of type data.frame",
         call. = FALSE)

  ## grep data matrix from the input object
  temp.matrix <- as(object, "matrix")

  ## grep efficiency values (first column: wavelength, second: relative response)
  temp.efficiency <- as.matrix(spectral.efficiency[, 1:2])

  ## relative quantum efficiencies must not exceed 1
  if (max(temp.efficiency[, 2]) > 1)
    stop("[apply_EfficiencyCorrection()] Relative quantum efficiency values > 1 are not allowed.",
         call. = FALSE)

  # Apply method --------------------------------------------------------------

  ## the efficiency curve usually has a lower resolution than the spectrum,
  ## so it is interpolated onto the spectral axis of the data matrix
  temp.efficiency.x <- as.numeric(row.names(temp.matrix))

  temp.efficiency.interpolated <- approx(
    x = temp.efficiency[, 1],
    y = temp.efficiency[, 2],
    xout = temp.efficiency.x,
    ties = mean)

  ## correct for quantum efficiency: divide each channel by the interpolated
  ## response and re-scale by its maximum to keep the count level comparable
  temp.matrix <- vapply(X = 1:ncol(temp.matrix), FUN = function(x){
    temp.matrix[, x] / temp.efficiency.interpolated$y *
      max(temp.efficiency.interpolated$y, na.rm = TRUE)

  }, FUN.VALUE = numeric(length = nrow(temp.matrix)))

  ## remove NA rows produced where the energy/wavelength ranges do not overlap
  temp.matrix <- na.exclude(temp.matrix)

  ## restore the column names lost in vapply()
  colnames(temp.matrix) <- colnames(get_RLum(object))

  # Return Output--------------------------------------------------------------
  temp.output <- set_RLum(
    class = "RLum.Data.Spectrum",
    recordType = object@recordType,
    curveType = object@curveType,
    data = temp.matrix,
    info = object@info)

  invisible(temp.output)
}
Luminescence/R/names_RLum.R0000644000176200001440000000277113234110325015253 0ustar liggesusers#' S4-names function for RLum S4 class objects
#'
#' Function calls object-specific names functions for RLum S4 class objects.
#'
#' The function provides a generalised access point for specific
#' [RLum-class] objects.\cr
#' Depending on the input object, the corresponding 'names' function will be
#' selected. Allowed arguments can be found in the documentations of the
#' corresponding [RLum-class] class.
#' #' @param object [RLum-class] (**required**): #' S4 object of class `RLum` #' #' @return Returns a [character] #' #' @section Function version: 0.1.0 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #' @seealso [RLum.Data.Curve-class], [RLum.Data.Image-class], #' [RLum.Data.Spectrum-class], [RLum.Analysis-class], [RLum.Results-class] #' #' @keywords utilities #' #' @aliases names_RLum #' #' @md #' @export setGeneric("names_RLum", function(object) { standardGeneric("names_RLum") }) # Method for names_RLum method for RLum objects in a list for a list of objects ------------- #' @describeIn names_RLum #' Returns a list of [RLum-class] objects that had been passed to [names_RLum] #' #' #' @md #' @export setMethod("names_RLum", signature = "list", function(object) { ##apply method in the objects and return the same lapply(object, function(x) { if (inherits(x, "RLum")) { return(names_RLum(x)) } else{ return(x) } }) }) Luminescence/R/smooth_RLum.R0000644000176200001440000000376013234105635015470 0ustar liggesusers#' Smoothing of data #' #' Function calls the object-specific smooth functions for provided RLum S4-class objects. #' #' The function provides a generalised access point for specific #' [RLum-class] objects.\cr #' Depending on the input object, the corresponding function will be selected. #' Allowed arguments can be found in the documentations of the corresponding #' [RLum-class] class. The smoothing is based on an internal function #' called `.smoothing`. #' #' @param object [RLum-class] (**required**): #' S4 object of class `RLum` #' #' @param ... 
further arguments passed to the specifc class method #' #' @return #' An object of the same type as the input object is provided #' #' @section Function version: 0.1.0 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #' @note #' Currenlty only `RLum` objects of class `RLum.Data.Curve` and `RLum.Analysis` #' (with curve data) are supported! #' #' @seealso [RLum.Data.Curve-class], [RLum.Analysis-class] #' #' @examples #' #' ##load example data #' data(ExampleData.CW_OSL_Curve, envir = environment()) #' #' ##create RLum.Data.Curve object from this example #' curve <- #' set_RLum( #' class = "RLum.Data.Curve", #' recordType = "OSL", #' data = as.matrix(ExampleData.CW_OSL_Curve) #' ) #' #' ##plot data without and with smoothing #' plot_RLum(curve) #' plot_RLum(smooth_RLum(curve)) #' #' @keywords utilities #' #' @md #' @export setGeneric("smooth_RLum", function(object, ...) { standardGeneric("smooth_RLum") }) # Method for smooth_RLum method for RLum objects in a list for a list of objects ------------------- #' @describeIn smooth_RLum #' Returns a list of [RLum-class] objects that had been passed to [smooth_RLum] #' #' #' @md #' @export setMethod("smooth_RLum", signature = "list", function(object, ...){ ##apply method in the objects and return the same lapply(object, function(x){ if(inherits(x, "RLum")){ return(smooth_RLum(x,...)) }else{ return(x) } }) }) Luminescence/R/methods_RLum.R0000644000176200001440000004475413571743147015644 0ustar liggesusers################################################################################## ## METHODS FOR S3 GENERICS ## ################################################################################## ##CAUTION NOTE: ##(1) Please DO NOT access to the S4 objects by using the slots this causes inconsistent ## behaviour, please use the correspong RLum-methods instead! ## ##(2) Especially, please DO NOT include S3-methods for which no S4-method is implemented! Especially ##for coercing. 
##
##(3) Finally, whatever you want to implement, check whether an S4-method exists; it should
##be just passed to these methods, not the opposite, otherwise this will result in undesired behaviour
##
##TODO: For these S3 generics so far no proper documentation exists ... we should consider
##providing an overview within a separate document, as it otherwise becomes rather
##complicated for beginners to work with the documentation.
##
## -------------------- INTRODUCED WITH 0.5.0 ----------------------- ##

#' methods_RLum
#'
#' Methods for S3-generics implemented for the package 'Luminescence'.
#' This document summarises all implemented S3-generics. The name of the function
#' is given before the first dot, after the dot the name of the object that is
#' supported by this method is given, e.g. `plot.RLum.Data.Curve` can be called
#' by `plot(object, ...)`, where `object` is the `RLum.Data.Curve` object.
#'
#' The term S3-generics sounds complicated, however, it just means that something
#' has been implemented in the package to increase the usability for users new
#' in R and who are not familiar with the underlying `RLum`-object structure of
#' the package. The practical outcome is that operations and functions presented
#' in standard books on R can be used without knowing the specifics of the R
#' package 'Luminescence'. For examples see the example section.
#'
#' @param x [RLum-class] or [Risoe.BINfileData-class] (**required**):
#' input object
#'
#' @param object [RLum-class] (**required**):
#' input object
#'
#' @param y [integer] (*optional*):
#' the row index of the matrix, data.frame
#'
#' @param z [integer] (*optional*):
#' the column index of the matrix, data.frame
#'
#' @param i [character] (*optional*):
#' name of the wanted record type or data object or row in the `RLum.Data.Curve` object
#'
#' @param j [integer] (*optional*):
#' column of the data matrix in the `RLum.Data.Curve` object
#'
#' @param value [numeric] **(required)**:
#' numeric value which replaces the value in the `RLum.Data.Curve` object
#'
#' @param drop [logical] (*with default*):
#' keep object structure or drop it
#'
#' @param subset `[subset]` [expression] (**required**):
#' logical expression indicating elements or rows to keep, this function works
#' in [Risoe.BINfileData-class] objects like [subset.data.frame], but takes care
#' of the object structure
#'
#' @param row.names [logical] (*with default*):
#' enables or disables row names (`as.data.frame`)
#'
#' @param recursive [logical] (*with default*):
#' enables or disables further subsetting (`unlist`)
#'
#' @param optional [logical] (*with default*):
#' logical. If `TRUE`, setting row names and converting column names
#' (to syntactic names: see [make.names]) is optional (see [base::as.data.frame])
#'
#' @param ... further arguments that can be passed to the method
#'
#' @note
#' `methods_RLum` are not really new functions, everything given here are mostly just
#' surrogates for existing functions in the package.
#' @examples
#'
#' ##load example data
#' data(ExampleData.RLum.Analysis, envir = environment())
#'
#' @keywords internal
#' @md
#' @name methods_RLum
NULL

####################################################################################################
# methods for generic: plot()                                                                      #
####################################################################################################

#' @rdname methods_RLum
#' @method plot list
#' @export
plot.list <- function(x, y, ...) {
  ## dispatch to plot_RLum() only if *every* list element is an RLum object,
  ## otherwise fall back to the default plot method
  if (all(vapply(x, inherits, logical(1), what = "RLum"))) {
    plot_RLum(object = x, ...)
  } else {
    if (missing(y)) y <- NULL
    plot.default(x, y, ...)
  }
}

#' @rdname methods_RLum
#' @method plot RLum.Results
#' @export
plot.RLum.Results <- function(x, y, ...) plot_RLum(object = x, ...)

#' @rdname methods_RLum
#' @method plot RLum.Analysis
#' @export
plot.RLum.Analysis <- function(x, y, ...) plot_RLum(object = x, ...)

#' @rdname methods_RLum
#' @method plot RLum.Data.Curve
#' @export
plot.RLum.Data.Curve <- function(x, y, ...) plot_RLum(object = x, ...)

#' @rdname methods_RLum
#' @method plot RLum.Data.Spectrum
#' @export
plot.RLum.Data.Spectrum <- function(x, y, ...) plot_RLum(object = x, ...)

#' @rdname methods_RLum
#' @method plot RLum.Data.Image
#' @export
plot.RLum.Data.Image <- function(x, y, ...) plot_RLum(object = x, ...)

#' @rdname methods_RLum
#' @method plot Risoe.BINfileData
#' @export
plot.Risoe.BINfileData <- function(x, y, ...) plot_Risoe.BINfileData(BINfileData = x, ...)

####################################################################################################
# methods for generic: hist()                                                                      #
####################################################################################################

#' @rdname methods_RLum
#' @export
hist.RLum.Results <- function(x, ...) plot_Histogram(data = x, ...)

#' @rdname methods_RLum
#' @export
hist.RLum.Data.Image <- function(x, ...) hist(x = get_RLum(x)@data@values, ...)

#' @rdname methods_RLum
#' @export
hist.RLum.Data.Curve <- function(x, ...) {
  ## histogram of the count values (2nd matrix column); '...' is now passed
  ## through to hist() so user arguments (breaks, main, ...) are honoured
  ## instead of being silently dropped (matches hist.RLum.Data.Image)
  hist(as(get_RLum(x), "matrix")[, 2], ...)
}

#' @rdname methods_RLum
#' @export
hist.RLum.Analysis <- function(x, ...)
  ## NOTE: here '...' is forwarded to get_RLum() (record selection), not hist()
  lapply(1:length_RLum(x), function(z){
    hist(as(get_RLum(x, record.id = z, ...),"matrix")[,2])})

####################################################################################################
# methods for generic: summary()                                                                   #
####################################################################################################

#' @rdname methods_RLum
#' @method summary RLum.Results
#' @export
summary.RLum.Results <- function(object, ...) get_RLum(object = object, ...)

#' @rdname methods_RLum
#' @method summary RLum.Analysis
#' @export
summary.RLum.Analysis <- function(object, ...)
  lapply(object@records, function(x) summary(x@data))

#' @rdname methods_RLum
#' @method summary RLum.Data.Image
#' @export
summary.RLum.Data.Image <- function(object, ...) summary(object@data@data@values)

# summary.RLum.Data.Spectrum <- function(object, ...)

#' @rdname methods_RLum
#' @method summary RLum.Data.Curve
#' @export
summary.RLum.Data.Curve <- function(object, ...) summary(object@data, ...)

####################################################################################################
# methods for generic: subset()                                                                    #
####################################################################################################

#' @rdname methods_RLum
#' @method subset Risoe.BINfileData
#'
#' @param records.rm [subset] [logical] (*with default*):
#' remove records from data set, can be disabled, to just set the column `SET` to `TRUE` or `FALSE`
#'
#' @md
#' @export
subset.Risoe.BINfileData <- function(x, subset, records.rm = TRUE, ...) {
  ## any extra arguments are not supported by this method
  if(length(list(...)))
    warning(paste("Argument not supported and skipped:", names(list(...))))

  ##select relevant rows: evaluate the subset expression inside the METADATA
  ##data.frame (non-standard evaluation, like subset.data.frame)
  sel <- tryCatch(eval(
    expr = substitute(subset),
    envir = x@METADATA,
    enclos = parent.frame()
  ),
  error = function(e) {
    stop("\n\nInvalid subset options. \nValid terms are: ", paste(names(x@METADATA), collapse = ", "))
  })

  ##probably everything is FALSE now?
  if (records.rm) {
    if (any(sel)) {
      ##drop non-matching records from both slots and re-number the IDs
      x@METADATA <- x@METADATA[sel, ]
      x@DATA <- x@DATA[sel]
      x@METADATA[["ID"]] <- 1:length(x@METADATA[["ID"]])
      return(x)
    } else{
      return(NULL)
    }
  }else{
    ##keep all records, only flag the selection in the SEL column
    x@METADATA[["SEL"]] <- sel
    return(x)
  }
}

#' @rdname methods_RLum
#' @method subset RLum.Analysis
#' @export
subset.RLum.Analysis <- function(x, subset = NULL, ...) {
  do.call(get_RLum,
          list(object = x,
               drop = FALSE,
               subset = substitute(subset),
               env = parent.frame()))
}

####################################################################################################
# methods for generic: bin()                                                                       #
####################################################################################################

#' @rdname methods_RLum
#' @export
bin.RLum.Data.Curve <- function(x, bin_size = 2, ...)
  bin_RLum.Data(x, bin_size = bin_size)

#' @rdname methods_RLum
#' @export
bin.RLum.Data.Spectrum <- function(x, bin_size.row = 1, bin_size.col = 1, ...){
  bin_RLum.Data(x, bin_size.row = bin_size.row, bin_size.col = bin_size.col)
}

####################################################################################################
# methods for generic: length()                                                                    #
####################################################################################################

#' @rdname methods_RLum
#' @export
length.RLum.Results <- function(x, ...) length_RLum(x)

#' @rdname methods_RLum
#' @export
length.RLum.Analysis <- function(x, ...) length_RLum(x)

#' @rdname methods_RLum
#' @export
length.RLum.Data.Curve <- function(x, ...) length_RLum(x)

#' @rdname methods_RLum
#' @export
length.Risoe.BINfileData <- function(x, ...)
  length(x@METADATA$ID)
####################################################################################################
# methods for generic: dim()                                                                       #
####################################################################################################

#' @rdname methods_RLum
#' @export
dim.RLum.Data.Curve <- function(x) {
  ## dimensions of the underlying data matrix
  dim(as(x, "matrix"))
}

#' @rdname methods_RLum
#' @export
dim.RLum.Data.Spectrum <- function(x) {
  ## dimensions of the underlying data matrix
  dim(as(x, "matrix"))
}

####################################################################################################
# methods for generic: rep()                                                                       #
####################################################################################################

#' @rdname methods_RLum
#' @export
rep.RLum <- function(x, ...) {
  ## delegate to the package's own replication method
  replicate_RLum(x, ...)
}

####################################################################################################
# methods for generic: name()                                                                      #
####################################################################################################

#' @rdname methods_RLum
#' @export
names.RLum.Data.Curve <- function(x, ...) {
  names_RLum(x)
}

#' @rdname methods_RLum
#' @export
names.RLum.Data.Spectrum <- function(x, ...) {
  names_RLum(x)
}

#' @rdname methods_RLum
#' @export
names.RLum.Data.Image <- function(x, ...) {
  names_RLum(x)
}

#' @rdname methods_RLum
#' @export
names.RLum.Analysis <- function(x, ...) {
  names_RLum(x)
}

#' @rdname methods_RLum
#' @export
names.RLum.Results <- function(x, ...) {
  names_RLum(x)
}

#' @rdname methods_RLum
#' @export
names.Risoe.BINfileData <- function(x) {
  ## names are the luminescence record types stored in the metadata
  as.character(x@METADATA$LTYPE)
}

####################################################################################################
# methods for generic: row.name()                                                                  #
####################################################################################################

#' @rdname methods_RLum
#' @export
row.names.RLum.Data.Spectrum <- function(x, ...) {
  rownames(as(x, "matrix"))
}

####################################################################################################
# methods for generic: as.data.frame()                                                             #
####################################################################################################

#' @rdname methods_RLum
#' @export
as.data.frame.RLum.Data.Curve <- function(x, row.names = NULL, optional = FALSE, ...) {
  as(x, "data.frame")
}

#' @rdname methods_RLum
#' @export
as.data.frame.RLum.Data.Spectrum <- function(x, row.names = NULL, optional = FALSE, ...) {
  as(x, "data.frame")
}

# for RLum.Results ... makes no sense and may yield in unpredictable behaviour

####################################################################################################
# methods for generic: as.list()                                                                   #
####################################################################################################

#' @rdname methods_RLum
#' @export
as.list.RLum.Results <- function(x, ...) {
  as(x, "list")
}

#' @rdname methods_RLum
#' @export
as.list.RLum.Data.Curve <- function(x, ...) {
  as(x, "list")
}

#' @rdname methods_RLum
#' @export
as.list.RLum.Analysis <- function(x, ...) {
  as(x, "list")
}

####################################################################################################
# methods for generic: as.matrix()                                                                 #
####################################################################################################

#' @rdname methods_RLum
#' @export
as.matrix.RLum.Data.Curve <- function(x, ...) {
  as(x, "matrix")
}

#' @rdname methods_RLum
#' @export
as.matrix.RLum.Data.Spectrum <- function(x, ...) {
  as(x, "matrix")
}

# for RLum.Results ... makes no sense and may yield in unpredictable behaviour
####################################################################################################
# methods for generic: is()
####################################################################################################
#For this function no S4 method was written, as this would come at the cost of performance and
#is totally unnecessary

#' @rdname methods_RLum
#' @export
is.RLum <- function(x, ...) is(x, "RLum")

#' @rdname methods_RLum
#' @export
is.RLum.Data <- function(x, ...) is(x, "RLum.Data")

#' @rdname methods_RLum
#' @export
is.RLum.Data.Curve <- function(x, ...) is(x, "RLum.Data.Curve")

#' @rdname methods_RLum
#' @export
is.RLum.Data.Spectrum <- function(x, ...) is(x, "RLum.Data.Spectrum")

#' @rdname methods_RLum
#' @export
is.RLum.Data.Image <- function(x, ...) is(x, "RLum.Data.Image")

#' @rdname methods_RLum
#' @export
is.RLum.Analysis <- function(x, ...) is(x, "RLum.Analysis")

#' @rdname methods_RLum
#' @export
is.RLum.Results <- function(x, ...) is(x, "RLum.Results")

####################################################################################################
# methods for generic: merge()
####################################################################################################

#' @rdname methods_RLum
#' @export
merge.RLum <- function(x, y, ...) merge_RLum(append(list(...), values = c(x, y)))

####################################################################################################
# methods for generic: unlist()
####################################################################################################

#' @rdname methods_RLum
#' @method unlist RLum.Analysis
#' @export
unlist.RLum.Analysis <- function(x, recursive = TRUE, ...){
  temp <- get_RLum(object = x, recursive = recursive, ...)

  if(recursive){
    ## flatten one level: extract the records from *each* element of 'temp'
    ## (bug fix: previously the whole list 'temp' was passed to get_RLum()
    ## on every iteration, the loop index was ignored)
    unlist(lapply(seq_along(temp), function(i){
      get_RLum(temp[[i]])
    }), recursive = FALSE)

  }else{
    return(temp)
  }
}

####################################################################################################
# methods for generic: `+`
####################################################################################################

#' @rdname methods_RLum
#'
#' @examples
#'
#' ##combine curve is various ways
#' curve1 <- IRSAR.RF.Data[[1]]
#' curve2 <- IRSAR.RF.Data[[1]]
#' curve1 + curve2
#' curve1 - curve2
#' curve1 / curve2
#' curve1 * curve2
#'
#' @export
`+.RLum.Data.Curve` <- function(x, y) merge_RLum(list(x, y), merge.method = "sum")

####################################################################################################
# methods for generic: `-`
####################################################################################################

#' @rdname methods_RLum
#' @export
`-.RLum.Data.Curve` <- function(x, y) merge_RLum(list(x, y), merge.method = "-")

####################################################################################################
# methods for generic: `*`
####################################################################################################

#' @rdname methods_RLum
#' @export
`*.RLum.Data.Curve` <- function(x, y) merge_RLum(list(x, y), merge.method = "*")

####################################################################################################
# methods for generic: `/`
####################################################################################################

#' @rdname methods_RLum
#' @export
`/.RLum.Data.Curve` <- function(x, y) merge_RLum(list(x, y), merge.method = "/")
function(x,y,z, drop = TRUE) {as(x, "matrix")[y,z, drop = drop]} #' @rdname methods_RLum #' @export `[.RLum.Data.Spectrum` <- function(x,y,z, drop = TRUE) {as(x, "matrix")[y,z, drop = drop]} #' @rdname methods_RLum #' @export `[.RLum.Data.Image` <- function(x,y,z, drop = TRUE) {as(x, "matrix")[y,z, drop = drop]} #' @rdname methods_RLum #' @export `[.RLum.Analysis` <- function(x, i, drop = FALSE) { if (is(i, "character")) { get_RLum(x, recordType = i, drop = drop) } else{ get_RLum(x, record.id = i, drop = drop) } } #' @rdname methods_RLum #' @export `[.RLum.Results` <- function(x, i, drop = TRUE) {get_RLum(x, data.object = i, drop = drop)} #################################################################################################### # methods for generic: `[<-` #################################################################################################### #' @rdname methods_RLum #' @export `[<-.RLum.Data.Curve` <- function(x, i, j, value){ x@data[i,j] <- value #this is without any S4-method, but otherwise the overhead it too high return(x) } #################################################################################################### # methods for generic: `[[` #################################################################################################### #' @rdname methods_RLum #' @export `[[.RLum.Analysis` <- function(x, i) { if (is(i, "character")) { get_RLum(x, recordType = i) } else{ get_RLum(x, record.id = i) } } #' @rdname methods_RLum #' @export `[[.RLum.Results` <- function(x, i) {get_RLum(x, data.object = i)} #################################################################################################### # methods for generic: `$` #################################################################################################### #' @rdname methods_RLum #' @export `$.RLum.Data.Curve` <- function(x, i) {get_RLum(x, info.object = i)} #' @rdname methods_RLum #' #' @examples #' #' ##`$` access curves #' IRSAR.RF.Data$RF #' #' @export 
`$.RLum.Analysis` <- function(x, i) {get_RLum(x, recordType = i)} #' @rdname methods_RLum #' @export `$.RLum.Results` <- function(x, i) {get_RLum(x, data.object = i)} Luminescence/R/fit_SurfaceExposure.R0000644000176200001440000004710513437252111017203 0ustar liggesusers#' Nonlinear Least Squares Fit for OSL surface exposure data #' #' This function determines the (weighted) least-squares estimates of the #' parameters of either eq. 1 in *Sohbati et al. (2012a)* or eq. 12 in #' *Sohbati et al. (2012b)* for a given OSL surface exposure data set (**BETA**). #' #' **Weighted fitting** #' #' If `weights = TRUE` the function will use the inverse square of the error (\eqn{1/\sigma^2}) #' as weights during fitting using [minpack.lm::nlsLM]. Naturally, for this to #' take effect individual errors must be provided in the third column of the #' `data.frame` for `data`. Weighted fitting is **not** supported if `data` #' is a list of multiple `data.frame`s, i.e., it is not available for global #' fitting. #' #' **Dose rate** #' #' If any of the arguments `Ddot` or `D0` is at its default value (`NULL`), #' this function will fit eq. 1 in Sohbati et al. (2012a) to the data. If #' the effect of dose rate (i.e., signal saturation) needs to be considered, #' numeric values for the dose rate (`Ddot`) (in Gy/ka) and the characteristic #' saturation dose (`D0`) (in Gy) must be provided. The function will then fit #' eq. 12 in Sohbati et al. (2012b) to the data. #' #' **NOTE**: Currently, this function does **not** consider the variability #' of the dose rate with sample depth (`x`)! In the original equation the dose #' rate `D` is an arbitrary function of `x` (term `D(x)`), but here `D` is assumed #' constant. #' #' **Global fitting** #' #' If `data` is [list] of multiple `data.frame`s, each representing a separate #' sample, the function automatically performs a global fit to the data. 
This #' may be useful to better constrain the parameters `sigmaphi` or `mu` and #' **requires** that known ages for each sample is provided #' (e.g., `age = c(100, 1000)` if `data` is a list with two samples). #' #' #' @param data [data.frame] or [list] (**required**): #' Measured OSL surface exposure data with the following structure: #' #' ``` #' (optional) #' | depth (a.u.)| intensity | error | #' | [ ,1] | [ ,2] | [ ,3] | #' |-------------|-----------|-------| #' [1, ]| ~~~~ | ~~~~ | ~~~~ | #' [2, ]| ~~~~ | ~~~~ | ~~~~ | #' ... | ... | ... | ... | #' [x, ]| ~~~~ | ~~~~ | ~~~~ | #' #' ``` #' #' Alternatively, a [list] of `data.frames` can be provided, where each #' `data.frame` has the same structure as shown above, with the exception that #' they must **not** include the optional error column. Providing a [list] as #' input automatically activates the global fitting procedure (see details). #' #' @param sigmaphi [numeric] (*optional*): #' A numeric value for sigmaphi, i.e. the charge detrapping rate. #' Example: `sigmaphi = 5e-10` #' #' @param mu [numeric] (*optional*): #' A numeric value for mu, i.e. the light attenuation coefficient. #' Example: `mu = 0.9` #' #' @param age [numeric] (*optional*): #' The age (a) of the sample, if known. If `data` is a [list] of *x* samples, #' then `age` must be a numeric vector of length *x*. #' Example: `age = 10000`, or `age = c(1e4, 1e5, 1e6)`. #' #' @param Ddot [numeric] (*optional*): #' A numeric value for the environmental dose rate (Gy/ka). For this argument #' to be considered a value for `D0` must also be provided; otherwise it will be #' ignored. #' #' @param D0 [numeric] (*optional*): #' A numeric value for the characteristic saturation dose (Gy). For this argument #' to be considered a value for `Ddot` must also be provided; otherwise it will be #' ignored. #' #' @param weights [logical] (*optional*): #' If `TRUE` the fit will be weighted by the inverse square of the error. 
#' Requires `data` to be a [data.frame] with three columns. #' #' @param plot [logical] (*optional*): #' Show or hide the plot. #' #' @param legend [logical] (*optional*): #' Show or hide the equation inside the plot. #' #' @param error_bars [logical] (*optional*): #' Show or hide error bars (only applies if errors were provided). #' #' @param coord_flip [logical] (*optional*): #' Flip the coordinate system. #' #' @param ... Further parameters passed to [plot]. #' Custom parameters include: #' - `verbose` ([logical]): show or hide console output #' - `line_col`: Color of the fitted line #' - `line_lty`: Type of the fitted line (see `lty` in `?par`) #' - `line_lwd`: Line width of the fitted line (see `lwd` in `?par`) #' #' @return #' #' Function returns results numerically and graphically: #' #' -----------------------------------\cr #' `[ NUMERICAL OUTPUT ]`\cr #' -----------------------------------\cr #' #' **`RLum.Results`**-object #' #' **slot:** **`@data`** #' #' \tabular{lll}{ #' **Element** \tab **Type** \tab **Description**\cr #' `$summary` \tab `data.frame` \tab summary of the fitting results \cr #' `$data` \tab `data.frame` \tab the original input data \cr #' `$fit` \tab `nls` \tab the fitting object produced by [minpack.lm::nlsLM] \cr #' `$args` \tab `character` \tab arguments of the call \cr #' `$call` \tab `call` \tab the original function call \cr #' } #' #'**slot:** **`@info`** #' #' Currently unused. #' #' ------------------------\cr #' `[ PLOT OUTPUT ]`\cr #' ------------------------\cr #' #' A scatter plot of the provided depth-intensity OSL surface exposure data #' with the fitted model. #' #' @section Function version: 0.1.0 #' #' @note #' **This function has BETA status. If possible, results should be** #' **cross-checked.** #' #' @author Christoph Burow, University of Cologne (Germany) #' #' @seealso [ExampleData.SurfaceExposure], [minpack.lm::nlsLM] #' #' @references #' #' Sohbati, R., Murray, A.S., Chapot, M.S., Jain, M., Pederson, J., 2012a. 
#' Optically stimulated luminescence (OSL) as a chronometer for surface exposure #' dating. Journal of Geophysical Research 117, B09202. doi:10.1029/2012JB009383 #' #' Sohbati, R., Jain, M., Murray, A.S., 2012b. Surface exposure dating of #' non-terrestial bodies using optically stimulated luminescence: A new method. #' Icarus 221, 160-166. #' #' @keywords datagen #' #' @examples #' #' ## Load example data #' data("ExampleData.SurfaceExposure") #' #' ## Example 1 - Single sample #' # Known parameters: 10000 a, mu = 0.9, sigmaphi = 5e-10 #' sample_1 <- ExampleData.SurfaceExposure$sample_1 #' head(sample_1) #' results <- fit_SurfaceExposure(sample_1, mu = 0.9, sigmaphi = 5e-10) #' get_RLum(results) #' #' #' ## Example 2 - Single sample and considering dose rate #' # Known parameters: 10000 a, mu = 0.9, sigmaphi = 5e-10, #' # dose rate = 2.5 Gy/ka, D0 = 40 Gy #' sample_2 <- ExampleData.SurfaceExposure$sample_2 #' head(sample_2) #' results <- fit_SurfaceExposure(sample_2, mu = 0.9, sigmaphi = 5e-10, #' Ddot = 2.5, D0 = 40) #' get_RLum(results) #' #' #' ## Example 3 - Multiple samples (global fit) to better constrain 'mu' #' # Known parameters: ages = 1e3, 1e4, 1e5, 1e6 a, mu = 0.9, sigmaphi = 5e-10 #' set_1 <- ExampleData.SurfaceExposure$set_1 #' str(set_1, max.level = 2) #' results <- fit_SurfaceExposure(set_1, age = c(1e3, 1e4, 1e5, 1e6), #' sigmaphi = 5e-10) #' get_RLum(results) #' #' #' ## Example 4 - Multiple samples (global fit) and considering dose rate #' # Known parameters: ages = 1e2, 1e3, 1e4, 1e5, 1e6 a, mu = 0.9, sigmaphi = 5e-10, #' # dose rate = 1.0 Ga/ka, D0 = 40 Gy #' set_2 <- ExampleData.SurfaceExposure$set_2 #' str(set_2, max.level = 2) #' results <- fit_SurfaceExposure(set_2, age = c(1e2, 1e3, 1e4, 1e5, 1e6), #' sigmaphi = 5e-10, Ddot = 1, D0 = 40) #' get_RLum(results) #' #' @md #' @export fit_SurfaceExposure <- function(data, sigmaphi = NULL, mu = NULL, age = NULL, Ddot = NULL, D0 = NULL, weights = FALSE, plot = TRUE, legend = TRUE, error_bars = 
TRUE, coord_flip = FALSE, ...) { ## SETTINGS ---- settings <- list( verbose = TRUE, info = list() ) settings <- modifyList(settings, list(...)) ## Input object handling ----------------------------------------------------- ## Data type validation if (inherits(data, "RLum.Results")) object <- get_RLum(data, "data") if (inherits(data, "matrix")) data <- as.data.frame(data) if (inherits(data, "data.table")) data <- as.data.frame(data) ## For global fitting of multiple data sets 'data' must be a list if (inherits(data, "list")) { # Global fitting requires and equal amount of ages to be provided if (length(data) != length(age)) stop("If 'data' is a list of data sets for global fitting, 'age' must be of the same length.", call. = FALSE) # TODO: Support weighted fitting for global fit if (weights) { if (settings$verbose) warning("Argument 'weights' is not supported when multiple data sets are provided for global fitting.", call. = FALSE) weights <- FALSE } # collapse list into a data.frame with a $group column to distinguish # between individual samples data_list <- data for (i in 1:length(data)) data[[i]]$group <- LETTERS[[i]] data <- do.call(rbind, data) data$group <- as.factor(data$group) global_fit <- TRUE } else { # ignore 'global_fit' if 'data' is a data.frame global_fit <- FALSE } # Exit if data type is invalid if (!inherits(data, "data.frame")) stop("'data' must be of class data.frame.", call. 
= FALSE) # Check which parameters have been provided if (!is.null(age) && any(is.na(age))) age <- NULL if (!is.null(sigmaphi) && any(is.na(sigmaphi))) sigmaphi <- NULL if (!is.null(mu) && any(is.na(mu))) mu <- NULL ## Weighting options (only available for global fitting) if (ncol(data) >= 3 && weights && !global_fit) wi <- (1 / data[ ,3]^2) / sum(1 / data[ ,3]^2) else wi <- rep(1, times = nrow(data)) # extract errors into seperate variable if (ncol(data) >= 3 && !global_fit) error <- data[ ,3] else error <- NULL ## Take only the first to columns (depth, signal) if (ncol(data) > 2 && !global_fit) data <- data[ ,1:2] ## remove rows with NA if (any(is.na(data))) { data <- data[complete.cases(data), ] if (settings$verbose) warning("NA values in 'data' were removed.", call. = FALSE) } ## Data preprocessing ---- # set column names if (!global_fit) colnames(data) <- c("x", "y") else colnames(data) <- c("x", "y", "group") ## FITTING ---- ## Functions # w/o dose rate fun <- formula(y ~ exp(-sigmaphi * age * 365.25*24*3600 * exp(-mu * x))) fun_global <- formula(y ~ exp(-sigmaphi * age[group] * 365.25*24*3600 * exp(-mu * x))) # w/ dose rate (Sohbati et al. 
2012, eq 12) if (!is.null(Ddot)) Ddot <- Ddot / 1000 / 365.25 / 24 / 60 / 60 fun_w_dr <- formula( y ~ (sigmaphi * exp(-mu * x) * exp(-(age * 365.25*24*3600) * (sigmaphi * exp(-mu * x) + Ddot/D0)) + Ddot/D0) / (sigmaphi * exp(-mu * x) + Ddot/D0) ) fun_global_w_dr <- formula( y ~ (sigmaphi * exp(-mu * x) * exp(-(age[group] * 365.25*24*3600) * (sigmaphi * exp(-mu * x) + Ddot/D0)) + Ddot/D0) / (sigmaphi * exp(-mu * x) + Ddot/D0) ) ## start parameter start <- list(sigmaphi = if (is.null(sigmaphi)) 5.890e-09 else NULL, mu = if (is.null(mu)) 1 else NULL, age = if (is.null(age)) 2 else NULL) start <- start[!sapply(start, is.null)] ## fitting boundaries lower <- list(sigmaphi = if (is.null(sigmaphi)) -Inf else NULL, mu = if (is.null(mu)) 0 else NULL, age = if (is.null(age)) 0 else NULL) upper <- list(sigmaphi = if (is.null(sigmaphi)) Inf else NULL, mu = if (is.null(mu)) Inf else NULL, age = if (is.null(age)) Inf else NULL) ## Decision tree which of the functions to use if (!is.null(Ddot) && !is.null(D0)) { if (global_fit) use_fun <- fun_global_w_dr else use_fun <- fun_w_dr } else { if (global_fit) use_fun <- fun_global else use_fun <- fun } # (un)constrained fitting fit <- tryCatch({ minpack.lm::nlsLM(formula = use_fun, data = data, start = start, lower = unlist(lower), upper = unlist(upper), weights = wi) }, error = function(e) { e } ) # return NULL if fitting failed if (!inherits(fit, "simpleError") && !inherits(try(summary(fit), silent = TRUE), "try-error")) { # Extract coefficients coef <- as.data.frame(coef(summary(fit))) } else { if (settings$verbose) message("[fit_SurfaceExposure()] \n- Unable to fit the data. Original error from minpack::nlsLM():\n\n", fit$message) # Fill with NA values coef <- data.frame( "Estimate" = rep(NA, 3), "Std. Error" = rep(NA, 3), row.names = c("age", "sigmaphi", "mu"), check.names = FALSE ) } ## RESULTS ---- summary <- data.frame( age = if (is.null(age)) coef["age", "Estimate"] else age, age_error = coef["age", "Std. 
Error"], sigmaphi = if (is.null(sigmaphi)) coef["sigmaphi", "Estimate"] else sigmaphi, sigmaphi_error = coef["sigmaphi", "Std. Error"], mu = if (is.null(mu)) coef["mu", "Estimate"] else mu, mu_error = coef["mu", "Std. Error"] ) ## Create RLum.Results object results <- set_RLum(class = "RLum.Results", originator = "fit_SurfaceExposure", data = list(summary = summary, data = data, fit = fit, args = as.list(sys.call()[-1]), call = sys.call()), info = settings$info ) ## PLOT ---- if (plot) { # remove $group column for easier data handling if (global_fit) data$group <- NULL # re-order x,y columns if (coord_flip) data <- data.frame(data$y, data$x) # set default plot settings plot_settings <- list( x = data, main = "", pch = 21, col = "black", bg = "red", xlab = if (!coord_flip) "Depth (mm)" else "OSL intensity (Ln/Tn)", ylab = if (!coord_flip) "OSL intensity (Ln/Tn)" else "Depth (mm)", cex = 1.0, lty = 1, lwd = 1, log = "", ylim = if (!coord_flip) range(pretty(data[ ,2])) else rev(range(pretty(data[ ,2]))), xlim = range(pretty(data[ ,1])) ) # override default settings with valid arguments in ... 
plot_settings <- modifyList(plot_settings, list(...)) valid_settings <- c(names(par()), formalArgs("title"), formalArgs("plot.default"), "cex") plot_settings <- plot_settings[names(plot_settings) %in% valid_settings] # set global plot settings par(cex = plot_settings$cex) if (grepl("y", plot_settings$log)) { plot_settings$ylim[1] <- 0.01 plot_settings$x <- data[which(data[ ,2] > 0),] } ## create main plot do.call("plot", modifyList(plot_settings, list(x = NA))) ## add data points if (!global_fit) { points(data, type = "p", pch = plot_settings$pch, bg = plot_settings$bg, col = plot_settings$col) } else { Map(function(d, i) { points(d, type = "p", pch = plot_settings$pch, bg = i, col = plot_settings$col) }, split(results$data, results$data$group), 1:length(unique(results$data$group))) } ## add fitted curve if (!inherits(fit, "error") && !inherits(fit, "simpleError")) { if (coord_flip) { oldx <- data[ ,2] } else { oldx <- data[ ,1] } newx <- seq(range(oldx)[1], range(oldx)[2], length.out = 10000) newy <- suppressWarnings(predict(fit, newdata = list(x = newx))) if (coord_flip) { tmp <- newx newx <- newy newy <- tmp } if (!global_fit) { points(newx, newy, type = "l", col = ifelse("line_col" %in% names(list(...)), list(...)$line_col, "blue"), lty = ifelse("line_lty" %in% names(list(...)), list(...)$line_lty, 1), lwd = ifelse("line_lwd" %in% names(list(...)), list(...)$line_lwd, 1)) } else { for (i in 1:length(data_list)) { seg <- seq(i * 101 - 100, 10000, nrow(data)) points(newx[seg], newy[seg], type = "l", col = ifelse("line_col" %in% names(list(...)), list(...)$line_col, i), lty = ifelse("line_lty" %in% names(list(...)), list(...)$line_lty, 1), lwd = ifelse("line_lwd" %in% names(list(...)), list(...)$line_lwd, 1)) } } } else { legend("center", legend = "Unable to fit the data!\t\t") } # add error bars (if weighted fit) if (!is.null(error) && error_bars) { segments(plot_settings$x[ ,1], plot_settings$x[ ,2] - error, plot_settings$x[ ,1], plot_settings$x[ ,2] + error) } 
# add formula if (legend && !inherits(fit, "simpleError")) { formula_text <- paste0("y = ", as.character(fit$m$formula())[3], "\t\t") if (!is.null(age)) { if (!global_fit) { formula_text <- gsub("age", age, formula_text) } else { formula_text <- gsub("age", paste0("[", paste(age, collapse = "|"), "]"), formula_text) formula_text <- gsub("\\[group\\]", "", formula_text) } } if (!is.null(sigmaphi)) formula_text <- gsub("sigmaphi", sigmaphi, formula_text) if (!is.null(mu)) formula_text <- gsub("mu", mu, formula_text) legend(ifelse(coord_flip, "bottomleft", "bottomright"), legend = formula_text, cex = 0.8, bty = "n") } } ## CONSOLE ---- if (settings$verbose) { cat("\n [fit_SurfaceExposure()] \n\n") if (!global_fit) { ## STANDARD OUTPUT cat(" Estimated paramater(s):\n", "-----------------------\n") if (is.null(age)) cat(paste0(" age (a):\t", signif(results$summary$age, 3), " \u00B1 ", signif(results$summary$age_error, 3), "\n")) if (is.null(sigmaphi)) cat(paste0(" sigmaphi:\t", signif(results$summary$sigmaphi, 3), " \u00B1 ", signif(results$summary$sigmaphi_error, 3), "\n")) if (is.null(mu)) cat(paste0(" mu:\t\t", signif(results$summary$mu, 3), " \u00B1 ", signif(results$summary$mu_error, 3), "\n")) cat("\n") } else { ## GLOBAL FIT OUTPUT cat(" Shared estimated paramater(s):\n", "-----------------------\n") if (is.null(sigmaphi)) cat(paste0(" sigmaphi:\t", signif(unique(results$summary$sigmaphi), 3), " \u00B1 ", signif(unique(results$summary$sigmaphi_error), 3), "\n")) if (is.null(mu)) cat(paste0(" mu:\t\t", signif(unique(results$summary$mu), 3), " \u00B1 ", signif(unique(results$summary$mu_error), 3), "\n")) cat("\n") } ## STANDARD OUTPUT cat(" Fixed parameters(s):\n", "--------------------\n") if (!is.null(age)) cat(paste0(" age (a):\t", paste(age, collapse = ", "), "\n")) if (!is.null(sigmaphi)) cat(paste0(" sigmaphi:\t", sigmaphi, "\n")) if (!is.null(mu)) cat(paste0(" mu:\t\t", mu, "\n")) cat("\n") if (!is.null(age)) { message(paste0("To apply the estimated 
parameters to a sample of unknown age run:\n\n", "fit_SurfaceExposure(data = ", capture.output(results$args[[1]]), ", sigmaphi = ", signif(unique(results$summary$sigmaphi), 3), ", mu = ", signif(unique(results$summary$mu), 3), ")\n\n")) } } ## EXIT ---- return(results) } Luminescence/R/methods_DRAC.R0000644000176200001440000002145113242571355015456 0ustar liggesusers################################################################################## ## METHODS FOR S3 GENERICS ## ################################################################################## ## ---------------------------------------------------------------------------## ## DATA FRAME COERCION METHOD ## This is a method for the as.data.frame S3 generic. We need this to intercept the ## DRAC list object after it hast passed the actual list-method. After it was ## coerced to a data.frame we assign new column names (DRAC ID keys) and ## make sure that all columns are either of class 'character' or 'numeric'. ## Finally, we attach a further class name to identify it as a valid DRAC object ## when passed to use_DRAC #' @export as.data.frame.DRAC.list <- function(x, row.names = NULL, optional = FALSE, ...) { DF <- as.data.frame.list(x) colnames(DF) <- paste0("TI:", 1:ncol(DF)) for (i in 1:ncol(DF)) { if (is.factor(DF[ ,i])) DF[ ,i] <- as.character(DF[, i]) } class(DF) <- c("data.frame", "DRAC.data.frame") return(DF) } ## ---------------------------------------------------------------------------## ## PRINT METHOD #' @export print.DRAC.highlights <- function(x, ...) { x <- as.list(x) names <- names(x) mapply(function(el, name) { cat(paste0(attributes(el)$key, " = ", name,":\n ", paste(el, collapse = ",\n "), "\n")) }, x, names) } #' @export print.DRAC.list <- function(x, blueprint = FALSE, ...) 
{ ## CASE 1: Pretty print the structure of the DRAC list if (!blueprint) { limit <- 80 for (i in 1:length(x)) { # for pretty printing we insert newlines and tabs at specified lengths ls <- attributes(x[[i]])$description ls.n <- nchar(ls) ls.block <- floor(ls.n / limit) strStarts <- seq(0, ls.n, limit) strEnds <- seq(limit-1, ls.n + limit, limit) blockString <- paste(mapply(function(start, end) { trimmedString <- paste(substr(ls, start, end), "\n\t\t\t") if (substr(trimmedString, 1, 1) == " ") trimmedString <- gsub("^[ ]*", "", trimmedString) return(trimmedString) }, strStarts, strEnds), collapse="") msg <- paste(attributes(x[[i]])$key, "=>",names(x)[i], "\n", "\t VALUES =", paste(x[[i]], collapse = ", "), "\n", "\t ALLOWS 'X' = ", attributes(x[[i]])$allowsX, "\n", "\t REQUIRED =", attributes(x[[i]])$required, "\n", "\t DESCRIPTION = ", blockString, "\n" ) if (!is.null(levels(x[[i]]))) { msg <- paste(msg, "\t OPTIONS = ", paste(levels(x[[i]]), collapse = ", "), "\n\n") } else { msg <- paste(msg, "\n") } cat(msg) } } ## CASE 2: Return a 'blueprint' that can be copied from the console to a ## script so the user does not need to write down all >50 fields by hand if (blueprint) { var <- as.list(sys.call())[[2]] names <- names(x) for (i in 1:length(x)) { # in case of factors also show available levels as comments so you don't # have to look it up if (is.factor(x[[i]])) options <- paste("# OPTIONS:", paste(levels(x[[i]]), collapse = ", ")) else options <- "" # determine if values need brackets (strings) if (is.numeric(x[[i]]) | is.integer(x[[i]])) values <- paste(x[[i]], collapse = ", ") if (is.character(x[[i]]) | is.factor(x[[i]])) values <- paste0("'", paste0(x[[i]], collapse = "', '"), "'") cat(paste0(var, "$`", names[i], "` <- c(", values,") ", options ,"\n")) } message("\n\t You can copy all lines above to your script and fill in the data.") } } ## ---------------------------------------------------------------------------## ## DOUBLE SQUARE BRACKETS METHOD #' 
@export `[[<-.DRAC.list` <- function(x, i, value) { ## REJECT ALL INADEQUATE CLASSES ---- acceptedClasses <- c("integer", "character", "numeric", "factor") if (is.na(match(class(value), acceptedClasses))) { warning(paste("I cannot use objects of class", class(value)), call. = FALSE) return(x) } ## CHECK INPUT LENGTH ---- length.old <- length(x[[i]]) length.new <- length(value) if (length.old != length.new) { warning(paste(names(x)[i], ": Input must be of length", length.old), call. = FALSE) return(x) } ## CHECK INPUT CLASS ---- class.old <- class(x[[i]]) class.new <- class(value) ## CHECK INPUT FIELDS THAT ALLOW 'X' ----- # the following checks apply to fields that are normally numeric, but also # accept 'X' as input. this EXCLUDES factors! if (class.old != "factor") { # some input fields allow 'X' as input, so in terms of R can be of class # "character" or "numeric/integer". hence, we check if input is "X" and # if the filed allows it. If so, we change the old class to "character". if (any(value == "X") && attributes(x[[i]])$allowsX) { if (any(is.na(as.numeric(value[which(value != "X")])))) { warning(paste("Cannot coerce <", value[which(value != "X")], "> to a numeric value.", "Input must be numeric or 'X'."), call. = FALSE) return(x) } class.old <- "character" } # where the input field is alreay "X" we have to check whether the new # non-character input is allowed if (!all(is.na(x[[i]]))) { if (any(x[[i]] == "X") && attributes(x[[i]])$allowsX) { if (any(is.na(as.numeric(value[which(value != "X")])))) { warning(paste("Cannot coerce <", value[which(value != "X")], "> to a numeric value.", "Input must be numeric or 'X'. \n"), call. = FALSE) return(x) } class.new <- "character" value <- as.character(value) } } # when a numeric input field was inserted an "X" it was coerced to class # character. since we are now allowed to insert any character (later tests) # we need to make sure that the new input can be coerced to class numeric. 
# and if the new values are numeric, we coerce them to character if (attributes(x[[i]])$allowsX && class.old == "character") { if (any(is.na(as.numeric(value[which(value != "X")])))) { warning(paste("Cannot coerce <", value[which(value != "X")], "> to a numeric value.", "Input must be numeric or 'X'. \n"), call. = FALSE) return(x) } class.new <- "character" value <- as.character(value) } } # numeric input can be both of class 'integer' or 'numeric'. We will # allow any combination and reject only non-numeric/integer input if (class.old == "numeric" || class.old == "integer") { if (class.new != "numeric" && class.new != "integer") { warning(paste(names(x)[i], ": Input must be of class", class.old), call. = FALSE) return(x) } } # for 'factor' and 'character' elements only 'character' input is allowed if (class.old == "factor" || class.old == "character") { if (class.new != "character") { warning(paste(names(x)[i], ": Input must be of class", "character"), call. = FALSE) return(x) } } ## CHECK IF VALID OPTION ---- # in case of 'factor's the user is only allowed input that matches one of # the options specified by the factor levels. if it is a valid option, # the input is converted to a factor to keep the information. if (class.old == "factor") { levels <- levels(x[[i]]) if (any(`%in%`(value, levels) == FALSE)) { warning(paste(names(x)[i], ": Invalid option. Valid options are:", paste(levels, collapse = ", ")), call. 
= FALSE) return(x) } else { value <- factor(value, levels) } } ## WRITE NEW VALUES ---- # we strip our custom class and the attributes, pass the object to the default generic and # finally re-attach our class and attributes tmp.attributes <- attributes(x[[i]])[names(attributes(x[[i]])) != "class"] class(x) <- "list" x <- `[[<-`(x, i, value) attributes(x[[i]]) <- tmp.attributes if (class.old == "factor") class(x[[i]]) <- "factor" class(x) <- c("DRAC.list", "list") return(x) } ## ---------------------------------------------------------------------------## ## SINGLE SQUARE BRACKET METHOD #' @export `[<-.DRAC.list` <- function(x, i, value) { return(`[[<-`(x, i, value)) } ## ---------------------------------------------------------------------------## ## DOLLAR SIGN METHOD #' @export `$<-.DRAC.list`<- function(x, name, value) { # this is straightforward; retrieve the index and pass the object # to the custom [[<- function, which does the data verification index <- which(names(x) == name) x[[index]] <- value return(x) }Luminescence/R/plot_RLum.R0000644000176200001440000001250513423370354015134 0ustar liggesusers#' General plot function for RLum S4 class objects #' #' Function calls object specific plot functions for RLum S4 class objects. #' #' The function provides a generalised access point for plotting specific #' [RLum-class] objects.\cr #' Depending on the input object, the #' corresponding plot function will be selected. Allowed arguments can be #' found in the documentations of each plot function. 
#' #' \tabular{lll}{ #' **object** \tab \tab **corresponding plot function** \cr #' [RLum.Data.Curve-class] \tab : \tab [plot_RLum.Data.Curve] \cr #' [RLum.Data.Spectrum-class] \tab : \tab [plot_RLum.Data.Spectrum]\cr #' [RLum.Data.Image-class] \tab : \tab [plot_RLum.Data.Image]\cr #' [RLum.Analysis-class] \tab : \tab [plot_RLum.Analysis]\cr #' [RLum.Results-class] \tab : \tab [plot_RLum.Results] #' } #' #' @param object [RLum-class] (**required**): #' S4 object of class `RLum`. Optional a [list] containing objects of #' class [RLum-class] can be provided. In this case the function tries to plot #' every object in this list according to its `RLum` class. #' #' @param ... further arguments and graphical parameters that will be passed #' to the specific plot functions. The only argument that is supported directly is `main` #' (setting the plot title). In contrast to the normal behaviour `main` can be here provided as #' [list] and the arguments in the list will dispatched to the plots if the `object` #' is of type `list` as well. #' #' @return Returns a plot. #' #' @note The provided plot output depends on the input object. #' #' @section Function version: 0.4.3 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #' @seealso [plot_RLum.Data.Curve], [RLum.Data.Curve-class], [plot_RLum.Data.Spectrum], #' [RLum.Data.Spectrum-class], [plot_RLum.Data.Image], [RLum.Data.Image-class], #' [plot_RLum.Analysis], [RLum.Analysis-class], [plot_RLum.Results], #' [RLum.Results-class] #' #' #' @keywords dplot #' #' @examples #' #' #load Example data #' data(ExampleData.CW_OSL_Curve, envir = environment()) #' #' #transform data.frame to RLum.Data.Curve object #' temp <- as(ExampleData.CW_OSL_Curve, "RLum.Data.Curve") #' #' #plot RLum object #' plot_RLum(temp) #' #' #' @md #' @export plot_RLum<- function( object, ... 
){ # Define dispatcher function ---------------------------------------------------------- ##check if object is of class RLum RLum.dispatcher <- function(object, ...) { if (inherits(object, "RLum")) { ##grep object class object.class <- is(object)[1] ##select which plot function should be used and call it switch ( object.class, RLum.Data.Curve = plot_RLum.Data.Curve(object = object, ...), RLum.Data.Spectrum = plot_RLum.Data.Spectrum(object = object, ...), RLum.Data.Image = plot_RLum.Data.Image(object = object, ...), ##here we have to do prevent the partial matching with 'sub' by 'subset' RLum.Analysis = if(!grepl(pattern = "subset", x = paste(deparse(match.call()), collapse = " "), fixed = TRUE)){ plot_RLum.Analysis(object = object, subset = NULL, ...) }else{ plot_RLum.Analysis(object = object, ...) }, RLum.Results = plot_RLum.Results(object = object, ...) ) }else{ stop(paste0( "[plot_RLum()] Sorry, I don't know what to do for object of type '", is(object)[1], "'." ), call. = FALSE) } } # Run dispatcher ------------------------------------------------------------------------------ ##call for the list, if not just proceed as normal if(class(object) == "list") { ##(0) we might have plenty of sublists before we have the list containing only ##RLum-objects object <- .unlist_RLum(object) ##(1) get rid of objects which are not RLum objects to avoid errors object.cleaned <- object[sapply(object, inherits, what = "RLum")] ##(1.1) place warning message if (length(object) > length(object.cleaned)) { warning(paste0( length(object) - length(object.cleaned)," non 'RLum' object(s) removed from list." )) } ##(2) check if empty, if empty do nothing ... 
if (length(object.cleaned) != 0) { ## If we iterate over a list, this might be extremly useful to have different plot titles if("main" %in% names(list(...))){ if(is(list(...)$main,"list")){ main.list <- rep(list(...)$main, length = length(object.cleaned)) } } ##set also mtext, but in a different way if(!"mtext" %in% names(list(...))){ if(is(object[[1]], "RLum.Analysis")){ mtext <- paste("Record:", 1:length(object.cleaned)) }else{ mtext <- NULL } }else{ mtext <- rep(list(...)$mtext, length.out = length(object.cleaned)) } if(exists("main.list")){ ##dispatch objects for (i in 1:length(object.cleaned)) { RLum.dispatcher(object = object[[i]], main = main.list[[i]], mtext = mtext[[i]], ...) } }else{ for (i in 1:length(object.cleaned)) { RLum.dispatcher(object = object[[i]], mtext = mtext[[i]], ...) } } } }else{ ##dispatch object RLum.dispatcher(object = object, ...) } } Luminescence/R/analyse_SAR.TL.R0000644000176200001440000005211313540652272015677 0ustar liggesusers#' Analyse SAR TL measurements #' #' The function performs a SAR TL analysis on a #' [RLum.Analysis-class] object including growth curve fitting. #' #' This function performs a SAR TL analysis on a set of curves. The SAR #' procedure in general is given by Murray and Wintle (2000). For the #' calculation of the Lx/Tx value the function [calc_TLLxTxRatio] is #' used. #' #' **Provided rejection criteria** #' #' `[recyling.ratio]`: calculated for every repeated regeneration dose point. #' #' `[recuperation.rate]`: recuperation rate calculated by #' comparing the Lx/Tx values of the zero regeneration point with the Ln/Tn #' value (the Lx/Tx ratio of the natural signal). 
For methodological #' background see Aitken and Smith (1988) #' #' @param object [RLum.Analysis-class] or a [list] of such objects (**required**) : #' input object containing data for analysis #' #' @param object.background currently not used #' #' @param signal.integral.min [integer] (**required**): #' requires the channel number for the lower signal integral bound #' (e.g. `signal.integral.min = 100`) #' #' @param signal.integral.max [integer] (**required**): #' requires the channel number for the upper signal integral bound #' (e.g. `signal.integral.max = 200`) #' #' @param integral_input [character] (*with default*): #' defines the input for the the arguments `signal.integral.min` and #' `signal.integral.max`. These limits can be either provided `'channel'` #' number (the default) or `'temperature'`. If `'temperature'` is chosen the #' best matching channel is selected. #' #' @param sequence.structure [vector] [character] (*with default*): #' specifies the general sequence structure. Three steps are allowed #' (`"PREHEAT"`, `"SIGNAL"`, `"BACKGROUND"`), in addition a #' parameter `"EXCLUDE"`. This allows excluding TL curves which are not #' relevant for the protocol analysis. (**Note:** None TL are removed by default) #' #' @param rejection.criteria [list] (*with default*): #' list containing rejection criteria in percentage for the calculation. #' #' @param dose.points [numeric] (*optional*): #' option set dose points manually #' #' @param log [character] (*with default*): #' a character string which contains "x" if the x axis is to be logarithmic, #' "y" if the y axis is to be logarithmic and "xy" or "yx" if both axes are to be logarithmic. See #' [plot.default]). #' #' @param ... 
further arguments that will be passed to the function [plot_GrowthCurve] #' #' @return #' A plot (*optional*) and an [RLum.Results-class] object is #' returned containing the following elements: #' #' \item{De.values}{[data.frame] containing De-values and further parameters} #' \item{LnLxTnTx.values}{[data.frame] of all calculated Lx/Tx values including signal, background counts and the dose points.} #' \item{rejection.criteria}{[data.frame] with values that might by used as rejection criteria. NA is produced if no R0 dose point exists.} #' #' **note:** the output should be accessed using the function [get_RLum] #' #' @note #' **THIS IS A BETA VERSION** #' #' None TL curves will be removed #' from the input object without further warning. #' #' @section Function version: 0.3.0 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CRNS-Universite Bordeaux Montaigne (France) #' #' @seealso [calc_TLLxTxRatio], [plot_GrowthCurve], [RLum.Analysis-class], #' [RLum.Results-class], [get_RLum] #' #' @references #' Aitken, M.J. and Smith, B.W., 1988. Optical dating: recuperation #' after bleaching. Quaternary Science Reviews 7, 387-393. #' #' Murray, A.S. and Wintle, A.G., 2000. Luminescence dating of quartz using an #' improved single-aliquot regenerative-dose protocol. Radiation Measurements #' 32, 57-73. 
#' #' @keywords datagen plot #' #' @examples #' #' ##load data #' data(ExampleData.BINfileData, envir = environment()) #' #' ##transform the values from the first position in a RLum.Analysis object #' object <- Risoe.BINfileData2RLum.Analysis(TL.SAR.Data, pos=3) #' #' ##perform analysis #' analyse_SAR.TL( #' object = object, #' signal.integral.min = 210, #' signal.integral.max = 220, #' fit.method = "EXP OR LIN", #' sequence.structure = c("SIGNAL", "BACKGROUND")) #' #' @md #' @export analyse_SAR.TL <- function( object, object.background, signal.integral.min, signal.integral.max, integral_input = "channel", sequence.structure = c("PREHEAT", "SIGNAL", "BACKGROUND"), rejection.criteria = list(recycling.ratio = 10, recuperation.rate = 10), dose.points, log = "", ... ){ # Self-call ----------------------------------------------------------------------------------- if(class(object) == "list"){ if(!all(sapply(object, class) == "RLum.Analysis")) stop("[analyse_SAR.TL()] All elements in the input list need to be of class 'RLum.Analysis'!", call. = FALSE) ##run sequence results <- lapply(object, function(o){ analyse_SAR.TL( object = o, object.background = object.background, signal.integral.min = signal.integral.min, signal.integral.max = signal.integral.max, integral_input = integral_input, sequence.structure = sequence.structure, rejection.criteria = rejection.criteria, dose.points = dose.points, log = log, ... ) }) ##combine results results <- merge_RLum(results) ##return return(results) } # CONFIG ----------------------------------------------------------------- ##set allowed curve types type.curves <- c("TL") ##=============================================================================# # General Integrity Checks --------------------------------------------------- ##GENERAL ##MISSING INPUT if(missing("object")==TRUE){ stop("[analyse_SAR.TL()] No value set for 'object'!", call. 
= FALSE) } if(missing("signal.integral.min") == TRUE){ stop("[analyse_SAR.TL()] No value set for 'signal.integral.min'!", call. = FALSE) } if(missing("signal.integral.max") == TRUE){ stop("[analyse_SAR.TL()] No value set for 'signal.integral.max'!", call. = FALSE) } ##INPUT OBJECTS if(is(object, "RLum.Analysis") == FALSE){ stop("[analyse_SAR.TL()] Input object is not of type 'RLum.Analyis'!", call. = FALSE) } # Protocol Integrity Checks -------------------------------------------------- ##Remove non TL-curves from object by selecting TL curves object@records <- get_RLum(object, recordType = type.curves) ##ANALYSE SEQUENCE OBJECT STRUCTURE ##set vector for sequence structure temp.protocol.step <- rep(sequence.structure,length(object@records))[1:length(object@records)] ##grep object strucute temp.sequence.structure <- structure_RLum(object) ##set values for step temp.sequence.structure[,"protocol.step"] <- temp.protocol.step ##remove TL curves which are excluded temp.sequence.structure <- temp.sequence.structure[which( temp.sequence.structure[,"protocol.step"]!="EXCLUDE"),] ##check integrity; signal and bg range should be equal if(length( unique( temp.sequence.structure[temp.sequence.structure[,"protocol.step"]=="SIGNAL","n.channels"]))>1){ stop(paste( "[analyse_SAR.TL()] Signal range differs. 
Check sequence structure.\n", temp.sequence.structure )) } ##check if the wanted curves are a multiple of the structure if(length(temp.sequence.structure[,"id"])%%length(sequence.structure)!=0){ stop("[analyse_SAR.TL()] Input TL curves are not a multiple of the sequence structure.") } # # Calculate LnLxTnTx values -------------------------------------------------- ##grep IDs for signal and background curves TL.preheat.ID <- temp.sequence.structure[ temp.sequence.structure[,"protocol.step"] == "PREHEAT","id"] TL.signal.ID <- temp.sequence.structure[ temp.sequence.structure[,"protocol.step"] == "SIGNAL","id"] TL.background.ID <- temp.sequence.structure[ temp.sequence.structure[,"protocol.step"] == "BACKGROUND","id"] ##comfort ... translate integral limits from temperature to channel if(integral_input == "temperature"){ signal.integral.min <- which.min(abs( signal.integral.min - get_RLum(object, record.id = TL.signal.ID[1])[, 1] )) signal.integral.max <- which.min(abs( signal.integral.max - get_RLum(object, record.id = TL.signal.ID[1])[, 1] )) } ##calculate LxTx values using external function for(i in seq(1,length(TL.signal.ID),by=2)){ temp.LnLxTnTx <- get_RLum( calc_TLLxTxRatio( Lx.data.signal = get_RLum(object, record.id = TL.signal.ID[i]), Lx.data.background = if (length(TL.background.ID) == 0) { NULL } else{ get_RLum(object, record.id = TL.background.ID[i]) }, Tx.data.signal = get_RLum(object, record.id = TL.signal.ID[i + 1]), Tx.data.background = if (length(TL.background.ID) == 0){ NULL }else{ get_RLum(object, record.id = TL.background.ID[i + 1]) }, signal.integral.min, signal.integral.max ) ) ##grep dose temp.Dose <- object@records[[TL.signal.ID[i]]]@info$IRR_TIME ##take about NULL values if(is.null(temp.Dose)){ temp.Dose <- NA } ##bind data.frame temp.LnLxTnTx <- cbind(Dose=temp.Dose, temp.LnLxTnTx) if(exists("LnLxTnTx")==FALSE){ LnLxTnTx <- data.frame(temp.LnLxTnTx) }else{ LnLxTnTx <- rbind(LnLxTnTx,temp.LnLxTnTx) } } ##set dose.points manually if argument was 
set if(!missing(dose.points)){ temp.Dose <- dose.points LnLxTnTx$Dose <- dose.points } # Set regeneration points ------------------------------------------------- #generate unique dose id - this are also the # for the generated points temp.DoseID <- c(0:(length(LnLxTnTx[["Dose"]]) - 1)) temp.DoseName <- paste0("R", temp.DoseID) temp.DoseName <- cbind(Name = temp.DoseName, Dose = LnLxTnTx[["Dose"]]) ##set natural temp.DoseName[temp.DoseName[, "Name"] == "R0", "Name"] <- "Natural" ##set R0 temp.DoseName[temp.DoseName[,"Name"]!="Natural" & temp.DoseName[,"Dose"]==0,"Name"]<-"R0" ##find duplicated doses (including 0 dose - which means the Natural) temp.DoseDuplicated<-duplicated(temp.DoseName[,"Dose"]) ##combine temp.DoseName temp.DoseName<-cbind(temp.DoseName,Repeated=temp.DoseDuplicated) ##correct value for R0 (it is not really repeated) temp.DoseName[temp.DoseName[,"Dose"]==0,"Repeated"]<-FALSE ##combine in the data frame temp.LnLxTnTx <- data.frame(Name = temp.DoseName[, "Name"], Repeated = as.logical(temp.DoseName[, "Repeated"])) LnLxTnTx<-cbind(temp.LnLxTnTx,LnLxTnTx) LnLxTnTx[,"Name"]<-as.character(LnLxTnTx[,"Name"]) # Calculate Recycling Ratio ----------------------------------------------- ##Calculate Recycling Ratio if(length(LnLxTnTx[LnLxTnTx[,"Repeated"]==TRUE,"Repeated"])>0){ ##identify repeated doses temp.Repeated<-LnLxTnTx[LnLxTnTx[,"Repeated"]==TRUE,c("Name","Dose","LxTx")] ##find concering previous dose for the repeated dose temp.Previous<-t(sapply(1:length(temp.Repeated[,1]),function(x){ LnLxTnTx[LnLxTnTx[,"Dose"]==temp.Repeated[x,"Dose"] & LnLxTnTx[,"Repeated"]==FALSE,c("Name","Dose","LxTx")] })) ##convert to data.frame temp.Previous<-as.data.frame(temp.Previous) ##set column names temp.ColNames<-sapply(1:length(temp.Repeated[,1]),function(x){ paste(temp.Repeated[x,"Name"],"/", temp.Previous[temp.Previous[,"Dose"]==temp.Repeated[x,"Dose"],"Name"], sep="") }) ##Calculate Recycling Ratio 
RecyclingRatio<-as.numeric(temp.Repeated[,"LxTx"])/as.numeric(temp.Previous[,"LxTx"]) ##Just transform the matrix and add column names RecyclingRatio<-t(RecyclingRatio) colnames(RecyclingRatio)<-temp.ColNames }else{RecyclingRatio<-NA} # Calculate Recuperation Rate --------------------------------------------- ##Recuperation Rate if("R0" %in% LnLxTnTx[,"Name"]==TRUE){ Recuperation<-round(LnLxTnTx[LnLxTnTx[,"Name"]=="R0","LxTx"]/ LnLxTnTx[LnLxTnTx[,"Name"]=="Natural","LxTx"],digits=4) }else{Recuperation<-NA} # Combine and Evaluate Rejection Criteria --------------------------------- RejectionCriteria <- data.frame( citeria = c(colnames(RecyclingRatio), "recuperation rate"), value = c(RecyclingRatio,Recuperation), threshold = c( rep(paste("+/-", rejection.criteria$recycling.ratio/100) ,length(RecyclingRatio)), paste("", rejection.criteria$recuperation.rate/100) ), status = c( if(is.na(RecyclingRatio)==FALSE){ sapply(1:length(RecyclingRatio), function(x){ if(abs(1-RecyclingRatio[x])>(rejection.criteria$recycling.ratio/100)){ "FAILED" }else{"OK"}})}else{NA}, if(is.na(Recuperation)==FALSE & Recuperation>rejection.criteria$recuperation.rate){"FAILED"}else{"OK"} )) ##============================================================================## ##PLOTTING ##============================================================================## # Plotting - Config ------------------------------------------------------- ##grep plot parameter par.default <- par(no.readonly = TRUE) on.exit(par(par.default)) ##grep colours col <- get("col", pos = .LuminescenceEnv) ##set layout matrix layout(matrix(c( 1, 1, 2, 2, 1, 1, 2, 2, 3, 3, 4, 4, 3, 3, 4, 4, 5, 5, 5, 5 ), 5, 4, byrow = TRUE)) par(oma = c(0, 0, 0, 0), mar = c(4, 4, 3, 3)) ## 1 -> TL Lx ## 2 -> TL Tx ## 3 -> TL Lx Plateau ## 4 -> TL Tx Plateau ## 5 -> Legend ##recalculate signal.integral from channels to temperature signal.integral.temperature <- c(object@records[[TL.signal.ID[1]]]@data[signal.integral.min,1] : 
object@records[[TL.signal.ID[1]]]@data[signal.integral.max,1]) ##warning if number of curves exceed colour values if(length(col) 0) { mtext("[FAILED]", col = "red") } } # Plotting GC ---------------------------------------- #reset par par(par.default) ##create data.frame temp.sample <- data.frame( Dose = LnLxTnTx$Dose, LxTx = LnLxTnTx$LxTx, LxTx.Error = LnLxTnTx$LxTx.Error, TnTx = LnLxTnTx$TnTx ) ##set NA values to 0 temp.sample[is.na(temp.sample$LxTx.Error),"LxTx.Error"] <- 0 ##run curve fitting temp.GC <- try(plot_GrowthCurve( sample = temp.sample, ... )) ##check for error if(inherits(temp.GC, "try-error")){ return(NULL) }else{ temp.GC <- get_RLum(temp.GC)[, c("De", "De.Error")] } ##add recjection status if(length(grep("FAILED",RejectionCriteria$status))>0){ temp.GC <- data.frame(temp.GC, RC.Status="FAILED") }else{ temp.GC <- data.frame(temp.GC, RC.Status="OK") } # Return Values ----------------------------------------------------------- newRLumResults.analyse_SAR.TL <- set_RLum( class = "RLum.Results", data = list( data = temp.GC, LnLxTnTx.table = LnLxTnTx, rejection.criteria = RejectionCriteria ), info = list(info = sys.call()) ) return(newRLumResults.analyse_SAR.TL) } Luminescence/R/calc_TLLxTxRatio.R0000644000176200001440000001667213571743147016360 0ustar liggesusers#' Calculate the Lx/Tx ratio for a given set of TL curves -beta version- #' #' Calculate Lx/Tx ratio for a given set of TL curves. #' #' **Uncertainty estimation** #' #' The standard errors are calculated using the following generalised equation: #' #' \deqn{SE_{signal} <- abs(Signal_{net} * BG_f /BG_{signal}} #' #' where \eqn{BG_f} is a term estimated by calculating the standard deviation of the sum of #' the \eqn{L_x} background counts and the sum of the \eqn{T_x} background counts. However, #' if both signals are similar the error becomes zero. 
#' #' @param Lx.data.signal [RLum.Data.Curve-class] or [data.frame] (**required**): #' TL data (x = temperature, y = counts) (TL signal) #' #' @param Lx.data.background [RLum.Data.Curve-class] or [data.frame] (*optional*): #' TL data (x = temperature, y = counts). #' If no data are provided no background subtraction is performed. #' #' @param Tx.data.signal [RLum.Data.Curve-class] or [data.frame] (**required**): #' TL data (x = temperature, y = counts) (TL test signal) #' #' @param Tx.data.background [RLum.Data.Curve-class] or [data.frame] (*optional*): #' TL data (x = temperature, y = counts). #' If no data are provided no background subtraction is performed. #' #' @param signal.integral.min [integer] (**required**): #' channel number for the lower signal integral bound #' (e.g. `signal.integral.min = 100`) #' #' @param signal.integral.max [integer] (**required**): #' channel number for the upper signal integral bound #' (e.g. `signal.integral.max = 200`) #' #' @return #' Returns an S4 object of type [RLum.Results-class]. #' Slot `data` contains a [list] with the following structure: #' #' ``` #' $ LxTx.table #' .. $ LnLx #' .. $ LnLx.BG #' .. $ TnTx #' .. $ TnTx.BG #' .. $ Net_LnLx #' .. $ Net_LnLx.Error #' ``` #' #' @note #' **This function has still BETA status!** Please further note that a similar #' background for both curves results in a zero error and is therefore set to `NA`. 
#' #' @section Function version: 0.3.3 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, Université Bordeaux Montaigne (France) \cr #' Christoph Schmidt, University of Bayreuth (Germany) #' #' @seealso [RLum.Results-class], [analyse_SAR.TL] #' #' @keywords datagen #' #' @examples #' #' ##load package example data #' data(ExampleData.BINfileData, envir = environment()) #' #' ##convert Risoe.BINfileData into a curve object #' temp <- Risoe.BINfileData2RLum.Analysis(TL.SAR.Data, pos = 3) #' #' #' Lx.data.signal <- get_RLum(temp, record.id=1) #' Lx.data.background <- get_RLum(temp, record.id=2) #' Tx.data.signal <- get_RLum(temp, record.id=3) #' Tx.data.background <- get_RLum(temp, record.id=4) #' signal.integral.min <- 210 #' signal.integral.max <- 230 #' #' output <- calc_TLLxTxRatio(Lx.data.signal, #' Lx.data.background, #' Tx.data.signal, Tx.data.background, #' signal.integral.min, signal.integral.max) #' get_RLum(output) #' #' @md #' @export calc_TLLxTxRatio <- function( Lx.data.signal, Lx.data.background = NULL, Tx.data.signal, Tx.data.background = NULL, signal.integral.min, signal.integral.max ){ ##--------------------------------------------------------------------------## ##(1) - a few integrity check ##check for MISSING objects if(missing(Lx.data.signal) | missing(Tx.data.signal) | missing(signal.integral.min) | missing(signal.integral.max)){ temp.missing <- paste( c(if(missing(Lx.data.signal)){"Lx.data.signal"}, if(missing(Tx.data.signal)){"Tx.data.signal"}, if(missing(signal.integral.min)){"signal.integral.min"}, if(missing(signal.integral.max)){"signal.integral.max"}), collapse = ", ") stop(paste("[calc_TLLxTxRatio()] Arguments are missing: ",temp.missing, ".", sep=""), call. 
= FALSE) } ##check DATA TYPE differences if(is(Lx.data.signal)[1]!=is(Tx.data.signal)[1]){ stop("[calc_TLLxTxRatio()] Data type of Lx and Tx data differs!")} ##check for allowed data.types if(!is(Lx.data.signal, "data.frame") & !is(Lx.data.signal, "RLum.Data.Curve")){ stop("[calc_TLLxTxRatio()] Input data type for not allowed. Allowed are 'RLum.Data.Curve' and 'data.frame'") } ##--------------------------------------------------------------------------## ## Type conversion (assuming that all input variables are of the same type) if(is(Lx.data.signal, "RLum.Data.Curve")){ Lx.data.signal <- as(Lx.data.signal, "matrix") Tx.data.signal <- as(Tx.data.signal, "matrix") if(missing(Lx.data.background) == FALSE && is.null(Lx.data.background) == FALSE){ Lx.data.background <- as(Lx.data.background, "matrix") } if(missing(Tx.data.background) == FALSE && is.null(Tx.data.background) == FALSE){ Tx.data.background <- as(Tx.data.background, "matrix") } } ##(d) - check if Lx and Tx curves have the same channel length if(length(Lx.data.signal[,2])!=length(Tx.data.signal[,2])){ stop("[calc_TLLxTxRatio()] Channel number of Lx and Tx data differs!")} ##(e) - check if signal integral is valid if(signal.integral.min < 1 | signal.integral.max > length(Lx.data.signal[,2])){ stop("[calc_TLLxTxRatio()] Signal.integral is not valid!")} # Background Consideration -------------------------------------------------- ##Lx.data if(!is.null(Lx.data.background)){ LnLx.BG <- sum(Lx.data.background[signal.integral.min:signal.integral.max,2]) }else{ LnLx.BG <- NA } ##Tx.data if(!is.null(Tx.data.background)){ TnTx.BG <- sum(Tx.data.background[signal.integral.min:signal.integral.max,2]) }else{ TnTx.BG <- NA } # Calculate Lx/Tx values -------------------------------------------------- LnLx <- sum(Lx.data.signal[signal.integral.min:signal.integral.max,2]) TnTx <- sum(Tx.data.signal[signal.integral.min:signal.integral.max,2]) ##calculate variance of background if(is.na(LnLx.BG) == FALSE & is.na(TnTx.BG) == 
FALSE){ BG.Error <- sd(c(LnLx.BG, TnTx.BG)) if(BG.Error == 0) { warning( "[calc_TLLxTxRatio()] The background signals for Lx and Tx appear to be similar, no background error was calculated.", call. = FALSE ) BG.Error <- NA } } if(is.na(LnLx.BG) == FALSE){ net_LnLx <- LnLx - LnLx.BG net_LnLx.Error <- abs(net_LnLx * BG.Error/LnLx.BG) }else{ net_LnLx <- NA net_LnLx.Error <- NA } if(is.na(TnTx.BG) == FALSE){ net_TnTx <- TnTx - TnTx.BG net_TnTx.Error <- abs(net_TnTx * BG.Error/TnTx.BG) }else{ net_TnTx <- NA net_TnTx.Error <- NA } if(is.na(net_TnTx)){ LxTx <- LnLx/TnTx LxTx.Error <- NA }else{ LxTx <- net_LnLx/net_TnTx LxTx.Error <- abs(LxTx*((net_LnLx.Error/net_LnLx) + (net_TnTx.Error/net_TnTx))) } ##COMBINE into a data.frame temp.results <- data.frame( LnLx, LnLx.BG, TnTx, TnTx.BG, net_LnLx, net_LnLx.Error, net_TnTx, net_TnTx.Error, LxTx, LxTx.Error ) # Return values ----------------------------------------------------------- newRLumResults.calc_TLLxTxRatio <- set_RLum( class = "RLum.Results", data = list(LxTx.table = temp.results), info = list(call = sys.call()) ) return(newRLumResults.calc_TLLxTxRatio) } Luminescence/R/calc_ThermalLifetime.R0000644000176200001440000002627613237102143017256 0ustar liggesusers#' Calculates the Thermal Lifetime using the Arrhenius equation #' #' The function calculates the thermal lifetime of charges for given E (in eV), s (in 1/s) and #' T (in deg. C.) parameters. The function can be used in two operational modes: #' #' **Mode 1 `(profiling = FALSE)`** #' #' An arbitrary set of input parameters (E, s, T) can be provided and the #' function calculates the thermal lifetimes using the Arrhenius equation for #' all possible combinations of these input parameters. 
An array with 3-dimensions #' is returned that can be used for further analyses or graphical output (see example 1) #' #' **Mode 2 `(profiling = TRUE)`** #' #' This mode tries to profile the variation of the thermal lifetime for a chosen #' temperature by accounting for the provided E and s parameters and their corresponding #' standard errors, e.g., `E = c(1.600, 0.001)` #' The calculation based on a Monte Carlo simulation, where values are sampled from a normal #' distribution (for E and s). #' #' **Used equation (Arrhenius equation)** #' #' \deqn{\tau = 1/s exp(E/kT)} #' where: #' \eqn{\tau} in s as the mean time an electron spends in the trap for a given \eqn{T}, #' \eqn{E} trap depth in eV, #' \eqn{s} the frequency factor in 1/s, #' \eqn{T} the temperature in K and \eqn{k} the Boltzmann constant in eV/K (cf. Furetta, 2010). #' #' #' @param E [numeric] (**required**): #' vector of trap depths in eV, #' if `profiling = TRUE` only the first two elements are considered #' #' @param s [numeric] (**required**): #' vector of frequency factor in 1/s, #' if `profiling = TRUE` only the first two elements are considered #' #' @param T [numeric] (*with default*): #' temperature in deg. C for which the lifetime(s) will be calculted. #' A vector can be provided. #' #' @param output_unit [character] (*with default*): #' output unit of the calculated lifetimes, accepted #' entries are: `"Ma"`, `"ka"`, `"a"`, `"d"`, `"h"`, `"min"`, `"s"` #' #' @param profiling [logical] (*with default*): #' this option allows to estimate uncertainties based on #' given E and s parameters and their corresponding standard error #' (cf. details and examples section) #' #' @param profiling_config [list] (*optional*): #' allows to set configurate parameters used for the profiling #' (and only have an effect here). 
Supported parameters are: #' #' - `n` (number of MC runs), #' - `E.distribution` (distribution used for the resampling for E) and #' - `s.distribution` (distribution used for the resampling for s). #' #' Currently only the normal distribution is supported #' (e.g., `profiling_config = list(E.distribution = "norm")` #' #' @param verbose [logical]: #' enables/disables verbose mode #' #' @param plot [logical]: #' enables/disables output plot, currenlty only in combination with `profiling = TRUE`. #' #' @param ... further arguments that can be passed in combination with the plot output. #' Standard plot parameters are supported ([plot.default]) #' #' @return #' A [RLum.Results-class] object is returned a along with a plot (for #' `profiling = TRUE`). The output object contain the following slots: #' #' **`@data`** #' #' \tabular{lll}{ #' **Object** \tab **Type** \tab **Description** \cr #' `lifetimes` \tab [array] or [numeric] \tab calculated lifetimes \cr #' `profiling_matrix` \tab [matrix] \tab profiling matrix used for the MC runs #' } #' #' **`@info`** #' #' \tabular{lll}{ #' **Object** \tab **Type** \tab **Description** \cr #' `call` \tab `call` \tab the original function call #' } #' #' @note #' The profiling is currently based on resampling from a normal distribution, this #' distribution assumption might be, however, not valid for given E and s paramters. #' #' @section Function version: 0.1.0 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #' @seealso [graphics::matplot], [stats::rnorm][stats::Normal], [get_RLum] #' #' @references #' #' Furetta, C., 2010. Handbook of Thermoluminescence, Second Edition. ed. World Scientific. 
#' #' @keywords datagen #' #' @examples #' #' ##EXAMPLE 1 #' ##calculation for two trap-depths with similar frequency factor for different temperatures #' E <- c(1.66, 1.70) #' s <- 1e+13 #' T <- 10:20 #' temp <- calc_ThermalLifetime( #' E = E, #' s = s, #' T = T, #' output_unit = "Ma" #' ) #' contour(x = E, y = T, z = temp$lifetimes[1,,], #' ylab = "Temperature [\u00B0C]", #' xlab = "Trap depth [eV]", #' main = "Thermal Lifetime Contour Plot" #' ) #' mtext(side = 3, "(values quoted in Ma)") #' #' ##EXAMPLE 2 #' ##profiling of thermal life time for E and s and their standard error #' E <- c(1.600, 0.003) #' s <- c(1e+13,1e+011) #' T <- 20 #' calc_ThermalLifetime( #' E = E, #' s = s, #' T = T, #' profiling = TRUE, #' output_unit = "Ma" #') #' #' @md #' @export calc_ThermalLifetime <- function( E, s, T = 20, output_unit = "Ma", profiling = FALSE, profiling_config = NULL, verbose = TRUE, plot = TRUE, ... ){ # Integrity ----------------------------------------------------------------------------------- if(missing(E) | missing(s)){ stop("[calc_ThermalLifetime()] 'E' or 's' or both are missing, but required.", call. 
= FALSE) } # Set variables ------------------------------------------------------------------------------- ##Boltzmann constant k <- 8.6173324e-05 #eV/K ##recalculate temparature T.K <- T + 273.15 #K ##SETTINGS FOR PROFILING ##profiling settings profiling_settings <- list( n = 1000, E.distribution = "norm", s.distribution = "norm" ) ##replace if set if(!is.null(profiling_config)){ profiling_settings <- modifyList(profiling_settings, profiling_config) } ##check for odd input values if (profiling_settings$n < 1000){ profiling_settings$n <- 1000 warning("[calc_ThermalLifetime()] minimum MC runs are 1000, parameter 'n' in profiling_config reset to 1000.") } # Calculation --------------------------------------------------------------------------------- ##set function for the calculation f <- function(E, s, T.K) { 1 / s * exp(E / (k * T.K)) } ##PROFILING if(profiling) { ##set profiling matrix profiling_matrix <- matrix(NA, ncol = 4, nrow = profiling_settings$n) ##fill matrix ##E profiling_matrix[, 1] <- if( profiling_settings$E.distribution == "norm"){ rnorm(profiling_settings$n, mean = E[1], sd = E[2]) }else{ stop("[calc_ThermalLifetime()] unknown distribution setting for E profiling") } ##s profiling_matrix[, 2] <- if (profiling_settings$s.distribution == "norm") { rnorm(profiling_settings$n, mean = s[1], sd = s[2]) } else{ stop("[calc_ThermalLifetime()] unknown distribution setting for s profiling") } ##T profiling_matrix[, 3] <- rep(T.K[1], each = profiling_settings$n) ##calulate lifetimes profiling_matrix[, 4] <- f(profiling_matrix[, 1], profiling_matrix[, 2], profiling_matrix[, 3]) ##reduce E and s vector on the first entry T <- T[1] ##set lifetimes lifetimes <- profiling_matrix[, 4] } else{ ##set empty profiling matrix profiling_matrix <- matrix() ##calculate lifetimes lifetimes <- vapply( X = T.K, FUN = function(i) { vapply( X = E, FUN = function(j) { f(E = j, s = s, T.K = i) }, FUN.VALUE = vector(mode = "numeric", length = length(s)) ) }, FUN.VALUE = 
matrix(numeric(), ncol = length(E), nrow = length(s)) ) ##transform to an arry in either case to have the same output if (!is(lifetimes, "array")) { lifetimes <- array(lifetimes, dim = c(length(s), length(E), length(T))) } ##set dimnames to make reading more clear dimnames(lifetimes) <- list(s, E, paste0("T = ", T, " \u00B0C")) } ##re-calculate lifetimes accourding to the chosen output unit temp.lifetimes <- switch ( output_unit, "s" = lifetimes, "min" = lifetimes / 60, "h" = lifetimes / 60 / 60, "d" = lifetimes / 60 / 60 / 24, "a" = lifetimes / 60 / 60 / 24 / 365, "ka" = lifetimes / 60 / 60 / 24 / 365 / 1000, "Ma" = lifetimes / 60 / 60 / 24 / 365 / 1000 / 1000 ) ##check for invalid values if(is.null(temp.lifetimes)){ output_unit <- "s" warning("[calc_ThermalLifetime()] 'output_unit' unknown, reset to 's'") }else{ lifetimes <- temp.lifetimes rm(temp.lifetimes) } # Terminal output ----------------------------------------------------------------------------- if(verbose){ cat("\n[calc_ThermalLifetime()]\n\n") if(profiling){ cat("\tprofiling = TRUE") cat("\n\t--------------------------\n") } cat(paste("\tmean:\t", format(mean(lifetimes), scientific = TRUE), output_unit)) cat(paste("\n\tsd:\t", format(sd(lifetimes), scientific = TRUE), output_unit)) cat(paste("\n\tmin:\t", format(min(lifetimes), scientific = TRUE), output_unit)) if(!profiling){ cat(paste0(" (@",T[which(lifetimes == min(lifetimes), arr.ind = TRUE)[3]], " \u00B0C)")) } cat(paste("\n\tmax:\t", format(max(lifetimes), scientific = TRUE), output_unit)) if(!profiling){ cat(paste0(" (@",T[which(lifetimes == max(lifetimes), arr.ind = TRUE)[3]], " \u00B0C)")) } cat("\n\t--------------------------") cat(paste0("\n\t(", length(lifetimes), " lifetimes calculated in total)")) } # Plotting ------------------------------------------------------------------------------------ if(plot & profiling){ ##plot settings plot.settings <- list( main = "Thermal Lifetime Density Plot", xlab = paste0("Thermal lifetime 
[",output_unit,"]"), ylab = "Density", xlim = NULL, ylim = NULL, log = "", lwd = 1, lty = 1, col = rgb(0, 0, 0, 0.25) ) ##modify on request plot.settings <- modifyList(plot.settings, list(...)) ##split data and calculate density ##set seq id_seq <- seq( from = 1, to = length(lifetimes), length.out = 200) ##calculate lifetime of the density lifetimes_density <- lapply(1:(length(id_seq) - 1), function(x) { density(lifetimes[id_seq[x]:id_seq[x+1]]) }) ##get x values lifetimes_density.x <- matrix(unlist(lapply(1:length(lifetimes_density), function(i){ lifetimes_density[[i]]$x })), nrow = length(lifetimes_density[[1]]$x)) ##get y values lifetimes_density.y <- matrix(unlist(lapply(1:length(lifetimes_density), function(i){ lifetimes_density[[i]]$y })), nrow = length(lifetimes_density[[1]]$y)) ##plot density curves graphics::matplot( lifetimes_density.x, lifetimes_density.y, type = "l", lwd = plot.settings$lwd, lty = plot.settings$lty, col = plot.settings$col, main = plot.settings$main, xlab = plot.settings$xlab, ylab = plot.settings$ylab, xlim = plot.settings$xlim, ylim = plot.settings$ylim, log = plot.settings$log ) } # Return values ------------------------------------------------------------------------------- return(set_RLum( class = "RLum.Results", data = list(lifetimes = lifetimes, profiling_matrix = profiling_matrix), info = list(call = sys.call()) )) } Luminescence/R/calc_CentralDose.R0000644000176200001440000002544213540751607016414 0ustar liggesusers#' Apply the central age model (CAM) after Galbraith et al. (1999) to a given #' De distribution #' #' This function calculates the central dose and dispersion of the De #' distribution, their standard errors and the profile log likelihood function #' for sigma. #' #' This function uses the equations of Galbraith & Roberts (2012). The #' parameters `delta` and `sigma` are estimated by numerically solving #' eq. 15 and 16. Their standard errors are approximated using eq. 17. 
#' In addition, the profile log-likelihood function for `sigma` is #' calculated using eq. 18 and presented as a plot. Numerical values of the #' maximum likelihood approach are **only** presented in the plot and **not** #' in the console. A detailed explanation on maximum likelihood estimation can #' be found in the appendix of Galbraith & Laslett (1993, 468-470) and #' Galbraith & Roberts (2012, 15) #' #' @param data [RLum.Results-class] or [data.frame] (**required**): #' for [data.frame]: two columns with De `(data[,1])` and De error `(data[,2])` #' #' @param sigmab [numeric] (*with default*): #' additional spread in De values. #' This value represents the expected overdispersion in the data should the sample be #' well-bleached (Cunningham & Walling 2012, p. 100). #' **NOTE**: For the logged model (`log = TRUE`) this value must be #' a fraction, e.g. 0.2 (= 20 \%). If the un-logged model is used (`log = FALSE`), #' sigmab must be provided in the same absolute units of the De values (seconds or Gray). #' #' @param log [logical] (*with default*): #' fit the (un-)logged central age model to De data #' #' @param na.rm [logical] (*with default*): strip `NA` values before the computation proceeds #' #' @param plot [logical] (*with default*): #' plot output #' #' @param ... further arguments (`trace`, `verbose`). #' #' @return Returns a plot (*optional*) and terminal output. 
In addition an #' [RLum.Results-class] object is returned containing the following elements: #' #' \item{.$summary}{[data.frame] summary of all relevant model results.} #' \item{.$data}{[data.frame] original input data} #' \item{.$args}{[list] used arguments} #' \item{.$call}{[call] the function call} #' \item{.$profile}{[data.frame] the log likelihood profile for sigma} #' #' The output should be accessed using the function [get_RLum] #' #' @section Function version: 1.4.0 #' #' @author #' Christoph Burow, University of Cologne (Germany) \cr #' Based on a rewritten S script of Rex Galbraith, 2010 #' #' @seealso [plot], [calc_CommonDose], [calc_FiniteMixture], #' [calc_FuchsLang2001], [calc_MinDose] #' #' @references #' Galbraith, R.F. & Laslett, G.M., 1993. Statistical models for #' mixed fission track ages. Nuclear Tracks Radiation Measurements 4, 459-470. #' #' Galbraith, R.F., Roberts, R.G., Laslett, G.M., Yoshida, H. & Olley, #' J.M., 1999. Optical dating of single grains of quartz from Jinmium rock #' shelter, northern Australia. Part I: experimental design and statistical #' models. Archaeometry 41, 339-364. #' #' Galbraith, R.F. & Roberts, R.G., 2012. Statistical aspects of equivalent dose and error calculation and #' display in OSL dating: An overview and some recommendations. Quaternary #' Geochronology 11, 1-27. #' #' **Further reading** #' #' Arnold, L.J. & Roberts, R.G., 2009. Stochastic modelling of multi-grain equivalent dose #' (De) distributions: Implications for OSL dating of sediment mixtures. #' Quaternary Geochronology 4, 204-230. #' #' Bailey, R.M. & Arnold, L.J., 2006. Statistical modelling of single grain quartz De distributions and an #' assessment of procedures for estimating burial dose. Quaternary Science #' Reviews 25, 2475-2502. #' #' Cunningham, A.C. & Wallinga, J., 2012. Realizing the potential of fluvial archives using robust OSL chronologies. #' Quaternary Geochronology 12, 98-106. #' #' Rodnight, H., Duller, G.A.T., Wintle, A.G. 
& Tooth, S., 2006. Assessing the reproducibility and accuracy #' of optical dating of fluvial deposits. Quaternary Geochronology, 1 109-120. #' #' Rodnight, H., 2008. How many equivalent dose values are needed to #' obtain a reproducible distribution?. Ancient TL 26, 3-10. #' #' @examples #' #' ##load example data #' data(ExampleData.DeValues, envir = environment()) #' #' ##apply the central dose model #' calc_CentralDose(ExampleData.DeValues$CA1) #' #' @md #' @export calc_CentralDose <- function(data, sigmab, log = TRUE, na.rm = FALSE, plot = TRUE, ...) { ## ============================================================================## ## CONSISTENCY CHECK OF INPUT DATA ## ============================================================================## if (!missing(data)) { if (!is(data, "data.frame") & !is(data, "RLum.Results")) { stop("[calc_CentralDose()] 'data' has to be of type 'data.frame' or 'RLum.Results'!", call. = FALSE) } else { if (is(data, "RLum.Results")) { data <- get_RLum(data, "data") } } } ##remove NA values if(na.rm == TRUE && any(is.na(data))){ warning("[calc_CentralDose()] ", length(which(is.na(data))), " NA value(s) removed from dataset!", call. = FALSE) data <- na.exclude(data) } ##make sure we consider onlyt take the first two columns if(ncol(data) < 2 || nrow(data) < 2) stop("[calc_CentralDose()] 'data' should have at least two columns and two rows!", call. = FALSE) ##extract only the first two columns and set column names data <- data[,1:2] colnames(data) <- c("ED", "ED_Error") if (!missing(sigmab)) { if (sigmab < 0 | sigmab > 1 & log) stop("[calc_CentralDose()] sigmab needs to be given as a fraction between 0 and 1 (e.g., 0.2)!", call. = FALSE) } ## ============================================================================## ## ... 
ARGUMENTS ## ============================================================================## options <- list(verbose = TRUE, trace = FALSE) options <- modifyList(options, list(...)) ## ============================================================================## ## CALCULATIONS ## ============================================================================## # set default value of sigmab if (missing(sigmab)) sigmab <- 0 # calculate yu = log(ED) and su = se(logED) if (log) { yu <- log(data$ED) su <- sqrt((data$ED_Error / data$ED)^2 + sigmab^2) } else { yu <- data$ED su <- sqrt((data$ED_Error)^2 + sigmab^2) } # What does the code do? # >> email conversation with Rex Galbraith 2019-06-29 # >> "fixed point iteration" method to estimate sigma # >> starting with a fixed value # >> once sqrt(sum((wu^2) * (yu - delta)^2 / sum(wu))) gets equal to 1 # >> the iteration is complete # >> if everything has converged agains those fixed values # >> this is the maximum likelihood estimate for # >> sigma and delta # calculate starting values and weights sigma <- 0.15 # keep in mind that this is a relative value wu <- 1 / (sigma^2 + su^2) delta <- sum(wu * yu) / sum(wu) n <- length(yu) # compute mle's for (j in 1:200) { delta <- sum(wu * yu) / sum(wu) sigma <- sigma * sqrt(sum((wu^2) * (yu - delta)^2 / sum(wu))) wu <- 1 / (sigma^2 + su^2) # print iterations if (options$trace) print(round(c(delta, sigma), 4)) } # save parameters for terminal output out.delta <- ifelse(log, exp(delta), delta) out.sigma <- ifelse(log, sigma * 100, sigma / out.delta * 100) # log likelihood llik <- 0.5 * sum(log(wu)) - 0.5 * sum(wu * (yu - delta)^2) # save parameter for terminal output out.llik <- round(llik, 4) Lmax <- llik # standard errors sedelta <- 1 / sqrt(sum(wu)) sesigma <- 1 / sqrt(2 * sigma^2 * sum(wu^2)) # save parameters for terminal output if (log) { out.sedelta <- sedelta * 100 out.sesigma <- sesigma } else { out.sedelta <- sedelta / out.delta * 100 out.sesigma <- sqrt((sedelta / delta)^2 + 
(sesigma / out.delta * 100 / out.sigma)^2) * out.sigma / 100 } # profile log likelihood sigmax <- sigma llik <- 0 sig0 <- max(0, sigmax - 8 * sesigma) sig1 <- sigmax + 9.5 * sesigma sig <- try(seq(sig0, sig1, sig1 / 1000), silent = TRUE) if (!inherits(sig, "try-error")) { # TODO: rewrite this loop as a function and maximise with mle2 ll is the actual # log likelihood, llik is a vector of all ll for (s in sig) { wu <- 1 / (s^2 + su^2) mu <- sum(wu * yu)/sum(wu) ll <- 0.5 * sum(log(wu)) - 0.5 * sum(wu * (yu - mu)^2) llik <- c(llik, ll) } llik <- llik[-1] - Lmax } ## ============================================================================## ## TERMINAL OUTPUT ## ============================================================================## if (options$verbose) { cat("\n [calc_CentralDose]") cat(paste("\n\n----------- meta data ----------------")) cat(paste("\n n: ", n)) cat(paste("\n log: ", log)) cat(paste("\n----------- dose estimate ------------")) cat(paste("\n central dose [Gy]: ", format(out.delta, digits = 2, nsmall = 2))) cat(paste("\n SE [Gy]: ", format(out.delta * out.sedelta/100, digits = 2, nsmall = 2))) cat(paste("\n rel. 
SE [%]: ", format(out.sedelta, digits = 2, nsmall = 2))) cat(paste("\n----------- overdispersion -----------")) cat(paste("\n OD [Gy]: ", format(ifelse(log, sigma * out.delta, sigma), digits = 2, nsmall = 2))) cat(paste("\n SE [Gy]: ", format(ifelse(log, sesigma * out.delta, sesigma), digits = 2, nsmall = 2))) cat(paste("\n OD [%]: ", format(out.sigma, digits = 2, nsmall = 2))) cat(paste("\n SE [%]: ", if (class(sig) != "try-error") { format(out.sesigma * 100, digits = 2, nsmall = 2) } else { "-" })) cat(paste("\n-------------------------------------\n\n")) } ## ============================================================================## ## RETURN VALUES ## ============================================================================## if (class(sig) == "try-error") { out.sigma <- 0 out.sesigma <- NA } if(!log) sig <- sig / delta summary <- data.frame(de = out.delta, de_err = out.delta * out.sedelta / 100, OD = out.sigma, OD_err = out.sesigma * 100, Lmax = Lmax) args <- list(log = log, sigmab = sigmab) newRLumResults.calc_CentralDose <- set_RLum( class = "RLum.Results", data = list( summary = summary, data = data, args = args, profile = data.frame( sig = if(!inherits(sig, "try-error")) sig else NA, llik = llik) ), info = list( call = sys.call() ) ) ## =========## PLOTTING if (plot && class(sig) != "try-error") try(plot_RLum.Results(newRLumResults.calc_CentralDose, ...)) invisible(newRLumResults.calc_CentralDose) } Luminescence/R/calc_FastRatio.R0000644000176200001440000003441413250440021016063 0ustar liggesusers#' Calculate the Fast Ratio for CW-OSL curves #' #' Function to calculate the fast ratio of quartz CW-OSL single grain or single #' aliquot curves after Durcan & Duller (2011). #' #' This function follows the equations of Durcan & Duller (2011). The energy #' required to reduce the fast and medium quartz OSL components to `x` and #' `x2` \% respectively using eq. 3 to determine channels L2 and L3 (start #' and end). 
The fast ratio is then calculated from: \eqn{(L1-L3)/(L2-L3)}. #' #' @param object [RLum.Analysis-class], [RLum.Data.Curve-class] or [data.frame] (**required**): #' x, y data of measured values (time and counts). #' #' @param stimulation.power [numeric] (*with default*): #' Stimulation power in mW/cm^2 #' #' @param wavelength [numeric] (*with default*): #' Stimulation wavelength in nm #' #' @param sigmaF [numeric] (*with default*): #' Photoionisation cross-section (cm^2) of the fast component. #' Default value after Durcan & Duller (2011). #' #' @param sigmaM [numeric] (*with default*): #' Photoionisation cross-section (cm^2) of the medium component. #' Default value after Durcan & Duller (2011). #' #' @param Ch_L1 [numeric] (*with default*): #' An integer specifying the channel for L1. #' #' @param Ch_L2 [numeric] (*optional*): #' An integer specifying the channel for L2. #' #' @param Ch_L3 [numeric] (*optional*): #' A vector of length 2 with integer values specifying the start and end channels for L3 #' (e.g., `c(40, 50)`). #' #' @param x [numeric] (*with default*): #' \% of signal remaining from the fast component. #' Used to define the location of L2 and L3 (start). #' #' @param x2 [numeric] (*with default*): #' \% of signal remaining from the medium component. #' Used to define the location of L3 (end). #' #' @param dead.channels [numeric] (*with default*): #' Vector of length 2 in the form of `c(x, y)`. #' Channels that do not contain OSL data, i.e. at the start or end of measurement. #' #' @param fitCW.sigma [logical] (*optional*): #' fit CW-OSL curve using [fit_CWCurve] to calculate `sigmaF` and `sigmaM` (**experimental**). #' #' @param fitCW.curve [logical] (*optional*): #' fit CW-OSL curve using [fit_CWCurve] and derive the counts of L2 and L3 #' from the fitted OSL curve (**experimental**). #' #' @param plot [logical] (*with default*): #' plot output (`TRUE`/`FALSE`) #' #' @param ... available options: `verbose` ([logical]). 
#' Further arguments passed to [fit_CWCurve]. #' #' @return #' Returns a plot (*optional*) and an S4 object of type [RLum.Results-class]. #' The slot `data` contains a [list] with the following elements: #' #' \item{summary}{[data.frame] summary of all relevant results} #' \item{data}{the original input data} #' \item{fit}{[RLum.Results-class] object if either `fitCW.sigma` or `fitCW.curve` is `TRUE`} #' \item{args}{[list] of used arguments} #' \item{call}{`[call]` the function call} #' #' @section Function version: 0.1.1 #' #' @author #' Georgina E. King, University of Bern (Switzerland) \cr #' Julie A. Durcan, University of Oxford (United Kingdom) \cr #' Christoph Burow, University of Cologne (Germany) #' #' @references #' Durcan, J.A. & Duller, G.A.T., 2011. The fast ratio: A rapid measure for testing #' the dominance of the fast component in the initial OSL signal from quartz. #' Radiation Measurements 46, 1065-1072. #' #' Madsen, A.T., Duller, G.A.T., Donnelly, J.P., Roberts, H.M. & Wintle, A.G., 2009. #' A chronology of hurricane landfalls at Little Sippewissett Marsh, Massachusetts, USA, #' using optical dating. Geomorphology 109, 36-45. #' #' **Further reading** #' #' Steffen, D., Preusser, F. & Schlunegger, 2009. OSL quartz age underestimation #' due to unstable signal components. Quaternary Geochronology 4, 353-362. #' #' #' @seealso [fit_CWCurve], [get_RLum], [RLum.Analysis-class], #' [RLum.Results-class], [RLum.Data.Curve-class] #' #' @examples #' # load example CW-OSL curve #' data("ExampleData.CW_OSL_Curve") #' #' # calculate the fast ratio w/o further adjustments #' res <- calc_FastRatio(ExampleData.CW_OSL_Curve) #' #' # show the summary table #' get_RLum(res) #' #' @md #' @export calc_FastRatio <- function(object, stimulation.power = 30.6, wavelength = 470, sigmaF = 2.6E-17, sigmaM = 4.28E-18, Ch_L1 = 1, Ch_L2 = NULL, Ch_L3 = NULL, x = 1, x2 = 0.1, dead.channels = c(0,0), fitCW.sigma = FALSE, fitCW.curve = FALSE, plot = TRUE, ...) 
{
  ## Input verification --------------------------------------------------------
  # FIX: error message was missing the closing parenthesis after "c(40, 50)"
  if (!is.null(Ch_L3) && length(Ch_L3) != 2)
    stop("Input for 'Ch_L3' must be a vector of length 2 (e.g., c(40, 50)).", call. = FALSE)

  ## Input object handling -----------------------------------------------------
  if (inherits(object, "RLum.Analysis"))
    object <- get_RLum(object)

  if (inherits(object, "RLum.Results"))
    object <- get_RLum(object, "data")

  if (!inherits(object, "list"))
    object <- list(object)

  ## Settings ------------------------------------------------------------------
  settings <- list(verbose = TRUE,
                   n.components.max = 3,
                   fit.method = "LM",
                   output.terminal = FALSE,
                   info = list(),
                   fit = NULL)

  # override defaults with args in ...
  settings <- modifyList(settings, list(...))

  ## Calculations --------------------------------------------------------------
  # iterate over all user provided objects and calculate the FR
  fast.ratios <- lapply(object, function(obj) {

    if (inherits(obj, "RLum.Data.Curve"))
      A <- get_RLum(obj)
    else
      A <- obj

    ## Energy calculation
    # P = user defined stimulation power in mW
    # lambdaLED = wavelength of stimulation source in nm
    P <- stimulation.power
    lamdaLED <- wavelength

    ## Constants
    # h = Planck's constant (J s), c = speed of light (m/s)
    # (FIX: original comment had the two labels swapped)
    h <- 6.62607004E-34
    c <- 299792458

    # photon flux of the stimulation source
    I0 <- (P / 1000) / (h * c / (lamdaLED * 10^-9))

    Ch_width <- max(A[ ,1]) / length(A[ ,1])

    # remove dead channels
    A <- as.data.frame(A[(dead.channels[1] + 1):(nrow(A) - dead.channels[2]), ])
    A[ ,1] <- A[ ,1] - A[1,1]

    # estimate the photo-ionisation crossections of the fast and medium
    # component using the fit_CWCurve function
    # FIX: scalar '||' instead of vectorised '|' in the if() condition
    if (fitCW.sigma || fitCW.curve) {
      fitCW.res <- try(fit_CWCurve(A, n.components.max = settings$n.components.max,
                                   fit.method = settings$fit.method,
                                   LED.power = stimulation.power,
                                   LED.wavelength = wavelength,
                                   output.terminal = settings$output.terminal,
                                   plot = plot))
      settings$fit <- fitCW.res

      if (fitCW.sigma) {
        if (!inherits(fitCW.res, "try-error")) {
          sigmaF <- get_RLum(fitCW.res)$cs1
          sigmaM <- get_RLum(fitCW.res)$cs2
          if (settings$verbose) {
            message("\n [calc_FitCWCurve()]\n")
            message("New value for sigmaF: ", format(sigmaF, digits = 3, nsmall = 2))
            message("New value for sigmaM: ", format(sigmaM, digits = 3, nsmall = 2))
          }
        } else {
          if (settings$verbose)
            message("Fitting failed! Please call 'fit_CWCurve()' manually before ",
                    "calculating the fast ratio.")
        }
      }

      if (fitCW.curve) {
        if (!inherits(fitCW.res, "try-error")) {
          nls <- get_RLum(fitCW.res, "fit")
          A[ ,2] <- predict(nls)
        }
      }
    }

    ## The equivalent time in s of L1, L2, L3
    # Use these values to look up the channel
    t_L1 <- 0

    if (is.null(Ch_L2))
      t_L2 <- (log(x / 100)) / (-sigmaF * I0)
    else
      t_L2 <- A[Ch_L2, 1]

    if (is.null(Ch_L3)) {
      t_L3_start <- (log(x / 100)) / (-sigmaM * I0)
      t_L3_end <- (log(x2 / 100)) / (-sigmaM * I0)
    } else {
      t_L3_start <- A[Ch_L3[1], 1]
      t_L3_end <- A[Ch_L3[2], 1]
    }

    ## Channel number(s) of L2 and L3
    if (is.null(Ch_L2))
      Ch_L2 <- which.min(abs(A[,1] - t_L2))

    if (Ch_L2 <= 1) {
      msg <- sprintf("Calculated time/channel for L2 is too small (%.f, %.f). Returned NULL.",
                     t_L2, Ch_L2)
      settings$info <- modifyList(settings$info, list(L2 = msg))
      warning(msg, call. = FALSE)
      return(NULL)
    }

    Ch_L3st <- which.min(abs(A[,1] - t_L3_start))
    Ch_L3end <- which.min(abs(A[,1] - t_L3_end))

    ## Counts in channels L1, L2, L3
    # L1 ----
    Cts_L1 <- A[Ch_L1, 2]

    # L2 ----
    if (Ch_L2 > nrow(A)) {
      msg <- sprintf(paste("The calculated channel for L2 (%i) is equal",
                           "to or larger than the number of available channels (%i).",
                           "Returned NULL."), Ch_L2, nrow(A))
      settings$info <- modifyList(settings$info, list(L2 = msg))
      warning(msg, call. = FALSE)
      return(NULL)
    }
    Cts_L2 <- A[Ch_L2, 2]

    # optional: predict the counts from the fitted curve
    if (fitCW.curve) {
      if (!inherits(fitCW.res, "try-error")) {
        nls <- get_RLum(fitCW.res, "fit")
        Cts_L2 <- predict(nls, list(x = t_L2))
      }
    }

    # L3 ----
    # FIX: scalar '||' instead of vectorised '|' in the if() condition
    if (Ch_L3st >= nrow(A) || Ch_L3end > nrow(A)) {
      msg <- sprintf(paste("The calculated channels for L3 (%i, %i) are equal to or",
                           "larger than the number of available channels (%i).",
                           "\nThe background has instead been estimated from the last",
                           "5 channels."), Ch_L3st, Ch_L3end, nrow(A))
      settings$info <- modifyList(settings$info, list(L3 = msg))
      warning(msg, call. = FALSE)

      # NOTE(review): nrow(A) - 5 to nrow(A) spans 6 channels although the
      # message says 5; kept as-is to preserve numerical results — confirm.
      Ch_L3st <- nrow(A) - 5
      Ch_L3end <- nrow(A)
      t_L3_start <- A[Ch_L3st,1]
      t_L3_end <- A[Ch_L3end,1]
    }
    Cts_L3 <- mean(A[Ch_L3st:Ch_L3end, 2])

    # optional: predict the counts from the fitted curve
    if (fitCW.curve) {
      if (!inherits(fitCW.res, "try-error")) {
        nls <- get_RLum(fitCW.res, "fit")
        Cts_L3 <- mean(predict(nls, list(x = c(t_L3_start, t_L3_end))))
      }
    }

    # Warn if counts are not in decreasing order
    if (Cts_L3 >= Cts_L2)
      warning(sprintf("L3 contains more counts (%.f) than L2 (%.f).",
                      Cts_L3, Cts_L2), call. = FALSE)

    ## Fast Ratio
    FR <- (Cts_L1 - Cts_L3) / (Cts_L2 - Cts_L3)
    if (length(FR) != 1)
      FR <- NA

    ## Fast Ratio - Error calculation
    if (!is.na(FR)) {
      # number of channels the background was derived from
      nBG <- abs(Ch_L3end - Ch_L3st)

      # relative standard errors
      rse_L1 <- sqrt(Cts_L1 + Cts_L3 / nBG) / (Cts_L1 - Cts_L3)
      rse_L2 <- sqrt(Cts_L2 + Cts_L3 / nBG) / (Cts_L2 - Cts_L3)

      # absolute standard errors
      se_L1 <- rse_L1 * (Cts_L1 - Cts_L3)
      se_L2 <- rse_L2 * (Cts_L2 - Cts_L3)

      # absolute standard error on fast ratio
      FR_se <- (sqrt((se_L1 / (Cts_L1 - Cts_L3))^2 +
                       ((se_L2 / (Cts_L2 - Cts_L3))^2))) * FR
      FR_rse <- FR_se / FR * 100
    } else {
      FR_se <- NA
      FR_rse <- NA
    }

    ## Return values -----------------------------------------------------------
    summary <- data.frame(fast.ratio = FR,
                          fast.ratio.se = FR_se,
                          fast.ratio.rse = FR_rse,
                          channels = nrow(A),
                          channel.width = Ch_width,
                          dead.channels.start = as.integer(dead.channels[1]),
                          dead.channels.end = as.integer(dead.channels[2]),
                          sigmaF = sigmaF,
                          sigmaM = sigmaM,
                          I0 = I0,
                          stimulation.power = stimulation.power,
                          wavelength = wavelength,
                          t_L1 = t_L1,
                          t_L2 = t_L2,
                          t_L3_start = t_L3_start,
                          t_L3_end = t_L3_end,
                          Ch_L1 = as.integer(Ch_L1),
                          Ch_L2 = as.integer(Ch_L2),
                          Ch_L3_start = as.integer(Ch_L3st),
                          Ch_L3_end = as.integer(Ch_L3end),
                          Cts_L1 = Cts_L1,
                          Cts_L2 = Cts_L2,
                          Cts_L3 = Cts_L3)

    fast.ratio <- set_RLum(class = "RLum.Results",
                           originator = "calc_FastRatio",
                           data = list(summary = summary,
                                       data = obj,
                                       fit = settings$fit,
                                       args = as.list(sys.call(-2L)[-1]),
                                       call = sys.call(-2L)),
                           info = settings$info
    )

    ## Console Output ----------------------------------------------------------
    if (settings$verbose) {
      table.names <- c(
        "Fast Ratio\t", " \U02EA Absolute error", " \U02EA Relative error (%)",
        "Channels\t", "Channel width (s)", "Dead channels start",
        "Dead channels end", "Sigma Fast\t", "Sigma Medium\t", "I0\t\t",
        "Stim. power (mW/cm^2)", "Wavelength (nm)",
        "-\n Time L1 (s)\t", "Time L2 (s)\t", "Time L3 start (s)", "Time L3 end (s)",
        "-\n Channel L1\t", "Channel L2\t", "Channel L3 start", "Channel L3 end\t",
        "-\n Counts L1\t", "Counts L2\t", "Counts L3\t")

      cat("\n[calc_FastRatio()]\n")
      cat("\n -------------------------------")
      for (i in 1:ncol(summary)) {
        cat(paste0("\n ", table.names[i], "\t: ",
                   format(summary[1, i], digits = 2, nsmall = 2)))
      }
      cat("\n -------------------------------\n\n")
    }

    ## Plotting ----------------------------------------------------------------
    if (plot)
      try(plot_RLum.Results(fast.ratio, ...))

    # return
    return(fast.ratio)
  }) # End of lapply

  if (length(fast.ratios) == 1)
    fast.ratios <- fast.ratios[[1]]

  invisible(fast.ratios)
}
Luminescence/R/Risoe.BINfileData2RLum.Analysis.R0000644000176200001440000002327213604172447021014 0ustar liggesusers#' Convert Risoe.BINfileData object to an RLum.Analysis object
#'
#' Converts values from one specific position of a Risoe.BINfileData S4-class
#' object to an RLum.Analysis object.
#'
#' The [RLum.Analysis-class] object requires a set of curves for
#' specific further protocol analyses. However, the [Risoe.BINfileData-class]
#' usually contains a set of curves for different aliquots and different
#' protocol types that may be mixed up. Therefore, a conversion is needed.
#'
#' @param object [Risoe.BINfileData-class] (**required**):
#' `Risoe.BINfileData` object
#'
#' @param pos [numeric] (*optional*): position number of the `Risoe.BINfileData`
#' object for which the curves are stored in the `RLum.Analysis` object.
#' If `length(position)>1` a list of `RLum.Analysis` objects is returned.
#' If nothing is provided every position will be converted.
#' If the position is not valid `NA` is returned.
#'
#' @param grain [vector], [numeric] (*optional*):
#' grain number from the measurement to limit the converted data set
#' (e.g., `grain = c(1:48)`).
Please be aware that this option may lead to #' unwanted effects, as the output is strictly limited to the choosen grain #' number for all position numbers #' #' @param run [vector], [numeric] (*optional*): #' run number from the measurement to limit the converted data set #' (e.g., `run = c(1:48)`). #' #' @param set [vector], [numeric] (*optional*): #' set number from the measurement to limit the converted data set #' (e.g., `set = c(1:48)`). #' #' @param ltype [vector], [character] (*optional*): #' curve type to limit the converted data. Commonly allowed values are: #' `IRSL`, `OSL`, `TL`, `RIR`, `RBR` and `USER` #' (see also [Risoe.BINfileData-class]) #' #' @param dtype [vector], [character] (*optional*): #' data type to limit the converted data. Commonly allowed values are #' listed in [Risoe.BINfileData-class] #' #' @param protocol [character] (*optional*): #' sets protocol type for analysis object. Value may be used by subsequent #' analysis functions. #' #' @param keep.empty [logical] (*with default*): #' If `TRUE` (default) an `RLum.Analysis` object is returned even if it does #' not contain any records. Set to `FALSE` to discard all empty objects. #' #' @param txtProgressBar [logical] (*with default*): #' enables or disables [txtProgressBar]. #' #' @return Returns an [RLum.Analysis-class] object. #' #' @note #' The `protocol` argument of the [RLum.Analysis-class] #' object is set to 'unknown' if not stated otherwise. 
#'
#' @section Function version: 0.4.2
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @seealso [Risoe.BINfileData-class], [RLum.Analysis-class], [read_BIN2R]
#'
#' @keywords manip
#'
#' @examples
#'
#' ##load data
#' data(ExampleData.BINfileData, envir = environment())
#'
#' ##convert values for position 1
#' Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos = 1)
#'
#' @md
#' @export
Risoe.BINfileData2RLum.Analysis<- function(
  object,
  pos = NULL,
  grain = NULL,
  run = NULL,
  set = NULL,
  ltype = NULL,
  dtype = NULL,
  protocol = "unknown",
  keep.empty = TRUE,
  txtProgressBar = FALSE
){

  # Integrity Check ---------------------------------------------------------

  ## the input must be a Risoe.BINfileData object ...
  if (!is(object,"Risoe.BINfileData")){
    stop("[Risoe.BINfileData2RLum.Analysis()] Input object is not of type 'Risoe.BINfileData'.")
  }

  ## ... and 'pos', if given, must be numeric
  if (!is.null(pos) && !is(pos,"numeric")){
    stop("[Risoe.BINfileData2RLum.Analysis()] Argument 'pos' has to be of type numeric.")
  }

  ## default: convert every position found in the metadata
  if (is.null(pos)) {
    pos <- unique(object@METADATA[["POSITION"]])

  } else{
    ##get and check valid positions and remove invalid numbers from the input
    positions.valid <- unique(object@METADATA[, "POSITION"])

    if (length(setdiff(pos, positions.valid)) > 0) {
      warning(
        paste0(
          "[Risoe.BINfileData2RLum.Analysis()] invalid position number skipped: ",
          paste(setdiff(pos, positions.valid), collapse = ", ")
        ),
        call. = FALSE
      )

      pos <- intersect(pos, positions.valid)
    }
  }

  # Grep run and set data ---------------------------------------------------

  ##grain
  ## invalid grain numbers are dropped with a warning (like 'pos' above)
  if (is.null(grain)) {
    grain <- unique(object@METADATA[,"GRAIN"])

  }else{
    grain.valid <- unique(object@METADATA[,"GRAIN"])

    if(length(setdiff(grain, grain.valid)) > 0){
      warning(paste0("[Risoe.BINfileData2RLum.Analysis()] Invalid grain number skipped: ",
                     paste(setdiff(grain, grain.valid), collapse = ", ")),
              call. = FALSE)

      grain <- intersect(grain, grain.valid)
    }
  }

  ##run
  ## NOTE(review): unlike 'pos'/'grain' above, invalid runs stop() instead of
  ## being dropped, and the check only fails if *no* metadata run matches
  if (is.null(run)) {
    run <- unique(object@METADATA[["RUN"]])

  } else{
    if (TRUE %in% unique(unique(object@METADATA[["RUN"]]) %in% run) != TRUE) {
      ##get and check valid positions
      run.valid <- paste(as.character(unique(object@METADATA[, "RUN"])), collapse = ", ")

      stop(
        paste(
          "[Risoe.BINfileData2RLum.Analysis()] run = ",
          run,
          " contain invalid run(s). Valid runs are: ",
          run.valid,
          sep = ""
        )
      )
    }
  }

  #set
  if(is.null(set)){set <- unique(object@METADATA[["SET"]])

  } else{
    if(TRUE %in% unique(unique(object@METADATA[["SET"]]) %in% set) != TRUE){
      ##get and check valid positions
      set.valid <- paste(as.character(unique(object@METADATA[,"SET"])), collapse=", ")

      stop(paste("[Risoe.BINfileData2RLum.Analysis] set = ", set,
                 " contain invalid set(s). Valid sets are: ", set.valid, sep=""))
    }
  }

  ##ltype
  if (is.null(ltype)) {
    ltype <- unique(object@METADATA[["LTYPE"]])

  } else{
    if (TRUE %in% unique(unique(object@METADATA[, "LTYPE"]) %in% ltype) != TRUE) {
      ##get and check valid positions
      ltype.valid <- paste(as.character(unique(object@METADATA[, "LTYPE"])), collapse = ", ")

      stop(
        paste(
          "[Risoe.BINfileData2RLum.Analysis] ltype = ",
          ltype,
          " contain invalid ltype(s). Valid ltypes are: ",
          ltype.valid,
          sep = ""
        )
      )
    }
  }

  ##dtype
  if (is.null(dtype)) {
    dtype <- unique(object@METADATA[["DTYPE"]])

  } else{
    if (TRUE %in% unique(unique(object@METADATA[, "DTYPE"]) %in% dtype) != TRUE) {
      ##get and check valid positions
      dtype.valid <- paste(as.character(unique(object@METADATA[, "DTYPE"])), collapse = ", ")

      stop(
        paste(
          "[Risoe.BINfileData2RLum.Analysis] dtype = ",
          dtype,
          " contain invalid dtype(s). Valid dtypes are: ",
          dtype.valid,
          sep = ""
        )
      )
    }
  }

  # Select values and convert them-----------------------------------------------------------

  ##set progressbar to false if only one position is provided
  if(txtProgressBar & length(pos)<2){
    txtProgressBar <- FALSE
  }

  ##This loop does:
  ## (a) iterating over all possible positions
  ## (b) consider grains in all possible positions
  ## (c) consider other selections
  ## (d) create the RLum.Analysis objects

  ##set progress bar
  if(txtProgressBar){
    pb<-txtProgressBar(min=min(pos),max=max(pos), char="=", style=3)
  }

  object <- lapply(pos, function(pos){

    ##update progress bar
    if(txtProgressBar){
      setTxtProgressBar(pb, value = pos)
    }

    ##if no grain information is given, we select all grains in the particular position
    if(is.null(grain)){
      grain <- unique(object@METADATA[object@METADATA[["POSITION"]] == pos, "GRAIN"])
    }

    ##loop over the grains and produce RLum.Analysis objects
    object <- lapply(grain, function(grain){

      ##select data
      ##the NA is necessary, as FI readers like to write a NA instead of 0 in the column
      ##and this causes some trouble
      if(is.na(grain)){
        temp_id <- object@METADATA[
          object@METADATA[["POSITION"]] == pos &
            object@METADATA[["RUN"]] %in% run &
            object@METADATA[["SET"]] %in% set &
            object@METADATA[["LTYPE"]] %in% ltype &
            object@METADATA[["DTYPE"]] %in% dtype
          , "ID"]

      }else{
        temp_id <- object@METADATA[
          object@METADATA[["POSITION"]] == pos &
            object@METADATA[["GRAIN"]] == grain &
            object@METADATA[["RUN"]] %in% run &
            object@METADATA[["SET"]] %in% set &
            object@METADATA[["LTYPE"]] %in% ltype &
            object@METADATA[["DTYPE"]] %in% dtype
          , "ID"]
      }

      ##create curve object
      ## ('object' on the right-hand side presumably still refers to the
      ## Risoe.BINfileData input while the promise is forced — the local
      ## rebinding only takes effect after the RHS is evaluated)
      object <- set_RLum(
        class = "RLum.Analysis",
        records = lapply(temp_id,function(x) {
          .Risoe.BINfileData2RLum.Data.Curve(object, id = x)
        }),
        protocol = protocol,
        originator = "Risoe.BINfileData2RLum.Analysis"
      )

      if (!keep.empty && length(object@records) == 0)
        return(NULL)

      ##add unique id of RLum.Analysis object to each curve object as .pid using internal function
      .set_pid(object)

      return(object)
    })

    return(object)
  })

  ##this is necessary to not break with previous code, i.e. if only one element is included
  ##the output is RLum.Analysis and not a list of it
  if(length(object) == 1){
    # special case: single grain data with only 1 position produces a nested list
    # the outer one is of length 1, the nested list has length 100 (100 grains)
    if (is.list(object[[1]]) && length(object[[1]]) > 1)
      invisible(unlist(object))
    else
      invisible(object[[1]][[1]])

  }else{
    invisible(unlist(object))
  }
}
Luminescence/R/get_RLum.R0000644000176200001440000001046713431556052014742 0ustar liggesusers#' General accessor function for RLum S4 class objects
#'
#' Function calls object-specific get functions for RLum S4 class objects.
#'
#' The function provides a generalised access point for specific
#' [RLum-class] objects.\cr
#' Depending on the input object, the corresponding get function will be selected.
#' Allowed arguments can be found in the documentations of the corresponding
#' [RLum-class] class.
#'
#' @param object [RLum-class] (**required**):
#' S4 object of class `RLum` or an object of type [list] containing only objects
#' of type [RLum-class]
#'
#' @param ... further arguments that will be passed to the object specific methods. For
#' further details on the supported arguments please see the class
#' documentation: [RLum.Data.Curve-class], [RLum.Data.Spectrum-class],
#' [RLum.Data.Image-class], [RLum.Analysis-class] and [RLum.Results-class]
#'
#' @return Return is the same as input objects as provided in the list.
#'
#' @section Function version: 0.3.3
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France)
#'
#' @seealso [RLum.Data.Curve-class], [RLum.Data.Image-class],
#' [RLum.Data.Spectrum-class], [RLum.Analysis-class], [RLum.Results-class]
#'
#' @keywords utilities
#'
#' @examples
#'
#' ##Example based using data and from the calc_CentralDose() function
#'
#' ##load example data
#' data(ExampleData.DeValues, envir = environment())
#'
#' ##apply the central dose model 1st time
#' temp1 <- calc_CentralDose(ExampleData.DeValues$CA1)
#'
#' ##get results and store them in a new object
#' temp.get <- get_RLum(object = temp1)
#'
#' @md
#' @export
## generic definition only: the concrete behaviour is supplied by the
## class-specific setMethod() calls that follow
setGeneric("get_RLum", function (object, ...) {standardGeneric("get_RLum") })

# Method for get_RLum method for RLum objects in a list for a list of objects -------------------
#' @describeIn get_RLum
#' Returns a list of [RLum-class] objects that had been passed to [get_RLum]
#'
#' @param class [character] (*optional*): allows to define the class that gets selected if
#' applied to a list, e.g., if a list consists of different type of RLum-class objects, this
#' arguments allows to make selection. If nothing is provided, all RLum-objects are treated.
#'
#' @param null.rm [logical] (*with default*): option to get rid of empty and NULL objects
#'
#' @md
#' @export
setMethod("get_RLum",
          signature = "list",
          function(object, class = NULL, null.rm = FALSE, ...){

  ## dispatches get_RLum() over every RLum object in a plain list;
  ## non-RLum elements are dropped with a warning

  ##take care of the class argument
  if(!is.null(class)){
    # FIX: use class(x)[1] so objects carrying more than one class do not
    # break the vapply() length check
    sel <- class[1] == vapply(object, function(x) class(x)[1], character(1))

    if(any(sel))
      object <- object[sel]

    rm(sel)
  }

  ##remove all non-RLum objects
  ##get rid of all objects that are not of type RLum, this is better than leaving that
  ##to the user
  # FIX: seq_along() instead of 1:length(object), which iterates c(1, 0) for
  # an empty list and would crash on the subscript
  selection <- lapply(seq_along(object), function(x){
    if(inherits(object[[x]], what = "RLum")){
      ##it might be the case the object already comes with empty objects, this would
      ##cause a crash
      if(inherits(object[[x]], "RLum.Analysis") && length(object[[x]]@records) == 0)
        return(NULL)

      get_RLum(object[[x]], ...)

    } else {
      warning(paste0("[get_RLum()] object #", x,
                     " in the list was not of type 'RLum' and has been removed!"),
              call. = FALSE)
      return(NULL)
    }
  })

  ##remove empty or NULL objects after the selection ... if wanted
  if(null.rm){
    ##first set all empty objects to NULL ... for RLum.Analysis objects
    selection <- lapply(selection, function(s){
      if(inherits(s, "RLum.Analysis") && length(s@records) == 0) NULL else s
    })

    ##get rid of all NULL objects
    # FIX: vapply() guarantees a logical vector even for empty input
    # (sapply() would return list())
    selection <- selection[!vapply(selection, is.null, logical(1))]
  }

  return(selection)
})

#' Method to handle NULL if the user calls get_RLum
#'
#' @describeIn get_RLum
#'
#' Returns NULL
#'
#' @md
#' @export
setMethod("get_RLum", signature = "NULL", function(object, ...){NULL})
Luminescence/R/convert_Activity2Concentration.R0000644000176200001440000001211013604172511021350 0ustar liggesusers#' @title Convert Nuclide Activities to Concentrations and Vice Versa
#'
#' @description The function performs the conversion of the specific activities into
#' concentrations and vice versa for the nuclides U-238, Th-232 and K-40 to
#' harmonise the measurement unit with the required data input unit of
#' potential analytical tools for, e.g. dose rate calculation or related
#' functions such as [use_DRAC].
#'
#' @details The conversion from nuclide activity of a sample to nuclide concentration
#' is performed using conversion factors that are based on the mass-related
#' specific activity of the respective nuclides.
#' The factors can be calculated using the equation: #' #' \deqn{ #' A = avogadronumber * N.freq / N.mol.mass * ln(2) / N.half.life #' } #' #' \deqn{ #' f = A / 10^6 #' } #' #' where: #' #' - `A` - specific activity of the nuclide #' - `N.freq` - natural frequency of the isotop #' - `N.mol.mass` molare mass #' - `n.half.life` half-life of the nuclide #' #' example for U238: #' #' - \eqn{avogadronumber = 6.02214199*10^23} #' - \eqn{uran.half.life = 1.41*10^17} (in s) #' - \eqn{uran.mol.mass = 0.23802891} (in kg/mol) #' - \eqn{uran.freq = 0.992745} (in mol) #' #' - \eqn{A.U = avogadronumber * uran.freq / uran.mol.mass * ln(2) / uran.half.life} (specific activity in Bq/kg) #' - \eqn{f.U = A.kg / 10^6} #' #' @param data [data.frame] **(required)**: #' provide dose rate data (activity or concentration) in three columns. #' The first column indicates the nuclides, the 2nd column measured value and #' in the 3rd column its error value. Allowed nuclide data are #' `'U-238'`, `'Th-232'` and `'K-40'`. See examples for an example. #' #' @param input_unit [character] (*with default*): #' specify unit of input data given in the dose rate data frame, choose between #' 'Bq/kg' and 'ppm/%' the default is 'Bq/kg' #' #' @param verbose [logical] (*with default*): #' enable or disable verbose mode #' #' @section Function version: 0.1.0 #' #' @author Margret C. Fuchs, Helmholtz-Institut Freiberg for Resource Technology (Germany) #' #' @references #' Debertin, K., Helmer, R.G., 1988. Gamma- and X-ray Spectrometry with #' Semiconductor Detectors, Elsevier Science Publishers, p.283 #' #' Wiechen, A., Ruehle, H., Vogl, K., 2013. Bestimmung der massebezogenen #' Aktivitaet von Radionukliden. 
AEQUIVAL/MASSAKT, ISSN 1865-8725,
#' [https://www.bmu.de/fileadmin/Daten_BMU/Download_PDF/Strahlenschutz/aequival-massakt_v2013-07_bf.pdf]()
#'
#' @keywords IO
#'
#' @examples
#'
#' ##construct data.frame
#' data <- data.frame(
#'  NUCLIDES = c("U-238", "Th-232", "K-40"),
#'  VALUE = c(40,80,100),
#'  VALUE_ERROR = c(4,8,10),
#'  stringsAsFactors = FALSE)
#'
#' ##perform analysis
#' convert_Activity2Concentration(data)
#'
#' @md
#' @export
convert_Activity2Concentration <- function(
  data,
  input_unit = "Bq/kg",
  verbose = TRUE
){

  # Integrity checks ----------------------------------------------------------------------------
  if(missing(data)){
    stop("[convert_Activity2Concentration()] I'm still waiting for input data ...", call. = FALSE)
  }

  if(ncol(data)<3){
    stop("[convert_Activity2Concentration()] Input data.frame should have at least three columns.",
         call. = FALSE)
  }

  # Set output data.frame -----------------------------------------------------------------------
  output <- data.frame(
    NUCLIDE = rep(NA, nrow(data)),
    ACTIVIY = rep(NA, nrow(data)),
    ACTIVIY_ERROR = rep(NA, nrow(data)),
    CONC = rep(NA, nrow(data)),
    CONC_ERROR = rep(NA, nrow(data)),
    stringsAsFactors = FALSE
  )

  ##set column names
  # NOTE: the 'ACTIVIY' misspelling is kept on purpose: downstream code may
  # address the output columns by these exact names
  colnames(output) <- c(
    "NUCLIDE",
    "ACTIVIY (Bq/kg)",
    "ACTIVIY ERROR (Bq/kg)",
    "CONC. (ppm/%)",
    "CONC. ERROR (ppm/%)")

  ##set column for output
  output$NUCLIDE <- data[[1]]

  # Set conversion factors ----------------------------------------------------------------------
  # FIX: select rows via the first column as documented; the previous
  # data$NUCLIDE lookup silently matched nothing unless the input column
  # happened to be called 'NUCLIDE'
  u <- which(data[[1]] == "U-238")
  t <- which(data[[1]] == "Th-232")
  k <- which(data[[1]] == "K-40")

  ## specific activity per concentration unit (Bq/kg per ppm resp. %)
  convers.factor.U238 <- 12.35
  convers.factor.Th232 <- 4.057
  convers.factor.K40 <- 309

  # Run conversion ------------------------------------------------------------------------------

  ##Activity to concentration
  if(input_unit == "Bq/kg"){
    output[u,4:5] <- data[u,2:3] / convers.factor.U238
    output[t,4:5] <- data[t,2:3] / convers.factor.Th232
    output[k,4:5] <- data[k,2:3] / convers.factor.K40

    output[u,2:3] <- data[u,2:3]
    output[t,2:3] <- data[t,2:3]
    output[k,2:3] <- data[k,2:3]
  }

  ##Concentration to activity
  # FIX: this branch previously wrote the activities to columns 5:6 of a
  # 5-column data.frame (column 6 does not exist) and left the activity
  # columns empty; now concentrations go to 4:5 and activities to 2:3
  if(input_unit == "ppm/%"){
    output[u,4:5] <- data[u,2:3]
    output[t,4:5] <- data[t,2:3]
    output[k,4:5] <- data[k,2:3]

    output[u,2:3] <- data[u,2:3] * convers.factor.U238
    output[t,2:3] <- data[t,2:3] * convers.factor.Th232
    output[k,2:3] <- data[k,2:3] * convers.factor.K40
  }

  # Return value --------------------------------------------------------------------------------
  if(verbose){
    print(output)
  }

  invisible(set_RLum(
    class = "RLum.Results",
    data = list(data = output),
    info = list(call = sys.call())
  ))
}
Luminescence/R/calc_CommonDose.R0000644000176200001440000001623213231137116016241 0ustar liggesusers#' Apply the (un-)logged common age model after Galbraith et al. (1999) to a
#' given De distribution
#'
#' Function to calculate the common dose of a De distribution.
#'
#' **(Un-)logged model**
#'
#' When `log = TRUE` this function
#' calculates the weighted mean of logarithmic De values. Each of the estimates
#' is weighted by the inverse square of its relative standard error. The
#' weighted mean is then transformed back to the dose scale (Galbraith &
#' Roberts 2012, p. 14).
#' #' The log transformation is not applicable if the #' De estimates are close to zero or negative. In this case the un-logged model #' can be applied instead (`log = FALSE`). The weighted mean is then #' calculated using the un-logged estimates of De and their absolute standard #' error (Galbraith & Roberts 2012, p. 14). #' #' @param data [RLum.Results-class] or [data.frame] (**required**): #' for [data.frame]: two columns with De `(data[,1])` and De error `(data[,2])` #' #' @param sigmab [numeric] (*with default*): #' additional spread in De values. #' This value represents the expected overdispersion in the data should the sample be #' well-bleached (Cunningham & Walling 2012, p. 100). #' **NOTE**: For the logged model (`log = TRUE`) this value must be #' a fraction, e.g. 0.2 (= 20 \%). If the un-logged model is used (`log = FALSE`), #' sigmab must be provided in the same absolute units of the De values (seconds or Gray). #' #' @param log [logical] (*with default*): #' fit the (un-)logged central age model to De data #' #' @param ... currently not used. #' #' @return #' Returns a terminal output. In addition an #' [RLum.Results-class] object is returned containing the #' following element: #' #' \item{.$summary}{[data.frame] summary of all relevant model results.} #' \item{.$data}{[data.frame] original input data} #' \item{.$args}{[list] used arguments} #' \item{.$call}{[call] the function call} #' #' The output should be accessed using the function [get_RLum] #' #' @section Function version: 0.1.1 #' #' @author #' Christoph Burow, University of Cologne (Germany) #' #' @seealso [calc_CentralDose], [calc_FiniteMixture], #' [calc_FuchsLang2001], [calc_MinDose] #' #' @references #' Galbraith, R.F. & Laslett, G.M., 1993. Statistical models for #' mixed fission track ages. Nuclear Tracks Radiation Measurements 4, 459-470. #' #' Galbraith, R.F., Roberts, R.G., Laslett, G.M., Yoshida, H. & Olley, #' J.M., 1999. 
Optical dating of single grains of quartz from Jinmium rock #' shelter, northern Australia. Part I: experimental design and statistical #' models. Archaeometry 41, 339-364. #' #' Galbraith, R.F. & Roberts, R.G., 2012. Statistical aspects of equivalent dose and error calculation and #' display in OSL dating: An overview and some recommendations. Quaternary #' Geochronology 11, 1-27. #' #' **Further reading** #' #' Arnold, L.J. & Roberts, R.G., 2009. Stochastic modelling of multi-grain equivalent dose #' (De) distributions: Implications for OSL dating of sediment mixtures. #' Quaternary Geochronology 4, 204-230. #' #' Bailey, R.M. & Arnold, L.J., 2006. Statistical modelling of single grain quartz De distributions and an #' assessment of procedures for estimating burial dose. Quaternary Science #' Reviews 25, 2475-2502. #' #' Cunningham, A.C. & Wallinga, J., 2012. Realizing the potential of fluvial archives using robust OSL chronologies. #' Quaternary Geochronology 12, 98-106. #' #' Rodnight, H., Duller, G.A.T., Wintle, A.G. & Tooth, S., 2006. Assessing the reproducibility and accuracy #' of optical dating of fluvial deposits. Quaternary Geochronology, 1 109-120. #' #' Rodnight, H., 2008. How many equivalent dose values are needed to #' obtain a reproducible distribution?. Ancient TL 26, 3-10. #' #' @examples #' #' ## load example data #' data(ExampleData.DeValues, envir = environment()) #' #' ## apply the common dose model #' calc_CommonDose(ExampleData.DeValues$CA1) #' #' @md #' @export calc_CommonDose <- function( data, sigmab, log=TRUE, ... 
) {

  ##============================================================================##
  ## CONSISTENCY CHECK OF INPUT DATA
  ##============================================================================##

  ## accept a data.frame or an RLum.Results object (from which the data slot
  ## is extracted); everything else is rejected
  if (!missing(data)) {
    # FIX: the error message wrongly referred to calc_CentralDose
    # (copy & paste error); also use inherits() and scalar '&&'
    if (!inherits(data, "data.frame") && !inherits(data, "RLum.Results")) {
      stop("[calc_CommonDose()] 'data' object has to be of type 'data.frame' or 'RLum.Results'!",
           call. = FALSE)
    } else {
      if (inherits(data, "RLum.Results"))
        data <- get_RLum(data, "data")
    }
  }

  ## try to enforce the expected column names; a failure (e.g. wrong number
  ## of columns) is caught by the check below
  try(colnames(data) <- c("ED", "ED_Error"), silent = TRUE)
  # FIX: replaced cat() followed by stop(domain = NA) — which raised an
  # empty, uninformative error — with a proper error message
  if (colnames(data[1]) != "ED" || colnames(data[2]) != "ED_Error")
    stop("[calc_CommonDose()] Columns must be named 'ED' and 'ED_Error'.", call. = FALSE)

  if (!missing(sigmab)) {
    # FIX: the 0..1 restriction only applies to the logged model; for the
    # un-logged model sigmab is given in absolute units (see @param sigmab)
    # and may legitimately exceed 1
    if (log && (sigmab < 0 || sigmab > 1))
      stop("[calc_CommonDose()] sigmab needs to be given as a fraction between 0 and 1 (e.g. 0.2).",
           call. = FALSE)
    if (!log && sigmab < 0)
      stop("[calc_CommonDose()] sigmab needs to be a non-negative value.", call. = FALSE)
  }

  ##============================================================================##
  ## ADDITIONAL ARGUMENTS
  ##============================================================================##
  settings <- list(verbose = TRUE)
  settings <- modifyList(settings, list(...))

  ##============================================================================##
  ## CALCULATIONS
  ##============================================================================##

  # set default value of sigmab
  if (missing(sigmab))
    sigmab <- 0

  # calculate yu = log(ED) and su = se(logED)
  if (log) {
    yu <- log(data$ED)
    su <- sqrt((data$ED_Error / data$ED)^2 + sigmab^2)
  } else {
    yu <- data$ED
    su <- sqrt((data$ED_Error)^2 + sigmab^2)
  }

  # calculate weights (inverse variance)
  wu <- 1 / su^2
  delta <- sum(wu * yu) / sum(wu)
  n <- length(yu)

  # standard error
  sedelta <- 1 / sqrt(sum(wu))
  if (!log)
    sedelta <- sedelta / delta

  # back-transform to the dose scale for the logged model
  if (log)
    delta <- exp(delta)

  ##============================================================================##
  ## TERMINAL OUTPUT
  ##============================================================================##
  if (settings$verbose) {
    cat("\n [calc_CommonDose]")
    cat(paste("\n\n----------- meta data --------------"))
    cat(paste("\n n: ", n))
    cat(paste("\n log: ", if (log == TRUE) {"TRUE"} else {"FALSE"}))
    cat(paste("\n----------- dose estimate ----------"))
    cat(paste("\n common dose: ", round(delta, 2)))
    cat(paste("\n SE: ", round(delta * sedelta, 2)))
    cat(paste("\n rel. SE [%]: ", round(sedelta * 100, 2)))
    cat(paste("\n------------------------------------\n\n"))
  }

  ##============================================================================##
  ## RETURN VALUES
  ##============================================================================##
  summary <- data.frame(de = delta, de_err = delta * sedelta)
  call <- sys.call()
  args <- list(log = log, sigmab = sigmab)

  newRLumResults.calc_CommonDose <- set_RLum(
    class = "RLum.Results",
    data = list(summary = summary,
                data = data,
                args = args,
                call = call))

  invisible(newRLumResults.calc_CommonDose)
}
Luminescence/R/set_RLum.R0000644000176200001440000000511613231137116014743 0ustar liggesusers#' General set function for RLum S4 class objects
#'
#' Function calls object-specific set functions for RLum S4 class objects.
#'
#' The function provides a generalised access point for specific
#' [RLum-class] objects.\cr
#' Depending on the given class, the corresponding method to create an object
#' from this class will be selected. Allowed additional arguments can be found
#' in the documentations of the corresponding [RLum-class] class:
#' - [RLum.Data.Curve-class],
#' - [RLum.Data.Image-class],
#' - [RLum.Data.Spectrum-class],
#' - [RLum.Analysis-class],
#' - [RLum.Results-class]
#'
#' @param class [RLum-class] (**required**):
#' name of the S4 class to create
#'
#' @param originator [character] (*automatic*):
#' contains the name of the calling function (the function that produces this object);
#' can be set manually.
#'
#' @param .uid [character] (*automatic*):
#' sets an unique ID for this object using the internal C++ function `create_UID`.
#'
#' @param .pid [character] (*with default*):
#' option to provide a parent id for nesting at will.
#' #' @param ... further arguments that one might want to pass to the specific set method #' #' @return #' Returns an object of the specified class. #' #' @section Function version: 0.3.0 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #' @seealso [RLum.Data.Curve-class], [RLum.Data.Image-class], #' [RLum.Data.Spectrum-class], [RLum.Analysis-class], [RLum.Results-class] #' #' @keywords utilities #' #' @examples #' #' ##produce empty objects from each class #' set_RLum(class = "RLum.Data.Curve") #' set_RLum(class = "RLum.Data.Spectrum") #' set_RLum(class = "RLum.Data.Spectrum") #' set_RLum(class = "RLum.Analysis") #' set_RLum(class = "RLum.Results") #' #' ##produce a curve object with arbitrary curve values #' object <- set_RLum( #' class = "RLum.Data.Curve", #' curveType = "arbitrary", #' recordType = "OSL", #' data = matrix(c(1:100,exp(-c(1:100))),ncol = 2)) #' #' ##plot this curve object #' plot_RLum(object) #' #' @md #' @export setGeneric("set_RLum", function (class, originator, .uid = create_UID(), .pid = NA_character_, ... ) { class(class) <- as.character(class) if(missing(originator)) { if (is(sys.call(which = -1)[[1]], "language")) { originator <- as.character(sys.call(which = -1)[[1]]) ##account for calls using the double colons, in this case the vector is ##of length 3, not only 1 if(length(originator) == 3){ originator <- originator[3] } } else{ originator <- NA_character_ } } standardGeneric("set_RLum") }) Luminescence/R/CW2pLM.R0000644000176200001440000001021613231137116014212 0ustar liggesusers#' Transform a CW-OSL curve into a pLM-OSL curve #' #' Transforms a conventionally measured continuous-wave (CW) curve into a #' pseudo linearly modulated (pLM) curve using the equations given in Bulur #' (2000). 
#' #' According to Bulur (2000) the curve data are transformed by introducing two #' new parameters P (stimulation period) and u (transformed time): #' #' \deqn{P=2*max(t)} \deqn{u=\sqrt{(2*t*P)}} #' #' The new count values are then calculated by #' \deqn{ctsNEW = cts(u/P)} #' #' and the returned `data.frame` is produced by: `data.frame(u,ctsNEW)` #' #' The output of the function can be further used for LM-OSL fitting. #' #' @param values [RLum.Data.Curve-class] or [data.frame] (**required**): #' `RLum.Data.Curve` data object. Alternatively, a `data.frame` of the measured #' curve data of type stimulation time (t) (`values[,1]`) and measured counts (cts) #' (`values[,2]`) can be provided. #' #' @return #' The function returns the same data type as the input data type with #' the transformed curve values ([data.frame] or [RLum.Data.Curve-class]). #' #' @note #' The transformation is recommended for curves recorded with a channel #' resolution of at least 0.05 s/channel. #' #' @section Function version: 0.4.1 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #' @seealso [CW2pHMi], [CW2pLMi], [CW2pPMi], [fit_LMCurve], [lm], #' [RLum.Data.Curve-class] #' #' #' @references #' Bulur, E., 2000. A simple transformation for converting CW-OSL #' curves to LM-OSL curves. Radiation Measurements, 32, 141-145. #' #' **Further Reading** #' #' Bulur, E., 1996. An Alternative Technique For Optically Stimulated #' Luminescence (OSL) Experiment. Radiation Measurements, 26, 701-709. 
#' #' @keywords manip #' #' @examples #' #' ##read curve from CWOSL.SAR.Data transform curve and plot values #' data(ExampleData.BINfileData, envir = environment()) #' #' ##read id for the 1st OSL curve #' id.OSL <- CWOSL.SAR.Data@@METADATA[CWOSL.SAR.Data@@METADATA[,"LTYPE"] == "OSL","ID"] #' #' ##produce x and y (time and count data for the data set) #' x<-seq(CWOSL.SAR.Data@@METADATA[id.OSL[1],"HIGH"]/CWOSL.SAR.Data@@METADATA[id.OSL[1],"NPOINTS"], #' CWOSL.SAR.Data@@METADATA[id.OSL[1],"HIGH"], #' by = CWOSL.SAR.Data@@METADATA[id.OSL[1],"HIGH"]/CWOSL.SAR.Data@@METADATA[id.OSL[1],"NPOINTS"]) #' y <- unlist(CWOSL.SAR.Data@@DATA[id.OSL[1]]) #' values <- data.frame(x,y) #' #' ##transform values #' values.transformed <- CW2pLM(values) #' #' ##plot #' plot(values.transformed) #' #' @md #' @export CW2pLM <- function( values ){ # Integrity Checks -------------------------------------------------------- ##(1) data.frame or RLum.Data.Curve object? if(is(values, "data.frame") == FALSE & is(values, "RLum.Data.Curve") == FALSE){ stop("[CW2pLM()] 'values' object has to be of type 'data.frame' or 'RLum.Data.Curve'!", call. = FALSE) } ##(2) if the input object is an 'RLum.Data.Curve' object check for allowed curves if(is(values, "RLum.Data.Curve") == TRUE){ if(!grepl("OSL", values@recordType) & !grepl("IRSL", values@recordType)){ stop(paste("[CW2pLM()] recordType ",values@recordType, " is not allowed for the transformation!", sep=""), call. 
= FALSE) }else{ temp.values <- as(values, "data.frame") } }else{ temp.values <- values } # Calculation ------------------------------------------------------------- ##curve transformation P<-2*max(temp.values[,1]) u<-((2*temp.values[,1]*P)^0.5) ##cw >> plm conversion, according Bulur, 2000 temp.values[,2]<-temp.values[,2]*(u/P) temp.values<-data.frame(u,temp.values[,2]) # Return values ----------------------------------------------------------- ##returns the same data type as the input if(is(values, "data.frame") == TRUE){ values <- temp.values return(values) }else{ newRLumDataCurves.CW2pLM <- set_RLum( class = "RLum.Data.Curve", recordType = values@recordType, data = as.matrix(temp.values), info = values@info) return(newRLumDataCurves.CW2pLM) } } Luminescence/R/calc_AliquotSize.R0000644000176200001440000004073113231137116016446 0ustar liggesusers#' Estimate the amount of grains on an aliquot #' #' Estimate the number of grains on an aliquot. Alternatively, the packing #' density of an aliquot is computed. #' #' This function can be used to either estimate the number of grains on an #' aliquot or to compute the packing density depending on the the arguments #' provided. #' #' The following function is used to estimate the number of grains `n`: #' #' \deqn{n = (\pi*x^2)/(\pi*y^2)*d} #' #' where `x` is the radius of the aliquot size (microns), `y` is the mean #' radius of the mineral grains (mm) and `d` is the packing density #' (value between 0 and 1). #' #' **Packing density** #' #' The default value for `packing.density` is 0.65, which is the mean of #' empirical values determined by Heer et al. (2012) and unpublished data from #' the Cologne luminescence laboratory. If `packing.density = "Inf"` a maximum #' density of \eqn{\pi/\sqrt12 = 0.9068\ldots} is used. 
However, note that #' this value is not appropriate as the standard preparation procedure of #' aliquots resembles a PECC (*"Packing Equal Circles in a Circle"*) problem #' where the maximum packing density is asymptotic to about 0.87. #' #' **Monte Carlo simulation** #' #' The number of grains on an aliquot can be estimated by Monte Carlo simulation #' when setting `MC = TRUE`. Each of the parameters necessary to calculate #' `n` (`x`, `y`, `d`) are assumed to be normally distributed with means #' \eqn{\mu_x, \mu_y, \mu_d} and standard deviations \eqn{\sigma_x, \sigma_y, \sigma_d}. #' #' For the mean grain size random samples are taken first from #' \eqn{N(\mu_y, \sigma_y)}, where \eqn{\mu_y = mean.grain.size} and #' \eqn{\sigma_y = (max.grain.size-min.grain.size)/4} so that 95\% of all #' grains are within the provided the grain size range. This effectively takes #' into account that after sieving the sample there is still a small chance of #' having grains smaller or larger than the used mesh sizes. For each random #' sample the mean grain size is calculated, from which random subsamples are #' drawn for the Monte Carlo simulation. #' #' The packing density is assumed #' to be normally distributed with an empirically determined \eqn{\mu = 0.65} #' (or provided value) and \eqn{\sigma = 0.18}. The normal distribution is #' truncated at `d = 0.87` as this is approximately the maximum packing #' density that can be achieved in PECC problem. #' #' The sample diameter has #' \eqn{\mu = sample.diameter} and \eqn{\sigma = 0.2} to take into account #' variations in sample disc preparation (i.e. applying silicon spray to the #' disc). A lower truncation point at `x = 0.5` is used, which assumes #' that aliqouts with smaller sample diameters of 0.5 mm are discarded. #' Likewise, the normal distribution is truncated at 9.8 mm, which is the #' diameter of the sample disc. 
#' #' For each random sample drawn from the #' normal distributions the amount of grains on the aliquot is calculated. By #' default, `10^5` iterations are used, but can be reduced/increased with #' `MC.iter` (see `...`). The results are visualised in a bar- and #' boxplot together with a statistical summary. #' #' @param grain.size [numeric] (**required**): #' mean grain size (microns) or a range of grain sizes from which the #' mean grain size is computed (e.g. `c(100,200)`). #' #' @param sample.diameter [numeric] (**required**): #' diameter (mm) of the targeted area on the sample carrier. #' #' @param packing.density [numeric] (*with default*): #' empirical value for mean packing density. \cr #' If `packing.density = "Inf"` a hexagonal structure on an infinite plane with #' a packing density of \eqn{0.906\ldots} is assumed. #' #' @param MC [logical] (*optional*): #' if `TRUE` the function performs a monte carlo simulation for estimating the #' amount of grains on the sample carrier and assumes random errors in grain #' size distribution and packing density. Requires a vector with min and max #' grain size for `grain.size`. For more information see details. #' #' @param grains.counted [numeric] (*optional*): #' grains counted on a sample carrier. If a non-zero positive integer is provided this function #' will calculate the packing density of the aliquot. If more than one value is #' provided the mean packing density and its standard deviation is calculated. #' Note that this overrides `packing.density`. #' #' @param plot [logical] (*with default*): #' plot output (`TRUE`/`FALSE`) #' #' @param ... further arguments to pass (`main, xlab, MC.iter`). #' #' @return #' Returns a terminal output. 
In addition an #' [RLum.Results-class] object is returned containing the #' following element: #' #' \item{.$summary}{[data.frame] summary of all relevant calculation results.} #' \item{.$args}{[list] used arguments} #' \item{.$call}{[call] the function call} #' \item{.$MC}{[list] results of the Monte Carlo simulation} #' #' The output should be accessed using the function [get_RLum]. #' #' @section Function version: 0.31 #' #' @author Christoph Burow, University of Cologne (Germany) #' #' @references #' Duller, G.A.T., 2008. Single-grain optical dating of Quaternary #' sediments: why aliquot size matters in luminescence dating. Boreas 37, #' 589-612. #' #' Heer, A.J., Adamiec, G., Moska, P., 2012. How many grains #' are there on a single aliquot?. Ancient TL 30, 9-16. #' #' **Further reading** #' #' Chang, H.-C., Wang, L.-C., 2010. A simple proof of Thue's #' Theorem on Circle Packing. [http://arxiv.org/pdf/1009.4322v1.pdf](), #' 2013-09-13. #' #' Graham, R.L., Lubachevsky, B.D., Nurmela, K.J., #' Oestergard, P.R.J., 1998. Dense packings of congruent circles in a circle. #' Discrete Mathematics 181, 139-154. #' #' Huang, W., Ye, T., 2011. Global #' optimization method for finding dense packings of equal circles in a circle. #' European Journal of Operational Research 210, 474-481. #' #' @examples #' #' ## Estimate the amount of grains on a small aliquot #' calc_AliquotSize(grain.size = c(100,150), sample.diameter = 1, MC.iter = 100) #' #' ## Calculate the mean packing density of large aliquots #' calc_AliquotSize(grain.size = c(100,200), sample.diameter = 8, #' grains.counted = c(2525,2312,2880), MC.iter = 100) #' #' @md #' @export calc_AliquotSize <- function( grain.size, sample.diameter, packing.density = 0.65, MC = TRUE, grains.counted, plot=TRUE, ... 
){ ##==========================================================================## ## CONSISTENCY CHECK OF INPUT DATA ##==========================================================================## if(length(grain.size) == 0 | length(grain.size) > 2) { cat(paste("\nPlease provide the mean grain size or a range", "of grain sizes (in microns).\n"), fill = FALSE) stop(domain=NA) } if(packing.density < 0 | packing.density > 1) { if(packing.density == "inf") { } else { cat(paste("\nOnly values between 0 and 1 allowed for packing density!\n")) stop(domain=NA) } } if(sample.diameter < 0) { cat(paste("\nPlease provide only positive integers.\n")) stop(domain=NA) } if (sample.diameter > 9.8) warning("\n A sample diameter of ", sample.diameter ," mm was specified, but common sample discs are 9.8 mm in diameter.", call. = FALSE) if(missing(grains.counted) == FALSE) { if(MC == TRUE) { MC = FALSE cat(paste("\nMonte Carlo simulation is only available for estimating the", "amount of grains on the sample disc. Automatically set to", "FALSE.\n")) } } if(MC == TRUE && length(grain.size) != 2) { cat(paste("\nPlease provide a vector containing the min and max grain", "grain size(e.g. c(100,150) when using Monte Carlo simulations.\n")) stop(domain=NA) } ##==========================================================================## ## ... ARGUMENTS ##==========================================================================## # set default parameters settings <- list(MC.iter = 10^4, verbose = TRUE) # override settings with user arguments settings <- modifyList(settings, list(...)) ##==========================================================================## ## CALCULATIONS ##==========================================================================## # calculate the mean grain size range.flag<- FALSE if(length(grain.size) == 2) { gs.range<- grain.size grain.size<- mean(grain.size) range.flag<- TRUE } # use ~0.907... 
from Thue's Theorem as packing density if(packing.density == "inf") { packing.density = pi/sqrt(12) } # function to calculate the amount of grains calc_n<- function(sd, gs, d) { n<- ((pi*(sd/2)^2)/ (pi*(gs/2000)^2))*d return(n) } # calculate the amount of grains on the aliquot if(missing(grains.counted) == TRUE) { n.grains<- calc_n(sample.diameter, grain.size, packing.density) ##========================================================================## ## MONTE CARLO SIMULATION if(MC == TRUE && range.flag == TRUE) { # create a random set of packing densities assuming a normal # distribution with the empirically determined standard deviation of # 0.18. d.mc<- rnorm(settings$MC.iter, packing.density, 0.18) # in a PECC the packing density can not be larger than ~0.87 d.mc[which(d.mc > 0.87)]<- 0.87 d.mc[which(d.mc < 0.25)]<- 0.25 # create a random set of sample diameters assuming a normal # distribution with an assumed standard deviation of # 0.2. For a more conservative estimate this is divided by 2. sd.mc<- rnorm(settings$MC.iter, sample.diameter, 0.2) # it is assumed that sample diameters < 0.5 mm either do not # occur, or are discarded. Either way, any smaller sample # diameter is capped at 0.5. # Also, the sample diameter can not be larger than the sample # disc, i.e. 9.8 mm. sd.mc[which(sd.mc <0.5)]<- 0.5 if (sample.diameter <= 9.8) sd.mc[which(sd.mc >9.8)]<- 9.8 # create random samples assuming a normal distribution # with the mean grain size as mean and half the range (min:max) # as standard deviation. For a more conservative estimate this # is further devided by 2, so half the range is regarded as # two sigma. gs.mc<- rnorm(settings$MC.iter, grain.size, diff(gs.range)/4) # draw random samples from the grain size spectrum (gs.mc) and calculate # the mean for each sample. 
This gives an approximation of the variation # in mean grain size on the sample disc gs.mc.sampleMean<- vector(mode = "numeric") for(i in 1:length(gs.mc)) { gs.mc.sampleMean[i]<- mean(sample(gs.mc, calc_n( sample(sd.mc, size = 1), grain.size, sample(d.mc, size = 1) ), replace = TRUE)) } # create empty vector for MC estimates of n MC.n<- vector(mode="numeric") # calculate n for each MC data set for(i in 1:length(gs.mc)) { MC.n[i]<- calc_n(sd.mc[i], gs.mc.sampleMean[i], d.mc[i]) } # summarize MC estimates MC.q<- quantile(MC.n, c(0.05,0.95)) MC.n.kde<- density(MC.n, n = 10000) # apply student's t-test MC.t.test<- t.test(MC.n) MC.t.lower<- MC.t.test["conf.int"]$conf.int[1] MC.t.upper<- MC.t.test["conf.int"]$conf.int[2] MC.t.se<- (MC.t.upper-MC.t.lower)/3.92 # get unweighted statistics from calc_Statistics() function MC.stats<- calc_Statistics(as.data.frame(cbind(MC.n,0.0001)))$unweighted } }#EndOf:estimate number of grains ##========================================================================## ## CALCULATE PACKING DENSITY if(missing(grains.counted) == FALSE) { area.container<- pi*sample.diameter^2 if(length(grains.counted) == 1) { area.grains<- (pi*(grain.size/1000)^2)*grains.counted packing.density<- area.grains/area.container } else { packing.densities<- length(grains.counted) for(i in 1:length(grains.counted)) { area.grains<- (pi*(grain.size/1000)^2)*grains.counted[i] packing.densities[i]<- area.grains/area.container } std.d<- sd(packing.densities) } } ##==========================================================================## ##TERMINAL OUTPUT ##==========================================================================## if (settings$verbose) { cat("\n [calc_AliquotSize]") cat(paste("\n\n ---------------------------------------------------------")) cat(paste("\n mean grain size (microns) :", grain.size)) cat(paste("\n sample diameter (mm) :", sample.diameter)) if(missing(grains.counted) == FALSE) { if(length(grains.counted) == 1) { cat(paste("\n counted 
grains :", grains.counted)) } else { cat(paste("\n mean counted grains :", round(mean(grains.counted)))) } } if(missing(grains.counted) == TRUE) { cat(paste("\n packing density :", round(packing.density,3))) } if(missing(grains.counted) == FALSE) { if(length(grains.counted) == 1) { cat(paste("\n packing density :", round(packing.density,3))) } else { cat(paste("\n mean packing density :", round(mean(packing.densities),3))) cat(paste("\n standard deviation :", round(std.d,3))) } } if(missing(grains.counted) == TRUE) { cat(paste("\n number of grains :", round(n.grains,0))) } if(MC == TRUE && range.flag == TRUE) { cat(paste(cat(paste("\n\n --------------- Monte Carlo Estimates -------------------")))) cat(paste("\n number of iterations (n) :", settings$MC.iter)) cat(paste("\n median :", round(MC.stats$median))) cat(paste("\n mean :", round(MC.stats$mean))) cat(paste("\n standard deviation (mean) :", round(MC.stats$sd.abs))) cat(paste("\n standard error (mean) :", round(MC.stats$se.abs, 1))) cat(paste("\n 95% CI from t-test (mean) :", round(MC.t.lower), "-", round(MC.t.upper))) cat(paste("\n standard error from CI (mean):", round(MC.t.se, 1))) cat(paste("\n ---------------------------------------------------------\n")) } else { cat(paste("\n ---------------------------------------------------------\n")) } } ##==========================================================================## ##RETURN VALUES ##==========================================================================## # prepare return values for mode: estimate grains if(missing(grains.counted) == TRUE) { summary<- data.frame(grain.size = grain.size, sample.diameter = sample.diameter, packing.density = packing.density, n.grains = round(n.grains,0), grains.counted = NA) } # prepare return values for mode: estimate packing density/densities if(missing(grains.counted) == FALSE) { # return values if only one value for counted.grains is provided if(length(grains.counted) == 1) { summary<- data.frame(grain.size = 
grain.size, sample.diameter = sample.diameter, packing.density = packing.density, n.grains = NA, grains.counted = grains.counted) } else { # return values if more than one value for counted.grains is provided summary<- data.frame(rbind(1:5)) colnames(summary)<- c("grain.size", "sample.diameter", "packing.density", "n.grains","grains.counted") for(i in 1:length(grains.counted)) { summary[i,]<- c(grain.size, sample.diameter, packing.densities[i], n.grains = NA, grains.counted[i]) } } } if(!MC) { MC.n<- NULL MC.stats<- NULL MC.n.kde<- NULL MC.t.test<- NULL MC.q<- NULL } if(missing(grains.counted)) grains.counted<- NA call<- sys.call() args<- as.list(sys.call())[-1] # create S4 object newRLumResults.calc_AliquotSize <- set_RLum( class = "RLum.Results", data = list( summary=summary, MC=list(estimates=MC.n, statistics=MC.stats, kde=MC.n.kde, t.test=MC.t.test, quantile=MC.q)), info = list(call=call, args=args)) ##=========## ## PLOTTING if(plot==TRUE) { try(plot_RLum.Results(newRLumResults.calc_AliquotSize, ...)) } # Return values invisible(newRLumResults.calc_AliquotSize) } Luminescence/R/convert_RLum2Risoe.BINfileData.R0000644000176200001440000002030013417223074020752 0ustar liggesusers#'Converts RLum.Analysis-objects and RLum.Data.Curve-objects to RLum2Risoe.BINfileData-objects #' #'The functions converts [RLum.Analysis-class] and [RLum.Data.Curve-class] objects and a [list] of those #'to [Risoe.BINfileData-class] objects. The funtion intends to provide a minimum of compatiblility #'between both formats. The created [RLum.Analysis-class] object can be later exported to a #'BIN-file using the function [write_R2BIN]. 
#' #'@param object [RLum.Analysis-class] or [RLum.Data.Curve-class] (**required**): input object to #'be converted #' #'@param keep.position.number [logical] (with default): keeps the original position number or re-calculate #'the numbers to avoid doubling #' #'@section Function version: 0.1.3 #' #'@author Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #'@seealso [RLum.Analysis-class], [RLum.Data.Curve-class], [write_R2BIN] #' #'@note The conversion can be never perfect. The `RLum` objects may contain information which are #'not part of the [Risoe.BINfileData-class] definition. #' #'@keywords IO #' #'@examples #' #'##simple conversion using the example dataset #'data(ExampleData.RLum.Analysis, envir = environment()) #'convert_RLum2Risoe.BINfileData(IRSAR.RF.Data) #' #'@return The function returns a [Risoe.BINfileData-class] object. #' #'@md #'@export convert_RLum2Risoe.BINfileData <- function( object, keep.position.number = FALSE ){ # Self call ----------------------------------------------------------------------------------- if(is(object, "list")){ ##call function object_list <- lapply(object, function(x) { convert_RLum2Risoe.BINfileData(x) }) ##merge objects if(length(object_list) == 1){ return(object_list[[1]]) }else{ return(merge_Risoe.BINfileData(object_list, keep.position.number = keep.position.number)) } } # Integrity tests ----------------------------------------------------------------------------- ##RLum.Data.Curve if(class(object) == "RLum.Data.Curve") object <- set_RLum(class = "RLum.Analysis", records = list(object)) ##RLum.Analysis - final check, from here we just accept RLum.Analysis if(class(object) != "RLum.Analysis") stop("[convert_RLum2Risoe.BINfileData()] Input object needs to be of class 'RLum.Analysis', 'RLum.Data.Curve' or a 'list' of such objects!", call. 
= FALSE) # Set PROTOTYPE & DATA -------------------------------------------------------------------------- ##set Risoe.BINfiledata prototype prototype <- set_Risoe.BINfileData() ##grep allowed names allowed_names <- names(prototype@METADATA) ##grep records (this will avoid further the subsetting) records <- object@records ##write DATA prototype@DATA <- lapply(records, function(x) {x@data[,2]}) # Create METADATA ----------------------------------------------------------------------------- ##create METADATA list METADATA_list <- lapply(records, function(x){ ##grep matching arguments only temp <- x@info[toupper(names(x@info)) %in% allowed_names] ##account for the case that no matching name was found if(length(temp) != 0){ ##correct names names(temp) <- toupper(names(temp)) return(temp) }else{ return(list(ID = NA)) } }) ##make data.frame out of it METADATA_df <- as.data.frame(data.table::rbindlist(METADATA_list, fill = TRUE)) ##write METADATA prototype@METADATA <- merge(prototype@METADATA, METADATA_df, all = TRUE) ## +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++# ##fill various missing values ##helper function ...otherwise the code gets too nasty ... only used for NA values! 
.replace <- function(field, value){ prototype@METADATA[[field]][which(sapply(prototype@METADATA[[field]], is.na))] <<- value } ## >> ID << ## prototype@METADATA[["ID"]] <- 1:length(records) ## >> SEL << ## prototype@METADATA[["SEL"]] <- TRUE ## >> VERSION << ## .replace("VERSION", "08") ## >> RECTYPE << ## .replace("RECTYPE", 0) ## >> NPOINTS << ## if(any(is.na(prototype@METADATA[["NPOINTS"]]))){ prototype@METADATA[["NPOINTS"]] <- vapply(records, function(x){ length(x@data)/2 }, FUN.VALUE = numeric(1)) } ## >> LENGTH << + PREVIOUS if(any(is.na(prototype@METADATA[["LENGTH"]]))){ ##even we have values here before, it will make no difference prototype@METADATA[["LENGTH"]] <- (prototype@METADATA[["NPOINTS"]] * 4) + 507 prototype@METADATA[["PREVIOUS"]] <- c(0,prototype@METADATA[["LENGTH"]][1:length(records) - 1]) } ## >> RUN << ## ##if there is only one NA, we should overwrite it, to be consistent if(any(is.na(prototype@METADATA[["RUN"]]))) prototype@METADATA[["RUN"]] <- 1:length(records) ## >> SET << ## .replace("SET", 1) ## >> GRAIN << ## .replace("GRAIN", 0) ## >> GRAINNUMBER << ## .replace("GRAINNUMBER", 0) ## >> USER << ## .replace("USER", "RLum.Data") ## >> DTYPE << ## .replace("DTYPE", "Natural") ## >> LIGHTSOURCE << ## .replace("LIGHTSOURCE", "None") ## >> SAMPLE << ## if(any(is.na(prototype@METADATA[["SAMPLE"]]))){ ##get only the id's to change temp_id <- which(is.na(prototype@METADATA[["SAMPLE"]])) ##set name prototype@METADATA[["SAMPLE"]] <- vapply(temp_id, function(x){ if(any(names(records[[x]]@info) == "name")){ records[[x]]@info$name }else{ "unknown" } }, character(length = 1)) } ## >> COMMENT << ## .replace("COMMENT", "convert_RLum2Risoe.BINfileData()") ## >> FNAME << ## .replace("FNAME", " ") ## >> DATE << ## + TIME if(any(is.na(prototype@METADATA[["DATE"]]))){ ##get only the id's to change temp_id <- which(is.na(prototype@METADATA[["DATE"]])) ##set date prototype@METADATA[["DATE"]] <- vapply(temp_id, function(x){ if(any(names(records[[x]]@info) == 
"startDate")){ strtrim(records[[x]]@info[["startDate"]], width = 8) }else{ as.character(format(Sys.Date(),"%Y%m%d")) } }, character(length = 1)) ##set time prototype@METADATA[["TIME"]] <- vapply(temp_id, function(x){ if(any(names(records[[x]]@info) == "startDate")){ substr(records[[x]]@info[["startDate"]], start = 9, stop = 14) }else{ as.character(format(Sys.time(),"%H%m%S")) } }, character(length = 1)) } ## >> LOW << ## if(any(is.na(prototype@METADATA[["LOW"]]))){ ##get only the id's to change temp_id <- which(is.na(prototype@METADATA[["LOW"]])) ##set date prototype@METADATA[["LOW"]] <- vapply(temp_id, function(x){ min(records[[x]]@data[,1]) }, numeric(length = 1)) } ## >> HIGH << ## if(any(is.na(prototype@METADATA[["HIGH"]]))){ ##get only the id's to change temp_id <- which(is.na(prototype@METADATA[["HIGH"]])) ##set date prototype@METADATA[["HIGH"]] <- vapply(temp_id, function(x){ max(records[[x]]@data[,1]) }, numeric(length = 1)) } ## >> SEQUENCE << ## .replace("SEQUENCE", "") # METADA >> correct information ------------------------------------------------------------------------- ##we have to correct the LTYPE, the format is rather strict ##(a) create LTYPE from names of objects LTYPE <- vapply(names(object), function(s){ if(grepl(pattern = " (", x = s, fixed = TRUE)){ strsplit(s, split = " (", fixed = TRUE)[[1]][1] }else{ s } }, FUN.VALUE = character(1)) ##(b) replace characters ##(b.1) irradiation LTYPE <- gsub(pattern = "irradiation", replacement = "USER", fixed = TRUE, x = LTYPE) ##(b.2 RF LTYPE <- gsub(pattern = "RF", replacement = "RL", fixed = TRUE, x = LTYPE) ##set value prototype@METADATA[["LTYPE"]] <- LTYPE ##correct USER ##limit user to 8 characters prototype@METADATA[["USER"]] <- strtrim(prototype@METADATA[["USER"]], 8) ##correct SAMPLE ##limit user to 21 characters prototype@METADATA[["SAMPLE"]] <- strtrim(prototype@METADATA[["SAMPLE"]], 20) ##replace all remaining NA values by 0 ##all remaining values are numbers prototype@METADATA <- 
replace(prototype@METADATA, is.na(prototype@METADATA), 0L) # Return -------------------------------------------------------------------------------------- return(prototype) } Luminescence/R/CW2pLMi.R0000644000176200001440000002236513231137116014373 0ustar liggesusers#' Transform a CW-OSL curve into a pLM-OSL curve via interpolation under linear #' modulation conditions #' #' Transforms a conventionally measured continuous-wave (CW) OSL-curve into a #' pseudo linearly modulated (pLM) curve under linear modulation conditions #' using the interpolation procedure described by Bos & Wallinga (2012). #' #' The complete procedure of the transformation is given in Bos & Wallinga #' (2012). The input `data.frame` consists of two columns: time (t) and #' count values (CW(t)) #' #' **Nomenclature** #' #' - P = stimulation time (s) #' - 1/P = stimulation rate (1/s) #' #' **Internal transformation steps** #' #' (1) #' log(CW-OSL) values #' #' (2) #' Calculate t' which is the transformed time: #' \deqn{t' = 1/2*1/P*t^2} #' #' (3) #' Interpolate CW(t'), i.e. use the log(CW(t)) to obtain the count values #' for the transformed time (t'). Values beyond `min(t)` and `max(t)` #' produce `NA` values. #' #' (4) #' Select all values for t' < `min(t)`, i.e. values beyond the time resolution #' of t. Select the first two values of the transformed data set which contain #' no `NA` values and use these values for a linear fit using [lm]. #' #' (5) #' Extrapolate values for t' < `min(t)` based on the previously obtained #' fit parameters. #' #' (6) #' Transform values using #' \deqn{pLM(t) = t/P*CW(t')} #' #' (7) #' Combine values and truncate all values for t' > `max(t)` #' #' #' **NOTE:** #' The number of values for t' < `min(t)` depends on the stimulation #' period (P) and therefore on the stimulation rate 1/P. 
To avoid the #' production of too many artificial data at the raising tail of the determined #' pLM curves it is recommended to use the automatic estimation routine for #' `P`, i.e. provide no own value for `P`. #' #' @param values [RLum.Data.Curve-class] or [data.frame] (**required**): #' [RLum.Data.Curve-class] or `data.frame` with measured curve data of type #' stimulation time (t) (`values[,1]`) and measured counts (cts) (`values[,2]`) #' #' @param P [vector] (*optional*): #' stimulation time in seconds. If no value is given the optimal value is #' estimated automatically (see details). Greater values of P produce more #' points in the rising tail of the curve. #' #' @return #' The function returns the same data type as the input data type with #' the transformed curve values. #' #' **`RLum.Data.Curve`** #' #' \tabular{rl}{ #' `$CW2pLMi.x.t` \tab: transformed time values \cr #' `$CW2pLMi.method` \tab: used method for the production of the new data points #' } #' #' @note #' According to Bos & Wallinga (2012) the number of extrapolated points #' should be limited to avoid artificial intensity data. If `P` is #' provided manually and more than two points are extrapolated, a warning #' message is returned. #' #' @section Function version: 0.3.1 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne #' #' Based on comments and suggestions from:\cr #' Adrie J.J. Bos, Delft University of Technology, The Netherlands #' #' @seealso [CW2pLM], [CW2pHMi], [CW2pPMi], [fit_LMCurve], #' [RLum.Data.Curve-class] #' #' @references #' Bos, A.J.J. & Wallinga, J., 2012. How to visualize quartz OSL #' signal components. Radiation Measurements, 47, 752-758. #' #' **Further Reading** #' #' Bulur, E., 1996. An Alternative Technique For #' Optically Stimulated Luminescence (OSL) Experiment. Radiation Measurements, #' 26, 701-709. #' #' Bulur, E., 2000. A simple transformation for converting CW-OSL curves to #' LM-OSL curves. 
#' Transform a CW-OSL curve into a pLM-OSL curve via interpolation under
#' linear modulation conditions, following Bos & Wallinga (2012).
#'
#' @param values [RLum.Data.Curve-class] or [data.frame] (**required**):
#' curve data with stimulation time (t) in column 1 and counts in column 2
#' @param P [numeric] (*optional*): stimulation time in seconds; if missing,
#' a value is chosen automatically so that at most two points are extrapolated
#' @return same data type as the input with the transformed curve values
CW2pLMi <- function(
  values,
  P
){

  # (0) Integrity checks -------------------------------------------------------

  ##(1) input must be a data.frame or an RLum.Data.Curve object
  if(is(values, "data.frame") == FALSE & is(values, "RLum.Data.Curve") == FALSE){
    stop("[CW2pLMi()] 'values' object has to be of type 'data.frame' or 'RLum.Data.Curve'!",
         call. = FALSE)
  }

  ##(2) for RLum.Data.Curve objects only OSL/IRSL record types are allowed
  if(is(values, "RLum.Data.Curve") == TRUE){

    if(!grepl("OSL", values@recordType) & !grepl("IRSL", values@recordType)){
      stop(paste("[CW2pLMi()] recordType ", values@recordType,
                 " is not allowed for the transformation!", sep = ""),
           call. = FALSE)
    }else{
      temp.values <- as(values, "data.frame")
    }

  }else{
    temp.values <- values
  }

  # (1) Transform values -------------------------------------------------------

  ##(a) log transformation of the CW-OSL count values
  CW_OSL.log <- log(temp.values[,2])

  ##(b) time transformation t >> t'
  t <- temp.values[,1]

  ##set P
  ##if no value for P is given, select a P value producing a maximum of
  ##two extrapolation points (cf. Bos & Wallinga 2012)
  if(missing(P) == TRUE){

    i <- 10
    P <- 1/i
    t.transformed <- 0.5 * 1/P * t^2

    ## NOTE: this loop condition was corrupted in the previous revision
    ## ("t.transformed[t.transformed2"); restored to its documented intent:
    ## increase the stimulation rate until no more than two transformed
    ## values fall below the time resolution min(t)
    while(length(t.transformed[t.transformed < min(t)]) > 2){
      P <- 1/i
      t.transformed <- 0.5 * 1/P * t^2
      i <- i + 10
    }#end::while

  }else{

    if(P == 0){stop("[CW2pLMi] P has to be > 0!", call. = FALSE)}
    t.transformed <- 0.5 * 1/P * t^2

  }#endif

  # (2) Interpolation ----------------------------------------------------------

  ##interpolate values; values beyond the measured range return NA (rule = 1)
  CW_OSL.interpolated <- approx(t, CW_OSL.log, xout = t.transformed, rule = 1)

  ##combine t.transformed and the interpolated log counts in a data.frame
  temp <- data.frame(x = t.transformed, y = unlist(CW_OSL.interpolated$y))

  ##Problem: in rare cases the interpolation does not work properly and
  ##Inf or NaN values are returned; fetch row numbers of such invalid values
  invalid_values.id <- which(is.infinite(temp[,2]) | is.nan(temp[,2]))

  ##replace invalid values by the mean of their direct neighbours
  ##NOTE: previously the sapply() ran over 1:length(id) even when the id
  ##vector was empty (1:0); it is now evaluated only when actually needed
  if(length(invalid_values.id) > 0){
    invalid_values.interpolated <- sapply(seq_along(invalid_values.id),
      function(x) {
        mean(c(temp[invalid_values.id[x] - 1, 2],
               temp[invalid_values.id[x] + 1, 2]))
      })
    temp[invalid_values.id, 2] <- invalid_values.interpolated
  }

  # (3) Extrapolate first values of the curve ----------------------------------

  ##(a) index of the first non-NA row (needed for the extrapolation)
  temp.sel.id <- min(which(is.na(temp[,2]) == FALSE))

  ##(b) linear fit over the first two measured points
  fit.lm <- lm(y ~ x, data.frame(x = t[1:2], y = CW_OSL.log[1:2]))

  ##select values to extrapolate and predict them from the fitted function
  x.i <- data.frame(x = temp[1:(min(temp.sel.id) - 1), 1])
  y.i <- predict(fit.lm, x.i)

  ##replace NA values by the extrapolated values
  temp[1:length(y.i), 2] <- y.i

  ##record for each point whether it was extrapolated or interpolated
  temp.method <- c(rep("extrapolation", length(y.i)),
                   rep("interpolation", (length(temp[,2]) - length(y.i))))

  ##warn if more than two points had to be extrapolated
  if(length(y.i) > 2){
    warning("t' is beyond the time resolution and more than two data points have been extrapolated!")
  }

  # (4) Convert, transform and combine values ----------------------------------

  ##unlog CW-OSL count values, i.e. log(CW) >> CW
  CW_OSL <- exp(temp$y)

  ##transform CW-OSL values to pLM-OSL values: pLM(t) = t/P * CW(t')
  pLM <- 1/P * t * CW_OSL

  ##combine all values and exclude NA values
  temp.values <- data.frame(x = t, y.t = pLM, x.t = t.transformed, method = temp.method)
  temp.values <- na.exclude(temp.values)

  # (5) Return values ----------------------------------------------------------

  ##return the same data type as the input
  if(is(values, "data.frame") == TRUE){

    values <- temp.values
    return(values)

  }else{

    ##add old info elements to new info elements
    temp.info <- c(values@info,
                   CW2pLMi.x.t = list(temp.values$x.t),
                   CW2pLMi.method = list(temp.values$method))

    newRLumDataCurves.CW2pLMi <- set_RLum(
      class = "RLum.Data.Curve",
      recordType = values@recordType,
      data = as.matrix(temp.values[,1:2]),
      info = temp.info)
    return(newRLumDataCurves.CW2pLMi)

  }

}
#' General merge function for RLum S4 class objects
#'
#' Dispatches to the object-specific merge function depending on the class of
#' the list elements. Empty (`NULL`) list elements are removed beforehand.
#'
#' @param objects [list] of [RLum-class] objects (**required**)
#' @param ... further arguments passed to the specific merge function
#' @return merged object of the same class as the input elements, or `NULL`
#' (with a warning) if the list is empty after `NULL` removal
merge_RLum <- function(
  objects,
  ...
){

  # Integrity check ------------------------------------------------------------
  if(!is.list(objects)){
    stop("[merge_RLum()] argument 'objects' needs to be of type list!")
  }

  ##we are friendly and remove all empty list elements; this helps a lot if we
  ##place things. We DO NOT provide a warning as this would lower the
  ##computation speed in particular cases.
  objects <- objects[!vapply(objects, is.null, logical(1))]

  ##if the list is empty afterwards we do nothing
  if (length(objects) != 0) {

    ##check that every element inherits from RLum and collect the classes
    ##(vapply/seq_along instead of sapply/1:length for type safety)
    temp.class.test <- unique(vapply(seq_along(objects), function(x) {
      if (!is(objects[[x]], "RLum")) {
        temp.text <- paste(
          "[merge_RLum()]: At least element", x,
          "is not of class 'RLum' or a derivative class!"
        )
        stop(temp.text, call. = FALSE)
      }

      ##provide class of the object ... so far they should be similar
      is(objects[[x]])[1]
    }, character(1)))

    ##check that the classes are consistent;
    ##mixed classes are only valid for RLum.Analysis containers
    if (length(temp.class.test) > 1) {
      if (!"RLum.Analysis" %in% temp.class.test) {
        stop("[merge_RLum()] So far only similar input objects in the list are supported!")
      }
    }

    ##grep object class
    objects.class <-
      ifelse("RLum.Analysis" %in% temp.class.test, "RLum.Analysis", temp.class.test)

    ##select which merge function should be used
    switch(
      objects.class,
      RLum.Data.Image = stop(
        "[merge_RLum()] Sorry, merging of 'RLum.Data.Image' objects is currently not supported!"
      ),
      RLum.Data.Spectrum = stop(
        "[merge_RLum()] Sorry, merging of 'RLum.Data.Spectrum' objects is currently not supported!"
      ),
      RLum.Data.Curve = merge_RLum.Data.Curve(objects, ...),
      RLum.Analysis = merge_RLum.Analysis(objects, ...),
      RLum.Results = merge_RLum.Results(objects, ...)
    )

  }else{
    warning("[merge_RLum()] Nothing was merged as the object list was found to be empty!")
    return(NULL)
  }

}
#' #' #' @param data [data.frame] (**required**): #' input data of providing the following columns: *'LnTn', 'LnTn.error', Lr1Tr1', 'Lr1Tr1.error', 'Dr1'* #' **Note:** column names are not required. The function expect the input data in the given order #' #' @param gSGC.type [character] (*with default*): #' define the function parameters that #' should be used for the iteration procedure: Li et al., 2015 (Table 2) #' presented function parameters for two dose ranges: `"0-450"` and `"0-250"` #' #' @param gSGC.parameters [list] (*optional*): #' option to provide own function parameters used for fitting as named list. #' Nomenclature follows Li et al., 2015, i.e. `list(A,A.error,D0,D0.error,c,c.error,Y0,Y0.error,range)`, #' range requires a vector for the range the function is considered as valid, e.g. `range = c(0,250)`\cr #' Using this option overwrites the default parameter list of the gSGC, meaning the argument #' `gSGC.type` will be without effect #' #' @param n.MC [integer] (*with default*): #' number of Monte Carlo simulation runs for error estimation, see details. #' #' @param verbose [logical]: #' enable or disable terminal output #' #' @param plot [logical]: #' enable or disable graphical feedback as plot #' #' @param ... parameters will be passed to the plot output #' #' @return Returns an S4 object of type [RLum.Results-class]. #' #' **`@data`**\cr #' `$ De.value` ([data.frame]) \cr #' `.. $ De` \cr #' `.. $ De.error` \cr #' `.. $ Eta` \cr #' `$ De.MC` ([list]) contains the matricies from the error estimation.\cr #' `$ uniroot` ([list]) contains the uniroot outputs of the De estimations\cr #' #' **`@info`**\cr #' `$ call`` ([call]) the original function call #' #' #' @section Function version: 0.1.1 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montagine (France) #' #' @seealso [RLum.Results-class], [get_RLum], [uniroot] #' #' @references #' Li, B., Roberts, R.G., Jacobs, Z., Li, S.-H., 2015. 
#' Calculate De value based on the gSGC by Li et al., 2015
#'
#' Returns the De and its error using the global standardised growth curve
#' (gSGC) assumption; the De error is estimated by Monte Carlo simulation and
#' the root of the gSGC equation is found with [uniroot].
#'
#' @param data [data.frame] (**required**): five columns in the order
#' 'LnTn', 'LnTn.error', 'Lr1Tr1', 'Lr1Tr1.error', 'Dr1' (names are ignored)
#' @param gSGC.type [character] (*with default*): `"0-250"` or `"0-450"`
#' @param gSGC.parameters [list] (*optional*): own gSGC function parameters;
#' overrides `gSGC.type`
#' @param n.MC [integer] (*with default*): Monte Carlo runs for the error
#' @param verbose [logical]: enable/disable terminal output
#' @param plot [logical]: enable/disable plot output
#' @param ... passed to the plot output
#' @return [RLum.Results-class] object with `De`, `De.MC` and `uniroot` data
calc_gSGC <- function(
  data,
  gSGC.type = "0-250",
  gSGC.parameters,
  n.MC = 100,
  verbose = TRUE,
  plot = TRUE,
  ...
){

  ##============================================================================##
  ##CHECK INPUT DATA
  ##============================================================================##
  if(!is(data, "data.frame")){stop("'data' needs to be of type data.frame.")}
  if(!is(gSGC.type, "character")){stop("'gSGC.type' needs to be of type character.")}

  ##check length of input data
  if(ncol(data) != 5){stop("Structure of 'data' does not fit the expectations.")}

  ##rename columns for consistency reasons; only the column order matters
  colnames(data) <- c('LnTn', 'LnTn.error', 'Lr1Tr1', 'Lr1Tr1.error', 'Dr1')

  ##============================================================================##
  ##DEFINE FUNCTION
  ##============================================================================##
  ##gSGC equation to be solved; nomenclature according to Li et al. (2015)
  f <- function(x, A, D0, c, Y0, Dr1, Lr1Tr1, LnTn) {
    (((A * (1 - exp(-Dr1 / D0))) + c * Dr1 + Y0) / Lr1Tr1) -
      (((A * (1 - exp(-x / D0))) + c * x + Y0) / LnTn)
  }

  ##set general parameters: either user supplied or the published values
  if (!missing(gSGC.parameters)) {
    A <- gSGC.parameters$A
    A.error <- gSGC.parameters$A.error
    D0 <- gSGC.parameters$D0
    D0.error <- gSGC.parameters$D0.error
    c <- gSGC.parameters$c
    c.error <- gSGC.parameters$c.error
    Y0 <- gSGC.parameters$Y0
    Y0.error <- gSGC.parameters$Y0.error
    range <- gSGC.parameters$range
  }else{
    if (gSGC.type == "0-450") {
      ##parameters from Li et al. (2015), Table 2, dose range 0-450 Gy
      A <- 0.723
      A.error <- 0.014
      D0 <- 65.1
      D0.error <- 0.9
      c <- 0.001784
      c.error <- 0.000016
      Y0 <- 0.009159
      Y0.error <- 0.004795
      range <- c(0.1, 250)
    }else if (gSGC.type == "0-250") {
      ##parameters from Li et al. (2015), Table 2, dose range 0-250 Gy
      A <- 0.787
      A.error <- 0.051
      D0 <- 73.9
      D0.error <- 2.2
      c <- 0.001539
      c.error <- 0.000068
      Y0 <- 0.01791
      Y0.error <- 0.00490
      range <- c(0.1, 250)
    }else{
      stop("Unknown input for 'gSGC.type'")
    }
  }

  ##Define size of output objects
  output.data <- data.table::data.table(
    DE = numeric(length = nrow(data)),
    DE.ERROR = numeric(length = nrow(data)),
    ETA = numeric(length = nrow(data))
  )

  ##set list for De.MC
  output.De.MC <- vector("list", nrow(data))

  ##set list for uniroot
  output.uniroot <- vector("list", nrow(data))

  ##============================================================================##
  ##CALCULATION
  ##============================================================================##
  for(i in 1:nrow(data)){

    Lr1Tr1 <- data[i, "Lr1Tr1"]
    Lr1Tr1.error <- data[i, "Lr1Tr1.error"]
    Dr1 <- data[i, "Dr1"]
    ## NOTE: a former assignment 'Dr1.error <- data[i,"Dr1.error"]' accessed
    ## a non-existent column (column names are forced to five values above),
    ## which raised "undefined columns selected"; the value was never used
    ## and the line has been removed.
    LnTn <- data[i, "LnTn"]
    LnTn.error <- data[i, "LnTn.error"]

    ##calculate mean value
    temp <- try(uniroot(
      f,
      interval = c(0.1, 450),
      tol = 0.001,
      A = A, D0 = D0, c = c, Y0 = Y0,
      Dr1 = Dr1,
      Lr1Tr1 = Lr1Tr1,
      LnTn = LnTn,
      extendInt = 'yes',
      check.conv = TRUE,
      maxiter = 1000
    ), silent = TRUE)

    if(!inherits(temp, "try-error")){

      ##get De
      De <- temp$root

      ##calculate Eta, which is the normalisation factor
      Eta <- ((A * (1 - exp(-Dr1 / D0))) + c * Dr1 + Y0) / Lr1Tr1

      ##--------------------------------------------------------------------------##
      ##Monte Carlo simulation for error estimation

      ##set matrix
      temp.MC.matrix <- matrix(nrow = n.MC, ncol = 8)

      ##fill matrix: columns 1-6 are MC draws of the input values/parameters
      temp.MC.matrix[, 1:6] <- matrix(rnorm(
        n.MC * 6,
        mean = c(LnTn, Lr1Tr1, A, D0, c, Y0),
        sd = c(LnTn.error, Lr1Tr1.error, A.error, D0.error, c.error, Y0.error)
      ), ncol = 6, byrow = TRUE)

      ##run uniroot to get the De for every MC draw
      temp.MC.matrix[, 7] <- vapply(X = 1:n.MC, FUN = function(x){
        uniroot(
          f,
          interval = c(0.1, 450),
          tol = 0.001,
          A = temp.MC.matrix[x, 3],
          D0 = temp.MC.matrix[x, 4],
          c = temp.MC.matrix[x, 5],
          Y0 = temp.MC.matrix[x, 6],
          Dr1 = Dr1,
          Lr1Tr1 = temp.MC.matrix[x, 2],
          LnTn = temp.MC.matrix[x, 1],
          check.conv = TRUE,
          extendInt = 'yes',
          maxiter = 1000
        )$root
      }, FUN.VALUE = vector(mode = "numeric", length = 1))

      ##calculate also the normalisation factor
      temp.MC.matrix[, 8] <-
        (temp.MC.matrix[, 3] * (1 - exp(-Dr1 / temp.MC.matrix[, 4])) +
           temp.MC.matrix[, 5] * Dr1 + temp.MC.matrix[, 6]) / temp.MC.matrix[, 2]

      ##re-name matrix
      colnames(temp.MC.matrix) <- c("LnTn", "Lr1Tr1", "A", "D0", "c", "Y0", "De", "Eta")

      ##get De error as SD of the MC distribution
      De.error <- sd(temp.MC.matrix[, 7])

    }else{

      warning("No solution was found!")
      De <- NA
      Eta <- NA
      De.error <- NA

      ##set matrix (column 7/8 remain NA as no root was found)
      temp.MC.matrix <- matrix(nrow = n.MC, ncol = 8)

      ##fill matrix
      temp.MC.matrix[, 1:6] <- matrix(rnorm(
        n.MC * 6,
        mean = c(LnTn, Lr1Tr1, A, D0, c, Y0),
        sd = c(LnTn.error, Lr1Tr1.error, A.error, D0.error, c.error, Y0.error)
      ), ncol = 6, byrow = TRUE)

    }

    ##============================================================================##
    ##PLOT OUTPUT
    ##============================================================================##
    if (plot) {

      ##set plot settings
      plot.settings <- list(
        main = "gSGC and resulting De",
        xlab = "Dose [a.u.]",
        ylab = expression(paste("Re-norm. ", L[x]/T[x])),
        xlim = NULL,
        ylim = NULL,
        lwd = 1,
        lty = 1,
        pch = 21,
        col = "red",
        grid = expression(nx = 10, ny = 10),
        mtext = ""
      )
      plot.settings <- modifyList(plot.settings, list(...))

      ##graphical feedback: draw the gSGC itself
      x <- NA
      curve(
        A * (1 - exp(-x / D0)) + c * x + Y0,
        from = 0,
        to = 500,
        xlab = plot.settings$xlab,
        ylab = plot.settings$ylab,
        main = plot.settings$main,
        xlim = plot.settings$xlim,
        ylim = plot.settings$ylim,
        lwd = plot.settings$lwd,
        lty = plot.settings$lty
      )
      mtext(side = 3, plot.settings$mtext)

      if(!is.null(plot.settings$grid)){
        graphics::grid(eval(plot.settings$grid))
      }

      if(!inherits(temp, "try-error")){
        if(temp$root < 450 & temp$root > 0){

          ##mark the De on the curve and add the MC histogram
          points(temp$root, Eta * LnTn, col = plot.settings$col, pch = plot.settings$pch)
          segments(De - De.error, Eta * LnTn, De + De.error, Eta * LnTn)

          hist <- hist(
            temp.MC.matrix[, 7],
            freq = FALSE,
            add = TRUE,
            col = rgb(0, 0, 0, 0.2),
            border = rgb(0, 0, 0, 0.5)
          )
          lines(hist$mids, hist$density)

        }else{

          ##indicate out-of-bounds solutions with an arrow
          if(temp$root < 450){
            shape::Arrows(
              x0 = 450, y0 = par()$usr[4] - 0.2,
              x1 = 500, y1 = par()$usr[4] - 0.2,
              arr.type = "triangle", col = "red"
            )
          }else{
            shape::Arrows(
              x0 = 50, y0 = par()$usr[4] - 0.2,
              x1 = 0, y1 = par()$usr[4] - 0.2,
              arr.type = "triangle", col = "red"
            )
          }
          mtext(side = 1, text = "Out of bounds!", col = "red")
        }

      }else{
        mtext(side = 1, text = "No solution found!", col = "red")
      }

    }

    ##============================================================================##
    ##OUTPUT VISUALISATION
    ##============================================================================##
    if (verbose) {
      cat("\n[calc_gSGC()]")
      cat("\n\t Corresponding De based on the gSGC\n")
      cat(paste0("\n\t", " Ln/Tn:\t\t ", LnTn, " \u00B1 ", LnTn.error, "\n"))
      cat(paste0("\t", " Lr1/Tr1:\t ", Lr1Tr1, " \u00B1 ", Lr1Tr1.error, "\n"))
      cat(paste0("\t", " Dr1:\t\t ", Dr1, "\n"))
      cat(paste0("\t", " f(D):\t\t ", A, " * (1 - exp(-D /", D0, ")) + c * D + ", Y0, "\n"))
      cat(paste0("\t", " n.MC:\t\t ", n.MC, "\n"))
      cat(paste0("\t ------------------------------ \n"))
      cat(paste0("\t De:\t\t", round(De, digits = 2), " \u00B1 ", round(De.error, digits = 2), "\n"))
      cat(paste0("\t ------------------------------ \n"))
    }

    ##============================================================================##
    ##CREATE OUTPUT OBJECTS
    ##============================================================================##
    ##needed for data.table
    temp.De <- De
    temp.De.error <- De.error
    temp.Eta <- Eta

    ##replace values in the data.table with values (by reference)
    output.data[i, `:=`(DE = temp.De, DE.ERROR = temp.De.error, ETA = temp.Eta)]
    rm(list = c('temp.De', 'temp.De.error', 'temp.Eta'))

    ##matrix - to prevent memory overload limit output
    if(n.MC * nrow(data) > 1e6){
      if(i == 1){
        output.De.MC[[i]] <- temp.MC.matrix
      }else{
        output.De.MC[[i]] <- NA
      }
      warning("Only the first MC matrix is returned to prevent memory overload!")
    }else{
      output.De.MC[[i]] <- temp.MC.matrix
    }

    output.uniroot[[i]] <- temp

  }##end for loop

  ##============================================================================##
  ##OUTPUT RLUM
  ##============================================================================##
  temp.RLum.Results <- set_RLum(
    class = "RLum.Results",
    data = list(
      De = as.data.frame(output.data),
      De.MC = output.De.MC,
      uniroot = output.uniroot
    ),
    info = list(call = sys.call())
  )

  return(temp.RLum.Results)
}
#' Class `"RLum.Data"`
#'
#' Generalized virtual data class for luminescence data.
#'
#' @note Just a virtual class: no objects can be created from it. It only
#' serves as a common parent for the `RLum.Data.*` classes.
#'
#' @section Class version: 0.2.1
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @seealso [RLum-class], [RLum.Data.Curve-class],
#' [RLum.Data.Spectrum-class]
#'
#' @keywords classes internal
#'
#' @examples
#'
#' showClass("RLum.Data")
#'
#' @md
#' @export
setClass("RLum.Data",
         contains = c("RLum", "VIRTUAL")
)

#' Function to get the right answer
#'
#' This function returns just the right answer.
#'
#' @param ... you can pass an infinite number of further arguments (they are
#' ignored)
#'
#' @return Returns the right answer
#'
#' @section Function version: 0.1.0
#'
#' @author inspired by R.G.
#'
#' @examples
#'
#' ## you really want to know?
#' get_rightAnswer()
#'
#' @md
#' @export
get_rightAnswer <- function(...) {
  return(46)
}
#' Analyse portable CW-OSL measurements
#'
#' Analyses CW-OSL curve data produced by a SUERC portable OSL reader and
#' produces a combined plot of OSL/IRSL signal intensities, depletion ratios
#' and the IRSL/OSL ratio. Requires an `RLum.Analysis` object produced by
#' [read_PSL2R] with pairwise OSL and IRSL curves.
#'
#' @param object [RLum.Analysis-class] (**required**): object from [read_PSL2R]
#' @param signal.integral [vector] (**required**): lower and upper channel
#' used to calculate the signal, e.g. `c(1, 5)` or `1:5`
#' @param invert [logical] (*with default*): reverse the data order
#' @param normalise [logical] (*with default*): normalise signals by the mean
#' @param plot [logical] (*with default*): enable/disable plot output
#' @param ... currently not used
#' @return an [RLum.Results-class] object with a `summary` data.frame
analyse_portableOSL <- function(object, signal.integral, invert = FALSE, normalise = FALSE, plot = TRUE, ...) {

  ## INPUT VERIFICATION ----
  if (!inherits(object, "RLum.Analysis"))
    stop("Only objects of class 'RLum.Analysis' are allowed.", call. = FALSE)
  if (!all(sapply(object, class) == "RLum.Data.Curve"))
    stop("The 'RLum.Analysis' object must only contain objects of class 'RLum.Data.Curve'.", call. = FALSE)
  if (!all(sapply(object, function(x) x@originator) == "read_PSL2R"))
    stop("Only objects originating from 'read_PSL2R()' are allowed.", call. = FALSE)

  if (missing(signal.integral)) {
    signal.integral <- c(1, 1)
    warning("No value for 'signal.integral' provided. Only the first data point of each curve was used!", call. = FALSE)
  }

  ## CALCULATIONS ----
  # OSL: one row of summary statistics per OSL curve
  OSL <- get_RLum(object, recordType = "OSL")
  OSL <- do.call(rbind, lapply(OSL, function(x) {
    posl_get_signal(x, signal.integral)
  }))

  # IRSL: one row of summary statistics per IRSL curve
  IRSL <- get_RLum(object, recordType = "IRSL")
  IRSL <- do.call(rbind, lapply(IRSL, function(x) {
    posl_get_signal(x, signal.integral)
  }))

  ## NORMALISE ----
  if (normalise) {
    OSL <- posl_normalise(OSL)
    IRSL <- posl_normalise(IRSL)
  }

  ## INVERT ----
  if (invert) {
    OSL <- posl_invert(OSL)
    IRSL <- posl_invert(IRSL)
  }

  # OSL/IRSL Ratio (assumes pairwise OSL/IRSL measurements)
  RATIO <- IRSL$sum_signal / OSL$sum_signal

  ## PLOTTING ----
  if (plot) {
    par.old.full <- par(no.readonly = TRUE)
    on.exit(par(par.old.full))

    # default: par(mar = c(5, 4, 4, 2) + 0.1) // bottom, left, top, right
    par(mfrow = c(1, 6))
    par(mar = c(5, 4, 4, 1) + 0.1)
    frame()
    par(mar = c(5, 0, 4, 1) + 0.1)

    plot(
      OSL$sum_signal, seq_len(nrow(OSL)),
      type = "b", pch = 16, col = "blue",
      xlim = range(pretty(OSL$sum_signal)),
      xlab = "BSL", ylab = "Index",
      bty = "n", yaxt = "n"
    )
    axis(2, line = 3, at = seq_len(nrow(OSL)))
    axis(3)
    mtext("Index", side = 2, line = 6)

    plot(
      IRSL$sum_signal, seq_len(nrow(IRSL)),
      type = "b", pch = 16, col = "red",
      xlim = range(pretty(IRSL$sum_signal)),
      xlab = "IRSL", ylab = "",
      bty = "n", yaxt = "n"
    )
    axis(3)

    plot(
      OSL$sum_signal_depletion, seq_len(nrow(OSL)),
      type = "b", pch = 1, col = "blue",
      xlim = range(pretty(OSL$sum_signal_depletion)),
      xlab = "BSL depl.", ylab = "",
      bty = "n", yaxt = "n", lty = 2
    )
    axis(3)

    plot(
      IRSL$sum_signal_depletion, seq_len(nrow(IRSL)),
      type = "b", pch = 1, col = "red",
      xlim = range(pretty(IRSL$sum_signal_depletion)),
      xlab = "IRSL depl.", ylab = "",
      bty = "n", yaxt = "n", lty = 2
    )
    axis(3)

    plot(
      RATIO, seq_along(RATIO),
      type = "b", pch = 16, col = "black",
      xlim = range(pretty(RATIO)),
      xlab = "IRSL/BSL", ylab = "",
      bty = "n", yaxt = "n"
    )
    axis(3)
  }

  ## RETURN VALUE ----
  call <- sys.call()
  args <- as.list(call)[2:length(call)]

  summary <- data.frame(BSL = OSL$sum_signal,
                        BSL_error = OSL$sum_signal_err,
                        IRSL = IRSL$sum_signal,
                        IRSL_error = IRSL$sum_signal_err,
                        BSL_depletion = OSL$sum_signal_depletion,
                        IRSL_depletion = IRSL$sum_signal_depletion,
                        IRSL_BSL_RATIO = RATIO)

  newRLumResults <- set_RLum(
    class = "RLum.Results",
    data = list(
      summary = summary,
      data = object,
      args = args
    ),
    info = list(call = call))

  return(newRLumResults)
}

################################################################################
##                              HELPER FUNCTIONS                              ##
################################################################################

## Extracts the relevant curve data information of an RLum.Data.Curve object:
## summed signal, its error and the depletion ratio (last n / first n channels)
posl_get_signal <- function(x, signal.integral) {
  raw_signal <- get_RLum(x)[, 2]
  sigint <- range(signal.integral)
  if (sigint[2] > length(raw_signal)) {
    sigint[2] <- length(raw_signal)
    warning("'signal.integral' (", paste(range(signal.integral), collapse = ", "), ") ",
            "exceeded the number of available data points (n = ", length(raw_signal), ") and ",
            "has been automatically reduced to the maximum number.", call. = FALSE)
  }
  sum_signal <- sum(raw_signal[sigint[1]:sigint[2]])
  sum_signal_err <- sqrt(sum(x@info$raw_data$counts_per_cycle_error[sigint[1]:sigint[2]]^2))

  ## FIX: the depletion ratio is documented as "cumulative signal of the LAST
  ## n channels over the first n channels"; the previous index
  ## (length - n):length started one channel too early (n + 1 channels)
  n_channels <- length(sigint[1]:sigint[2])
  sum_signal_depletion <-
    sum(raw_signal[(length(raw_signal) - n_channels + 1):length(raw_signal)]) / sum_signal

  return(data.frame(sum_signal, sum_signal_err, sum_signal_depletion))
}

## Normalises the curve summary by the mean signal (relative errors preserved)
posl_normalise <- function(x) {
  rel.error <- x$sum_signal_err / x$sum_signal
  x$sum_signal <- x$sum_signal / mean(x$sum_signal)
  x$sum_signal_err <- x$sum_signal * rel.error
  x$sum_signal_depletion <- x$sum_signal_depletion / mean(x$sum_signal_depletion)
  return(x)
}

## Inverts the data.frame row order (useful when samples are in inverse
## stratigraphic order)
posl_invert <- function(x) {
  x <- x[nrow(x):1, ]
}
#' General structure function for RLum S4 class objects
#'
#' Generic access point that dispatches to the object-specific structure
#' method of the corresponding [RLum-class] class.
#'
#' @param object [RLum-class] (**required**): S4 object of class `RLum`
#' @param ... further arguments passed to the specific structure method
#' @return a [data.frame] describing the structure of the object
#'
#' @md
#' @export
setGeneric("structure_RLum", function(object, ...) {
  standardGeneric("structure_RLum")
})

# structure_RLum method for a list of objects --------------------------------
#' @describeIn structure_RLum
#' Returns a list of [RLum-class] objects that had been passed to [structure_RLum]
#'
#' @md
#' @export
setMethod("structure_RLum",
          signature = "list",
          function(object, ...) {
  ## apply the method to every element; anything that is not an RLum
  ## object passes through unchanged
  lapply(object, function(element) {
    if (!inherits(element, "RLum"))
      return(element)

    structure_RLum(element, ...)
  })
})
#' \cr
#'
#' Further supported arguments: `mtext` ([character]), `rug` (`TRUE/FALSE`).
#'
#' @section Function version: 0.1.5
#'
#' @author Claire Christophe, IRAMAT-CRP2A, Universite de Nantes (France),
#' Anne Philippe, Universite de Nantes, (France),
#' Guillaume Guerin, IRAMAT-CRP2A, Universite Bordeaux Montaigne, (France),
#' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne, (France)
#'
#' @seealso [read.table], [graphics::hist]
#'
#' @return The function returns numerical output and an (*optional*) plot.
#'
#' -----------------------------------\cr
#' `[ NUMERICAL OUTPUT ]` \cr
#' -----------------------------------\cr
#' **`RLum.Results`**-object\cr
#'
#' **slot:** **`@data`** \cr
#'
#' `[.. $summary : data.frame]`\cr
#'
#' \tabular{lll}{
#'  **Column** \tab **Type** \tab **Description**\cr
#'  AVERAGE_DOSE \tab [numeric] \tab the obtained average dose\cr
#'  AVERAGE_DOSE.SE \tab [numeric] \tab the average dose error \cr
#'  SIGMA_D \tab [numeric]\tab sigma \cr
#'  SIGMA_D.SE \tab [numeric]\tab standard error of the sigma \cr
#'  IC_AVERAGE_DOSE.LEVEL \tab [character]\tab confidence level average dose\cr
#'  IC_AVERAGE_DOSE.LOWER \tab [character]\tab lower quantile of average dose \cr
#'  IC_AVERAGE_DOSE.UPPER \tab [character]\tab upper quantile of average dose\cr
#'  IC_SIGMA_D.LEVEL \tab [integer]\tab confidence level sigma\cr
#'  IC_SIGMA_D.LOWER \tab [character]\tab lower sigma quantile\cr
#'  IC_SIGMA_D.UPPER \tab [character]\tab upper sigma quantile\cr
#'  L_MAX \tab [character]\tab maximum likelihood value
#' }
#'
#' `[.. $dstar : matrix]` \cr
#'
#' Matrix with bootstrap values\cr
#'
#' `[.. $hist : list]`\cr
#'
#' Object as produced by the function histogram
#'
#' ------------------------\cr
#' `[ PLOT OUTPUT ]`\cr
#' ------------------------\cr
#'
#' The function returns two different plot panels.
#'
#' (1) An abanico plot with the dose values
#'
#' (2) A histogram panel comprising 3 histograms with the equivalent dose and the bootstrapped average
#' dose and the sigma values.
#'
#' @references
#' Guerin, G., Christophe, C., Philippe, A., Murray, A.S., Thomsen, K.J., Tribolo, C., Urbanova, P.,
#' Jain, M., Guibert, P., Mercier, N., Kreutzer, S., Lahaye, C., 2017. Absorbed dose, equivalent dose,
#' measured dose rates, and implications for OSL age estimates: Introducing the Average Dose Model.
#' Quaternary Geochronology 1-32. doi:10.1016/j.quageo.2017.04.002
#'
#' **Further reading**\cr
#'
#' Efron, B., Tibshirani, R., 1986. Bootstrap Methods for Standard Errors, Confidence Intervals,
#' and Other Measures of Statistical Accuracy. Statistical Science 1, 54-75.
#'
#' @note This function has beta status!
#'
#' @keywords datagen
#'
#' @examples
#'
#'##Example 01 using package example data
#'##load example data
#'data(ExampleData.DeValues, envir = environment())
#'
#'##calculate Average dose
#'##(use only the first 56 values here)
#'AD <- calc_AverageDose(ExampleData.DeValues$CA1[1:56,], sigma_m = 0.1)
#'
#'##plot De and set Average dose as central value
#'plot_AbanicoPlot(
#' data = ExampleData.DeValues$CA1[1:56,],
#' z.0 = AD$summary$AVERAGE_DOSE)
#'
#' @md
#' @export
calc_AverageDose <- function(
  data,
  sigma_m = NULL,
  Nb_BE = 500,
  na.rm = TRUE,
  plot = TRUE,
  verbose = TRUE,
  ...
){

  # Define internal functions ------------------------------------------------------------------

  ## Maximum-likelihood estimation of (delta, sigma_d) for data (yu, su) via
  ## fixed-point iteration. Returns c(delta, sigma_d) rounded to 4 digits, or
  ## c(NA, NA) when the iteration diverges or the iteration limit is reached.
  .mle <- function(yu, su, wu.start, sigma_d.start, delta.start) {

    ##set start parameters, otherwise the function will try to get them
    ##from the parent environment, which is not wanted ...
    delta.temp <- 0
    sigma_d.temp <- 0
    sigma_d <- sigma_d.start
    delta <- delta.start
    wu <- wu.start
    j <- 0
    iteration_limit <- 10000

    ##loop until convergence or the iteration limit is reached
    while (j < iteration_limit) {
      ##code by Claire; in the 2nd and 3rd line delta and sigma_d are replaced by delta.temp and
      ##sigma_d.temp; otherwise the iteration and its test for convergence will not work
      delta.temp <- exp(sum(wu * (yu + (0.5 * (sigma_d^2)))) / sum(wu))
      sigma_d.temp <- sigma_d * sum((wu^2) * (yu - log(delta.temp) + 0.5 * sigma_d^2)^2) /
        (sum(wu * (1 + yu - log(delta.temp) + 0.5 * sigma_d^2)))
      wu <- 1 / (sigma_d.temp^2 + su^2)

      ##break loop if convergence is reached ... if not update values
      if (is.infinite(delta.temp) | is.infinite(sigma_d.temp)) {
        break()

      } else if (
        ##compare values ... if they are equal we have convergence
        all(c(round(c(delta, sigma_d), 4)) == c(round(c(delta.temp, sigma_d.temp), 4)))
      ) {
        break()

      } else {
        ##update input values
        delta <- delta.temp
        sigma_d <- sigma_d.temp
        j <- j + 1
      }
    }

    ##if no convergence was reached stop entire function; no stop as this may happen during the
    ##bootstrapping procedure
    ##fix: warnings previously referenced the non-existing name 'calc_AverageDoseModel()'
    if (j == iteration_limit) {
      warning("[calc_AverageDose()] .mle() no convergence reached for the given limits. NA returned!")
      return(c(NA, NA))

    } else if (is.infinite(delta.temp) | is.infinite(sigma_d.temp)) {
      warning("[calc_AverageDose()] .mle() gave Inf values. NA returned!")
      return(c(NA, NA))

    } else {
      return(c(round(c(delta, sigma_d), 4)))
    }
  }

  ## Shortest credible interval of a bootstrap sample at the given level.
  .CredibleInterval <- function(a_chain, level = 0.95) {
    ## Aim : estimation of the shortest credible interval of the sample of parameter a
    # A level % credible interval is an interval that keeps N*(1-level) elements of the sample
    # The level % credible interval is the shortest of all those intervals.
    ## Parameters :
    # a_chain : the name of the values of the parameter a
    # level : the level of the credible interval expected
    ## Returns : the level and the endpoints
    sorted_sample <- sort(a_chain)
    N <- length(a_chain)
    OutSample <- N * (1 - level)

    I <- cbind(sorted_sample[1:(OutSample + 1)],
               sorted_sample[(N - OutSample):N])

    l <- I[, 2] - I[, 1] # length of intervals
    i <- which.min(l)    # look for the shortest interval

    return(c(
      level = level,
      CredibleIntervalInf = I[i, 1],
      CredibleIntervalSup = I[i, 2]
    ))
  }

  ##////////////////////////////////////////////////////////////////////////////////////////////////
  ##HERE THE MAIN FUNCTION STARTS
  ##////////////////////////////////////////////////////////////////////////////////////////////////

  # Integrity checks ----------------------------------------------------------------------------

  if (!is(data, "RLum.Results") & !is(data, "data.frame")) {
    stop("[calc_AverageDose()] input is neither of type 'RLum.Results' nor of type 'data.frame'!")
  } else {
    if (is(data, "RLum.Results")) {
      data <- get_RLum(data)
    }
  }

  if (is.null(sigma_m)) {
    stop("[calc_AverageDose()] 'sigma_m' is missing but required")
  }

  # Data preparation -----------------------------------------------------------------------------

  ##problem: the entire code refers to column names the user may not provide...
  ## >> to avoid changing the entire code, the data will shape to a format that
  ## >> fits to the code

  ##check for number of columns
  if (ncol(data) < 2) {
    try(stop("[calc_AverageDose()] data set contains < 2 columns! NULL returned!", call. = FALSE))
    return(NULL)
  }

  ##use only the first two columns
  if (ncol(data) > 2) {
    data <- data[, 1:2]
    warning("[calc_AverageDose()] number of columns in data set > 2. Only the first two columns were used.",
            call. = FALSE)
  }

  ##exclude NA values
  ##fix: 'na.rm' was previously ignored and rows were removed unconditionally;
  ##the default na.rm = TRUE preserves the old behaviour
  if (any(is.na(data))) {
    if (na.rm) {
      data <- na.exclude(data)
      warning("[calc_AverageDose()] NA values in data set detected. Rows with NA values removed!",
              call. = FALSE)
    } else {
      warning("[calc_AverageDose()] NA values in data set detected and kept (na.rm = FALSE); results may be NA!",
              call. = FALSE)
    }
  }

  ##check data set
  if (nrow(data) == 0) {
    try(stop("[calc_AverageDose()] data set contains 0 rows! NULL returned!", call. = FALSE))
    return(NULL)
  }

  ##data becomes to dat (thus, make the code compatible with the code by Claire and Anne)
  dat <- data

  ##preset column names, as the code refers to it
  colnames(dat) <- c("cd", "se")

  # Pre calculation -----------------------------------------------------------------------------

  ##calculate yu = log(CD) and su = se(logCD)
  yu <- log(dat$cd)
  su <- sqrt((dat$se / dat$cd)^2 + sigma_m^2)

  # calculate starting values and weights
  sigma_d <- sd(dat$cd) / mean(dat$cd)
  wu <- 1 / (sigma_d^2 + su^2)
  delta <- mean(dat$cd)
  n <- length(yu)

  ##terminal output
  if (verbose) {
    cat("\n[calc_AverageDose()]")
    cat("\n\n>> Initialisation <<")
    cat(paste("\nn:\t\t", n))
    cat(paste("\ndelta:\t\t", delta))
    cat(paste("\nsigma_m:\t", sigma_m))
    cat(paste("\nsigma_d:\t", sigma_d))
  }

  # mle's computation
  dhat <- .mle(yu, su, wu.start = wu, sigma_d.start = sigma_d, delta.start = delta)
  delta <- dhat[1]
  sigma_d <- dhat[2]
  wu <- 1 / (sigma_d^2 + su^2)

  # maximum log likelihood
  llik <- sum(-log(sqrt(2 * pi / wu)) - (wu / 2) * ((yu - log(delta) + 0.5 * (sigma_d^2))^2))

  ##terminal output
  if (verbose) {
    cat(paste("\n\n>> Calculation <<\n"))
    cat(paste("log likelihood:\t", round(llik, 4)))
  }

  # standard errors obtained by bootstrap, we refer to Efron B. and Tibshirani R. (1986)
  n <- length(yu)

  ##calculate dstar
  ##set matrix for I
  I <- matrix(data = sample(x = 1:n, size = n * Nb_BE, replace = TRUE), ncol = Nb_BE)

  ##iterate over the matrix and produce dstar
  ##(this looks a little bit complicated, but is far more efficient)
  dstar <- t(vapply(
    X = 1:Nb_BE,
    FUN = function(x) {
      .mle(yu[I[, x]], su[I[, x]],
           sigma_d.start = sigma_d, delta.start = delta, wu.start = wu)
    },
    FUN.VALUE = vector(mode = "numeric", length = 2)
  ))

  ##exclude NA values
  dstar <- na.exclude(dstar)

  ##calculate confidence intervals
  IC_delta <- .CredibleInterval(dstar[, 1], 0.95)
  IC_sigma_d <- .CredibleInterval(dstar[, 2], 0.95)
  IC <- rbind(IC_delta, IC_sigma_d)

  # standard errors
  sedelta <- sqrt((1 / (Nb_BE - 1)) * sum((dstar[, 1] - mean(dstar[, 1]))^2))
  sesigma_d <- sqrt((1 / (Nb_BE - 1)) * sum((dstar[, 2] - mean(dstar[, 2]))^2))

  ##Terminal output
  if (verbose) {
    cat("\nconfidence intervals\n")
    cat("--------------------------------------------------\n")
    print(t(IC), print.gap = 6, digits = 4)
    cat("--------------------------------------------------\n")

    cat(paste("\n>> Results <<\n"))
    cat("----------------------------------------------------------\n")
    cat(paste("Average dose:\t ", round(delta, 4),
              "\tse(Aver. dose):\t", round(sedelta, 4)))

    if (sigma_d == 0) {
      cat(paste("\nsigma_d:\t ", round(sigma_d, 4),
                "\t\tse(sigma_d):\t", round(sesigma_d, 4)))
    } else {
      cat(paste("\nsigma_d:\t ", round(sigma_d, 4),
                "\tse(sigma_d):\t", round(sesigma_d, 4)))
    }
    cat("\n----------------------------------------------------------\n")
  }

  ##compile final results data frame
  results_df <- data.frame(
    AVERAGE_DOSE = delta,
    AVERAGE_DOSE.SE = sedelta,
    SIGMA_D = sigma_d,
    SIGMA_D.SE = sesigma_d,
    IC_AVERAGE_DOSE.LEVEL = IC_delta[1],
    IC_AVERAGE_DOSE.LOWER = IC_delta[2],
    IC_AVERAGE_DOSE.UPPER = IC_delta[3],
    IC_SIGMA_D.LEVEL = IC_sigma_d[1],
    IC_SIGMA_D.LOWER = IC_sigma_d[2],
    IC_SIGMA_D.UPPER = IC_sigma_d[3],
    L_MAX = llik,
    row.names = NULL
  )

  # Plotting ------------------------------------------------------------------------------------

  ##the plotting (enable/disable) is controlled below, as with this
  ##we always get a histogram object

  ##set data list
  data_list <- list(dat$cd, dstar[, 1], dstar[, 2])

  ##preset plot arguments
  plot_settings <- list(
    breaks = list("FD", "FD", "FD"),
    probability = list(FALSE, TRUE, TRUE),
    main = list(
      "Observed: Equivalent dose",
      "Bootstrapping: Average Dose",
      "Bootstrapping: Sigma_d"),
    xlab = list(
      "Equivalent dose [a.u.]",
      "Average dose [a.u.]",
      "Sigma_d"),
    axes = list(TRUE, TRUE, TRUE),
    col = NULL,
    border = NULL,
    density = NULL,
    freq = NULL,
    mtext = list(
      paste("n = ", length(data_list[[1]])),
      paste("n = ", length(data_list[[2]])),
      paste("n = ", length(data_list[[3]]))),
    rug = list(TRUE, TRUE, TRUE)
  )

  ##modify this list by values the user provides
  ##expand all elements in the list
  ##problem: the user might provide only one item, then the code will break
  plot_settings.user <- lapply(list(...), function(x) rep(x, length = 3))

  ##fix: the arguments were swapped before ('x = plot_settings.user,
  ##val = plot_settings'), so the defaults always overrode the user settings
  ##and the NULL defaults even deleted them; user values must win
  plot_settings <- modifyList(x = plot_settings, val = plot_settings.user)

  ##get change par setting and reset on exit
  par.default <- par()$mfrow
  on.exit(par(mfrow = par.default))
  par(mfrow = c(1, 3))

  ##Produce plots
  ##(1) - histogram of the observed equivalent dose
  ##(2) - histogram of the bootstrapped De
  ##(3) - histogram of the bootstrapped sigma_d
  ##with lapply we also fetch the return of hist, the user might want to use this later
  ##(local result renamed from 'hist' to avoid shadowing graphics::hist)
  hist_out <- lapply(1:length(data_list), function(x) {
    temp <- suppressWarnings(hist(
      x = data_list[[x]],
      breaks = plot_settings$breaks[[x]],
      probability = plot_settings$probability[[x]],
      main = plot_settings$main[[x]],
      xlab = plot_settings$xlab[[x]],
      axes = plot_settings$axes[[x]],
      freq = plot_settings$freq[[x]],
      plot = plot,
      col = plot_settings$col[[x]],
      border = plot_settings$border[[x]],
      density = plot_settings$density[[x]]
    ))

    if (plot) {
      ##add rug
      if (plot_settings$rug[[x]]) {
        rug(data_list[[x]])
      }

      ##plot mtext
      mtext(side = 3, text = plot_settings$mtext[[x]], cex = par()$cex)
    }

    return(temp)
  })

  # Return --------------------------------------------------------------------------------------
  set_RLum(
    class = "RLum.Results",
    data = list(
      summary = results_df,
      dstar = as.data.frame(dstar),
      hist = hist_out
    ),
    info = list(call = sys.call())
  )
}
\cr #' `Object structure` \tab Summary of the objects structure given as a table \cr #' `File` \tab Information on the saved RDS file \cr #' `Session Info` \tab Captured output from `sessionInfo()` \cr #' `Plots` \tab (*optional*) For `RLum-class` objects a variable number of plots \cr #' } #' #' The structure of the report can be controlled individually by providing one or more of the #' following arguments (all `logical`): #' #' \tabular{ll}{ #' **Argument** \tab **Description** \cr #' `header` \tab Hide or show general information on the object \cr #' `main` \tab Hide or show the object's content \cr #' `structure` \tab Hide or show object's structure \cr #' `rds` \tab Hide or show information on the saved RDS file \cr #' `session` \tab Hide or show the session info \cr #' `plot` \tab Hide or show the plots (depending on object) \cr #' } #' #' Note that these arguments have higher precedence than `compact`. #' #' Further options that can be provided via the `...` argument: #' #' \tabular{ll}{ #' **Argument** \tab **Description** \cr #' `short_table` \tab If `TRUE` only show the first and last 5 rows of lang tables. \cr #' `theme` \tab Specifies the Bootstrap #' theme to use for the report. Valid themes include "default", "cerulean", "journal", "flatly", #' "readable", "spacelab", "united", "cosmo", "lumen", "paper", "sandstone", "simplex", and "yeti". \cr #' `highlight` \tab Specifies the syntax highlighting #' style. Supported styles include "default", "tango", "pygments", "kate", "monochrome", #' "espresso", "zenburn", "haddock", and "textmate". \cr #' `css` \tab `TRUE` or `FALSE` to enable/disable custom CSS styling \cr #' } #' #' The following arguments can be used to customise the report via CSS (Cascading Style Sheets): #' #' \tabular{ll}{ #' **Argument** \tab **Description** \cr #' `font_family` \tab Define the font family of the HTML document (default: arial) \cr #' `headings_size` \tab Size of the `

` to `
` tags used to define HTML headings (default: 166%). \cr #' `content_color` \tab Color of the object's content (default: #a72925). \cr #' } #' #' Note that these arguments must all be of class [character] and follow standard CSS syntax. #' For exhaustive CSS styling you can provide a custom CSS file for argument `css.file`. #' CSS styling can be turned of using `css = FALSE`. #' #' @param object (**required**): #' The object to be reported on, preferably of any `RLum`-class. #' #' @param file [character] (*with default*): #' A character string naming the output file. If no filename is provided a #' temporary file is created. #' #' @param title [character] (*with default*): #' A character string specifying the title of the document. #' #' @param compact [logical] (*with default*): #' When `TRUE` the following report components are hidden: #' `@@.pid`, `@@.uid`, `'Object structure'`, `'Session Info'` #' and only the first and last 5 rows of long matrices and data frames are shown. #' See details. #' #' @param timestamp [logical] (*with default*): #' `TRUE` to add a timestamp to the filename (suffix). #' #' @param show_report [logical] (*with default*): If set to `TRUE` the function tries to display #' the report output in the local viewer, e.g., within *RStudio* after rendering. #' #' @param launch.browser [logical] (*with default*): #' `TRUE` to open the HTML file in the system's default web browser after #' it has been rendered. #' #' @param css.file [character] (*optional*): #' Path to a CSS file to change the default styling of the HTML document. #' #' @param quiet [logical] (*with default*): #' `TRUE` to supress printing of the pandoc command line. #' #' @param clean [logical] (*with default*): #' `TRUE` to clean intermediate files created during rendering. #' #' @param ... further arguments passed to or from other methods and to control #' the document's structure (see details). 
#' #' @section Function version: 0.1.1 #' #' @author #' Christoph Burow, University of Cologne (Germany), #' Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France) \cr #' #' @note #' This function requires the R packages 'rmarkdown', 'pander' and 'rstudioapi'. #' #' @seealso [rmarkdown::render], [pander::pander_return], #' [pander::openFileInOS], [rstudioapi::viewer], #' [browseURL] #' #' @return #' Writes a HTML and .Rds file. #' #' @examples #' #' \dontrun{ #' ## Example: RLum.Results ---- #' #' # load example data #' data("ExampleData.DeValues") #' #' # apply the MAM-3 age model and save results #' mam <- calc_MinDose(ExampleData.DeValues$CA1, sigmab = 0.2) #' #' # create the HTML report #' report_RLum(object = mam, file = "~/CA1_MAM.Rmd", #' timestamp = FALSE, #' title = "MAM-3 for sample CA1") #' #' # when creating a report the input file is automatically saved to a #' # .Rds file (see saveRDS()). #' mam_report <- readRDS("~/CA1_MAM.Rds") #' all.equal(mam, mam_report) #' #' #' ## Example: Temporary file & Viewer/Browser ---- #' #' # (a) #' # Specifying a filename is not necessarily required. If no filename is provided, #' # the report is rendered in a temporary file. If you use the RStudio IDE, the #' # temporary report is shown in the interactive Viewer pane. #' report_RLum(object = mam) #' #' # (b) #' # Additionally, you can view the HTML report in your system's default web browser. 
#' report_RLum(object = mam, launch.browser = TRUE) #' #' #' ## Example: RLum.Analysis ---- #' #' data("ExampleData.RLum.Analysis") #' #' # create the HTML report (note that specifying a file #' # extension is not necessary) #' report_RLum(object = IRSAR.RF.Data, file = "~/IRSAR_RF") #' #' #' ## Example: RLum.Data.Curve ---- #' #' data.curve <- get_RLum(IRSAR.RF.Data)[[1]] #' #' # create the HTML report #' report_RLum(object = data.curve, file = "~/Data_Curve") #' #' ## Example: Any other object ---- #' x <- list(x = 1:10, #' y = runif(10, -5, 5), #' z = data.frame(a = LETTERS[1:20], b = dnorm(0:9)), #' NA) #' #' report_RLum(object = x, file = "~/arbitray_list") #' } #' #' @md #' @export report_RLum <- function( object, file = tempfile(), title = "RLum.Report", compact = TRUE, timestamp = TRUE, show_report = TRUE, launch.browser = FALSE, css.file = NULL, quiet = TRUE, clean = TRUE, ...) { ## ------------------------------------------------------------------------ ## ## PRE-CHECKS ---- # check if required namespace(s) are available if (!requireNamespace("rmarkdown", quietly = TRUE)) stop("Creating object reports requires the 'rmarkdown' package.", " To install this package run 'install.packages('rmarkdown')' in your R console.", call. = FALSE) if (!requireNamespace("pander", quietly = TRUE)) stop("Creating object reports requires the 'pander' package.", " To install this package run 'install.packages('pander')' in your R console.", call. = FALSE) if (!requireNamespace("rstudioapi", quietly = TRUE)) { warning("Creating object reports requires the 'rstudioapi' package.", " To install this package run 'install.packages('rstudioapi')' in your R console.", call. = FALSE) isRStudio <- FALSE } else { isRStudio <- TRUE } # check if files exist if (!is.null(css.file)) if(!file.exists(css.file)) stop("Couldn't find the specified CSS file at '", css.file, "'", call. 
= FALSE) ## ------------------------------------------------------------------------ ## ## STRUCTURE ---- structure <- list(header = TRUE, main = TRUE, structure = ifelse(compact, FALSE, TRUE), rds = TRUE, session = ifelse(compact, FALSE, TRUE), plot = TRUE) # specifying report components has higher precedence than the 'compact' arg structure <- modifyList(structure, list(...)) ## OPTIONS ---- options <- list(short_table = ifelse(compact, TRUE, FALSE), theme = "cerulean", highlight = "haddock", css = TRUE) options <- modifyList(options, list(...)) ## CSS DEFAULTS ---- css <- list(font_family = "arial", headings_size = "166%", content_color = "#a72925") css <- modifyList(css, list(...)) ## ------------------------------------------------------------------------ ## ## CREATE FILE ---- isTemp <- missing(file) # make sure the filename ends with .Rmd extension if (!grepl(".rmd$", file, ignore.case = TRUE)) file <- paste0(file, ".Rmd") # Timestamp: currently added as a suffix to the filename # if we were to change it to a prefix, we need to first figure out the filename # (i.e., separate it from the possible path) using the following regular # expression strsplit(string, "\\\\|\\\\\\\\|\\/|\\/\\/"). This looks for # \, \\, /, // and the last element is the filename. 
if (timestamp) file <- gsub(".rmd$", paste0(format(Sys.time(), "_%Y%b%d"), ".Rmd"), file, ignore.case = TRUE) # sanitize file name file <- gsub("\\\\", "\\/", file) file.html <- gsub(".rmd$", ".html", file, ignore.case = TRUE) file.rds <- gsub(".rmd$", ".Rds", file, ignore.case = TRUE) # Create and open the file file.create(file) tmp <- file(file, open = "w") # save RDS file saveRDS(object, file.rds) ## ------------------------------------------------------------------------ ## ## WRITE CONTENT ---- # HEADER ---- writeLines("---", tmp) writeLines("output:", tmp) writeLines(" html_document:", tmp) writeLines(" mathjax: null", tmp) writeLines(" title: RLum.Report", tmp) writeLines(paste(" theme:", options$theme), tmp) writeLines(paste(" highlight:", options$highlight), tmp) writeLines(" toc: true", tmp) writeLines(" toc_float: true", tmp) writeLines(" toc_depth: 6", tmp) if (!is.null(css.file)) writeLines(paste(" css:", css.file), tmp) writeLines(" md_extensions: -autolink_bare_uris", tmp) writeLines("---", tmp) # CASCADING STYLE SHEETS ---- if (options$css) { writeLines(paste0( "" ), tmp) } # INFO ---- # check if Luminescence package is installed and get details pkg <- as.data.frame(installed.packages(), row.names = FALSE) if ("Luminescence" %in% pkg$Package) pkg <- pkg[which(pkg$Package == "Luminescence"), ] else pkg <- data.frame(LibPath = "-", Version = "not installed", Built = "-") # Title writeLines(paste("

", title, "

\n\n
"), tmp) # write information on R, Luminescence package, Object if (structure$header) { writeLines(paste("**Date:**", Sys.time(), "\n\n", "**R version:**", R.version.string, "\n\n", "**Luminescence package** \n\n", "**  » Path:**", pkg$LibPath, "\n\n", "**  » Version:**", pkg$Version, "\n\n", "**  » Built:**", pkg$Built, "\n\n", "**Object** \n\n", "**  » Created:**", tryCatch(paste(paste(strsplit(object@.uid, '-|\\.')[[1]][1:3], collapse = "-"), strsplit(object@.uid, '-|\\.')[[1]][4]), error = function(e) "-"), "\n\n", "**  » Class:**", class(object), "\n\n", "**  » Originator:**", tryCatch(object@originator, error = function(e) "-"), "\n\n", "**  » Name:**", deparse(substitute(object)), "\n\n", "**  » Parent ID:**", tryCatch(object@.pid, error = function(e) "-"), "\n\n", "**  » Unique ID:**", tryCatch(object@.uid, error = function(e) "-"), "\n\n", "
"), tmp) if (isTemp) { writeLines(paste("Save report"), tmp) writeLines(paste("Save data \n\n"), tmp) } }#EndOf::Header # OBJECT ---- elements <- .struct_RLum(object, root = deparse(substitute(object))) if (structure$main) { for (i in 1:nrow(elements)) { # SKIP ELEMENT? # hide @.pid and @.uid if this is a shortened report (default) if (elements$bud[i] %in% c(".uid", ".pid") && compact == TRUE) next # HEADER short.name <- elements$bud[i] links <- gsub("[^@$\\[]", "", as.character(elements$branch[i])) type <- ifelse(nchar(links) == 0, "", substr(links, nchar(links), nchar(links))) if (type == "[") type = "" # HTML header level is determined by the elements depth in the object # exception: first row is always the object's name and has depth zero if (i == 1) hlevel <- "#" else hlevel <- paste(rep("#", elements$depth[i]), collapse = "") # write header; number of dots represents depth in the object. because there # may be duplicate header names, for each further occurence of a name # Zero-width non-joiner entities are added to the name (non visible) writeLines(paste0(hlevel, " ", "", paste(rep("..", elements$depth[i]), collapse = ""), type, "", paste(rep("‌", elements$bud.freq[i]), collapse = ""), short.name[length(short.name)], ifelse(elements$endpoint[i], "", "{#root}"), "\n\n"), tmp) # SUBHEADER # contains information on Class, Length, Dimensions, Path writeLines(paste0("
",
                        "",
                        " Class: ", elements$class[i],
                        "",
                        "   Length: ", elements$length[i],
                        "",
                        "   Dimensions: ",
                        ifelse(elements$row[i] != 0, paste0(elements$row[i], ", ", elements$col[i]), "-"),
                        "",
                        "\n Path: ", gsub("@", "@", elements$branch[i]),
                        "
", "\n\n"), tmp) # TABLE CONTENT # the content of a branch is only printed if it was determined an endpoint # in the objects structure if (elements$endpoint[i]) { table <- tryCatch(eval(parse(text = elements$branch[i])), error = function(e) { return(NULL) }) # exceptions: content may be NULL; convert raw to character to stay # compatible with pander::pander if (is.null(table) | length(table) == 0) table <- "NULL" if (any(class(table) == "raw")) table <- as.character(table) # exception: surround objects of class "call" with
 tags to prevent
        # HTML autoformatting
        if (elements$class[i] == "call") {
          table <- capture.output(table)
          writeLines("
", tmp)
          for (i in 1:length(table))
            writeLines(table[i], tmp)
          writeLines("
", tmp) table <- NULL } # shorten the table if it has more than 15 rows if (options$short_table) { if (is.matrix(table) || is.data.frame(table)) { if (nrow(table) > 15) { writeLines(pander::pander_return(rbind(head(table, 5), tail(table, 5)), caption = "shortened (only first and last five rows shown)"), tmp) next } } } # write table using pander and end each table with a horizontal line writeLines(pander::pander_return(table), tmp) writeLines("\n\n
", tmp) } } }#EndOf::Main # OBJECT STRUCTURE ---- if (structure$structure) { writeLines(paste("\n\n# Object structure\n\n"), tmp) elements.html <- elements elements.html$branch <- gsub("\\$", "$", elements$branch) writeLines(pander::pander_return(elements.html, justify = paste(rep("l", ncol(elements)), collapse = "")), tmp) writeLines("\n\n", tmp) }#EndOf::Structure if (structure$rds) { # SAVE SERIALISED OBJECT (.rds file) ---- writeLines(paste("
# File \n\n"), tmp) writeLines(paste0("", "", "Click here to access the data file", "", ""), tmp) writeLines(paste("\nThe R object was saved to ", file.rds, ".", "To import the object into your R session with the following command:", paste0("
",
                            "x <- readRDS('", file.rds, "')",
                            "
"), "**NOTE:** If you moved the file to another directory or", "renamed the file you need to change the path/filename in the", "code above accordingly!"), tmp) }#EndOf::File # SESSION INFO ---- if (structure$session) { writeLines(paste("\n\n
# Session Info\n\n"), tmp) sessionInfo <- capture.output(sessionInfo()) writeLines(paste(sessionInfo, collapse = "\n\n"), tmp) } # PLOTTING ---- if (structure$plot) { isRLumObject <- length(grep("RLum", class(object))) if (is.list(object)) isRLumList <- all(sapply(object, function(x) inherits(x, "RLum.Data.Curve"))) else isRLumList <- FALSE if (isRLumObject | isRLumList) { # mutual exclusivity: it is either a list or an RLum-Object if (isRLumList) plotCommand <- "invisible(sapply(x, plot)) \n" else plotCommand <- "plot(x) \n" writeLines(paste("\n\n
# Plots\n\n"), tmp) writeLines(paste0( "```{r}\n", "library(Luminescence) \n", "x <- readRDS('", file.rds,"') \n", plotCommand, "```"), tmp) if (inherits(object, "RLum.Results")) { # AGE MODELS ---- models <- c("calc_CommonDose", "calc_CentralDose", "calc_FiniteMixture", "calc_MinDose", "calc_MaxDose", "calc_IEU", "calc_FuchsLang2001") if (object@originator %in% models) { writeLines(paste0( "```{r}\n", "plot_AbanicoPlot(x) \n", "plot_Histogram(x) \n", "plot_KDE(x) \n", "plot_ViolinPlot(x) \n", "```"), tmp) } } } }#EndOf::Plot ## ------------------------------------------------------------------------ ## ## CLOSE & RENDER ---- close(tmp) on.exit(closeAllConnections()) rmarkdown::render(file, clean = clean, quiet = quiet) ## ------------------------------------------------------------------------ ## ## SHOW FILE ----- # SHOW REPORT IN RSTUDIOS VIEWER PANE ---- if (isRStudio && show_report) { if (isTemp) { try(rstudioapi::viewer(file.html)) } else { # The Viewer Pane only works for files in a sessions temp directory # see: https://support.rstudio.com/hc/en-us/articles/202133558-Extending-RStudio-with-the-Viewer-Pane file.copy(file.html, file.path(tempdir(), "report.html"), overwrite = TRUE) try(rstudioapi::viewer(file.path(tempdir(), "report.html"))) } } # launch browser if desired # browseURL() listens on localhost to show the file with the problem that # the download links dont work anymore. 
  # hence, we try to open the file
  # with pander::openFileInOS and use browseURL() only as fallback
  if (launch.browser) {
    opened <- tryCatch(pander::openFileInOS(file.html), error = function(e) "error")
    if (!is.null(opened))
      try(browseURL(file.html))
  }

  ## ------------------------------------------------------------------------ ##
  ## CLEANUP ----
  # note that 'clean' is also passed to rmarkdown::render
  if (clean)
    file.remove(file)

  invisible()
}

################################################################################
##                                                                            ##
##                           HELPER FUNCTIONS                                 ##
##                                                                            ##
################################################################################

# ---------------------------------------------------------------------------- #
# This is a recursive function that goes the objects structure and prints
# all slots/elements along with their class, length, depth.
#
# Each visited node is emitted as ONE "|"-separated record via cat():
#   branch|class|length|depth|endpoint|row|col
# .struct_RLum() below captures and re-parses these records, so the field
# order and the "|" separator are part of the (internal) contract.
# ---------------------------------------------------------------------------- #
.tree_RLum <- function(x, root) {
  # default root label: the expression the caller passed in
  if (missing(root))
    root <- deparse(substitute(x))

  ## S4 object -----
  if (isS4(x)) {
    # print ----- (endpoint = FALSE: an S4 object is always recursed into)
    cat(c(root, .class(x), base::length(x), .depth(root), FALSE, .dimension(x), "\n"), sep = "|")
    for (slot in slotNames(x)) {
      # slots are addressed with the "@" accessor
      s4.root <- paste0(root, "@", slot)
      .tree_RLum(slot(x, slot), root = s4.root)
    }
    invisible()

  ## List objects -----
  # NOTE(review): '&' binds tighter than '|' here, so this reads as
  # inherits(x, "list") | (typeof(x) == "list" & !inherits(x, "data.frame"));
  # data.frames therefore fall through to the branch below as intended.
  } else if (inherits(x, "list") | typeof(x) == "list" & !inherits(x, "data.frame")) {
    if (!is.null(names(x)) && length(x) != 0) {
      # print ----- (named list: children addressed with "$name")
      cat(c(root, .class(x), base::length(x), .depth(root), FALSE, .dimension(x), "\n"), sep = "|")
      element <- names(x)
      for (i in 1:length(x)) {
        # names containing spaces must be backtick-quoted to stay parseable
        if (grepl(" ", element[i]))
          element[i] <- paste0("`", element[i], "`")
        # empty names fall back to positional "[[i]]" access
        if (element[i] == "")
          list.root <- paste0(root, "[[", i, "]]")
        else
          list.root <- paste0(root, "$", element[i])
        .tree_RLum(x[[i]], root = list.root)
      }
    } else if (length(x) != 0) {
      # print ----- (unnamed list: children addressed positionally)
      cat(c(root, .class(x), base::length(x), .depth(root), FALSE, .dimension(x), "\n"), sep = "|")
      element <- paste0("[[", seq(1, length(x), 1), "]]")
      for (i in 1:length(x)) {
        if (grepl(" ", element[i]))
          element[i] <- paste0("`", element[i], "`")
        list.root <- paste0(root, element[i])
        .tree_RLum(x[[i]], root = list.root)
      }
    } else if (length(x) == 0) {
      # empty list: record it, nothing to recurse into
      cat(c(root, .class(x), base::length(x), .depth(root), FALSE, .dimension(x), "\n"), sep = "|")
    }
    invisible()

  ## Data frames -----
  } else if (inherits(x, "data.frame")) {
    # a data.frame with matrix columns cannot be rendered as one table,
    # so recurse into its columns instead
    if (any(sapply(x, function(col) { inherits(col, "matrix") }))) {
      element <- names(x)
      for (i in 1:length(x)) {
        if (grepl(" ", element[i]))
          element[i] <- paste0("`", element[i], "`")
        list.root <- paste0(root, "$", element[[i]])
        .tree_RLum(x[[i]], root = list.root)
      }
    } else {
      # print ---- (endpoint = TRUE: plain data.frame is a leaf)
      cat(c(root, .class(x), base::length(x), .depth(root), TRUE, .dimension(x), "\n"), sep = "|")
    }
    invisible()

  ## Last elements -----
  } else {
    # print ---- (any other object is a leaf/endpoint)
    cat(c(root, .class(x), base::length(x), .depth(root), TRUE, .dimension(x), "\n"), sep = "|")
    invisible()
  }
}

# ---------------------------------------------------------------------------- #
# a) Derive depth in the structure tree by splitting the directory by
#    indicative accessors @, $, [[
# b) Wrapper for dim() to cope with NULL values
# c) Wrapper for class() that collapses the classes of an object
# ---------------------------------------------------------------------------- #

# (a) depth = number of accessor hops in the branch string
.depth <- function(x) {
  length(strsplit(x, split = "\\$|@|\\[\\[")[[1]]) - 1
}

# (b) NOTE(review): relies on R returning the value of the final assignment;
# the if-branch yields a single "row|col" string while the else-branch yields
# the length-2 numeric c(0, 0) -- both are flattened identically by the
# cat(..., sep = "|") calls in .tree_RLum(), so downstream parsing works.
.dimension <- function(x) {
  if (!is.null(dim(x)))
    dim <- paste(dim(x), collapse = "|")
  else
    dim <- c(0, 0)
}

# (c) collapse multi-class vectors into one "a/b" token for the record
.class <- function(x) {
  paste(class(x), collapse = "/")
}

# ---------------------------------------------------------------------------- #
# This function captures the output of the real worker .tree_RLum and returns
# the structure of the object as a data.frame
# (columns: branch, bud, bud.freq, class, length, depth, row, col, endpoint)
# ---------------------------------------------------------------------------- #
.struct_RLum <- function(x, root) {
  if (missing(root))
    root <- deparse(substitute(x))

  # capture the "|"-separated records printed by .tree_RLum and split them
  # back into fields (order must match the cat() calls above)
  s <- capture.output(.tree_RLum(x, root = root))
  df <- as.data.frame(do.call(rbind, strsplit(s, "|", fixed = TRUE)),
                      stringsAsFactors = FALSE)
  names(df) <- c("branch", "class", "length", "depth", "endpoint", "row", "col")
  # restore column types (everything arrives as character from capture.output)
  df$depth <- as.integer(df$depth)
  df$length <- as.numeric(df$length)
  df$endpoint <- as.logical(df$endpoint)
  df$row <- as.integer(df$row)
  df$col <- as.integer(df$col)

  # 'bud' = last path component of the branch (the element's own name)
  df$bud <- do.call(c, lapply(strsplit(df$branch, "\\$|@|\\[\\["),
                              function(x) x[length(x)]))
  # re-attach the "[[" stripped off by the split for positional accessors
  if (length(grep("]", df$bud)) != 0)
    df$bud[grep("]", df$bud)] <- paste0("[[", df$bud[grep("]", df$bud)])
  df$bud.freq <- NA # 1:nrow(df)

  # reorder data.frame
  df <- df[, c("branch", "bud", "bud.freq", "class", "length",
               "depth", "row", "col", "endpoint")]

  # for the report we must not have the same last element names of same
  # depth (HTML cannot discriminate between #links of headers)
  ## TODO: this is highly inefficient for unnamed list due to recurrent indices
  dlevel <- max(table(df$bud))
  for (i in 1:dlevel) {
    # assign each occurrence of a duplicate bud name an increasing counter
    unique.bud <- unique(df[is.na(df$bud.freq), ]$bud)
    df[is.na(df$bud.freq), ][match(unique.bud, df[is.na(df$bud.freq), ]$bud), ]$bud.freq <- i - 1
  }

  invisible(df)
}
Luminescence/R/convert_PSL2CSV.R0000644000176200001440000000514713232572354016061 0ustar liggesusers#' Export PSL-file(s) to CSV-files
#'
#' This function is a wrapper function around the functions [read_PSL2R] and
#' [write_RLum2CSV] and it imports an PSL-file (SUERC portable OSL reader file format)
#' and directly exports its content to CSV-files.
#' If nothing is set for the argument `path` ([write_RLum2CSV]) the input folder will
#' become the output folder.
#'
#' @param file [character] (**required**):
#' name of the PSL-file to be converted to CSV-files
#'
#' @param ...
further arguments that will be passed to the function
#' [read_PSL2R] and [write_RLum2CSV]
#'
#' @return
#' The function returns either a CSV-file (or many of them) or for the option
#' `export = FALSE` a list comprising objects of type [data.frame] and [matrix]
#'
#' @section Function version: 0.1.0
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @seealso [RLum.Analysis-class], [RLum.Data-class], [RLum.Results-class],
#' [utils::write.table], [write_RLum2CSV], [read_PSL2R]
#'
#' @keywords IO
#'
#' @examples
#'
#' \dontrun{
#' ##select your BIN-file
#' file <- file.choose()
#'
#' ##convert
#' convert_PSL2CSV(file)
#'
#' }
#'
#' @md
#' @export
convert_PSL2CSV <- function(
  file,
  ...
){

  # General tests -------------------------------------------------------------------------------

  ##file is missing?
  if(missing(file)){
    stop("[convert_PSL2CSV()] 'file' is missing!", call. = FALSE)
  }

  ##set input arguments
  # defaults mirror the read_PSL2R()/write_RLum2CSV() signatures; each can be
  # overridden via '...'
  convert_PSL2R_settings.default <- list(
    drop_bg = FALSE,
    as_decay_curve = TRUE,
    smooth = FALSE,
    merge = FALSE,
    export = TRUE
  )

  ##modify list on demand
  convert_PSL2R_settings <- modifyList(x = convert_PSL2R_settings.default, val = list(...))

  # Import file ---------------------------------------------------------------------------------
  # an already imported RLum object is passed through unchanged; only a file
  # path triggers the actual import
  if(!inherits(file, "RLum")){
    object <- read_PSL2R(
      file = file,
      drop_bg = convert_PSL2R_settings$drop_bg,
      as_decay_curve = convert_PSL2R_settings$as_decay_curve,
      smooth = convert_PSL2R_settings$smooth,
      merge = convert_PSL2R_settings$merge
    )
  }else{
    object <- file
  }

  # Export to CSV -------------------------------------------------------------------------------

  ##get all arguments we want to pass and remove the doubled one
  # (named arguments set explicitly above win over duplicates from '...')
  arguments <- c(list(object = object, export = convert_PSL2R_settings$export), list(...))
  arguments[duplicated(names(arguments))] <- NULL

  ##this if-condition prevents NULL in the terminal
  if(convert_PSL2R_settings$export == TRUE){
    invisible(do.call("write_RLum2CSV", arguments))
  }else{
    do.call("write_RLum2CSV", arguments)
  }
}
Luminescence/R/analyse_IRSAR.RF.R0000644000176200001440000022517413571743147016131 0ustar liggesusers#' Analyse IRSAR RF measurements
#'
#' Function to analyse IRSAR RF measurements on K-feldspar samples, performed
#' using the protocol according to Erfurt et al. (2003) and beyond.
#'
#' The function performs an IRSAR analysis described for K-feldspar samples by
#' Erfurt et al. (2003) assuming a negligible sensitivity change of the RF
#' signal.
#'
#' **General Sequence Structure** (according to Erfurt et al., 2003)
#'
#'
#' 1. Measuring IR-RF intensity of the natural dose for a few seconds (\eqn{RF_{nat}})
#' 2. Bleach the samples under solar conditions for at least 30 min without changing the geometry
#' 3. Waiting for at least one hour
#' 4. Regeneration of the IR-RF signal to at least the natural level (measuring (\eqn{RF_{reg}})
#' 5. Fitting data with a stretched exponential function
#' 6. Calculate the palaeodose \eqn{D_{e}} using the parameters from the fitting
#'
#'
#' Actually two methods are supported to obtain the \eqn{D_{e}}:
#' `method = "FIT"` and `method = "SLIDE"`:
#'
#' **`method = "FIT"`**
#'
#' The principle is described above and follows the original suggestions by
#' Erfurt et al., 2003. For the fitting the mean count value of the RF_nat curve is used.
#'
#' Function used for the fitting (according to Erfurt et al. (2003)):
#'
#' \deqn{\phi(D) = \phi_{0}-\Delta\phi(1-exp(-\lambda*D))^\beta}
#'
#' with
#' \eqn{\phi(D)} the dose dependent IR-RF flux,
#' \eqn{\phi_{0}} the initial IR-RF flux,
#' \eqn{\Delta\phi} the dose dependent change of the IR-RF flux,
#' \eqn{\lambda} the exponential parameter, \eqn{D} the dose and
#' \eqn{\beta} the dispersive factor.
#'
#' To obtain the palaeodose
#' \eqn{D_{e}} the function is changed to:
#'
#' \deqn{D_{e} = ln(-(\phi(D) - \phi_{0})/(-\lambda*\phi)^{1/\beta}+1)/-\lambda}
#'
#' The fitting is done using the `port` algorithm of the [nls] function.
#' #' #' **`method = "SLIDE"`** #' #' For this method, the natural curve is slid along the x-axis until #' congruence with the regenerated curve is reached. Instead of fitting this #' allows working with the original data without the need for any physical #' model. This approach was introduced for RF curves by Buylaert et al., 2012 #' and Lapp et al., 2012. #' #' Here the sliding is done by searching for the minimum of the squared residuals. #' For the mathematical details of the implementation see Frouin et al., 2017 #' #' **`method.control`** #' #' To keep the generic argument list as clear as possible, arguments to control the methods #' for De estimation are all preset with meaningful default parameters and can be #' handled using the argument `method.control` only, e.g., #' `method.control = list(trace = TRUE)`. Supported arguments are: #' #' \tabular{lll}{ #' **ARGUMENT** \tab **METHOD** \tab **DESCRIPTION**\cr #' `trace` \tab `FIT`, `SLIDE` \tab as in [nls]; shows sum of squared residuals\cr #' `trace_vslide` \tab `SLIDE` \tab [logical] argument to enable or disable the tracing of the vertical sliding\cr #' `maxiter` \tab `FIT` \tab as in [nls]\cr #' `warnOnly` \tab `FIT` \tab as in [nls]\cr #' `minFactor` \tab `FIT` \tab as in [nls]\cr #' `correct_onset` \tab `SLIDE` \tab The logical argument shifts the curves along the x-axis by the first channel, #' as light is expected in the first channel. The default value is `TRUE`.\cr #' `show_density` \tab `SLIDE` \tab [logical] (*with default*) #' enables or disables KDE plots for MC run results. If the distribution is too narrow nothing is shown.\cr #' `show_fit` \tab `SLIDE` \tab [logical] (*with default*) #' enables or disables the plot of the fitted curve routinely obtained during the evaluation.\cr #' `n.MC` \tab `SLIDE` \tab [integer] (*with default*): #' This controls the number of MC runs within the sliding (assessing the possible minimum values). #' The default `n.MC = 1000`. 
Note: This parameter is not the same as controlled by the
#' function argument `n.MC`. \cr
#' `vslide_range` \tab `SLIDE` \tab [logical] or [numeric] or [character] (*with default*):
#' This argument sets the boundaries for a vertical curve
#' sliding. The argument expects a vector with an absolute minimum and a maximum (e.g., `c(-1000,1000)`).
#' Alternatively the values `NULL` and `'auto'` are allowed. The automatic mode detects the
#' reasonable vertical sliding range (**recommended**). `NULL` applies no vertical sliding.
#' The default is `NULL`.\cr
#' `cores` \tab `SLIDE` \tab `number` or `character` (*with default*): set number of cores to be allocated
#' for a parallel processing of the Monte-Carlo runs. The default value is `NULL` (single thread),
#' the recommended value is `'auto'`. An optional number (e.g., `cores` = 8) assigns a value manually.
#' }
#'
#'
#' **Error estimation**
#'
#' For **`method = "FIT"`** the asymmetric error range is obtained by using the 2.5 % (lower) and
#' the 97.5 % (upper) quantiles of the \eqn{RF_{nat}} curve for calculating the \eqn{D_{e}} error range.
#'
#' For **`method = "SLIDE"`** the error is obtained by bootstrapping the residuals of the slided
#' curve to construct new natural curves for a Monte Carlo simulation. The error is returned in two
#' ways: (a) the standard deviation of the herewith obtained \eqn{D_{e}} from the MC runs and (b) the confidence
#' interval using the 2.5 % (lower) and the 97.5 % (upper) quantiles. The results of the MC runs
#' are returned with the function output.
#'
#' **Test parameters**
#'
#' The argument `test_parameters` allows to pass some thresholds for several test parameters,
#' which will be evaluated during the function run. If a threshold is set and it will be exceeded the
Intentionally this parameter is not termed #' 'rejection criteria' as not all test parameters are evaluated for both methods and some parameters #' are calculated by not evaluated by default. Common for all parameters are the allowed argument options #' `NA` and `NULL`. If the parameter is set to `NA` the value is calculated but the #' result will not be evaluated, means it has no effect on the status ("OK" or "FAILED") of the parameter. #' Setting the parameter to `NULL` disables the parameter entirely and the parameter will be #' also removed from the function output. This might be useful in cases where a particular parameter #' asks for long computation times. Currently supported parameters are: #' #' `curves_ratio` [numeric] (default: `1.001`): #' #' The ratio of \eqn{RF_{nat}} over \eqn{RF_{reg}} in the range of\eqn{RF_{nat}} of is calculated #' and should not exceed the threshold value. #' #' `intersection_ratio` [numeric] (default: `NA`): #' #' Calculated as absolute difference from 1 of the ratio of the integral of the normalised RF-curves, #' This value indicates intersection of the RF-curves and should be close to 0 if the curves #' have a similar shape. For this calculation first the corresponding time-count pair value on the RF_reg #' curve is obtained using the maximum count value of the RF_nat curve and only this segment (fitting to #' the RF_nat curve) on the RF_reg curve is taken for further calculating this ratio. If nothing is #' found at all, `Inf` is returned. #' #' `residuals_slope` [numeric] (default: `NA`; only for `method = "SLIDE"`): #' #' A linear function is fitted on the residuals after sliding. #' The corresponding slope can be used to discard values as a high (positive, negative) slope #' may indicate that both curves are fundamentally different and the method cannot be applied at all. #' Per default the value of this parameter is calculated but not evaluated. 
#' #' `curves_bounds` [numeric] (default: \eqn{max(RF_{reg_counts})}: #' #' This measure uses the maximum time (x) value of the regenerated curve. #' The maximum time (x) value of the natural curve cannot be larger than this value. However, although #' this is not recommended the value can be changed or disabled. #' #' `dynamic_ratio` [numeric] (default: `NA`): #' #' The dynamic ratio of the regenerated curve is calculated as ratio of the minimum and maximum count values. #' #' `lambda`, `beta` and `delta.phi` #' [numeric] (default: `NA`; `method = "SLIDE"`): #' #' The stretched exponential function suggested by Erfurt et al. (2003) describing the decay of #' the RF signal, comprises several parameters that might be useful to evaluate the shape of the curves. #' For `method = "FIT"` this parameter is obtained during the fitting, for `method = "SLIDE"` a #' rather rough estimation is made using the function [minpack.lm::nlsLM] and the equation #' given above. Note: As this procedure requests more computation time, setting of one of these three parameters #' to `NULL` also prevents a calculation of the remaining two. #' #' #' @param object [RLum.Analysis-class] or a [list] of [RLum.Analysis-class]-objects (**required**): #' input object containing data for protocol analysis. The function expects to #' find at least two curves in the [RLum.Analysis-class] object: (1) RF_nat, (2) RF_reg. #' If a `list` is provided as input all other parameters can be provided as #' `list` as well to gain full control. #' #' @param sequence_structure [vector] [character] (*with default*): #' specifies the general sequence structure. Allowed steps are `NATURAL`, `REGENERATED`. #' In addition any other character is allowed in the sequence structure; #' such curves will be ignored during the analysis. #' #' @param RF_nat.lim [vector] (*with default*): #' set minimum and maximum channel range for natural signal fitting and sliding. 
#' If only one value is provided this will be treated as minimum value and the #' maximum limit will be added automatically. #' #' @param RF_reg.lim [vector] (*with default*): #' set minimum and maximum channel range for regenerated signal fitting and sliding. #' If only one value is provided this will be treated as minimum value and the #' maximum limit will be added automatically. #' #' @param method [character] (*with default*): #' setting method applied for the data analysis. #' Possible options are `"FIT"` or `"SLIDE"`. #' #' @param method.control [list] (*optional*): #' parameters to control the method, that can be passed to the chosen method. #' These are for (1) `method = "FIT"`: 'trace', 'maxiter', 'warnOnly', 'minFactor' and for #' (2) `method = "SLIDE"`: 'correct_onset', 'show_density', 'show_fit', 'trace'. #' See details. #' #' @param test_parameters [list] (*with default*): #' set test parameters. Supported parameters are: `curves_ratio`, `residuals_slope` (only for #' `method = "SLIDE"`), `curves_bounds`, `dynamic_ratio`, #' `lambda`, `beta` and `delta.phi`. All input: [numeric] #' values, `NA` and `NULL` (s. Details) #' #' (see Details for further information) #' #' @param n.MC [numeric] (*with default*): #' set number of Monte Carlo runs for start parameter estimation (`method = "FIT"`) or #' error estimation (`method = "SLIDE"`). This value can be set to `NULL` to skip the #' MC runs. Note: Large values will significantly increase the computation time #' #' @param txtProgressBar [logical] (*with default*): #' enables `TRUE` or disables `FALSE` the progression bar during MC runs #' #' @param plot [logical] (*with default*): #' plot output (`TRUE` or `FALSE`) #' #' @param plot_reduced [logical] (*optional*): #' provides a reduced plot output if enabled to allow common R plot combinations, #' e.g., `par(mfrow(...))`. If `TRUE` no residual plot #' is returned; it has no effect if `plot = FALSE` #' #' @param ... 
further arguments that will be passed to the plot output. #' Currently supported arguments are `main`, `xlab`, `ylab`, #' `xlim`, `ylim`, `log`, `legend` (`TRUE/FALSE`), #' `legend.pos`, `legend.text` (passes argument to x,y in #' [graphics::legend]), `xaxt` #' #' #' @return #' The function returns numerical output and an (*optional*) plot. #' #' -----------------------------------\cr #' `[ NUMERICAL OUTPUT ]`\cr #' -----------------------------------\cr #' #' **`RLum.Results`**-object #' #' **slot:** **`@data`** #' #' `[.. $data : data.frame]` #' #' \tabular{lll}{ #' **Column** \tab **Type** \tab **Description**\cr #' `DE` \tab `numeric` \tab the obtained equivalent dose\cr #' `DE.ERROR` \tab `numeric` \tab (only `method = "SLIDE"`) standard deviation obtained from MC runs \cr #' `DE.LOWER` \tab `numeric`\tab 2.5% quantile for De values obtained by MC runs \cr #' `DE.UPPER` \tab `numeric`\tab 97.5% quantile for De values obtained by MC runs \cr #' `DE.STATUS` \tab `character`\tab test parameter status\cr #' `RF_NAT.LIM` \tab `charcter`\tab used RF_nat curve limits \cr #' `RF_REG.LIM` \tab `character`\tab used RF_reg curve limits\cr #' `POSITION` \tab `integer`\tab (*optional*) position of the curves\cr #' `DATE` \tab `character`\tab (*optional*) measurement date\cr #' `SEQUENCE_NAME` \tab `character`\tab (*optional*) sequence name\cr #' `UID` \tab `character`\tab unique data set ID #' } #' #' `[.. $De.MC : numeric]` #' #' A `numeric` vector with all the De values obtained by the MC runs. #' #' `[.. 
$test_parameters : data.frame]` #' #' \tabular{lll}{ #' **Column** \tab **Type** \tab **Description**\cr #' `POSITION` \tab `numeric` \tab aliquot position \cr #' `PARAMETER` \tab `character` \tab test parameter name \cr #' `THRESHOLD` \tab `numeric` \tab set test parameter threshold value \cr #' `VALUE` \tab `numeric` \tab the calculated test parameter value (to be compared with the threshold)\cr #' `STATUS` \tab `character` \tab test parameter status either `"OK"` or `"FAILED"` \cr #' `SEQUENCE_NAME` \tab `character` \tab name of the sequence, so far available \cr #' `UID` \tab `character`\tab unique data set ID #' } #' #' `[.. $fit : data.frame]` #' #' An [nls] object produced by the fitting. #' #' `[.. $slide : list]` #' #' A [list] with data produced during the sliding. Some elements are previously #' reported with the summary object data. List elements are: #' #' \tabular{lll}{ #' **Element** \tab **Type** \tab **Description**\cr #' `De` \tab `numeric` \tab the final De obtained with the sliding approach \cr #' `De.MC` \tab `numeric` \tab all De values obtained by the MC runs \cr #' `residuals` \tab `numeric` \tab the obtained residuals for each channel of the curve \cr #' `trend.fit` \tab `lm` \tab fitting results produced by the fitting of the residuals \cr #' `RF_nat.slided` \tab `matrix` \tab the slided RF_nat curve \cr #' `t_n.id` \tab `numeric` \tab the index of the t_n offset \cr #' `I_n` \tab `numeric` \tab the vertical intensity offset if a vertical slide was applied \cr #' `algorithm_error` \tab `numeric` \tab the vertical sliding suffers from a systematic effect induced by the used #' algorithm. The returned value is the standard deviation of all obtained De values while expanding the #' vertical sliding range. 
It can be added as a systematic error to the final De error, if desired.\cr
#' `vslide_range` \tab `numeric` \tab the range used for the vertical sliding \cr
#' `squared_residuals` \tab `numeric` \tab the squared residuals (horizontal sliding)
#' }
#'
#'
#' **slot:** **`@info`**
#'
#' The original function call ([methods::language-class][methods::LanguageClasses]-object)
#'
#' The output (`data`) should be accessed using the function [get_RLum]
#'
#' ------------------------\cr
#' `[ PLOT OUTPUT ]`\cr
#' ------------------------\cr
#'
#' The slided IR-RF curves with the finally obtained De
#'
#' @note
#' This function assumes that there is no sensitivity change during the
#' measurements (natural vs. regenerated signal), which is in contrast to the
#' findings by Buylaert et al. (2012).
#'
#' @section Function version: 0.7.5
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France)
#'
#' @seealso [RLum.Analysis-class], [RLum.Results-class], [get_RLum],
#' [nls], [minpack.lm::nlsLM], `parallel::mclapply`
#'
#'
#' @references
#' Buylaert, J.P., Jain, M., Murray, A.S., Thomsen, K.J., Lapp, T.,
#' 2012. IR-RF dating of sand-sized K-feldspar extracts: A test of accuracy.
#' Radiation Measurements 44 (5-6), 560-565. doi: 10.1016/j.radmeas.2012.06.021
#'
#' Erfurt, G., Krbetschek, M.R., 2003. IRSAR - A single-aliquot
#' regenerative-dose dating protocol applied to the infrared radiofluorescence
#' (IR-RF) of coarse- grain K-feldspar. Ancient TL 21, 35-42.
#'
#' Erfurt, G., 2003. Infrared luminescence of Pb+ centres in potassium-rich
#' feldspars. physica status solidi (a) 200, 429-438.
#'
#' Erfurt, G., Krbetschek, M.R., 2003. Studies on the physics of the infrared
#' radioluminescence of potassium feldspar and on the methodology of its
#' application to sediment dating. Radiation Measurements 37, 505-510.
#'
#' Erfurt, G., Krbetschek, M.R., Bortolot, V.J., Preusser, F., 2003.
A fully #' automated multi-spectral radioluminescence reading system for geochronometry #' and dosimetry. Nuclear Instruments and Methods in Physics Research Section #' B: Beam Interactions with Materials and Atoms 207, 487-499. #' #' Frouin, M., Huot, S., Kreutzer, S., Lahaye, C., Lamothe, M., Philippe, A., Mercier, N., 2017. #' An improved radiofluorescence single-aliquot regenerative dose protocol for K-feldspars. #' Quaternary Geochronology 38, 13-24. doi:10.1016/j.quageo.2016.11.004 #' #' Lapp, T., Jain, M., Thomsen, K.J., Murray, A.S., Buylaert, J.P., 2012. New #' luminescence measurement facilities in retrospective dosimetry. Radiation #' Measurements 47, 803-808. doi:10.1016/j.radmeas.2012.02.006 #' #' Trautmann, T., 2000. A study of radioluminescence kinetics of natural #' feldspar dosimeters: experiments and simulations. Journal of Physics D: #' Applied Physics 33, 2304-2310. #' #' Trautmann, T., Krbetschek, M.R., Dietrich, A., Stolz, W., 1998. #' Investigations of feldspar radioluminescence: potential for a new dating #' technique. Radiation Measurements 29, 421-425. #' #' Trautmann, T., Krbetschek, M.R., Dietrich, A., Stolz, W., 1999. Feldspar #' radioluminescence: a new dating method and its physical background. Journal #' of Luminescence 85, 45-58. #' #' Trautmann, T., Krbetschek, M.R., Stolz, W., 2000. A systematic study of the #' radioluminescence properties of single feldspar grains. Radiation #' Measurements 32, 685-690. 
#' #' #' @keywords datagen #' #' #' @examples #' #' ##load data #' data(ExampleData.RLum.Analysis, envir = environment()) #' #' ##(1) perform analysis using the method 'FIT' #' results <- analyse_IRSAR.RF(object = IRSAR.RF.Data) #' #' ##show De results and test paramter results #' get_RLum(results, data.object = "data") #' get_RLum(results, data.object = "test_parameters") #' #' ##(2) perform analysis using the method 'SLIDE' #' results <- analyse_IRSAR.RF(object = IRSAR.RF.Data, method = "SLIDE", n.MC = 1) #' #' \dontrun{ #' ##(3) perform analysis using the method 'SLIDE' and method control option #' ## 'trace #' results <- analyse_IRSAR.RF( #' object = IRSAR.RF.Data, #' method = "SLIDE", #' method.control = list(trace = TRUE)) #' #' } #' #' @md #' @export analyse_IRSAR.RF<- function( object, sequence_structure = c("NATURAL", "REGENERATED"), RF_nat.lim = NULL, RF_reg.lim = NULL, method = "FIT", method.control = NULL, test_parameters = NULL, n.MC = 10, txtProgressBar = TRUE, plot = TRUE, plot_reduced = FALSE, ... ){ ##TODO ## - if a file path is given, the function should try to find out whether an XSYG-file or ## a BIN-file is provided ## - add NEWS for vslide_range ## - update documentary ... if it works as expected. 
# SELF CALL ----------------------------------------------------------------------------------- if(is.list(object)){ ##extent the list of arguments if set ##sequence_structure sequence_structure <- rep(list(sequence_structure), length = length(object)) ##RF_nat.lim RF_nat.lim <- rep(list(RF_nat.lim), length = length(object)) ##RF_reg.lim RF_reg.lim <- rep(list(RF_reg.lim), length = length(object)) ##method method <- rep(list(method), length = length(object)) ##method.control method.control <- rep(list(method.control), length = length(object)) ##test_parameters if(is(test_parameters[[1]], "list")){ test_parameters <- rep(test_parameters, length = length(object)) }else{ test_parameters <- rep(list(test_parameters), length = length(object)) } ##n.MC n.MC <- rep(list(n.MC), length = length(object)) ##main if("main"%in% names(list(...))){ if(is(list(...)$main, "list")){ temp_main <- rep(list(...)$main, length = length(object)) }else{ temp_main <- rep(list(list(...)$main), length = length(object)) } }else{ temp_main <- as.list(paste0("ALQ #",1:length(object))) } ##run analysis temp <- lapply(1:length(object), function(x){ analyse_IRSAR.RF( object = object[[x]], sequence_structure = sequence_structure[[x]], RF_nat.lim = RF_nat.lim[[x]], RF_reg.lim = RF_reg.lim[[x]], method = method[[x]], method.control = method.control[[x]], test_parameters = test_parameters[[x]], n.MC = n.MC[[x]], txtProgressBar = txtProgressBar, plot = plot, plot_reduced = plot_reduced, main = temp_main[[x]], ...) }) ##combine everything to one RLum.Results object as this as what was written ... 
only ##one object ##merge results and check if the output became NULL results <- merge_RLum(temp) ##DO NOT use invisible here, this will stop the function from stopping if(length(results) == 0){ return(NULL) }else{ return(results) } } ##===============================================================================================# ## INTEGRITY TESTS AND SEQUENCE STRUCTURE TESTS ##===============================================================================================# ##MISSING INPUT if(missing("object")){ stop("[analyse_IRSAR.RF()] No input 'object' set!", call. = FALSE) } ##INPUT OBJECTS if(!is(object, "RLum.Analysis")){ stop("[analyse_IRSAR.RF()] Input object is not of type 'RLum.Analysis'!", call. = FALSE) } ##CHECK OTHER ARGUMENTS if(!is(sequence_structure, "character")){ stop("[analyse_IRSAR.RF()] argument 'sequence_structure' needs to be of type character.", call. = FALSE) } ##n.MC if((!is(n.MC, "numeric") || n.MC <= 0) && !is.null(n.MC)){ stop("[analyse_IRSAR.RF()] argument 'n.MC' has to be of type integer and >= 0", call. = FALSE) } ##SELECT ONLY MEASURED CURVES ## (this is not really necessary but rather user friendly) if(!length(suppressWarnings(get_RLum(object, curveType= "measured"))) == 0){ object <- get_RLum(object, curveType= "measured", drop = FALSE) } ##INVESTIGATE SEQUENCE OBJECT STRUCTURE ##grep object strucute temp.sequence_structure <- structure_RLum(object) ##check whether both curve have the same length, in this case we cannot proceed (sliding ##is not allowed) if(length(unique(temp.sequence_structure[["x.max"]])) == 1 && method == "SLIDE" && (is.null(RF_nat.lim) & is.null(RF_reg.lim))) { stop("[analyse_IRSAR.RF()] There is no further sliding space left. All curves have the same length and no limitation was set!", call. 
= FALSE) } ##grep name of the sequence and the position this will be useful later on ##name if (!is.null(suppressWarnings(get_RLum(get_RLum(object, record.id = 1), info.object = "name")))) { aliquot.sequence_name <- get_RLum(get_RLum(object, record.id = 1), info.object = "name") }else{ aliquot.sequence_name <- NA } ##position if (!is.null(suppressWarnings(get_RLum(get_RLum(object, record.id = 1), info.object = "position")))){ aliquot.position <- get_RLum(get_RLum(object, record.id = 1), info.object = "position") }else{ aliquot.position <- NA } ##date if (!is.null(suppressWarnings(get_RLum(get_RLum(object, record.id = 1), info.object = "startDate")))){ aliquot.date <- get_RLum(get_RLum(object, record.id = 1), info.object = "startDate") ##transform so far the format can be identified if (nchar(aliquot.date) == 14) { aliquot.date <- paste(c( substr(aliquot.date, 1,4),substr(aliquot.date, 5,6), substr(aliquot.date, 7,8) ), collapse = "-") } }else{ aliquot.date <- NA } ##set structure values temp.sequence_structure$protocol.step <- rep(sequence_structure, length_RLum(object))[1:length_RLum(object)] ##check if the first curve is shorter than the first curve if (temp.sequence_structure[which(temp.sequence_structure[["protocol.step"]] == "NATURAL"),"n.channels"] > temp.sequence_structure[which(temp.sequence_structure[["protocol.step"]] == "REGENERATED"),"n.channels"]) { stop("[analyse_IRSAR.RF()] Number of data channels in RF_nat > RF_reg. This is not supported!", call. 
= FALSE) } ##===============================================================================================# ## SET CURVE LIMITS ##===============================================================================================# ##the setting here will be valid for all subsequent operations ##01 ##first get allowed curve limits, this makes the subsequent checkings easier and the code ##more easier to read RF_nat.lim.default <- c(1,max( subset( temp.sequence_structure, temp.sequence_structure$protocol.step == "NATURAL" )$n.channels )) RF_reg.lim.default <- c(1,max( subset( temp.sequence_structure, temp.sequence_structure$protocol.step == "REGENERATED" )$n.channels )) ##02 - check boundaris ##RF_nat.lim if (is.null(RF_nat.lim) || is.na(RF_nat.lim)) { RF_nat.lim <- RF_nat.lim.default }else { ##this allows to provide only one boundary and the 2nd will be added automatically if (length(RF_nat.lim) == 1) { RF_nat.lim <- c(RF_nat.lim, RF_nat.lim.default[2]) } if (min(RF_nat.lim) < RF_nat.lim.default[1] | max(RF_nat.lim) > RF_nat.lim.default[2]) { RF_nat.lim <- RF_nat.lim.default warning(paste0( "RF_nat.lim out of bounds, reset to: RF_nat.lim = c(", paste(range(RF_nat.lim), collapse = ":") ),")", call. = FALSE) } } ##RF_reg.lim ## if (is.null(RF_reg.lim)) { RF_reg.lim <- RF_reg.lim.default }else { ##this allows to provide only one boundary and the 2nd will be added automatically if (length(RF_reg.lim) == 1) { RF_reg.lim <- c(RF_reg.lim, RF_reg.lim.default[2]) } if (min(RF_reg.lim) < RF_reg.lim.default[1] | max(RF_reg.lim) > RF_reg.lim.default[2]) { RF_reg.lim <- RF_reg.lim.default warning(paste0( "RF_reg.lim out of bounds, reset to: RF_reg.lim = c(", paste(range(RF_reg.lim), collapse = ":") ),")", call. = FALSE) } } ##check if intervalls make sense at all if(length(RF_reg.lim[1]:RF_reg.lim[2]) < RF_nat.lim[2]){ RF_reg.lim[2] <- RF_reg.lim[2] + abs(length(RF_reg.lim[1]:RF_reg.lim[2]) - RF_nat.lim[2]) + 1 warning(paste0("Length intervall RF_reg.lim < length RF_nat. 
Reset to RF_reg.lim = c(", paste(range(RF_reg.lim), collapse=":")),")", call. = FALSE) } # Method Control Settings --------------------------------------------------------------------- ##===============================================================================================# ## SET METHOD CONTROL PARAMETER - FOR BOTH METHODS ##===============================================================================================# ## ##set supported values with default method.control.settings <- list( trace = FALSE, trace_vslide = FALSE, maxiter = 500, warnOnly = FALSE, minFactor = 1 / 4096, correct_onset = TRUE, show_density = TRUE, show_fit = FALSE, n.MC = if(is.null(n.MC)){NULL}else{1000}, vslide_range = NULL, cores = NULL ) ##modify list if necessary if(!is.null(method.control)){ if(!is(method.control, "list")){ stop("[analyse_IRSAR.RF()] 'method.control' has to be of type 'list'!", call. = FALSE) } ##check whether this arguments are supported at all if (length(which( names(method.control) %in% names(method.control.settings) == FALSE ) != 0)) { temp.text <- paste0( "[analyse_IRSAR.RF()] Argument(s) '", paste(names(method.control)[which(names(method.control) %in% names(method.control.settings) == FALSE)], collapse = " and "), "' are not supported for 'method.control'. Supported arguments are: ", paste(names(method.control.settings), collapse = ", ") ) warning(temp.text, call. 
= FALSE) rm(temp.text) } ##modify list method.control.settings <- modifyList(x = method.control.settings, val = method.control) } ##===============================================================================================# ## SET PLOT PARAMETERS ##===============================================================================================# ##get channel resolution (should be equal for all curves, but if not the mean is taken) resolution.RF <- round(mean((temp.sequence_structure$x.max/temp.sequence_structure$n.channels)),digits=1) plot.settings <- list( main = "IR-RF", xlab = "Time [s]", ylab = paste0("IR-RF [cts/", resolution.RF," s]"), log = "", cex = 1, legend = TRUE, legend.text = c("RF_nat","RF_reg"), legend.pos = "top", xaxt = "s" ##xlim and ylim see below as they has to be modified differently ) ##modify list if something was set plot.settings <- modifyList(plot.settings, list(...)) ##=============================================================================# ## ANALYSIS ##=============================================================================# ##grep first regenerated curve RF_reg <- as.data.frame(object@records[[ temp.sequence_structure[temp.sequence_structure$protocol.step=="REGENERATED","id"]]]@data) ##correct of the onset of detection by using the first time value if (method == "SLIDE" & method.control.settings$correct_onset == TRUE) { RF_reg[,1] <- RF_reg[,1] - RF_reg[1,1] } RF_reg.x <- RF_reg[RF_reg.lim[1]:RF_reg.lim[2],1] RF_reg.y <- RF_reg[RF_reg.lim[1]:RF_reg.lim[2],2] ##grep values from natural signal RF_nat <- as.data.frame(object@records[[ temp.sequence_structure[temp.sequence_structure$protocol.step=="NATURAL","id"]]]@data) ##correct of the onset of detection by using the first time value if (method == "SLIDE" & method.control.settings$correct_onset == TRUE) { RF_nat[,1] <- RF_nat[,1] - RF_nat[1,1] } ##limit values to fit range (at least to the minimum) RF_nat.limited<- RF_nat[min(RF_nat.lim):max(RF_nat.lim),] ##calculate some 
useful parameters RF_nat.mean <- mean(RF_nat.limited[,2]) RF_nat.sd <- sd(RF_nat.limited[,2]) RF_nat.error.lower <- quantile(RF_nat.limited[,2], 0.975, na.rm = TRUE) RF_nat.error.upper <- quantile(RF_nat.limited[,2], 0.025, na.rm = TRUE) ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++# ##METHOD FIT ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++# ## REGENERATED SIGNAL # set function for fitting ------------------------------------------------ fit.function <- as.formula(y ~ phi.0 - (delta.phi * ((1 - exp( -lambda * x )) ^ beta))) ##stretched expontial function according to Erfurt et al. (2003) ## + phi.0 >> initial IR-RF flux ## + delta.phi >> dose dependent change of the IR-RF flux ## + lambda >> exponential parameter ## + beta >> dispersive factor # set start parameter estimation ------------------------------------------ fit.parameters.start <- c( phi.0 = max(RF_reg.y), lambda = 0.0001, beta = 1, delta.phi = 1.5 * (max(RF_reg.y) - min(RF_reg.y)) ) if(method == "FIT"){ # start nls fitting ------------------------------------------------------- ##Monte Carlo approach for fitting fit.parameters.results.MC.results <- data.frame() ##produce set of start paramters phi.0.MC <- rep(fit.parameters.start["phi.0"], n.MC) lambda.MC <- seq(0.0001, 0.001, by=(0.001-0.0001)/n.MC) beta.MC <- rep(fit.parameters.start["beta"], n.MC) delta.phi.MC <- rep(fit.parameters.start["delta.phi"], n.MC) ##start fitting loop for MC runs for(i in 1:n.MC){ fit.MC <- try(nls( fit.function, trace = FALSE, data = list(x = RF_reg.x, y = RF_reg.y), algorithm = "port", start = list( phi.0 = phi.0.MC[i], delta.phi = delta.phi.MC[i], lambda = lambda.MC[i], beta = beta.MC[i] ), nls.control( maxiter = 100, warnOnly = FALSE, minFactor = 1 / 1024 ), lower = c( phi.0 = .Machine$double.xmin, delta.phi = .Machine$double.xmin, lambda = .Machine$double.xmin, beta = .Machine$double.xmin ), upper = c( phi.0 = 
max(RF_reg.y), delta.phi = max(RF_reg.y), lambda = 1, beta = 100 ) ), silent = TRUE ) if(inherits(fit.MC,"try-error") == FALSE) { temp.fit.parameters.results.MC.results <- coef(fit.MC) fit.parameters.results.MC.results[i,"phi.0"] <- temp.fit.parameters.results.MC.results["phi.0"] fit.parameters.results.MC.results[i,"lambda"] <- temp.fit.parameters.results.MC.results["lambda"] fit.parameters.results.MC.results[i,"delta.phi"] <- temp.fit.parameters.results.MC.results["delta.phi"] fit.parameters.results.MC.results[i,"beta"] <- temp.fit.parameters.results.MC.results["beta"] } } ##FINAL fitting after successful MC if(length(na.omit(fit.parameters.results.MC.results)) != 0){ ##choose median as final fit version fit.parameters.results.MC.results <- sapply(na.omit(fit.parameters.results.MC.results), median) ##try final fitting fit <- try(nls( fit.function, trace = method.control.settings$trace, data = data.frame(x = RF_reg.x, y = RF_reg.y), algorithm = "port", start = list( phi.0 = fit.parameters.results.MC.results["phi.0"], delta.phi = fit.parameters.results.MC.results["delta.phi"], lambda = fit.parameters.results.MC.results["lambda"], beta = fit.parameters.results.MC.results["beta"] ), nls.control( maxiter = method.control.settings$maxiter, warnOnly = method.control.settings$warnOnly, minFactor = method.control.settings$minFactor ), lower = c( phi.0 = .Machine$double.xmin, delta.phi = .Machine$double.xmin, lambda = .Machine$double.xmin, beta = .Machine$double.xmin ), upper = c( phi.0 = max(RF_reg.y), delta.phi = max(RF_reg.y), lambda = 1, beta = 100 ) ), silent = FALSE ) }else{ fit <- NA class(fit) <- "try-error" } # get parameters ---------------------------------------------------------- # and with that the final De if (!inherits(fit,"try-error")) { fit.parameters.results <- coef(fit) }else{ fit.parameters.results <- NA } ##calculate De value if (!is.na(fit.parameters.results[1])) { De <- suppressWarnings(round(log( -((RF_nat.mean - fit.parameters.results["phi.0"]) / 
-fit.parameters.results["delta.phi"] ) ^ (1 / fit.parameters.results["beta"]) + 1 ) / -fit.parameters.results["lambda"], digits = 2)) ##This could be solved with a MC simulation, but for this the code has to be adjusted ##The question is: Where the parameters are coming from? ##TODO De.error <- NA De.lower <- suppressWarnings(round(log( -((RF_nat.error.lower - fit.parameters.results["phi.0"]) / -fit.parameters.results["delta.phi"] ) ^ (1 / fit.parameters.results["beta"]) + 1 ) / -fit.parameters.results["lambda"],digits = 2)) De.upper <- suppressWarnings(round(log( -((RF_nat.error.upper - fit.parameters.results["phi.0"]) / -fit.parameters.results["delta.phi"] ) ^ (1 / fit.parameters.results["beta"]) + 1 ) / -fit.parameters.results["lambda"],digits = 2)) }else{ De <- NA De.error <- NA De.lower <- NA De.upper <- NA } } ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++# ##METHOD SLIDE - ANALYSIS ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++# else if(method == "SLIDE"){ ##convert to matrix (in fact above the matrix data were first transfered to data.frames ... here ##we correct this ... again) RF_nat.limited <- as.matrix(RF_nat.limited) RF_reg.limited <- matrix(c(RF_reg.x, RF_reg.y), ncol = 2) RF_nat <- as.matrix(RF_nat) ##DEFINE FUNCTION FOR SLIDING ##FIND MINIMUM - this is done in a function so that it can be further used for MC simulations # sliding() ----------------------------------------------------------------------------------- sliding <- function(RF_nat, RF_nat.limited, RF_reg.limited, n.MC = method.control.settings$n.MC, vslide_range = method.control.settings$vslide_range, trace = method.control.settings$trace_vslide, numerical.only = FALSE){ ##check for odd user input if(length(vslide_range) > 2){ vslide_range <- vslide_range[1:2] warning("[anlayse_IRSAR.RF()] method.control = list(vslide_range) has more than 2 elements. Only the first two were used!", call. 
= FALSE) } ##(0) set objects ... nomenclature as used in Frouin et al., please note that here the index ##is used instead the real time values t_max.id <- nrow(RF_reg.limited) t_max_nat.id <- nrow(RF_nat.limited) t_min.id <- 1 t_min <- RF_nat.limited[1,1] ##(1) calculate sum of residual squares using internal Rcpp function #pre-allocate object temp.sum.residuals <- vector("numeric", length = t_max.id - t_max_nat.id) ##initialise slide range for specific conditions, namely NULL and "auto" if (is.null(vslide_range)) { vslide_range <- 0 } else if (vslide_range[1] == "auto") { vslide_range <- -(max(RF_reg.limited[, 2]) - min(RF_reg.limited[, 2])):(max(RF_reg.limited[, 2]) - min(RF_reg.limited[, 2])) algorithm_error <- NA } else{ vslide_range <- vslide_range[1]:vslide_range[2] algorithm_error <- NULL } ##problem: the optimisation routine slightly depends on the chosen input sliding vector ##and it might get trapped in a local minimum ##therefore we run the algorithm by expanding the sliding vector if(!is.null(vslide_range) && any(vslide_range != 0)){ ##even numbers makes it complicated, so let's make it odd if not already the case if(length(vslide_range) %% 2 == 0){ vslide_range <- c(vslide_range[1], vslide_range, vslide_range) } ##construct list of vector ranges we want to check for, this should avoid that we ##got trapped in a local minium median_vslide_range.index <- median(1:length(vslide_range)) vslide_range.list <- lapply(seq(1, median_vslide_range.index, length.out = 10), function(x){ c(median_vslide_range.index - as.integer(x), median_vslide_range.index + as.integer(x)) }) ##correct for out of bounds problem; it might occur vslide_range.list[[10]] <- c(0, length(vslide_range)) ##TODO ... this is not really optimal, but ok for the moment, better would be ##the algorithm finds sufficiently the global minimum. ##now run it in a loop and expand the range from the inner to the outer part ##at least this is considered for the final error range ... 
temp_minium_list <- lapply(1:10, function(x){ src_analyse_IRSARRF_SRS( values_regenerated_limited = RF_reg.limited[,2], values_natural_limited = RF_nat.limited[,2], vslide_range = vslide_range[vslide_range.list[[x]][1]:vslide_range.list[[x]][2]], n_MC = 0, #we don't need MC runs here, so make it quick trace = trace)[c("sliding_vector_min_index","vslide_minimum", "vslide_index")] }) ##get all horizontal index value for the local minimum (corresponding to the vslide) temp_hslide_indices <- vapply(temp_minium_list, function(x){ x$sliding_vector_min_index}, FUN.VALUE = numeric(length = 1)) ##get also the vertical slide indicies temp_vslide_indicies <- vapply(temp_minium_list, function(x){ x$vslide_index}, FUN.VALUE = numeric(length = 1)) ##get all the minimum values temp_minium <- vapply(temp_minium_list, function(x){x$vslide_minimum}, FUN.VALUE = numeric(length = 1)) ##get minimum and set it to the final range vslide_range <- vslide_range[ vslide_range.list[[which.min(temp_minium)]][1]:vslide_range.list[[which.min(temp_minium)]][2]] ##get all possible t_n values for the range expansion ... 
this can be considered ##as somehow systematic uncertainty, but it will be only calculated of the full range ##is considered, otherwise it is too biased by the user's choice ##ToDo: So far the algorithm error is not sufficiently documented if(!is.null(algorithm_error)){ algorithm_error <- sd(vapply(1:length(temp_vslide_indicies), function(k){ temp.sliding.step <- RF_reg.limited[temp_hslide_indices[k]] - t_min matrix(data = c(RF_nat[,1] + temp.sliding.step, RF_nat[,2] + temp_vslide_indicies[k]), ncol = 2)[1,1] }, FUN.VALUE = numeric(length = 1))) }else{ algorithm_error <- NA } }else{ algorithm_error <- NA } ##now run the final sliding with the identified range that corresponds to the minimum value temp.sum.residuals <- src_analyse_IRSARRF_SRS( values_regenerated_limited = RF_reg.limited[,2], values_natural_limited = RF_nat.limited[,2], vslide_range = vslide_range, n_MC = if(is.null(n.MC)){0}else{n.MC}, trace = trace ) #(2) get minimum value (index and time value) index_min <- which.min(temp.sum.residuals$sliding_vector) t_n.id <- index_min if (is.null(vslide_range)) { I_n <- 0 } else{ I_n <- vslide_range[temp.sum.residuals$vslide_index] } temp.sliding.step <- RF_reg.limited[t_n.id] - t_min ##(3) slide curve graphically ... 
full data set we need this for the plotting later RF_nat.slided <- matrix(data = c(RF_nat[,1] + temp.sliding.step, RF_nat[,2] + I_n), ncol = 2) t_n <- RF_nat.slided[1,1] ##the same for the MC runs of the minimum values if(!is.null(n.MC)) { t_n.MC <- vapply( X = 1:length(temp.sum.residuals$sliding_vector_min_MC), FUN = function(x) { ##get minimum for MC t_n.id.MC <- which( temp.sum.residuals$sliding_vector == temp.sum.residuals$sliding_vector_min_MC[x] ) ##there is low change to get two indicies, in ##such cases we should take the mean temp.sliding.step.MC <- RF_reg.limited[t_n.id.MC] - t_min if(length(temp.sliding.step.MC)>1){ t_n.MC <- (RF_nat[, 1] + mean(temp.sliding.step.MC))[1] }else{ t_n.MC <- (RF_nat[, 1] + temp.sliding.step.MC)[1] } return(t_n.MC) }, FUN.VALUE = vector(mode = "numeric", length = 1) ) } else{ t_n.MC <- NA_integer_ } ##(4) get residuals (needed to be plotted later) ## they cannot be longer than the RF_reg.limited curve if((t_n.id+length(RF_nat.limited[,2])-1) >= nrow(RF_reg.limited)){ residuals <- (RF_nat.limited[1:length(t_n.id:nrow(RF_reg.limited)),2] + I_n) - RF_reg.limited[t_n.id:nrow(RF_reg.limited), 2] }else{ residuals <- (RF_nat.limited[,2] + I_n) - RF_reg.limited[t_n.id:(t_n.id+length(RF_nat.limited[,2])-1), 2] } ##(4.1) calculate De from the first channel ... 
which is t_n here De <- round(t_n, digits = 2) De.MC <- round(t_n.MC, digits = 2) temp.trend.fit <- NA ##(5) calculate trend fit if(length(RF_nat.limited[,1]) > length(residuals)){ temp.trend.fit <- coef(lm(y~x, data.frame(x = RF_nat.limited[1:length(residuals),1], y = residuals))) }else{ temp.trend.fit <- coef(lm(y~x, data.frame(x = RF_nat.limited[,1], y = residuals))) } ##return values and limited if they are not needed if (numerical.only == FALSE) { return( list( De = De, De.MC = De.MC, residuals = residuals, trend.fit = temp.trend.fit, RF_nat.slided = RF_nat.slided, t_n.id = t_n.id, I_n = I_n, algorithm_error = algorithm_error, vslide_range = if(is.null(vslide_range)){NA}else{range(vslide_range)}, squared_residuals = temp.sum.residuals$sliding_vector ) ) }else{ return(list(De = De, De.MC = De.MC)) } }##end of function sliding() ##PERFORM sliding and overwrite values slide <- sliding( RF_nat = RF_nat, RF_nat.limited = RF_nat.limited, RF_reg.limited = RF_reg.limited ) ##write results in variables De <- slide$De residuals <- slide$residuals RF_nat.slided <- slide$RF_nat.slided I_n <- slide$I_n # ERROR ESTIMATION # MC runs for error calculation --------------------------------------------------------------- ##set residual matrix for MC runs, i.e. set up list of pseudo RF_nat curves as function ##(i.e., bootstrap from the natural curve distribution) if(!is.null(n.MC)){ slide.MC.list <- lapply(1:n.MC,function(x) { ##also here we have to account for the case that user do not understand ##what they are doing ... 
if(slide$t_n.id + nrow(RF_nat.limited)-1 > nrow(RF_reg.limited)){ cbind( RF_nat.limited[1:length(slide$t_n.id:nrow(RF_reg.limited)),1], (RF_reg.limited[slide$t_n.id:nrow(RF_reg.limited) ,2] + sample(residuals, size = length(slide$t_n.id:nrow(RF_reg.limited)), replace = TRUE) ) ) }else{ cbind( RF_nat.limited[,1], (RF_reg.limited[slide$t_n.id:(slide$t_n.id + nrow(RF_nat.limited)-1) ,2] + sample(residuals, size = nrow(RF_nat.limited), replace = TRUE) ) ) } }) ##set parallel calculation if wanted if(is.null(method.control.settings$cores)){ cores <- 1 }else{ ##case 'auto' if(method.control.settings$cores == 'auto'){ if(parallel::detectCores() <= 2){ warning("[analyse_IRSAR.RF()] For the multicore auto mode at least 4 cores are needed!", call. = FALSE) cores <- 1 }else{ cores <- parallel::detectCores() - 2 } }else if(is.numeric(method.control.settings$cores)){ if(method.control.settings$cores > parallel::detectCores()){ warning(paste0("[analyse_IRSAR.RF()] What do you want? Your machine has only ", parallel::detectCores(), " cores!"), call. = FALSE) ##assign all they have, it is not our problem cores <- parallel::detectCores() } else if (method.control.settings$cores >= 1 && method.control.settings$cores <= parallel::detectCores()) { cores <- method.control.settings$cores } else { # Negative values cores <- 1 } }else{ try(stop("[analyse_IRSAR.RF()] Invalid value for control argument 'cores'. Value set to 1", call. 
= FALSE)) cores <- 1 } ##return message if (cores == 1) message(paste("[analyse_IRSAR.RF()] Singlecore mode")) else message(paste("[analyse_IRSAR.RF()] Multicore mode using", cores, "cores...")) } ## SINGLE CORE ----- if (cores == 1) { if(txtProgressBar){ ##progress bar cat("\n\t Run Monte Carlo loops for error estimation\n") pb<-txtProgressBar(min=0, max=n.MC, initial=0, char="=", style=3) } De.MC <- sapply(1:n.MC, function(i) { # update progress bar if (txtProgressBar) setTxtProgressBar(pb, i) sliding( RF_nat = RF_nat, RF_reg.limited = RF_reg.limited, RF_nat.limited = slide.MC.list[[i]], numerical.only = TRUE )[[2]] }) ## MULTICORE ----- } else { ## Create the determined number of R copies cl <- parallel::makeCluster(cores) ##run MC runs De.MC <- parallel::parSapply(cl, X = slide.MC.list, FUN = function(x){ sliding( RF_nat = RF_nat, RF_reg.limited = RF_reg.limited, RF_nat.limited = x, numerical.only = TRUE )[[2]] }) ##destroy multicore cluster parallel::stopCluster(cl) } ##calculate absolute deviation between De and the here newly calculated De.MC ##this is, e.g. ^t_n.1* - ^t_n in Frouin et al. De.diff <- diff(x = c(De, De.MC)) De.error <- round(sd(De.MC), digits = 2) De.lower <- De - quantile(De.diff, 0.975, na.rm = TRUE) De.upper <- De - quantile(De.diff, 0.025, na.rm = TRUE) }else{ De.diff <- NA_integer_ De.error <- NA_integer_ De.lower <- NA_integer_ De.upper <- NA_integer_ De.MC <- NA_integer_ } }else{ warning("[analyse_IRSAR.RF()] Analysis skipped: Unknown method or threshold of test parameter exceeded.", call. = FALSE) } ##===============================================================================================# ## TEST PARAMETER ##===============================================================================================# ## Test parameter are evaluated after all the calculations have been done as ## it should be up to the user to decide whether a value should be taken into account or not. 
##(0) ##set default values and overwrite them if there was something new ##set defaults TP <- list( curves_ratio = 1.001, intersection_ratio = NA, residuals_slope = NA, curves_bounds = ceiling(max(RF_reg.x)), dynamic_ratio = NA, lambda = NA, beta = NA, delta.phi = NA ) ##modify default values by given input if(!is.null(test_parameters)){TP <- modifyList(TP, test_parameters)} ##remove NULL elements from list TP <- TP[!sapply(TP, is.null)] ##set list with values we want to evaluate TP <- lapply(TP, function(x){ data.frame(THRESHOLD = as.numeric(x), VALUE = NA, STATUS = "OK", stringsAsFactors = TRUE) }) ##(1) check if RF_nat > RF_reg, considering the fit range ##TP$curves_ratio if ("curves_ratio" %in% names(TP)) { TP$curves_ratio$VALUE <- sum(RF_nat.limited[,2]) / sum(RF_reg[RF_nat.lim[1]:RF_nat.lim[2], 2]) if (!is.na(TP$curves_ratio$THRESHOLD)) { TP$curves_ratio$STATUS <- ifelse(TP$curves_ratio$VALUE > TP$curves_ratio$THRESHOLD, "FAILED", "OK") } } ##(1.1) check if RF_nat > RF_reg, considering the fit range ##TP$intersection_ratio if ("intersection_ratio" %in% names(TP)) { ##It is, as always, a little bit more complicated ... ##We cannot just normalise both curves and compare ratios. With increasing De the curve ##shape of the RF_nat curve cannot be the same as the RF_reg curve at t = 0. 
Therefore we ##have to find the segment in the RF_reg curve that fits to the RF_nat curve ## ##(1) get maximum count value for RF_nat IR_RF_nat.max <- max(RF_nat.limited[,2]) ##(2) find corresponding time value for RF_reg (here no limited) IR_RF_reg.corresponding_id <- which.min(abs(RF_reg[,2] - IR_RF_nat.max)) ##(3) calculate ratio, but just starting from the point where both curves correspond ##in terms of intensiy, otherwise the ratio cannot be correct ##the boundary check is necessary to avoid errors if((IR_RF_reg.corresponding_id + length(RF_nat.lim[1]:RF_nat.lim[2])) > length(RF_reg[,2])){ TP$intersection_ratio$VALUE <- Inf }else{ TP$intersection_ratio$VALUE <- abs(1 - sum((RF_nat.limited[, 2] / max(RF_nat.limited[, 2]))) / sum(RF_reg[IR_RF_reg.corresponding_id:(IR_RF_reg.corresponding_id + length(RF_nat.lim[1]:RF_nat.lim[2]) - 1), 2] / max(RF_reg[IR_RF_reg.corresponding_id:(IR_RF_reg.corresponding_id + length(RF_nat.lim[1]:RF_nat.lim[2]) - 1), 2]))) if (!is.na(TP$intersection_ratio$THRESHOLD)) { TP$intersection_ratio$STATUS <- ifelse(TP$intersection_ratio$VALUE > TP$intersection_ratio$THRESHOLD, "FAILED", "OK") } rm(IR_RF_nat.max, IR_RF_reg.corresponding_id) } } ##(2) check slop of the residuals using a linear fit ##TP$residuals_slope if ("residuals_slope" %in% names(TP)) { if (exists("slide")) { TP$residuals_slope$VALUE <- abs(slide$trend.fit[2]) if (!is.na(TP$residuals_slope$THRESHOLD)) { TP$residuals_slope$STATUS <- ifelse( TP$residuals_slope$VALUE > TP$residuals_slope$THRESHOLD, "FAILED", "OK") } } } ##(3) calculate dynamic range of regenrated curve ##TP$dynamic_ratio if ("dynamic_ratio"%in%names(TP)){ TP.dynamic_ratio <- subset(temp.sequence_structure, temp.sequence_structure$protocol.step == "REGENERATED") TP$dynamic_ratio$VALUE <- TP.dynamic_ratio$y.max/TP.dynamic_ratio$y.min if (!is.na(TP$dynamic_ratio$THRESHOLD)){ TP$dynamic_ratio$STATUS <- ifelse( TP$dynamic_ratio$VALUE < TP$dynamic_ratio$THRESHOLD , "FAILED", "OK") } } ##(4) decay parameter 
##TP$lambda if ("lambda"%in%names(TP) & "beta"%in%names(TP) & "delta.phi"%in%names(TP)){ fit.lambda <- try(minpack.lm::nlsLM( fit.function, data = data.frame(x = RF_reg.x, y = RF_reg.y), algorithm = "LM", start = list( phi.0 = fit.parameters.start["phi.0"], delta.phi = fit.parameters.start["delta.phi"], lambda = fit.parameters.start["lambda"], beta = fit.parameters.start["beta"] ), lower = c( phi.0 = .Machine$double.xmin, delta.phi = .Machine$double.xmin, lambda = .Machine$double.xmin, beta = .Machine$double.xmin ), upper = c( phi.0 = max(RF_reg.y), delta.phi = max(RF_reg.y), lambda = 1, beta = 100 ) ), silent = TRUE ) if(!inherits(fit.lambda, "try-error")){ temp.coef <- coef(fit.lambda) TP$lambda$VALUE <- temp.coef["lambda.lambda"] TP$beta$VALUE <- temp.coef["beta.beta"] TP$delta.phi$VALUE <- temp.coef["delta.phi.delta.phi"] if (!is.na( TP$lambda$THRESHOLD)){ TP$lambda$STATUS <- ifelse(TP$lambda$VALUE <= TP$lambda$THRESHOLD, "FAILED", "OK") } if (!is.na( TP$beta$THRESHOLD)){ TP$beta$STATUS <- ifelse(TP$beta$VALUE <= TP$beta$THRESHOLD, "FAILED", "OK") } if (!is.na( TP$delta.phi$THRESHOLD)){ TP$delta.phi$STATUS <- ifelse(TP$delta.phi$VALUE <= TP$delta.phi$THRESHOLD, "FAILED", "OK") } } } ##(99) check whether after sliding the ##TP$curves_bounds if (!is.null(TP$curves_bounds)) { if(exists("slide")){ ## add one channel on the top to make sure that it works TP$curves_bounds$VALUE <- max(RF_nat.slided[RF_nat.lim,1]) + (RF_nat[2,1] - RF_nat[1,1]) if (!is.na(TP$curves_bounds$THRESHOLD)){ TP$curves_bounds$STATUS <- ifelse(TP$curves_bounds$VALUE >= floor(max(RF_reg.x)), "FAILED", "OK") } }else if(exists("fit")){ TP$curves_bounds$VALUE <- De.upper if (!is.na(TP$curves_bounds$THRESHOLD)){ TP$curves_bounds$STATUS <- ifelse(TP$curves_bounds$VALUE >= max(RF_reg.x), "FAILED", "OK") } } } ##Combine everything in a data.frame if(length(TP) != 0) { TP.data.frame <- as.data.frame( cbind( POSITION = as.integer(aliquot.position), PARAMETER = c(names(TP)), do.call(data.table::rbindlist, 
args = list(l = TP)), SEQUENCE_NAME = aliquot.sequence_name, UID = NA ) ) ##set De.status to indicate whether there is any problem with the De according to the test parameter if ("FAILED" %in% TP.data.frame$STATUS) { De.status <- "FAILED" }else{ De.status <- "OK" } }else{ De.status <- "OK" TP.data.frame <- NULL } ##===============================================================================================# # Plotting ------------------------------------------------------------------------------------ ##===============================================================================================# if(plot){ ##get internal colour definition col <- get("col", pos = .LuminescenceEnv) if (!plot_reduced) { ##grep par default and define reset def.par <- par(no.readonly = TRUE) on.exit(par(def.par)) ##set plot frame, if a method was choosen if (method == "SLIDE" | method == "FIT") { layout(matrix(c(1, 2), 2, 1, byrow = TRUE), c(2), c(1.3, 0.4), TRUE) par( oma = c(1, 1, 1, 1), mar = c(0, 4, 3, 0), cex = plot.settings$cex ) } }else{ if(plot.settings[["cex"]] != 1){ def.par <- par()[["cex"]] on.exit(par(def.par)) par(cex = plot.settings[["cex"]]) } } ##here control xlim and ylim behaviour ##xlim xlim <- if ("xlim" %in% names(list(...))) { list(...)$xlim } else { if (plot.settings$log == "x" | plot.settings$log == "xy") { c(min(temp.sequence_structure$x.min),max(temp.sequence_structure$x.max)) }else{ c(0,max(temp.sequence_structure$x.max)) } } ##ylim ylim <- if("ylim" %in% names(list(...))) {list(...)$ylim} else {c(min(temp.sequence_structure$y.min), max(temp.sequence_structure$y.max))} ##open plot area plot( NA,NA, xlim = xlim, ylim = ylim, xlab = ifelse((method != "SLIDE" & method != "FIT") | plot_reduced, plot.settings$xlab," "), xaxt = ifelse((method != "SLIDE" & method != "FIT") | plot_reduced, plot.settings$xaxt,"n"), yaxt = "n", ylab = plot.settings$ylab, main = plot.settings$main, log = plot.settings$log, ) if(De.status == "FAILED"){ ##build list of failed TP 
mtext.message <- paste0( "Threshold exceeded for: ", paste(subset(TP.data.frame, TP.data.frame$STATUS == "FAILED")$PARAMETER, collapse = ", "),". For details see manual.") ##print mtext mtext(text = mtext.message, side = 3, outer = TRUE, col = "red", cex = 0.8 * par()[["cex"]]) warning(mtext.message, call. = FALSE) } ##use scientific format for y-axis labels <- axis(2, labels = FALSE) axis(side = 2, at = labels, labels = format(labels, scientific = TRUE)) ##(1) plot points that have been not selected points(RF_reg[-(min(RF_reg.lim):max(RF_reg.lim)),1:2], pch=3, col=col[19]) ##(2) plot points that has been used for the fitting points(RF_reg.x,RF_reg.y, pch=3, col=col[10]) ##show natural points if no analysis was done if(method != "SLIDE" & method != "FIT"){ ##add points points(RF_nat, pch = 20, col = "grey") points(RF_nat.limited, pch = 20, col = "red") ##legend if (plot.settings$legend) { legend( plot.settings$legend.pos, legend = plot.settings$legend.text, pch = c(19, 3), col = c("red", col[10]), horiz = TRUE, bty = "n", cex = .9 * par()[["cex"]] ) } } ##Add fitted curve, if possible. This is a graphical control that might be considered ##as useful before further analysis will be applied if (method.control.settings$show_fit) { if(!is(fit.lambda, "try-error")){ fit.lambda_coef <- coef(fit.lambda) curve(fit.lambda_coef[[1]]- (fit.lambda_coef[[2]]* ((1-exp(-fit.lambda_coef[[3]]*x))^fit.lambda_coef[[4]])), add=TRUE, lty = 2, col="red") rm(fit.lambda_coef) }else{ warning("[analyse_IRSAR.RF()] No fit possible, no fit shown.", call. 
= FALSE) } } ## ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++# ## PLOT - METHOD FIT ## ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++# if(method == "FIT"){ ##dummy to cheat R CMD check x<-NULL; rm(x) ##plot fitted curve curve(fit.parameters.results["phi.0"]- (fit.parameters.results["delta.phi"]* ((1-exp(-fit.parameters.results["lambda"]*x))^fit.parameters.results["beta"])), add=TRUE, from = RF_reg[min(RF_reg.lim), 1], to = RF_reg[max(RF_reg.lim), 1], col="red") ##plotting to show the limitations if RF_reg.lim was chosen ##show fitted curve GREY (previous red curve) curve(fit.parameters.results["phi.0"]- (fit.parameters.results["delta.phi"]* ((1-exp(-fit.parameters.results["lambda"]*x))^fit.parameters.results["beta"])), add=TRUE, from = min(RF_reg[, 1]), to = RF_reg[min(RF_reg.lim), 1], col="grey") ##show fitted curve GREY (after red curve) curve(fit.parameters.results["phi.0"]- (fit.parameters.results["delta.phi"]* ((1-exp(-fit.parameters.results["lambda"]*x))^fit.parameters.results["beta"])), add=TRUE, from = RF_reg[max(RF_reg.lim), 1], to = max(RF_reg[, 1]), col="grey") ##add points points(RF_nat, pch = 20, col = col[19]) points(RF_nat.limited, pch = 20, col = col[2]) ##legend if (plot.settings$legend) { legend( plot.settings$legend.pos, legend = plot.settings$legend.text, pch = c(19, 3), col = c("red", col[10]), horiz = TRUE, bty = "n", cex = .9 * par()[["cex"]] ) } ##plot range choosen for fitting abline(v=RF_reg[min(RF_reg.lim), 1], lty=2) abline(v=RF_reg[max(RF_reg.lim), 1], lty=2) ##plot De if De was calculated if(is.na(De) == FALSE & is.nan(De) == FALSE){ lines(c(0,De.lower), c(RF_nat.error.lower,RF_nat.error.lower), lty=2, col="grey") lines(c(0,De), c(RF_nat.mean,RF_nat.mean), lty=2, col="red") lines(c(0,De.upper), c(RF_nat.error.upper,RF_nat.error.upper), lty=2, col="grey") lines(c(De.lower, De.lower), c(0,RF_nat.error.lower), lty=2, col="grey") lines(c(De,De), 
c(0, RF_nat.mean), lty=2, col="red") lines(c(De.upper, De.upper), c(0,RF_nat.error.upper), lty=2, col="grey") } ##Insert fit and result if(is.na(De) != TRUE & (is.nan(De) == TRUE | De > max(RF_reg.x) | De.upper > max(RF_reg.x))){ try(mtext(side=3, substitute(D[e] == De, list(De=paste( De," (",De.lower," ", De.upper,")", sep=""))), line=0, cex=0.8 * par()[["cex"]], col="red"), silent=TRUE) De.status <- "VALUE OUT OF BOUNDS" } else{ if ("mtext" %in% names(list(...))) { mtext(side = 3, list(...)$mtext) }else{ try(mtext( side = 3, substitute(D[e] == De, list( De = paste(De," [",De.lower," ; ", De.upper,"]", sep = "") )), line = 0, cex = 0.7 * par()[["cex"]] ), silent = TRUE) } De.status <- "OK" } if (!plot_reduced) { ##==lower plot==## par(mar = c(4.2, 4, 0, 0)) ##plot residuals if (is.na(fit.parameters.results[1]) == FALSE) { plot( RF_reg.x, residuals(fit), xlim = c(0, max(temp.sequence_structure$x.max)), xlab = plot.settings$xlab, yaxt = "n", xaxt = plot.settings$xaxt, type = "p", pch = 20, col = "grey", ylab = "E", log = "" ) ##add 0 line abline(h = 0) } else{ plot( NA, NA, xlim = c(0, max(temp.sequence_structure$x.max)), ylab = "E", xlab = plot.settings$xlab, xaxt = plot.settings$xaxt, ylim = c(-1, 1) ) text(x = max(temp.sequence_structure$x.max) / 2, y = 0, "Fitting Error!") } } } ## ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++# ## PLOT - METHOD SLIDE ## ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++# else if(method == "SLIDE"){ ##(0) density plot if (method.control.settings$show_density) { ##showing the density makes only sense when we see at least 10 data points if (!any(is.na(De.MC)) && length(unique(De.MC)) >= 15) { ##calculate density De.MC density.De.MC <- density(De.MC) ##calculate transformation function x.1 <- max(density.De.MC$y) x.2 <- min(density.De.MC$y) ##with have to limit the scaling a little bit if (RF_nat.limited[1,2] > max(RF_reg.limited[,2]) - 
(max(RF_reg.limited[,2]) - min(RF_reg.limited[,2]))*.5) { y.1 <- max(RF_reg.limited[,2]) - (max(RF_reg.limited[,2]) - min(RF_reg.limited[,2]))*.5 }else{ y.1 <- RF_nat.limited[1,2] } y.2 <- par("usr")[3] m <- (y.1 - y.2) / (x.1 + x.2) n <- y.1 - m * x.1 density.De.MC$y <- m * density.De.MC$y + n rm(x.1,x.2,y.1,y.2,m,n) polygon(density.De.MC$x, density.De.MC$y, col = rgb(0,0.4,0.8,0.5)) }else{ warning("[analyse_IRSAR.RF()] Narrow density distribution, no density distribution plotted!", call. = FALSE) } } ##(1) plot unused points in grey ... unused points are points outside of the set limit points( matrix(RF_nat.slided[-(min(RF_nat.lim):max(RF_nat.lim)),1:2], ncol = 2), pch = 21, col = col[19] ) ##(2) add used points points(RF_nat.slided[min(RF_nat.lim):max(RF_nat.lim),], pch = 21, col = col[2], bg = col[2]) ##(3) add line to show the connection between the first point and the De lines(x = c(RF_nat.slided[1,1], RF_nat.slided[1,1]), y = c(.Machine$double.xmin,RF_nat.slided[1,2]), lty = 2, col = col[2] ) ##(4) add arrow at the lowest y-coordinate possible to show the sliding if (plot.settings$log != "y" & plot.settings$log != "xy") { shape::Arrows( x0 = 0, y0 = ylim[1], y1 = ylim[1], x1 = RF_nat.slided[1,1], arr.type = "triangle", arr.length = 0.3 * par()[["cex"]], code = 2, col = col[2], arr.adj = 1, arr.lwd = 1 ) } ##(5) add vertical shift as arrow; show nothing if nothing was shifted if (plot.settings$log != "y" & plot.settings$log != "xy" & I_n != 0) { shape::Arrows( x0 = (0 + par()$usr[1])/2, y0 = RF_nat[1,2], y1 = RF_nat[1,2] + I_n, x1 = (0 + par()$usr[1])/2, arr.type = "triangle", arr.length = 0.3 * par()[["cex"]], code = 2, col = col[2], arr.adj = 1, arr.lwd = 1 ) } ##TODO ##uncomment here to see all the RF_nat curves produced by the MC runs ##could become a polygone for future versions #lapply(1:n.MC, function(x){lines(slide.MC.list[[x]], col = rgb(0,0,0, alpha = 0.2))}) ##plot range choosen for fitting abline(v=RF_reg[min(RF_reg.lim), 1], lty=2) 
abline(v=RF_reg[max(RF_reg.lim), 1], lty=2) if (plot.settings$legend) { legend( plot.settings$legend.pos, legend = plot.settings$legend.text, pch = c(19, 3), col = c("red", col[10]), horiz = TRUE, bty = "n", cex = .9 * par()[["cex"]] ) } ##write information on the De in the plot if("mtext" %in% names(list(...))) { mtext(side = 3, list(...)$mtext) }else{ try(mtext(side=3, substitute(D[e] == De, list(De=paste0(De," [", De.lower, " ; ", De.upper, "]"))), line=0, cex=0.7 * par()[["cex"]]), silent=TRUE) } if (!plot_reduced) { ##==lower plot==## ##RESIDUAL PLOT par(mar = c(4, 4, 0, 0)) plot( NA, NA, ylim = range(residuals), xlim = xlim, xlab = plot.settings$xlab, type = "p", pch = 1, col = "grey", xaxt = plot.settings$xaxt, ylab = "E", yaxt = "n", log = ifelse( plot.settings$log == "y" | plot.settings$log == "xy", "", plot.settings$log ) ) ##add axis for 0 ... means if the 0 is not visible there is labelling axis(side = 4, at = 0, labels = 0) ##add residual indicator (should circle around 0) col.ramp <- colorRampPalette(c(col[19], "white", col[19])) col.polygon <- col.ramp(100) if (plot.settings$log != "x") { shape::filledrectangle( mid = c((xlim[2]) + (par("usr")[2] - xlim[2]) / 2, max(residuals) - diff(range(residuals)) / 2), wx = par("usr")[2] - xlim[2], wy = diff(range(residuals)), col = col.polygon ) } ##add 0 line abline(h = 0, lty = 3) ##0-line indicator and arrows if this is not visible ##red colouring here only if the 0 point is not visible to avoid too much colouring if (max(residuals) < 0 & min(residuals) < 0) { shape::Arrowhead( x0 = xlim[2] + (par("usr")[2] - xlim[2]) / 2, y0 = max(residuals), angle = 270, lcol = col[2], arr.length = 0.4, arr.type = "triangle", arr.col = col[2] ) } else if (max(residuals) > 0 & min(residuals) > 0) { shape::Arrowhead( x0 = xlim[2] + (par("usr")[2] - xlim[2]) / 2, y0 = min(residuals), angle = 90, lcol = col[2], arr.length = 0.4, arr.type = "triangle", arr.col = col[2] ) } else{ points(xlim[2], 0, pch = 3) } ##add residual 
points if (length(RF_nat.slided[c(min(RF_nat.lim):max(RF_nat.lim)), 1]) > length(residuals)) { temp.points.diff <- length(RF_nat.slided[c(min(RF_nat.lim):max(RF_nat.lim)), 1]) - length(residuals) points(RF_nat.slided[c(min(RF_nat.lim):(max(RF_nat.lim) - temp.points.diff)), 1], residuals, pch = 20, col = rgb(0, 0, 0, 0.4)) } else{ points(RF_nat.slided[c(min(RF_nat.lim):max(RF_nat.lim)), 1], residuals, pch = 20, col = rgb(0, 0, 0, 0.4)) } ##add vertical line to mark De (t_n) abline(v = De, lty = 2, col = col[2]) ##add numeric value of De ... t_n axis( side = 1, at = De, labels = De, cex.axis = 0.8 * plot.settings$cex, col = "blue", padj = -1.55, ) ##TODO- CONTROL PLOT! ... can be implemented in appropriate form in a later version if (method.control.settings$trace) { par(new = TRUE) plot( RF_reg.limited[1:length(slide$squared_residuals),1], slide$squared_residuals, ylab = "", type = "l", xlab = "", xaxt = plot.settings$xaxt, axes = FALSE, xlim = xlim, log = "y" ) } } } }#endif::plot # Return -------------------------------------------------------------------------------------- ##=============================================================================# ## RETURN ##=============================================================================# ##catch up worst case scenarios ... 
means something went wrong if(!exists("De")){De <- NA} if(!exists("De.error")){De.error <- NA} if(!exists("De.MC")){De.MC <- NA} if(!exists("De.lower")){De.lower <- NA} if(!exists("De.upper")){De.upper <- NA} if(!exists("De.status")){De.status <- NA} if (!exists("fit")) { if (exists("fit.lambda")) { fit <- fit.lambda }else{ fit <- list() } } if(!exists("slide")){slide <- list()} ##combine values for De into a data frame De.values <- data.frame( DE = De, DE.ERROR = De.error, DE.LOWER = De.lower, DE.UPPER = De.upper, DE.STATUS = De.status, RF_NAT.LIM = paste(RF_nat.lim, collapse = ":"), RF_REG.LIM = paste(RF_reg.lim, collapse = ":"), POSITION = as.integer(aliquot.position), DATE = aliquot.date, SEQUENCE_NAME = aliquot.sequence_name, UID = NA, row.names = NULL, stringsAsFactors = FALSE ) ##generate unique identifier UID <- create_UID() ##update data.frames accordingly De.values$UID <- UID if(!is.null(TP.data.frame)){ TP.data.frame$UID <- UID } ##produce results object newRLumResults.analyse_IRSAR.RF <- set_RLum( class = "RLum.Results", data = list( data = De.values, De.MC = De.MC, test_parameters = TP.data.frame, fit = fit, slide = slide ), info = list(call = sys.call()) ) invisible(newRLumResults.analyse_IRSAR.RF) } Luminescence/R/analyse_Al2O3C_CrossTalk.R0000644000176200001440000002654513270641406017653 0ustar liggesusers#' Al2O3:C Reader Cross Talk Analysis #' #' The function provides the analysis of cross-talk measurements on a #' FI lexsyg SMART reader using Al2O3:C chips #' #' @param object [RLum.Analysis-class] **(required)**: #' measurement input #' #' @param signal_integral [numeric] (*optional*): #' signal integral, used for the signal and the background. #' If nothing is provided the full range is used #' #' @param dose_points [numeric] (*with default*): #' vector with dose points, if dose points are repeated, only the general #' pattern needs to be provided. 
Default values follow the suggestions #' made by Kreutzer et al., 2018 #' #' @param recordType [character] (*with default*): input curve selection, which is passed to #' function [get_RLum]. To deactivate the automatic selection set the argument to `NULL` #' #' @param irradiation_time_correction [numeric] or [RLum.Results-class] (*optional*): #' information on the used irradiation time correction obained by another experiements. #' #' @param method_control [list] (*optional*): #' optional parameters to control the calculation. #' See details for further explanations #' #' @param plot [logical] (*with default*): #' enable/disable plot output #' #' @param ... further arguments that can be passed to the plot output #' #' @return #' Function returns results numerically and graphically: #' #' -----------------------------------\cr #' `[ NUMERICAL OUTPUT ]`\cr #' -----------------------------------\cr #' #' **`RLum.Results`**-object #' #' **slot:** **`@data`** #' #' \tabular{lll}{ #' **Element** \tab **Type** \tab **Description**\cr #' `$data` \tab `data.frame` \tab summed apparent dose table \cr #' `$data_full` \tab `data.frame` \tab full apparent dose table \cr #' `$fit` \tab `lm` \tab the linear model obtained from fitting \cr #' `$col.seq` \tab `numeric` \tab the used colour vector \cr #' } #' #' **slot:** **`@info`** #' #' The original function call #' #' ------------------------\cr #' `[ PLOT OUTPUT ]`\cr #' ------------------------\cr #' #' - An overview of the obtained apparent dose values #' #' #' #' @section Function version: 0.1.2 #' #' @author Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France) #' #' @seealso [analyse_Al2O3C_ITC] #' #' @references #' #' Kreutzer, S., Martin, L., Guérin, G., Tribolo, C., Selva, P., Mercier, N., 2018. Environmental Dose Rate #' Determination Using a Passive Dosimeter: Techniques and Workflow for alpha-Al2O3:C Chips. #' Geochromometria 45, 56-67. 
doi: 10.1515/geochr-2015-0086 #' #' @keywords datagen #' #' @examples #' #' ##load data #' data(ExampleData.Al2O3C, envir = environment()) #' #' ##run analysis #' analyse_Al2O3C_CrossTalk(data_CrossTalk) #' #' @md #' @export analyse_Al2O3C_CrossTalk <- function( object, signal_integral = NULL, dose_points = c(0,4), recordType = c("OSL (UVVIS)"), irradiation_time_correction = NULL, method_control = NULL, plot = TRUE, ... ){ # Integretiy check --------------------------------------------------------------------------- ##check input object if(!all(unlist(lapply(object, function(x){is(x, "RLum.Analysis")})))){ stop("[analyse_Al2O3C_CrossTalk()] The elements in 'object' are not all of type 'RLum.Analsyis'", call. = FALSE) } ##TODO ... do more, push harder ##Accept the entire sequence ... including TL and extract ##Add sufficient unit tests # Preparation --------------------------------------------------------------------------------- ##select curves based on the recordType selection; if not NULL if(!is.null(recordType)){ object <- get_RLum(object, recordType = recordType, drop = FALSE) } #set method control method_control_settings <- list( fit.method = "EXP" ) ##modify on request if(!is.null(method_control)){ method_control_settings <- modifyList(x = method_control_settings, val = method_control) } ##set signal integral if(is.null(signal_integral)){ signal_integral <- c(1:nrow(object[[1]][[1]][])) }else{ ##check whether the input is valid, otherwise make it valid if(min(signal_integral) < 1 | max(signal_integral) > nrow(object[[1]][[1]][])){ signal_integral <- c(1:nrow(object[[1]][[1]][])) warning( paste0( "[analyse_Al2O3C_ITC()] Input for 'signal_integral' corrected to 1:", nrow(object[[1]][[1]][]) ), call. 
= FALSE ) } } ##check irradiation time correction if (!is.null(irradiation_time_correction)) { if (is(irradiation_time_correction, "RLum.Results")) { if (irradiation_time_correction@originator == "analyse_Al2O3C_ITC") { irradiation_time_correction <- get_RLum(irradiation_time_correction) ##insert case for more than one observation ... if(nrow(irradiation_time_correction)>1){ irradiation_time_correction <- c(mean(irradiation_time_correction[[1]]), sd(irradiation_time_correction[[1]])) }else{ irradiation_time_correction <- c(irradiation_time_correction[[1]], irradiation_time_correction[[2]]) } } else{ stop( "[analyse_Al2O3C_CrossTalk()] The object provided for the argument 'irradiation_time_correction' was created by an unsupported function!", call. = FALSE ) } } } # Calculation --------------------------------------------------------------------------------- ##we have two dose points, and one background curve, we do know only the 2nd dose ##create signal table list signal_table_list <- lapply(1:length(object), function(i) { ##calculate all the three signals needed BACKGROUND <- sum(object[[i]][[3]][, 2]) NATURAL <- sum(object[[i]][[1]][, 2]) REGENERATED <- sum(object[[i]][[2]][, 2]) temp_df <- data.frame( POSITION = get_RLum(object[[i]][[1]], info.object = "position"), DOSE = if(!is.null(irradiation_time_correction)){ dose_points + irradiation_time_correction[1] }else{ dose_points }, DOSE_ERROR = if(!is.null(irradiation_time_correction)){ dose_points * irradiation_time_correction[2]/irradiation_time_correction[1] }else{ 0 }, STEP = c("NATURAL", "REGENERATED"), INTEGRAL = c(NATURAL, REGENERATED), BACKGROUND = c(BACKGROUND, BACKGROUND), NET_INTEGRAL = c(NATURAL - BACKGROUND, REGENERATED - BACKGROUND), row.names = NULL ) ##0 dose points should not be biased by the correction .. 
id_zero <- which(dose_points == 0) temp_df$DOSE[id_zero] <- 0 temp_df$DOSE_ERROR[id_zero] <- 0 return(temp_df) }) APPARENT_DOSE <- as.data.frame(data.table::rbindlist(lapply(1:length(object), function(x){ ##run in MC run if(!is.null(irradiation_time_correction)){ DOSE <- rnorm(1000, mean = signal_table_list[[x]]$DOSE[2], sd = signal_table_list[[x]]$DOSE_ERROR[2]) }else{ DOSE <- signal_table_list[[x]]$DOSE[2] } ##calculation temp <- (DOSE * signal_table_list[[x]]$NET_INTEGRAL[1])/signal_table_list[[x]]$NET_INTEGRAL[2] data.frame( POSITION = signal_table_list[[x]]$POSITION[1], AD = mean(temp), AD_ERROR = sd(temp)) }))) ##add apparent dose to the information signal_table_list <- lapply(1:length(signal_table_list), function(x){ cbind(signal_table_list[[x]], rep(APPARENT_DOSE[x,2:3], 2)) }) ##combine data_full <- as.data.frame(data.table::rbindlist(signal_table_list), stringsAsFactors = FALSE) # Plotting ------------------------------------------------------------------------------------ ##get plot settings par.default <- par(no.readonly = TRUE) on.exit(par(par.default)) ##settings plot_settings <- list( main = "Sample Carousel Crosstalk", mtext = "" ) ##modify on request plot_settings <- modifyList(x = plot_settings, list(...)) ##pre-calculations for graphical parameters n.positions <- length(unique(APPARENT_DOSE$POSITION)) arc.step <- (2 * pi) / n.positions step <- 0 ##condense data.frame, by calculating the mean for similar positions AD_matrix <- t(vapply(sort(unique(APPARENT_DOSE$POSITION)), function(x){ c(x,mean(APPARENT_DOSE[["AD"]][APPARENT_DOSE[["POSITION"]] == x]), sd(APPARENT_DOSE[["AD"]][APPARENT_DOSE[["POSITION"]] == x])) }, FUN.VALUE = vector(mode = "numeric", length = 3))) ##create colour ramp col.seq <- data.frame(POSITION = AD_matrix[order(AD_matrix[,2]),1], COLOUR = plotrix::smoothColors("green", nrow(AD_matrix) - 2, "red"), stringsAsFactors = FALSE) col.seq <- col.seq[["COLOUR"]][order(col.seq[["POSITION"]])] ##calculate model fit <- lm( formula = y ~ 
poly(x, 2, raw=TRUE), data = data.frame(y = APPARENT_DOSE$AD[order(APPARENT_DOSE$POSITION)], x = sort(APPARENT_DOSE$POSITION))) ##enable or disable plot ... we cannot put the condition higher, because we here ##calculate something we are going to need later if (plot) { ##set layout matrix layout(mat = matrix( c(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 3), 5, 5, byrow = TRUE )) ##create empty plot par( mar = c(1, 1, 1, 1), omi = c(1, 1, 1, 1), oma = c(0.2, 0.2, 0.2, 0.2), cex = 1.1 ) shape::emptyplot(c(-1.15, 1.15), main = plot_settings$main, frame.plot = FALSE) ##add outher circle shape::plotcircle(r = 1.1, col = rgb(0.9, 0.9, 0.9, 1)) ##add inner octagon shape::filledcircle( r1 = 0.6, mid = c(0, 0), lwd = 1, lcol = "black", col = "white" ) ##add circles for (i in 1:n.positions) { shape::plotcircle( r = 0.05, mid = c(cos(step), sin(step)), cex = 6, pch = 20, col = col.seq[i] ) text(x = cos(step) * 0.85, y = sin(step) * .85, labels = i) step <- step + arc.step } ##add center plot with position plot(NA, NA, xlim = range(AD_matrix[,1]), ylim = range(APPARENT_DOSE[,2]), frame.plot = FALSE, type = "l") ##add points points(x = APPARENT_DOSE, pch = 20, col = rgb(0,0,0,0.3)) ##add linear model lines(sort(APPARENT_DOSE$POSITION), predict(fit), col = "red") ##add colour legend shape::emptyplot(c(-1.2, 1.2), frame.plot = FALSE) plotrix::gradient.rect( xleft = -0.6, ybottom = -1.2, xright = 0, ytop = 1.2, col = plotrix::smoothColors("green", 40, "red"), gradient = "y", border = NA ) ##add scale text text( x = -0.3, y = 1.2, label = "[s]", pos = 3, cex = 1.1 ) text( x = 0.4, y = 1, label = round(max(AD_matrix[, 2]),2), pos = 3, cex = 1.1 ) text( x = 0.4, y = -1.5, label = 0, pos = 3, cex = 1.1 ) } # Output -------------------------------------------------------------------------------------- output <- set_RLum( class = "RLum.Results", data = list( data = data.frame( POSITION = AD_matrix[,1], AD = AD_matrix[,2], AD_ERROR = AD_matrix[,3] ), 
data_full = data_full, fit = fit, col.seq = col.seq ), info = list( call = sys.call() ) ) } Luminescence/R/tune_Data.R0000644000176200001440000000506113231137116015114 0ustar liggesusers#' Tune data for experimental purpose #' #' The error can be reduced and sample size increased for specific purpose. #' #' @param data [data.frame] (**required**): #' input values, structure: data (`values[,1]`) and data error (`values [,2]`) #' are required #' #' @param decrease.error [numeric]: #' factor by which the error is decreased, ranges between 0 and 1. #' #' @param increase.data [numeric]: #' factor by which the error is decreased, ranges between 0 and inf. #' #' @return Returns a [data.frame] with tuned values. #' #' @note #' You should not use this function to improve your poor data set! #' #' @section Function version: 0.5.0 #' #' @author #' Michael Dietze, GFZ Potsdam (Germany) #' #' @keywords manip #' #' @examples #' #' ## load example data set #' data(ExampleData.DeValues, envir = environment()) #' x <- ExampleData.DeValues$CA1 #' #' ## plot original data #' plot_AbanicoPlot(data = x, #' summary = c("n", "mean")) #' #' ## decrease error by 10 % #' plot_AbanicoPlot(data = tune_Data(x, decrease.error = 0.1), #' summary = c("n", "mean")) #' #' ## increase sample size by 200 % #' #plot_AbanicoPlot(data = tune_Data(x, increase.data = 2) , #' # summary = c("n", "mean")) #' #' @md #' @export tune_Data <- function( data, decrease.error = 0, increase.data = 0 ){ if(missing(decrease.error) == FALSE) { error.rel <- data[,2] / data[,1] data[,2] <- error.rel * (1 - decrease.error) * data[,1] } if(missing(increase.data) == FALSE) { n <- round(x = increase.data * 100, digits = 0) i.new <- sample(x = 1:nrow(data), size = n, replace = TRUE) x.new <- rnorm(n = n, mean = data[i.new, 1], sd = data[i.new, 2]) e.new <- rnorm(n = n, mean = data[i.new, 2], sd = data[i.new, 2] * 0.05) x.merge <- c(data[,1], x.new) e.merge <- c(data[,2], e.new) e.merge <- e.merge[order(x.merge)] x.merge <- 
x.merge[order(x.merge)] data.out <- data.frame(x.merge, e.merge) names(data.out) <- names(data) data <- data.out } info <- Sys.info() user <- info[length(info)] os <- info[1] warning(paste("Dear ", user, ", these activities on your ", os, " machine have been tracked and will be submitted to ", "the R.Lum data base. Cheating does not pay off! [", Sys.time(), "]", sep = "")) return(data) } Luminescence/R/extract_IrradiationTimes.R0000644000176200001440000003457513231137116020225 0ustar liggesusers#' Extract Irradiation Times from an XSYG-file #' #' Extracts irradiation times, dose and times since last irradiation, from a #' Freiberg Instruments XSYG-file. These information can be further used to #' update an existing BINX-file. #' #' The function was written to compensate missing information in the BINX-file #' output of Freiberg Instruments lexsyg readers. As all information are #' available within the XSYG-file anyway, these information can be extracted #' and used for further analysis or/and to stored in a new BINX-file, which can #' be further used by other software, e.g., Analyst (Geoff Duller). #' #' Typical application example: g-value estimation from fading measurements #' using the Analyst or any other self written script. #' #' Beside the some simple data transformation steps the function applies the #' functions [read_XSYG2R], [read_BIN2R], [write_R2BIN] for data import and export. #' #' @param object [character], [RLum.Analysis-class] or [list] (**required**): #' path and file name of the XSYG file or an [RLum.Analysis-class] #' produced by the function [read_XSYG2R]; #' alternatively a `list` of [RLum.Analysis-class] can be provided. #' #' **Note**: If an [RLum.Analysis-class] is used, any input for #' the arguments `file.BINX` and `recordType` will be ignored! #' #' @param file.BINX [character] (*optional*): #' path and file name of an existing BINX-file. 
If a file name is provided the #' file will be updated with the information from the XSYG file in the same #' folder as the original BINX-file. #' #' **Note:** The XSYG and the BINX-file have to be originate from the #' same measurement! #' #' @param recordType [character] (*with default*): #' select relevant curves types from the XSYG file or [RLum.Analysis-class] #' object. As the XSYG-file format comprises much more information than usually #' needed for routine data analysis and allowed in the BINX-file format, only #' the relevant curves are selected by using the function #' [get_RLum]. The argument `recordType` works as #' described for this function. #' #' **Note:** A wrong selection will causes a function error. Please change this #' argument only if you have reasons to do so. #' #' @param compatibility.mode [logical] (*with default*): #' this option is parsed only if a BIN/BINX file is produced and it will reset all position #' values to a max. value of 48, cf.[write_R2BIN] #' #' @param txtProgressBar [logical] (*with default*): #' enables `TRUE` or disables `FALSE` the progression bars during import and export #' #' @return #' An [RLum.Results-class] object is returned with the #' following structure: #' #' ``` #' .. $irr.times (data.frame) #' ``` #' #' If a BINX-file path and name is set, the output will be additionally #' transferred into a new BINX-file with the function name as suffix. For the #' output the path of the input BINX-file itself is used. Note that this will #' not work if the input object is a file path to an XSYG-file, instead of a #' link to only one file. In this case the argument input for `file.BINX` is ignored. #' #' In the self call mode (input is a `list` of [RLum.Analysis-class] objects #' a list of [RLum.Results-class] is returned. #' #' @note #' The produced output object contains still the irradiation steps to #' keep the output transparent. 
However, for the BINX-file export this steps #' are removed as the BINX-file format description does not allow irradiations #' as separat sequences steps. #' #' **BINX-file 'Time Since Irradiation' value differs from the table output?** #' #' The way the value 'Time Since Irradiation' is defined differs. In the BINX-file the #' 'Time Since Irradiation' is calculated as the 'Time Since Irradiation' plus the 'Irradiation #' Time'. The table output returns only the real 'Time Since Irradiation', i.e. time between the #' end of the irradiation and the next step. #' #' **Negative values for `TIMESINCELAS.STEP`?** #' #' Yes, this is possible and no bug, as in the XSYG-file multiple curves are stored for one step. #' Example: TL step may comprise three curves: #' #' - (a) counts vs. time, #' - (b) measured temperature vs. time and #' - (c) predefined temperature vs. time. #' #' Three curves, but they are all belonging to one TL measurement step, but with regard to #' the time stamps this could produce negative values as the important function #' ([read_XSYG2R]) do not change the order of entries for one step #' towards a correct time order. #' #' @section Function version: 0.3.1 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #' @seealso [RLum.Analysis-class], [RLum.Results-class], [Risoe.BINfileData-class], #' [read_XSYG2R], [read_BIN2R], [write_R2BIN] #' #' @references #' Duller, G.A.T., 2015. The Analyst software package for luminescence data: overview and #' recent improvements. Ancient TL 33, 35-42. 
#' #' @keywords IO manip #' #' @examples #' #' #' ## (1) - example for your own data #' ## #' ## set files and run function #' # #' # file.XSYG <- file.choose() #' # file.BINX <- file.choose() #' # #' # output <- extract_IrradiationTimes(file.XSYG = file.XSYG, file.BINX = file.BINX) #' # get_RLum(output) #' # #' ## export results additionally to a CSV.file in the same directory as the XSYG-file #' # write.table(x = get_RLum(output), #' # file = paste0(file.BINX,"_extract_IrradiationTimes.csv"), #' # sep = ";", #' # row.names = FALSE) #' #' @md #' @export extract_IrradiationTimes <- function( object, file.BINX, recordType = c("irradiation (NA)", "IRSL (UVVIS)", "OSL (UVVIS)", "TL (UVVIS)"), compatibility.mode = TRUE, txtProgressBar = TRUE ){ # SELF CALL ----------------------------------------------------------------------------------- if(is.list(object)){ ##show message for non-supported arguments if(!missing(file.BINX)){ warning("[extract_IrradiationTimes()] argument 'file.BINX' is not supported in the self call mode.", call. 
= FALSE) } ##extent arguments ##extent recordType if(is(recordType, "list")){ recordType <- rep(recordType, length = length(object)) }else{ recordType <- rep(list(recordType), length = length(object)) } ##run function results <- lapply(1:length(object), function(x) { extract_IrradiationTimes( object = object[[x]], recordType = recordType[[x]], txtProgressBar = txtProgressBar ) }) ##DO NOT use invisible here, this will stop the function from stopping if(length(results) == 0){ return(NULL) }else{ return(results) } } # Integrity tests ----------------------------------------------------------------------------- ##check whether an character or an RLum.Analysis object is provided if(is(object)[1] != "character" & is(object)[1] != "RLum.Analysis"){ stop("[extract_IrradiationTimes()] Input object is neither of type 'character' nor of type 'RLum.Analysis'.") }else if(is(object)[1] == "character"){ ##set object to file.XSYG file.XSYG <- object ##XSYG ##check if file exists if(file.exists(file.XSYG) == FALSE){ stop("[extract_IrradiationTimes()] Wrong XSYG file name or file does not exsits!") } ##check if file is XML file if(tail(unlist(strsplit(file.XSYG, split = "\\.")), 1) != "xsyg" & tail(unlist(strsplit(file.XSYG, split = "\\.")), 1) != "XSYG" ){ stop("[extract_IrradiationTimes()] File is not of type 'XSYG'!") } ##BINX if(!missing(file.BINX)){ ##check if file exists if(file.exists(file.BINX) == FALSE){ stop("[extract_IrradiationTimes()] Wrong BINX file name or file does not exist!", call. = FALSE) } ##check if file is XML file if(tail(unlist(strsplit(file.BINX, split = "\\.")), 1) != "binx" & tail(unlist(strsplit(file.BINX, split = "\\.")), 1) != "BINX" ){ stop("[extract_IrradiationTimes()] File is not of type 'BINX'!", call. 
= FALSE) } } # Settings and import XSYG -------------------------------------------------------------------- temp.XSYG <- read_XSYG2R(file.XSYG, txtProgressBar = txtProgressBar) if(!missing(file.BINX)){ temp.BINX <- read_BIN2R(file.BINX, txtProgressBar = txtProgressBar) temp.BINX.dirname <- (dirname(file.XSYG)) } # Some data preparation ----------------------------------------------------------------------- ##set list temp.sequence.list <- list() ##select all analysis objects and combinde them for(i in 1:length(temp.XSYG)){ ##select sequence and reduce the data set to really wanted values temp.sequence.list[[i]] <- get_RLum(temp.XSYG[[i]]$Sequence.Object, recordType = recordType, drop = FALSE) ##get corresponding position number, this will be needed later on temp.sequence.position <- as.numeric(as.character(temp.XSYG[[i]]$Sequence.Header["position",])) } }else{ ##now we assume a single RLum.Analysis object ##select sequence and reduce the data set to really wanted values, note that no ##record selection was made! 
temp.sequence.list <- list(object) } ##merge objects if(length(temp.sequence.list)>1){ temp.sequence <- merge_RLum(temp.sequence.list) }else{ temp.sequence <- temp.sequence.list[[1]] } # Grep relevant information ------------------------------------------------------------------- ##Sequence STEP STEP <- vapply(X = 1:length_RLum(temp.sequence), FUN = function(x){ get_RLum(temp.sequence, record.id = x)@recordType }, FUN.VALUE = vector(mode = "character", length = 1)) #START time of each step temp.START <- unname(vapply(X = 1:length_RLum(temp.sequence), FUN = function(x){ get_RLum(get_RLum(temp.sequence, record.id = x), info.object = c("startDate")) }, FUN.VALUE = vector(mode = "character", length = 1))) ##DURATION of each STEP DURATION.STEP <- vapply(X = 1:length_RLum(temp.sequence), FUN = function(x){ # get_RLum(get_RLum(temp.sequence, record.id = x), info.object = c("endDate")) max(get_RLum(get_RLum(temp.sequence, record.id = x))[,1]) #print(get_RLum(temp.sequence, record.id = x)) }, FUN.VALUE = vector(mode = "numeric", length = 1)) #print(DURATION.STEP) ##a little bit reformatting. 
START <- strptime(temp.START, format = "%Y%m%d%H%M%S", tz = "GMT") ##Calculate END time of each STEP END <- START + DURATION.STEP ##add position number so far an XSYG file was the input if(exists("file.XSYG")){ POSITION <- rep(temp.sequence.position, each = length_RLum(temp.sequence)) }else if(!inherits(try( get_RLum( get_RLum(temp.sequence, record.id = 1), info.object = "position"), silent = TRUE), "try-error")){ ##DURATION of each STEP POSITION <- unname(sapply(1:length_RLum(temp.sequence), function(x){ get_RLum(get_RLum(temp.sequence, record.id = x),info.object = "position") })) }else{ POSITION <- NA } ##Combine the results temp.results <- data.frame(POSITION,STEP,START,DURATION.STEP,END) # Calculate irradiation duration ------------------------------------------------------------ ##set objects time.irr.duration <- NA IRR_TIME <- unlist(sapply(1:nrow(temp.results), function(x){ if(temp.results[x,"STEP"] == "irradiation (NA)"){ time.irr.duration <<- temp.results[x,"DURATION.STEP"] return(0) }else{ if(is.na(time.irr.duration)){ return(0) }else{ return(time.irr.duration) } } })) # Calculate time since irradiation ------------------------------------------------------------ ##set objects time.irr.end <- NA TIMESINCEIRR <- unlist(sapply(1:nrow(temp.results), function(x){ if(temp.results[x,"STEP"] == "irradiation (NA)"){ time.irr.end<<-temp.results[x,"END"] return(-1) }else{ if(is.na(time.irr.end)){ return(-1) }else{ return(difftime(temp.results[x,"START"],time.irr.end, units = "secs")) } } })) # Calculate time since last step -------------------------------------------------------------- TIMESINCELAST.STEP <- unlist(sapply(1:nrow(temp.results), function(x){ if(x == 1){ return(0) }else{ return(difftime(temp.results[x,"START"],temp.results[x-1, "END"], units = "secs")) } })) # Combine final results ----------------------------------------------------------------------- ##results table, export as CSV results <- cbind(temp.results,IRR_TIME, 
TIMESINCEIRR,TIMESINCELAST.STEP) # Write BINX-file if wanted ------------------------------------------------------------------- if(!missing(file.BINX)){ ##(1) remove all irradiation steps as there is no record in the BINX file and update information results.BINX <- results[-which(results[,"STEP"] == "irradiation (NA)"),] ##(1a) update information on the irradiation time temp.BINX@METADATA[["IRR_TIME"]] <- results.BINX[["IRR_TIME"]] ##(1b) update information on the time since irradiation by using the Risoe definition of thi ##paramter, to make the file compatible to the Analyst temp.BINX@METADATA[["TIMESINCEIRR"]] <- results.BINX[["IRR_TIME"]] + results.BINX[["TIMESINCEIRR"]] ##(2) compare entries in the BINX-file with the entries in the table to make sure ## that both have the same length if(!missing(file.BINX)){ if(nrow(results.BINX) == nrow(temp.BINX@METADATA)){ ##update BINX-file try <- write_R2BIN(temp.BINX, version = "06", file = paste0(file.BINX,"_extract_IrradiationTimes.BINX"), compatibility.mode = compatibility.mode, txtProgressBar = txtProgressBar) ##set message on the format definition if(!inherits(x = try, 'try-error')){ message("[extract_IrradiationTimes()] 'Time Since Irradiation' was redefined in the exported BINX-file to: 'Time Since Irradiation' plus the 'Irradiation Time' to be compatible with the Analyst.") } } }else{ try( stop("[extract_IrradiationTimes()] XSYG-file and BINX-file did not contain similar entries. BINX-file update skipped!",call. = FALSE)) } } # Output -------------------------------------------------------------------------------------- return(set_RLum(class = "RLum.Results", data = list(irr.times = results))) } Luminescence/R/plot_NRt.R0000644000176200001440000001704213231137116014753 0ustar liggesusers#' Visualise natural/regenerated signal ratios #' #' This function creates a Natural/Regenerated signal vs. time (NR(t)) plot #' as shown in Steffen et al. 
2009
#'
#' This function accepts the individual curve data in many different formats. If
#' `data` is a `list`, each element of the list must contain a two
#' column `data.frame` or `matrix` containing the XY data of the curves
#' (time and counts). Alternatively, the elements can be objects of class
#' [RLum.Data.Curve-class].
#'
#' Input values can also be provided as a `data.frame` or `matrix` where
#' the first column contains the time values and each following column contains
#' the counts of each curve.
#'
#' @param data [list], [data.frame], [matrix] or [RLum.Analysis-class] (**required**):
#' X,Y data of measured values (time and counts). See details on individual data structure.
#'
#' @param log [character] (*optional*):
#' logarithmic axes (`c("x", "y", "xy")`).
#'
#' @param smooth [character] (*optional*):
#' apply data smoothing. Use `"rmean"` to calculate the rolling where `k`
#' determines the width of the rolling window (see [rollmean]). `"spline"`
#' applies a smoothing spline to each curve (see [smooth.spline])
#'
#' @param k [integer] (*with default*):
#' integer width of the rolling window.
#'
#' @param legend [logical] (*with default*):
#' show or hide the plot legend.
#'
#' @param legend.pos [character] (*with default*):
#' keyword specifying the position of the legend (see [legend]).
#'
#' @param ... further parameters passed to [plot] (also see [par]).
#'
#'
#' @author Christoph Burow, University of Cologne (Germany)
#'
#' @seealso [plot]
#'
#' @return Returns a plot and [RLum.Analysis-class] object.
#'
#' @references
#' Steffen, D., Preusser, F., Schlunegger, F., 2009. OSL quartz underestimation due to
#' unstable signal components. Quaternary Geochronology, 4, 353-362.
#'
#' @examples
#'
#' ## load example data
#' data("ExampleData.BINfileData", envir = environment())
#'
#' ## EXAMPLE 1
#'
#' ## convert Risoe.BINfileData object to RLum.Analysis object
#' data <- Risoe.BINfileData2RLum.Analysis(object = CWOSL.SAR.Data, pos = 8, ltype = "OSL")
#'
#' ## extract all OSL curves
#' allCurves <- get_RLum(data)
#'
#' ## keep only the natural and regenerated signal curves
#' pos <- seq(1, 9, 2)
#' curves <- allCurves[pos]
#'
#' ## plot a standard NR(t) plot
#' plot_NRt(curves)
#'
#' ## re-plot with rolling mean data smoothing
#' plot_NRt(curves, smooth = "rmean", k = 10)
#'
#' ## re-plot with a logarithmic x-axis
#' plot_NRt(curves, log = "x", smooth = "rmean", k = 5)
#'
#' ## re-plot with custom axes ranges
#' plot_NRt(curves, smooth = "rmean", k = 5,
#'          xlim = c(0.1, 5), ylim = c(0.4, 1.6),
#'          legend.pos = "bottomleft")
#'
#' ## re-plot with smoothing spline on log scale
#' plot_NRt(curves, smooth = "spline", log = "x",
#'          legend.pos = "top")
#'
#' ## EXAMPLE 2
#'
#' # you may also use this function to check whether all
#' # TD curves follow the same shape (making it a TnTx(t) plot).
#' posTD <- seq(2, 14, 2)
#' curves <- allCurves[posTD]
#'
#' plot_NRt(curves, main = "TnTx(t) Plot",
#'          smooth = "rmean", k = 20,
#'          ylab = "TD natural / TD regenerated",
#'          xlim = c(0, 20), legend = FALSE)
#'
#' ## EXAMPLE 3
#'
#' # extract data from all positions
#' data <- lapply(1:24, FUN = function(pos) {
#'   Risoe.BINfileData2RLum.Analysis(CWOSL.SAR.Data, pos = pos, ltype = "OSL")
#' })
#'
#' # get individual curve data from each aliquot
#' aliquot <- lapply(data, get_RLum)
#'
#' # set graphical parameters
#' par(mfrow = c(2, 2))
#'
#' # create NR(t) plots for all aliquots
#' for (i in 1:length(aliquot)) {
#'   plot_NRt(aliquot[[i]][pos],
#'            main = paste0("Aliquot #", i),
#'            smooth = "rmean", k = 20,
#'            xlim = c(0, 10),
#'            cex = 0.6, legend.pos = "bottomleft")
#' }
#'
#' # reset graphical parameters
#' par(mfrow = c(1, 1))
#'
#'
#' @md
#' @export
plot_NRt <- function(data, log = FALSE, smooth = c("none", "spline", "rmean"),
                     k = 3, legend = TRUE, legend.pos = "topright", ...) {

  ## DATA INPUT EVALUATION -----
  if (inherits(data, "list")) {

    ## a single curve cannot produce an N/R ratio
    if (length(data) < 2)
      stop(paste("The provided list only contains curve data of the natural signal"),
           call. = FALSE)

    if (all(sapply(data, class) == "RLum.Data.Curve")) {
      curves <- lapply(data, get_RLum)
    } else {
      ## FIX: a list of plain two-column data.frames/matrices is documented as
      ## valid input, but 'curves' was previously left unassigned in that case,
      ## which crashed below with "object 'curves' not found"
      curves <- data
    }

  } else if (inherits(data, "data.frame") || inherits(data, "matrix")) {

    ## first column = time axis, each further column = counts of one curve
    if (ncol(data) < 3)
      stop(paste("The provided", class(data),
                 "only contains curve data of the natural signal"),
           call. = FALSE)

    if (is.matrix(data))
      data <- as.data.frame(data)

    curves <- apply(data[2:ncol(data)], MARGIN = 2, function(curve) {
      data.frame(data[ ,1], curve)
    })

  } else if (inherits(data, "RLum.Analysis")) {

    RLum.objects <- get_RLum(data)

    if (!any(sapply(RLum.objects, class) == "RLum.Data.Curve"))
      stop(paste("The provided RLum.Analysis object must exclusively contain RLum.Data.Curve objects."),
           call. = FALSE)

    curves <- lapply(RLum.objects, get_RLum)

    if (length(curves) < 2)
      stop(paste("The provided RLum.Analysis object only contains curve data of the natural signal"),
           call. = FALSE)
  }

  ## BASIC SETTINGS ------
  ## by convention the first curve is the natural signal, all others are
  ## regenerated signals; the time axis is taken from the natural curve
  natural <- curves[[1]]
  regCurves <- curves[2:length(curves)]
  time <- curves[[1]][ ,1]

  ## DATA TRANSFORMATION -----

  # calculate natural/regenerated ratios
  NR <- lapply(regCurves, FUN = function(reg, nat) {
    nat[ ,2] / reg[ ,2]
  }, natural)

  # smooth spline
  if (smooth[1] == "spline") {
    NR <- lapply(NR, function(nr) {
      smooth.spline(nr)$y
    })
  }

  # rolling mean (shortens the series by k - 1, hence the time axis is
  # smoothed alongside to keep both vectors the same length)
  if (smooth[1] == "rmean") {
    NR <- lapply(NR, function(nr) {
      zoo::rollmean(nr, k)
    })
    time <- zoo::rollmean(time, k)
  }

  # normalise each ratio curve to its first value
  NRnorm <- lapply(NR, FUN = function(nr) {
    nr / nr[1]
  })

  ## EXTRA ARGUMENTS -----

  # default values
  settings <- list(
    xlim = if (log == "x" || log == "xy") c(0.1, max(time)) else c(0, max(time)),
    ylim = range(pretty(c(min(sapply(NRnorm, min)), max(sapply(NRnorm, max))))),
    xlab = "Time [s]",
    ylab = "Natural signal / Regenerated signal",
    cex = 1L,
    main = "NR(t) Plot")

  # override defaults with user settings
  settings <- modifyList(settings, list(...))

  ## PLOTTING ----------

  # set graphical parameter
  par(cex = settings$cex)

  # empty plot; invalid/unset 'log' values fall back to linear axes
  if (is.na(pmatch(log, c("x", "y", "xy"))))
    log <- ""

  do.call(plot, modifyList(list(x = NA, y = NA, log = log,
                                xaxs = "i", yaxs = "i"),
                           settings))

  # horizontal reference line at ratio = 1
  abline(h = 1, lty = 3, col = "grey")

  col <- 1:length(NRnorm)

  # add N/R lines
  mapply(FUN = function(curve, col) {
    points(time, curve, type = "l", col = col)
  }, NRnorm, col)

  # add legend
  if (legend) {
    labels <- paste0("N/R", 1:length(NRnorm))
    ncol <- ifelse(length(NRnorm) > 4, ceiling(length(NRnorm) / 4) , 1)
    legend(legend.pos, legend = labels, col = col, lty = 1,
           ncol = ncol, cex = 0.8, bty = "n")
  }

  ## RETURN VALUES ----
  obj <- set_RLum("RLum.Analysis",
                  protocol = "UNKNOWN",
                  records = mapply(FUN = function(curve, id) {
                    set_RLum("RLum.Data.Curve",
                             recordType = paste0("N/R", id),
                             curveType = "NRt",
                             data = matrix(c(time, curve), ncol = 2),
                             info = list(
                               data = curves,
                               call = sys.call(-6L),
                               args = as.list(sys.call(-6L)[-1])
                             ))
                  }, NRnorm, seq_len(length(NRnorm)))
  )

  invisible(obj)
}
Luminescence/R/plot_RLum.Data.Curve.R0000644000176200001440000001567213231137116017071 0ustar liggesusers#' Plot function for an RLum.Data.Curve S4 class object
#'
#' The function provides a standardised plot output for curve data of an
#' RLum.Data.Curve S4 class object
#'
#' Only single curve data can be plotted with this function. Arguments
#' according to [plot].
#'
#' @param object [RLum.Data.Curve-class] (**required**):
#' S4 object of class `RLum.Data.Curve`
#'
#' @param par.local [logical] (*with default*):
#' use local graphical parameters for plotting, e.g. the plot is shown in one
#' column and one row. If `par.local = FALSE`, global parameters are inherited.
#'
#' @param norm [logical] (*with default*):
#' allows curve normalisation to the highest count value
#'
#' @param smooth [logical] (*with default*):
#' provides an automatic curve smoothing based on [zoo::rollmean]
#'
#' @param ... further arguments and graphical parameters that will be passed
#' to the `plot` function
#'
#' @return Returns a plot.
#'
#' @note Not all arguments of [plot] will be passed!
#'
#' @section Function version: 0.2.3
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)
#'
#' @seealso [plot], [plot_RLum]
#'
#' @keywords aplot
#'
#' @examples
#'
#' ##plot curve data
#'
#' #load Example data
#' data(ExampleData.CW_OSL_Curve, envir = environment())
#'
#' #transform data.frame to RLum.Data.Curve object
#' temp <- as(ExampleData.CW_OSL_Curve, "RLum.Data.Curve")
#'
#' #plot RLum.Data.Curve object
#' plot_RLum.Data.Curve(temp)
#'
#'
#' @md
#' @export
plot_RLum.Data.Curve<- function(
  object,
  par.local = TRUE,
  norm = FALSE,
  smooth = FALSE,
  ...
){

  # Integrity check -------------------------------------------------------------

  ##check if object is of class RLum.Data.Curve
  ##FIX: use is() instead of the former class() comparison, which fails for
  ##objects carrying more than one class name and rejects valid subclasses;
  ##is() is also the check style used elsewhere in this package
  if (!is(object, "RLum.Data.Curve")) {
    stop("[plot_RLum.Data.Curve()] Input object is not of type RLum.Data.Curve")
  }

  ##stop for NA values
  if (!all(is.na(object@data))) {

    ##set labeling unit (x-axis unit depends on the measurement type)
    if (!is.na(object@recordType)) {
      lab.unit <- if (object@recordType == "OSL" | object@recordType == "IRSL" |
                      object@recordType == "RL" | object@recordType == "RF" |
                      object@recordType == "LM-OSL" | object@recordType == "RBR") {
        "s"
      } else if (object@recordType == "TL") {
        "\u00B0C"
      } else {
        "Unknown"
      }
    } else {
      lab.unit <- "Unknown"
    }

    ##set x-axis label text
    if (!is.na(object@recordType)) {
      lab.xlab <- if (object@recordType == "OSL" | object@recordType == "IRSL" |
                      object@recordType == "RL" | object@recordType == "RF" |
                      object@recordType == "RBR" | object@recordType == "LM-OSL") {
        "Stimulation time"
      } else if (object@recordType == "TL") {
        "Temperature"
      } else {
        "Independent"
      }
    } else {
      lab.xlab <- "Independent"
    }

    ##XSYG
    ##check for curveDescripter: XSYG files carry their own axis labels as a
    ##";"-separated string "xlab;ylab"
    if ("curveDescripter" %in% names(object@info)) {
      temp.lab <- strsplit(object@info$curveDescripter,
                           split = ";", fixed = TRUE)[[1]]
      xlab.xsyg <- temp.lab[1]
      ylab.xsyg <- temp.lab[2]
    } else {
      xlab.xsyg <- NA
      ylab.xsyg <- NA
    }

    ##normalise curves if argument has been set
    ##FIX: na.rm = TRUE, otherwise a single NA count value turns the whole
    ##normalised curve into NA
    if (norm) {
      object@data[,2] <- object@data[,2] / max(object@data[,2], na.rm = TRUE)
    }

    ##deal with additional arguments
    extraArgs <- list(...)

    main <- if ("main" %in% names(extraArgs)) {
      extraArgs$main
    } else {
      object@recordType
    }

    xlab <- if ("xlab" %in% names(extraArgs)) {
      extraArgs$xlab
    } else if (!is.na(xlab.xsyg)) {
      xlab.xsyg
    } else {
      paste0(lab.xlab, " [", lab.unit, "]")
    }

    ##the default y-label reports counts per channel width
    ylab <- if ("ylab" %in% names(extraArgs)) {
      extraArgs$ylab
    } else if (!is.na(ylab.xsyg)) {
      ylab.xsyg
    } else if (lab.xlab == "Independent") {
      "Dependent [unknown]"
    } else {
      paste(
        object@recordType, " [cts/",
        round(max(object@data[,1]) / length(object@data[,1]), digits = 2),
        " ", lab.unit, "]", sep = ""
      )
    }

    ##FIX: the two subtitle cases used to be plain consecutive 'if' statements
    ##inside the else-block; since a block evaluates to its *last* statement,
    ##the TL heating-rate subtitle was computed and then silently discarded.
    ##Chaining with 'else if' lets each record type produce its subtitle.
    sub <- if ("sub" %in% names(extraArgs)) {
      extraArgs$sub
    } else if ((grepl("TL", object@recordType) == TRUE) &
               "RATE" %in% names(object@info)) {
      paste("(", object@info$RATE, " K/s)", sep = "")
    } else if ((grepl("OSL", object@recordType) | grepl("IRSL", object@recordType)) &
               "interval" %in% names(object@info)) {
      paste("(resolution: ", object@info$interval, " s)", sep = "")
    }

    cex <- if ("cex" %in% names(extraArgs)) { extraArgs$cex } else { 1 }
    type <- if ("type" %in% names(extraArgs)) { extraArgs$type } else { "l" }
    lwd <- if ("lwd" %in% names(extraArgs)) { extraArgs$lwd } else { 1 }
    lty <- if ("lty" %in% names(extraArgs)) { extraArgs$lty } else { 1 }
    pch <- if ("pch" %in% names(extraArgs)) { extraArgs$pch } else { 1 }
    col <- if ("col" %in% names(extraArgs)) { extraArgs$col } else { 1 }

    ylim <- if ("ylim" %in% names(extraArgs)) {
      extraArgs$ylim
    } else {
      c(min(object@data[,2], na.rm = TRUE), max(object@data[,2], na.rm = TRUE))
    }

    xlim <- if ("xlim" %in% names(extraArgs)) {
      extraArgs$xlim
    } else {
      c(min(object@data[,1]), max(object@data[,1]))
    }

    log <- if ("log" %in% names(extraArgs)) { extraArgs$log } else { "" }
    mtext <- if ("mtext" %in% names(extraArgs)) { extraArgs$mtext } else { "" }
    fun <- if ("fun" %in% names(extraArgs)) { extraArgs$fun } else { FALSE }

    ##to avoid problems with plot method of RLum.Analysis
    plot.trigger <- if ("plot.trigger" %in% names(extraArgs)) {
      extraArgs$plot.trigger
    } else {
      FALSE
    }

    ##par setting for possible combination with plot method for RLum.Analysis objects
    if (par.local == TRUE) {
      par(mfrow = c(1,1), cex = cex)
    }

    ##smooth: rolling mean over ~1 % of the channels; fill = NA keeps the
    ##vector length unchanged
    if (smooth) {
      k <- ceiling(length(object@data[, 2]) / 100)
      object@data[, 2] <- zoo::rollmean(object@data[, 2], k = k, fill = NA)
    }

    ##plot curve
    plot(
      object@data[,1],
      object@data[,2],
      main = main,
      xlim = xlim,
      ylim = ylim,
      xlab = xlab,
      ylab = ylab,
      sub = sub,
      type = type,
      log = log,
      col = col,
      lwd = lwd,
      pch = pch,
      lty = lty
    )

    ##plot additional mtext
    mtext(mtext, side = 3, cex = cex * 0.8)

    if (fun == TRUE) {
      sTeve()
    }

  } else {
    warning("[plot_RLum.Data.Curve()] Curve contains only NA-values, nothing plotted.",
            call. = FALSE)
  }
}
Luminescence/R/calc_HomogeneityTest.R0000644000176200001440000001017613231137116017324 0ustar liggesusers#' Apply a simple homogeneity test after Galbraith (2003)
#'
#' A simple homogeneity test for De estimates
#'
#' For details see Galbraith (2003).
#'
#' @param data [RLum.Results-class] or [data.frame] (**required**):
#' for [data.frame]: two columns with De `(data[,1])` and De error `(values[,2])`
#'
#' @param log [logical] (*with default*):
#' perform the homogeneity test with (un-)logged data
#'
#' @param ... further arguments (for internal compatibility only).
#'
#' @return
#' Returns a terminal output. In addition an
#' [RLum.Results-class]-object is returned containing the
#' following elements:
#'
#' \item{summary}{[data.frame] summary of all relevant model results.}
#' \item{data}{[data.frame] original input data}
#' \item{args}{[list] used arguments}
#' \item{call}{[call] the function call}
#'
#' The output should be accessed using the function [get_RLum]
#'
#' @section Function version: 0.3.0
#'
#' @author Christoph Burow, University of Cologne (Germany), Sebastian Kreutzer,
#' IRAMAT-CRP2A, Université Bordeaux Montaigne (France)
#'
#' @seealso [pchisq]
#'
#' @references
#' Galbraith, R.F., 2003. A simple homogeneity test for estimates
#' of dose obtained using OSL. Ancient TL 21, 75-77.
#'
#' @examples
#'
#' ## load example data
#' data(ExampleData.DeValues, envir = environment())
#'
#' ## apply the homogeneity test
#' calc_HomogeneityTest(ExampleData.DeValues$BT998)
#'
#' ## using the data presented by Galbraith (2003)
#' df <-
#'   data.frame(
#'     x = c(30.1, 53.8, 54.3, 29.0, 47.6, 44.2, 43.1),
#'     y = c(4.8, 7.1, 6.8, 4.3, 5.2, 5.9, 3.0))
#'
#' calc_HomogeneityTest(df)
#'
#'
#' @md
#' @export
calc_HomogeneityTest <- function(
  data,
  log = TRUE,
  ...
){

  ##============================================================================##
  ## CONSISTENCY CHECK OF INPUT DATA
  ##============================================================================##

  ##FIX: fail early with a clear message; the former 'if(missing(data)==FALSE)'
  ##wrapper silently skipped validation and crashed later at log(data) with a
  ##confusing error when 'data' was not supplied
  if (missing(data)) {
    stop("[calc_HomogeneityTest()] input 'data' is missing!", call. = FALSE)
  }

  if (!is(data, "data.frame") & !is(data, "RLum.Results")) {
    stop(
      "[calc_HomogeneityTest()] 'data' object has to be of type 'data.frame' or 'RLum.Results'!",
      call. = FALSE
    )
  } else {
    if (is(data, "RLum.Results")) {
      ##extract the De table from the RLum.Results container
      data <- get_RLum(data, "data")
    }
  }

  ##==========================================================================##
  ## ... ARGUMENTS
  ##==========================================================================##
  extraArgs <- list(...)

  ##terminal output is enabled unless verbose = FALSE is passed via '...'
  if ("verbose" %in% names(extraArgs)) {
    verbose <- extraArgs$verbose
  } else {
    verbose <- TRUE
  }

  ##============================================================================##
  ## CALCULATIONS
  ##============================================================================##

  if (log) {
    ##work on the log scale; the relative De error becomes the absolute error
    ##of the logged De values
    dat <- log(data)
    dat[[2]] <- data[[2]] / data[[1]]
  } else {
    dat <- data
  }

  ##inverse-variance weights and weighted mean
  wi <- 1 / dat[[2]] ^ 2
  wizi <- wi * dat[[1]]
  mu <- sum(wizi) / sum(wi)

  ##G statistic (chi-squared distributed with n - 1 degrees of freedom)
  gi <- wi * (dat[[1]] - mu) ^ 2
  G <- sum(gi)

  df <- length(wi) - 1
  n <- length(wi)
  P <- pchisq(G, df, lower.tail = FALSE)

  ##============================================================================##
  ## OUTPUT
  ##============================================================================##
  if (verbose) {
    cat("\n [calc_HomogeneityTest()]")
    cat(paste("\n\n ---------------------------------"))
    cat(paste("\n n: ", n))
    cat(paste("\n ---------------------------------"))
    cat(paste("\n mu: ", round(mu,4)))
    cat(paste("\n G-value: ", round(G,4)))
    cat(paste("\n Degrees of freedom:", df))
    cat(paste("\n P-value: ", round(P,4)))
    cat(paste("\n ---------------------------------\n\n"))
  }

  ##============================================================================##
  ## RETURN VALUES
  ##============================================================================##
  summary <- data.frame(
    n = n,
    g.value = G,
    df = df,
    P.value = P
  )

  args <- list(log = log)

  return(set_RLum(
    class = "RLum.Results",
    data = list(
      summary = summary,
      data = data,
      args = args
    ),
    info = list(call = sys.call())
  ))
}
Luminescence/R/plot_Histogram.R0000644000176200001440000007070313231137116016210 0ustar liggesusers#' Plot a histogram with separate error plot
#'
#' Function plots a predefined histogram with an accompanying error plot as
#' suggested by Rex Galbraith at the UK LED in Oxford 2010.
#'
#' If the normal curve is added, the y-axis in the histogram will show the
#' probability density.
#'
#'
#' A statistic summary, i.e.
a collection of statistic measures of #' centrality and dispersion (and further measures) can be added by specifying #' one or more of the following keywords: #' - `"n"` (number of samples), #' - `"mean"` (mean De value), #' - `"mean.weighted"` (error-weighted mean), #' - `"median"` (median of the De values), #' - `"sdrel"` (relative standard deviation in percent), #' - `"sdrel.weighted"` (error-weighted relative standard deviation in percent), #' - `"sdabs"` (absolute standard deviation), #' - `"sdabs.weighted"` (error-weighted absolute standard deviation), #' - `"serel"` (relative standard error), #' - `"serel.weighted"` (error-weighted relative standard error), #' - `"seabs"` (absolute standard error), #' - `"seabs.weighted"` (error-weighted absolute standard error), #' - `"kurtosis"` (kurtosis) and #' - `"skewness"` (skewness). #' #' @param data [data.frame] or [RLum.Results-class] object (**required**): #' for `data.frame`: two columns: De (`data[,1]`) and De error (`data[,2]`) #' #' @param na.rm [logical] (*with default*): #' excludes `NA` values from the data set prior to any further operations. #' #' @param mtext [character] (*optional*): #' further sample information ([mtext]). #' #' @param cex.global [numeric] (*with default*): #' global scaling factor. #' #' @param se [logical] (*optional*): #' plots standard error points over the histogram, default is `FALSE`. #' #' @param rug [logical] (*optional*): #' adds rugs to the histogram, default is `TRUE`. #' #' @param normal_curve [logical] (*with default*): #' adds a normal curve to the histogram. Mean and sd are calculated from the #' input data. More see details section. #' #' @param summary [character] (*optional*): #' add statistic measures of centrality and dispersion to the plot. #' Can be one or more of several keywords. See details for available keywords. #' #' @param summary.pos [numeric] or [character] (*with default*): #' optional position coordinates or keyword (e.g. 
`"topright"`) #' for the statistical summary. Alternatively, the keyword `"sub"` may be #' specified to place the summary below the plot header. However, this latter #' option in only possible if `mtext` is not used. In case of coordinate #' specification, y-coordinate refers to the right y-axis. #' #' @param colour [numeric] or [character] (*with default*): #' optional vector of length 4 which specifies the colours of the following #' plot items in exactly this order: histogram bars, rug lines, normal #' distribution curve and standard error points #' (e.g., `c("grey", "black", "red", "grey")`). #' #' @param interactive [logical] (*with default*): #' create an interactive histogram plot (requires the 'plotly' package) #' #' @param ... further arguments and graphical parameters passed to [plot] or #' [hist]. If y-axis labels are provided, these must be specified as a vector #' of length 2 since the plot features two axes #' (e.g. `ylab = c("axis label 1", "axis label 2")`). Y-axes limits #' (`ylim`) must be provided as vector of length four, with the first two #' elements specifying the left axes limits and the latter two elements giving #' the right axis limits. #' #' @note The input data is not restricted to a special type. 
#' #' @section Function version: 0.4.4 #' #' @author #' Michael Dietze, GFZ Potsdam (Germany)\cr #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France) #' #' @seealso [hist], [plot] #' #' @examples #' #' ## load data #' data(ExampleData.DeValues, envir = environment()) #' ExampleData.DeValues <- #' Second2Gray(ExampleData.DeValues$BT998, dose.rate = c(0.0438,0.0019)) #' #' ## plot histogram the easiest way #' plot_Histogram(ExampleData.DeValues) #' #' ## plot histogram with some more modifications #' plot_Histogram(ExampleData.DeValues, #' rug = TRUE, #' normal_curve = TRUE, #' cex.global = 0.9, #' pch = 2, #' colour = c("grey", "black", "blue", "green"), #' summary = c("n", "mean", "sdrel"), #' summary.pos = "topleft", #' main = "Histogram of De-values", #' mtext = "Example data set", #' ylab = c(expression(paste(D[e], " distribution")), #' "Standard error"), #' xlim = c(100, 250), #' ylim = c(0, 0.1, 5, 20)) #' #' #' @md #' @export plot_Histogram <- function( data, na.rm = TRUE, mtext, cex.global, se, rug, normal_curve, summary, summary.pos, colour, interactive = FALSE, ... 
) { # Integrity tests --------------------------------------------------------- ## check/adjust input data structure if(is(data, "RLum.Results") == FALSE & is(data, "data.frame") == FALSE) { stop(paste("[plot_Histogram()] Input data format is neither", "'data.frame' nor 'RLum.Results'")) } else { if(is(data, "RLum.Results") == TRUE) { data <- get_RLum(data, "data")[,1:2] } } ## handle error-free data sets if(length(data) < 2) { data <- cbind(data, rep(NA, length(data))) } ## Set general parameters --------------------------------------------------- ## Check/set default parameters if(missing(cex.global) == TRUE) { cex.global <- 1 } if(missing(mtext) == TRUE) { mtext <- "" } if(missing(se) == TRUE) { se = TRUE } if(missing(rug) == TRUE) { rug = TRUE } if(missing(colour) == TRUE) { colour = c("white", "black", "red", "black") } if(missing(summary) == TRUE) { summary <- "" } if(missing(summary.pos) == TRUE) { summary.pos <- "sub" } if(missing(normal_curve) == TRUE) { normal_curve = FALSE } ## read out additional arguments list extraArgs <- list(...) 
## define fun if("fun" %in% names(extraArgs)) { fun <- extraArgs$fun } else { fun <- FALSE } ## optionally, count and exclude NA values and print result if(na.rm == TRUE) { n.NA <- sum(is.na(data[,1])) if(n.NA == 1) { print("1 NA value excluded.") } else if(n.NA > 1) { print(paste(n.NA, "NA values excluded.")) } data <- data[!is.na(data[,1]),] } if("main" %in% names(extraArgs)) { main.plot <- extraArgs$main } else { main.plot <- "Histogram" } if("xlab" %in% names(extraArgs)) { xlab.plot <- extraArgs$xlab } else { xlab.plot <- expression(paste(D[e], " [Gy]")) } if("ylab" %in% names(extraArgs)) { ylab.plot <- extraArgs$ylab } else { ylab.plot <- c("Frequency", "Standard error") } if("breaks" %in% names(extraArgs)) { breaks.plot <- extraArgs$breaks breaks_calc <- hist(x = data[,1], breaks = breaks.plot, plot = FALSE)$breaks } else { breaks.plot <- hist(x = data[,1], plot = FALSE)$breaks breaks_calc <- breaks.plot } if("xlim" %in% names(extraArgs)) { xlim.plot <- extraArgs$xlim } else { xlim.plot <- range(breaks_calc) } if("ylim" %in% names(extraArgs)) { ylim.plot <- extraArgs$ylim } else { H.lim <- hist(data[,1], breaks = breaks.plot, plot = FALSE) if(normal_curve == TRUE) { left.ylim <- c(0, max(H.lim$density)) } else { left.ylim <- c(0, max(H.lim$counts)) } range.error <- try(expr = range(data[,2], na.rm = TRUE), silent = TRUE) range.error[1] <- ifelse(is.infinite(range.error[1]), 0, range.error[1]) range.error[2] <- ifelse(is.infinite(range.error[2]), 0, range.error[2]) ylim.plot <- c(left.ylim, range.error) } if("pch" %in% names(extraArgs)) { pch.plot <- extraArgs$pch } else { pch.plot <- 1 } ## Set plot area format par(mar = c(4.5, 4.5, 4.5, 4.5), cex = cex.global) ## Plot histogram ----------------------------------------------------------- HIST <- hist(data[,1], main = "", xlab = xlab.plot, ylab = ylab.plot[1], xlim = xlim.plot, ylim = ylim.plot[1:2], breaks = breaks.plot, freq = !normal_curve, col = colour[1] ) ## add title title(line = 2, main = main.plot) ## 
Optionally, add rug ------------------------------------------------------ if(rug == TRUE) {rug(data[,1], col = colour[2])} ## Optionally, add a normal curve based on the data ------------------------- if(normal_curve == TRUE){ ## cheat the R check routine, tztztz how neat x <- NULL rm(x) ## add normal distribution curve curve(dnorm(x, mean = mean(na.exclude(data[,1])), sd = sd(na.exclude(data[,1]))), col = colour[3], add = TRUE, lwd = 1.2 * cex.global) } ## calculate and paste statistical summary data.stats <- list(data = data) ## calculate and paste statistical summary De.stats <- matrix(nrow = length(data), ncol = 18) colnames(De.stats) <- c("n", "mean", "mean.weighted", "median", "median.weighted", "kde.max", "sd.abs", "sd.rel", "se.abs", "se.rel", "q25", "q75", "skewness", "kurtosis", "sd.abs.weighted", "sd.rel.weighted", "se.abs.weighted", "se.rel.weighted") for(i in 1:length(data)) { statistics <- calc_Statistics(data) De.stats[i,1] <- statistics$weighted$n De.stats[i,2] <- statistics$unweighted$mean De.stats[i,3] <- statistics$weighted$mean De.stats[i,4] <- statistics$unweighted$median De.stats[i,5] <- statistics$unweighted$median De.stats[i,7] <- statistics$unweighted$sd.abs De.stats[i,8] <- statistics$unweighted$sd.rel De.stats[i,9] <- statistics$unweighted$se.abs De.stats[i,10] <- statistics$weighted$se.rel De.stats[i,11] <- quantile(data[,1], 0.25) De.stats[i,12] <- quantile(data[,1], 0.75) De.stats[i,13] <- statistics$unweighted$skewness De.stats[i,14] <- statistics$unweighted$kurtosis De.stats[i,15] <- statistics$weighted$sd.abs De.stats[i,16] <- statistics$weighted$sd.rel De.stats[i,17] <- statistics$weighted$se.abs De.stats[i,18] <- statistics$weighted$se.rel ##kdemax - here a little doubled as it appears below again if(nrow(data) >= 2){ De.density <-density(x = data[,1], kernel = "gaussian", from = xlim.plot[1], to = xlim.plot[2]) De.stats[i,6] <- De.density$x[which.max(De.density$y)] }else{ De.denisty <- NA De.stats[i,6] <- NA } } label.text = 
list(NA) if(summary.pos[1] != "sub") { n.rows <- length(summary) for(i in 1:length(data)) { stops <- paste(rep("\n", (i - 1) * n.rows), collapse = "") summary.text <- character(0) for(j in 1:length(summary)) { summary.text <- c(summary.text, paste( "", ifelse("n" %in% summary[j] == TRUE, paste("n = ", De.stats[i,1], "\n", sep = ""), ""), ifelse("mean" %in% summary[j] == TRUE, paste("mean = ", round(De.stats[i,2], 2), "\n", sep = ""), ""), ifelse("mean.weighted" %in% summary[j] == TRUE, paste("weighted mean = ", round(De.stats[i,3], 2), "\n", sep = ""), ""), ifelse("median" %in% summary[j] == TRUE, paste("median = ", round(De.stats[i,4], 2), "\n", sep = ""), ""), ifelse("median.weighted" %in% summary[j] == TRUE, paste("weighted median = ", round(De.stats[i,5], 2), "\n", sep = ""), ""), ifelse("kdemax" %in% summary[j] == TRUE, paste("kdemax = ", round(De.stats[i,6], 2), " \n ", sep = ""), ""), ifelse("sdabs" %in% summary[j] == TRUE, paste("sd = ", round(De.stats[i,7], 2), "\n", sep = ""), ""), ifelse("sdrel" %in% summary[j] == TRUE, paste("rel. sd = ", round(De.stats[i,8], 2), " %", "\n", sep = ""), ""), ifelse("seabs" %in% summary[j] == TRUE, paste("se = ", round(De.stats[i,9], 2), "\n", sep = ""), ""), ifelse("serel" %in% summary[j] == TRUE, paste("rel. se = ", round(De.stats[i,10], 2), " %", "\n", sep = ""), ""), ifelse("skewness" %in% summary[j] == TRUE, paste("skewness = ", round(De.stats[i,13], 2), "\n", sep = ""), ""), ifelse("kurtosis" %in% summary[j] == TRUE, paste("kurtosis = ", round(De.stats[i,14], 2), "\n", sep = ""), ""), ifelse("sdabs.weighted" %in% summary[j] == TRUE, paste("abs. weighted sd = ", round(De.stats[i,15], 2), "\n", sep = ""), ""), ifelse("sdrel.weighted" %in% summary[j] == TRUE, paste("rel. weighted sd = ", round(De.stats[i,16], 2), "\n", sep = ""), ""), ifelse("seabs.weighted" %in% summary[j] == TRUE, paste("abs. weighted se = ", round(De.stats[i,17], 2), "\n", sep = ""), ""), ifelse("serel.weighted" %in% summary[j] == TRUE, paste("rel. 
weighted se = ", round(De.stats[i,18], 2), "\n", sep = ""), ""), sep = "")) } summary.text <- paste(summary.text, collapse = "") label.text[[length(label.text) + 1]] <- paste(stops, summary.text, stops, sep = "") } } else { for(i in 1:length(data)) { summary.text <- character(0) for(j in 1:length(summary)) { summary.text <- c(summary.text, ifelse("n" %in% summary[j] == TRUE, paste("n = ", De.stats[i,1], " | ", sep = ""), ""), ifelse("mean" %in% summary[j] == TRUE, paste("mean = ", round(De.stats[i,2], 2), " | ", sep = ""), ""), ifelse("mean.weighted" %in% summary[j] == TRUE, paste("weighted mean = ", round(De.stats[i,3], 2), " | ", sep = ""), ""), ifelse("median" %in% summary[j] == TRUE, paste("median = ", round(De.stats[i,4], 2), " | ", sep = ""), ""), ifelse("median.weighted" %in% summary[j] == TRUE, paste("weighted median = ", round(De.stats[i,5], 2), " | ", sep = ""), ""), ifelse("kdemax" %in% summary[j] == TRUE, paste("kdemax = ", round(De.stats[i,6], 2), " | ", sep = ""), ""), ifelse("sdrel" %in% summary[j] == TRUE, paste("rel. sd = ", round(De.stats[i,8], 2), " %", " | ", sep = ""), ""), ifelse("sdabs" %in% summary[j] == TRUE, paste("abs. sd = ", round(De.stats[i,7], 2), " | ", sep = ""), ""), ifelse("serel" %in% summary[j] == TRUE, paste("rel. se = ", round(De.stats[i,10], 2), " %", " | ", sep = ""), ""), ifelse("seabs" %in% summary[j] == TRUE, paste("abs. se = ", round(De.stats[i,9], 2), " | ", sep = ""), ""), ifelse("skewness" %in% summary[j] == TRUE, paste("skewness = ", round(De.stats[i,13], 2), " | ", sep = ""), ""), ifelse("kurtosis" %in% summary[j] == TRUE, paste("kurtosis = ", round(De.stats[i,14], 2), " | ", sep = ""), ""), ifelse("sdabs.weighted" %in% summary[j] == TRUE, paste("abs. weighted sd = ", round(De.stats[i,15], 2), " %", " | ", sep = ""), ""), ifelse("sdrel.weighted" %in% summary[j] == TRUE, paste("rel. weighted sd = ", round(De.stats[i,16], 2), " %", " | ", sep = ""), ""), ifelse("seabs.weighted" %in% summary[j] == TRUE, paste("abs. 
weighted se = ", round(De.stats[i,17], 2), " %", " | ", sep = ""), ""), ifelse("serel.weighted" %in% summary[j] == TRUE, paste("rel. weighted se = ", round(De.stats[i,18], 2), " %", " | ", sep = ""), "") ) } summary.text <- paste(summary.text, collapse = "") label.text[[length(label.text) + 1]] <- paste( " ", summary.text, sep = "") } ## remove outer vertical lines from string for(i in 2:length(label.text)) { label.text[[i]] <- substr(x = label.text[[i]], start = 3, stop = nchar(label.text[[i]]) - 3) } } ## remove dummy list element label.text[[1]] <- NULL ## convert keywords into summary placement coordinates if(missing(summary.pos) == TRUE) { summary.pos <- c(xlim.plot[1], ylim.plot[2]) summary.adj <- c(0, 1) } else if(length(summary.pos) == 2) { summary.pos <- summary.pos summary.adj <- c(0, 1) } else if(summary.pos[1] == "topleft") { summary.pos <- c(xlim.plot[1], ylim.plot[2]) summary.adj <- c(0, 1) } else if(summary.pos[1] == "top") { summary.pos <- c(mean(xlim.plot), ylim.plot[2]) summary.adj <- c(0.5, 1) } else if(summary.pos[1] == "topright") { summary.pos <- c(xlim.plot[2], ylim.plot[2]) summary.adj <- c(1, 1) } else if(summary.pos[1] == "left") { summary.pos <- c(xlim.plot[1], mean(ylim.plot[1:2])) summary.adj <- c(0, 0.5) } else if(summary.pos[1] == "center") { summary.pos <- c(mean(xlim.plot), mean(ylim.plot[1:2])) summary.adj <- c(0.5, 0.5) } else if(summary.pos[1] == "right") { summary.pos <- c(xlim.plot[2], mean(ylim.plot[1:2])) summary.adj <- c(1, 0.5) }else if(summary.pos[1] == "bottomleft") { summary.pos <- c(xlim.plot[1], ylim.plot[1]) summary.adj <- c(0, 0) } else if(summary.pos[1] == "bottom") { summary.pos <- c(mean(xlim.plot), ylim.plot[1]) summary.adj <- c(0.5, 0) } else if(summary.pos[1] == "bottomright") { summary.pos <- c(xlim.plot[2], ylim.plot[1]) summary.adj <- c(1, 0) } ## add summary content for(i in 1:length(data.stats)) { if(summary.pos[1] != "sub") { text(x = summary.pos[1], y = summary.pos[2], adj = summary.adj, labels = 
label.text[[i]], col = colour[2], cex = cex.global * 0.8) } else { if(mtext == "") { mtext(side = 3, line = 1 - i, text = label.text[[i]], col = colour[2], cex = cex.global * 0.8) } } } ## Optionally, add standard error plot -------------------------------------- if(sum(is.na(data[,2])) == length(data[,2])) { se <- FALSE } if(se == TRUE) { par(new = TRUE) plot.data <- data[!is.na(data[,2]),] plot(x = plot.data[,1], y = plot.data[,2], xlim = xlim.plot, ylim = ylim.plot[3:4], pch = pch.plot, col = colour[4], main = "", xlab = "", ylab = "", axes = FALSE, frame.plot = FALSE ) axis(side = 4, labels = TRUE, cex = cex.global ) mtext(ylab.plot[2], side = 4, line = 3, cex = cex.global) # par(new = FALSE) } ## Optionally add user-defined mtext mtext(side = 3, line = 0.5, text = mtext, cex = 0.8 * cex.global) ## FUN by R Luminescence Team if(fun & !interactive) sTeve() ## Optionally: Interactive Plot ---------------------------------------------- if (interactive) { if (!requireNamespace("plotly", quietly = TRUE)) stop("The interactive histogram requires the 'plotly' package. To install", " this package run 'install.packages('plotly')' in your R console.", call. 
= FALSE) ## tidy data ---- data <- as.data.frame(data) colnames(data) <- c("x", "y") x <- y <- NULL # suffice CRAN check for no visible binding if (length(grep("paste", as.character(xlab.plot))) > 0) xlab.plot <- "Equivalent dose [Gy]" ## create plots ---- # histogram hist <- plotly::plot_ly(data = data, x = x, type = "histogram", showlegend = FALSE, name = "Bin", opacity = 0.75, marker = list(color = "428BCA", line = list(width = 1.0, color = "white")), histnorm = ifelse(normal_curve, "probability density", ""), yaxis = "y" ) # normal curve ---- if (normal_curve) { density.curve <- density(data$x) normal.curve <- data.frame(x = density.curve$x, y = density.curve$y) hist <- plotly::add_trace(hist, data = normal.curve, x = x, y = y, type = "scatter", mode = "lines", marker = list(color = "red"), name = "Normal curve", yaxis = "y") } # scatter plot of individual errors if (se) { yaxis2 <- list(overlaying = "y", side = "right", showgrid = FALSE, title = ylab.plot[2], ticks = "", showline = FALSE) se.text <- paste0("Measured value:
", data$x, " ± ", data$y,"
") hist <- plotly::add_trace(hist, data = data, x = x, y = y, type = "scatter", mode = "markers", name = "Error", hoverinfo = "text", text = se.text, marker = list(color = "black"), yaxis = "y2") hist <- plotly::layout(yaxis2 = yaxis2) } # set layout ---- hist <- plotly::layout(hist, hovermode = "closest", title = paste("", main.plot, ""), margin = list(r = 90), xaxis = list(title = xlab.plot, ticks = ""), yaxis = list(title = ylab.plot[1], ticks = "", showline = FALSE, showgrid = FALSE) ) ## show and return plot ---- print(hist) return(hist) } } Luminescence/R/RLum-class.R0000644000176200001440000000477013231137116015200 0ustar liggesusers#' @include replicate_RLum.R RcppExports.R NULL #' Class `"RLum"` #' #' Abstract class for data in the package Luminescence #' Sublasses are: #' #' **RLum-class**\cr #' |\cr #' |----[RLum.Data-class]\cr #' |----|-- [RLum.Data.Curve-class]\cr #' |----|-- [RLum.Data.Spectrum-class]\cr #' |----|-- [RLum.Data.Image-class]\cr #' |----[RLum.Analysis-class]\cr #' |----[RLum.Results-class] #' #' @name RLum-class #' #' @docType class #' #' @slot originator #' Object of class [character] containing the name of the producing #' function for the object. Set automatically by using the function [set_RLum]. #' #' @slot info #' Object of class [list] for additional information on the object itself #' #' @slot .uid #' Object of class [character] for a unique object identifier. This id is #' usually calculated using the internal function `create_UID()` if the funtion [set_RLum] #' is called. #' #' @slot .pid #' Object of class [character] for a parent id. This allows nesting RLum-objects #' at will. The parent id can be the uid of another object. #' #' @note `RLum` is a virtual class. #' #' @section Objects from the Class: #' A virtual Class: No objects can be created from it. 
#'
#' @section Class version: 0.4.0
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France)
#'
#' @seealso [RLum.Data-class], [RLum.Data.Curve-class], [RLum.Data.Spectrum-class], [RLum.Data.Image-class],
#' [RLum.Analysis-class], [RLum.Results-class], [methods_RLum]
#'
#' @keywords classes
#'
#' @examples
#'
#' showClass("RLum")
#'
#' @md
#' @export
setClass("RLum",
  slots = list(
    originator = "character",
    info = "list",
    .uid = "character",
    .pid = "character"
  ),
  contains = "VIRTUAL",
  prototype = prototype(
    originator = NA_character_,
    info = list(),
    .uid = NA_character_,
    .pid = NA_character_
  )
)

# replication method for object class ------------------------------------------
#' @describeIn RLum
#' Replication method RLum-objects
#'
#' @param object [RLum-class] (**required**):
#' an object of class [RLum-class]
#'
#' @param times [integer] (*optional*):
#' number for times each element is repeated element
#'
#' @md
#' @export
setMethod(
  "replicate_RLum",
  "RLum",
  definition = function(object, times = NULL) {
    ## default to a single copy if no replication count was given
    if (is.null(times)) {
      times <- 1
    }

    ## FIX: use seq_len() instead of 1:times; for times = 0 the old form
    ## evaluated 1:0 == c(1, 0) and wrongly returned a list of length 2,
    ## while seq_len(0) correctly yields an empty list
    lapply(seq_len(times), function(x) {
      object
    })
  }
)
Luminescence/R/read_PSL2R.R0000644000176200001440000002706713417364147015073 0ustar liggesusers#' Import PSL files to R
#'
#' Imports PSL files produced by a SUERC portable OSL reader into R **(BETA)**.
#'
#' This function provides an import routine for the SUERC portable OSL Reader PSL
#' format. PSL files are just plain text and can be viewed with any text editor.
#' Due to the formatting of PSL files this import function relies heavily on
#' regular expression to find and extract all relevant information. See **note**.
#'
#' @param file [character] (**required**):
#' path and file name of the PSL file. If input is a `vector` it should comprise
#' only `character`s representing valid paths and PSL file names.
#' Alternatively the input character can be just a directory (path).
#' In this case the
#' the function tries to detect and import all PSL files found in the directory.
#'
#' @param drop_bg [logical] (*with default*):
#' `TRUE` to automatically remove all non-OSL/IRSL curves.
#'
#' @param as_decay_curve [logical] (*with default*):
#' Portable OSL Reader curves are often given as cumulative light sum curves.
#' Use `TRUE` (default) to convert the curves to the more usual decay form.
#'
#' @param smooth [logical] (*with default*):
#' `TRUE` to apply Tukey's Running Median Smoothing for OSL and IRSL decay curves.
#' Smoothing is encouraged if you see random signal drops within the decay curves related
#' to hardware errors.
#'
#' @param merge [logical] (*with default*):
#' `TRUE` to merge all `RLum.Analysis` objects. Only applicable if multiple
#' files are imported.
#'
#' @param ... currently not used.
#'
#' @return
#' Returns an S4 [RLum.Analysis-class] object containing
#' [RLum.Data.Curve-class] objects for each curve.
#'
#' @seealso [RLum.Analysis-class], [RLum.Data.Curve-class], [RLum.Data.Curve-class]
#'
#' @author Christoph Burow, University of Cologne (Germany)
#'
#' @section Function version: 0.0.1
#'
#' @note
#' Because this function relies heavily on regular expressions to parse
#' PSL files it is currently only in beta status. If the routine fails to import
#' a specific PSL file please report to so the
#' function can be updated.
#'
#' @keywords IO
#'
#' @examples
#'
#' # (1) Import PSL file to R
#'
#' file <- system.file("extdata", "DorNie_0016.psl", package = "Luminescence")
#' psl <- read_PSL2R(file, drop_bg = FALSE, as_decay_curve = TRUE, smooth = TRUE, merge = FALSE)
#' print(str(psl, max.level = 3))
#' plot(psl, combine = TRUE)
#'
#' @md
#' @export
read_PSL2R <- function(file, drop_bg = FALSE, as_decay_curve = TRUE,
                       smooth = FALSE, merge = FALSE, ...)
{
  ## INPUT VALIDATION ----
  ## a single input that is not a .psl file is treated as a directory and
  ## expanded to all PSL files found inside it
  if (length(file) == 1) {
    if (!grepl(".psl$", file, ignore.case = TRUE)) {
      file <- list.files(file, pattern = ".psl$", full.names = TRUE, ignore.case = TRUE)
      message("The following files were found and imported: \n",
              paste(file, collapse = "\n"))
    }
  }

  if (!all(file.exists(file)))
    stop("The following files do not exist, please check: \n",
         paste(file[!file.exists(file)], collapse = "\n"), call. = FALSE)

  ## MAIN ----
  results <- vector("list", length(file))

  ## seq_along() instead of 1:length(file): safe if the expansion above
  ## yielded an empty vector
  for (i in seq_along(file)) {

    ## Read in file ----
    doc <- readLines(file[i])

    ## Document formatting ----
    # remove lines with i) blanks only, ii) dashes, iii) equal signs
    doc <- gsub("^[ ]*$", "", doc)
    doc <- gsub("^[ -]*$", "", doc)
    doc <- gsub("^[ =]*$", "", doc)

    # the header ends with date and time with the previous line starting with
    # a single slash; sample name, date and time are extracted from the last
    # such line before it is blanked out of the document
    lines_with_slashes <- doc[grepl("\\", doc, fixed = TRUE)]
    sample_and_date <- lines_with_slashes[length(lines_with_slashes)]
    sample <- gsub("[^0-9a-zA-Z\\-_]", "", strsplit(sample_and_date, "@")[[1]][1], perl = TRUE)
    date_and_time <- strsplit(strsplit(sample_and_date, "@")[[1]][2], " ")[[1]]
    date_and_time_clean <- date_and_time[date_and_time != "" & date_and_time != "/" & date_and_time != "PM" & date_and_time != "AM"]
    date <- as.Date(date_and_time_clean[1], "%m/%d/%Y")

    # FIX: the previous format(x, format = "%h:%M:%S") call was a no-op for
    # character input ('format' is ignored and "%h" is not a valid code);
    # keep the raw time string, which is what the old call returned anyway
    time <- date_and_time_clean[2]

    doc <- gsub(lines_with_slashes[length(lines_with_slashes)], "", fixed = TRUE, doc)

    # last delimiting line before measurements are only apostrophes and dashes
    lines_with_apostrophes <- doc[grepl("'", doc, fixed = TRUE)]
    doc <- gsub(lines_with_apostrophes[length(lines_with_apostrophes)], "", fixed = TRUE, doc)

    # finally remove all empty lines
    doc <- doc[doc != ""]

    ## Split document ----
    begin_of_measurements <- grep("Measurement :", doc, fixed = TRUE)
    number_of_measurements <- length(begin_of_measurements)

    # Parse and format header
    header <- doc[1:(begin_of_measurements[1] - 1)]
    header <- format_Header(header)

    # add sample name, date and time to header list
    header$Date <- date
    header$Time <- time
    header$Sample <- sample

    # Parse and format the measurement values
    measurements_split <- vector("list", number_of_measurements)

    # save lines of each measurement to individual list elements
    for (j in seq_len(number_of_measurements)) {
      # FIX: 'number_of_measurements' is a scalar, so the former
      # max(number_of_measurements) was a no-op
      if (j != number_of_measurements)
        measurements_split[[j]] <- doc[begin_of_measurements[j]:(begin_of_measurements[j + 1] - 1)]
      else
        measurements_split[[j]] <- doc[begin_of_measurements[j]:length(doc)]
    }

    # format each measurement; this will return a list of RLum.Data.Curve objects
    measurements_formatted <- lapply(measurements_split, function(x) {
      format_Measurements(x, convert = as_decay_curve, header = header)
    })

    # drop dark count measurements if needed (non-OSL/IRSL curves return NULL
    # and are filtered out afterwards)
    if (drop_bg) {
      measurements_formatted <- lapply(measurements_formatted, function(x) {
        if (x@recordType != "USER")
          return(x)
      })
      measurements_formatted <- measurements_formatted[!sapply(measurements_formatted, is.null)]
    }

    # decay curve smoothing using Tukey's Running Median Smoothing (?smooth)
    if (smooth) {
      measurements_formatted <- lapply(measurements_formatted, function(x) {
        if (x@recordType != "USER")
          x@data[, 2] <- smooth(x@data[, 2])
        return(x)
      })
    }

    ## RETURN ----
    results[[i]] <- set_RLum("RLum.Analysis",
                             protocol = "portable OSL",
                             info = header,
                             records = measurements_formatted)
  }#Eof::Loop

  ## MERGE ----
  if (length(results) > 1 && merge)
    results <- merge_RLum(results)

  ## RETURN ----
  if (length(results) == 1)
    results <- results[[1]]

  return(results)
}

################################################################################
##                              HELPER FUNCTIONS                              ##
################################################################################

## ------------------------- FORMAT MEASUREMENT ----------------------------- ##
## Parse the text lines of one measurement into an RLum.Data.Curve object.
format_Measurements <- function(x, convert, header) {

  ## measurement parameters are given in the first line
  settings <- x[1]
  settings_split <- unlist(strsplit(settings, "|", fixed = TRUE))

  # welcome to regex/strsplit hell
  settings_measurement <- trimws(gsub(".*: ", "", settings_split[which(grepl("Measure", settings_split))]))
  settings_stimulation_unit <- gsub("[^0-9]", "", settings_split[which(grepl("Stim", settings_split))])
  settings_on_time <- as.integer(unlist(strsplit(gsub("[^0-9,]", "", settings_split[which(grepl("Off", settings_split))]), ","))[1])
  settings_off_time <- as.integer(unlist(strsplit(gsub("[^0-9,]", "", settings_split[which(grepl("Off", settings_split))]), ","))[2])
  settings_cycle <- na.omit(as.integer(unlist(strsplit(gsub("[^0-9,]", "", settings_split[which(grepl("No", settings_split))]), ","))))[1]
  settings_stimulation_time <- na.omit(as.integer(unlist(strsplit(gsub("[^0-9,]", "", settings_split[which(grepl("No", settings_split))]), ","))))[2]

  settings_list <- list("measurement" = settings_measurement,
                        "stimulation_unit" = switch(settings_stimulation_unit,
                                                    "0" = "USER",
                                                    "1" = "IRSL",
                                                    "2" = "OSL"),
                        "on_time" = settings_on_time,
                        "off_time" = settings_off_time,
                        "cycle" = settings_cycle,
                        "stimulation_time" = settings_stimulation_time)

  ## terminal counts are given in the last line
  terminal_count_text <- x[length(x)]
  terminal_count_text_formatted <- gsub("[^0-9]", "", unlist(strsplit(terminal_count_text, "/")))
  terminal_count <- as.numeric(terminal_count_text_formatted[1])
  terminal_count_error <- as.numeric(terminal_count_text_formatted[2])

  ## parse values and create a data frame (first two lines and the terminal
  ## count line carry no curve data)
  x_stripped <- x[-c(1, 2, length(x))]
  df <- data.frame(matrix(NA, ncol = 5, nrow = length(x_stripped)))

  for (i in seq_along(x_stripped)) {
    x_split <- unlist(strsplit(x_stripped[i], " "))
    x_split <- x_split[x_split != ""]
    x_split_clean <- gsub("[^0-9\\-]", "", x_split)
    x_split_cleaner <- x_split_clean[x_split_clean != "-"]
    df[i, ] <- as.numeric(x_split_cleaner)
  }

  names(df) <- c("time", "counts", "counts_error", "counts_per_cycle", "counts_per_cycle_error")

  # shape of the curve: decay or cumulative
  if (convert)
    data <- matrix(c(df$time, df$counts_per_cycle), ncol = 2)
  else
    data <- matrix(c(df$time, df$counts), ncol = 2)

  # determine the stimulation type
  if (grepl("Stim 0", settings)) {
    recordType <- "USER"
  }
  if (grepl("Stim 1", settings)) {
    recordType <- "IRSL"
  }
  if (grepl("Stim 2", settings)) {
    recordType <- "OSL"
  }

  object <- set_RLum(class = "RLum.Data.Curve",
                     originator = "read_PSL2R",
                     recordType = recordType,
                     curveType = "measured",
                     data = data,
                     info = list(settings = c(settings_list, header),
                                 raw_data = df))

  return(object)
}

## ---------------------------- FORMAT HEADER ------------------------------- ##
## Parse the PSL file header lines into a named list of settings.
format_Header <- function(x) {

  # split by double blanks
  # NOTE(review): the delimiter below is reproduced from the source as-is;
  # the comment suggests a two-blank delimiter - verify against a raw PSL file
  header_split <- strsplit(x, " ", fixed = TRUE)

  # check whether there are twice as many values as colons; if there is an
  # equal amount, the previous split was not sufficient and we need to further
  # split by a colon (that is followed by a blank)
  header_split_clean <- lapply(header_split, function(x) {
    x <- x[x != ""]
    n_elements <- length(x)
    n_properties <- length(grep(":", x, fixed = TRUE))
    if (n_elements / n_properties == 1)
      x <- unlist(strsplit(x, ": ", fixed = TRUE))
    return(x)
  })

  # format parameter/settings names and corresponding values: elements
  # alternate name/value within each cleaned line
  values <- vector(mode = "character")
  names <- vector(mode = "character")

  for (i in seq_along(header_split_clean)) {
    for (j in seq(1, length(header_split_clean[[i]]), 2)) {
      names <- c(names, header_split_clean[[i]][j])
      values <- c(values, header_split_clean[[i]][j + 1])
    }
  }

  # some RegExing for nice reading
  names <- gsub("[: ]$", "", names, perl = TRUE)
  names <- gsub("^ ", "", names)
  names <- gsub(" $", "", names)

  # for some weird reason "offset subtract" starts with '256 '
  names <- gsub("256 ", "", names)

  # finally, replace all blanks with underscores
  names <- gsub(" ", "_", names)

  values <- gsub("[: ]$", "", values, perl = TRUE)
  values <- gsub("^ ", "", values)
  values <- gsub(" $", "", values)

  # return header as list
  header <- as.list(values)
names(header) <- names
return(header)
}
Luminescence/R/analyse_Al2O3C_ITC.R0000644000176200001440000002616313270641406016361 0ustar liggesusers#' Al2O3 Irradiation Time Correction Analysis
#'
#' The function provides a very particular analysis to correct the irradiation
#' time while irradiating Al2O3:C chips in a luminescence reader.
#'
#' Background: Due to their high dose sensitivity Al2O3:C chips are usually
#' irradiated for only a very short duration or under the closed beta-source
#' within a luminescence reader. However, due to its high dose sensitivity, during
#' the movement towards the beta-source, the pellet already receives and non-negligible
#' dose. Based on measurements following a protocol suggested by Kreutzer et al., 2018,
#' a dose response curve is constructed and the intersection (absolute value) with the time axis
#' is taken as real irradiation time.
#'
#' **`method_control`**
#'
#' To keep the generic argument list as clear as possible, arguments to allow a deeper control of the method
#' are all preset with meaningful default parameters and can be
#' handled using the argument `method_control` only, e.g.,
#' `method_control = list(fit.method = "LIN")`. Supported arguments are:
#'
#' \tabular{lll}{
#' **ARGUMENT** \tab **FUNCTION** \tab **DESCRIPTION**\cr
#' `mode` \tab `plot_GrowthCurve` \tab as in [plot_GrowthCurve]; sets the mode used for fitting\cr
#' `fit.method` \tab `plot_GrowthCurve` \tab as in [plot_GrowthCurve]; sets the function applied for fitting\cr
#' }
#'
#' @param object [RLum.Analysis-class] or [list] **(required)**:
#' results obtained from the measurement.
#' Alternatively a list of 'RLum.Analysis' objects can be provided to allow an automatic analysis.
#'
#' @param signal_integral [numeric] (*optional*):
#' signal integral, used for the signal and the background.
#' If nothing is provided the full range is used. Argument can be provided as [list].
#'
#' @param dose_points [numeric] (*with default*):
#' vector with dose points, if dose points are repeated, only the general
#' pattern needs to be provided. Default values follow the suggestions
#' made by Kreutzer et al., 2018. Argument can be provided as [list].
#'
#' @param recordType [character] (*with default*): input curve selection, which is passed to
#' function [get_RLum]. To deactivate the automatic selection set the argument to `NULL`
#'
#' @param method_control [list] (*optional*):
#' optional parameters to control the calculation.
#' See details for further explanations
#'
#' @param verbose [logical] (*with default*):
#' enable/disable verbose mode
#'
#' @param plot [logical] (*with default*):
#' enable/disable plot output
#'
#' @param ... further arguments that can be passed to the plot output
#'
#' @return
#' Function returns results numerically and graphically:
#'
#' -----------------------------------\cr
#' `[ NUMERICAL OUTPUT ]`\cr
#' -----------------------------------\cr
#'
#' **`RLum.Results`**-object
#'
#' **slot:** **`@data`**
#'
#' \tabular{lll}{
#' **Element** \tab **Type** \tab **Description**\cr
#' `$data` \tab `data.frame` \tab correction value and error \cr
#' `$table` \tab `data.frame` \tab table used for plotting \cr
#' `$table_mean` \tab `data.frame` \tab table used for fitting \cr
#' `$fit` \tab `lm` or `nls` \tab the fitting as returned by the function [plot_GrowthCurve]
#' }
#'
#'**slot:** **`@info`**
#'
#' The original function call
#'
#' ------------------------\cr
#' `[ PLOT OUTPUT ]`\cr
#' ------------------------\cr
#'
#' - A dose response curve with the marked correction values
#'
#' @section Function version: 0.1.1
#'
#' @author Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France)
#'
#' @seealso [plot_GrowthCurve]
#'
#' @references
#'
#' Kreutzer, S., Martin, L., Guérin, G., Tribolo, C., Selva, P., Mercier, N., 2018. Environmental Dose Rate
#' Determination Using a Passive Dosimeter: Techniques and Workflow for alpha-Al2O3:C Chips.
#' Geochromometria 45, 56-67. doi: 10.1515/geochr-2015-0086
#'
#' @keywords datagen
#'
#' @examples
#'
#' ##load data
#' data(ExampleData.Al2O3C, envir = environment())
#'
#' ##run analysis
#' analyse_Al2O3C_ITC(data_ITC)
#'
#' @md
#' @export
analyse_Al2O3C_ITC <- function(
  object,
  signal_integral = NULL,
  dose_points = c(2,4,8,12,16),
  recordType = c("OSL (UVVIS)"),
  method_control = NULL,
  verbose = TRUE,
  plot = TRUE,
  ...
){

  # SELF CALL -----------------------------------------------------------------------------------
  if(is.list(object)){

    ##check whether the list contains only RLum.Analysis objects
    ##(inherits() is the R >= 4.0 safe form of a class comparison)
    if(!all(vapply(object, function(o) inherits(o, "RLum.Analysis"), logical(1)))){
      stop("[analyse_Al2O3C()] All objects in the list provided by 'objects' need to be of type 'RLum.Analysis'",
           call. = FALSE)
    }

    ##expand input arguments
    if(!is.null(signal_integral)){
      ## FIX: the replication length was previously passed *inside* list(),
      ## i.e. rep(list(signal_integral, length = length(object))), which built
      ## a two-element list instead of one signal integral per aliquot
      signal_integral <- rep(list(signal_integral), length = length(object))
    }

    ##dose points
    if(is(dose_points, "list")){
      ## FIX: this branch previously assigned to 'dose.points' (typo), so a
      ## list input was silently never expanded to the number of aliquots
      dose_points <- rep(dose_points, length = length(object))
    }else{
      dose_points <- rep(list(dose_points), length = length(object))
    }

    ##method_control
    ##verbose
    ##plot
    ##(passed through unchanged to each self call)

    ##run analysis
    results_full <- lapply(1:length(object), function(x){
      ##run analysis
      results <- try(analyse_Al2O3C_ITC(
        object = object[[x]],
        signal_integral = signal_integral[[x]],
        dose_points = dose_points[[x]],
        method_control = method_control,
        verbose = verbose,
        plot = plot,
        main = ifelse("main" %in% names(list(...)), list(...)$main, paste0("ALQ #", x)),
        ...
      ))

      ##catch error; failed aliquots are dropped from the merged output
      if(inherits(results, "try-error")){
        return(NULL)
      }else{
        return(results)
      }
    })

    ##return
    return(merge_RLum(results_full))
  }

  # Integrity check ----------------------------------------------------------------------------
  ##check input object
  if(!inherits(object, "RLum.Analysis")){
    stop("[analyse_Al2O3C_ITC()] 'object' needs to be of type 'RLum.Analysis'",
         call. = FALSE)
  }

  ##TODO
  ##implement more checks ... if you find some time, somehow, somewhere

  # Preparation ---------------------------------------------------------------------------------
  ##select curves based on the recordType selection; if not NULL
  if(!is.null(recordType)){
    object <- get_RLum(object, recordType = recordType, drop = FALSE)
  }

  #set method control
  method_control_settings <- list(
    mode = "extrapolation",
    fit.method = "EXP"
  )

  ##modify on request
  if(!is.null(method_control)){
    method_control_settings <- modifyList(x = method_control_settings, val = method_control)
  }

  ##dose points enhancement
  ##make sure that the dose_point is enhanced; curves come in signal/background
  ##pairs, hence length(object)/2 dose points are needed
  dose_points <- rep(dose_points, times = length(object)/2)

  # Calculation ---------------------------------------------------------------------------------
  ##set signal integral
  if(is.null(signal_integral)){
    signal_integral <- c(1:nrow(object[[1]][]))
  }else{
    ##check whether the input is valid, otherwise make it valid
    if(min(signal_integral) < 1 | max(signal_integral) > nrow(object[[1]][])){
      signal_integral <- c(1:nrow(object[[1]][]))
      warning(
        paste0(
          "[analyse_Al2O3C_ITC()] Input for 'signal_integral' corrected to 1:",
          nrow(object[[1]][])
        ),
        call. = FALSE
      )
    }
  }

  ##calculate curve sums; odd-indexed curves are signals, even-indexed
  ##curves the corresponding backgrounds
  net_SIGNAL <- vapply(1:length(object[seq(1, length(object), by = 2)]), function(x){
    temp_signal <- sum(object[seq(1, length(object), by = 2)][[x]][, 2])
    temp_background <- sum(object[seq(2, length(object), by = 2)][[x]][, 2])
    return(temp_signal - temp_background)
  }, FUN.VALUE = vector(mode = "numeric", length = 1))

  ##create data.frames
  ##single points
  df <- data.frame(
    DOSE = dose_points,
    net_SIGNAL = net_SIGNAL,
    net_SIGNAL.ERROR = 0,
    net_SIGNAL_NORM = net_SIGNAL/max(net_SIGNAL),
    net_SIGNAL_NORM.ERROR = 0
  )

  ##take mean
  ##make data frame for all curves for MC runs
  df_mean <- as.data.frame(data.table::rbindlist(lapply(unique(df$DOSE), function(x){
    data.frame(
      DOSE = x,
      net_SIGNAL = mean(df[df$DOSE == x, "net_SIGNAL"]),
      net_SIGNAL.ERROR = sd(df[df$DOSE == x, "net_SIGNAL"]),
      net_SIGNAL_NORM = mean(df[df$DOSE == x, "net_SIGNAL_NORM"]),
      net_SIGNAL_NORM.ERROR = sd(df[df$DOSE == x, "net_SIGNAL_NORM"])
    )
  })))

  ##calculate GC; the (extrapolated) intersection with the dose axis is the
  ##irradiation time correction
  GC <- plot_GrowthCurve(
    sample = df_mean,
    mode = method_control_settings$mode,
    output.plotExtended = FALSE,
    output.plot = FALSE,
    fit.method = method_control_settings$fit.method,
    verbose = FALSE
  )

  ##output
  if(verbose){
    cat("\n[analyse_Al2O3C_ITC()]\n")
    cat(paste0("\n Used fit:\t\t", method_control_settings$fit.method))
    cat(paste0("\n Time correction value:\t", round(GC$De$De, 3), " \u00B1 ", GC$De$De.Error))
    cat("\n\n")
  }

  # Plotting ------------------------------------------------------------------------------------
  if(plot){

    ##set plot settings
    plot_settings <- list(
      xlab = "Dose [s]",
      ylab = "Integrated net GSL [a.u.]",
      main = "Irradiation Time Correction",
      xlim = c(-5, max(df$DOSE)),
      ylim = c(0, max(df$net_SIGNAL)),
      legend.pos = "right",
      legend.text = "dose points",
      mtext = ""
    )

    ##modify list on request
    plot_settings <- modifyList(x = plot_settings, val = list(...))

    ##make plot area
    plot(NA, NA,
         xlim = plot_settings$xlim,
         ylim = plot_settings$ylim,
         xlab = plot_settings$xlab,
         ylab = plot_settings$ylab,
         main = plot_settings$main
    )

    ##add zero lines
    abline(v = 0)
    abline(h = 0)

    ##add dose points
    points(x = df$DOSE, y = df$net_SIGNAL)

    ##add dose response curve
    x <- seq(min(plot_settings$xlim), max(plot_settings$xlim), length.out = 100)
    lines(
      x = x,
      y = eval(GC$Formula)
    )

    ##show offset; GC$Formula is evaluated at x = 0 here
    x <- 0
    lines(x = c(-GC$De[1], -GC$De[1]),
          y = c(eval(GC$Formula), 0),
          lty = 2,
          col = "red")

    shape::Arrows(
      x0 = 0,
      y0 = eval(GC$Formula),
      x1 = as.numeric(-GC$De[1]),
      y1 = eval(GC$Formula),
      arr.type = "triangle",
      arr.adj = -0.5,
      col = 'red',
      cex = par()$cex
    )

    ##add text
    text(
      x = -GC$De[1] / 2,
      y = eval(GC$Formula),
      pos = 3,
      labels = paste(round(GC$De[1], 3), "\u00B1", GC$De[2]),
      col = 'red',
      cex = 0.8
    )

    ##add 2nd x-axis showing the corrected irradiation times
    axis(
      side = 1,
      at = axTicks(side = 1),
      labels = paste0("(", (axTicks(side = 1) + round(as.numeric(GC$De[1]), 2)), ")"),
      line = 1,
      col.axis = "red",
      lwd.ticks = 0,
      lwd = 0,
      cex.axis = 0.9
    )

    ##add legend
    legend(
      plot_settings$legend.pos,
      bty = "n",
      pch = 1,
      legend = plot_settings$legend.text
    )

    ##add mtext
    mtext(side = 3, text = plot_settings$mtext)
  }

  # Output --------------------------------------------------------------------------------------
  return(set_RLum(
    class = "RLum.Results",
    data = list(
      data = data.frame(
        VALUE = as.numeric(GC$De$De),
        VALUE_ERROR = as.numeric(sd(GC$De.MC))
      ),
      table = df,
      table_mean = df_mean,
      fit = GC$Fit
    ),
    info = list(call = sys.call())
  ))
}
Luminescence/R/use_DRAC.R0000644000176200001440000004100113242571355014600 0ustar liggesusers#' Use DRAC to calculate dose rate data
#'
#' The function provides an interface from R to DRAC. An R-object or a
#' pre-formatted XLS/XLSX file is passed to the DRAC website and the
#' results are re-imported into R.
#'
#'
#' @param file [character] (**required**):
#' spreadsheet to be passed to the DRAC website for calculation. Can also be a
#' DRAC template object obtained from `template_DRAC()`.
#'
#' @param name [character] (*with default*):
#' Optional user name submitted to DRAC.
If omitted, a random name will be generated #' #' @param print_references (*with default*): #' Print all references used in the input data table to the console. #' #' @param citation_style (*with default*): #' If `print_references = TRUE` this argument determines the output style of the #' used references. Valid options are `"Bibtex"`, `"citation"`, `"html"`, `"latex"` #' or `"R"`. Default is `"text"`. #' #' @param ... Further arguments. #' #' - `url` [character]: provide an alternative URL to DRAC #' - `verbose` [logical]: show or hide console output #' #' @return Returns an [RLum.Results-class] object containing the following elements: #' #' \item{DRAC}{[list]: a named list containing the following elements in slot `@@data`: #' #' \tabular{lll}{ #' `$highlights` \tab [data.frame] \tab summary of 25 most important input/output fields \cr #' `$header` \tab [character] \tab HTTP header from the DRAC server response \cr #' `$labels` \tab [data.frame] \tab descriptive headers of all input/output fields \cr #' `$content` \tab [data.frame] \tab complete DRAC input/output table \cr #' `$input` \tab [data.frame] \tab DRAC input table \cr #' `$output` \tab [data.frame] \tab DRAC output table \cr #' `references`\tab [list] \tab A list of [bibentry]s of used references \cr #' } #' #' } #' \item{data}{[character] or [list] path to the input spreadsheet or a DRAC template} #' \item{call}{[call] the function call} #' \item{args}{[list] used arguments} #' #' The output should be accessed using the function [get_RLum]. #' #' @section Function version: 0.1.3 #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, Universite Bordeaux Montaigne (France)\cr #' Michael Dietze, GFZ Potsdam (Germany)\cr #' Christoph Burow, University of Cologne (Germany) #' #' @references #' Durcan, J.A., King, G.E., Duller, G.A.T., 2015. DRAC: Dose Rate and Age Calculator for trapped charge dating. #' Quaternary Geochronology 28, 54-61. 
doi:10.1016/j.quageo.2015.03.012 #' #' @examples #' #' ## (1) Method using the DRAC spreadsheet #' #' file <- "/PATH/TO/DRAC_Input_Template.csv" #' #' # send the actual IO template spreadsheet to DRAC #' \dontrun{ #' use_DRAC(file = file) #' } #' #' #' #' ## (2) Method using an R template object #' #' # Create a template #' input <- template_DRAC(preset = "DRAC-example_quartz") #' #' # Fill the template with values #' input$`Project ID` <- "DRAC-Example" #' input$`Sample ID` <- "Quartz" #' input$`Conversion factors` <- "AdamiecAitken1998" #' input$`External U (ppm)` <- 3.4 #' input$`errExternal U (ppm)` <- 0.51 #' input$`External Th (ppm)` <- 14.47 #' input$`errExternal Th (ppm)` <- 1.69 #' input$`External K (%)` <- 1.2 #' input$`errExternal K (%)` <- 0.14 #' input$`Calculate external Rb from K conc?` <- "N" #' input$`Calculate internal Rb from K conc?` <- "N" #' input$`Scale gammadoserate at shallow depths?` <- "N" #' input$`Grain size min (microns)` <- 90 #' input$`Grain size max (microns)` <- 125 #' input$`Water content ((wet weight - dry weight)/dry weight) %` <- 5 #' input$`errWater content %` <- 2 #' input$`Depth (m)` <- 2.2 #' input$`errDepth (m)` <- 0.22 #' input$`Overburden density (g cm-3)` <- 1.8 #' input$`errOverburden density (g cm-3)` <- 0.1 #' input$`Latitude (decimal degrees)` <- 30.0000 #' input$`Longitude (decimal degrees)` <- 70.0000 #' input$`Altitude (m)` <- 150 #' input$`De (Gy)` <- 20 #' input$`errDe (Gy)` <- 0.2 #' #' # use DRAC #' \dontrun{ #' output <- use_DRAC(input) #' } #' #' @md #' @export use_DRAC <- function( file, name, print_references = TRUE, citation_style = "text", ... ){ ## TODO: ## (1) Keep the data set as unmodified as possible. Check structure and order of parameters ## for meaningful cominbination. 
## ## (2) ## Leave it to the user where the calculations made in our package should be used # Integrity tests ----------------------------------------------------------------------------- if (inherits(file, "character")) { if(!file.exists(file)){ stop("[use_DRAC()] It seems that the file doesn't exist!") } # Import data --------------------------------------------------------------------------------- ## Import and skip the first rows and remove NA lines and the 2 row, as this row contains ## only meta data ## DRAC v1.1 - XLS sheet ##check if is the original DRAC table if (tools::file_ext(file) == "xls" || tools::file_ext(file) == "xlsx") { if (readxl::excel_sheets(file)[1] != "DRAC_1.1_input") stop("[use_DRAC()] It looks like that you are not using the original DRAC v1.1 XLSX template. This is currently not supported!") warning("\n[use_DRAC()] The current DRAC version is 1.2, but you provided the v1.1 excel input template.", "\nPlease transfer your data to the new CSV template introduced with DRAC v1.2.", call. = FALSE) input.raw <- na.omit(as.data.frame(readxl::read_excel(path = file, sheet = 1, skip = 5)))[-1, ] } ## DRAC v1.2 - CSV sheet if (tools::file_ext(file) == "csv") { if (read.csv(file, nrows = 1, header = FALSE)[1] != "DRAC v.1.2 Inputs") stop("[use_DRAC()] It looks like that you are not using the original DRAC v1.2 CSV template. This is currently not supported!") input.raw <- read.csv(file, skip = 8, check.names = FALSE, header = TRUE, stringsAsFactors = FALSE)[-1, ] } } else if (inherits(file, "DRAC.list")) { input.raw <- as.data.frame(file) } else if (inherits(file, "DRAC.data.frame")) { input.raw <- file } else { stop("The provided data object is not a valid DRAC template.", call. = FALSE) } if (nrow(input.raw) > 50) stop("DRAC can only handle 50 data sets at once. Please reduce the number of rows and re-run this function again.", call. 
= FALSE) # Settings ------------------------------------------------------------------------------------ settings <- list(name = ifelse(missing(name), paste(sample(if(runif(1,-10,10)>0){LETTERS}else{letters}, runif(1, 2, 4)), collapse = ""), name), verbose = TRUE, url = "https://www.aber.ac.uk/en/dges/research/quaternary/luminescence-research-laboratory/dose-rate-calculator/?show=calculator") # override defaults with args in ... settings <- modifyList(settings, list(...)) # Set helper function ------------------------------------------------------------------------- ## The real data are transferred without any encryption, so we have to mask the original ##(0) set masking function .masking <- function(mean, sd, n) { temp <- rnorm(n = 30 * n, mean = mean,sd = sd) temp.result <- sapply(seq(1, length(temp), by = 30), function(x) { c(format(mean(temp[x:(x + 29)]), digits = 2), format(sd(temp[x:(x + 29)]), digits = 2)) }) return(t(temp.result)) } # Process data -------------------------------------------------------------------------------- if (settings$verbose) message("\n\t Preparing data...") ##(1) expand the rows in the data.frame a little bit mask.df <- input.raw[rep(1:nrow(input.raw), each = 3), ] ##(2) generate some meaningful randome variables mask.df <- lapply(seq(1, nrow(input.raw), by = 3), function(x) { if (mask.df[x,"TI:52"] != "X") { ##replace some values - the De value mask.df[x:(x + 2), c("TI:52","TI:53")] <- .masking( mean = as.numeric(mask.df[x,"TI:52"]), sd = as.numeric(mask.df[x,"TI:53"]), n = 3) return(mask.df) } }) ##(3) bin values DRAC_submission.df <- rbind(input.raw,mask.df[[1]]) ##(4) replace ID values DRAC_submission.df$`TI:1` <- paste0(paste0(paste0(sample(if(runif(1,-10,10)>0){LETTERS}else{letters}, runif(1, 2, 4)), collapse = ""), ifelse(runif(1,-10,10)>0, "-", "")), gsub(" ", "0", prettyNum(seq(sample(1:50, 1, prob = 50:1/50, replace = FALSE), by = 1, length.out = nrow(DRAC_submission.df)), width = 2))) ##(5) store the real IDs in a sperate 
object DRAC_results.id <- DRAC_submission.df[1:nrow(input.raw), "TI:1"] ##(6) create DRAC submission string DRAC_submission.df <- DRAC_submission.df[sample(x = 1:nrow(DRAC_submission.df), nrow(DRAC_submission.df), replace = FALSE), ] ##convert all columns of the data.frame to class 'character' for (i in 1:ncol(DRAC_submission.df)) DRAC_submission.df[ ,i] <- as.character(DRAC_submission.df[, i]) if (settings$verbose) message("\t Creating submission string...") ##get line by line and remove unwanted characters DRAC_submission.string <- sapply(1:nrow(DRAC_submission.df), function(x) { paste0(gsub(",", "", toString(DRAC_submission.df[x, ])), "\n") }) ##paste everything together to get the format we want DRAC_input <- paste(DRAC_submission.string, collapse = "") # Send data to DRAC --------------------------------------------------------------------------- if (settings$verbose) message(paste("\t Establishing connection to", settings$url)) ## send data set to DRAC website and receive repsonse DRAC.response <- httr::POST(settings$url, body = list("drac_data[name]" = settings$name, "drac_data[table]" = DRAC_input)) ## check for correct response if (DRAC.response$status_code != 200) { stop(paste0("[use_DRAC()] transmission failed with HTTP status code: ", DRAC.response$status_code)) } else { if (settings$verbose) message("\t The request was successful, processing the reply...") } ## assign DRAC response data to variables http.header <- DRAC.response$header DRAC.content <- httr::content(x = DRAC.response, as = "text") ## if the input was valid from a technical standpoint, but not with regard ## contents, we indeed get a valid response, but no DRAC output if (!grepl("DRAC Outputs", DRAC.content)) { error_start <- max(gregexpr("drac_field_error", DRAC.content)[[1]]) error_end <- regexec('textarea name=', DRAC.content)[[1]] error_msg <- substr(DRAC.content, error_start, error_end) on.exit({ reply <- readline("Do you want to see the DRAC error message (Y/N)?") if (reply == "Y" 
|| reply == "y" || reply == 1) cat(error_msg) }) stop(paste("\n\t We got a response from the server, but it\n", "\t did not contain DRAC output. Please check\n", "\t your data and verify its validity.\n"), call. = FALSE) } else { if (settings$verbose) message("\t Finalising the results...") } ## split header and content DRAC.content.split <- strsplit(x = DRAC.content, split = "DRAC Outputs\n\n") ## assign DRAC header part DRAC.header <- as.character(DRAC.content.split[[1]][1]) ## assign DRAC content part DRAC.raw <- read.table(text = as.character(DRAC.content.split[[1]][2]), sep = ",", stringsAsFactors = FALSE) ## remove first two lines DRAC.content <- data.table::fread(as.character(DRAC.content.split[[1]][2]), sep = ",", skip = 2, stringsAsFactors = FALSE, colClasses = c(V3 = "character"), data.table = FALSE) ##Get rid of all the value we do not need anymore DRAC.content <- subset(DRAC.content, DRAC.content$V1 %in% DRAC_results.id) DRAC.content <- DRAC.content[with(DRAC.content, order(V1)), ] ##replace by original names DRAC.content[ ,1] <- input.raw[ ,1] ## assign column names colnames(DRAC.content) <- DRAC.raw[1, ] ## save column labels and use them as attributes for the I/O table columns DRAC.labels <- DRAC.raw[2, ] for (i in 1:length(DRAC.content)) { attr(DRAC.content[ ,i], "description") <- DRAC.labels[1,i] } ## DRAC also returns the input, so we need to split input and output DRAC.content.input <- DRAC.content[ ,grep("TI:", names(DRAC.content))] DRAC.content.output <- DRAC.content[ ,grep("TO:", names(DRAC.content))] ## The DRAC ouput also contains a hightlight table, which results in ## duplicate columns. 
When creating the data.frame duplicate columns ## are automatically appended '.1' in their names, so we can identify ## and remove them easily DRAC.content.input <- DRAC.content.input[ ,-grep("\\.1", names(DRAC.content.input))] DRAC.content.output <- DRAC.content.output[ ,-grep("\\.1", names(DRAC.content.output))] ## for some reason the returned input table is unsorted, so we resort it in increasing order DRAC.content.input <- DRAC.content.input[ , paste0("TI:", 1:ncol(DRAC.content.input))] ## The output table (v1.2) has 198 columns, making it unreasonable complex ## for standard data evaluation. We reproduce the DRAC highlight table ## and use the descriptions (saved as attributes) as column names. highlight.keys <- c("TI:1","TI:2","TI:3","TO:FQ","TO:FR", "TO:FS", "TO:FT", "TO:FU", "TO:FV", "TO:FW", "TO:FX", "TO:FY", "TO:FZ", "TO:GG", "TO:GH", "TO:GI", "TO:GJ", "TO:GK", "TO:GL", "TO:GM", "TO:GN", "TI:52", "TI:53", "TO:GO", "TO:GP") DRAC.highlights <- subset(DRAC.content, select = highlight.keys) DRAC.highlights.labels <- as.character(DRAC.labels[1, which(unique(names(DRAC.content)) %in% highlight.keys)]) colnames(DRAC.highlights) <- DRAC.highlights.labels for (i in 1:length(DRAC.highlights)) { attr(DRAC.highlights[ ,i], "key") <- highlight.keys[i] } ## finally, we add the 'DRAC.highlights' class so that we can use a custom print method class(DRAC.highlights) <- c("DRAC.highlights", "data.frame") ## Final Disclaimer messages <- list("\t Done! \n", "\t We, the authors of the R package 'Luminescence', do not take any responsibility and we are not liable for any ", "\t mistakes or unforeseen misbehaviour. All calculations are done by DRAC and it is outside our reference to", "\t verify the input and output. \n", "\t Note that this function is only compatible with DRAC version 1.2. 
Before using this function make sure that", "\t this is the correct version, otherwise expect unspecified errors.\n", "\t Please ensure you cite the use of DRAC in your work, published or otherwise. Please cite the website name and", "\t version (e.g. DRAC v1.2) and the accompanying journal article:", "\t Durcan, J.A., King, G.E., Duller, G.A.T., 2015. DRAC: Dose rate and age calculation for trapped charge", "\t dating. Quaternary Geochronology 28, 54-61. \n", "\t Use 'verbose = FALSE' to hide this message. \n") if (settings$verbose) lapply(messages, message) ## Get and print used references references <- get_DRAC_references(DRAC.content.input) if (print_references && settings$verbose) { for (i in 1:length(references$refs)) { message("\nReference for: ", references$desc[i]) print(references$refs[[i]], style = citation_style) } } ## return output DRAC.return <- set_RLum("RLum.Results", data = list( DRAC = list(highlights = DRAC.highlights, header = DRAC.header, labels = DRAC.labels, content = DRAC.content, input = DRAC.content.input, output = DRAC.content.output, references = references), data = file, call = sys.call(), args = as.list(sys.call()[-1]))) invisible(DRAC.return) } Luminescence/R/read_XSYG2R.R0000644000176200001440000006655113571743147015232 0ustar liggesusers#' Import XSYG files to R #' #' Imports XSYG files produced by a Freiberg Instrument lexsyg reader into R. #' #' **How does the import function work?** #' #' The function uses the [xml] package to parse the file structure. Each #' sequence is subsequently translated into an [RLum.Analysis-class] object. #' #' **General structure XSYG format** #' #' ``` #' #' #' #' #' #' x0 , y0 ; x1 , y1 ; x2 , y2 ; x3 , y3 #' #' #' #' ``` #' #' So far, each #' XSYG file can only contain one ``, but multiple #' sequences. #' #' Each record may comprise several curves. #' #' **TL curve recalculation** #' #' On the FI lexsyg device TL curves are recorded as time against count values. 
#' Temperature values are monitored on the heating plate and stored in a #' separate curve (time vs. temperature). If the option #' `recalculate.TL.curves = TRUE` is chosen, the time values for each TL #' curve are replaced by temperature values. #' #' Practically, this means combining two matrices (Time vs. Counts and Time vs. #' Temperature) with different row numbers by their time values. Three cases #' are considered: #' #' 1. HE: Heating element #' 2. PMT: Photomultiplier tube #' 3. Interpolation is done using the function [approx] #' #' CASE (1): `nrow(matrix(PMT))` > `nrow(matrix(HE))` #' #' Missing temperature values from the heating element are calculated using #' time values from the PMT measurement. #' #' CASE (2): `nrow(matrix(PMT))` < `nrow(matrix(HE))` #' #' Missing count values from the PMT are calculated using time values from the #' heating element measurement. #' #' CASE (3): `nrow(matrix(PMT))` == `nrow(matrix(HE))` #' #' A new matrix is produced using temperature values from the heating element #' and count values from the PMT. #' #' **Note:** #' Please note that due to the recalculation of the temperature #' values based on values delivered by the heating element, it may happen that #' mutiple count values exists for each temperature value and temperature #' values may also decrease during heating, not only increase. #' #' **Advanced file import** #' #' To allow for a more efficient usage of the function, instead of single path #' to a file just a directory can be passed as input. In this particular case #' the function tries to extract all XSYG-files found in the directory and import #' them all. Using this option internally the function constructs as list of #' the XSYG-files found in the directory. Please note no recursive detection #' is supported as this may lead to endless loops. #' #' @param file [character] or [list] (**required**): #' path and file name of the XSYG file. 
If input is a `list` it should comprise #' only `character`s representing each valid path and xsyg-file names. #' Alternatively the input character can be just a directory (path), in this case the #' the function tries to detect and import all xsyg files found in the directory. #' #' @param recalculate.TL.curves [logical] (*with default*): #' if set to `TRUE`, TL curves are returned as temperature against count values #' (see details for more information) Note: The option overwrites the time vs. #' count TL curve. Select `FALSE` to import the raw data delivered by the #' lexsyg. Works for TL curves and spectra. #' #' @param fastForward [logical] (*with default*): #' if `TRUE` for a more efficient data processing only a list of `RLum.Analysis` #' objects is returned. #' #' @param import [logical] (*with default*): #' if set to `FALSE`, only the XSYG file structure is shown. #' #' @param pattern [regex] (*with default*): #' optional regular expression if `file` is a link to a folder, to select just #' specific XSYG-files #' #' @param verbose [logical] (*with default*): enable or disable verbose mode. If verbose is `FALSE` #' the `txtProgressBar` is also switched off #' #' @param txtProgressBar [logical] (*with default*): #' enables `TRUE` or disables `FALSE` the progression bar during import #' #' @return #' **Using the option `import = FALSE`** #' #' A list consisting of two elements is shown: #' - [data.frame] with information on file. #' - [data.frame] with information on the sequences stored in the XSYG file. #' #' **Using the option `import = TRUE` (default)** #' #' A list is provided, the list elements #' contain: \item{Sequence.Header}{[data.frame] with information on the #' sequence.} \item{Sequence.Object}{[RLum.Analysis-class] #' containing the curves.} #' #' @note #' This function is a beta version as the XSYG file format is not yet #' fully specified. 
Thus, further file operations (merge, export, write) should #' be done using the functions provided with the package [xml]. #' #' **So far, no image data import is provided!** \cr #' Corresponding values in the XSXG file are skipped. #' #' #' @section Function version: 0.6.8 #' #' #' @author #' Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne (France) #' #' #' @seealso [xml], [RLum.Analysis-class], [RLum.Data.Curve-class], [approx] #' #' #' @references #' Grehl, S., Kreutzer, S., Hoehne, M., 2013. Documentation of the #' XSYG file format. Unpublished Technical Note. Freiberg, Germany #' #' **Further reading** #' #' XML: [http://en.wikipedia.org/wiki/XML]() #' #' @keywords IO #' #' @examples #' #' ##(1) import XSYG file to R (uncomment for usage) #' #' #FILE <- file.choose() #' #temp <- read_XSYG2R(FILE) #' #' ##(2) additional examples for pure XML import using the package XML #' ## (uncomment for usage) #' #' ##import entire XML file #' #FILE <- file.choose() #' #temp <- XML::xmlRoot(XML::xmlTreeParse(FILE)) #' #' ##search for specific subnodes with curves containing 'OSL' #' #getNodeSet(temp, "//Sample/Sequence/Record[@@recordType = 'OSL']/Curve") #' #' ##(2) How to extract single curves ... after import #' data(ExampleData.XSYG, envir = environment()) #' #' ##grep one OSL curves and plot the first curve #' OSLcurve <- get_RLum(OSL.SARMeasurement$Sequence.Object, recordType="OSL")[[1]] #' #' ##(3) How to see the structure of an object? 
#' structure_RLum(OSL.SARMeasurement$Sequence.Object) #' #' @md #' @export read_XSYG2R <- function( file, recalculate.TL.curves = TRUE, fastForward = FALSE, import = TRUE, pattern = ".xsyg", verbose = TRUE, txtProgressBar = TRUE ){ ##TODO: this function should be reshaped: ## - metadata from the sequence should go into the info slot of the RLum.Analysis object ## >> however, the question is whether this works with subsequent functions ## - currently not all metadata are supported, it should be extended ## - the should be a mode importing ALL metadata ## - xlum should be general, xsyg should take care about subsequent details # Self Call ----------------------------------------------------------------------------------- # Option (a): Input is a list, every element in the list will be treated as file connection # with that many file can be read in at the same time # Option (b): The input is just a path, the function tries to grep ALL xsyg/XSYG files in the # directory and import them, if this is detected, we proceed as list if(is(file, "character")) { ##If this is not really a path we skip this here if (dir.exists(file) & length(dir(file)) > 0) { if(verbose) ("[read_XSYG2R()] Directory detected, trying to extract '*.xsyg' files ...\n") file <- as.list(paste0(file,dir( file, recursive = TRUE, pattern = pattern ))) } } if (is(file, "list")) { temp.return <- lapply(1:length(file), function(x) { read_XSYG2R( file = file[[x]], recalculate.TL.curves = recalculate.TL.curves, fastForward = fastForward, import = import, verbose = verbose, txtProgressBar = txtProgressBar ) }) ##return if (fastForward) { if(import){ return(unlist(temp.return, recursive = FALSE)) }else{ return(as.data.frame(data.table::rbindlist(temp.return))) } }else{ return(temp.return) } } # On exit case -------------------------------------------------------------------------------- ##set file_link for internet downloads file_link <- NULL on_exit <- function(){ ##unlink internet connection 
if(!is.null(file_link)){ unlink(file_link) } } on.exit(expr = on_exit()) # Consistency check ------------------------------------------------------- ##check if file exists if(!file.exists(file)){ ##check if the file as an URL ... you never know if(grepl(pattern = "http", x = file, fixed = TRUE)){ if(verbose){ cat("[read_XSYG2R()] URL detected, checking connection ... ") } ##check URL if(!httr::http_error(file)){ if(verbose) cat("OK") ##dowload file file_link <- tempfile("read_XSYG2R_FILE") download.file(file, destfile = file_link, quiet = if(verbose){FALSE}else{TRUE}) file <- file_link }else{ cat("FAILED") file <- NULL try(stop("[read_XSYG2R()] File does not exist! Return NULL!", call. = FALSE)) return(NULL) } }else{ file <- NULL try(stop("[read_XSYG2R()] File does not exist, return NULL!", call. = FALSE)) return(NULL) } } #TODO to be included again in a future version, if the format is given in the file itself # ##check if file is XML file # if(tail(unlist(strsplit(file, split = "\\.")), 1) != "xsyg" & # tail(unlist(strsplit(file, split = "\\.")), 1) != "XSYG" ){ # # warning("[read_XSYG2R()] File is not of type 'XSYG', nothing imported!") # return(NULL) # # } # (0) config -------------------------------------------------------------- #version.supported <- c("1.0") #additional functions #get spectrum values # TODO: This function could be written also in C++, however, not necessary due to a low demand get_XSYG.spectrum.values <- function(curve.node){ ##1st grep wavelength table wavelength <- XML::xmlAttrs(curve.node)["wavelengthTable"] ##string split wavelength <- as.numeric(unlist(strsplit(wavelength, split = ";", fixed = TRUE))) ##2nd grep time values curve.node <- unlist(strsplit(XML::xmlValue(curve.node), split = ";", fixed = TRUE)) curve.node <- unlist(strsplit(curve.node, split = ",", fixed = TRUE), recursive = FALSE) curve.node.time <- as.numeric(curve.node[seq(1,length(curve.node),2)]) ##3rd grep count values curve.node.count <- 
as.character(curve.node[seq(2,length(curve.node),2)]) ##remove from pattern... curve.node.count <- do.call("gsub", list(pattern="[[]|[]]", replacement=" ", x=curve.node.count)) ##4th combine to spectrum matrix spectrum.matrix <- matrix(0,length(wavelength),length(curve.node.time)) spectrum.matrix <- sapply(1:length(curve.node.time), function(x){ as.numeric(unlist(strsplit(curve.node.count[x], "[|]"))) }) ##change row names (rows are wavelength) rownames(spectrum.matrix) <- round(wavelength, digits=3) ##change column names (columns are time/temp values) colnames(spectrum.matrix) <- round(curve.node.time, digits=3) return(spectrum.matrix) } # (1) Integrity tests ----------------------------------------------------- ##set HUGE for larger nodes HUGE <- 524288 ##parse XML tree using the package XML temp <- try( XML::xmlRoot(XML::xmlTreeParse(file, useInternalNodes = TRUE, options = HUGE)), silent = TRUE) ##show error if(is(temp, "try-error") == TRUE){ try(stop("[read_XSYG2R()] XML file not readable, nothing imported!)", call. 
= FALSE)) return(NULL) } # (2) Further file processing --------------------------------------------- ##==========================================================================## ##SHOW STRUCTURE if(import == FALSE){ ##sample information temp.sample <- as.data.frame(XML::xmlAttrs(temp), stringsAsFactors = FALSE) ##grep sequences files ##set data.frame temp.sequence.header <- data.frame(t(1:length(names(XML::xmlAttrs(temp[[1]])))), stringsAsFactors = FALSE) colnames(temp.sequence.header) <- names(XML::xmlAttrs(temp[[1]])) ##fill information in data.frame for(i in 1:XML::xmlSize(temp)){ temp.sequence.header[i,] <- t(XML::xmlAttrs(temp[[i]])) } ##additional option for fastForward == TRUE if(fastForward){ ##change column header temp.sample <- t(temp.sample) colnames(temp.sample) <- paste0("sample::", colnames(temp.sample)) output <- cbind(temp.sequence.header, temp.sample) }else{ output <- list(Sample = temp.sample, Sequences = temp.sequence.header) } return(output) }else{ ##==========================================================================## ##IMPORT XSYG FILE ##Display output if(verbose) paste0("[read_XSYG2R()]\n Importing: ",file) ##PROGRESS BAR if(verbose && txtProgressBar){ pb <- txtProgressBar(min=0,max=XML::xmlSize(temp), char = "=", style=3) } ##loop over the entire sequence by sequence output <- lapply(1:XML::xmlSize(temp), function(x){ ##read sequence header temp.sequence.header <- as.data.frame(XML::xmlAttrs(temp[[x]]), stringsAsFactors = FALSE) ##account for non set value if(length(temp.sequence.header)!= 0) colnames(temp.sequence.header) <- "" ###----------------------------------------------------------------------- ##LOOP ##read records >> records are combined to one RLum.Analysis object temp.sequence.object <- unlist(lapply(1:XML::xmlSize(temp[[x]]), function(i){ ##get recordType temp.sequence.object.recordType <- try(XML::xmlAttrs(temp[[x]][[i]])["recordType"], silent = TRUE) ##the XSYG file might be broken due to a machine error during the 
measurement, this ##control flow helps; if a try-error is observed NULL is returned if(!inherits(temp.sequence.object.recordType, "try-error")){ ##create a fallback, the function should not fail if(is.null(temp.sequence.object.recordType) || is.na(temp.sequence.object.recordType)){ temp.sequence.object.recordType <- "not_set" } ##correct record type in depending on the stimulator if(temp.sequence.object.recordType == "OSL"){ if(XML::xmlAttrs(temp[[x]][[i]][[ XML::xmlSize(temp[[x]][[i]])]])["stimulator"] == "ir_LED_850" | XML::xmlAttrs(temp[[x]][[i]][[ XML::xmlSize(temp[[x]][[i]])]])["stimulator"] == "ir_LD_850"){ temp.sequence.object.recordType <- "IRSL" } } ##loop 3rd level lapply(1:XML::xmlSize(temp[[x]][[i]]), function(j){ ##get values temp.sequence.object.curveValue <- temp[[x]][[i]][[j]] ##get curveType temp.sequence.object.curveType <- as.character( XML::xmlAttrs(temp[[x]][[i]][[j]])["curveType"]) ##get detector temp.sequence.object.detector <- as.character( XML::xmlAttrs(temp[[x]][[i]][[j]])["detector"]) ##get stimulator temp.sequence.object.stimulator <- as.character( XML::xmlAttrs(temp[[x]][[i]][[j]])["stimulator"]) ##get additional information temp.sequence.object.info <- as.list(XML::xmlAttrs(temp.sequence.object.curveValue)) ##add stimulator and detector and so on temp.sequence.object.info <- c(temp.sequence.object.info, position = as.integer(as.character(temp.sequence.header["position",])), name = as.character(temp.sequence.header["name",])) ## TL curve recalculation ============================================ if(recalculate.TL.curves){ ##TL curve heating values is stored in the 3rd curve of every set if(temp.sequence.object.recordType == "TL" && j == 1){ #grep values from PMT measurement or spectrometer if("Spectrometer" %in% temp.sequence.object.detector == FALSE){ temp.sequence.object.curveValue.PMT <- src_get_XSYG_curve_values(XML::xmlValue( temp[[x]][[i]][[j]])) ##round values (1 digit is the technical resolution of the heating element) 
temp.sequence.object.curveValue.PMT[,1] <- round( temp.sequence.object.curveValue.PMT[,1], digits = 1) #grep values from heating element temp.sequence.object.curveValue.heating.element <- src_get_XSYG_curve_values(XML::xmlValue( temp[[x]][[i]][[3]])) }else{ temp.sequence.object.curveValue.spectrum <- get_XSYG.spectrum.values( temp.sequence.object.curveValue) ##get time values which are stored in the row labels temp.sequence.object.curveValue.spectrum.time <- as.numeric( colnames(temp.sequence.object.curveValue.spectrum)) ##round values (1 digit is technical resolution of the heating element) temp.sequence.object.curveValue.spectrum.time <- round( temp.sequence.object.curveValue.spectrum.time, digits = 1) } #grep values from heating element temp.sequence.object.curveValue.heating.element <- src_get_XSYG_curve_values(XML::xmlValue( temp[[x]][[i]][[3]])) if("Spectrometer" %in% temp.sequence.object.detector == FALSE){ #reduce matrix values to values of the detection temp.sequence.object.curveValue.heating.element <- temp.sequence.object.curveValue.heating.element[ temp.sequence.object.curveValue.heating.element[,1] >= min(temp.sequence.object.curveValue.PMT[,1]) & temp.sequence.object.curveValue.heating.element[,1] <= max(temp.sequence.object.curveValue.PMT[,1]), ,drop = FALSE] }else{ #reduce matrix values to values of the detection temp.sequence.object.curveValue.heating.element <- temp.sequence.object.curveValue.heating.element[ temp.sequence.object.curveValue.heating.element[,1] >= min(temp.sequence.object.curveValue.spectrum.time) & temp.sequence.object.curveValue.heating.element[,1] <= max(temp.sequence.object.curveValue.spectrum.time),] } ## calculate corresponding heating rate, this makes only sense ## for linear heating, therefore is has to be the maximum value ##remove 0 values (not measured) and limit to peak heating.rate.values <- temp.sequence.object.curveValue.heating.element[ temp.sequence.object.curveValue.heating.element[,2] > 0 & 
temp.sequence.object.curveValue.heating.element[,2] <= max(temp.sequence.object.curveValue.heating.element[,2]),,drop = FALSE] heating.rate <- (heating.rate.values[length(heating.rate.values[,2]), 2] - heating.rate.values[1,2])/ (heating.rate.values[length(heating.rate.values[,1]), 1] - heating.rate.values[1,1]) ##round values heating.rate <- round(heating.rate, digits=1) ##add to info element temp.sequence.object.info <- c(temp.sequence.object.info, RATE = heating.rate) ##PERFORM RECALCULATION ##check which object contains more data if("Spectrometer" %in% temp.sequence.object.detector == FALSE){ ##CASE (1) if(nrow(temp.sequence.object.curveValue.PMT) > nrow(temp.sequence.object.curveValue.heating.element)){ temp.sequence.object.curveValue.heating.element.i <- approx( x = temp.sequence.object.curveValue.heating.element[,1], y = temp.sequence.object.curveValue.heating.element[,2], xout = temp.sequence.object.curveValue.PMT[,1], rule = 2) temperature.values <- temp.sequence.object.curveValue.heating.element.i$y count.values <- temp.sequence.object.curveValue.PMT[,2] ##CASE (2) }else if((nrow(temp.sequence.object.curveValue.PMT) < nrow(temp.sequence.object.curveValue.heating.element))){ temp.sequence.object.curveValue.PMT.i <- approx( x = temp.sequence.object.curveValue.PMT[,1], y = temp.sequence.object.curveValue.PMT[,2], xout = temp.sequence.object.curveValue.heating.element[,1], rule = 2) temperature.values <- temp.sequence.object.curveValue.heating.element[,2] count.values <- temp.sequence.object.curveValue.PMT.i$y ##CASE (3) }else{ temperature.values <- temp.sequence.object.curveValue.heating.element[,2] count.values <- temp.sequence.object.curveValue.PMT[,2] } ##combine as matrix temp.sequence.object.curveValue <- as.matrix(cbind( temperature.values, count.values)) ##set curve identifier temp.sequence.object.info$curveDescripter <- "Temperature [\u00B0C]; Counts [a.u.]" }else{ ##CASE (1) here different approach. 
in contrast to the PMT measurements, as ## usually the resolution should be much, much lower for such measurements ## Otherwise we would introduce some pseudo signals, as we have to ## take care of noise later one if(length(temp.sequence.object.curveValue.spectrum.time) != nrow(temp.sequence.object.curveValue.heating.element)){ temp.sequence.object.curveValue.heating.element.i <- approx( x = temp.sequence.object.curveValue.heating.element[,1], y = temp.sequence.object.curveValue.heating.element[,2], xout = temp.sequence.object.curveValue.spectrum.time, rule = 2, ties = mean) temperature.values <- temp.sequence.object.curveValue.heating.element.i$y ##check for duplicated values and if so, increase this values if(anyDuplicated(temperature.values)>0){ temperature.values[which(duplicated(temperature.values))] <- temperature.values[which(duplicated(temperature.values))]+1 warning("[read_XSYG2R()] Temperatures values are found to be duplicated and increased by 1 K") } ##CASE (2) (equal) }else{ temperature.values <- temp.sequence.object.curveValue.heating.element[,2] } ##reset values of the matrix colnames(temp.sequence.object.curveValue.spectrum) <- temperature.values temp.sequence.object.curveValue <- temp.sequence.object.curveValue.spectrum ##change curve descriptor temp.sequence.object.info$curveDescripter <- "Temperature [\u00B0C]; Wavelength [nm]; Counts [1/ch]" } }##endif }##endif recalculate.TL.curves == TRUE # Cleanup info objects ------------------------------------------------------------------------ if("curveType" %in% names(temp.sequence.object.info)) temp.sequence.object.info[["curveType"]] <- NULL # Set RLum.Data-objects ----------------------------------------------------------------------- if("Spectrometer" %in% temp.sequence.object.detector == FALSE){ if(is(temp.sequence.object.curveValue, "matrix") == FALSE){ temp.sequence.object.curveValue <- src_get_XSYG_curve_values(XML::xmlValue(temp.sequence.object.curveValue)) } set_RLum( class = 
"RLum.Data.Curve", originator = "read_XSYG2R", recordType = paste(temp.sequence.object.recordType, " (", temp.sequence.object.detector,")", sep = ""), curveType = temp.sequence.object.curveType, data = temp.sequence.object.curveValue, info = temp.sequence.object.info) }else if("Spectrometer" %in% temp.sequence.object.detector == TRUE) { if(is(temp.sequence.object.curveValue, "matrix") == FALSE){ temp.sequence.object.curveValue <- get_XSYG.spectrum.values(temp.sequence.object.curveValue) } set_RLum( class = "RLum.Data.Spectrum", originator = "read_XSYG2R", recordType = paste(temp.sequence.object.recordType, " (",temp.sequence.object.detector,")", sep = ""), curveType = temp.sequence.object.curveType, data = temp.sequence.object.curveValue, info = temp.sequence.object.info) } }) }else{ return(NULL) }##if-try condition }), use.names = FALSE) ##if the XSYG file is broken we get NULL as list element if (!is.null(temp.sequence.object)) { ##set RLum.Analysis object temp.sequence.object <- set_RLum( originator = "read_XSYG2R", class = "RLum.Analysis", records = temp.sequence.object, protocol = as.character(temp.sequence.header["protocol",1]) ) ##set parent uid of RLum.Anlaysis as parent ID of the records temp.sequence.object <- .set_pid(temp.sequence.object) ##update progress bar if (verbose && txtProgressBar) { setTxtProgressBar(pb, x) } ##merge output and return values if(fastForward){ return(temp.sequence.object) }else{ return(list(Sequence.Header = temp.sequence.header, Sequence.Object = temp.sequence.object)) } }else{ return(temp.sequence.object) } })##end loop for sequence list ##close ProgressBar if(verbose && txtProgressBar ){close(pb)} ##show output informatioj if(length(output[sapply(output, is.null)]) == 0){ if(verbose) paste("\t >>",XML::xmlSize(temp), " sequence(s) loaded successfully.\n") }else{ if(verbose){ paste("\t >>",XML::xmlSize(temp), " sequence(s) in file.", XML::xmlSize(temp)-length(output[sapply(output, is.null)]), "sequence(s) loaded successfully. 
\n") } warning(paste0(length(output[sapply(output, is.null)])), " incomplete sequence(s) removed.") } ##output invisible(output) }#end if ##get rid of the NULL elements (as stated before ... invalid files) return(output[!sapply(output,is.null)]) } Luminescence/R/read_SPE2R.R0000644000176200001440000003333713231137116015045 0ustar liggesusers#' Import Princeton Intruments (TM) SPE-file into R #' #' Function imports Princeton Instruments (TM) SPE-files into R environment and #' provides `RLum` objects as output. #' #' Function provides an import routine for the Princton Instruments SPE format. #' Import functionality is based on the file format description provided by #' Princton Instruments and a MatLab script written by Carl Hall (s. #' references). #' #' @param file [character] (**required**): #' spe-file name (including path), e.g. #' - `[WIN]`: `read_SPE2R("C:/Desktop/test.spe")` #' - `[MAC/LINUX]`: `readSPER("/User/test/Desktop/test.spe")`. Additionally internet connections #' are supported. #' #' @param output.object [character] (*with default*): #' set `RLum` output object. Allowed types are `"RLum.Data.Spectrum"`, #' `"RLum.Data.Image"` or `"matrix"` #' #' @param frame.range [vector] (*optional*): #' limit frame range, e.g. select first 100 frames by `frame.range = c(1,100)` #' #' @param txtProgressBar [logical] (*with default*): #' enables or disables [txtProgressBar]. #' #' @param verbose [logical] (*with default*): enables or disables verbose mode #' #' @return #' Depending on the chosen option the functions returns three different #' type of objects: #' #' `output.object` #' #' `RLum.Data.Spectrum` #' #' An object of type [RLum.Data.Spectrum-class] is returned. Row #' sums are used to integrate all counts over one channel. #' #' `RLum.Data.Image` #' #' An object of type [RLum.Data.Image-class] is returned. Due to #' performace reasons the import is aborted for files containing more than 100 #' frames. 
This limitation can be overwritten manually by using the argument
#' `frame.range`.
#'
#' `matrix`
#'
#' Returns a matrix of the form: Rows = Channels, columns = Frames. For the
#' transformation the function [get_RLum] is used,
#' meaning that the same results can be obtained by using the function
#' [get_RLum] on an `RLum.Data.Spectrum` or `RLum.Data.Image` object.
#'
#' @note
#' **The function does not test whether the input data are spectra or pictures for spatially resolved analysis!**
#'
#' The function has been successfully tested for SPE format versions 2.x.
#'
#' *Currently not all information provided by the SPE format is supported.*
#'
#' @section Function version: 0.1.2
#'
#' @author
#' Sebastian Kreutzer, IRAMAT-CRP2A, Université Bordeaux Montaigne (France)
#'
#' @seealso [readBin], [RLum.Data.Spectrum-class], [raster::raster]
#'
#' @references
#' Princeton Instruments, 2014. Princeton Instruments SPE 3.0 File
#' Format Specification, Version 1.A (for document URL please use an internet search machine)
#'
#' Hall, C., 2012: readSPE.m.
#' [http://www.mathworks.com/matlabcentral/fileexchange/35940-readspe/content/readSPE.m]() #' #' @keywords IO #' #' @examples #' #' ## to run examples uncomment lines and run the code #' #' ##(1) Import data as RLum.Data.Spectrum object #' #file <- file.choose() #' #temp <- read_SPE2R(file) #' #temp #' #' ##(2) Import data as RLum.Data.Image object #' #file <- file.choose() #' #temp <- read_SPE2R(file, output.object = "RLum.Data.Image") #' #temp #' #' ##(3) Import data as matrix object #' #file <- file.choose() #' #temp <- read_SPE2R(file, output.object = "matrix") #' #temp #' #' ##(4) Export raw data to csv, if temp is a RLum.Data.Spectrum object #' # write.table(x = get_RLum(temp), #' # file = "[your path and filename]", #' # sep = ";", row.names = FALSE) #' #' #' @md #' @export read_SPE2R <- function( file, output.object = "RLum.Data.Image", frame.range, txtProgressBar = TRUE, verbose = TRUE ){ # Consistency check ------------------------------------------------------- ##check if file exists if(!file.exists(file)){ ##check if the file as an URL ... you never know if(grepl(pattern = "http", x = file, fixed = TRUE)){ if(verbose){ cat("[read_SPE2R()] URL detected, checking connection ... ") } ##check URL if(!httr::http_error(file)){ if(verbose) cat("OK") ##dowload file file_link <- tempfile("read_SPE2R_FILE", fileext = ".SPE") download.file(file, destfile = file_link, quiet = if(verbose){FALSE}else{TRUE}, mode = "wb") file <- file_link }else{ cat("FAILED") file <- NULL try(stop("[read_SPE2R()] File does not exist! Return NULL!", call. = FALSE)) return(NULL) } }else{ file <- NULL try(stop("[read_SPE2R()] File does not exist! Return NULL!", call. = FALSE)) return(NULL) } } ##check file extension if(!grepl(basename(file), pattern = "SPE$", ignore.case = TRUE)){ if(strsplit(file, split = "\\.")[[1]][2] != "SPE"){ temp.text <- paste("[read_SPE2R()] Unsupported file format: *.", strsplit(file, split = "\\.")[[1]][2], sep = "") stop(temp.text, call. 
= FALSE) }} # Open Connection --------------------------------------------------------- #open connection con <- file(file, "rb") # read header ------------------------------------------------------------- temp <- readBin(con, what="int", 2, size=2, endian="little", signed = TRUE) ControllerVersion <- temp[1] #Hardware version LogicOutput <- temp[2] #Definition of Output BNC temp <- readBin(con, what="int", 2, size=2, endian="little", signed = FALSE) AmpHiCapLowNoise <- temp[1] #Amp Switching Mode xDimDet <- temp[2] #Detector x dimension of chip. #timing mode mode <- readBin(con, what="int", 1, size=2, endian="little", signed = TRUE) #alternative exposure, in sec. exp_sec <- readBin(con, what="double", 1, size=4, endian="little") temp <- readBin(con, what="int", 2, size=2, endian="little", signed = TRUE) VChipXdim <- temp[1] # Virtual Chip X dim VChipYdim <- temp[2] # Virtual Chip Y dim #y dimension of CCD or detector. yDimDet <- readBin(con, what="int", 1, size=2, endian="little", signed = TRUE) #Date Date <- readChar(con, 10, useBytes=TRUE) ##jump stepping <- readBin(con, what="raw", 4, size=1, endian="little", signed = TRUE) #Old number of scans - should always be -1 noscan <- readBin(con, what="int", 1, size=2, endian="little", signed = TRUE) #Detector Temperature Set DetTemperature <- readBin(con, what="double", 1, size=4, endian="little") # CCD/DiodeArray type DetType <- readBin(con, what="int", 1, size=2, endian="little", signed = TRUE) #actual # of pixels on x axis xdim <- readBin(con, what="int", 1, size=2, endian="little", signed = FALSE) ##jump stepping <- readBin(con, what="raw", 64, size=1, endian="little", signed = TRUE) ##experiment data type ##0 = 32f (4 bytes) ##1 = 32s (4 bytes) ##3 = 16u (2 bytes) ##8 = 32u (4 bytes) datatype <- readBin(con, what="int", 1, size=2, endian="little", signed = TRUE) ##jump stepping <- readBin(con, what="raw", 546, size=1, endian="little") #y dimension of raw data. 
ydim <- readBin(con, what="int", 1, size=2, endian="little", signed = FALSE) ##0=scrambled,1=unscrambled scramble <- readBin(con, what="int", 1, size=2, endian="little", signed = FALSE) ##jump stepping <- readBin(con, what="raw", 4, size=1, endian="little") #Number of scans (Early WinX) lnoscan <- readBin(con, what="int", 1, size=4, endian="little", signed = TRUE) #Number of Accumulations lavgexp <- readBin(con, what="int", 1, size=4, endian="little", signed = TRUE) ##Experiment readout time ReadoutTime <- readBin(con, what="double", 1, size=4, endian="little") #T/F Triggered Timing Option TriggeredModeFlag <- readBin(con, what="int", 1, size=2, endian="little", signed = TRUE) ##jump stepping <- readBin(con, what="raw", 768, size=1, endian="little") ##number of frames in file. NumFrames <- readBin(con, what="int", 1, size=4, endian="little", signed = TRUE) if(NumFrames > 100 & missing(frame.range) & output.object == "RLum.Data.Image"){ error.message <- paste0("[read_SPE2R()] Import aborted. This file containes > 100 (", NumFrames, "). 
Use argument 'frame.range' to force import.") stop(error.message) } ##set frame.range if(missing(frame.range) == TRUE){frame.range <- c(1,NumFrames)} ##jump stepping <- readBin(con, what="raw", 542, size=1, endian="little") #file_header_ver file_header_ver <- readBin(con, what="double", 1, size=4, endian="little") ##jump stepping <- readBin(con, what="raw", 1000, size=1, endian="little") ##WinView_id - set to 19,088,743 (or 1234567 hex) (required for legacy reasons) WinView_id <- readBin(con, what="integer", 1, size=4, endian="little", signed = TRUE) ##jump stepping <- readBin(con, what="raw", 1098, size=1, endian="little") ##lastvalue - set to 21,845 (or 5555 hex) (required for legacy reasons) lastvalue <- readBin(con, what="integer", 1, size=2, endian="little", signed = TRUE) ##end header ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ ##create info element list from data temp.info <- list(ControllerVersion, LogicOutput, AmpHiCapLowNoise, xDimDet, yDimDet, xdim, ydim, VChipXdim, VChipYdim, Date, noscan, mode, exp_sec, DetTemperature, DetType, datatype, scramble, lnoscan, lavgexp, ReadoutTime, TriggeredModeFlag, NumFrames, file_header_ver) ##set name for list elements names(temp.info) <- c("ControllerVersion", "LogicOutput", "AmpHiCapLowNoise", "xDimDet", "yDimDet", "xdim", "ydim", "VChipXdim", "VChipYdim", "Date", "noscan", "mode", "exp_sec", "DetTemperature", "DetType", "datatype", "scramble", "lnoscan", "lavgexp", "ReadoutTime", "TriggeredModeFlag", "NumFrames", "file_header_ver") # read count value data --------------------------------------------------- ##set functions if(datatype == 0){ read.data <- function(n.counts){ readBin(con, what="double", n.counts, size=4, endian="little") } }else if(datatype == 1){ read.data <- function(n.counts){ readBin(con, what="integer", n.counts, size=4, endian="little", signed = TRUE) } }else if(datatype == 2){ read.data <- function(n.counts){ readBin(con, what="integer", n.counts, size=2, 
endian="little", signed = TRUE) } }else if(datatype == 3){ read.data <- function(n.counts){ readBin(con, what="int", n.counts, size=2, endian="little", signed = FALSE) } }else if(datatype == 8){ read.data <- function(n.counts){ readBin(con, what="integer", n.counts, size=4, endian="little", signed = FALSE) } }else{ stop("[read_SPE2R()] Unknown 'datatype'.") } ##loop over all frames ##output cat(paste("\n[read_SPE2R.R]\n\t >> ",file,sep=""), fill=TRUE) ##set progressbar if(txtProgressBar & verbose){ pb<-txtProgressBar(min=0,max=diff(frame.range)+1, char="=", style=3) } ##stepping for frame range temp <- readBin(con, what = "raw", (min(frame.range)-1)*2, size = 1, endian = "little") for(i in 1:(diff(frame.range)+1)){#NumFrames temp.data <- matrix(read.data(n.counts = (xdim * ydim)), ncol = ydim, nrow = xdim) if(exists("data.list") == FALSE){ data.list <- list(temp.data) }else{ data.list <- c(data.list, list(temp.data)) } ##update progress bar if(txtProgressBar & verbose){ setTxtProgressBar(pb, i) } } ##close if(txtProgressBar & verbose){close(pb) ##output cat(paste("\t >> ",i," records have been read successfully!\n\n", sep="")) } # Output ------------------------------------------------------------------ if(output.object == "RLum.Data.Spectrum" | output.object == "matrix"){ ##to create a spectrum object the matrix has to transposed and ##the row sums are needed data.spectrum.vector <- sapply(1:length(data.list), function(x){ rowSums(data.list[[x]]) }) ##split vector to matrix data.spectrum.matrix <- matrix(data.spectrum.vector, nrow = xdim, ncol = length(data.list)) ##set column and row names colnames(data.spectrum.matrix) <- as.character(1:ncol(data.spectrum.matrix)) rownames(data.spectrum.matrix) <- as.character(1:nrow(data.spectrum.matrix)) ##set output object object <- set_RLum( class = "RLum.Data.Spectrum", originator = "read_SPE2R", recordType = "Spectrum", curveType = "measured", data = data.spectrum.matrix, info = temp.info) ##optional matrix object 
if(output.object == "matrix"){ object <- get_RLum(object)} }else if(output.object == "RLum.Data.Image"){ ##combine to raster data.raster.list <- lapply(1:length(data.list), function(x){ if(txtProgressBar==TRUE){ cat(paste("\r Converting to RasterLayer: ", x, "/",length(data.list), sep = "")) } raster::raster(t(data.list[[x]]), xmn = 0, xmx = max(xdim), ymn = 0, ymx = max(ydim)) }) ##Convert to raster brick data.raster <- raster::brick(x = data.raster.list) ##Create RLum.object object <- set_RLum( class = "RLum.Data.Image", originator = "read_SPE2R", recordType = "Image", curveType = "measured", data = data.raster, info = temp.info) }else{ stop("[read_SPE2R()] Chosen 'output.object' not supported. Please check manual!") } ##close con close(con) ##return values return(object) } Luminescence/NEWS.md0000644000176200001440000004275113604173246014001 0ustar liggesusers ## Changes in version 0.9.7 (2020-01-04) ### Bugfixes and changes #### `analyse_Al2O3C_Measurements()` - Supplementary argument `main` never worked; fixed. - New supplementary argument `title` which allows to pass further names of a sample to the plot output. #### `plot_AbanicoPlot()` - The plot now can handle negative values, fixed in combination with \#88 (reported by Sébastien Huot) #### `plot_RadialPlot()` - Fix display problem when `log.z = FALSE` combined with negative values (\#88, reported by Sébastien Huot) #### `read_BIN2R()` - Due to a typo, some bytes were read in the wrong order (big-endian instead of little-endian). While the fix does not affect the behaviour of the function, the non-fixed function would have soon crashed on CRAN servers. A big thanks to Tomas Kalibera from the CRAN team for his support and for pointing out this issue. #### `internal_as.latex.table()` - The function gained a new argument `rm.zero` which is preset to `TRUE`. If set, all columns containing zero values only are removed. The new argument enables a better fine tuning of the latex output. 
## Changes in version 0.9.6 (2019-12-05) ### Bugfixes and changes #### `analyse_SAR_CWOSL()` - Setting the argument `plot.single = TRUE` does not any longer interferes with the global par settings for the rejection criteria plot. #### `fit_OSLLifeTimes()` - The function gained a new argument `plot_simple` to produce plots without residuals that can be combined using standard R layout plot functionality, e.g., `par(mfrow = c(2,1))`. - Function now handles the `log = "x"`, `log = "y"` and `log = "xy"` options better. - In the self-call mode (imput is `RLum.Analysis-class` object), all `...` objects are supported as `list` and get recycled if the list does not equal the number of objects #### `plot_GrowthCurve()` - The y-axis labelling of the test-dose response plot finally shows what was done since the beginning: the Tx values are normalised by the Tn values, i.e. Tn/Tx and not Tn/Tx. Thanks to Andrea Junge for spotting this flaw. ### `read_XSYG2R()` - The function now supports the temperature recalculation of TL curves even they consist of only a single point value. ### Internals - Changes in the development version of R caused error messages on CRAN and would have stopped the package from function properly in the future. This was corrected; the changes do not have any user-visible effect. - The internal function for the DRAC output `.as.latex.table()` gained a new argument `tabular_only` which gives a little bit more flexibility at hand of the output the DRAC is combined with LaTeX. ## Changes in version 0.9.4/0.9.5 (2019-09-20/2019-09-21) ### Important user visible changes 1. **`plot_KDE()`** wrong error bars As it turned out, unfortunately, the error bars produced by `plot_KDE()` had been wrong since 2014. The input error values (2nd column in the data.frame) had been, for reasons we cannot recall anymore, divided by two leading to an ‘improved’ visualisation of uncertainties. A big thank goes to Andrea Junge (\#93) who made us aware of this bug. 2. 
**The way the x-axis channels from BIN/BINX-files are generated changed** So far, during the file import, minimum and maximum values were taken, and channels were distributed equally according to the number of available data points. Thanks to Sébastien Huot for a constructive discussion. - Example old behaviour: For an OSL curve recorded over 40 s with 100 channels, the first channel was set at 0 s, the last channel to at 40 s. Thus, the width of each channel was 0.404 s. - Example new behaviour: For an OSL curve recorded over 40 s with 100 channels, the first channel is set to 0.4 s, the last channel to at 40 s. Thus, the width of each channel is 0.4 s. - In other words, the first channel does not start anymore at 0 s, as it is the case, e.g., in the *Analyst*. **Please note that these changes will impact your analysis results\!** ### Difference between version 0.9.4 and 0.9.5 Non-used dependency to ‘interp’ removed. ### Bugfixes and changes #### `analyse_SAR.CWOSL()` - The automated curve type recognition introduced with the last release broke the vignette of the R package ‘RLum.Model’; now refactored code prevent other people’s code from breaking. #### `calc_CentralDose()` - The function does not crash anymore with an odd error if `NA` values are provided (\#82 reported by Sébastien Huot). - The function gained a new argument `na.rm` which is set to `FALSE` by default. #### `plot_DetPlot()` - The argument `analyse_function.control` now understands the arguments `fit.method` and `fit.force_through_origin`. #### `calc_MinDose()` - The final parameter estimates and their confidence intervals in the console output are now always reported in their absolute values, independent of whether the logged or non-logged model was used (\#84 suggested by Sébastien Huot). This is a purely cosmetic change, and no changes to the calculations were made. 
- The values in the profile log-likelihood plots are now always given in their absolute values, independent of whether the logged or non-logged model was used (\#84 suggested by Sébastien Huot). This is a purely cosmetic change, and no changes to the calculations were made. The output object still returns the original profile objects with the logged values. - Improved handling of values close to zero if `bootstrap = TRUE` (\#91, reported by Andrea Junge). #### `plot_GrowthCurve()` - Remove rounding from numerical output values (\#81 suggested by Sébastien Huot); this affects all subsequent functions. However, terminal and graphical output do not change. #### Internals - The package `data/datalist` file listing the example data shipped with the package was not correct (\#85, CRAN comment); now corrected. ## Changes in version 0.9.3 (2019-07-31) - This version takes care of a CRAN note complaining about a non-used package declared in the Import fields. ## Changes in version 0.9.2 (2019-07-30) - This version fixes an URL problem discovered by the CRAN precheck ## Changes in version 0.9.1 (2019-07-30) ### Removed functions - `model_LuminescenceSignals()`: This wrapper function for the R package ‘RLumModel’ was removed due to changes on CRAN inflicting a warning ‘object is masked from ’package:Luminescence’ each time ‘RLumModel was loaded. In ’Luminescence’ the function had no real scope, but provided a place holder to point the users on the existence of the package ‘RLumModel’. ### Bugfixes and changes #### `apply_EfficiencyCorrection()` - The function now suppors `RLum.Analysis` objects as input - All allowed input objects can now be provided as `list` (self-call support) - Minor code polish #### `analyse_baSAR()` - The manual stated that `RLum.Analysis-class` are supported, but actually this is not true, it supports a list of `RLum.Analysis` objects only. - Some minor manual corrections. 
#### `analyse_SAR.CWOSL()`

  - Add support for `POSL` data; before, the function crashed (reported
    by Alice Versendaal). `POSL` curves had not been listed as supported
    curve types though.
  - Improve dataset recognition (`OSL`, `IRSL`, `POSL`).

#### `calc_Lamothe2003()`

  - The documentation wrongly stated that an input `data.frame` should
    have the columns ‘dose’, ‘De’ and ‘De error’, indeed ‘dose’, ‘LxTx’
    and ‘LxTx error’ are expected (spotted by Jeong-Heon Choi)

#### `calc_TLLxTxRatio()`

  - The `LxTx.Error` is no longer negative, even if the subtraction led
    to negative count values (\#78, reported by Sébastien Huot)

#### `calc_SourceDoseRate()`

  - The function learned about the half-life of Cs-137

#### `fit_OSLLifeTimes()`

  - `0` count values crashed the function for obvious reasons
    (`a / 0 = Inf`), this can happen in particular for artificial
    datasets. Now, if `0` is detected, count values are increased by
    `0.1` (reported by Dirk Mittelstrass)

#### `plot_AbanicoPlot()`

  - If `summary = "se.abs"` was chosen, the abanico plot showed `se.rel`
    instead; corrected (reported by Maryam Heydari)

#### `plot_DRTResults()`

  - Correct documentation, where it was erroneously implied that the
    `...` argument pipes all arguments,
  - add support for argument `las`.

#### `plot_GrowthCurve()`

  - If the dose points included `NA` values the function crashed
    unexpectedly.

#### S4-classes and methods

  - `RLum.Analysis-class`
      - Code polish (no visible changes)
  - `RLum.Data.Curve-class`
      - Code polish and clarifying the documentation
  - `RLum.Data.Image-class`
      - Conversion methods to a `matrix` and from and to `data.frame`
        never worked as intended.
      - Code housekeeping

#### Internals

  - The ‘DESCRIPTION’ file contained the string “\[upcoming\]”. This was
    a left-over from the development version and is now removed.
  - Version numbering is now again more R canonical consisting only of
    three numbers (‘stable.major.minor’). Development versions are only
    indicated by `.9000` in the version number.
Numbers after `9000` (e.g., `9000-1`) indicate build numbers. Both is used for GitHub build versions only. - The file name `RisoeBINfileData-class.R` was changed to `Risoe.BINfileData-class.R` for consistency reasons, however, this has no furhter visible effect (\#76, spotted by Sébastien Huot) ## Changes in version 0.9.0.110 (2019-04-21) ‘Luminescence’ 0.9.X releases will be the last versions supporting **R** \< 3.5.0. ### Important R related changes (R \>= 3.6.0) The behaviour of the base R function `sample()` was changed / corrected (see news on for details). This function is used heavily within functions by ‘Luminescence’ (e.g., `plot_GrowthCurve()`). That being said, it means that old data re-analysed by ‘Luminescence’ using R versions \>= 3.6.0 will not give the same results. Differences are small, however, they may significant. If you want to reproduce your data, we recommend to use one of our [Docker images](https://github.com/R-Lum/RLumDocker) which include the R version from the package release date. ### New functions - `convert_Wavelength2Energy()` Smooth and quick emission spectra conversion from wavelength to energy scales - `fit_OSLLifeTimes()` Fit and deconvolution of OSL lifetimes using off-time measurements, i.e. after the stimulation was switched off. - `plot_DRCSummary()` Summarise all dose-response curves from your SAR OSL analysis in one single plot. - `scale_GammaDose()` Scale the gamma dose rate considering layer-to-layer variations in soil radioactivity. Contributed by S. Riedesel and M. Autzen. - `fit_ThermalQuenching()` Provide an easy option to fit thermal quenching data and determine the activation energy. ### Bugfixes and changes #### `apply_CosmicRayRemoval()` - Thanks to black magic, this function now runs also over a list of `RLum.Data.Spectrum` objects and and `RLum.Analysis` objects (also nested in a list) #### `apply_EfficiencyCorrection()` - The function produced a warning (“collapsing to unique ‘x’ values”) on R-devel; fixed. 
#### `analyse_SAR.CWOSL()` - If the function was used in a self-call mode (object of type `list`), the arguments contrary to what is written in the manual, the arguments such as `signal.integral.min` had not been expanded properly; no one complained, however, corrected. - The argument `main` no also supports the input `list` in the self-call mode. #### `analyse_SAR.TL()` - The plot legend was not correct, if no dose points had been provided; fixed - The function returned a confusion warning; fixed. - If background signals are provided, now the background subtracted signal is shown automatically instead of the uncorrected curves (suggestion by Tilmann Wolpert) - The function now supports a list of `RLum.Analysis` objects as input - The plot settings were sufficiently reset; fixed. - Code polish #### `analyse_FadingMeasurements()` - Enable support for data imported from a BIN/BINX-file - If the g-value is negative, now it also returns a negative g-value; before always the absolute values was taken, which was not meaningful - If rho was negative, the log10 value could not be calculated, this caused an unwanted warning; now suppressed - It showed a warning on R-devel due to changes in `stats::approx()`; fixed - Records with negative ‘time since irradiation’ are now automatically removed (thanks to input by Sébastien Huot) - Function is now more relaxed when odd data a provided and it is less talkative #### `calc_Huntley2006()` - The function now checks if rho’ (`rhop`) is a sensible non-zero positive value and, if otherwise, stops with a more meaningful error message. (Issue \#74) - Now the function is a little bit less talkative if extreme dataset are provided as input #### `read_BIN2R()` - Add support for missing BINX-file version 5 (pointed out by Harrison Gray) - In the auto-file recognition (only a path instead of a file is provided), sometimes the found BIN/BINX-files were imported twice. 
The corresponding code was simplified to avoid such errors in future - Sometimes the time stamp is invalid since it is missing a leading `0`. For BINX-files \>= version 6 such timestamps now get corrected during the import #### `read_XSYG2R()` - The function failed for R-devel importing spectrometer data (`approx(..., ties = -2)`); fixed. #### `plot_GrowthCurve()` - The function crashed with `Error in data.MC[, i] : incorrect number of dimensions` under very particular circumstances (reported by Alice Versendaal). This was due to unwanted internal object conversion; fixed. #### `plot_RLum.Data.Spectrum()` - The wavelength to energy scale conversion was not correct. This was corrected and internally, the function now calls consistently the new function `convert_Wavelength2Enegy()` (the corresponding code and documentation have been updated or removed). - The function gained a new argument `norm` for normalising the data to the highest or the lowest count value. - The function gained a new argument `bg.spectrum` allow to pass a background spectrum that can be used for the subtraction - The function returned a warning if a single matrix was used but `plot.type` was already set to `single`; fixed. - Wrong settings in `bg.channels` crashed the function. Now it auto-corrects the wrong values. - Channel binning now consistently calls the internal function `Luminescence:::.matrix_binning()`, which is more efficient cleaner than the code used before - The function now longer crashes for `plot.type = 'persp'` if ’xaxis.energy = TRUE\`. - The colour picking option was not always working; fixed. - Warnings are formatted more nicely. 
#### `verify_SingleGrainData()`

  - The function crashed for crazy datasets; fixed
  - Cleaning-up `RLum.Analysis-class` objects did not work; fixed

#### `write_RLum2CSV()`

  - The function gained a new argument `compact` which is set `TRUE` be
    default, and keeps the element output as simple as possible, which is
    in particular helpful for `RLum.Results` objects
  - The function now behaves more friendly to `RLum.Results` objects,
    before the output was hard to understand.

#### `write_RLum2BIN()`

  - The export failed if the time stamp was invalid (`NA`); now `NA`
    values are consistently replaced by `000000` (other values are not
    possible, otherwise the re-import of such file would fail) (reported
    by Alice Versendaal via email, 2019-03-06)

### Changes in S4-objects and methods

#### `get_RLum`

  - The method working on `list` elements got a new argument `class` to
    remove unwanted `RLum` objects from a list of `RLum` objects

#### `RLum.Analysis-class`

  - The show-method crashed under very rare circumstances if
    inconsistent objects had been combined; fixed.

#### `RLum.Data.Spectrum-class`

  - Support added for `bin_RLum.Data()`

### New example datasets

  - `ExampleData.TR_OSL` provides a single fast-pulsing OSL curve that
    can be used to test the function `fit_OSLLifeTimes()`.

### Internals

  - Package news are now provided as markdown document (NEWS.md) which
    is supported by CRAN
  - The package version numbering scheme has been modified to simplify
    the distinction between different developer versions. While relevant
    numbers for CRAN remain X.X.X, which reads ‘stable.major.minor’
    version, a 4th number indicates the running package build number,
    e.g., 0.9.0.7.
- New internal function `Luminescence:::.matrix_binning()` to get the matrix (and curve) binning consistent; so far used only by the the `bin_RLum.Data()` method for `RLum.Data.Spectrum-class` - Vignette S4-class object structure updated - Internal changes to address changes in R-devel checking logical arguments with length more than one (e.g., `if(is.na(x))` returned an error if `x` was not a vector of length 1) - `src/Makevars` file removed to prevent unconditional stripping in response to a recent CRAN request; in return the installation size of the package increased in size on some platforms. Luminescence/MD50000644000176200001440000006121213605457601013205 0ustar liggesusers2071345c7385863cabe221d0f5e503d2 *DESCRIPTION 88f7682eaeea37f9db6d269408011e86 *NAMESPACE 7a5ac0fac0a0aab28a06b45a5754450b *NEWS.md 08ad54d35a831768c9cb360d75bacbb7 *R/Analyse_SAR.OSLdata.R 56db50408f37b2a2307d4eea646201c6 *R/CW2pHMi.R e90529fb617771c1ec0d8a44ff2fc97c *R/CW2pLM.R 7e3d8763aceb983ba14476b304be2698 *R/CW2pLMi.R ac4868e82bf03d1a13b050aade171b98 *R/CW2pPMi.R 886ef07441d46dd35c9988392a59866c *R/Luminescence-package.R 39fe8c1c69bbac4d563f5540bea32c3f *R/PSL2Risoe.BINfileData.R c38ec9a11c3f43a6b19b28b5904d07dc *R/RLum-class.R bedbd0b9f5069273d23648861f436b99 *R/RLum.Analysis-class.R 4abb5d38d6a999187a497c20d93f2f70 *R/RLum.Data-class.R 22d2028f857dd33c022a99b87803219e *R/RLum.Data.Curve-class.R 2e2d921557d2e516d4a09767384d9951 *R/RLum.Data.Image-class.R d250abbfec98464f446865e5bf784c39 *R/RLum.Data.Spectrum-class.R d00144aadb592375b9e231c5f0b2917a *R/RLum.Results-class.R 047363cf43452f576e13706b73027b87 *R/RcppExports.R 6181e301c05eeec7cf8302ae99a61bd7 *R/Risoe.BINfileData-class.R 580a208ba9989fef6dafa94bc3d8c1c0 *R/Risoe.BINfileData2RLum.Analysis.R 127842662f5cac561b59aa7babbb500f *R/Risoe.BINfileData2RLum.Data.Curve.R e096de94add4a78fd65d4700e267fc02 *R/Second2Gray.R 7e295b132ead31d080915fe03c189ac2 *R/addins_RLum.R 72d36146276ad5ae90034fdc7006cb59 *R/analyse_Al2O3C_CrossTalk.R 
275c2734181c60066148fc28dd91dfcb *R/analyse_Al2O3C_ITC.R 9b46c0eb6575b4fe638e7f928a2429ac *R/analyse_Al2O3C_Measurement.R ff80b0f82e54675142f564ea269318dd *R/analyse_FadingMeasurement.R 23a7c00cbb223b66b391a10c951a37f8 *R/analyse_IRSAR.RF.R 3d9671fa90216fda84424beab0fa22c2 *R/analyse_SAR.CWOSL.R 449decada726adb1ccdd77fce91b1234 *R/analyse_SAR.TL.R 74300480bd39e0c9c6e27ee70610b612 *R/analyse_baSAR.R a786a79f9c66babee93cd4b2d7543aba *R/analyse_pIRIRSequence.R d5a341507e433f5145c9a7f9149b637c *R/analyse_portableOSL.R c900e474980e7a2f9c5a302e826794d4 *R/app_RLum.R f20ec8ca7a922aba1db27231dd8be0b8 *R/apply_CosmicRayRemoval.R 0b7c51681fa039b537b736e347b3e57c *R/apply_EfficiencyCorrection.R 646a101cd94b1149a2eecfabf44e1693 *R/bin_RLum.Data.R 9e15f5abbd8aa8a12bd0ddb15508d290 *R/calc_AliquotSize.R 57f947262c512919c5f75fd37fd81766 *R/calc_AverageDose.R 37ec179ba41d750b0ad3dd5436f41c69 *R/calc_CentralDose.R 2963d9a6593cc3c3a0cbddedb18e55b7 *R/calc_CommonDose.R 31dcc1f85ea791e9d9d657c4b2206e95 *R/calc_CosmicDoseRate.R 3102d6c8dc66f0c0fd651f2aa9ff3108 *R/calc_FadingCorr.R 081c8ad483baea23bfc0e1556c913fae *R/calc_FastRatio.R 587cacbeb0176f0c78645391a2ba6264 *R/calc_FiniteMixture.R 2587cfde6bae43fa041160e98bcf53f2 *R/calc_FuchsLang2001.R a37b114c2bf86627b6ac039a84a0312a *R/calc_HomogeneityTest.R 1dd9cdd14a34d81c4bac2d76ef220fa0 *R/calc_Huntley2006.R a9984ec9f1fbd3166addffd8bfb24afe *R/calc_IEU.R e961e05a49e053cf2a06203ff301ab67 *R/calc_Kars2008.R e1f7ce008e6507ec9ff08fd2723f8614 *R/calc_Lamothe2003.R 911aa5ff3dd08844fd037b785bcfa8a7 *R/calc_MaxDose.R 8eeabe2ce0370aaae4fc91dbb31ff0d3 *R/calc_MinDose.R 548d8eaa56588b81f22646231522bd3a *R/calc_OSLLxTxRatio.R e065651308288b20a631e1a5953e26d0 *R/calc_SourceDoseRate.R ca8d9a3466b2df583204982b3f1de530 *R/calc_Statistics.R ee257f33baa83568e93d9fd52054b532 *R/calc_TLLxTxRatio.R e700f8be820a718875acab7aa0976647 *R/calc_ThermalLifetime.R a2c253a8e83a909a682f0cfe32fcdd47 *R/calc_WodaFuchs2008.R d6d84fc6268770e8c606bbe1b39bdf84 *R/calc_gSGC.R 
b186037a3aec6abacdacf01c7ac0f017 *R/convert_Activity2Concentration.R a9a33f23362a8f80ad8d26a762d3bdc9 *R/convert_BIN2CSV.R 0a80188930e0fbc5e078702e1cfd181d *R/convert_Daybreak2CSV.R ff8f86257eb61f356ba693bfca11fb57 *R/convert_PSL2CSV.R 825dc35cdd079fd93352f65dd6bbd0ea *R/convert_RLum2Risoe.BINfileData.R a2eac67e7d99b839f762ad03bab1108c *R/convert_Wavelength2Energy.R 31b0fcd098787b070210bf6b63ec49aa *R/convert_XSYG2CSV.R a86a798ede366c358d90647117c43e9f *R/extract_IrradiationTimes.R b3865fd6097b6e9ad386c251e2b591fe *R/fit_CWCurve.R 33c53f4a664cd5e6d6008afe9bcd4ffb *R/fit_LMCurve.R df2195a00381fe3468ef52f229219457 *R/fit_OSLLifeTimes.R 2c04f3abe0a27109dd4b44bb51ec0ebb *R/fit_SurfaceExposure.R 7569c57c8522643b7021f07ea2a884da *R/fit_ThermalQuenching.R a4cb2d1c8b454e0009a92b7b0c98d818 *R/get_Layout.R 74a1c629384a18423fda876d87dba9fa *R/get_Quote.R 3a3932cd2e289e1ba9644104d5f55674 *R/get_RLum.R 9dc9b28337384ec7db0d173db6e5c62d *R/get_Risoe.BINfileData.R fd947c56470a6462fc0bcbf49b84daca *R/get_rightAnswer.R ef3847d3b47a86332d71f23afa421d2b *R/github.R 25c9ef15c500b88f1fdd2814400c069a *R/install_DevelopmentVersion.R bcec18025b2b68683ca37136fcbeaf66 *R/internal_as.latex.table.R 31a20541bc7b6fa32c86868556c373b7 *R/internals_RLum.R c3ab292b762ef9f92fb57ee9c335ea91 *R/length_RLum.R b2f36935b3d55f9c1203ea305bd42cc5 *R/merge_RLum.Analysis.R c3e996aa83d3e69368dcd4b4c94d7b03 *R/merge_RLum.Data.Curve.R b0ccc3557fa1e74d218e616ecc5152b3 *R/merge_RLum.R fbfbe9062536dea23de84fc383252d25 *R/merge_RLum.Results.R 814f25e25f9d998d2a29ccd491813b07 *R/merge_Risoe.BINfileData.R 918618008c45ec5d0f05427e75b249eb *R/methods_DRAC.R c36e7d9fc2291a7e9b7465465306cb4e *R/methods_RLum.R fd08574c05c6425c2f8ee09480bfb057 *R/names_RLum.R 1dbe0d3d7293e62d33dff57f77392ca7 *R/plot_AbanicoPlot.R 9ade91100d86d4250ee450309fe6360c *R/plot_DRCSummary.R 9af1cc185af8534a250c656fd882967b *R/plot_DRTResults.R d1e67a3275d70c00c81a04ee6fec1dc5 *R/plot_DetPlot.R e8a43bdf747d6291694b268759326f95 
*R/plot_FilterCombinations.R ba500b9a6192861958d8bb079e644f04 *R/plot_GrowthCurve.R 17e81cf72043421c8df0cba460389d77 *R/plot_Histogram.R fd38b9230bee9fda7dc8673cfad99c1c *R/plot_KDE.R f9c7be74c4ecc85c3166a5dc0c55f6d3 *R/plot_NRt.R b0749bac3118d962d03256482aad9abb *R/plot_RLum.Analysis.R fa582ace439eb1ec900078b84b5746a7 *R/plot_RLum.Data.Curve.R 415159f99a9c01504042b3a43350f331 *R/plot_RLum.Data.Image.R de11ad5b081975224a105698daf99cc8 *R/plot_RLum.Data.Spectrum.R 1b51bbdedaeb33afc464fca4cae2bc12 *R/plot_RLum.R 2a683fe6eee32a2ab4814d27649b8849 *R/plot_RLum.Results.R 0e26a9b1678a5a2697f63aaa511a7ae5 *R/plot_RadialPlot.R 0ed2c9ee0134a5ad2b48eafc18aff696 *R/plot_Risoe.BINfileData.R 7d91d59ac8463bdeb8c255c3bbc273f9 *R/plot_ViolinPlot.R 8928b3bd02ca3b820f3b1173bcfee0f6 *R/read_BIN2R.R 8ad62ee54d122073ace6a4e57c15e114 *R/read_Daybreak2R.R 7b40a737488214ac2a9d4b3de4250413 *R/read_PSL2R.R 3477b192dcf8f1e98cd57567fa7e29b6 *R/read_SPE2R.R 402193be6eb4fd5b5b37f54dccccfa2a *R/read_XSYG2R.R 8691b75dc8c55badde2945508fed185d *R/replicate_RLum.R af128863eaca5f4dd18895d38b2a44ec *R/report_RLum.R d8b33bbd1c0d88c658fdb7032af22453 *R/scale_GammaDose.R 5c31f82f6f7b0a37d60e0422228881b3 *R/set_RLum.R 5d5b238f0a807c8e9af44799bcdf83df *R/set_Risoe.BINfileData.R aa5f31d43e97ea847b4bda5ec3633d1a *R/smooth_RLum.R 8c0a1d64d24db5eca7bc81f9728ab34e *R/structure_RLum.R 18a57e204d189543d43269014cb63552 *R/template_DRAC.R 9cd53759e9cdbfaf3f31aa42be8ae241 *R/tune_Data.R 2c9673ff24f7187828d63214dfaefb65 *R/use_DRAC.R f0c4d24dafd7b92924d2e3e4276cf500 *R/utils_DRAC.R 725799289aca42ce9c7f7b8cc18cb18f *R/verify_SingleGrainData.R e202cbde07730054d86a646738e173eb *R/write_R2BIN.R 3f911e836efb0e936391222dc504aea7 *R/write_RLum2CSV.R 2870e7b8100e8011016af62ecb63a268 *R/zzz.R de1f23efd9572468357a83303a2bba28 *README.md 4a3178d021fd53177a6962e394ee5584 *build/partial.rdb 8c9571aaef3a1510bab1286ac2719328 *build/vignette.rds f349477c8f12f4397aeb0d9491669d74 *data/BaseDataSet.ConversionFactors.rda 
3fd02aa03b62ed93d291df5585160c62 *data/BaseDataSet.CosmicDoseRate.rda 902b0e4d39fa28d1d35a17de092285cb *data/BaseDataSet.FractionalGammaDose.rda 300a9f7f2b667aa572b23ffba3b5281f *data/ExampleData.Al2O3C.rda 9c8f76e70d6a022c14f9b9937a6e48b2 *data/ExampleData.BINfileData.rda 98b340cd13dfc0c89873bc8c53ec092c *data/ExampleData.CW_OSL_Curve.rda 420acab318984756ac02db5396ca91b1 *data/ExampleData.DeValues.rda c3e0676c122950754bcaf1c9ceb1fb11 *data/ExampleData.Fading.rda cc538618675734cda21cd2f4aec30c35 *data/ExampleData.FittingLM.rda a194b7273caa95df091f6a0b32b1a0be *data/ExampleData.LxTxData.rda 0fb9b7fc02638049534e67c103251193 *data/ExampleData.LxTxOSLData.rda af5f6dfb15646a13a093355407077247 *data/ExampleData.RLum.Analysis.rda a0e58c892fc6a8dc2ad53a3936fee797 *data/ExampleData.RLum.Data.Image.rda 90a303d00d0678d2dc2419209cfd534a *data/ExampleData.ScaleGammaDose.rda 00927dacb7eb977ed78471bd00f6095b *data/ExampleData.SurfaceExposure.rda 6bdc582e91b77789c0927656181a5415 *data/ExampleData.TR_OSL.rda 2efbd548fbf40f3051896c67744f3968 *data/ExampleData.XSYG.rda 46cb02411ce47f89f50800707f91e90c *data/ExampleData.portableOSL.rda 0cfe323d7622f0ebc02e5cb0b521ccbe *data/datalist 531909b6322fdbc2e01d30d1f5ac6b5c *inst/CITATION 2ac8ea88202327203eaf374c66883174 *inst/doc/HowTo_analyse_Al2O3.html a28588f695a420a4f0bd866ba47d1063 *inst/doc/HowTo_analyse_Al2O3.html.asis 8180ed648774e681a2f72eba3f75c09f *inst/doc/HowTo_analyse_pIRIRMeasurements.html c55ae6925d8c2b477c23334587ded253 *inst/doc/HowTo_analyse_pIRIRMeasurements.html.asis 70a5293d08957c1bfe43b55afeaf9203 *inst/doc/S4classObjects.pdf 6beca119f1590a3626e80cab485f8002 *inst/doc/S4classObjects.pdf.asis 5c342b7cba17fe33015293da421bc228 *inst/extdata/Daybreak_TestFile.DAT 251753a30fe82779d40ad3f17ba448be *inst/extdata/Daybreak_TestFile.txt fb458a7047c03e4ebae19b8aefa610f0 *inst/extdata/DorNie_0016.psl d70d1998a51bfeb77e3ba3f54c474418 *inst/extdata/QNL84_2_bleached.txt a06f36f48c83f10917989c829a8aafe9 
*inst/extdata/QNL84_2_unbleached.txt 6a11fbc707237b71db2c72115934f679 *inst/extdata/RF_file.rf 4473f6a1f4375066efe52ff7824966d1 *inst/extdata/STRB87_1_bleached.txt 17e386e1832e46b61e912f7c9535e8d0 *inst/extdata/STRB87_1_unbleached.txt 189b1a6916c8026334dd49b372468e61 *inst/extdata/XSYG_file.xsyg b286b56ca5e51dd11b14da750452f2c7 *inst/rstudio/addins.dcf 4f8da702fe7dfd612948a9f8be04d4de *man/Analyse_SAR.OSLdata.Rd c2ff398a7070934885aadfa72bd2cddf *man/BaseDataSet.ConversionFactors.Rd 7a1ecf17293711c7cbaac8b4ce16cb3a *man/BaseDataSet.CosmicDoseRate.Rd b65540d672ded924ee2de7bd8c845845 *man/BaseDataSet.FractionalGammaDose.Rd 62416c20a80372f694416f949dd6387f *man/CW2pHMi.Rd c4370165ed5e373ef7278894b326d3c5 *man/CW2pLM.Rd f3781cf20edd59c655526030e886c644 *man/CW2pLMi.Rd 3f26a2d267dbdc6fde554ad469c6e907 *man/CW2pPMi.Rd a18ee66eabd73737c99f241cd927a975 *man/ExampleData.Al2O3C.Rd ebd8c612f8a7fb7b06884e081d73f8bb *man/ExampleData.BINfileData.Rd 511d5ba56095de4401c1177741c12b8a *man/ExampleData.CW_OSL_Curve.Rd 4edc4c4d08595c7bf57188a63343399c *man/ExampleData.DeValues.Rd ff1e6bd0fe77be4804733090c3603645 *man/ExampleData.Fading.Rd e75cca5b854f89236535fb583e0c699c *man/ExampleData.FittingLM.Rd 1b0e5b51d9d530b187fed10ee88850c2 *man/ExampleData.LxTxData.Rd bc501d790ddf91d95232ac8ee82fa72c *man/ExampleData.LxTxOSLData.Rd eed089c997952e7b3e0df9a306698293 *man/ExampleData.RLum.Analysis.Rd 7ad58b0eefc01afbc061e41810b57669 *man/ExampleData.RLum.Data.Image.Rd 97b530484df3a2a633d8015f324a4ed3 *man/ExampleData.ScaleGammaDose.Rd 41e41bbee2858d3a9d88d64b4b582271 *man/ExampleData.SurfaceExposure.Rd b136574ee81d9755546331615940ef23 *man/ExampleData.TR_OSL.Rd b997e19903b22e603aa15aaefd1a3d90 *man/ExampleData.XSYG.Rd 8f2182a4144aba13187cb14cffdb094a *man/ExampleData.portableOSL.Rd 0dee862c722355e687ffb331c1ba9d0f *man/GitHub-API.Rd 976ef279a0667ae064145b635ab5f499 *man/Luminescence-package.Rd e81ed023c769d8749159302bfa03166c *man/PSL2Risoe.BINfileData.Rd 9580c6aaf1e823160c1c766aa36f66d1 
*man/RLum-class.Rd 075fde4e77ce10f25c6d4decfa58bce0 *man/RLum.Analysis-class.Rd 7d02e0f00f29bad7c5d7dc9958a40714 *man/RLum.Data-class.Rd f8d1cc9588d0cb627815c43c65d9e72c *man/RLum.Data.Curve-class.Rd 726bc4478a439ddea3cca097da435261 *man/RLum.Data.Image-class.Rd 357b17cb033c028e2d615594c291397a *man/RLum.Data.Spectrum-class.Rd ce1523cdb1952b424bf44fac0486a0da *man/RLum.Results-class.Rd 39d838f5e95e2214d2dbec103a1c0a84 *man/Risoe.BINfileData-class.Rd c5af6beb5f2324d08afa751eca66d1ae *man/Risoe.BINfileData2RLum.Analysis.Rd cbe7b22e7677792249a38b848cbc9253 *man/Second2Gray.Rd 406ddc1bf9ecf4bc8f80d5e2749f464a *man/analyse_Al2O3C_CrossTalk.Rd c83175865c535b850d8b10b40c6bf1c4 *man/analyse_Al2O3C_ITC.Rd 1c2ce00d4baa2158b84f274def7307fb *man/analyse_Al2O3C_Measurement.Rd a5bc23ce55750590b52fab3687731dca *man/analyse_FadingMeasurement.Rd bfd72e57488a02d53ca43faac06e4078 *man/analyse_IRSAR.RF.Rd 9376d871df79a8ba9ad75ba5dd116bf0 *man/analyse_SAR.CWOSL.Rd 679db7768e9d913997f480b948d8896f *man/analyse_SAR.TL.Rd 40a038e1b59152100eb42b6a95ac21d3 *man/analyse_baSAR.Rd f1bdfd7b3e016b2837cb11be991fecd7 *man/analyse_pIRIRSequence.Rd 86bac7fd4410947ab232a4f9d481d286 *man/analyse_portableOSL.Rd a921d95b278d396cee1e928e3bc4fc22 *man/app_RLum.Rd c40fdad18733725600a71462510b905c *man/apply_CosmicRayRemoval.Rd d7823e0717b1ce34e4456b9268243605 *man/apply_EfficiencyCorrection.Rd 146bb2397d97edc3f7abe9b7ff9c5ea7 *man/as.Rd c918d8e2af485ca41402322cdd3754f8 *man/bin_RLum.Data.Rd 54bdaeea7b8e8214885901d02f6895f7 *man/calc_AliquotSize.Rd c72a72d5af0d2f685f7432745fda30cf *man/calc_AverageDose.Rd 9316922cd6d3953e9f07a61ab38b2062 *man/calc_CentralDose.Rd 9d94b0ea39d57de29e266456da08d125 *man/calc_CommonDose.Rd d3b31a83739aa29ce2a0dd99000fb9b0 *man/calc_CosmicDoseRate.Rd a9d29b05e4c501e984226a1647450089 *man/calc_FadingCorr.Rd d1e91b28cad6a0d3ebf0f59fdc05bcad *man/calc_FastRatio.Rd 8f5bb05bdaf0157b5656b4533c721aaf *man/calc_FiniteMixture.Rd f8a0fcde740086fa55a6c7c53b0f9482 *man/calc_FuchsLang2001.Rd 
19c9dc1dbef4ee545f11db3842405882 *man/calc_HomogeneityTest.Rd 46c6cb25f723cbc0cc9a6f55fe509e80 *man/calc_Huntley2006.Rd 97c45a3194ee3b8c9dfea6342d7e17ad *man/calc_IEU.Rd 70fa40b41f22b8286bfcd3fb5d027262 *man/calc_Kars2008.Rd c33c534be982cffbf11d56a49f2143c9 *man/calc_Lamothe2003.Rd 01785820f373dd9fd4d1026ceafeef60 *man/calc_MaxDose.Rd 3402d34a416c44994acd5980896e3a19 *man/calc_MinDose.Rd ea010e6d84ad2ce59e0c1176ac7f9be4 *man/calc_OSLLxTxRatio.Rd 578c5caac3feead3ee3bfb194247bfe3 *man/calc_SourceDoseRate.Rd 265c9c4470fa2ad4991a8b2666f012f4 *man/calc_Statistics.Rd 650ff8ca7ab33fdd0d854b48c0a8f414 *man/calc_TLLxTxRatio.Rd 498d610a73fdf9f1243ef538618100a2 *man/calc_ThermalLifetime.Rd 8623762cbfa9d66a16623682d24da3cd *man/calc_WodaFuchs2008.Rd aadb11830a729dcd60200e72f05d3b73 *man/calc_gSGC.Rd f3a5613ee94e91fdf99c64e23d780b62 *man/convert_Activity2Concentration.Rd 3fdee600ba4a90ee4464806a5219980d *man/convert_BIN2CSV.Rd 2a269e7f10bf1df14bb94ddf994590a1 *man/convert_Daybreak2CSV.Rd c5af465b4df6c760aca3fb730175940b *man/convert_PSL2CSV.Rd 7dd44f60ff0214395f785113df45cab8 *man/convert_RLum2Risoe.BINfileData.Rd 1678e8b19f39b7f333cd6ac4685e2dd1 *man/convert_Wavelength2Energy.Rd 885a088e7e55815300118b0dbdb8c310 *man/convert_XSYG2CSV.Rd 0528ad6ca9dd7198980703a833a03631 *man/extdata.Rd 030bc4f672d369685b2d1c1de43ccaad *man/extract_IrradiationTimes.Rd 9deae2151082199416fb228477744c47 *man/figures/README-Package_DependencyGraph.png 29deb119b951b526ca2049533e297386 *man/figures/README-Screenshot_AddIn.png 182a1e0fb960045c3861715c446222a3 *man/figures/RL_Logo.png a642025a9acdc25e42a98e18527ec42d *man/figures/RL_Logo.svg e4b80277b2302ef29ba29912a69db9e2 *man/fit_CWCurve.Rd bf2ff97985bd635ea2a695fd993e10b4 *man/fit_LMCurve.Rd bdc9dbd21edf5d797847480fcce5313c *man/fit_OSLLifeTimes.Rd b0ac4a3b0e064db0641d7363b73a6d6d *man/fit_SurfaceExposure.Rd 8d0f8b819c537762d25ea7c1355625bd *man/fit_ThermalQuenching.Rd 23826089189180a0f9ed01052b59f3ef *man/get_Layout.Rd 
2d61191475e8e3444fdb638d1bdb59c9 *man/get_Quote.Rd f9b94b7d1bc355a4bdd50be9e756f575 *man/get_RLum.Rd 30399d8b446b720bdf46a05c941d9a65 *man/get_Risoe.BINfileData.Rd b47a3f0c54f26fc728ca76560bd0e015 *man/get_rightAnswer.Rd 672c20ef48c2d72157ea441d61a426b5 *man/install_DevelopmentVersion.Rd 2c6c8b72e94a4db4431f3dbf94fd582d *man/length_RLum.Rd ec37b74f3ade0f1084eb364539929744 *man/merge_RLum.Analysis.Rd 4377f09e787994de72b92ce0c317860c *man/merge_RLum.Data.Curve.Rd 3fe6a5074eb9184041b855bc0ba66a95 *man/merge_RLum.Rd c8ac36ee2e852fa07344e3e486e7d5a7 *man/merge_RLum.Results.Rd 9147da7ae5c9167ec4e39309bafad33b *man/merge_Risoe.BINfileData.Rd e340a035da07959dc9505a859f869492 *man/methods_RLum.Rd f8c44069ecdcadbbdecff5a1fd297e60 *man/names_RLum.Rd e7bec029a99cfba28ebc299ed1a55895 *man/plot_AbanicoPlot.Rd 4f8725c8cc396fa972c261bb2f7dafda *man/plot_DRCSummary.Rd c34b643a07010de63a091f516ede7615 *man/plot_DRTResults.Rd 14aa05102d72b7cad3cb355e06790bcf *man/plot_DetPlot.Rd de5e7fb43d53134cb8301e0d4429b4ae *man/plot_FilterCombinations.Rd 712d76eda0f87ccd9c2413aedb69c7a3 *man/plot_GrowthCurve.Rd a81df11c63d75d39a35954df2acb45c4 *man/plot_Histogram.Rd 37578ca044223f6e490b8eb01595a836 *man/plot_KDE.Rd 1b0aad3ba5b3b9c4348e21603668afd8 *man/plot_NRt.Rd c09b7c061d5f9d96babea311616ad402 *man/plot_RLum.Analysis.Rd 959c1a9743c7ca44b3ab4fa50273a81e *man/plot_RLum.Data.Curve.Rd 185861cb17cedc348e21bf2d312d4f5f *man/plot_RLum.Data.Image.Rd f138996e2e4010a0d24c0cf612667f3f *man/plot_RLum.Data.Spectrum.Rd c71be768654a8e4f8d6211bb37c5b442 *man/plot_RLum.Rd 1a41bfd4e4a5ae7dbba3c13b39e96f62 *man/plot_RLum.Results.Rd 17335dcb2acf90fd04046082804d57c4 *man/plot_RadialPlot.Rd a786b0eea6b33c2f361a922232a2e922 *man/plot_Risoe.BINfileData.Rd 6d7572351e4077c207079fd43327438f *man/plot_ViolinPlot.Rd 1682f142185c1ba551436ee4cf9c7ce4 *man/read_BIN2R.Rd 40c4b0ee0beff4bc4bab5895e86f6f3e *man/read_Daybreak2R.Rd 35ec1b9784da161dbef8f929ad6521cb *man/read_PSL2R.Rd 837f5eac108ee25672fb805ca7b827a8 
*man/read_SPE2R.Rd 29eca9a8a3d1acae6579b0824b36fed9 *man/read_XSYG2R.Rd 185c7450f00fc9c8586f100ac3fd4c1f *man/replicate_RLum.Rd b1e817292a790320cf69a34527229eb6 *man/report_RLum.Rd 4e97cde62175e3c3aae376d471f86d57 *man/sTeve.Rd cf4497d43ae77e66c3fe19bed70227bc *man/scale_GammaDose.Rd 59f1e9c76296d8b5d14f8ea77184e81f *man/set_RLum.Rd 13b87fd27387f21ea5e1e5f7676cf61e *man/set_Risoe.BINfileData.Rd b1ead1e7319178903ac4effd1ca428d9 *man/smooth_RLum.Rd 052a8f4689a30b7bfb130341d9247789 *man/structure_RLum.Rd 15b0884598d8b8f1467bd86cf8e89055 *man/template_DRAC.Rd 60e890ca88a5c2e985d1aafac79647a3 *man/tune_Data.Rd 02dd6951d00e3b6a9f186c41fc318969 *man/use_DRAC.Rd f8ef0ca8f3c95c57c9f91caf6daa4e20 *man/verify_SingleGrainData.Rd 69aa3ca22b37ff7519b5f233304060b0 *man/write_R2BIN.Rd 2e0d72aba624995eab32fcf2e5e10a6b *man/write_RLum2CSV.Rd e86e2f4e85907023c9bb059f21e06291 *src/RcppExports.cpp c91bae4a391622c1b457078b273c0e19 *src/create_UID.cpp 4f9ac4ec02b570707e2175d38bd655af *src/src_analyse_IRSARRF_SRS.cpp 1ffcaa4ae64e2e1fa799736f16fd8515 *src/src_create_RLumDataCurve_matrix.cpp 65700497a5621c8124a2310871012df2 *src/src_get_XSYG_curve_values.cpp 5c33e2021366df205be163e82ca1a759 *tests/testthat.R d2f92135b240169e83217a24b85b2978 *tests/testthat/test_Analyse_SAROSLdata.R da722a979208f3b55ae9a4c5dc1c02cf *tests/testthat/test_CW2pX.R ae1bd3189f87dcd90077ba3f99e55798 *tests/testthat/test_PSL2RisoeBINfiledata.R 0f2307492507116af5bf6404d9863d22 *tests/testthat/test_RLum.Analysis-class.R 568cf636aaaa24744dd1510e80f41cf0 *tests/testthat/test_RLum.Data.Curve.R dc53fade4e08ff24f7efa11a477c368e *tests/testthat/test_RLum.Data.Image.R 88f9eee51bf4edf6c144edbf81454872 *tests/testthat/test_RLum.Data.Spectrum.R 48b901e4741064fafc9459cf139c5404 *tests/testthat/test_RLum.R 9fd7414e002780b450ddfb57e152be62 *tests/testthat/test_RisoeBINfileData-class.R db9a35c16345701c372e7404378c2c18 *tests/testthat/test_Second2Gray.R 8017965f5e1004fec15380448da1f9c0 *tests/testthat/test_analyse_Al2O3C_CrossTalk.R 
b30aa61240e5df0a7c59e798e7d82884 *tests/testthat/test_analyse_Al2O3C_ITC.R 0b478996e169c711ab13f0b42a13794a *tests/testthat/test_analyse_Al2O3C_Measurement.R 07e9e47fdd67491abff39223c3b7e9fc *tests/testthat/test_analyse_FadingMeasurement.R 2d14bd7f7e11abbe598a54020c6a3805 *tests/testthat/test_analyse_IRSARRF.R 07acd1f44857cadbeb8ae1ae154abc84 *tests/testthat/test_analyse_SARCWOSL.R 21a665b5c9115151302b340bbbfb5c37 *tests/testthat/test_analyse_SARTL.R 037f0df6b55c08f0ae3fe4479eb120b8 *tests/testthat/test_analyse_baSAR.R bdb9c6f8a5b273b178918ebcf8df575a *tests/testthat/test_analyse_pIRIRSequence.R 76adf42e17285a575784c7bfd8e3a18a *tests/testthat/test_analyse_portableOSL.R aae42771c60634491a2cf667bc059e1c *tests/testthat/test_app_RLum.R 3de84c419fd6e1fbff9374639cddd08c *tests/testthat/test_apply_CosmicRayRemoval.R f4f06cfc88ca1c9030a5fdeeec735768 *tests/testthat/test_apply_EfficiencyCorrection.R 1efb37204882884f681f98f047e20d35 *tests/testthat/test_as_latex_table.R 1bb9365493a2e71633428776fc34fdd4 *tests/testthat/test_bin_RLumData.R 98de480108724dc6567f8988f2169f9e *tests/testthat/test_calc_AliquotSize.R 7ab999724414f0364bd4af8809f46cf0 *tests/testthat/test_calc_AverageDose.R ff1260295f40992e88ff1b6275bc0f5f *tests/testthat/test_calc_CentralDose.R f3e684f9cfefc721a9bfddfbc9c01950 *tests/testthat/test_calc_CommonDose.R a79160ac6df7646fc3c440b1f346ad29 *tests/testthat/test_calc_CosmicDoseRate.R 724351aaa09692863ea987e71174b3ee *tests/testthat/test_calc_FadingCorr.R ef8a337f704ba7404eb9d52b6ef25f98 *tests/testthat/test_calc_FastRatio.R 6c4411e2879e2ac4f1879d57682e3ff0 *tests/testthat/test_calc_FiniteMixture.R c7bdbb30555290c3c9a14797a8ad7357 *tests/testthat/test_calc_FuchsLang2001.R d62ed2f8e71f9cba82bdd621d895fb1e *tests/testthat/test_calc_HomogeneityTest.R 68752bd3c19548063b126735ef354d1d *tests/testthat/test_calc_Huntley2008.R 146dee29d539c495009caea0724edd4c *tests/testthat/test_calc_IEU.R 130aeb20327f22e4d7767f924d3cad9e *tests/testthat/test_calc_Lamothe2003.R 
805f6801c97d278d3f0926eadf1bbb10 *tests/testthat/test_calc_MaxDose.R 5372c993a1deda7cfebc1b4068fc96e0 *tests/testthat/test_calc_MinDose.R 62ed9a239cd105ee304dab2c62f3ee7c *tests/testthat/test_calc_OSLLxTxRatio.R 495da3ae6b1874bb4efb183416306fa3 *tests/testthat/test_calc_SourceDoseRate.R 5ac0a85aac35aea2706fd15a279bdf5a *tests/testthat/test_calc_Statistics.R 66dd969ef474afd721b2eb204b39e186 *tests/testthat/test_calc_TLLxTxRatio.R cd8b38179b45ab6338ffdc86d77f4089 *tests/testthat/test_calc_ThermalLifetime.R 6f25e9b75c9512ce6b9b4cfd6bf08163 *tests/testthat/test_calc_WodaFuchs2008.R c3cdee0f0c5b8cf4f170d3e646835f13 *tests/testthat/test_calc_gSGC.R 6508ffd66a7dc0e05069b963f6818991 *tests/testthat/test_convert_Activity2Concentration.R 2708d7e1d9a01edaa06eec4cc1386d2b *tests/testthat/test_convert_PSL2CSV.R 1b35aef7bf18d5f5092f941df7dac955 *tests/testthat/test_convert_RLum2Risoe.BINfileData.R 66b748129fb95a57acd53a7fa029bda3 *tests/testthat/test_convert_Wavelength2Energy.R 18f3912635a3a51be3dda5e79856c88f *tests/testthat/test_convert_X2CSV.R a4815c79555ea3948bfc9bd54afb911f *tests/testthat/test_extract_IrradiationTimes.R 9b5bdd4c3932976a9272a889719577dd *tests/testthat/test_fit_CWCurve.R 91848da84ca024db4185fc9d4f89583e *tests/testthat/test_fit_LMCurve.R 40f07246427348cfb021f2334f8623b0 *tests/testthat/test_fit_OSLLifeTimes.R 9f4aa96d0bd5f465f9700b57fa08c9a8 *tests/testthat/test_fit_SurfaceExposure.R 3d00144a947b55ff63d473633b70198f *tests/testthat/test_fit_ThermalQuenching.R 3b395de68c88499311c89360681b1948 *tests/testthat/test_get_RLum.R 3f13e8866f3380385c8228bf05087c34 *tests/testthat/test_github.R 9a1232bf570a25fcf5d106fe628dcddf *tests/testthat/test_internals.R d2c03b0a20fdfa55aabf976c061e26cc *tests/testthat/test_merge_RLumDataCurve.R 7784fdf16b40b1d753986fa5915dcc32 *tests/testthat/test_merge_RisoeBINfileData.R 7bb4b2c4025b9672bcc68fab9af9c2e2 *tests/testthat/test_methods_DRAC.R 33b6f7dfcb837c564be6b1c16743ead2 *tests/testthat/test_names_RLum.R 
05a1d28dbfc14cd4c670b2c7dcecbd93 *tests/testthat/test_plot_AbanicoPlot.R 29d1051df6a6a71c4d8932ff7f9da742 *tests/testthat/test_plot_DRCSummary.R 7adbcd32e17189db33e6600231833871 *tests/testthat/test_plot_Functions.R 8ff65c605668d406113b03820efdee36 *tests/testthat/test_plot_GrowthCurve.R 4c2bbd7b07134f7605dc66446b164772 *tests/testthat/test_plot_RLum.Analysis.R 351036ebab4a9ff17f1b0f86d62510d0 *tests/testthat/test_plot_RLum.Data.Spectrum.R 81ede032e5380aa3d18a5f54702f7222 *tests/testthat/test_read_BIN2R.R 146690f74c00298835157b4c8e2205f7 *tests/testthat/test_read_Daybreak2R.R 7cfafce89ccc22399ce83573d0bdc08b *tests/testthat/test_read_PSL2R.R c57d70b7857418a75a988d0567560900 *tests/testthat/test_read_SPE2R.R fd7b6621d97cb41971444f853f12e36f *tests/testthat/test_read_XSYG2R.R 98448c29eea9b5471bd91ddbbc63f495 *tests/testthat/test_replicate_RLum.R 07653d4b213f88c281ef9c5881df9b4a *tests/testthat/test_report_RLum.R 11e3b095de35fd1c098ef51f21aff2ef *tests/testthat/test_scale_GammaDose.R bab755a2b009d3c896a5f4b1dc97f8f9 *tests/testthat/test_smooth_RLum.R e3f9184d42248b82911ec6356b7b24b1 *tests/testthat/test_structure_RLum.R 3c413b3a9be7616b7fd8feaf6bd99351 *tests/testthat/test_subset_RLum.R 460deb1b106e547a3537f5ce63ec7374 *tests/testthat/test_template_DRAC.R e813278f148e17597038db8ee7702811 *tests/testthat/test_use_DRAC.R 02e44c39805dd4af2a19eba78a5904bf *tests/testthat/test_verify_SingleGrainData.R 2afaa90b7e1beab1eec38da3d4cc97d5 *tests/testthat/test_write_R2BIN.R 06478e0319f31de70d530550667e0f96 *tests/testthat/test_write_RLum2CSV.R e5a3943417178a8de633027365eae77d *tests/testthat/test_zzz.R 71554f02a96d1da63cce50b2a754f3e0 *vignettes/HowTo_analyse_Al2O3.Rmd a28588f695a420a4f0bd866ba47d1063 *vignettes/HowTo_analyse_Al2O3.html.asis 67fed7a75d9f7d0436196d640365ee0c *vignettes/HowTo_analyse_pIRIRMeasurements.Rmd c55ae6925d8c2b477c23334587ded253 *vignettes/HowTo_analyse_pIRIRMeasurements.html.asis 6beca119f1590a3626e80cab485f8002 *vignettes/S4classObjects.pdf.asis 
8b1b6744f2a4e81f488d6375b037b1e3 *vignettes/Tutorial_Analysing_pIRIR_protocol.bib Luminescence/inst/0000755000176200001440000000000013604173345013647 5ustar liggesusersLuminescence/inst/rstudio/0000755000176200001440000000000013540751607015342 5ustar liggesusersLuminescence/inst/rstudio/addins.dcf0000644000176200001440000000042713540751607017265 0ustar liggesusersName: Search for TODOs Description: List TODOs in the source code Binding: .listTODO Interactive: false Name: Install package development version Description: Install the developement version of the R package 'Luminescence' Binding: .installDevelopmentVersion Interactive: true Luminescence/inst/doc/0000755000176200001440000000000013604173345014414 5ustar liggesusersLuminescence/inst/doc/S4classObjects.pdf0000644000176200001440000030670313604173344017745 0ustar liggesusers%PDF-1.5 % 1 0 obj << /Type /ObjStm /Length 1210 /Filter /FlateDecode /N 19 /First 132 >> stream xVmS7_d%P N<6i2S:繓[OI'0!@?;餕ճ{HHHCTP! 2T,.$qF9q$qA\/ cı+  ,&Xd4/Ihl"Ќ[W$8,hR]Vŗ!%kv5s_7Ւ:n|.*h'x8`Sa cown-5INYy*wlO7q/ǥz;[~خ8 //e[(1fZRJ ۥwKߓ)]ziI׮W@T'U1M]߮9O^.n.0 *'3B .đcμk368c)TܼCds`gaQW]VI:cojAq83ev.kwj>ʗ(Vl4FMl4Xnt|_ջx_~<_ϛ:A\j7]HGuۑw|HjBEu܄z~\6 SSI &BǦl oЦ4 V^3# `?SQ%i%H"k$[p2O-!Z=1kKvSMXibi*VVht1&hVc<䡱2ϮƫbinGrtz2, U=qqϐŹ/P`Ÿfy̻_I B>f?fC;?{vQ2۔2T T-{=' S$#kZy2g `dGI7aй_vTf\ʌ Pqe(ow\ʸU*Te\ qBqG'Ps q3.t8n>x*> stream GPL Ghostscript 9.23 2019-01-25T14:04:09-05:00 2019-01-25T14:04:09-05:00 UnknownApplication UntitledSebastian Kreutzer endstream endobj 22 0 obj << /Filter /FlateDecode /Length 7121 >> stream x][q~_1嗜Nprm%erI*xKd:UݳK*YV ht}ΛIrg NߝTf6=8{&霙v*j볯D}~&V2Nޅ5(3zOӇlLM=KL ϔ*-Wgrbsэ-kM5{"zRSzeM #od{3xv&)GtR))f݃=!1v"xaoІ]{LB[wޟ8G/۽T+`oϾB~\pQեĪYᗥLt;s7g H9 vÔDS\Xt ? 
ZJ_z6ʏ*C6To7%p gR[Ϋ4JzKt:&L1E08LE6ATv 0L@-an-vW~ 4!;U=\v@J}p@cD$=Qf_I ?W#jvXshm6KGR $kER -P4puJ-D~ٻ|2k(q4PAi?@/bƇ?|N$~*|F?1'cAggdk$weDng /n^LWi!fF\Ul[Δ֭(w*a0J)_6>@>iDvټӈq*.8-Cy"iNMCZ]q4Ȓ$V芎`D׿/ so36ce`(dA)Z Q"m #L]-ER,`$[`Q«C5I[?}edɋM2߷{1 ":Av1tJ@RdR B3X=2;j%9Y΍E.±Iq6*>P@Fn5I+L }6&Nxs%̘9kmAY-bAJ-[S*’!僼S%C@w&G#pם[TUisŒRջgE_ ͺd_Wi8oZWJi꺜]yݐKQ FܓrMa  \ʻP*̌u+6-#E@{, V @9;fllQ!X;2O!UDBB ND` ILIm;~IPyZD&m~I_״a~=Z.sau +BIvL;=324؃IUb1Ud*K^'eH@HJmI B'%$dߔB`ˢ*.H LBh -4^]CǞU7 ^4yS'jAwҚ6s{Qӥ_@A nAɻX1GC>  a5)\r¦]-%|΢By|P,US0Q`U<=yivӋUK IMi-}>c:#a76V/ %Ah|m-là;HMy$htb5S^\ŧ @!JiQi-B-8hcۮL\UN7^pw@(5c)[|=l?)_f(gv"{lNJƢX,0ầ. A|.sod-/E]K(f*T3? LUim+ :#iTnO˂KU#h.;%ta=9n86T[-g^}?4C#V-3cдT%dv*''."yn eXj5PF]Rr (TES7֏k1ؗ~lۈmLꗯ +ܶ) XHdҒťUrNb~BTnED^`iŏ4A>ݤ3-TL):Jm^.3'v󉮥 p=ҥ=yki?/|˜JrNGi즌g6NWH}WߙQp P\d@fADjHPB}wLvӒtY6ZZщRn>%=t%_o N3iA8ڒbb ,-pf qZK RC]}Z ^<1hC ':1l_|4I j]Nb(YڔYzxfRW<_Mn<>L5!0fkflN`K[Hc<"T[!5j ?i|ݩW;ޠ'5oUJrKv}"R)#R٨#Ɣe]_VJo5ҭۦk`~DS lPH ZhXL,NCSv}l;vSZ@i/:RK~^rT3ڔ^qqƗI7[? 0+|~ &%.LrA(% (t n=nB i~rL]1V]{ gl ,-8s9>W3nΤۑm핣1xPĎf %C=k4,e#9Z*WW$Rub^Tj|  eZ.aCJn0Ek)^ӷO*?TP۶FBRОNk:^}> stream x]n@{=@?7k5v"A(d0%rXNb Maoe>߆2tK|,S w{Nx/T!}ƀ0Ø| zbrrӃO:AV@2k`AH0yNwR.-LʥSI`J0)L &҂)\Z0%K ri`?^گS>_0},KoZky>(U8T]endstream endobj 24 0 obj << /Filter /FlateDecode /Length1 83904 /Length 39382 >> stream x}`TEm˥bP#R(ED^Bb.K% vlذW4"@ŮPEQŊR%ODB(-@ԣ?{jE_{*fG!KAH`} yxx_-I~ X6c5ѹv{v*r1iSk_ZOA햺P$jK]-}.wG(bP'w4U,-1l(\WuȏZŇo"gh3z=V67Z[C{7-AtZ@B!?Z&ٽmi|?t(ć=(棍nT=*Eȋ%]ԆEmP.z4 #/%3I^2Amg-Z<[!ֲý`Cr!zc9]NjoGoÎV'X+Fo;b@ 2 nF}g!dkG:=n љh*~]ۭr!OE#tt>"Vΰ~=v:''p< (}nAנ(tTH}'yf{R4Dد77 V~ހѰT@UGF;Rʦ&J_R}+iP/E|@vm۴nJ3sF PW]cӊg6 -ktd]~ۣw,vQjd]QV%VJy,I_:BF +6v8cARxi;7NH(FyƬw̷(#Q4 k_"ױ~Ox--mIɵ7뛩9NkZZԈZD/QkS?Ԉ6vdZ_FXPiZAwfA7%}юwiٽUJn4a4shP h4Iu+5 M!U<`%FF:/G[,afeߠ$7Y&~Ijjka|hn)44]7,Y˺^ϙV"ҩ?@nE1.>oG>i(nplCånJiGP\♧.6#s/ϴbϳĉ Y;B@@MUvlTyGY<ݰاL]Lxu, +B)P{bAqA0ҩyݎȊ RETrLw@'MV}-$a&KʊJK;ZzKYwwjl%P[+in% xzη|4zR V_e=6F7V0aX{XCYgM|Ay;aPW\V#_S-)m*ke 81w{k8c٥O "^A7HarzYB~cV?9LA)`TU`|LrNŁI 퀑 B:vch2 &LnTܣ ],Y"K8%eWFONwj6 
6ѯgwwi$'35mUQfɆY]iaz`Kav+DQFm`FLjn'mAaToH*;-ijnNhq*m̐sc?t!.ve7ˊ|PJ^Hdɷ~6w lsv[&u XI55Z|TtZ$9{OvAL :6+Ħ ir%mpM*_Ci N/ WZ:.P%$̴%T@`u4TǖLvL胋`zUҐ҄hg<t.=3,)З @DiC+0oho; 5' knD-(P₎EԙwgZAe=pEþ=܈>Nu`˥[pieVgS]ʥ3%VKҚ]Z :Hm]z8چcه9-{x.xNw)VV|CZz ' MAn`ٱI/uj,Rt x! JaB^O9϶WF_*T~ڥ6}z <WgƎFx.Ah췑 h|;@c1qc +} (_e:ͯ@9B+Hw cR;=Bgp}wL쒼.cPJi&dv˜~r@-@g>Vs  ػ=`9ʓ*cFƦ v0nF5*f2Ɣ1:x|iAشBr8UlɸvR0~wSe_I(h6 c)(}$RGCR2NqmI?XNE9\QAjc@ dzJIoI\%S9ȱ,-waޑc_Y˱,arRvl$b&TC{c:^%e9O5@gHCnc|B"2o}qT< k "O@:@9cVg.<z˾_ƥ7v"upP/X 5#] 1cP.G޿Х0GV Hka.XCw<Їo#= !ԍza_ߣmPZ:KPg)/B0>u^һu K?ƻX͋` ZZơ,|7 #hwNG<G)2ԏNBCn ܊:(?mGj>2[>ǪyDӁQ:zkkFп7#{ ˠ}9N7BŚ$VN-;/QkZ'n}'iXBl$9`7!0l9h*hjNmԚY,<> TІj|% llhhjg(-JWu̳^MXu쮖\Yogɴ@m8߂^8?*@6̥9(F/T(14_ 1Jb?9 v3 \dK߄yKM cO#a`ްd\=ce?Cb;$S Ę T?WkR! Ot41 Ǡ -{Xmȶj v?-s=م^s9Ja]}yCS-mI0@/<ұvC:NfE0߄XWΰ JTl.=P~,?-u\7&` N;B`>79(d@y]RJvh[j`}/XΣ.駃I=8`QSd 27uǯw`ThG}R0Vu#pyZ2ӭ3o9+ m;`Nɿ^RokKІKcg&ʙs:0~enZhAL,ؗ[erd>ǘ\3fYg=ftj-NpZYf5G+Rh\N2)Sa,IGwu>8FCOBӨe#Lcs"pV`{P9͛~~9W([k`hH /^ W`Lr<)fו^oe '?r'1+hk\AFr g)yDnEKm ^8ziH^ѮN]*1q,(U'͇yz/.XG;t3V k'@-Ia?؋ЖRԘtp3O\&ȯFRz qSX ϵe2n^Mawr)Y+8HzxbJ] d/O!f&T3)3>5GSQ>yԐꗡ!)n$+(M -Xߙ= 8jПwj{.&q o$l+ R!='QHB!>y3 Az9;MOy#w|+:͏n~%'inXC0 r;[ aͩsN4:s$Z'N9NGp>{}j.0@׫5-;gz|ssjsֺfzYMq/cHpGv^Kv,Jصi;R8?þ`;{.y^9l>@QG󯼻Pgv9,XM=?}n{X߅>j'օ4)׉i{PW坒|h=?ܑ&˾vҿ;]njOkߡ4n5%=NO8;.]? 
Iqi!:ހʾ==ߛ/GGXz uةG;LVC=v z u$!AԀӃ}E+/ذ`I4YvvȐj-?@>*"ʡ] K}Jy#a#"`&.zpp2r V$mOl~4e܌9Nt/H9r_^`[ l7on!'´?h9I[=͈Aޗs-^:dagk?9gYQsڸyTl@<}d^Zou+lXܢ,ĩî^L>@wȷEX3^!a yʦM9cn=2h)ρ1 溩 Nh,S~>ƣm$ Rxka]6 > #GǤ=MoFYBGE7t95`9Կ` r; נ|gA4 MSZ.Ca -sx7yd ~AX( eG|u_zg/GɷbK>4V:9B~|?ݣ'=S {$_\gp&{@\Ǩ7>4ȷ )Xn| q]|2%ڭ`'AX'; הC+ը8hY }k-!cp֭WAl2Mgm^̀|?VOՆOl힃 \m )F!_X2ecQKu,uRX{(|]"w5G_0_L~#:6Q˅rOS=uΘz('9`lo=F+ Ik< pF?!lZ h*[a_jM/2.pZPY C%&~۫ *@+\{9h*.Fl9@2m+rTys W`!ʿ;ۀxIRC2h(=?Z퍭c610G`r(I!ɷZIF5`LXZ%6xfwU09 i0$`tczQht%.yҖ!ַDL76`|; |8 v,2&s[{>0[sK` ,1: /SAwcRr17~B!qE:x|<tCAA]uf򖀾RRTLqm^#PvʽL(Q/6::HڡQN]p8߇w 96:lo3$l>v ;g?잗ʻ310 Zsjf: &DNBd}]|w}cyo\_Saΰ;2z㷡0dEh",m|;m|#QQdG~Gc/Lqtxx Ќ6>l|*Ksn2*siSƧ6~:4?Neq gv>RGz`' sSi sl#W6m\/x]]AOG}x5YLMCf%6QvU6N)2qDS6QZ 67m@m6.PF.7QN6Di.sm<ee 3QSl< I6el<^`㹨U6 xȕx;dd}>_x>ۣ}6YRSs99]pxVdWY^l͜ QxRx+Uw(a究(HEI↥fLE߂ " ZQ 2jP#P=C~(=G0C0y'""~ /eL5pu vpeKJ h@ hab]F'ڽ泭:l& jDl^^ C/j !5q9U+xRBp/rfd*)䟠aI6'mW]w Ъ{LQ5%W7϶-ydsZe sqԲVc/$ӿ'ifHsТJC޿-rC>#; dPJ!w%N9y_$ UF5)G\z{{[ k+/ 1^--^8v=N{[DZowbߓo?JQO T9c#B_{dmEw`Э"/ y*^;q{ѰݡCX2¾@$ @;t]jnO=9!d|ee§! g7P j#uJ0z}@Um]8=*8K9Cpp[a(MUłp{z }'{,ۭI Oj0XDA?Уǜ9s{*=5ڐ4{EF-.W"$(&wtrwPICַ_}ztOӧs {҉O? Aڈ@_T@EdJ瞨jHE#S}a{P$ ;q|#bup ESf7Xw.}sr}]**sǑq!*ߘ@'h܉_y7ǝTvꈀw(2'a{'}1ʄ2?6M0;?5DuԀ/*)^P(+f^코׀4iRQC9C6=Uo{gڽxR(ZHG?dF^?I=[I3ե(FQG:2@&JC.2P&B(V5jڢv(Q{t$: u9#:cP't,:uFǣ|+膺'*DPoNB}Q? 
@'4|}`pNF<lJ>Na=NGt3DglTl-oD*=N̈́U >:4 J ֞90ףy_<G Bt-DK"tj@t9B*t5]Ѩ2k:݆nGNt[3н@Ѓ!0ZAF=Vh5zAkSi z=֡ "ڈ^B/M*z @oCЇh H3V mCv@_AdDߠo.F?Og A^SKC~tDq iBkZeiZZkmZ;-O;BkGjGi4Q;Z+Ў:iji.ZW] ^ZovvWh'kSAک`mv6T+Ҋapm6RhcqxDM&i)TtmVMfhgkeG+*4**ͯZjZ-iѢZLj>׶k_h_j;7ڷ.;{mGM]۫kZ#a aul`M]8gLqŭpkp>H|ݸ#>cp'|,>wpwq^7O'Ᾰ@| OŃ|p1x$G1x,<Oēd<OŧiOg3Yx>a.؋}Wa?q !\g0(x<<|_/ %R_b_W|-^& ߌox9߉w{>|?~?(n+cx~?W'?gsx^ E_ƛ+U~o;x3~oşO6o_/o.?Og ^>`B%pb4"$d,MrH.iEZ6-iG$'GH&ѤC:cq39@Nz"Ir"9%H2LS r*LPRD02 #(2!c82 d"D&)d*9L#d:9I"3٤xH9 ^#I$$HjH- :2IDI&s\ROr.9O ȅ"r1YH.!E2@%rr\E&אkRr@n$7efr Fn'NrC%Ay GI#YI# |Hd+|Jd;|Ivd'|Kvd7H~"?_ȯdN?ȟ/'AjSB)eSTPQM4flCsi+ښmi;G==vvGz DxzBn;A{ΤMI/Gd:*t0BOCi-`3`3xZB'ЉtLЩt: z&=Πg2崂zV*:V 4D,1:Ρsi=Gs]@/ҋt!^Jh]LJz^CKuzzDћ-Vz.w;]nzGCa>Bt%}'j$]Cҧ,}t}H7җt}J_7-6}n!B?ӭ)F? %A_ӝ-E#L=7;Kҿ>i!11tfy&Ҙ ɲX6akڰcG|֞ɎbudGv ĎeDZxvºn;zB֋f}؉$֗cv2Naةl0NcCY+fp6dh6exV&l̦t6 v&;`g2a嬂yU*g6U a,,fe16asY=sع[.`l!]X[̖Jv]îeKuzvĖ-Vv-gw;]nvgCa=el%{b'j$[ֲ,{cl{6l{^c7؛-6{mf!>b)>c %b_-žc߳#~a=7;`>`Y#q qunpM]Od>OiOg3Y|?q/}Wq?𙼚y !^g0(|<>~_/ %R_b_ίW~-_ʯ&o|9w{>~??(o+c|?W'?şgs|_ϟ E_+Uo;|3oO6o_/o.?Og ^?:5D:ӹ.tSO]zgYzzNӏQzݭwԏ cNqzgxUw{=B[@}~>XՋb}>\G1X}>^/'Id}>U?]33z ݫJJ}^Vu,=Ggsz>O/////}D\BRJZFV___ߠߨߤ/oooooחwwww+GGF}J\B_?OOE}IEUM]CSK[G߬o??ַ+k}KN^߭7 5 0 aF2ҍ #2#he6mvFqo742:nqQ`ct253:+э & 1FQh2z}F?`l 4N1!iP(6ÍHc1c5c1јdL6SӍiF1888˘amܨ0Ϩ4 0fFШ1jQg2FĈ1c1ǘk9ƹyB"bcqqȸh0Kˍ++kku ƍM2fV6vcqqqqqqqqxxh4VǍ'ƓcXo[GcU|">gs]|!;WkS|#w{[ ~?/WG&~{O'⠈Lln0M3tfiffk2[m̶f;3<7ۛGGL<,01;ǚǙ.fWa4 ^foyyg7'SA`sy9,2aps9e6ǘcqxĜ`N4')TtsYjN7042gge,7+L3+*o̙f4kZ3d̰֙1f̜m1`>h>d>l015͕c*q s\k>e>m>c>k>g3כϛ͍K&U5u M-msj~b~jn3?3?7__;̯̯͝7.;{skaie3̃f< ii844#MiiP7+  zIM0 @eGb55yٮ8@mW=^y%/LO]]847!Uj=~]􆢞lTFu~_- xACJo{T Uj}պJӆVʠonZEC-U8/4Go!h]/+ShըO%8!`zJy}VSԪJQkxBRo[.k}^tD$#=a_|d4x@%HےmH˒徑+#G̴Q)mLG'5KN:c<U6&3I:-^=qZK)I\k6)V_m>@x!/{-ED+ [-OLi9D尕LD:RI 7 zɖѤtʨ)VWƬb bS V-$9>=RNOQqN ^'q>2p%wz"p9 j"l?=E߼Lқ|r ML*}5冪HzR'IVDXN$@\R35@&}@p UT: L=1k>QBQj7Ld˖CH("0Xɰ1R]p Ն"ހJe!:G#'/qE A]>2dcjƴ]SR5*)'Mc1z/a=`n7D@Cl0*[baccx\ O vӸ$@&ClRC&{b)PH }d Y6G`z|ŭcZ(\S%a^_0matJFURVV-jcxnf e 
CfDS)_BIL'u`VdYH::+ S,B~ C*Ҋf$.ԑf訤1)%q?Y0:Y1 eh^O IpZhNMÓhQlhzR9=#z@ўZL5I6$)8D# trҨ)ʤGv&G Mڤ$O$E25ʏm^,&fM UFSKI$pH@R@1zShr`' 6eFShu5ޔ~cPՌ.|G=~ݎ+ -0 |FEɚ嫜zjS US 4dy³FTH"YmojCauO$;Q8 }mQOeJY:e$#HY#ZGnRLZW Vs \^tATF=D  @7u0%Y&lY ̴@/D~-1<;|aV W{*oT;60)2jc!83ʤR:[h)%i`KJ5zS11fIXɖ0ha`}ϘE1ʦ=5lVj<^GSS;39-ɨ6cߜQuBw-,+xz2}v 3}QUSMN%QAE8Q}'`mF|>.#,?/%Ux:lf|NNƊU'08[Qfm0Z-,H,xʨDtWnjO$̦)fW6,s G:mCvpHr4ǦEaՕ!PFNH`#fG'!ppgLۄݢO1\!vQ(z# Z˖B6 XͥMs08&cϛSdpN& 'BMLD6p}H2f'*'dWV'ڭ 9 fC79hV(pӆF23.|lT6+!3cv>mVDYS:"% ٕ*/C՞D_! w)<&3sRTx aX=U2k!**! *..P#!RӼQx4$j jJz6 Fe@Zj+V y}J1RT^%[B EY*{ ܕ3D].sթFmPZ$Zn*PuFMFqúXo(¡pLxXjCA)'kڍonz9a|s_b|K}j-"Ԃ/ԵR %QEUJYTf(jBPUG5nQh:RsbJ%KSŒ蔤Sjk唤ShKR%׫D&tO\_+Xi|:OO^3 ӭV6y3/?U} R]r&O)]*bO7*±y@8DAVTE|QO ՊZ8&rZ HuGcUayATy{⵱:!M@[vdF&14 Q]a:S˜B9ivdHPSJ$ eNV@>OyhMi#o:uk3֒DR^Z"H) ˬR\Jw22jTx"f"H|+`ͺph" kȵ^~3qWތ,LOش(uo܅jKajcv$ )Ͷz"ţ.UBq|̊y¾9x,<ˢM v#V3=Nj] GsR߉% f+n[$[1784CZW". PXC+S!wel{5PUH%Ǻ˺RUM(^ En5{(צAFMS)Iהlm%mBjRs-&LoBobbӂpJ3#`LMv,7IJu$d 󔼺Ns[usZEt'U޺NRۤamaKm#[]^g)JʕEH{=nH^}8]wPS*\WٙBMn3 "WfS2*Mj+K陠HQ*訤&5̒*fJBLOQRSkߤVyRVIbj(URTD'um><:Ĕ6Xf50VI589gdy l(l†0=VF-L"g}/&9rÝo{F;HqsZ aD<.1ؙቲ :I`6.& ,0A M{Һwa떎އ6{fCbZle϶OMxl[:n\xHi!m:^-٫6[4[8[L܄Ƕ3oa6{fCbZleT.k)Ņx|aZM#վߝُ'Ή>~9**EyZ|Bյ( m PE,uǡtlr%ljB@SX*TijcZ|JS UZRSa*aLp+#U!/>"!aEC);OrWeXM1ʹ|vޡ{-C*:h_HJCqXr&r,ZjIpЊ!ajФv۔TmrR^I)h")ar^- 6Tg.MMR\7@HcN.~q ope! @eXwnҳ,Lf@L 1=,g !Dt)imʥ9ǒ)2lifض,-lǪ8=Ȱ*7&FS]2X ,͵YA"moљ"`7q3!32Jzfi dQ2RlzA17 -[ftovAfږL[oYlfs< $dҚ.8!wsRU8{^Ț`g3X{v-e_RD3s|ea\Cq MTq~EY?'mpzGT'ٱnCl$%7N]$kyYԔ'+7Dz.aR ʓ1沱. ~(r0Aie d-mk:פJ' X52m xKf>WwkW{u)qe4a\ۺyesyhl#ߎ+fР[v{u84uA&>uSoEVi$1y]ݝ]ݎUUn-f[7vj =m^]N125~[,H&sא5dl2o XZpe[K,ǁ$iZEme^9}GzjSk_R_ZoY)hĝlp6NG8ќ<`cVxwZ#p1WЙk"o!-[ԙkyϘ"`Y4߆l; 6[Zt!zmˊvkGUs孍w'Uؐ vV;DCfq;Ĭv!:3C,Ct!f gX/e^Q VT X# X'tU*' X.`&Ovu-dQf<"!}4| eėv|͚Uĝ=lc|*yƜDlƪn|5|l\zYlt&PTOF`O(,ׁ6ju+:9ٱ; eEVgWY: ۩;W;4wb!/#s#3Te#lw^,iogꞟh;ӑ-3 QX*u ST~kjaVMugTytR_dco[c1zE\MH{8Sa$┳7vzz]Do~goo!Yh_ZRnsIv}fIZK qOїX7p_bt||Zfջec\*TK7exi禔K87q.Kl;]%l^[l^kKٖ?IfF~Bfv3qR5& X.`NJjk"`TZ.` ΰ`_. _. ~_W. 
~_B_!W+D""_W!U~_E___)W _)W _)W _)W d?Y,O' ɂdo7Y,UDU"*J_%W%W%TWE"jZ_-WT >"?Ղ_O[-ʯZkFkFkFk)o7E"M~S)o7E"MI"T Q?*GT Q?*G_+ _+ _+ _+ _' u_' uo[' uo[' utw;].N| tw;].N| z^z^z^|zAi|AAk 6 o 6|6 ?C!f>3 g3C!|f4ˎkg:>.a\_ޜ򴡷-|bзz)i9eE;#Z;x'-nk$UL.S X+`޼*.]mW5Âx?fu%{;mjopw2^˃=%inZ6h(E&Y#LEH~HvyEc5Zg!jc>6;ù:s>[EMElWl`cy#O] O⍭"&+ 聰^g wVLɤN*;i=ɤ֚JZdrZӒ֛Orf)JGfUOY]].R S:.kFcݲa/O,>w :VCm9A_xe{qԹe6eOztF'B>Ow]۟~E2S'Z>P½3uE BPQ.>zF k ¡ y-UXm^~5Ŏ[\qKmnlOKw#ӑ4LMǜ7ot- hMbNxUyBKtgn"m969bʌшgLCv4eI[ qDo[&5,a( a}҄e]2,ξlJhIѩIj٨g)ie7M;ΰ99Q/Z=Bojm֔5?NBCغ=z(=+66xw5k,ujmo]mZ wgIT6Qtne H%'޽ikoj [䥛;:E,3ZFO[I"QZa-(TnOf ,Se3m]$Zy#2p ӌ⿳̴9ЛkU8-,e/L+\"+ '=:72i׊Id\(ey|3Zmyu6%zuk9-g!4= c8u8ͷS 9Ӳ)aOf}gϒzYEmijv5z֦zʚY t̘oIă[ZWmkfomĖ񽀙gv{BTXc~ͱ9ɖYdksNsam!KaV'4tNiM|sW6j0k)Ls&yⳄN,nM;ӛd~texT,9VTzNhRO{YVɬZSG[-U#T5bPq R a3lc$OHV؞ͽx|$/i']Y9-}|!^ l%2Lo`$2\S7-y22,`z&D#Ǻe$1;곎vJðU',$l>+p@]ƓݒY[XV :W0Êud{wʈ58 ՘?MZFIѲŚ8[Az{^̄S^"i![I#lۇzC+ɜ&mY8K*]lFJ 㤴{Xމ+|74Vgojhe5t2G5>l#/XZXHKZε)u]&1g4'CSCVzA,*\a qlQ[bR]%WqƓժ-ArO}vzxC阷ɉ6td>De>@} ֵk8I%5d-YeYLܬ&dYx{>/ e=Y,7Z{:yܤZ >t>cZ7e$~}+ClRJ6kmK9FMa}b:ICO&k-5,v,:a-? mސ6Ӹٖ9ZEV ηN3i]͛{<|>8BJNdm~8ݾչGṷ..^(lMXbK2%? }&S!fzx.%* ~6woX݊FW'sҴ>eʂ6q"ǂbd6i :&fmS'k3vپS3ʹ]kmخ"50ovkZlخ˶$f&[pJQ<rx{B_tPy6t.f)lӚv.4}v;.+qR#ccr"٤[4BvLZaY2/`;=[yż@L-LC)q=|Q͗-wn7_?O"}ݝ}.;u ʎVϪ=uq4w;̙4;3:̹Y\ۤe}$j,ܤln\7\*Eo*osKK73[߁[Ҍꨃ/$޴=DN"Č->h4NO#80vW1wXYn鰌6R [3olN #fiq4D''O8-Økr֊5e_Q?FYVZ4C\Ѻ:ΘȞv[0^s.#=!?)O4}C׃ji{w3mNF,k}wdd5Žd ;P$XDT0=X,?awd1IhrT̚gx@,k6²DDRX֒X*kIJZlբ/ M\.[+wEsjsEwʵ}8G)%+-/{[Wc#}I~LBI΍-3/步kͅ޳c5ifֱP 8iV']̤0K>ԭb c ҃$i~4SCR[Q}벳<kRqY)}-7핛4k"Ʌ 7~x@l 6S =)5;[b٫DNPқZ6g6<|ݫy3"^flASĜۦǐm!Oim^M KL\J2ɱ ;}]-UI%aO[:vT/Lw8{;SZ_Wb>0lXlzKG渒Lp,D'@nű2B`eg/3$yD&Y|i iI2'sWXNe)B rc7'-i؍}YCt5u];5w,>e$GJjkQ+IddEm^2L@ AkH""y|fE`/]̜T녝o@?IdaF.\[ラ.|u]}MNqueWӫիHڰoމ9;%k;uCmUom[[h=J[lW/r.@(#|!xI6vm5"d[6Èԅ,#G¢o20phXtПa42ǡK,|FbGG##1oQ#{0;kD[O ?t5O"%L?`E/P{Nd,Z?&k^2AZ!` ϲÈKx*? v0Qˁm yop{ఌY%o(כALR#GYy#XXD6,+d Yoet{\A;s26H. 
CfIc"NΕ"=2 px|Qlm=OFbKH9 wֹ/%iX*w%%|-{o<7 aQ5N/N׺U!vWų_g Cؾw0|G.^OEϼ BX p3(n,Ko@1KSoP,Ń;X0=7\VQa3(荹B|3GbZ/`d0{<{cD1Җ%t.H2h-#$75I}C! yh >/E8#'M%xī//4|Dxk? ׷$/Jr--j+&?!^b Bܐ3D(}>ޔaKA푦 /*AM-40]f.#^jJ΁s%8 &B﹐?Rr wTx.GgPnu(TO!wEH/B,$;;~11='b*.5Fy u˭j77[@~DE*-,*2 ZLΧ*O a_zn+sN:~ةZ-_L/oesX.2eVAcHSm# xE2xmk^w(^:d Xeáj$B9 ߓ S^B%Rͤ?&* nJ < iWl*^H $2. y70 ݀ۏɭ~^lP_MUhjD1 V|U=i:T=3rAEŹO#ʊpŦ&󵦦ch:#rmӑpe Q:+U4Z]C(j(Tұ4;7+:MU?A-:y*"6H5PU5( /]uէó ^ ~FYD @(3a ءJB{&YTD@H).˲5R5iOr<(#(QGf 3FB\N?ZiZbpŴߖO/J%U%>j$a|-++6O?cQ<鷱k p>&dswqN NG q888sH5A#'7Qq*sBnP` KMp3\E.r|WRsIG /}=:G%$V)>|1֋<W6ZcXi&y!zL vcc++rBAU+ =v ߹{~u''Ɛo|֣?斍?hzP^{~zkHFq?DGl$ 0f$'MVC%k[AY*PGn ]S<RNhh:_]_ĥ:uł#\N35i$t߈mRύ91 ݺ/'Wrr%݆ikM該ȩ&iM3/%|%+gYTTVTW%U5ž%{(/(:ږL̙$g±/`U/up߀)%\ &\ůDu88qt[b æp%+&,+nN( NX Q޿D(95tUXⷉD E9Ė'{vc I%ob#qlb6q += J&0AOP uD/q, L,I\e3M bٌ^U^8KfAQ|G/#/咸<5=:͇wT &/-hY*U]JpuF){<aJ`D~dp^u@ BGB'B .2X'Veya6= >aϋpUU$*d46y>zi{' +.l:Q#Y6){鶦/cSIC9.#U+{v` .r5ݥ/J./Vzk]'ӹr%^Cw]XxA.o/9HŨNybpOl֍]8vZW >">Z!UB@[VPA^^d:WQaÜQ$fEjS(YazʇWWy ɕ֗jj'Bji1NJG>89]wp'%ꁅ8msAS',TAݮ9%eOَ'@wP:v6IK822UVYW-C*\9lyi=(>Hk<[[:=[Gu띤HZ>vT47O-[cLeLQHfTJksJ-uT0lݩ X ؂:exZy_!=t]{T9 PL|ߔz=X8Irhma!]j(EWp]S .\MHÉ 'gW?{m7U{4x.fġ?1lʝ\7͑-`WdL?On2Q%Ç:i/;9>w>5FE ) QL2 &`ku5цG|j68ɼVsa:IjTIc6f@bmJ#sXI))΃ZvM_vo~_yiEs(hܷA˧ca}r KZıG.4I{$quwZ+_LoKpѺ 2٫t_b̾,^4ҥXHde*1H!UUH'q3D)jl`hSN3f澡v4D9isX$^$U H%1P.% 9}܃_OձL+)yEAU(Ddֹ 7%K.#Y@NRC͕FC&:/oЩ Q,.CΓNMuY$ls9.SSF'ɚJsL]mk3b5.PoSGTMBtG>QUBzr+[PyXnʚEP}G͗}M|E>:\=4aWZδ9&t4*ab^/vțOO yd9*y;鈍WDA"1504h*W>mT7cA,ԙѨܮ:0.̙3s26Mx`rLj%`t'9+C9]FL/vIBfȧ~E:V%pB<MA9րFAT/JrdxX{4z.:,Է:áyhr9VES7~!G f >2֔Yʸj{n{}d}Rd!J4*̑xH$.nJ^e&|d%`(xkG\ιNzyy7y?B\X_x}w *G_"KPY{2C`TUHmF0&4r6Y^8$ZLPԕƔt ,&fNGl{M/`_/f^/Yvu-QFbUQ H@~G}U/|ܳkܕ׭Ye퇷=+z/=꺟Ǭm&mjҼXyؑL%lWPܮܦPE5HOn%o)QZ  T$-)+;:﫣E_[UNpB?>MR[GS$h6IHt: [=juA}H LMm^UQ[Ob ROB]Uh]NoF)% `;ez~=Sp+iVT \iU狚֗@-Y4WQ? 
-HGź&}8r܌Ԛ\uz%WBwK-|.wv-xScG`B[ 9BF}qKel 퀔XT*޽L>/Azt:6f'Ic}L,!K# ']sEeRcwB.yuGaFAߨQ"0ehVqi _lpMI'9IBl;nˁN!P2exd|)!t1\R7p\uU WDjEEYh(55enXR=T~6SgwMV;vݻ^~F~K⟿N/ؑI'@^*f8`ӎ2yQhmK '%hUJTI U_9!;[kL#_'Fw ^auN2'U25d*4“=~˃2'4i;Ť=,C,D#9oXr=i%(NJO׉oТ,UX:%>%סhKAifI5j({R~UpNhY+Coq#0e/Դд(2d`{+/h }Kc{Kдu`U&eFjJrz5ƹm7eubi\ɮ[C|0kuVJ_PԒQ;E>xǪf}= D|xb?AS^g9o-x>KoB=k(|MCL0fcqlnϞ/dժ~?t ũr>v|}NuzJ=\zz.*,Tl2Tq4:tSTUgKTR4QJ)&Njd\wD^ܡZ]:`=ZG˯%f P#_H(Cϓs+Oϱ9$\|ѣjE0 Y&6!? 0L;?D~'|YѠOC{P;)=Xl9JKjzJT'T;g8Xɑ;gdS|̽3W3y%o` iH zZ/ЂXPd~*֭xW6f#6 lY#ѣ7wncw|O>;Cm'm-B _3.25n63ԍ})r $.QZ2%%Z>I, 2Gcʃ*D*'~2ĉ|;є²u}=XFL].hdxR|jA`B^4w<ㄉ󝳝tZ6qAqN`s7=}d7۶8F›V5)c05ha21hԛfOk yOڦ,qԴ``#Dre~:Mn otG}GrӕbXZ\kz@SjҮ )"` Z3v*?uI}=g?q l~߾yӞꊆֺ5?;y^o=ǖ3(9w|J5p9TMO6U3&(/otT]F {z2ħ' YRKD!|RE?Acd?-5= %ڏT`b|E,+77ŵE&"T4HE :i0w9>r;:x.xSa@6ldɴ8bvsF˜o;O:?w:Tw}[ѰJ'%4}Nz>S_?LP4ݡTjNEUvXY.R KSsT0NpK(P; DبF4ZŎ˱C]7PbWk.&"*+_lZ]J$"?c[eܡK&S7bi}j"c?R?+߃>H|u,Ů/~ K]ވu钾zH;-2y*Cg6]@ְq< ge6hiohSNR]i  ))AIRc`oB;4@Đt .l"9L%j <1NV2%> stream x]An@ E@أH7&VU 0D,Eo.H4[Y}s[9S|[S.|bהLv)vYr> stream x}`TE޾\:ՠF Rz -&(K;rɅ+@(6Ů(6`EQl(]lE .!*~l{QBh"p򴞽wQy ~}Y>7FVCyj`]E~AW!|t *WG BݺCV9NmT{[W/wg׆"Q+ x6=!cڃ/\'Kj@ޭ~D`KT6Qh7q ,G7x4 .mǃѯ ЎE/`e<.͌^+lD_; In8H݈C!ֵ6$~>ʟAtZGztՌ{#-Gb~ĝh ~gh!Z=B.Bw+Cv?ъEh8zMFyh=>A@OGߠ&!n3ǰSZˣ~ï`U)̌XD?ԶPF|^].@x*f`5X$Zp 8o9^ iwg>x==ϭS*yه>BߣWqz/Ͷߖ¨/ 23yU.C 轜-!e~Zjgy!~LB)ǫN%yd>kB y]{-ysQ]M7B+'X 6u!O4p@~}ճGn]O8qv>&NG!ڶi*;+3#=-Օb C n]Cڏ,mpf\~سettYj`]PVа% ksI sOwA xL-vN41S3;q!r,TX,aC'v*Sr%%-) Ѥqs35'u#PuIj-~А.]A 4Գg7AMj lٹX[E;S3”u)ܑ>H:3()"jaPUp: `,n ÖvDɆKTsl h#tK wa-q붹~ TVzN+n `XH g (p_r~w=%o)͗ބJ|(#/9! ҂̮ R_. 
AݤN78A;P ZaG!6cjp-Ͷ|ϳN :@MU6tTyGv`~Ouu`vỲ3 【vоX__ UxA{Ke(H/DNW l6]%hhq'& 3e5YR_Rw`m;_zDsCݻZ\zߠ,>iO=/q;'DziX^w~N&y`WQrݣK=3r׹\n51]0jiICF,m SN3XNs:e8<Ul3x{93\rܣdxQ!!cɌbg/@x)sA`m FadܛbSAHNr-0 Aaєb+Fe9a=ؕʒNIdS^ cndr­atpg,&9i9DbfW_CvUM Jg_mFoRȄ`KnZXQ*im$APUR͒g^˝\gş+U6;dw%'UN'c\IC iߪ_йnC0m(]QopxPLQidɱ~47qljY9 Jo@Ӻ߀/+ـ/܀܈"gӻ|h23Nuߔ_["Wzn/IP/ =g+ G-[ :K Zpv8٤,kMLZ&'4IgG&:ˤ-t& s&'[8چccṇey-[x>]:Bjٱ +S~N[[_"RdV6i\z58nh9k8e#3xD~7j zʷj,O̖}BNu@0;|=3P1@{RۃBo]`eзe(@hQV>F6C wgI>)Aɓ vcЁ!Զ|owW [JVqm$ȶUt @>b%]snrI>#}S Ď٠/mϙh ZMJTPp3i/g|˖%I'ZW+곜fu15GHr^K?! V|ȆZq@Er>J1A;Uyt"J<]2ȹ, :⎚I(GpO#1K g߈Hk4NOݞWA=jjB,v3ve/\ypj A-?j1>Eίɰl_ XiոՉȚCRo9Ҟ/ո.r^ #`~^y{!~^(궕4Ubb# ]H?DtVƯ%KC[N_:ʡ J66,v \-S~!πЪKI24Vj~e(ېv!ĖkP;Զ@y c8q@فlThZ(]l|36mf5ey)%VC0OԙBV4 R:rdşN?]P9Ȃ5NO0ЧA_VB*B4CqZ\mIUJ= uzg5']Rنjs/x`dz @p!? | OlAN= `a <Q7 hσ<<>͂U> o 3!U7@OF. D QWվ@]ijK ,k(\ 6Yql4r\Pv;ufOl3:B]Zhr;m:x'R?ڮqL4ƤUl0ȫꁊ|oCH" $ظkTT$NE?#6dnFw` te^eػF}yEQuNK>H$@? D> KMgǂ}Ѿv_Ch*k BP\_UA; {B} Jf ć`:uAh^LTk8ЩV>^:P.UW@UGR!qI^ YF+=҅Q-Cg\/cFh@0_< ;0/ Ya-RB |0RTHw Pφ=| ?U fڃKS638J{HyH @P\{8m:_-s`4r5h5 :?PL.:_k.' 
!+>~ &p^آr39HڰK[:GCEk-ԅVH_,Mt3htP*& ~+\ȏDWd+md>Â}hZfk5 ~ {,hh X4> R|~:7G 7N|U8^= 5埡.34L _Uk[T㏬{d*6$g&ay:&ˊ:ת3_ƉF87^`GB~߉[ ShCJH!7u5qK3+ uTwqcŋHkMnqm450ݸ[dc _59m}NTFsd(A;yH `g2o` ZmA j -fZkL度G 5m?tiuMx;!|urv-OGysE,qpu~mTzEz+XLM̡x'a5Z Cޮ~4-v&[OL{Ҵ&{R؏:{>=!0'|$jNBQϙq~ ܫI9lGn N S:9iKZ$ M{܅tӐ=L`Omۓ-+4_`e#v?yqw|mS?)ΟV[H=Mq": O'8qt;.v*g?!k̒r 6*2Ty*dަq!Btk'p[G+,]NX6ֽĸ ]6~vy8C|ςuU4.Ehr>6;F/L1g6 fK:; u~5սsfguTwZȺے%p=l a)>GvgO~Y܂kv?VC/g9IW{UF=Wuu@hBr8õ_yw!mDiq4O=zOa;w8ߣ݁];f)T;o;%<{#uRI?rHN6CԾKޱ^gM.٩sO_Sw\Nwl%R?'l: vc (wpʇAؑ@ F[߀~6 g)$V`&].Z.WIƝҭ܄h;h=ru[.z|WkIq7@_A[p2P@|p"d%aނ~p߃)s02϶cG.loG_!u+mD(Ni)h3y\L ME0n9L~-Sk.\p tz)H;oli]?-W o{z)+7moD]K b剷!6ݑxC1]_>b.NYtES|# (_dXPqG[zǕ5h#rD`ɝ e3>Jg-~V='om|$*pdhQݱy=NOōgkf<8j Ew=S.c䛈ze*렴|ې0Ꝣyz!& .* 6Y>CK0|@FrZ nF3` 2 ֕p GvC*;RQWVC7^.}r\+2{݂:1Y|5 )c(mw 9  }'Ro;.}px5ԁzor{'v!#T!7BXK;3_P73fb@V>o徥#@_7w5mG- bH{*7OWB8`{ڏPu?Xz dZޫ UhPkzg=S/4b['P&&` ̇C!9EP_»6PrVj?·Ạj ~4N[}3ʎ9dr ƣ9ESLTk=H٘Qg=|NYк3jzӔsH^X/ø4g/qaG#ǐ^nk\޷'9l|c.˅RMe.l#H@]A7~L~o [{|R)`M~Xيoo#?GH}ꛢprrk*HwB|k>ۙ- A\y'sY`(N-381BƉ<Ձh)[?D,4:P ̵h1 ^Wgo@oA1hY6#1`t*A02 sasOuD89װF\x.2+(uފQ πO`\x}C?qOw`bnm7]Ԅ1Io3v{vG} AyuY!ʵ44 b, cwr{qԛu@_6+,k{Jy Bmqā>ڀ֘z7Tp44d@OȻ`lA`6S0g@s}~!; u{Knn0^;[f7/5 p 3T`.B7`#Ĝg!!] P鎊K`O} <=Pm-e{N[R▀!pK.@d#o@(uVʲLW ]7:IN;ٱU<7^h ɰAh)/6>6~ā6{@ fwd5]n#Q1e6^G6> _ǞQ~XTj8otAm|e6>6> n "/a :=eOFJSJ] "TOA:cQ {im>Gꈦq8EYAgH?9Lu4(ck6rҿqjax*q%.?XbWظ23nqZx8G)l\Gv߱\_۸@iٸ2 ]wTPfmCd ϰL.3fYPRF2Wx+Yg㭑+Eo ߲vۃ>xkPVndQNl0YYnd QPxSo5A.E/SxYYg*E g=;Dh T7Ux*k"yeuPi_ECoD(mBpK`w^y޽tտ{^C.*$RY(lP֞سyzx*+<ա4EPY\PѯDHz~ Lrӆ}@1ݷo^ 3=Az23Up (̈S /h'. 
Er@'O02-V B_,jF|ckxGzjQOͳ9Ն*ÞZ]0lMTz1ܑj;+ NPB>ju' w'@d'*iRdG,$NA'%'r ‚ XY(p5+Th-Ot# "!_NA @Q΃FxV׋*Iȧd@6t FO k&OkAG;84i'T""8ґ2Q rTQDY(BQC(u@GPG(:cq :{@wDPh: FCIh(: <6{84bDd4N!SP1`fSih:JeG ٮAauOσx]Lt:sy|t].F%-AKѥhh9]V՘`Z9*t# F7[Э6l;@wtEQZ@у!0zɿPx=GO'&4z="z WЫhz 7Л-6z@bD.!}>A }B_oзh}~@?+ E?p GAԈ؅é8 p6n[6-n#pG؍;q.>wp|<>wpwĽpྸx>CIx(><< cX<D< Oƅx x.§b\gSix>Kr>\+l\spGp\<ux^g9x> Eb__x9_W|-*|# 7[6|;߉w5|/^?Caހ7Gcq~o›Si ~o E~oůW6~oo7[mށ}ޅ?'Sw/Wk Gſwއ|7jHÚjL㚮L-EsiZehZZkmZ;vuЎԎ:jnvu֎ՎӺhk'h]nZwSi>Z__ i'j!IPdm6\m6Zi Dm6Y+ԦhSitmVk%LT4mvvVy2\j>BZ@UiAZBZ6G k-Ŵ) h2%x2L$dRHdNf"r )&%d&9FfxH)'^#I&U$HI Z2IDI%|RGLr9Csy|r\D.&%,!Kɥd\N+ȕd\M!גJr="7jr3Jn#;ȝ.r7YC!>r?i d=y%d7|E&ߐo@~$?/WK~'}O I#ESJ)Ԡ4h*M4f,M[ְhKC;#Q#uNhK8څOO]i7ڝ=i/G{>/G?DOإ 'ne8AG|Z@Gt KeH'ɴNS4:ΠEZLKLz*=΢3h)2ZNG+h%MhVt K. ,z6=.z!^LKh=]BK2z.W+ z^Cѕzz]Eo7fz Fow;]nCk}~@z }>LF(}>NOMt3}>M-9<}H_/ӭ*F_ &}Mߡ;=O?#1~J?/n%~M=;=H?_7Nt?=@Ff#28ә6`&Ka.X:`,eV5kڲv=;Hvܬ;cXgv,;uadzXW֍ug=XO֋ެl`6Ć06`#Y>+`h6ex6MddVȦlf"v +f%l&;fyX+g^cYfU,ȪY Z6YEYe|VLv;sy|v].f%-aK٥l]Ζ+ؕl]ͮaײJv=b7jv3nc;؝.v7[a>v?k`l={=f l#{=gO'&=Şfϰg{^d/V {mcv{f]g]C}>el7}žf߰o~d?/W~g}O`Y#GsN9/| O~Op>>>Oy!§i|:)T~OgRe{WJ>W 5y_3Yl~_B~/z/e2~9_ίW*~5_˯+o~+w| A?7Q?f?ß[sy_[+U_Mww{|'wGc ?_Kÿ{w{_o|/?~~:ұD:ӹ.tSO]zzgzNo#[c. 
zW]{yzoWA`}~>T?YG#|@cqx}>QO )T}>]z>S?U?ML/׽O+ugUzPk^zD1}>O Y9"\<|B"b}~^/ї *jZ:}~~JQI_߬ߢߪߦ߮ߡߩߥ߭ :}Aߨ?????o7OO[+65u}CWOߩ???????׿w___{__>O}~@?7fn0L#pFndFm2ZmF;qct042:nqkct653ˌѵ :+n7;FOg6}~Fc13'!IPdc1a4c1c5 Dc1(4SitcQdb%LT4cqqQjx2>¨4FmTAڨ1BF1#jČqhbxP<$ bxT<&O'&Y<%ψgx^ ^/VxUlvxS%])]CX|">bR|%߈o^ ~?/W+~}O_ELlj&1Lna 4SLjfiff+lk3ۛG9fH(6;G1fgX8yyfv7{=^fc5@sy9bd5O6H3,0G1Xs9ޜ`N4'Bs9՜fN7gE)fYb4O5O3gg,3M3+Jof43d֚s̰1f̜k3usyyyy<<<߼мȼ\l^b֛K̥22rsyy¼ʼڼƼּ\i^o`2o4o2W7wwwwk{{͵}f|\o>h>d>l>bn07OOS39yE%es||naiemc053w͏̏OO/W7;{G'gW7so0)(h)$)"LIqՄ91OPKcy鵞|7**$-5KtQsjʰU%%ʫv{jkáqqjOy8TQb7D"꯫jXuDu Xn=Vʇ}s}ܣ}x2T=V2<.UW}Sox~x=AzXF F}*qa> lE|V pVeZ* xC`2Yh,\Zc$8S S?c*= =k$`ouJqImN'4KJ*<屨U2!3OH:%zA,5IIRj8dٹƲsMB&)$J}mmɖBXNY:VZr8Va+fՉXu%Y->o Q-ţ t9Q93YC9H #1@M%bFS1gCg)I*KKgZ\`ɝwƂ(~Pc7t,f$ GAePIM$4(Kj dR_ DPL Mj*=XuU)UjvJAδOlTʉo,K9iv^FJ΢P$XZ9 3Xz}c@M(`' 8o-;H6P l3a@80DZyLMIIjZYbN <9 aO/BgkS6P E/Lʤjk}x >[(s32P Ƈ&nK8>\v_wE4ՅvU UNRsG6ci5ڐu:?3ʤZ[hI%)`K5nzSmt NvƚMmN|+T/Y[qxO"ZL b<ϼOیu>sF5 _ E& BU9$XG8JpO;rh\8R㩰9V8 +6VlU2kBx2Ž OTFgrVy"D6E1۸qf.ylŪb چiMK>g+B|G,,̎N>yC!΄HDŽݢO1\!vQ(x# Z˖kC6 X%9;u08&|wSdp^ ]M̎{6p}HƱJ2W‘V'ڭ 9 f09hV(ep҆FF>>DV6>N*4X1;6~|IhFJU*O/{L]Dy\p< 0,螚J5k^S\lKe EB OI )^(<ee5%=W-UR K5TVԀ>%@DJ]Rj/Ւr![Uz* yݕ1Dm6sժFmPZ$Zf*PFuZqúXguPCᘢj QxТ̃n3z,5 "n@dU1٣>V-zu- ϰDaժ lϬZ*,Šw2K iB5[)TM("J]VSG˪`en )aBS!wozj4PeH%˺˺RUM(^ En5{J]WӜO9Dη=A&8HA&9HLuFqLVELx8V&ı`j8wA22M{Һw^떎އ6{fCb[le>϶yyOMx޷tk86{fC}H[lwm'i6/q6/ μy[:fC}Hiwmn͖yPzWmxVMKNx53g]]%E7}?vMyوӫ&NUm*/T4(jAU<'&u\Y1*E76Dh4q@*ض[#pH<4+Ȇu0R*De<ɣڨڨ~ڨڨڨڨAdT^/<}S}^d!s 1{)Tȋ ̷ԭk[Qv&ʏ*Cm0SE˥bW+'RS MbiPhj)M-Tij1JMG_[WL\uL 5j**X=*Rĺpuaብ"&8bYYSculNdnzb[q.+>wN "{dgZ_Z:)JZ Lm\6;ŤeXQX6%&}wUȋKESSd=j"̬ yO'+M>P$B[UM$ѓehq"S}!Ԅɔ4/9>3IPhS#|+ &L@l"V[pTkbMͲ jR6Omder^}y|\U=w;LֶITFh2;Y&MؕVB$dd&̴$(-PP"<\yBZ**ji Ux4,wDW3{ls96čU3.P Ϙ}^LB(c >`q _āSw< ěƍHwpaKF{,miLB (د1L(~?h -0BZ0b^$E]DۖW/i|?}yLI>Ġ1} R= 21UIİ9wAT]2b]o4xQ 14t0Cy(ǔJdcI˕<3im2dڔX¬%0LJ"[Z]a[7a +q'oA½<`XmZ~Hoh^ Y)y-ē7x1mBQc2k)zڭ:B`cC1D~y^;;3r{qϚ])$/6Իt飵\LKkʎ0A!XDG:*c~+ }D2>}Uׅena Qxo<' pFb}x2S߫҉[At!gG|,e8>&[# 79XܰW <a` GdLD=* 
ŝkSc=60Q%e5vu`ʚPuOxf<3fmCfmB~F~>~de#egip1 6P;(١=Y3ɜߝ5+LwJ%eM5N fx$Ccjka'bQi)D0[{;El e,|e=}MWF-NmX1 D)}.9̈[{G莪[I\dQ9Rj!Z9Db"`0B])sZq-VsqsZb9DDj vPgFla6dX|jk 1VҸڌGS5g($jzQ4JL\EN\T§wwڋDFU*rʬx*=^71]>~lcN^ |7a;jMq p].(VH^&aMm"!G{Yբ噶*j{5Tnv"j;[cij= -gzzZZzjkWO3Di+*?d}Bθ"n i<#G\]Q0R]\q#es?@Av꾆tuj:ҰNmwHlkcȀo0 Q'a"-(&--ZmKnXC*Q1cшocPes~ٌ1,74:wQ]7hRE}n|&uzʒzʶ,_k~Jݔjڝ2.cM.c NK7cTzttݪw\+y%5WR[u}6=6-?=FIۈ`p7>-N^oьZz2Zn3w{~fFɬt0+;כzq;GɚiN+N-~좵 mݻ堊w:5+b>F筺8oܭP@Ӫh&5Y{!ɨ¤䄢 |w3q,jk 1f:l`&[lev vPX05L~ _05L^ W0y5L^ bX!bX!_ 1y!&/䅘*/fL~3a&?䇙0fL~3a&?䇙0_2L~-_2L^-W2yL^-WcױXu,:__SSeg׳Y,z=_ggrY~꙼zz&鯞o`&o`&o`&o`F&kdF&kdF&kdF&Q71ML~71ML~71ML~71ML~0-L~ 0-L~ 0-L~ 0-L~ 2L~+2Ln+2Ln+2Ln1mLn1mLn1mLn1Ln;3L^;3yL^;39LNɉ29Q&'DY\Lnɍ2Q&7F(gɏ2Q&`r;&`r:&`r:O[LtC6Τ>.!1mH f)xL%3daл?/33x`&ݙ焋l>ǝt#%pBt`- ʱD!aV|uP&x~hl<1ۼF.IuSڒ$ n #S˒W˒GGP K~ KvMKQʠChʆy@Ҷ` xՙrG*;ΰo[VoF(.qKB:#OM=3eo1eԤe hIВV֓דԒQ7$-KݳiޑB52.4e蠜zV%|{6`k4n%Ctg+^']srtOp;dɡ!oJ03\͋]Ykk$Mu.[<zgܣx:uMlJZ328Acp$̤ #O]xwgfxgu}/D']ɤ g v TEKHTqW2BL)fBԊA01S# d=fot|!$Nj cy%Fm)ujxI-j\HzY"8MV9~ܪis*0 6E y<HI +Yoi"͏9Egh!߶Xa: z ji<]NjtL'~kCc/FaJQ\]Ez]xSL9feS,1(-.8}ANWHy7zNf8Sgj:/^J.۾HBk{p$+cq+87N#PǾKOgޟ X7P;zcdU'&xC%( 8bs9QBZF{xD&1Lwǯ>,2-{N?uHp$G7'KM,)pm4;~xF>yތO .gf#<=ByQٞq7IJ C-H*\d46;YKHn̮Aο![CHyig5lBK(9\hXZnU19CÑg%شw-S566vcش ,~$1H@FZ*lB v`nq1JXbCtC8W?1(:5B?vM$0}0Ʌ}5 yT.w/[,<JKd-uod4i"&̈93ba.`z^[́| (C2 ,6(F&IA{l|6a@?mE=H&^Ok#,1\c#vr{bv;9JJ"HYa•e'c0$֫ё/;>"%㘼&kt&0 QYw,csV62K#sg$OZ.H4JZZR;)ic튒hi@pP@"M`/L a-0f+U٘,iփ~XȐ$)Is9L0XxbxwD۩H c_xĤ$.&dGM. Ogb2aɐ`XGȤOmШāEstMiU580tAKF=I74ٸ=;bx_3tRG5 /IY¦c{xutX@A_e$RccxP,G9Ƕ N;:lOтNb谎nMb*!eT6|Lǝ瀁lJd:0E].;՞P)T6eZȢ1IJw_HiTt6R't3LFfW35Hv>=>&ӾL{I33yH%Nt=Θiq,N۱I.`߹1j W܏md<{44bܕ$qp`d㿇mRRiat-p*Yhj#A2Q'zB[wRFA; Q:jيiBFctp(En=-l"ȻciYV)TږbIveٱ|^Ȁ# {jn*uIs+k$l`Uꧡ& ҬҖ4. 
0MMRMH%O$`o%cɌ-عT]T4CD4x*ԶsA;:lji.ֿgװN ۠a5lmְ4l&a ]ZTfmհCvi6ȷTJQkWtv](RUP鵐Z2y.$Bm&#G6iyPv&kr,=QJ9h]d<3n[;Dr& Ԥ]9['05- ɚFip4gM%X`8k*gMhR_@›xno,9Y˽A"!\Ԃvûˮ>6 :P̼% St?&>Mf(> :9 - pYɢKvLQ;3QIbs (ɑ  [$غ,OlĥqU5\t{CZ~FE{/>PIE}FOcD^ѦiIMwp+,|"M/TX?m,,%mt n#) ' BGq()ph8yh4 @n!Yj}CWKqLŇ 'J*'S1~ksIğ9f7[˕|9g*4 \~~)1̭n:ERxD S:WP@\J\np)k!-<Џ8 2 ~s ~}67|sb"砼-!.C7AR^EN&ϗ\9\< @Wo6'd{'s@Des3'Qt9b ħ@@'/lg(ȽgB>{kDŽc3())k̀Oɹ;I~dĚ=Q!N 9H`b9CNe4h8d'8ѥ9K' A=.U}a_bG_CIi|34ZFx; K$ |"s^~ s! 'q:=fRUKuV-9(կ#N˕C e4\Jh) V-X5n V ZI^~<@KJa-R\j@yD9RY> t&(.oMz}Fo\M|xK(Lq>D3G? ΅?I0qX~Z iY~ F5ߊA\po6o5p5@sYC> jp 5Ku@X ʽ*A[UUerG:eiڜ(99;9W^.t ˂XYehZ (E.wͫW4/oh^\\h5{fK5wѬoi{lu6tR: nqq޿3m>yǍPӱT)[޷8ϵ΢g݈A7V}RxGwm}#;tx;tl#/meUi#u|[/b2-FӋ.pVe?vcn|ų(2>~\~R3 ~N/0=.}Mpuc 灨Y CX76Ym¶Iζ$'eSNr90 /g8" D-Jow%_:,qor0C#pK"n$DQBOX"bMuFג; B ?%]\(̣fjs+Qf,k2#_9QЃ‘*\e*p{5h]zs+,ڍzs;r>y? 鸅įKsviupp"*yr_ ڐUpcN\`좫~o9w9͝^]pz:?& ZaM0_ )gr|Mk. Qw=[SzCp @(<|K ,ۉ>AߔJHm h';Jwv4)G7)7V̠GH+W2ϻglP[0{)-zMBRdIT;XN9ln| fՇC\UҪ VU-Q--a8T>ޟEbܷP ;v[=[Gg>p yD~:7~1݌@ɎKzh\@uU}%#d])hxM U\޼E| }=O<q!ǠT~ n>p0?k2 D$,*`_͍qOA1'2_#فtj7w)6Bq A *JAae]j*Ȣuٴ+8tpztKH$ =奼G.y{%OAgǽޛ{HT.aEo GSOKZn0HoBPVuȫ"__}#ѿp#}m_{cXT<_n9揶%}C/6zk+y^YžBhwjs `/O;\6)G!\& W OPშX9sR,I%*foR+Z5H8w>`nz_?/?ABMޛqO0ɑ:y E#l4]R-CPb b8{ˉ yy,s(Hd)#,5d^~&1Z#WI$s ii,iqr̻S ȭ Rf=U1A_X׍.\=wk<")ozszM W⮉C$=ND#V .R"G 7v r EVv脐ˆ\"i7m=G!RXd6(PaB>Hܓt ]\"7+ѧOn}4w,w APcܛ0|&ΊlPi5Aix& Yv:%* *?=)ɎVp)%VQ}heU mUxv?tJ^y_j閆IS]mll,f+[*dznJGOyɎm%yYBq),씌TAJD Zm;n6`X˭ko[#Xy[Lل"p9A?~~@7\ nZ:bz 3Ný*;@OG"CŬa-VxF_.jgף܏@]{~ #n's>c۠Vq_ Rdmq'zDF~o/EEK.W/AD =XlKKpQ&.WH;]^(@iE$|]Ň\ŕ*?TΗɭPq;VU * |EMlnO1,z i]jWxGr b?}=;o'/vw2{Gg_{wyBwl~k z*::g)|4-+V+)!6Hu5=z?0^4@|nWq}S =E#U]ҴR`a(8PPHaIugpq3SKAׯv}PA t}G:[!=)O}Kإw~5ƶ^-tz}Rnh$[>>ac$J/"r&{Kf gʖq+ =3kܼ9UK[ˋˏݸ+N7:0ʭ4{Z䈩0\*>Zc9R€`Z J2\W[ZW(A~a ~/*ǿzrzsSܯ]?t{zs/l}&Wz} oy+RDVW}wKWu[~?%X>k,žNgsGXy#~PmDz(U~rs?V;"pkzئ؉̈́Y%08wz8$6À6Zoγ;0O=AF +'?[{:ɧF>W\`hq/|sA] +PdׅZ\ow}+ן]ҔfAD5]"/Z#Pä9xis+ݧ!x|P|GN/g)c|P,_i>i&k.y+}TT-+?eh̗淡|ʷF侌rrOFчQ s= 
䵔[ٳc~rtt9юlvwsŒo]yM+:oԻ+ԗw!%e֪cr`wٌGy,>oٔJ.pGUDV:4YȚ2+l{BRm@,h z՟ö_Q*#/)R%jӏ#{7?ʑ4za8;ݗ܏cʿ#+ԥg޼Ќf&^|ߴJ;\)F. DhSZSjܬVBs?Aōo>% +^a|_bο'S"[DYkr_'_KbvX/Z"mN< WYLݢ6Qa?JGq갋)E$"yд)Re»,Xxٲζז=iE|`s[E̺6 sFR{ln|V Pz׳edDȰ5F0 j*@G'Yd%x *Q{?WW+Y%9U%i6pKdqH?s׊ğ!򃲀o;lcv \BH\TwC ,$.KH@M{4f<H>w1҆A;Л! H_}Shɝ1ɽzW]M&¥O}zg:^$}Vgr{|^`E8ExEw@{ɷOɼC»ȭ9C*{{=&!HѯE>."||y'P\*Dׂ .bSoXD6;:ґO;͟U ʅF"cIٺ)tKPa߄tS#f`  gykQI/V*.s: 3 蒓Bґq:e0/;\ƗU!*Xg&w?GnfRY?y~X'NeO޽.lOozgwև>U6rñ*K+-n} 2i : >>f^y@. N' HHZ-;;` ,f5ӵ+k5.8&rJkJ,׫%.5Ck!uz8 +눥 ZH>V;jn|˗x)JhA^,`؟I7"0̕p $Z$tA(zP EI@1'RD$ w<)kג.tz hMO0\Z˞P;==!x]Qx]XKO`*Q="jv/N( N}r\6G/??~7߿vJ҈v3k7ֿێZNٯt,sq+" %pYO0Q^E\ %+*rpk8. Aڹmtu}+ݸOK"k._{sIX/\ /ZU2l% 5kZC{W5pxos\pmk3\[.{;nt?'kkM d;r|gz_Ge4Kh_ݗns$endstream endobj 27 0 obj << /BitsPerComponent 8 /ColorSpace /DeviceGray /DecodeParms << /Columns 112 /Predictor 15 >> /Filter /FlateDecode /Height 83 /Subtype /Image /Width 112 /Length 95 >> stream xر DAI2~ fJ 2 /K@ s\`@ @ @ ޑi@ !8ubTendstream endobj 28 0 obj << /BitsPerComponent 8 /ColorSpace 8 0 R /Filter /FlateDecode /Height 78 /SMask 27 0 R /Subtype /Image /Width 105 /Length 141 >> stream xI 1@ѮaF&JW *q\KH1SBPKU"%3fv<}~2QQmQsՐ[dZdl#הLW觡on=t<endstream endobj 29 0 obj << /BitsPerComponent 8 /ColorSpace /DeviceGray /DecodeParms << /Columns 129 /Predictor 15 >> /Filter /FlateDecode /Height 122 /Subtype /Image /Width 129 /Length 126 >> stream xΡ 0@AH>,fgh`b~K½ޥiti4`["9Rp 9RPﶠs_<{endstream endobj 30 0 obj << /BitsPerComponent 8 /ColorSpace 12 0 R /Filter /FlateDecode /Height 115 /SMask 29 0 R /Subtype /Image /Width 121 /Length 1048 >> stream xZr0 <[ʡ-0wW$fe2bpA>6 '1Uhu>ǦNfQ2׽'/mSDQmez 8պ> /Filter /FlateDecode /Height 112 /Subtype /Image /Width 129 /Length 122 >> stream xΡ0A28RI (`>an44@@@@@@@@F-ó{_u\w[9 qendstream endobj 32 0 obj << /BitsPerComponent 8 /ColorSpace 12 0 R /Filter /FlateDecode /Height 105 /SMask 31 0 R /Subtype /Image /Width 121 /Length 835 >> stream xYr0uow d@2M[`ׯ~o瀟$g6"{Yf=ͶPؚuwS7‹U҈77q - W0Wx  v$'=?|5K|$ջ83a#!)&umUiԒ1`LΊCeؑcWO:ox4 j 
Rvĥ@)ķ@l#zM4ՈNX;4E %AgIJ9]眧iEI;`c Mr,E&F\ƣ#N`1D ר4 Xzg׎b[Gߗyހ9͛:qlH|1RWWy&\%6$6%|xDŽ9(zi#6UyeJ67™ҡtYWmė:CFmD Seqe 3RnEۆmۛJh;q(<{.TIhh;59972U3=k}>?(&ri^--ہԽ>SCܖ]1q8F(yS6QUikL]j_6sKRζӅX3$WĖ(,J=Y҂v{]gFE M q D/W/O^؛34endstream endobj 33 0 obj << /BitsPerComponent 8 /ColorSpace /DeviceGray /DecodeParms << /Columns 129 /Predictor 15 >> /Filter /FlateDecode /Height 110 /Subtype /Image /Width 129 /Length 121 >> stream xΡ0AbA3  Jfk6MKo9> stream x횇v0 E+b?H$rܖpdk_V~2eʔ)S[rv0ڻo"cf6OV݈.O[RXLÌ83aDa`MUE'ZZ9; Bgv/%boiX uX}Ϭ `w>nf+/*4M@(o 6$!Z_,N' v!H{{a#ȡR^׭ 8-crDaU`G+unP#mPeaGLj7<6P Xym^UNm %nȳqRJ =`h6Dzjg8{w _s@ d!^e/1TJgFЛ^8ݒyȌ0ĈN`CFG&<0$[v8f7/w,q~eswY7Z`d&ǙplșoxH;x~o5߫%)݂`ޑx`tYF~*gYP$dB<;ʕđFQ*%{F)hY;-Y7YVH\NRg`Z ^Z>s s,߸4>Z4ج݇=?(&rޔ[(;;R5M,v`">_۲Y+#&EޔLUFZ"8=?Xؼ۞T QXS4)t:jTTЄ ohojh"> /Filter /FlateDecode /Height 92 /Subtype /Image /Width 123 /Length 101 >> stream x1 0@:Pa\FVЄ;vn{ޖnvnvnvs_vn37t]endstream endobj 36 0 obj << /BitsPerComponent 8 /ColorSpace 12 0 R /Filter /FlateDecode /Height 86 /SMask 35 0 R /Subtype /Image /Width 116 /Length 790 >> stream xYn@ IKqis5Y`0mq… .\x} nG9=XodH(ActI#4K"~:'"6AD8uVfs2 1DLMajv6xON9qeAZHHw>BisS!N+=ȣy}amI3mt2jw{&uIņm^ 0Ŧsy:pvJdzUF9_S8|h|BHr&,W[N%0xԫZj8OGTYVWqV!n=7Bܵ~YƵùRJNJ@򦦩/T *a_|UCб4|"Ntq9H~ ">-fqb*1Kg+gȪ6dU^Qd?]j6y4*I5enUw;EUA[z̵jyghJrgmxk'QwV_Z_npumY/g6yٲ)n"G*)1^FŽxumn P-D6@" > /Filter /FlateDecode /Height 116 /Subtype /Image /Width 112 /Length 109 >> stream xͱ @ALpװf_Jf2vGL .-@ N9߮1X@@ @ @ @ @} =  uendstream endobj 38 0 obj << /BitsPerComponent 8 /ColorSpace 12 0 R /Filter /FlateDecode /Height 109 /SMask 37 0 R /Subtype /Image /Width 105 /Length 986 >> stream xYv0 \ޑ!mpyoQAؖg4?gqgyFK)iw)T3GJUU۝y1fx+e 0\W-, C$ @QP8x^#ʝEۺc@c" +*R SJ¢WQ <ʼÁ}'=빀ߍSX*r2ŷV{Қ 3\ikU<|)7fŭtByiidY੯;2.o%v!ۈ72 4G@[-5ԅ/+JשQ@qcYmZ1k3n`TK*CiQE(5FRP4d)<>F ZeM)VGy;t4Vm:)duV=nӥ|`Yoy.FnZR>p_2|2{)Eof6l{setR^4ζW{@H8Zƃ/B?FdA~ u4랅%4JL]ٰ_.q܍Ř_GnzMG+iK>lV//}56$+'.gqg d2Fendstream endobj 39 0 obj << /BitsPerComponent 8 /ColorSpace /DeviceGray /DecodeParms << /Columns 112 
/Predictor 15 >> /Filter /FlateDecode /Height 104 /Subtype /Image /Width 112 /Length 103 >> stream x1 @QYUh ΡQK@ ,G X@ @ @ =l /퓌iendstream endobj 40 0 obj << /BitsPerComponent 8 /ColorSpace 12 0 R /Filter /FlateDecode /Height 98 /SMask 39 0 R /Subtype /Image /Width 105 /Length 698 >> stream xXn0 [wr^PK6%]p… ú^-h˿x]yqrW 2c ڕ G잫=ZcfRbVҦܚ~IqgGr׬]L$&!& H@CV$LJ^=uHEsGB"hD?tOyegG em&&i a<0X}m3)T%u5E:Hu=L:YH nj7+O*z6н:7 U˜yd?mB'G@=NDA.Hv6,۶bTiDu#x4J;jL&TgiEI&Q"Y')ED)!0#څ[)ђtھ=ceWJ$SRp9T"à'&>v_Eu7 ۼ"B#!Wr8>bjӥ~4"~OfR[SKϬhJy5A B:n64UŀN̗gC_V\I$);T84Ubyzi6w|W3>FX`I iqi~Z>!eS(-7K?9pu,պ۽;_ϩTSDх .\J)endstream endobj 41 0 obj << /BitsPerComponent 8 /ColorSpace /DeviceGray /DecodeParms << /Columns 140 /Predictor 15 >> /Filter /FlateDecode /Height 80 /Subtype /Image /Width 140 /Length 155 >> stream x١ @ѽJBCX!P &A!f7Q6J %CPb(1 fh >lx]J>g;1J %CPb(1J %CPb(1J %Ja)B8l^b(1J %C.c\Qendstream endobj 42 0 obj << /BitsPerComponent 8 /ColorSpace 12 0 R /Filter /FlateDecode /Height 75 /SMask 41 0 R /Subtype /Image /Width 132 /Length 634 >> stream xX햪0 \I`wYD9 ~} Cr^?E(AM&Lb2PyB5z'qr-PÈ文_VSvs#w$'` h9nO: oMƢfM#+E4X PS)&°=7MLXy8 pTAC#t7p\p$T9PN7끠'/{hV *ut7@"Ԯ1y紅CI`ETrp▷U"PQq"\nl%*/ǯ I) KA25p^rxzX&dCƠIC 8m9mҸëq`P,ֽeSxaN4h笕ʕF٧ƸIt[ITfM)m6/= ٲ".6A;.V`ڵKqÜ‹yy#DyPV%5_x9UIGwyؘޞrKoML+Z D]r8|@} %&endstream endobj 43 0 obj << /BitsPerComponent 8 /ColorSpace /DeviceGray /DecodeParms << /Columns 112 /Predictor 15 >> /Filter /FlateDecode /Height 49 /Subtype /Image /Width 112 /Length 100 >> stream x @@FN[*uu!(f6$>S `:|Crmy#A:! 
ks.cn_2endstream endobj 44 0 obj << /BitsPerComponent 8 /ColorSpace 12 0 R /Filter /FlateDecode /Height 46 /SMask 43 0 R /Subtype /Image /Width 105 /Length 211 >> stream xn0F?xUqE.8Id0 0?-\ "󱹼v6- 2VI&QV52uBZUl{E-EhTVZB<*2Wi $%—/kjE!-JG+HҎ,q 2p *>$,[ 3I;rs-]zߋ3} ha/ 2endstream endobj 45 0 obj << /BitsPerComponent 8 /ColorSpace /DeviceGray /DecodeParms << /Columns 146 /Predictor 15 >> /Filter /FlateDecode /Height 66 /Subtype /Image /Width 146 /Length 159 >> stream x!1FaZ#q+` @YL[ 6yOf>=0\)P'$$$$$$$ ]ݮzl-LjI S8)_"NIIIIIIIIIIiVybˏO_NIIIIIIi@wBCendstream endobj 46 0 obj << /BitsPerComponent 8 /ColorSpace 12 0 R /Filter /FlateDecode /Height 62 /SMask 45 0 R /Subtype /Image /Width 137 /Length 793 >> stream xᒛ0 kK$Ir3BKolXja~gO0M9Q;f27Ѯ"*povA"v̤<[K#s21"HD5fp}V0Zt0qc"w͒O}wKtN1e4_N6mwaAfEzTG<:YD9KQ%q/]CFFgR8è$sssOxri_r("ҹmzL 1/| rչ&ֲW)ʣլGGn1y[D"DBH%!sɲGɇKru 76/I"\J(h+Gb}>_K ]yvzD4*غOܣ0Wy4d-WS1$v0pT'TkZxufyg3UbyY5>LEj@<(DkV?Fz(3Qѓ蘈:ycQ=&k_ĕt,t@tz`R1f~X<_FِU޶4쯯_bE J/Yy4ygt>K$yWee_cY/W,x+~O$O鿉'u7 DX+Per3^seWD)'rJ > /Filter /FlateDecode /Height 55 /Subtype /Image /Width 127 /Length 106 >> stream xѱ 0 DQCd"ffGI0[Dil;TbS|||||||x_m'GP8?N||||||-f~&|翣(8endstream endobj 48 0 obj << /BitsPerComponent 8 /ColorSpace 15 0 R /Filter /FlateDecode /Height 52 /SMask 47 0 R /Subtype /Image /Width 119 /Length 500 >> stream xUR0 ֱY9M 2M!xPc1f;r76γm?{|43yoGW s]{T}Cy EP55^"="4+7wm0z edȨFf>|iad dJj#9;FO/6 ?}-N{{eoֶK6ozɫD$i\<6hvf/ؘl.h=@1 C%NZ{IS3jG(mZY`(-#k͍Ll4FdNҠv^֧j?#|%oKJ!e'ӊzd0_žFMAfPzQ?T ,OAs |> /Filter /FlateDecode /Height 58 /Subtype /Image /Width 110 /Length 102 >> stream x֡0 EĖݳ!,a5i7pppppppܚ\ŅC97mG$'WszboffD.;endstream endobj 50 0 obj << /BitsPerComponent 8 /ColorSpace 15 0 R /Filter /FlateDecode /Height 54 /SMask 49 0 R /Subtype /Image /Width 103 /Length 473 >> stream xUr ˾wwSIts`@ mcccccc|VdӈA,xHs#[^g2.T)*PUnUCٺ! 
CuUZJ haTi^#=Ƚ"!i>ne5a2e"Kw фLVݤje~62fQȼA'ţ%yPz͂<2xi,IbtЌ㴁 sA*iV~ &g$bAvAT`h*0;pI I.!-/\[vWsUd68 M擃.f4}.K3דiFlw'gvVdT 'ae-V> /Filter /FlateDecode /Height 118 /Subtype /Image /Width 123 /Length 129 >> stream xͱ @@NjjT@R'>n;vn7e}᜗Sy;ônvnvnvnvn;nwyqnWwendstream endobj 52 0 obj << /BitsPerComponent 8 /ColorSpace 12 0 R /Filter /FlateDecode /Height 110 /SMask 51 0 R /Subtype /Image /Width 116 /Length 1178 >> stream xZz* \gc{$$qL2v ϟ..S[N)ri)4_"a ХVU* ^*AkccN.%mp3%̋F**<|ڧK4Ͱll.=ӽ͵eN`{l5Z}='1O -L/c\4;|U^o᰹nMG&1=ܥX IÅ|MX|*`n~p}<],]Z+MyA T)^BXGSWۘof}r p&1(UBHUDׂ2z?MH0qTEF/B)!}ٛo-C[DE` E ht1{?}'t ԟSD?H}tcAh3o=y~iNi/tPǤDnÈy^ŔD+G{60o; d|d&y0O ;~Mav)̌>@L3JVR2TӞG71-uAℼES 9|\ބ'xOalK ƽn/aMMbF* {pAq GT8$]b;&o-{=Ol5EL'de#\;&AI v'x~xfA~gW|YN6pY;*HR -?>  $t:8].u\qX2)RD(Ӈ M`zJ RDLeeCqpV8Tм] nq(>u)%=& 8F0npL\ʓV]`jSK+ַ]a[8^'M3FoT;Zht,O`yݪ;=y6fʘgeRTAОO_%Y Yu''j R"}BN"A!yک!H[%}f57Y'ҢC2*"UYnNJ H`=աSɞvg7WW0ч"yvit#f[VpY]Sw8)#Au<ɉj&[ޚ|zSOs.> /Filter /FlateDecode /Height 66 /Subtype /Image /Width 130 /Length 113 >> stream xΡ0 0ݠ8*@@DO{ vy]k'> stream xW r0 Bvvi> /Filter /FlateDecode /Height 55 /Subtype /Image /Width 215 /Length 160 >> stream x!@Ѭ‰8Fqr,*8vz-g 9+[)]tJ+]tJu?>W˱-M{O,+epE>KQ/9]tJ+]v:8iϥ^vI.VXb.VXb,8endstream endobj 56 0 obj << /BitsPerComponent 8 /ColorSpace 15 0 R /Filter /FlateDecode /Height 52 /SMask 55 0 R /Subtype /Image /Width 201 /Length 666 >> stream xVr %qq3>llllllllllll<FB %TvٻuMfl=PR-U@%V<Q~: |m04-. 
um y؏H3w)VV)-34HĚ I* C'{ GM&!ܸ+|,kFy{9˿FNn"A'>5v.Y%3oMp$ Yh4~vmmg:圩o^!{$Y"4Bwǣ,e=u&xAeuuN!PVT(2Q1iH( nSv C?KMVm2,xfgWhF(xthEb5)$H+..^zS> /W [ 1 3 1 ] /Info 3 0 R /Root 2 0 R /Size 58 /ID [<806a2db34dfffd2066b3056e0f97218e><4a28cac1e4fe6a197c208e2af18992f3>] >> stream xcb&F~0 $H'?; ddNA$ ذ fTA$^0D<lF0[ fׁ`[b%ٲ`q0YOtXiG32$$ endstream endobj startxref 101409 %%EOF Luminescence/inst/doc/HowTo_analyse_Al2O3.html.asis0000644000176200001440000000026713231137116021711 0ustar liggesusers%\VignetteIndexEntry{Analyse Al2O3:C Measurements} %\VignetteEngine{R.rsp::asis} %\VignetteKeyword{PDF} %\VignetteKeyword{HTML} %\VignetteKeyword{vignette} %\VignetteKeyword{package} Luminescence/inst/doc/S4classObjects.pdf.asis0000644000176200001440000000030613231137116020662 0ustar liggesusers%\VignetteIndexEntry{S4-class Object Structure in 'Luminescence'} %\VignetteEngine{R.rsp::asis} %\VignetteKeyword{PDF} %\VignetteKeyword{HTML} %\VignetteKeyword{vignette} %\VignetteKeyword{package} Luminescence/inst/doc/HowTo_analyse_pIRIRMeasurements.html.asis0000644000176200001440000000027413233636635024421 0ustar liggesusers%\VignetteIndexEntry{Analyse post-IR IRSL Measurements} %\VignetteEngine{R.rsp::asis} %\VignetteKeyword{PDF} %\VignetteKeyword{HTML} %\VignetteKeyword{vignette} %\VignetteKeyword{package} Luminescence/inst/doc/HowTo_analyse_Al2O3.html0000644000176200001440000101747213604173344020771 0ustar liggesusers How to Analyse Al2O3:C Measurements

How to Analyse Al2O3:C Measurements

Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montagine, France

Last modified: 2019-12-04 (‘Luminescence’ version: 0.9.6)

1 Scope

The package ‘Luminescence’ offers three distinct functions dealing with the analysis of Al\(_{2}\)O\(_{3}\):C chip measurements:

  1. analyse_Al2O3C_ITC()
  2. analyse_Al2O3C_CrossTalk()
  3. analyse_Al2O3C_Measurement()

Only the last function is usually needed to routinely estimate the (environmental \(\gamma\)-) dose the chip had received. However, the first two functions are needed to correct for equipment related issues. If you have already performed the first two analyses or you do not feel the need for them, you can directly start with the Sec. 4.

The following tutorial assumes that all measurements have been performed on a Freiberg Instruments lexsyg SMART luminescence reader (Richter et al., 2015). Nevertheless, the general procedure should also work for other luminescence readers. However, please keep in mind that for readers without a separate measurement chamber the determination of the irradiation cross-talk becomes indispensable and should not be skipped.

Please further note that this vignette covers only the R related part of the data analysis and will not explain the theoretical and physical background. Please see Kreutzer et al. (2018), for details.

2 Determine irradiation time correction factors

2.1 Data import

To determine the irradiation time correction factor the function analyse_Al2O3C_ITC() is used. The measurement sequence is based on the suggestions made by Kreutzer et al. (2018). To import the measurement data run the function read_XSYG2R(). The curve selection is done automatically by the function using the argument recordType (preset). Modify this argument if the selection here does not fit your equipment.

library(Luminescence)
data_ITC <- read_XSYG2R("MyIrradiationTimeCorrectionMeasurement.XSYG", fastForward = TRUE)
data_ITC
## 
##  [RLum.Analysis-class]
##   originator: read_XSYG2R()
##   protocol: 
##   additional info elements:  0
##   number of records: 400
##   .. : RLum.Data.Curve : 400
##   .. .. : #1 TL (UVVIS) <> #2 TL (NA) <> #3 TL (NA) 
##   .. .. : #4 OSL (UVVIS) <> #5 OSL (NA) <> #6 OSL (NA) <> #7 OSL (NA) <> #8 OSL (NA)
##   .. .. : #9 TL (UVVIS) <> #10 TL (NA) <> #11 TL (NA) 
## ... <remaining records truncated manually>

2.2 Run analysis

The imported data are either a single RLum.Analysis object or a list of such objects, which can be directly passed to the function analyse_Al2O3C_ITC().

Please note that if you follow the suggestions by Kreutzer et al. (in press) no further function arguments are necessary.

results_ITC <- analyse_Al2O3C_ITC(object = data_ITC)
## 
## [analyse_Al2O3C_ITC()]
## 
##  Used fit:       EXP
##  Time correction value:  2.59 ± 0.06
Dose response curve used to correct the irradiation time for the movement duration of the sample carrier.

Dose response curve used to correct the irradiation time for the movement duration of the sample carrier.

The analysis returns a plot (Fig.) and the output is stored in the object results_ITC and will be used later.

3 Irradiation cross-talk estimation

3.1 Data import

The data import is similar to the data import given above (including the automatic curve selection).

library(Luminescence)
data_CT <- read_XSYG2R("MyCrossTalkIrradiationMeasurement.XSYG", fastForward = TRUE)
data_CT
## 
##  [RLum.Analysis-class]
##   originator: read_XSYG2R()
##   protocol: 
##   additional info elements:  0
##   number of records: 21
##   .. : RLum.Data.Curve : 21
##   .. .. : #1 OSL (UVVIS) <> #2 OSL (NA) <> #3 OSL (NA) <> #4 OSL (NA) <> #5 OSL (NA)
##   .. .. : #6 TL (UVVIS) <> #7 TL (NA) <> #8 TL (NA)
##   .. .. : #9 OSL (UVVIS) <> #10 OSL (NA) <> #11 OSL (NA) <> #12 OSL (NA) <> #13 OSL (NA)
##   .. .. : #14 TL (UVVIS) <> #15 TL (NA) <> #16 TL (NA)
##   .. .. : #17 OSL (UVVIS) <> #18 OSL (NA) <> #19 OSL (NA) <> #20 OSL (NA) <> #21 OSL (NA)

3.2 Data analysis

For the data analysis the function analyse_Al2O3C_CrossTalk() is called. Amongst others, the function has a parameter called irradiation_time_correction. This parameter can be left empty or the results from the previous irradiation time correction measurements can be directly passed to the function. Graphical results are shown in Fig.\(~\). The numerical output is again an RLum.Results object which can be kept for a later usage, i.e. for an automatic correction during the dose estimation.

results_CT <- analyse_Al2O3C_CrossTalk(
  object = data_CT,
  irradiation_time_correction = results_ITC)
Graphical function output of the crosstalk data analysis. Please note that the plot shows the results from repeated cross-talk measurements as described by Kreutzer et al. (2018).

Graphical function output of the crosstalk data analysis. Please note that the plot shows the results from repeated cross-talk measurements as described by Kreutzer et al. (2018).

4 Al\(_{2}\)O\(_{3}\):C dose determination

This section describes the workflow for the final apparent dose estimation of an Al\(_{2}\)O\(_{3}\) chip. The analyses done above are not necessary but recommended to correct for the equipment characteristics.

4.1 Data import

The data import follows the examples given above and consists of calling the function read_XSYG2R(). An additional curve selection is not necessary, as long as the sequence follows the suggestions by Kreutzer et al. (2018).

library(Luminescence)
data_EnvD <- read_XSYG2R("MyEnvironmentalDoseMeasurement.XSYG", fastForward = TRUE)
data_EnvD

4.2 Data analysis

For the analysis of the environmental dose the Al\(_{2}\)O\(_{3}\) chips received, the results from the estimation of the irradiation time correction analysis and the cross-talk measurements are provided as input objects. The function automatically corrects for both effects. If this is not wanted the arguments can be set to NULL (the default)

results_EnvD <- analyse_Al2O3C_Measurement(
  object = data_EnvD, 
  irradiation_time_correction = results_ITC, 
  cross_talk_correction = results_CT)
##  [analyse_Al2O3_Measurement()] #2 DE: 26.86 ± 0.39
Representative curve plots for one chip. Shown are OSL and TL curves.

Representative curve plots for one chip. Shown are OSL and TL curves.

## Warning: Stimulation power was not stable for ALQ 1! Results are likely to be
## wrong!

For this example six Al\(_{2}\)O\(_{3}\):C chips have been measured. The function returns an object of type RLum.Results:

results_EnvD
## 
##  [RLum.Results-class]
##   originator: analyse_Al2O3C_Measurement()
##   data: 3
##       .. $data : data.frame
##   .. $data_table : data.frame
##   .. $test_parameters : data.frame
##   additional info elements:  1

To access the numerical results the given data objects within the RLum.Results object can be accessed using the $ sign:

results_EnvD$data
DE DE_ERROR POSITION INTEGRAL_RATIO TRAVEL_DOSIMETER CT_CORRECTION CT_CORRECTION_Q2.5 CT_CORRECTION_Q97.5 TL_DE TL_DE.ERROR UID
-2.634401 0.0377035 1 -0.4003952 NA -0.0048094 -0.0103929 0.0007741 NA NA 2019-12-04-04:17.0.233461152995005
26.836016 0.3814688 2 4.0749423 NA -0.0039107 -0.0089520 0.0011306 NA NA 2019-12-04-04:17.0.194019496208057
27.699794 0.4060110 3 4.2050228 NA -0.0029422 -0.0074892 0.0016047 NA NA 2019-12-04-04:17.0.427538436371833
29.683922 0.4184611 4 4.5070818 NA -0.0019040 -0.0060076 0.0021996 NA NA 2019-12-04-04:17.0.0943601704202592
29.007260 0.3994199 5 4.3999199 NA -0.0007961 -0.0045108 0.0029187 NA NA 2019-12-04-04:17.0.900255984859541
28.818951 0.4210171 6 4.3757229 NA 0.0003816 -0.0030023 0.0037656 NA NA 2019-12-04-04:17.0.0321574283298105

This table is a data.frame and can be further treated with standard methods in R. For example, the table shows one extreme value on position 1 (the first row). This value has obviously no meaning and indeed, here the value was biased due to a technical error and should be removed from the data set.

results_table <- results_EnvD$data[-1,]

In a last step the results can be plotted, e.g., using the Abanico Plot (Dietze et al., 2016).

plot_AbanicoPlot(
  data = results_table[,1:2], 
  zlab = expression(paste(D[e], " [s]")),
  main = "Dose Distribution", 
  summary = c("n", "mean", "sd.abs", "sd.rel")
  )

## Further analysis

Two further data processing steps might be of particular interest in the given context:

4.2.1 Travel dosimeter(s)

Usually, the data set to analyse contains chips, which were used as travel dosimeters, i.e. these chips were reset in the field at the time the other dosimeters were taken and then transported along with the field dosimeters. The dose of the travel dosimeters is subtracted from the measured dose of the field dosimeters. This can be done manually or automatically using the argument travel_dosimeter in the function analyse_Al2O3C_Measurement(). For the data set given above the analysis can be used as follows, assuming that the travel dosimeters were placed on measurement position 2 and 3:

results_EnvD_alternative <- analyse_Al2O3C_Measurement(
  object = data_EnvD[-1], 
  travel_dosimeter = c(2,3),
  irradiation_time_correction = results_ITC, 
  cross_talk_correction = results_CT, 
  verbose = FALSE, 
  plot = FALSE)
results_EnvD_alternative
## 
##  [RLum.Results-class]
##   originator: analyse_Al2O3C_Measurement()
##   data: 4
##       .. $data : data.frame
##   .. $data_table : data.frame
##   .. $test_parameters : data.frame
##   .. $data_TDcorrected : data.frame
##   additional info elements:  1

The original data set remains untouched, but now the returned object contains a new, additional, object called data_TDcorrected with the corrected values:

results_EnvD_alternative$data_TDcorrected
##         DE  DE_ERROR POSITION
## 1 2.397130 0.7484986        4
## 2 1.696485 0.7460078        5
## 3 1.501021 0.7511837        6

4.2.2 Gray to seconds

So far all returned values had the unit s (seconds), however, usually, the energy dose is wanted. To transform the values from the time domain to the dose domain, the function Second2Gray() can be used. Please make sure that you recalculate your calibration values to the measurement date.

5 Ease your workflow

The analysis steps for the irradiation time correction and the irradiation cross-talk estimation might not need to be repeated for every analysis. Thus, R allows to save the results and recall them for the next analysis using the functions save() and load()

save(list = c("results_CT", "results_ITC"), file = "MyResults.Rdata")
load(file = "MyResults.Rdata")

References

Dietze, M., Kreutzer, S., Burow, C., Fuchs, M.C., Fischer, M., Schmidt, C., 2016. The abanico plot: visualising chronometric data with individual standard errors 31, 12–18. doi:10.1016/j.quageo.2015.09.003

Kreutzer, S., Martin, L., Guérin, G., Tribolo, C., Selva, P., Mercier, N., in press. Environmental Dose Rate Determination Using a Passive Dosimeter: Techniques and Workflow for alpha-Al2O3:C Chips. Geochronometria 45, 56-67. doi: 10.1515/geochr-2015-0086

Richter, D., Richter, A., Dornich, K., 2015. Lexsyg smart — a luminescence detection system for dosimetry, material research and dating application. Geochronometria 42, 202–209. doi: 10.1515/geochr-2015-0022

Luminescence/inst/doc/HowTo_analyse_pIRIRMeasurements.html0000644000176200001440000073236213604173344023470 0ustar liggesusers How to analyse post-IR IRSL measurements?

How to analyse post-IR IRSL measurements?

Sebastian Kreutzer, IRAMAT-CRP2A, UMR 5060, CNRS - Université Bordeaux Montaigne, France

Last modified: 2019-12-04 (‘Luminescence’ version: 0.9.6)

1 Scope and introduction

Using the function analyse_SAR.CWOSL() or Analyse_SAR.OSLdata() from the R package ‘Luminescence’ allows to analyse standard OSL (quartz) measurements based on the SAR protocol (Murray and Wintle 2000).

The function analyse_SAR.CWOSL() can also be used for analysing measurements based on the post-IR IRSL protocol (pIRIR, Thomsen et al. (2008)), since the measurement protocol is based on the SAR structure (see the following table) comprising a set of curves with a \(L_{x}\) and \(T_{x}\) signal pattern.

Step Example Signal
Irradiation beta-irr. -
Heating preheat/TL -
Stimulation OSL \(L_x\)
Irradiation beta-irr. -
Heating cutheat/TL -
Stimulation OSL \(T_x\)

To lower the entry level and make the analysis of post-IR IRSL data more straightforward, some time ago, the function analyse_pIRIRSequence() was developed. This function is basically a wrapper around the two functions analyse_SAR.CWOSL() and plot_GrowthCurve().

This vignette provides a short tutorial exemplifying the analysis of post-IR IRSL data in R. To avoid misunderstandings, please keep in mind that the post-IR IRSL protocol is a simple extension of the SAR structure by introducing further stimulations steps only:

Step Example Signal
Irradiation beta-irr. -
Heating preheat/TL -
Stimulation IR\(_{50}\) \(L_{x_{_1}}\)
Stimulation pIRIR\(_{225}\) \(L_{x_{_2}}\)
Irradiation beta-irr. -
Heating preheat/TL -
Stimulation IR\(_{50}\) \(T_{x_{_1}}\)
Stimulation pIRIR\(_{225}\) \(T_{x_{_2}}\)

While the number of IRSL stimulation steps is not limited in general (cf. Fu, Li, and Li (2012)), the number of steps used for recording the signal of interest and the test dose signal must be equal. For example, if the sequence has two stimulation steps for the signal (\(L_{x_{_1}}\), \(L_{x_{_2}}\)) (as in the table given above), it also needs two stimulation steps for measuring the test dose. Further steps, e.g., hot bleach steps at the end of the cycle, are allowed, but do not belong to the SAR structure and should be removed prior to any analysis using the function analyse_pIRIRSequence().

Note: The terminal and graphical output shown below is partly truncated to shorten the length of this vignette, however, calling the functions in R will show the full output.

2 Running example

In our example, the measurement was carried out on a Freiberg Instruments lexsyg luminescence reader. Measurement data are stored in XML-based file format called XSYG. Two pIRIR signals were measured: A IR\(_{50}\) and a pIRIR\(_{225}\) signal. The preheat steps were carried out as TL.

2.1 Data import

To start with, the package ‘Luminescence’ itself has to be loaded. In a next step, measurement data are imported using the function read_XSYG2R(). If your input format is a BIN/BINX-file, replace the function read_XSYG2R() by read_BIN2R().

library(Luminescence)
temp <-  read_XSYG2R("pIRIR_measurementData.xsyg", fastForward = TRUE, txtProgressBar = FALSE)

To see the dataset in the R terminal, just call the object temp

temp
## [[1]]
## 
##  [RLum.Analysis-class]
##   originator: read_XSYG2R()
##   protocol: pIRIR225
##   additional info elements:  0
##   number of records: 229
##   .. : RLum.Data.Curve : 229
##   .. .. : #1 TL (UVVIS) <> #2 TL (NA) <> #3 TL (NA) 
##   .. .. : #4 IRSL (UVVIS) <> #5 IRSL (NA) <> #6 IRSL (NA) <> #7 IRSL (NA) <> #8 IRSL (NA)
##   .. .. : #9 IRSL (UVVIS) <> #10 IRSL (NA) <> #11 IRSL (NA) <> #12 IRSL (NA) <> #13 IRSL (NA) 
##   .. .. : #14 irradiation (NA)
##   .. .. : #15 TL (UVVIS) <> #16 TL (NA) <> #17 TL (NA) 
##   .. .. : #18 IRSL (UVVIS) <> #19 IRSL (NA) <> #20 IRSL (NA) <> #21 IRSL (NA) <> #22 IRSL (NA)
##   .. .. : #23 IRSL (UVVIS) <> #24 IRSL (NA) <> #25 IRSL (NA) <> #26 IRSL (NA) <> #27 IRSL (NA) 
## ... <remaining records truncated manually>

The output shows an RLum.Analysis object, which contains all recorded curves (RLum.Data.Curve objects) from one aliquot (e.g., cup/disc). In total, the dataset contains the curves of 7 aliquots. All records are numbered, here from #1 to #208 (shown only until #29) and named by their corresponding record type (TL, IRSL). So far available, within round brackets, information on the detector are given (UVVIS and NA). This reveals that the object contains curves which are not wanted for the analysis.

Curves which belong to a specific measurement step (e.g., IRSL stimulation) are connected with the <> symbol. However, curves with (NA) are curves recorded by technical components (e.g., temperature sensor) other than the photomultiplier tube and are not wanted, even though they belong to the dataset. In our case, unfortunately, the information (UVVIS) is rather uninformative, but this is a usual case, since it depends on the measurement device whether information on the detector is available or not. This example emphasises that prior knowledge of the data structure and the used technical components is indispensable.

2.2 Select wanted curves

To select only the curves wanted for the analysis the function get_RLum() can be used:

temp_sel <- get_RLum(temp, recordType = "UVVIS", drop = FALSE)
temp_sel
## [[1]]
## 
##  [RLum.Analysis-class]
##   originator: read_XSYG2R()
##   protocol: pIRIR225
##   additional info elements:  0
##   number of records: 49
##   .. : RLum.Data.Curve : 49
##   .. .. : #1 TL (UVVIS) | #2 IRSL (UVVIS) | #3 IRSL (UVVIS) | #4 TL (UVVIS) | #5 IRSL (UVVIS) | #6 IRSL (UVVIS) | #7 IRSL (UVVIS)
##   .. .. : #8 TL (UVVIS) | #9 IRSL (UVVIS) | #10 IRSL (UVVIS) | #11 TL (UVVIS) | #12 IRSL (UVVIS) | #13 IRSL (UVVIS) | #14 IRSL (UVVIS)
## ... <remaining records truncated manually>

The function get_RLum() is very powerful and supports sophisticated subsetting of a RLum.Analysis objects. Further useful arguments are curveType and record.id. The latter one allows a subsetting by record id (e.g., record.id = 2 to select #2) and supports also negative subsetting (e.g., to remove only #2, type record.id = -2). To understand the meaning of the argument drop = FALSE, please call the function get_RLum() another time with drop = TRUE and see the difference in the R terminal. For all supported arguments see the manual of the function by typing ?get_RLum in the R terminal.

In our example, however, the dataset does not yet follow the SAR structure. The sequence comprises a hotbleach at the end of each cycle (record #7 in the terminal output example above). These curves are not wanted and have to be removed. This can be done using again the function get_RLum() with the argument record.id. Please note that by executing the following example the object temp_sel will be replaced.

temp_sel <- get_RLum(temp_sel, record.id = -seq(7,length(temp_sel[[1]]), by = 7), drop = FALSE)
temp_sel
## [[1]]
## 
##  [RLum.Analysis-class]
##   originator: read_XSYG2R()
##   protocol: pIRIR225
##   additional info elements:  0
##   number of records: 42
##   .. : RLum.Data.Curve : 42
##   .. .. : #1 TL (UVVIS) | #2 IRSL (UVVIS) | #3 IRSL (UVVIS) | #4 TL (UVVIS) | #5 IRSL (UVVIS) | #6 IRSL (UVVIS) | #7 TL (UVVIS)
##   .. .. : #8 IRSL (UVVIS) | #9 IRSL (UVVIS) | #10 TL (UVVIS) | #11 IRSL (UVVIS) | #12 IRSL (UVVIS) | #13 TL (UVVIS) | #14 IRSL (UVVIS)
## ... <remaining records truncated manually>

Using a negative subsetting, all hotbleach curves have been removed using the call -seq(7,length(temp_sel[[1]]), by = 7). Important is to understand that the function length() was called for the first list element of temp_sel, which contains the recorded curves for the first aliquot only. To see the differences type:

length(temp_sel)
## [1] 7
length(temp_sel[[1]])
## [1] 42

In other words, our measurement record has data from 7 aliquots and each aliquot consists of (at least) 42 records. We here further assume that the number of records is similar for each aliquot.

3 Analyse sequence

Now the object temp_sel only comprises TL and IRSL curves, and this data can be directly passed to the function analyse_pIRIRSequence():

results <- analyse_pIRIRSequence(
  object = temp_sel[[1]],
  signal.integral.min = 1,
  signal.integral.max = 10,
  background.integral.min = 800, 
  background.integral.max = 999,
  dose.points = c(0, 340, 680, 1020, 1360, 0, 340),
  verbose = FALSE)

The function expects the setting of some arguments, for details and meaning, please see ?analyse_pIRIRSequence. If the imported measurement data do not carry information on the dose.points, as it is the case in our example, these values have to be provided manually. Please note that the information needed for dose.points is something which was defined while writing the measurement sequence (your irradiation times).

The function output is a comprehensive plot scheme and a so-called RLum.Results object, which contains all relevant calculations from the analysis.

results
## 
##  [RLum.Results-class]
##   originator: analyse_pIRIRSequence()
##   data: 4
##       .. $data : data.frame
##   .. $LnLxTnTx.table : data.frame
##   .. $rejection.criteria : data.frame
##   .. $Formula : list
##   additional info elements:  14

The \(D_{e}\) values (here in seconds) can be seen by calling the $data element from the object results, which is a data.frame (and here limited to three columns):

results$data[,c("De", "De.Error", "RC.Status", "Signal")]
##           De De.Error RC.Status   Signal
## 1   803.5434    39.43        OK     IR50
## 2  1099.8584    31.16        OK pIRIR225
## 3   632.8027    30.91        OK     IR50
## 4   873.7713    33.79        OK pIRIR225
## 5  1166.2585    28.45        OK     IR50
## 6  1058.8393    21.06        OK pIRIR225
## 7   741.3041    27.75    FAILED     IR50
## 8  1049.1700    27.86        OK pIRIR225
## 9   873.9625    60.19        OK     IR50
## 10 1236.3436    51.34        OK pIRIR225
## 11  827.6762    37.46        OK     IR50
## 12 1095.2448    33.50        OK pIRIR225
## 13  714.1201    21.49        OK     IR50
## 14  807.3030    16.54        OK pIRIR225

The column RC.Status informs you about a failed rejection criterion, which one is not revealed, but the column provides a possibility for further subsetting. For a quick data processing this is, together with the plot output, usually enough information. However, to see all rejection criteria type results$rejection.criteria. To see all information type results$data without further information.

If you want to combine the two tables for further data processing, you can merge both tables by calling:

df <- merge(results$data, results$rejection.criteria, by = "UID")
head(df)
##                                   UID        De De.Error      D01 D01.ERROR D02
## 1 2019-12-04-04:17.0.0100606137420982 1058.8393    21.06 1191.583  50.97889  NA
## 2 2019-12-04-04:17.0.0100606137420982 1058.8393    21.06 1191.583  50.97889  NA
## 3 2019-12-04-04:17.0.0100606137420982 1058.8393    21.06 1191.583  50.97889  NA
## 4 2019-12-04-04:17.0.0100606137420982 1058.8393    21.06 1191.583  50.97889  NA
## 5 2019-12-04-04:17.0.0100606137420982 1058.8393    21.06 1191.583  50.97889  NA
## 6  2019-12-04-04:17.0.108055846765637  803.5434    39.43 1081.348 113.94030  NA
##   D02.ERROR     De.MC Fit RC.Status signal.range background.range
## 1        NA 1061.1464 EXP        OK       1 : 10        800 : 999
## 2        NA 1061.1464 EXP        OK       1 : 10        800 : 999
## 3        NA 1061.1464 EXP        OK       1 : 10        800 : 999
## 4        NA 1061.1464 EXP        OK       1 : 10        800 : 999
## 5        NA 1061.1464 EXP        OK       1 : 10        800 : 999
## 6        NA  804.5936 EXP        OK       1 : 10        800 : 999
##   signal.range.Tx background.range.Tx Signal.x                Criteria
## 1         NA : NA             NA : NA pIRIR225 Recycling ratio (R5/R1)
## 2         NA : NA             NA : NA pIRIR225     Recuperation rate 1
## 3         NA : NA             NA : NA pIRIR225          Testdose error
## 4         NA : NA             NA : NA pIRIR225        Palaeodose error
## 5         NA : NA             NA : NA pIRIR225    De > max. dose point
## 6         NA : NA             NA : NA     IR50 Recycling ratio (R5/R1)
##          Value Threshold Status Signal.y
## 1 1.045100e+00       0.1     OK pIRIR225
## 2 6.100000e-03       0.1     OK pIRIR225
## 3 8.671441e-03       0.1     OK pIRIR225
## 4 1.989000e-02       0.1     OK pIRIR225
## 5 1.058839e+03    1360.0     OK pIRIR225
## 6 9.099000e-01       0.1     OK     IR50

The result may appear confusing in a first instance, since, e.g. the column De appears to contain duplicated entries. But still, each row is unique and in sum contains unique information.

4 Plot results

To plot the pIRIR225 \(D_{e}\) values the following call can be used:

plot_KDE(
  data = subset(results$data, Signal == "pIRIR225")[, c("De", "De.Error")],
  xlab = expression(paste(D[e], " [s]")), 
  summary = c("n", "mean", "sd.abs")
  )

5 Compacted call

The above-listed steps can also be shortened to a concise R call using the so-called magrittr operator, which basically pipes the results from function to function. As a difference, the final plot is an Abanico plot (Dietze et al. 2016).

Please note that for this last example the arguments plot and verbose have been set to FALSE for most of the functions.

results <- read_XSYG2R("pIRIR_measurementData.xsyg", fastForward = TRUE, verbose = FALSE) %>%
  get_RLum(recordType = "UVVIS", drop = FALSE) %>%
  get_RLum(.,record.id = -seq(7,length(.[[1]]), by = 7), drop = FALSE) %>%
  analyse_pIRIRSequence(
    signal.integral.min = 1,
    signal.integral.max = 10,
    background.integral.min = 800, 
    background.integral.max = 999,
    dose.points = c(0, 340, 680, 1020, 1360, 0, 340), 
    verbose = FALSE, 
    plot = FALSE) %>%
  get_RLum() %>%
  subset(.,subset = Signal == "pIRIR225") %>%
  plot_AbanicoPlot(
  data = .[, c("De", "De.Error")],
  zlab = expression(paste(D[e], " [s]")), 
  summary = c("n", "mean", "sd.abs")
  )

# References {-}

Dietze, Michael, Sebastian Kreutzer, Christoph Burow, Margret C Fuchs, Manfred Fischer, and Christoph Schmidt. 2016. “The abanico plot: visualising chronometric data with individual standard errors.” Quaternary Geochronology 31: 12–18.

Fu, Xiao, Bo Li, and Sheng-Hua Li. 2012. “Testing a multi-step post-IR IRSL dating method using polymineral fine grains from Chinese loess.” Radiation Measurements 10: 8–15.

Murray, A S, and Ann G Wintle. 2000. “Luminescence dating of quartz using an improved single-aliquot regenerative-dose protocol.” Radiation Measurements 32 (1): 57–73.

Thomsen, K J, A S Murray, M Jain, and L Boetter-Jensen. 2008. “Laboratory fading rates of various luminescence signals from feldspar-rich sediment extracts.” Radiation Measurements 43 (9-10): 1474–86.

Luminescence/inst/extdata/0000755000176200001440000000000013417222471015276 5ustar liggesusersLuminescence/inst/extdata/STRB87_1_unbleached.txt0000644000176200001440000000103313231137116021352 0ustar liggesusers# -------------------------------------------------------------------------------------- # CURVE FITTING TEST DATASET # Berger, G.W., Huntley, D.J., 1989. Test data for exponential fits. Ancient TL 7, 43-46. # # SAMPLE: STRB87 - 1 (unbbleached) # -------------------------------------------------------------------------------------- 0 20522.2 0 19373.6 0 21040.6 0 18899.1 1 50382.5 1 48570.2 1 49529.5 2 77706.6 2 75291.3 2 74563.8 4 111547.5 4 113899.1 4 109461.1 8 164564.9 8 151504.2 8 168042.1 16 204726.5 16 201964.3 16 193457.6 Luminescence/inst/extdata/QNL84_2_unbleached.txt0000644000176200001440000000075113231137116021236 0ustar liggesusers# -------------------------------------------------------------------------------------- # CURVE FITTING TEST DATASET # Berger, G.W., Huntley, D.J., 1989. Test data for exponential fits. Ancient TL 7, 43-46. # # SAMPLE: QNL84 - 2 (Unbleached) # -------------------------------------------------------------------------------------- 0 38671 0 40646 0 38149 0 35836 120 65931 120 67887 120 66133 240 82496 240 86708 240 86580 480 110978 480 113807 480 114192 480 109652 960 130373 960 137789Luminescence/inst/extdata/QNL84_2_bleached.txt0000644000176200001440000000070513231137116020672 0ustar liggesusers# -------------------------------------------------------------------------------------- # CURVE FITTING TEST DATASET # Berger, G.W., Huntley, D.J., 1989. Test data for exponential fits. Ancient TL 7, 43-46. 
# # SAMPLE: QNL84 - 2 (bleached) # -------------------------------------------------------------------------------------- 0 20766 0 21393 0 22493 120 31290 120 33779 240 43221 240 43450 240 41427 480 51804 480 59555 480 54013 960 75748 960 76613Luminescence/inst/extdata/STRB87_1_bleached.txt0000644000176200001440000000076013231137116021015 0ustar liggesusers# -------------------------------------------------------------------------------------- # CURVE FITTING TEST DATASET # Berger, G.W., Huntley, D.J., 1989. Test data for exponential fits. Ancient TL 7, 43-46. # # SAMPLE: STRB87 - 1 (bleached) # -------------------------------------------------------------------------------------- 0 11814.6 0 11587.8 0 11708.6 1 26645.2 1 26445.2 1 26368.6 2 41487.1 2 39125.1 2 40582.5 4 61532.1 4 57023.6 8 93015.8 8 87907.7 8 87655.2 16 107618.3 16 110394.02 Luminescence/inst/extdata/XSYG_file.xsyg0000644000176200001440000016250313232574234020014 0ustar liggesusers 0.1,6;0.2,6;0.3,5;0.4,5;0.5,3;0.6,5;0.7,0;0.8,2;0.9,10;1,3;1.1,11;1.2,7;1.3,4;1.4,9;1.5,6;1.6,7;1.7,6;1.8,3;1.9,5;2,5;2.1,0;2.2,7;2.3,5;2.4,11;2.5,5;2.6,9;2.7,10;2.8,6;2.9,6;3,1;3.1,7;3.2,5;3.3,4;3.4,1;3.5,4;3.6,3;3.7,5;3.8,7;3.9,3;4,8;4.1,3;4.2,1;4.3,6;4.4,10;4.5,3;4.6,4;4.7,4;4.8,8;4.9,4;5,1;5.1,11;5.2,5;5.3,3;5.4,1;5.5,2;5.6,4;5.7,4;5.8,4;5.9,5;6,4;6.1,6;6.2,3;6.3,3;6.4,2;6.5,8;6.6,7;6.7,4;6.8,3;6.9,6;7,5;7.1,2;7.2,5;7.3,3;7.4,3;7.5,3;7.6,9;7.7,7;7.8,7;7.9,3;8,6;8.1,11;8.2,6;8.3,5;8.4,8;8.5,2;8.6,3;8.7,7;8.8,2;8.9,2;9,4;9.1,6;9.2,6;9.3,2;9.4,5;9.5,9;9.6,6;9.7,7;9.8,3;9.9,4;10,6;10.1,6;10.2,13;10.3,7;10.4,8;10.5,7;10.6,4;10.7,5;10.8,9;10.9,4;11,8;11.1,3;11.2,6;11.3,2;11.4,8;11.5,3;11.6,8;11.7,7;11.8,4;11.9,5;12,5;12.1,5;12.2,6;12.3,4;12.4,1;12.5,5;12.6,1;12.7,3;12.8,4;12.9,12;13,2;13.1,4;13.2,13;13.3,1;13.4,5;13.5,4;13.6,7;13.7,6;13.8,6;13.9,3;14,3;14.1,6;14.2,9;14.3,5;14.4,7;14.5,2;14.6,4;14.7,1;14.8,3;14.9,9;15,8;15.1,4;15.2,2;15.3,8;15.4,4;15.5,7;15.6,9;15.7,3;15.8,7;15.9,4;16,5;16.1,1;16.2,4;16.3,8;16.4,3;16.5,6;1
6.6,2;16.7,4;16.8,3;16.9,4;17,7;17.1,2;17.2,3;17.3,4;17.4,3;17.5,9;17.6,5;17.7,2;17.8,1;17.9,14;18,2;18.1,1;18.2,4;18.3,6;18.4,6;18.5,11;18.6,11;18.7,10;18.8,4;18.9,5;19,8;19.1,0;19.2,2;19.3,4;19.4,3;19.5,6;19.6,2;19.7,9;19.8,2;19.9,2;20,5;20.1,7;20.2,1;20.3,7;20.4,4;20.5,4;20.6,4;20.7,6;20.8,3;20.9,2;21,4;21.1,4;21.2,6;21.3,6;21.4,5;21.5,5;21.6,2;21.7,10;21.8,4;21.9,7;22,8;22.1,6;22.2,6;22.3,7;22.4,10;22.5,4;22.6,7;22.7,4;22.8,5;22.9,8;23,8;23.1,2;23.2,2;23.3,2;23.4,11;23.5,6;23.6,3;23.7,4;23.8,4;23.9,3;24,5;24.1,10;24.2,4;24.3,2;24.4,3;24.5,6;24.6,6;24.7,6;24.8,10;24.9,3;25,5;25.1,10;25.2,8;25.3,4;25.4,5;25.5,7;25.6,5;25.7,8;25.8,7;25.9,1;26,7;26.1,4;26.2,7;26.3,5;26.4,8;26.5,13;26.6,4;26.7,8;26.8,4;26.9,7;27,6;27.1,9;27.2,2;27.3,8;27.4,5;27.5,6;27.6,10;27.7,3;27.8,10;27.9,12;28,10;28.1,8;28.2,10;28.3,3;28.4,9;28.5,13;28.6,7;28.7,5;28.8,12;28.9,5;29,7;29.1,9;29.2,9;29.3,9;29.4,12;29.5,7;29.6,8;29.7,10;29.8,12;29.9,6;30,13;30.1,11;30.2,7;30.3,14;30.4,18;30.5,18;30.6,18;30.7,20;30.8,19;30.9,19;31,17;31.1,11;31.2,20;31.3,17;31.4,14;31.5,25;31.6,23;31.7,21;31.8,20;31.9,16;32,20;32.1,25;32.2,28;32.3,30;32.4,25;32.5,31;32.6,23;32.7,19;32.8,25;32.9,30;33,36;33.1,31;33.2,29;33.3,30;33.4,40;33.5,35;33.6,34;33.7,47;33.8,43;33.9,45;34,36;34.1,56;34.2,56;34.3,63;34.4,61;34.5,47;34.6,61;34.7,66;34.8,76;34.9,73;35,80;35.1,79;35.2,85;35.3,81;35.4,79;35.5,86;35.6,84;35.7,97;35.8,91;35.9,103;36,112;36.1,112;36.2,105;36.3,107;36.4,128;36.5,117;36.6,139;36.7,146;36.8,156;36.9,151;37,145;37.1,136;37.2,137;37.3,185;37.4,182;37.5,173;37.6,189;37.7,197;37.8,223;37.9,209;38,208;38.1,257;38.2,232;38.3,218;38.4,279;38.5,263;38.6,254;38.7,290;38.8,275;38.9,289;39,330;39.1,328;39.2,309;39.3,353;39.4,361;39.5,361;39.6,353;39.7,338;39.8,413;39.9,400;40,417;40.1,431;40.2,473;40.3,482;40.4,446;40.5,470;40.6,502;40.7,526;40.8,535;40.9,508;41,529;41.1,498;41.2,572;41.3,552;41.4,581;41.5,593;41.6,650;41.7,668;41.8,613;41.9,653;42,680;42.1,659;42.2,729;42.3,715;42.4,798;42.5,765;42.6,742;42.7,773;42
.8,852;42.9,834;43,844;43.1,918;43.2,886;43.3,885;43.4,977;43.5,952;43.6,1013;43.7,967;43.8,1043;43.9,1007;44,1005;44.1,1007;44.2,1100;44.3,1094;44.4,1117;44.5,1134;44.6,1220;44.7,1236;44.8,1188;44.9,1254;45,1242;45.1,1299;45.2,1334;45.3,1352;45.4,1406;45.5,1339;45.6,1413;45.7,1439;45.8,1504;45.9,1488;46,1579;46.1,1655;46.2,1643;46.3,1687;46.4,1628;46.5,1725;46.6,1743;46.7,1800;46.8,1831;46.9,1871;47,1870;47.1,1905;47.2,1911;47.3,2007;47.4,2037;47.5,2087;47.6,2133;47.7,2084;47.8,2157;47.9,2195;48,2256;48.1,2190;48.2,2360;48.3,2348;48.4,2260;48.5,2329;48.6,2317;48.7,2340;48.8,2346;48.9,2402;49,2385;49.1,2400;49.2,2419;49.3,2413;49.4,2370;49.5,2468;49.6,2404;49.7,2472;49.8,2405;49.9,2357;50,2423;50.1,2418;50.2,2535;50.3,2503;50.4,2436;50.5,2350;50.6,2484;50.7,2358;50.8,2454;50.9,2286;51,2448;51.1,2422;51.2,2455;51.3,2377;51.4,2305;51.5,2317;51.6,2366;51.7,2329;51.8,2334;51.9,2310;52,2321;52.1,2288;52.2,2250;52.3,2235;52.4,2216;52.5,2221;52.6,2176;52.7,2198;52.8,2123;52.9,2174;53,2219;53.1,2176;53.2,2146;53.3,2113;53.4,2153;53.5,2098;53.6,2093;53.7,2034;53.8,2087;53.9,2081;54,2004;54.1,1998;54.2,1961;54.3,1935;54.4,1895;54.5,1904;54.6,1838;54.7,1843;54.8,1852;54.9,1856;55,1822;55.1,1857;55.2,1766;55.3,1728;55.4,1748;55.5,1777;55.6,1721;55.7,1770;55.8,1669;55.9,1707;56,1685;56.1,1651;56.2,1654;56.3,1652;56.4,1576;56.5,1594;56.6,1566;56.7,1626;56.8,1557;56.9,1592 0,25;47,260;57,260;77,60;197,60 
0.2,27.0804882049561;0.4,27.0798873901367;0.6,27.0809726715088;0.8,27.0831527709961;1,27.079402923584;1.2,27.0986099243164;1.4,27.1498222351074;1.6,27.4493923187256;1.8,27.7364311218262;2,28.1406993865967;2.2,29.3179683685303;2.4,30.0697174072266;2.6,30.8972682952881;2.8,32.6637191772461;3,33.5518417358398;3.2,34.4185752868652;3.4,36.0412330627441;3.6,36.7972145080566;3.8,37.5352096557617;4,38.9637413024902;4.2,39.6741523742676;4.4,40.3962631225586;4.6,41.8783111572266;4.8,42.6388664245605;5,43.4063034057617;5.2,44.9716835021973;5.4,45.7557830810547;5.6,46.534740447998;5.8,48.0852394104004;6,48.8538589477539;6.2,49.6121978759766;6.4,51.1420059204102;6.6,51.8980140686035;6.8,52.6662635803223;7,54.2057876586914;7.2,54.9717025756836;7.4,55.7467079162598;7.6,57.299144744873;7.8,58.0755920410156;8,58.8502769470215;8.2,60.3935203552246;8.4,61.1761207580566;8.6,61.9391098022461;8.8,63.4827728271484;9,64.2569732666016;9.2,65.0282821655273;9.4,66.5849609375;9.6,67.3621063232422;9.8,68.1395645141602;10,69.6889266967773;10.2,70.4668197631836;10.4,71.241340637207;10.6,72.7888565063477;10.8,73.569206237793;11,74.3448333740234;11.2,75.892333984375;11.4,76.6711959838867;11.6,77.4519729614258;11.8,79.0002975463867;12,79.7770538330078;12.2,80.5531387329102;12.4,82.1066741943359;12.6,82.8803253173828;12.8,83.6636352539063;13,85.2125549316406;13.2,85.9880065917969;13.4,86.7693023681641;13.6,88.3218078613281;13.8,89.0941390991211;14,89.8683547973633;14.2,91.4216079711914;14.4,92.1995391845703;14.6,92.979362487793;14.8,94.5301284790039;15,95.3131713867188;15.2,96.0798416137695;15.4,97.6346435546875;15.6,98.4167404174805;15.8,99.1912155151367;16,100.738433837891;16.2,101.516716003418;16.4,102.289123535156;16.6,103.838607788086;16.8,104.619148254395;17,105.393913269043;17.2,106.942420959473;17.4,107.717399597168;17.6,108.493896484375;17.8,110.046989440918;18,110.817626953125;18.2,111.597969055176;18.4,113.14608001709;18.6,113.917327880859;18.8,114.693809509277;19,116.238647460938;19.2,117
.017677307129;19.4,117.788551330566;19.6,119.33903503418;19.8,120.111328125;20,120.892349243164;20.2,122.442260742188;20.4,123.205062866211;20.6,123.975341796875;20.8,125.522903442383;21,126.289619445801;21.2,127.067794799805;21.4,128.617599487305;21.6,129.396072387695;21.8,130.169692993164;22,131.714385986328;22.2,132.482055664063;22.4,133.251129150391;22.6,134.79736328125;22.8,135.571197509766;23,136.341293334961;23.2,137.880615234375;23.4,138.646469116211;23.6,139.423828125;23.8,140.970932006836;24,141.745162963867;24.2,142.518051147461;24.4,144.061538696289;24.6,144.825424194336;24.8,145.598175048828;25,147.139190673828;25.2,147.901931762695;25.4,148.677780151367;25.6,150.212387084961;25.8,150.982879638672;26,151.75358581543;26.2,153.292510986328;26.4,154.066223144531;26.6,154.832290649414;26.8,156.379409790039;27,157.139953613281;27.2,157.91096496582;27.4,159.454376220703;27.6,160.217498779297;27.8,160.987350463867;28,162.522369384766;28.2,163.289428710938;28.4,164.056579589844;28.6,165.584976196289;28.8,166.359237670898;29,167.126953125;29.2,168.658935546875;29.4,169.438171386719;29.6,170.199920654297;29.8,171.738403320313;30,172.50244140625;30.2,173.267990112305;30.4,174.79801940918;30.6,175.5595703125;30.8,176.326644897461;31,177.857833862305;31.2,178.626693725586;31.4,179.398071289063;31.6,180.936935424805;31.8,181.697387695313;32,182.463363647461;32.2,183.990783691406;32.4,184.752243041992;32.6,185.516082763672;32.8,187.049896240234;33,187.808700561523;33.2,188.581573486328;33.4,190.114852905273;33.6,190.872268676758;33.8,191.641326904297;34,193.167633056641;34.2,193.924880981445;34.4,194.690124511719;34.6,196.223922729492;34.8,196.992218017578;35,197.74934387207;35.2,199.275482177734;35.4,200.034881591797;35.6,200.803588867188;35.8,202.325668334961;36,203.088180541992;36.2,203.857315063477;36.4,205.385589599609;36.6,206.148376464844;36.8,206.910629272461;37,208.424377441406;37.2,209.183807373047;37.4,209.955673217773;37.6,211.479721069336;37.8,212.2446289
0625;38,213.007949829102;38.2,214.533065795898;38.4,215.2900390625;38.6,216.052368164063;38.8,217.570816040039;39,218.326766967773;39.2,219.090530395508;39.4,220.613418579102;39.6,221.377105712891;39.8,222.141738891602;40,223.659576416016;40.2,224.421234130859;40.4,225.183578491211;40.6,226.702407836914;40.8,227.457229614258;41,228.221450805664;41.2,229.746429443359;41.4,230.503631591797;41.6,231.264053344727;41.8,232.785171508789;42,233.552597045898;42.2,234.305999755859;42.4,235.822235107422;42.6,236.583145141602;42.8,237.334365844727;43,238.853652954102;43.2,239.620758056641;43.4,240.379119873047;43.6,241.898056030273;43.8,242.660369873047;44,243.413619995117;44.2,244.93571472168;44.4,245.695297241211;44.6,246.450881958008;44.8,247.961395263672;45,248.722198486328;45.2,249.476806640625;45.4,250.992065429688;45.6,251.753646850586;45.8,252.515777587891;46,254.033126831055;46.2,254.793487548828;46.4,255.552825927734;46.6,257.073181152344;46.8,257.824768066406;47,258.58056640625;47.2,260.091033935547;47.4,260.847320556641;47.6,261.493530273438;47.8,262.138610839844;48,262.115509033203;48.2,261.912475585938;48.4,261.2255859375;48.6,260.858612060547;48.8,260.529357910156;49,260.125823974609;49.2,260.076873779297;49.4,260.136444091797;49.6,260.468597412109;49.8,260.676849365234;50,260.878051757813;50.2,261.156524658203;50.4,261.207946777344;50.6,261.216674804688;50.8,261.105773925781;51,261.019134521484;51.2,260.9423828125;51.4,260.819061279297;51.6,260.793029785156;51.8,260.791900634766;52,260.858612060547;52.2,260.905822753906;52.4,260.957702636719;52.6,261.050323486328;52.8,261.079284667969;53,261.092193603516;53.2,261.094604492188;53.4,261.093994140625;53.6,261.075714111328;53.8,261.056945800781;54,261.048553466797;54.2,261.042602539063;54.4,261.053253173828;54.6,261.062683105469;54.8,261.072814941406;55,261.088165283203;55.2,261.101715087891;55.4,261.113525390625;55.6,261.12939453125;55.8,261.126953125;56,261.135833740234;56.2,261.135314941406;56.4,261.128875732422
;56.6,261.134704589844;56.8,261.128875732422;57,261.12939453125;57.2,261.134704589844;57.4,261.099945068359;57.6,260.892761230469;57.8,260.446807861328;58,259.040069580078;58.2,258.224456787109;58.4,256.755523681641;58.6,253.193466186523;58.8,251.453521728516;59,249.752563476563;59.2,246.430221557617;59.4,244.806564331055;59.6,243.209350585938;59.8,240.094573974609;60,238.556488037109;60.2,237.046081542969;60.4,234.089981079102;60.6,232.649429321289;60.8,231.218368530273;61,228.399658203125;61.2,227.032424926758;61.4,225.676895141602;61.6,223.024215698242;61.8,221.716033935547;62,220.423355102539;62.2,217.906204223633;62.4,216.662475585938;62.6,215.430816650391;62.8,212.999862670898;63,211.818008422852;63.2,210.635437011719;63.4,208.332061767578;63.6,207.189147949219;63.8,206.061294555664;64,203.851501464844;64.2,202.75505065918;64.4,201.680480957031;64.6,199.544631958008;64.8,198.490310668945;65,197.442199707031;65.2,195.382476806641;65.4,194.368988037109;65.6,193.359420776367;65.8,191.373626708984;66,190.388824462891;66.2,189.414733886719;66.4,187.494064331055;66.6,186.543563842773;66.8,185.606567382813;67,183.743392944336;67.2,182.82405090332;67.4,181.912994384766;67.6,180.105880737305;67.8,179.215347290039;68,178.338424682617;68.2,176.585922241211;68.4,175.719039916992;68.6,174.863342285156;68.8,173.159088134766;69,172.325500488281;69.2,171.495529174805;69.4,169.844390869141;69.6,169.031860351563;69.8,168.223068237305;70,166.627349853516;70.2,165.827835083008;70.4,165.044616699219;70.6,163.475372314453;70.8,162.702651977539;71,161.936935424805;71.2,160.411987304688;71.4,159.660614013672;71.6,158.900985717773;71.8,157.422973632813;72,156.683319091797;72.2,155.945175170898;72.4,154.504684448242;72.6,153.785354614258;72.8,153.069152832031;73,151.66162109375;73.2,150.967590332031;73.4,150.26628112793;73.6,148.895736694336;73.8,148.216705322266;74,147.534896850586;74.2,146.172744750977;74.4,145.513717651367;74.6,144.855392456055;74.8,143.533325195313;75,142.889160156
25;75.2,142.232864379883;75.4,140.955703735352;75.6,140.318008422852;75.8,139.690628051758;76,138.439239501953;76.2,137.815124511719;76.4,137.202072143555;76.6,135.979049682617;76.8,135.376571655273;77,134.767471313477;77.2,133.564407348633;77.4,132.979965209961;77.6,132.38737487793;77.8,131.223968505859;78,130.640701293945;78.2,130.053131103516;78.4,128.916748046875;78.6,128.349349975586;78.8,127.783264160156;79,126.668235778809;79.2,126.114944458008;79.4,125.563949584961;79.6,124.461952209473;79.8,123.927589416504;80,123.386833190918;80.2,122.321189880371;80.4,121.794692993164;80.6,121.26774597168;80.8,120.214950561523;81,119.701385498047;81.2,119.188995361328;81.4,118.16577911377;81.6,117.657684326172;81.8,117.153617858887;82,116.160949707031;82.2,115.662902832031;82.4,115.173240661621;82.6,114.19718170166;82.8,113.712287902832;83,113.228805541992;83.2,112.279266357422;83.4,111.799919128418;83.6,111.335151672363;83.8,110.401062011719;84,109.93384552002;84.2,109.475608825684;84.4,108.56623840332;84.6,108.111137390137;84.8,107.660675048828;85,106.757667541504;85.2,106.318016052246;85.4,105.877777099609;85.6,105.000068664551;85.8,104.577430725098;86,104.13655090332;86.2,103.279029846191;86.4,102.856719970703;86.6,102.432334899902;86.8,101.588821411133;87,101.180786132813;87.2,100.758453369141;87.4,99.9411315917969;87.6,99.5317993164063;87.8,99.126823425293;88,98.3296432495117;88.2,97.9283752441406;88.4,97.5387878417969;88.6,96.7560195922852;88.8,96.3656845092773;89,95.9857482910156;89.2,95.2189788818359;89.4,94.8394241333008;89.6,94.453857421875;89.8,93.7114715576172;90,93.3393478393555;90.2,92.9710845947266;90.4,92.2304916381836;90.6,91.8669128417969;90.8,91.5017547607422;91,90.786979675293;91.2,90.4297180175781;91.4,90.0780181884766;91.6,89.3754730224609;91.8,89.0322799682617;92,88.686393737793;92.2,87.9870910644531;92.4,87.6550979614258;92.6,87.3139038085938;92.8,86.6430816650391;93,86.3086242675781;93.2,85.9720001220703;93.4,85.3243255615234;93.6,84.992835998535
2;93.8,84.6707305908203;94,84.0191116333008;94.2,83.7049789428711;94.4,83.3874282836914;94.6,82.7576904296875;94.8,82.4469833374023;95,82.1352081298828;95.2,81.5245208740234;95.4,81.2173767089844;95.6,80.9107437133789;95.8,80.3119659423828;96,80.0120468139648;96.2,79.7183074951172;96.4,79.1320114135742;96.6,78.8399353027344;96.8,78.5524520874023;97,77.9741058349609;97.2,77.6871719360352;97.4,77.4086151123047;97.6,76.8514633178711;97.8,76.5680618286133;98,76.2945175170898;98.2,75.7460098266602;98.4,75.472038269043;98.6,75.2009201049805;98.8,74.6652297973633;99,74.3968658447266;99.2,74.132453918457;99.4,73.6074829101563;99.6,73.3448486328125;99.8,73.0854263305664;100,72.5733871459961;100.2,72.3168411254883;100.4,72.0712356567383;100.6,71.5643768310547;100.8,71.3123168945313;101,71.0647048950195;101.2,70.5778656005859;101.4,70.3347015380859;101.6,70.0816421508789;101.8,69.6113662719727;102,69.3711090087891;102.2,69.1319580078125;102.4,68.6619338989258;102.6,68.4288482666016;102.8,68.1968841552734;103,67.7358093261719;103.2,67.507698059082;103.4,67.2813034057617;103.6,66.8351745605469;103.8,66.6088256835938;104,66.3952331542969;104.2,65.9519195556641;104.4,65.7383651733398;104.6,65.5165023803711;104.8,65.0887298583984;105,64.876335144043 
30.1,2263;30.2,2328;30.3,2229;30.4,2069;30.5,2142;30.6,1923;30.7,1812;30.8,1858;30.9,1698;31,1660;31.1,1584;31.2,1537;31.3,1457;31.4,1388;31.5,1322;31.6,1331;31.7,1260;31.8,1208;31.9,1146;32,1103;32.1,1123;32.2,1033;32.3,1004;32.4,1091;32.5,953;32.6,961;32.7,876;32.8,877;32.9,885;33,859;33.1,775;33.2,796;33.3,731;33.4,735;33.5,740;33.6,733;33.7,710;33.8,681;33.9,670;34,608;34.1,625;34.2,630;34.3,617;34.4,584;34.5,567;34.6,530;34.7,570;34.8,553;34.9,476;35,503;35.1,517;35.2,513;35.3,472;35.4,470;35.5,477;35.6,418;35.7,491;35.8,461;35.9,435;36,404;36.1,424;36.2,391;36.3,423;36.4,410;36.5,426;36.6,389;36.7,358;36.8,372;36.9,354;37,406;37.1,366;37.2,374;37.3,358;37.4,338;37.5,365;37.6,355;37.7,372;37.8,379;37.9,358;38,338;38.1,363;38.2,371;38.3,369;38.4,323;38.5,324;38.6,321;38.7,360;38.8,321;38.9,297;39,295;39.1,336;39.2,293;39.3,283;39.4,301;39.5,304;39.6,287;39.7,295;39.8,298;39.9,332;40,305;40.1,284;40.2,319;40.3,292;40.4,274;40.5,289;40.6,286;40.7,254;40.8,288;40.9,258;41,271;41.1,285;41.2,283;41.3,299;41.4,283;41.5,316;41.6,266;41.7,286;41.8,282;41.9,292;42,269;42.1,269;42.2,277;42.3,288;42.4,312;42.5,247;42.6,260;42.7,273;42.8,265;42.9,256;43,272;43.1,259;43.2,231;43.3,287;43.4,268;43.5,247;43.6,267;43.7,258;43.8,257;43.9,279;44,277;44.1,278;44.2,283;44.3,267;44.4,249;44.5,247;44.6,253;44.7,256;44.8,256;44.9,281;45,246;45.1,232;45.2,221;45.3,263;45.4,233;45.5,251;45.6,282;45.7,257;45.8,245;45.9,228;46,263;46.1,240;46.2,263;46.3,236;46.4,269;46.5,261;46.6,252;46.7,251;46.8,224;46.9,210;47,251;47.1,252;47.2,250;47.3,255;47.4,254;47.5,228;47.6,241;47.7,266;47.8,264;47.9,257;48,237;48.1,222;48.2,243;48.3,248;48.4,221;48.5,263;48.6,233;48.7,239;48.8,252;48.9,232;49,250;49.1,235;49.2,230;49.3,236;49.4,255;49.5,248;49.6,212;49.7,234;49.8,238;49.9,265;50,256;50.1,245;50.2,241;50.3,237;50.4,244;50.5,242;50.6,228;50.7,225;50.8,214;50.9,240;51,248;51.1,255;51.2,231;51.3,202;51.4,235;51.5,220;51.6,239;51.7,253;51.8,281;51.9,247;52,215;52.1,230;52.2,214;52.3,254;52.4,220;52.5
,256;52.6,259;52.7,246;52.8,234;52.9,218;53,204;53.1,233;53.2,227;53.3,236;53.4,229;53.5,225;53.6,222;53.7,236;53.8,247;53.9,228;54,234;54.1,256;54.2,265;54.3,240;54.4,240;54.5,230;54.6,240;54.7,236;54.8,229;54.9,200;55,219;55.1,220;55.2,221;55.3,212;55.4,224;55.5,235;55.6,220;55.7,242;55.8,247;55.9,222;56,228;56.1,239;56.2,226;56.3,213;56.4,218;56.5,247;56.6,236;56.7,227;56.8,235;56.9,238;57,192;57.1,247;57.2,228;57.3,220;57.4,243;57.5,225;57.6,226;57.7,235;57.8,200;57.9,237;58,200;58.1,223;58.2,227;58.3,214;58.4,193;58.5,221;58.6,225;58.7,226;58.8,238;58.9,267;59,236;59.1,233;59.2,222;59.3,210;59.4,226;59.5,209;59.6,220;59.7,215;59.8,239;59.9,224;60,259;60.1,236;60.2,240;60.3,226;60.4,237;60.5,213;60.6,218;60.7,234;60.8,220;60.9,219;61,201;61.1,235;61.2,196;61.3,202;61.4,216;61.5,198;61.6,229;61.7,249;61.8,211;61.9,225;62,224;62.1,204;62.2,214;62.3,213;62.4,209;62.5,201;62.6,212;62.7,219;62.8,216;62.9,191;63,245;63.1,209;63.2,244;63.3,229;63.4,224;63.5,211;63.6,226;63.7,227;63.8,221;63.9,228;64,229;64.1,242;64.2,218;64.3,198;64.4,207;64.5,216;64.6,209;64.7,239;64.8,241;64.9,220;65,196;65.1,211;65.2,239;65.3,226;65.4,219;65.5,238;65.6,226;65.7,246;65.8,231;65.9,253;66,220;66.1,209;66.2,206;66.3,209;66.4,215;66.5,220;66.6,220;66.7,216;66.8,242;66.9,211;67,224;67.1,232;67.2,226;67.3,239;67.4,232;67.5,222;67.6,238;67.7,235;67.8,218;67.9,226;68,223;68.1,196;68.2,223;68.3,225;68.4,237;68.5,236;68.6,215;68.7,203;68.8,229;68.9,223;69,214;69.1,213;69.2,207;69.3,229;69.4,222;69.5,210;69.6,226;69.7,224;69.8,222;69.9,241;70,228;70.1,220;70.2,224;70.3,226;70.4,229;70.5,213;70.6,207;70.7,236;70.8,218;70.9,213;71,206;71.1,212;71.2,219;71.3,213;71.4,215;71.5,207;71.6,231;71.7,201;71.8,238;71.9,213;72,232;72.1,223;72.2,242;72.3,185;72.4,229;72.5,212;72.6,228;72.7,183;72.8,216;72.9,224;73,200;73.1,229;73.2,193;73.3,233;73.4,206;73.5,218;73.6,194;73.7,217;73.8,226;73.9,214;74,198;74.1,228;74.2,226;74.3,227;74.4,217;74.5,230;74.6,212;74.7,222;74.8,230;74.9,218;75,205;75.1,239;75.2,20
1;75.3,218;75.4,215;75.5,208;75.6,198;75.7,208;75.8,233;75.9,199;76,231;76.1,226;76.2,191;76.3,219;76.4,193;76.5,223;76.6,216;76.7,236;76.8,230;76.9,217;77,223;77.1,211;77.2,247;77.3,204;77.4,232;77.5,260;77.6,237;77.7,219;77.8,215;77.9,225;78,224;78.1,225;78.2,226;78.3,228;78.4,243;78.5,235;78.6,190;78.7,221;78.8,191;78.9,237;79,191;79.1,229;79.2,213;79.3,214;79.4,213;79.5,245;79.6,218;79.7,245;79.8,220;79.9,214;80,222;80.1,201;80.2,231;80.3,243;80.4,230;80.5,205;80.6,245;80.7,220;80.8,215;80.9,220;81,209;81.1,242;81.2,240;81.3,212;81.4,196;81.5,212;81.6,221;81.7,202;81.8,247;81.9,211;82,237;82.1,212;82.2,208;82.3,211;82.4,219;82.5,205;82.6,223;82.7,224;82.8,209;82.9,218;83,223;83.1,228;83.2,250;83.3,228;83.4,211;83.5,210;83.6,216;83.7,183;83.8,219;83.9,203;84,205;84.1,210;84.2,191;84.3,206;84.4,197;84.5,212;84.6,242;84.7,211;84.8,218;84.9,223;85,209;85.1,209;85.2,200;85.3,190;85.4,234;85.5,200;85.6,198;85.7,227;85.8,202;85.9,229;86,213;86.1,248;86.2,219;86.3,213;86.4,213;86.5,197;86.6,221;86.7,223;86.8,220;86.9,216;87,207;87.1,230;87.2,200;87.3,232;87.4,208;87.5,222;87.6,170;87.7,231;87.8,202;87.9,225;88,238;88.1,213;88.2,192;88.3,212;88.4,206;88.5,225;88.6,202;88.7,247;88.8,204;88.9,206;89,189;89.1,224;89.2,217;89.3,227;89.4,222;89.5,214;89.6,196;89.7,250;89.8,214;89.9,216;90,214;90.1,231;90.2,206;90.3,223;90.4,221;90.5,200;90.6,200;90.7,243;90.8,199;90.9,215;91,218;91.1,187;91.2,200;91.3,244;91.4,213;91.5,213;91.6,201;91.7,203;91.8,195;91.9,224;92,222;92.1,197;92.2,228;92.3,220;92.4,210;92.5,185;92.6,202;92.7,188;92.8,233;92.9,220;93,204;93.1,237;93.2,241;93.3,225;93.4,233;93.5,223;93.6,220;93.7,212;93.8,196;93.9,228;94,198;94.1,191;94.2,189;94.3,184;94.4,224;94.5,218;94.6,224;94.7,198;94.8,216;94.9,193;95,204;95.1,223;95.2,197;95.3,223;95.4,186;95.5,205;95.6,221;95.7,240;95.8,173;95.9,240;96,230;96.1,201;96.2,219;96.3,234;96.4,222;96.5,235;96.6,221;96.7,214;96.8,207;96.9,196;97,206;97.1,202;97.2,200;97.3,194;97.4,195;97.5,232;97.6,207;97.7,224;97.8,226;97.9,221
;98,198;98.1,209;98.2,217;98.3,192;98.4,198;98.5,198;98.6,214;98.7,208;98.8,215;98.9,201;99,219;99.1,216;99.2,211;99.3,215;99.4,207;99.5,212;99.6,255;99.7,223;99.8,220;99.9,228;100,226;100.1,228;100.2,183;100.3,204;100.4,222;100.5,216;100.6,207;100.7,228;100.8,231;100.9,213;101,217;101.1,199;101.2,206;101.3,231;101.4,208;101.5,210;101.6,197;101.7,225;101.8,197;101.9,219;102,212;102.1,223;102.2,212;102.3,219;102.4,218;102.5,210;102.6,203;102.7,215;102.8,217;102.9,217;103,224;103.1,227;103.2,210;103.3,202;103.4,240;103.5,208;103.6,206;103.7,237;103.8,217;103.9,219;104,216;104.1,216;104.2,210;104.3,229;104.4,201;104.5,216;104.6,212;104.7,200;104.8,191;104.9,203;105,201;105.1,193;105.2,214;105.3,208;105.4,217;105.5,191;105.6,205;105.7,230;105.8,201;105.9,205;106,204;106.1,233;106.2,202;106.3,230;106.4,215;106.5,206;106.6,232;106.7,214;106.8,196;106.9,200;107,238;107.1,184;107.2,225;107.3,221;107.4,198;107.5,191;107.6,225;107.7,218;107.8,222;107.9,237;108,226;108.1,199;108.2,206;108.3,207;108.4,210;108.5,194;108.6,203;108.7,206;108.8,187;108.9,201;109,238;109.1,210;109.2,215;109.3,179;109.4,223;109.5,198;109.6,204;109.7,230;109.8,214;109.9,205;110,201;110.1,200;110.2,214;110.3,225;110.4,206;110.5,217;110.6,199;110.7,210;110.8,205;110.9,190;111,219;111.1,213;111.2,183;111.3,224;111.4,195;111.5,191;111.6,207;111.7,197;111.8,195;111.9,214;112,198;112.1,232;112.2,232;112.3,215;112.4,219;112.5,217;112.6,214;112.7,194;112.8,194;112.9,207;113,208;113.1,202;113.2,206;113.3,200;113.4,216;113.5,219;113.6,214;113.7,193;113.8,229;113.9,205;114,176;114.1,183;114.2,199;114.3,219;114.4,210;114.5,209;114.6,216;114.7,210;114.8,212;114.9,219;115,212;115.1,193;115.2,178;115.3,231;115.4,182;115.5,200;115.6,207;115.7,182;115.8,206;115.9,214;116,201;116.1,186;116.2,195;116.3,231;116.4,217;116.5,219;116.6,204;116.7,210;116.8,211;116.9,219;117,215;117.1,214;117.2,215;117.3,215;117.4,205;117.5,173;117.6,198;117.7,202;117.8,224;117.9,215;118,212;118.1,211;118.2,212;118.3,215;118.4,235;118.5,227;1
18.6,204;118.7,220;118.8,235;118.9,203;119,198;119.1,199;119.2,209;119.3,216;119.4,193;119.5,204;119.6,221;119.7,226;119.8,218;119.9,197;120,215;120.1,191;120.2,212;120.3,180;120.4,233;120.5,202;120.6,210;120.7,206;120.8,220;120.9,197;121,202;121.1,197;121.2,225;121.3,220;121.4,190;121.5,181;121.6,216;121.7,185;121.8,231;121.9,202;122,228;122.1,218;122.2,198;122.3,180;122.4,208;122.5,200;122.6,218;122.7,207;122.8,213;122.9,201;123,182;123.1,189;123.2,219;123.3,208;123.4,229;123.5,190;123.6,209;123.7,227;123.8,208;123.9,217;124,232;124.1,205;124.2,201;124.3,216;124.4,230;124.5,192;124.6,224;124.7,185;124.8,204;124.9,209;125,208;125.1,216;125.2,210;125.3,217;125.4,226;125.5,207;125.6,209;125.7,215;125.8,202;125.9,215;126,215;126.1,228;126.2,234;126.3,215;126.4,182;126.5,237;126.6,217;126.7,194;126.8,180;126.9,198;127,217;127.1,187;127.2,221;127.3,238;127.4,225;127.5,210;127.6,188;127.7,192;127.8,210;127.9,216;128,201;128.1,197;128.2,198;128.3,205;128.4,215;128.5,235;128.6,224;128.7,217;128.8,197;128.9,195;129,208;129.1,187;129.2,197;129.3,209;129.4,181;129.5,234;129.6,201;129.7,223;129.8,222;129.9,209 0,25;20,125;130,125;136,60;256,60 
0.2,39.7064247131348;0.4,39.6876220703125;0.6,39.6655387878418;0.8,39.6451530456543;1,39.6338691711426;1.2,39.6161575317383;1.4,39.5904350280762;1.6,39.5738143920898;1.8,39.556583404541;2,39.5318374633789;2.2,39.5103645324707;2.4,39.5028419494629;2.6,39.4786987304688;2.8,39.4873123168945;3,39.5404472351074;3.2,39.8584518432617;3.4,40.1625366210938;3.6,40.5810928344727;3.8,41.7658767700195;4,42.4898452758789;4.2,43.2938079833984;4.4,44.9959945678711;4.6,45.8377342224121;4.8,46.6636543273926;5,48.2162895202637;5.2,48.9440460205078;5.4,49.6450691223145;5.6,51.017650604248;5.8,51.7064933776855;6,52.3967132568359;6.2,53.8201637268066;6.4,54.5556182861328;6.6,55.2924613952637;6.8,56.7957305908203;7,57.5503349304199;7.2,58.3024253845215;7.4,59.8001937866211;7.6,60.5326728820801;7.8,61.2675323486328;8,62.737133026123;8.2,63.4740943908691;8.4,64.2151336669922;8.6,65.7017669677734;8.8,66.450798034668;9,67.1968231201172;9.2,68.6919555664063;9.4,69.4382476806641;9.6,70.182746887207;9.8,71.6857528686523;10,72.4256057739258;10.2,73.1812210083008;10.4,74.6761474609375;10.6,75.4326400756836;10.8,76.1799697875977;11,77.6811447143555;11.2,78.4361114501953;11.4,79.1841201782227;11.6,80.6909408569336;11.8,81.4454574584961;12,82.1952209472656;12.2,83.6999206542969;12.4,84.4549942016602;12.6,85.2054138183594;12.8,86.7225036621094;13,87.469841003418;13.2,88.2296524047852;13.4,89.7452239990234;13.6,90.4993591308594;13.8,91.2458877563477;14,92.75830078125;14.2,93.5157852172852;14.4,94.2712097167969;14.6,95.7909240722656;14.8,96.5478134155273;15,97.3038635253906;15.2,98.8213806152344;15.4,99.567756652832;15.6,100.331108093262;15.8,101.834899902344;16,102.600677490234;16.2,103.362907409668;16.4,104.876678466797;16.6,105.636390686035;16.8,106.397003173828;17,107.916793823242;17.2,108.66707611084;17.4,109.427169799805;17.6,110.941955566406;17.8,111.700622558594;18,112.455581665039;18.2,113.973770141602;18.4,114.73511505127;18.6,115.495742797852;18.8,117.019912719727;19,117.777381896973;19.2,118
.536209106445;19.4,120.050338745117;19.6,120.807106018066;19.8,121.569854736328;20,123.083839416504;20.2,123.846885681152;20.4,124.608947753906;20.6,125.848709106445;20.8,126.18286895752;21,126.311019897461;21.2,126.139694213867;21.4,125.937522888184;21.6,125.715599060059;21.8,125.316909790039;22,125.183181762695;22.2,125.107444763184;22.4,125.087791442871;22.6,125.146751403809;22.8,125.22200012207;23,125.394401550293;23.2,125.47688293457;23.4,125.541557312012;23.6,125.628517150879;23.8,125.649917602539;24,125.65998840332;24.2,125.668441772461;24.4,125.65998840332;24.6,125.666709899902;24.8,125.675788879395;25,125.686981201172;25.2,125.700416564941;25.4,125.73127746582;25.6,125.748191833496;25.8,125.767845153809;26,125.802673339844;26.2,125.81275177002;26.4,125.83464050293;26.6,125.861022949219;26.8,125.871726989746;27,125.884658813477;27.2,125.902076721191;27.4,125.914886474609;27.6,125.920608520508;27.8,125.945373535156;28,125.946487426758;28.2,125.964401245117;28.4,125.975601196289;28.6,125.986801147461;28.8,125.995880126953;29,126.010437011719;29.2,126.0205078125;29.4,126.028350830078;29.6,126.042419433594;29.8,126.052612304688;30,126.057586669922;30.2,126.069396972656;30.4,126.080101013184;30.6,126.082344055176;30.8,126.086822509766;31,126.097511291504;31.2,126.106483459473;31.4,126.116058349609;31.6,126.115432739258;31.8,126.126136779785;32,126.133979797363;32.2,126.135215759277;32.4,126.139068603516;32.6,126.144668579102;32.8,126.146911621094;33,126.155990600586;33.2,126.160980224609;33.4,126.155990600586;33.6,126.168312072754;33.8,126.173408508301;34,126.17391204834;34.2,126.177276611328;34.4,126.17552947998;34.6,126.177276611328;34.8,126.183486938477;35,126.185722351074;35.2,126.189582824707;35.4,126.190208435059;35.6,126.196426391602;35.8,126.195808410645;36,126.187973022461;36.2,126.183486938477;36.4,126.185104370117;36.6,126.18684387207;36.8,126.183990478516;37,126.189582824707;37.2,126.183486938477;37.4,126.189094543457;37.6,126.190826416016;37.8,126.18
8461303711;38,126.187339782715;38.2,126.194068908691;38.4,126.181747436523;38.6,126.180122375488;38.8,126.180625915527;39,126.173408508301;39.2,126.17839050293;39.4,126.17391204834;39.6,126.171051025391;39.8,126.16943359375;40,126.16495513916;40.2,126.16047668457;40.4,126.15648651123;40.6,126.15648651123;40.8,126.15648651123;41,126.153625488281;41.2,126.15251159668;41.4,126.151390075684;41.6,126.145301818848;41.8,126.137954711914;42,126.131851196289;42.2,126.13346862793;42.4,126.129005432129;42.6,126.127891540527;42.8,126.119422912598;43,126.113319396973;43.2,126.112075805664;43.4,126.105361938477;43.6,126.101997375488;43.8,126.103622436523;44,126.093536376953;44.2,126.08967590332;44.4,126.087440490723;44.6,126.081230163574;44.8,126.072265625;45,126.073883056641;45.2,126.063804626465;45.4,126.056587219238;45.6,126.055961608887;45.8,126.051986694336;46,126.047004699707;46.2,126.044769287109;46.4,126.038558959961;46.6,126.032455444336;46.8,126.028350830078;47,126.020027160645;47.2,126.017272949219;47.4,126.017768859863;47.6,126.009323120117;47.8,126.00658416748;48,125.997001647949;48.2,125.992515563965;48.4,125.98518371582;48.6,125.97908782959;48.8,125.971618652344;49,125.974479675293;49.2,125.96216583252;49.4,125.955940246582;49.6,125.95482635498;49.8,125.957061767578;50,125.948112487793;50.2,125.939643859863;50.4,125.936904907227;50.6,125.927963256836;50.8,125.919990539551;51,125.917259216309;51.2,125.906555175781;51.4,125.908790588379;51.6,125.901458740234;51.8,125.891998291016;52,125.891372680664;52.2,125.885284423828;52.4,125.877319335938;52.6,125.873458862305;52.8,125.869483947754;53,125.86164855957;53.2,125.854927062988;53.4,125.849822998047;53.6,125.843109130859;53.8,125.840873718262;54,125.83576965332;54.2,125.829048156738;54.4,125.823455810547;54.6,125.816101074219;54.8,125.813369750977;55,125.804916381836;55.2,125.802673339844;55.4,125.798202514648;55.6,125.789115905762;55.8,125.784141540527;56,125.779663085938;56.2,125.77742767334;56.4,125.77294921875;56.6
,125.755523681641;56.8,125.758758544922;57,125.751541137695;57.2,125.74259185791;57.4,125.744201660156;57.6,125.731887817383;57.8,125.731887817383;58,125.734634399414;58.2,125.727905273438;58.4,125.714477539063;58.6,125.712242126465;58.8,125.707138061523;59,125.70092010498;59.2,125.696563720703;59.4,125.694816589355;59.6,125.680267333984;59.8,125.678031921387;60,125.678031921387;60.2,125.671188354492;60.4,125.665588378906;60.6,125.665588378906;60.8,125.651519775391;61,125.647674560547;61.2,125.645927429199;61.4,125.635231018066;61.6,125.627395629883;61.8,125.627395629883;62,125.626281738281;62.2,125.624664306641;62.4,125.620185852051;62.6,125.606628417969;62.8,125.602638244629;63,125.606628417969;63.2,125.594306945801;63.4,125.588706970215;63.6,125.58423614502;63.8,125.582489013672;64,125.579132080078;64.2,125.583602905273;64.4,125.571296691895;64.6,125.563331604004;64.8,125.560592651367;65,125.557235717773;65.2,125.548774719238;65.4,125.545913696289;65.6,125.542678833008;65.8,125.539825439453;66,125.530853271484;66.2,125.527503967285;66.4,125.521789550781;66.6,125.52352142334;66.8,125.518424987793;67,125.51456451416;67.2,125.508964538574;67.4,125.503868103027;67.6,125.496658325195;67.8,125.500511169434;68,125.497650146484;68.2,125.483093261719;68.4,125.483093261719;68.6,125.476379394531;68.8,125.47688293457;69,125.47127532959;69.2,125.462326049805;69.4,125.465682983398;69.6,125.467926025391;69.8,125.455604553223;70,125.454986572266;70.2,125.450004577637;70.4,125.447769165039;70.6,125.438194274902;70.8,125.43147277832;71,125.430847167969;71.2,125.429229736328;71.4,125.424125671387;71.6,125.417419433594;71.8,125.42015838623;72,125.41854095459;72.2,125.414558410645;72.4,125.401741027832;72.6,125.407341003418;72.8,125.398384094238;73,125.394401550293;73.2,125.396026611328;73.4,125.390419006348;73.6,125.391052246094;73.8,125.380348205566;74,125.376365661621;74.2,125.375862121582;74.4,125.371887207031;74.6,125.371887207031;74.8,125.36629486084;75,125.370269775391;75.2,12
5.358955383301;75.4,125.356216430664;75.6,125.351119995117;75.8,125.351737976074;76,125.342651367188;76.2,125.345024108887;76.4,125.34440612793;76.6,125.340545654297;76.8,125.334815979004;77,125.329216003418;77.2,125.330963134766;77.4,125.324119567871;77.6,125.322006225586;77.8,125.323623657227;78,125.318023681641;78.2,125.314170837402;78.4,125.309074401855;78.6,125.309074401855;78.8,125.306205749512;79,125.30509185791;79.2,125.30347442627;79.4,125.293266296387;79.6,125.297752380371;79.8,125.292785644531;80,125.2900390625;80.2,125.286552429199;80.4,125.282585144043;80.6,125.284942626953;80.8,125.278228759766;81,125.274238586426;81.2,125.269760131836;81.4,125.27075958252;81.6,125.263046264648;81.8,125.266906738281;82,125.260810852051;82.2,125.265289306641;82.4,125.26180267334;82.6,125.260810852051;82.8,125.251724243164;83,125.257942199707;83.2,125.24340057373;83.4,125.249488830566;83.6,125.247253417969;83.8,125.24227142334;84,125.241653442383;84.2,125.237174987793;84.4,125.237174987793;84.6,125.236549377441;84.8,125.228713989258;85,125.229965209961;85.2,125.229347229004;85.4,125.221382141113;85.6,125.22200012207;85.8,125.219261169434;86,125.212417602539;86.2,125.221382141113;86.4,125.215278625488;86.6,125.215278625488;86.8,125.210678100586;87,125.209060668945;87.2,125.205207824707;87.4,125.200736999512;87.6,125.201225280762;87.8,125.205841064453;88,125.206321716309;88.2,125.196754455566;88.4,125.192764282227;88.6,125.191650390625;88.8,125.188911437988;89,125.186050415039;89.2,125.186553955078;89.4,125.185554504395;89.6,125.184936523438;89.8,125.181579589844;90,125.187797546387;90.2,125.179832458496;90.4,125.17374420166;90.6,125.17423248291;90.8,125.171997070313;91,125.169761657715;91.2,125.166397094727;91.4,125.164161682129;91.6,125.167526245117;91.8,125.163047790527;92,125.165908813477;92.2,125.158073425293;92.4,125.163047790527;92.6,125.165283203125;92.8,125.160804748535;93,125.158561706543;93.2,125.154586791992;93.4,125.153465270996;93.6,125.15682220459;93.8,125.1
54586791992;94,125.146751403809;94.2,125.147369384766;94.4,125.141151428223;94.6,125.14338684082;94.8,125.144020080566;95,125.140029907227;95.2,125.13493347168;95.4,125.141151428223;95.6,125.138916015625;95.8,125.138290405273;96,125.136672973633;96.2,125.133316040039;96.4,125.133316040039;96.6,125.12996673584;96.8,125.12996673584;97,125.129333496094;97.2,125.131080627441;97.4,125.12996673584;97.6,125.124855041504;97.8,125.12149810791;98,125.117515563965;98.2,125.12149810791;98.4,125.117515563965;98.6,125.119262695313;98.8,125.115905761719;99,125.118141174316;99.2,125.115409851074;99.4,125.109680175781;99.6,125.118644714355;99.8,125.110801696777;100,125.111923217773;100.2,125.112548828125;100.4,125.105827331543;100.6,125.106941223145;100.8,125.109680175781;101,125.101348876953;101.2,125.098991394043;101.4,125.100723266602;101.6,125.098991394043;101.8,125.098991394043;102,125.09561920166;102.2,125.092887878418;102.4,125.096252441406;102.6,125.104705810547;102.8,125.096870422363;103,125.096252441406;103.2,125.088287353516;103.4,125.092399597168;103.6,125.09065246582;103.8,125.09065246582;104,125.096252441406;104.2,125.090034484863;104.4,125.094017028809;104.6,125.084434509277;104.8,125.084434509277;105,125.081703186035;105.2,125.08219909668;105.4,125.08219909668;105.6,125.083320617676;105.8,125.077713012695;106,125.077713012695;106.2,125.085556030273;106.4,125.080581665039;106.6,125.08219909668;106.8,125.081085205078;107,125.077713012695;107.2,125.08381652832;107.4,125.078826904297;107.6,125.073860168457;107.8,125.077224731445;108,125.070999145508;108.2,125.070381164551;108.4,125.072120666504;108.6,125.069877624512;108.8,125.075981140137;109,125.070381164551;109.2,125.072120666504;109.4,125.064155578613;109.6,125.068145751953;109.8,125.072120666504;110,125.068145751953;110.2,125.061424255371;110.4,125.063659667969;110.6,125.067024230957;110.8,125.067024230957;111,125.068145751953;111.2,125.057441711426;111.4,125.061424255371;111.6,125.056945800781;111.8,125.06204986572
3;112,125.064155578613;112.2,125.062049865723;112.4,125.050231933594;112.6,125.054206848145;112.8,125.056945800781;113,125.062538146973;113.2,125.058067321777;113.4,125.057441711426;113.6,125.05135345459;113.8,125.050727844238;114,125.050231933594;114.2,125.056449890137;114.4,125.055824279785;114.6,125.052467346191;114.8,125.050231933594;115,125.053588867188;115.2,125.046875;115.4,125.048492431641;115.6,125.047370910645;115.8,125.047988891602;116,125.047370910645;116.2,125.050727844238;116.4,125.048492431641;116.6,125.05135345459;116.8,125.047988891602;117,125.047988891602;117.2,125.04288482666;117.4,125.047988891602;117.6,125.047988891602;117.8,125.045753479004;118,125.045127868652;118.2,125.044013977051;118.4,125.047370910645;118.6,125.046875;118.8,125.040649414063;119,125.040649414063;119.2,125.038414001465;119.4,125.034439086914;119.6,125.045127868652;119.8,125.03678894043;120,125.03678894043;120.2,125.045127868652;120.4,125.044639587402;120.6,125.041282653809;120.8,125.036178588867;121,125.037300109863;121.2,125.033317565918;121.4,125.038414001465;121.6,125.041282653809;121.8,125.033935546875;122,125.035057067871;122.2,125.031700134277;122.4,125.035057067871;122.6,125.03231048584;122.8,125.033317565918;123,125.034439086914;123.2,125.03231048584;123.4,125.033935546875;123.6,125.034439086914;123.8,125.03231048584;124,125.03108215332;124.2,125.03231048584;124.4,125.033317565918;124.6,125.02995300293;124.8,125.025482177734;125,125.026596069336;125.2,125.032821655273;125.4,125.033317565918;125.6,125.034439086914;125.8,125.034439086914;126,125.031700134277;126.2,125.03231048584;126.4,125.027221679688;126.6,125.027221679688;126.8,125.027717590332;127,125.027717590332;127.2,125.027717590332;127.4,125.027221679688;127.6,125.027717590332;127.8,125.033317565918;128,125.024971008301;128.2,125.025482177734;128.4,125.022735595703;128.6,125.027717590332;128.8,125.021614074707;129,125.022735595703;129.2,125.028831481934;129.4,125.021614074707;129.6,125.020500183105;129.8,125.0
24360656738;130,125.025482177734;130.2,125.022735595703;130.4,125.004089355469;130.6,124.89525604248;130.8,124.68692779541;131,123.83283996582;131.2,123.110824584961;131.4,122.418769836426;131.6,121.063827514648;131.8,120.399826049805;132,119.754051208496;132.2,118.476852416992;132.4,117.854606628418;132.6,117.242012023926;132.8,116.039070129395;133,115.444869995117;133.2,114.852989196777;133.4,113.699516296387;133.6,113.127235412598;133.8,112.569427490234;134,111.464576721191;134.2,110.918518066406;134.4,110.387062072754;134.6,109.325691223145;134.8,108.798645019531;135,108.279472351074;135.2,107.25471496582;135.4,106.753219604492;135.6,106.261192321777;135.8,105.272605895996;136,104.787696838379;136.2,104.310028076172;136.4,103.357948303223;136.6,102.894561767578;136.8,102.429130554199;137,101.502754211426;137.2,101.049858093262;137.4,100.608001708984;137.6,99.7148361206055;137.8,99.2816696166992;138,98.8530120849609;138.2,97.9905242919922;138.4,97.5648574829102;138.6,97.1460189819336;138.8,96.3052978515625;139,95.8938980102539;139.2,95.4903182983398;139.4,94.6834182739258;139.6,94.2828063964844;139.8,93.8863220214844;140,93.1000137329102;140.2,92.7063598632813;140.4,92.3200378417969;140.6,91.5559005737305;140.8,91.1714248657227;141,90.799186706543;141.2,90.0549697875977;141.4,89.6922225952148;141.6,89.3213882446289;141.8,88.6113586425781;142,88.236930847168;142.2,87.8806381225586;142.4,87.183723449707;142.6,86.8316192626953;142.8,86.4849624633789;143,85.8002471923828;143.2,85.4559173583984;143.4,85.1123580932617;143.6,84.450065612793;143.8,84.1181640625;144,83.7869110107422;144.2,83.1300582885742;144.4,82.8115692138672;144.6,82.4926223754883;144.8,81.8487930297852;145,81.5360794067383;145.2,81.2262268066406;145.4,80.6058731079102;145.6,80.2954788208008;145.8,79.9846343994141;146,79.3774032592773;146.2,79.0820083618164;146.4,78.7862548828125;146.6,78.1936187744141;146.8,77.8962249755859;147,77.6039047241211;147.2,77.033561706543;147.4,76.7408294677734;147.6,76.457
9238891602;147.8,75.8883972167969;148,75.6101303100586;148.2,75.3323593139648;148.4,74.7714996337891;148.6,74.511344909668;148.8,74.2376022338867;149,73.6994934082031;149.2,73.4362335205078;149.4,73.1712799072266;149.6,72.6494216918945;149.8,72.3851470947266;150,72.1308212280273;150.2,71.6179504394531;150.4,71.3582916259766;150.6,71.1051483154297;150.8,70.6094741821289;151,70.3647232055664;151.2,70.1111755371094;151.4,69.6239852905273;151.6,69.3836059570313;151.8,69.1390609741211;152,68.6608428955078;152.2,68.4239501953125;152.4,68.1924743652344;152.6,67.7189178466797;152.8,67.4912872314453;153,67.2590103149414;153.2,66.7975921630859;153.4,66.5767669677734;153.6,66.3493270874023;153.8,65.9007568359375;154,65.6810913085938;154.2,65.4614410400391;154.4,65.0216827392578;154.6,64.8125991821289 0,50;100,50 0.012,47.8;0.512,48.9;1.012,50.1;1.512,50;2.012,50;2.512,50;3.012,50;3.512,50;4.012,50.1;4.512,50;5.012,50;5.512,50;6.012,50;6.512,50;7.012,50;7.512,50;8.012,50;8.512,50;9.012,50;9.512,49.9;10.012,50;10.512,49.9;11.012,49.9;11.512,50;12.012,50.1;12.512,50;13.012,49.9;13.512,49.9;14.012,49.9;14.512,50;15.012,50;15.512,49.9;16.012,50;16.512,50;17.012,49.9;17.512,49.8;18.012,49.9;18.512,49.9;19.012,50;19.512,50;20.012,50.1;20.512,50;21.012,50;21.512,50;22.012,50.1;22.512,50;23.012,49.9;23.512,50;24.012,50;24.512,50.1;25.012,50;25.512,50;26.012,50;26.512,50;27.012,50;27.512,49.9;28.012,49.8;28.512,49.9;29.012,50;29.512,50;30.012,49.9;30.512,49.9;31.012,50;31.512,50;32.012,50;32.512,50.1;33.012,50;33.512,49.9;34.012,49.9;34.512,50;35.012,49.9;35.512,49.9;36.012,50.1;36.512,50;37.012,49.9;37.512,49.9;38.012,49.9;38.512,49.9;39.012,49.9;39.512,50;40.012,50;40.512,50;41.012,49.9;41.512,50;42.012,50.1;42.512,50;43.012,50;43.512,50.1;44.012,50.1;44.512,50;45.012,49.9;45.512,49.9;46.012,50;46.512,50;47.012,50.1;47.512,50;48.012,49.9;48.512,50;49.012,50;49.512,50;50.012,50;50.512,50;51.012,50;51.512,50;52.012,50;52.512,50.1;53.012,50.2;53.512,50.1;54.012,50.1;54.512,50;55.012,49.9
;55.512,49.9;56.012,49.9;56.512,49.9;57.012,49.9;57.512,50;58.012,50;58.512,50.1;59.012,50;59.512,50;60.012,50;60.512,50;61.012,50;61.512,50.1;62.012,50.1;62.512,50;63.012,50;63.512,49.9;64.012,49.8;64.512,49.9;65.012,50;65.512,50;66.012,50.1;66.512,50.1;67.012,50.1;67.512,49.9;68.012,50;68.512,50;69.012,49.9;69.512,49.9;70.012,50;70.512,50.1;71.012,50.1;71.512,50.2;72.012,50.1;72.512,50;73.012,50;73.512,50;74.012,50;74.512,50;75.012,49.9;75.512,50;76.012,50;76.512,50;77.012,50;77.512,50.1;78.012,50;78.512,50;79.012,50.1;79.512,50;80.012,50;80.512,50;81.012,50;81.512,50;82.012,50;82.512,50.1;83.012,50;83.512,50;84.012,50.1;84.512,50;85.012,49.9;85.512,49.8;86.012,49.9;86.512,50;87.012,50.1;87.512,50.1;88.012,50;88.512,50;89.012,50.1;89.512,50;90.012,49.9;90.512,50;91.012,50;91.512,50;92.012,50.1;92.512,50;93.012,49.9;93.512,49.9;94.012,49.9;94.512,50;95.012,50;95.512,49.9;96.012,50;96.512,50.1;97.012,50.1;97.512,50;98.012,50;98.512,50;99.012,49.9;99.512,49.9;100.012,50;100.512,50.1;101.011,25;101.511,0;102.011,0;102.511,0;103.011,0;103.511,0;104.011,0;104.511,0;105.011,0;105.511,0;106.011,0;106.511,0;107.011,0;107.511,0;108.011,0;108.511,0;109.011,0;109.511,0;110.011,0;110.511,0;111.011,0;111.511,0;112.011,0;112.511,0;113.011,0;113.511,0;114.011,0;114.511,0;115.011,0;115.511,0;116.011,0;116.511,0;117.011,0;117.511,0;118.011,0;118.511,0;119.011,0;119.511,0;120.011,0;120.511,0;121.011,0;121.511,0;122.011,0;122.511,0;123.011,0;123.511,0;124.011,0;124.511,0 0,1;80,1 
0.1,89;0.2,69;0.3,77;0.4,81;0.5,71;0.6,74;0.7,69;0.8,70;0.9,63;1,66;1.1,66;1.2,63;1.3,72;1.4,76;1.5,68;1.6,62;1.7,61;1.8,79;1.9,64;2,69;2.1,73;2.2,72;2.3,73;2.4,70;2.5,67;2.6,77;2.7,87;2.8,73;2.9,71;3,63;3.1,79;3.2,67;3.3,79;3.4,72;3.5,79;3.6,75;3.7,73;3.8,75;3.9,63;4,89;4.1,77;4.2,84;4.3,87;4.4,79;4.5,102;4.6,89;4.7,89;4.8,92;4.9,91;5,100;5.1,99;5.2,105;5.3,107;5.4,85;5.5,120;5.6,102;5.7,118;5.8,101;5.9,114;6,103;6.1,116;6.2,107;6.3,113;6.4,133;6.5,120;6.6,149;6.7,140;6.8,157;6.9,157;7,148;7.1,181;7.2,187;7.3,157;7.4,164;7.5,189;7.6,195;7.7,186;7.8,237;7.9,200;8,210;8.1,229;8.2,217;8.3,246;8.4,248;8.5,267;8.6,258;8.7,290;8.8,298;8.9,292;9,284;9.1,325;9.2,297;9.3,317;9.4,371;9.5,376;9.6,390;9.7,356;9.8,425;9.9,415;10,433;10.1,454;10.2,443;10.3,479;10.4,496;10.5,529;10.6,539;10.7,535;10.8,543;10.9,587;11,625;11.1,593;11.2,633;11.3,650;11.4,690;11.5,695;11.6,752;11.7,798;11.8,778;11.9,778;12,802;12.1,897;12.2,881;12.3,957;12.4,994;12.5,958;12.6,1055;12.7,1077;12.8,1085;12.9,1179;13,1146;13.1,1247;13.2,1260;13.3,1274;13.4,1342;13.5,1417;13.6,1472;13.7,1432;13.8,1528;13.9,1632;14,1655;14.1,1601;14.2,1716;14.3,1724;14.4,1807;14.5,1902;14.6,1930;14.7,1965;14.8,2006;14.9,2035;15,2216;15.1,2194;15.2,2318;15.3,2218;15.4,2428;15.5,2556;15.6,2682;15.7,2622;15.8,2727;15.9,2801;16,2931;16.1,2916;16.2,2967;16.3,3042;16.4,3140;16.5,3277;16.6,3185;16.7,3247;16.8,3294;16.9,3442;17,3561;17.1,3603;17.2,3674;17.3,3726;17.4,3796;17.5,3728;17.6,3855;17.7,3832;17.8,3798;17.9,3946;18,3910;18.1,3930;18.2,3937;18.3,3898;18.4,4057;18.5,3985;18.6,3801;18.7,3786;18.8,3762;18.9,3756;19,3630;19.1,3609;19.2,3537;19.3,3555;19.4,3541;19.5,3367;19.6,3345;19.7,3146;19.8,3108;19.9,2975;20,2788;20.1,2688;20.2,2478;20.3,2452;20.4,2349;20.5,2141;20.6,2065;20.7,1890;20.8,1803;20.9,1604;21,1498;21.1,1394;21.2,1291;21.3,1204;21.4,1059;21.5,1028;21.6,931;21.7,866;21.8,770;21.9,691;22,614;22.1,559;22.2,582;22.3,476;22.4,490;22.5,450;22.6,370;22.7,403;22.8,390;22.9,317;23,342;23.1,319;23.2,291;23.3,307;23.4,306
;23.5,278;23.6,311;23.7,297;23.8,296;23.9,330;24,323;24.1,289;24.2,284;24.3,305;24.4,284;24.5,294;24.6,271;24.7,288;24.8,317;24.9,307;25,282;25.1,297;25.2,303;25.3,281;25.4,323;25.5,327;25.6,294;25.7,290;25.8,276;25.9,271;26,272;26.1,282;26.2,262;26.3,241;26.4,286;26.5,277;26.6,247;26.7,267;26.8,289;26.9,263;27,261;27.1,262;27.2,256;27.3,239;27.4,263;27.5,253;27.6,241;27.7,235;27.8,229;27.9,220;28,203;28.1,225;28.2,225;28.3,206;28.4,209;28.5,188;28.6,214;28.7,196;28.8,185;28.9,174;29,168;29.1,175;29.2,184;29.3,187;29.4,176;29.5,162;29.6,173;29.7,168;29.8,132;29.9,151;30,134;30.1,175;30.2,143;30.3,127;30.4,137;30.5,140;30.6,115;30.7,138;30.8,139;30.9,120;31,124;31.1,121;31.2,111;31.3,149;31.4,137;31.5,120;31.6,132;31.7,113;31.8,127;31.9,116;32,108;32.1,117;32.2,116;32.3,98;32.4,120;32.5,101;32.6,87;32.7,111;32.8,107;32.9,108;33,101;33.1,118;33.2,86;33.3,106;33.4,121;33.5,105;33.6,87;33.7,80;33.8,101;33.9,105;34,103;34.1,86;34.2,87;34.3,79;34.4,84;34.5,84;34.6,95;34.7,96;34.8,77;34.9,99;35,82;35.1,81;35.2,78;35.3,91;35.4,81;35.5,95;35.6,90;35.7,100;35.8,90;35.9,79;36,92;36.1,78;36.2,95;36.3,96;36.4,81;36.5,82;36.6,71;36.7,78;36.8,67;36.9,79 0,25;27,160;37,160;47,60;167,60 
0.2,30.7578144073486;0.4,30.749828338623;0.6,30.7449951171875;0.8,30.7407608032227;1,30.7310848236084;1.2,30.7257633209229;1.4,30.7359218597412;1.6,30.7620487213135;1.8,30.9956016540527;2,31.2360744476318;2.2,31.5899219512939;2.4,32.6551284790039;2.6,33.3335723876953;2.8,34.1144981384277;3,35.822639465332;3.2,36.6979446411133;3.4,37.552906036377;3.6,39.1688652038574;3.8,39.9255523681641;4,40.6455459594727;4.2,42.0563163757324;4.4,42.7529144287109;4.6,43.4563598632813;4.8,44.8957214355469;5,45.6382179260254;5.2,46.3881988525391;5.4,47.9173316955566;5.6,48.6773834228516;5.8,49.4495544433594;6,50.9787979125977;6.2,51.7368240356445;6.4,52.4800605773926;6.6,53.98974609375;6.8,54.7403602600098;7,55.4934730529785;7.2,56.9991645812988;7.4,57.7553939819336;7.6,58.5142555236816;7.8,60.0343589782715;8,60.8039169311523;8.2,61.5676765441895;8.4,63.0930557250977;8.6,63.855655670166;8.8,64.6153869628906;9,66.1405181884766;9.2,66.9011611938477;9.4,67.665901184082;9.6,69.199089050293;9.8,69.9642486572266;10,70.7248153686523;10.2,72.2572479248047;10.4,73.0180969238281;10.6,73.7821807861328;10.8,75.3098907470703;11,76.0807800292969;11.2,76.8520736694336;11.4,78.3845138549805;11.6,79.1556015014648;11.8,79.9225692749023;12,81.4514846801758;12.2,82.2133102416992;12.4,82.9787368774414;12.6,84.5177536010742;12.8,85.2853012084961;13,86.0507965087891;13.2,87.5911712646484;13.4,88.3543395996094;13.6,89.1260528564453;13.8,90.664436340332;14,91.4320907592773;14.2,92.2000350952148;14.4,93.7308502197266;14.6,94.4997711181641;14.8,95.2650299072266;15,96.7986221313477;15.2,97.5664596557617;15.4,98.3374176025391;15.6,99.8789672851563;15.8,100.646324157715;16,101.412368774414;16.2,102.946762084961;16.4,103.704116821289;16.6,104.481300354004;16.8,106.01197052002;17,106.784912109375;17.2,107.550453186035;17.4,109.088424682617;17.6,109.864204406738;17.8,110.630226135254;18,112.167663574219;18.2,112.926696777344;18.4,113.696151733398;18.6,115.221885681152;18.8,115.993766784668;19,116.762702941895;19.2,11
8.296165466309;19.4,119.069755554199;19.6,119.839645385742;19.8,121.374252319336;20,122.141204833984;20.2,122.905448913574;20.4,124.441665649414;20.6,125.207939147949;20.8,125.970626831055;21,127.496505737305;21.2,128.26904296875;21.4,129.039993286133;21.6,130.574813842773;21.8,131.350021362305;22,132.108551025391;22.2,133.64616394043;22.4,134.416809082031;22.6,135.176849365234;22.8,136.704681396484;23,137.470108032227;23.2,138.233657836914;23.4,139.769119262695;23.6,140.533935546875;23.8,141.302368164063;24,142.833724975586;24.2,143.610900878906;24.4,144.36669921875;24.6,145.90007019043;24.8,146.661865234375;25,147.426055908203;25.2,148.952987670898;25.4,149.718017578125;25.6,150.48828125;25.8,152.018951416016;26,152.778060913086;26.2,153.551071166992;26.4,155.086044311523;26.6,155.845001220703;26.8,156.606582641602;27,158.139953613281;27.2,158.892471313477;27.4,159.657852172852;27.6,160.827529907227;27.8,161.087600708008;28,161.141891479492;28.2,160.823669433594;28.4,160.564743041992;28.6,160.285919189453;28.8,159.846450805664;29,159.716018676758;29.2,159.65950012207;29.4,159.734176635742;29.6,159.842422485352;29.8,159.968490600586;30,160.21923828125;30.2,160.31672668457;30.4,160.389678955078;30.6,160.449066162109;30.8,160.448455810547;31,160.43766784668;31.2,160.404434204102;31.4,160.389678955078;31.6,160.388534545898;31.8,160.398315429688;32,160.413116455078;32.2,160.442169189453;32.4,160.495681762695;32.6,160.523132324219;32.8,160.545959472656;33,160.592041015625;33.2,160.6103515625;33.4,160.632049560547;33.6,160.654235839844;33.8,160.662231445313;34,160.667999267578;34.2,160.687286376953;34.4,160.698822021484;34.6,160.709594726563;34.8,160.729034423828;35,160.737548828125;35.2,160.748352050781;35.4,160.778045654297;35.6,160.786560058594;35.8,160.802597045898;36,160.817886352539;36.2,160.826538085938;36.4,160.829925537109;36.6,160.853225708008;36.8,160.853225708008;37,160.861251831055;37.2,160.876693725586;37.4,160.860641479492;37.6,160.729034423828;37.8,159.92
4011230469;38,159.381454467773;38.2,158.391235351563;38.4,156.103927612305;38.6,155.00016784668;38.8,153.917785644531;39,151.826400756836;39.2,150.803482055664;39.4,149.796920776367;39.6,147.840072631836;39.8,146.885192871094;40,145.955184936523;40.2,144.127105712891;40.4,143.218612670898;40.6,142.34147644043;40.8,140.618179321289;41,139.770263671875;41.2,138.937942504883;41.4,137.314926147461;41.6,136.512878417969;41.8,135.73030090332;42,134.185089111328;42.2,133.424102783203;42.4,132.679168701172;42.6,131.209274291992;42.8,130.489364624023;43,129.784790039063;43.2,128.397766113281;43.4,127.703979492188;43.6,127.028472900391;43.8,125.690956115723;44,125.033935546875;44.2,124.385581970215;44.4,123.114181518555;44.6,122.48316192627;44.8,121.858070373535;45,120.640235900879;45.2,120.034065246582;45.4,119.440391540527;45.6,118.255317687988;45.8,117.675064086914;46,117.100479125977;46.2,115.96696472168;46.4,115.405158996582;46.6,114.846908569336;46.8,113.754699707031;47,113.218643188477;47.2,112.687225341797;47.4,111.629211425781;47.6,111.10595703125;47.8,110.589569091797;48,109.565940856934;48.2,109.056579589844;48.4,108.559059143066;48.6,107.566543579102;48.8,107.089897155762;49,106.58903503418;49.2,105.639739990234;49.4,105.161338806152;49.6,104.683753967285;49.8,103.761405944824;50,103.304016113281;50.2,102.845703125;50.4,101.93480682373;50.6,101.486671447754;50.8,101.044288635254;51,100.161399841309;51.2,99.7320098876953;51.4,99.297737121582;51.6,98.4410781860352;51.8,98.0126419067383;52,97.6025238037109;52.2,96.7665100097656;52.4,96.3555526733398;52.6,95.9420471191406;52.8,95.1333160400391;53,94.7310638427734;53.2,94.3320541381836;53.4,93.5500793457031;53.6,93.1568908691406;53.8,92.7682876586914;54,91.9939346313477;54.2,91.616081237793;54.4,91.2365188598633;54.6,90.4877777099609;54.8,90.1127777099609;55,89.7463226318359;55.2,89.0256195068359;55.4,88.6665496826172;55.6,88.3052978515625;55.8,87.5967178344727;56,87.2427139282227;56.2,86.8883819580078;56.4,86.19461059
57031;56.6,85.8464202880859;56.8,85.5104598999023;57,84.8315963745117;57.2,84.4985504150391;57.4,84.1595153808594;57.6,83.5019683837891;57.8,83.171875;58,82.8478546142578;58.2,82.1979293823242;58.4,81.8703155517578;58.6,81.5580902099609;58.8,80.931640625;59,80.6152114868164;59.2,80.3119659423828;59.4,79.6913986206055;59.6,79.3894500732422;59.8,79.08642578125;60,78.4937286376953;60.2,78.1925048828125;60.4,77.8928985595703;60.6,77.3071823120117;60.8,77.0236206054688;61,76.7281875610352;61.2,76.1668319702148;61.4,75.8796997070313;61.6,75.5985794067383;61.8,75.0381698608398;62,74.7599639892578;62.2,74.4850845336914;62.4,73.9419097900391;62.6,73.6765518188477;62.8,73.4071655273438;63,72.8792572021484;63.2,72.6132431030273;63.4,72.3490982055664;63.6,71.8273544311523;63.8,71.5741806030273;64,71.3145294189453;64.2,70.8061981201172;64.4,70.5586090087891;64.6,70.3095855712891;64.8,69.8118209838867;65,69.56884765625;65.2,69.3230743408203;65.4,68.8386917114258;65.6,68.5990982055664;65.8,68.3627166748047;66,67.895622253418;66.2,67.6616134643555;66.4,67.4270095825195;66.6,66.9568557739258;66.8,66.7256011962891;67,66.500373840332;67.2,66.0505599975586;67.4,65.8270950317383;67.6,65.604248046875;67.8,65.1671600341797;68,64.9509582519531 Luminescence/inst/extdata/RF_file.rf0000644000176200001440000017312513417222471017146 0ustar liggesusers ROI: ROI 1 ROI 2 ROI 3 ROI 4 ROI 5 ROI 6 ROI 7 ROI 8 ROI 9 ROI 10 ROI 11 x: 19 117 135 54 49 78 209 108 246 232 28 y: 19 73 82 168 175 167 110 54 175 172 169 width: 351 20 20 20 20 20 20 20 20 20 20 height: 351 20 20 20 20 20 20 20 20 20 20 area: 96765 316 316 316 316 316 316 316 316 316 316 No. 
time (sec) aliquot grain 1 grain 2 grain 3 grain 4 grain 5 grain 6 grain 7 grain 8 grain 9 grain 10 1 12.5 613.6036 650.769 643.1361 646.7025 644.8513 629.8418 626.5759 627.2722 618.2816 619.0475 623.5696 2 37.5 615.3152 651.269 644.5728 649.4241 646.5759 631.7563 629.0032 628.8703 619.8639 621.1551 626.2848 3 62.5 615.2543 651.9747 645.1139 649.3703 646.712 630.5253 628.0095 628.962 619.7184 620.6994 625.5443 4 87.5 616.8258 652.0823 646.2278 650.3987 648.3861 632.5475 630.3165 630.2563 621.5348 622.0475 627.1835 5 112.5 617.073 652.5411 646.9177 649.6266 648.0475 632.4747 630.0222 629.9272 621.981 623 627.5633 6 137.5 618.0075 653.7753 647.9937 651.3354 648.9494 634.1614 631.9715 631.5696 622.4968 622.9684 627.5158 7 162.5 617.5387 653.9209 646.3797 651.0443 649.0348 633.4367 630.481 630.4747 622.519 623.2975 627.7089 8 187.5 618.7645 654.5538 648.3861 652.8513 650.4019 633.9399 632.2816 631.6487 624.1519 625.0823 629.0158 9 212.5 618.8933 654.8228 647.731 653.0506 650.2975 634.5823 632.0348 632.693 623.8101 624.962 628.8386 10 237.5 618.4924 654.6329 648.269 651.8291 649.788 634.8228 631.8291 631.7152 623.212 624.0759 628.3987 11 262.5 618.6822 654.4241 647.7278 651.3513 649.9146 634.2405 632.8829 632.0506 623.2184 624.3513 628.8829 12 287.5 619.1796 654.0253 648.481 653.1424 651.1551 634.3259 632.212 632.3101 625.1867 625.4019 630.5665 13 312.5 618.8217 653.9462 647.8544 652.3924 650.0886 633.5854 632.1867 633.1139 623.538 624.4589 628.6772 14 337.5 618.9307 654.1108 649.0443 652.3133 650.019 634.3987 632.2215 631.9747 623.9778 624.5285 628.443 15 362.5 619.3048 654.7722 648.2816 653.1171 650.7658 634.3861 631.8892 632.3956 624.212 624.6424 629.6266 16 387.5 619.0163 654.6171 648.943 651.8513 649.5728 634.6392 632.3323 632.8386 624.6551 625.5095 628.1329 17 412.5 618.9434 654.5285 649.2057 651.7753 649.8165 634.5538 632.0728 632.5063 624.3608 624.1013 629.7627 18 437.5 618.737 654.5949 648.0696 651.4525 648.8608 634.0443 632.4873 631.6804 623.231 624.4873 
628.7627 19 462.5 618.7779 654.2563 648.1171 651.5127 649.6899 633.6835 631.6203 632.5886 623.9272 624.5222 628.5285 20 487.5 619.3553 654.6582 648.6772 652.0949 650.1076 635.0918 632.6487 632.3259 624.2848 624.9525 629.0949 21 512.5 619.4536 654.6234 648.6994 653.0854 650.5253 635.1614 632.4209 633.2658 624.1266 625.2911 629.0032 22 537.5 618.7475 653.8101 647.7658 651.9209 649.943 634.1456 631.7975 631.6677 624.3101 624.5696 628.1677 23 562.5 618.4579 654.2532 647.5728 651.0823 649.0601 634.2405 631.4589 632.3544 623.5918 625.1424 628.4968 24 587.5 618.923 653.9114 647.8608 651.6835 650.2215 634.4019 631.9968 631.8196 624.4272 624.1582 628.6266 25 612.5 618.8005 653.6139 648.0443 651.3892 649.3323 634.6108 632.3133 632.3418 623.8703 624.7437 628.6994 26 637.5 619.1491 653.5411 647.693 652.3608 649.557 634.6994 632.3513 632.5222 624.1962 624.2722 628.731 27 662.5 618.89 654.4905 648.2563 652.2184 650.3354 634.3766 632.3892 631.3797 624.0633 624.943 628.5854 28 687.5 618.9607 654.693 647.8734 652.4873 649.6361 634.0475 632.4114 631.8639 624.4557 625.1899 629.2753 29 712.5 618.8 654.7658 647.0728 651.0665 648.5222 634.2658 632.0032 632.4747 623.8165 624.2057 629.0886 30 737.5 619.0514 653.5032 647.5411 651.807 650.0918 634.0222 631.6804 632.4082 623.6203 624.7247 628.9019 31 762.5 619.311 654.7943 647.5601 652.1709 650.1804 634.6329 632.0285 632.8449 624.5348 624.9747 629.0601 32 787.5 618.7398 653.4525 647.3924 650.8101 649.0316 634.2373 631.5032 631.1013 623.8354 625.3513 627.7627 33 812.5 619.0412 654.1234 648.4557 652.557 650.0063 634.1456 631.7057 632.5728 624.2373 624.5601 628.4905 34 837.5 619.0312 653.9335 647.8133 650.7627 649.4747 634.4272 631.3671 632.0601 624.0696 624.3513 629.1899 35 862.5 618.9551 654.1899 647.6456 651.0886 648.7975 633.9304 631.9177 632.5918 623.6013 624.3671 628.7057 36 887.5 618.4504 653.8513 647.269 651.5063 648.481 634.9241 631.2658 632.0095 623.9684 624 627.6013 37 912.5 619.1043 654.5886 648.0063 651.3259 649.2025 634.0127 
632.3323 632.0633 624.0411 625.0443 629.8924 38 937.5 619.1799 653.9241 648.1044 651.1867 649 634.6361 631.5949 631.0348 624.6677 624.557 628.7595 39 962.5 619.041 653.2753 647.9873 651.788 649.1899 634.1456 631.557 632.2658 624.2658 624.4778 628.7753 40 987.5 619.302 654.4525 648.0032 651.731 649.1899 634.5095 632.3956 632.6614 624.9114 625.6108 628.9873 41 1012.5 618.2681 652.7184 646.9778 650.7278 648.5095 633.3513 631.1361 631.2278 622.4652 623.5696 627.6646 42 1037.5 618.4242 652.693 646.9525 650.1614 648.3766 632.9747 631.6139 631.2943 622.9968 624.4367 627.9304 43 1062.5 619.0967 653.4399 646.8133 650.3449 648.2184 635.1392 632.1456 631.8797 624.1899 624.6108 629.0443 44 1087.5 618.6087 653.2532 646.7342 651.5949 649.1487 633.9177 631.3956 631.7468 623.981 624.9146 628.7943 45 1112.5 619.049 653.8165 648.0981 650.8797 648.8924 634.8323 631.6677 631.8418 624.3386 624.8544 628.9462 46 1137.5 619.0551 653.1424 647.1171 650.5665 648.3038 634.1772 631.25 632.0285 624.5506 624.731 629.1297 47 1162.5 618.4463 653.3639 646.2943 649.9747 648.2785 633.7468 631.5665 631.6424 623.7405 623.9241 628.3703 48 1187.5 618.7559 654 647.1424 650.6044 647.8956 633.9146 630.8892 632.2753 624.1741 624.1867 628.212 49 1212.5 618.488 652.8101 647.1171 650.3703 648.8386 633.9272 631.3101 631.9462 622.9114 623.807 628.0759 50 1237.5 618.7313 652.7627 647.2247 651.0475 648.462 633.9019 632.8354 631.6582 623.8259 624.731 628.1551 51 1262.5 618.7288 652.981 647.8165 650.3703 648.6203 633.8259 631.6741 631.6392 624.0348 624.712 628.1392 52 1287.5 618.7365 652.7753 647.019 650.9272 648.0759 633.7658 632.0886 631.7532 623.7184 624.269 629.0981 53 1312.5 618.9991 653.269 647.519 650.3418 648.0475 634.5348 631.5601 631.8892 624.3323 624.8639 628.8766 54 1337.5 618.4219 652.8291 647.0506 649.7437 648.1203 633.25 630.9747 631.3829 623.7753 624.4051 627.7468 55 1362.5 618.9692 653.0886 646.9177 650.9241 649.5316 635.4051 632.2753 631.5253 624.9114 624.6677 628.8608 56 1387.5 619.48 653.5918 
647.8481 651.5949 649.2184 634.7816 631.5158 631.8892 624.1867 625.1044 628.5633 57 1412.5 618.6523 653.1772 647.0316 650.288 648.3703 634.193 631.4494 631.6487 623.7785 624.4462 628.6171 58 1437.5 618.9045 654.0411 646.9968 650.193 648.057 633.3165 631.6709 631.7342 624.519 623.9399 628.8006 59 1462.5 618.7706 652.6234 647.0759 650.3513 648.3449 633.5443 631.7342 631.1013 623.7342 624.5728 628.4589 60 1487.5 618.4059 652.1835 646.6582 649.807 647.7025 633.0095 631.1108 630.8323 623.6297 623.5728 627.6994 61 1512.5 618.8161 653.0285 648.1076 650.1013 648.1804 634.1835 631.9747 631.3766 623.7532 625.5095 627.4335 62 1537.5 618.4463 652.4715 646.8165 649.1804 647.2532 633.8513 631.1044 631.9715 623.1171 623.8101 627.2373 63 1562.5 618.9866 653.0949 647.0633 650.0981 647.519 634.3228 631.8418 631.7563 623.1424 624.4842 628.1835 64 1587.5 618.334 652.5475 646.3133 649.9462 647.7532 633.2278 630.8133 630.9684 623.2025 623.8797 627.2152 65 1612.5 618.952 653.3354 647.8291 650.7911 648.7722 634.5475 631.7975 632.3924 624.1013 625.0475 629.0886 66 1637.5 618.6641 651.6646 647.1772 649.1677 647.0538 633.25 631.0601 631.7057 623.8259 623.8639 627.7627 67 1662.5 618.5372 652.2785 646.7342 650.1646 647.8956 633.5127 631.1203 630.981 623.5665 624.443 627.7722 68 1687.5 618.8991 652.8639 646.7025 650.3639 648.2025 634.1772 631.0823 631.5665 624.1772 624.9241 628.0633 69 1712.5 618.3047 651.5222 646.6772 648.7722 646.9937 633.2247 630.8766 631.0633 623.0601 624.481 627.0316 70 1737.5 618.8147 653.2342 647.193 649.3861 647.6203 634.5127 631.1804 631.4652 623.7563 624.4652 628.8734 71 1762.5 618.501 652.0728 646.9589 648.8513 647.1424 633.2278 631.0633 631.3165 623.5759 624.4177 628.1108 72 1787.5 619.1409 654.0158 646.712 650.2753 648.0506 634.3133 631.9652 631.9652 623.5665 624.2658 628.2911 73 1812.5 618.8329 652.3608 647.4304 649.5791 647.7753 634.2184 631.5316 631.0696 624.3608 625.0506 627.4399 74 1837.5 618.4696 653.3544 646.0981 649.3259 647.4494 633.8639 630.7753 631.1203 
622.8734 626.2722 627.8196 75 1862.5 618.4497 652.5854 647.1709 648.3639 646.4051 633.5348 631.5918 631.0095 623.9399 624.2057 628.2627 76 1887.5 618.8617 652.2278 646.0411 649.5506 647.8861 633.519 631.4367 631.1203 624.0759 624.2215 628.0095 77 1912.5 618.2807 651.7278 646.557 649.1962 646.8924 632.481 631.9051 631.7753 622.8861 623.6361 627.3133 78 1937.5 619.117 653.0918 647.0949 649.3418 647.8797 634.1203 631.3797 631.9525 624.8671 624.7532 628.25 79 1962.5 618.8144 652.9747 646.5348 649.9873 647.5095 634.2468 631.6171 630.9272 624.1424 624.6709 627.7658 80 1987.5 618.6177 652.7405 646.6614 649.6867 646.6329 633.5823 631.6108 631.231 623.9304 624.1709 628.6487 81 2012.5 618.7662 652.481 646.3481 649.7278 647.4209 634.1171 631.2785 630.6962 623.6266 625.2247 627.5728 82 2037.5 618.5925 652.3956 646.0063 649.6139 647.0475 633.6013 631.0949 631.3513 623.4557 624.7373 627.5601 83 2062.5 619.0939 652.7785 646.5728 649.981 647.6044 633.8449 631.0063 632.2532 624.1392 624.5316 628.1994 84 2087.5 619.1902 652.3797 647.1962 649.4367 647.1835 633.0696 631.3924 632.0285 624.3703 624.6614 628.3861 85 2112.5 618.8515 652.519 646.0665 649.6203 647.7215 633.5285 631.3703 632.1108 624.2627 624 627.9905 86 2137.5 618.7562 653.0158 646.75 649.7753 648.4051 633.8101 632.0222 630.6772 624.1361 623.8671 627.6266 87 2162.5 618.5491 652.1013 646.0032 649.6646 647.9494 633.2658 631.25 631.2184 624.1424 624.0285 627.8291 88 2187.5 618.7426 652.3671 646.4367 649.0443 647.4652 633.8165 630.6519 630.7247 623.8797 624.8259 628.0538 89 2212.5 618.5531 652.0601 646.0158 647.9589 646.462 634.2025 631.1772 630.7152 623.3101 624.1171 628.0158 90 2237.5 618.6534 652.3165 646.4146 649.1108 646.9525 632.8671 631.7816 631.712 623.1772 623.5538 628.0696 91 2262.5 618.6187 652.4905 646.7057 648.4114 646.5728 633.5348 631.5443 631.3513 623.8449 625.1361 628.2658 92 2287.5 618.4215 651.0918 645.9905 649.0633 646.9937 632.712 631.0127 631.4241 623.2785 623.9747 627.5032 93 2312.5 618.7115 651.693 
645.0127 648.7342 647.5 632.8481 631.3133 631.038 623.1171 624.1646 627.4494 94 2337.5 619.0205 652.3892 646.4652 648.8924 646.6013 633.7532 632.9399 631.4778 624.769 624.4304 628.2532 95 2362.5 618.5737 651.2405 646.6108 648.25 646.3924 633.3291 631.6203 631.7405 624.0032 623.8703 629.1203 96 2387.5 617.6356 650.3861 645.2089 646.943 645.4715 632.9715 630.2595 629.5823 622.4747 623.3449 627.038 97 2412.5 618.6216 651.9462 646.1677 648.962 646.9335 633.712 631.2722 631.1203 623.8766 624.519 627.8038 98 2437.5 618.6898 651.8196 645.7278 648.4968 646.3861 633.7405 631.288 631.3354 624.2342 624.3386 628.0918 99 2462.5 618.0993 650.731 644.7089 647.9177 646.5728 632.8513 629.9272 630.9462 622.6361 623.4241 627.7373 100 2487.5 618.5218 651.4873 645.4304 649.0285 646.9082 632.9051 631.0316 630.4589 623.1392 624.0316 627.4652 101 2512.5 619.0366 651.538 646.4019 648.9684 646.8449 633.8418 631.288 632.3481 624.1297 624.2563 628.5285 102 2537.5 618.8007 651.6519 645.8481 648.5759 646.7595 633.3386 631.3228 630.8038 624.1835 624.2785 627.3133 103 2562.5 618.554 652.0253 645.5886 648.7658 646.8228 632.1646 630.1741 630.8608 623.3924 623.6171 627.7342 104 2587.5 618.0766 651.2278 645.057 647.3956 645.5601 632.6392 630.5759 630.3956 622.6867 623.6646 626.4462 105 2612.5 618.5946 650.9937 645.8861 647.9873 646.3449 632.9873 631.9272 631.0222 623.693 623.5759 626.7816 106 2637.5 617.9828 650.7563 645.7215 647.6424 645.75 632.5759 630.4652 629.8576 622.3386 623.2595 626.7468 107 2662.5 619.0805 651.9304 645.9747 648.6392 646.9209 634.6234 631.3956 631.3987 624.019 624.8766 627.8418 108 2687.5 618.3992 651.4399 646.1709 647.5854 646.4905 632.4937 630.3101 630.4747 622.962 623.1456 628.2532 109 2712.5 618.4353 651.6171 646.6297 647.7405 646.0601 633.5886 630.5475 630.4019 623.0348 623.5316 627.4082 110 2737.5 618.943 651.4462 645.5443 649.0253 646.9335 634.2437 631.2595 630.9177 623.981 624.8133 627.3892 111 2762.5 618.6558 651.6203 646.0285 647.9335 646.4209 633.3924 631.2563 
630.9968 622.9905 624.0127 627.6013 112 2787.5 618.4095 651.962 645.6108 648.7816 646.519 633.3196 631.1994 630.481 623.4209 623.5538 627.4652 113 2812.5 618.5409 651.3038 645.4082 648.0063 646.1203 633.1519 631.8259 630.2373 623.8386 624.3449 626.6835 114 2837.5 618.4967 651.4146 645.3766 648.2816 645.9462 633.2943 630.1614 631.3259 623.3797 624.0759 627.4715 115 2862.5 618.8603 651.3165 646.3766 648.3576 646.5918 633.25 631.8829 631.1297 624.2215 624.2278 627.8544 116 2887.5 619.1797 651.481 646.2405 648.4905 647.0411 634.2658 630.9557 631.7184 624.0095 624.2816 627.981 117 2912.5 618.3571 651.3956 645.1234 647.1551 645.6456 633.3418 631.0854 630.6835 623.2658 623.5728 627.8924 118 2937.5 618.0335 650.3924 645.481 647.3892 645.5 632.1741 630.3196 630.4114 623.2658 623.4241 627.1266 119 2962.5 618.1177 650.0063 645.8006 647.8165 645.4873 632.4335 630.8291 630.788 623.0316 623.8861 627.2437 120 2987.5 618.9252 650.75 645.769 648.7911 646.8544 633.462 631.0854 631.0063 623.6266 624.1519 627.6456 121 3012.5 618.2519 651.3006 645.9462 647.8418 645.6392 632.3354 630.2057 630.2278 623.1076 623.8101 627.4304 122 3037.5 618.8213 650.7342 645.8259 648.1551 646.7595 632.7437 630.6013 631.2785 623.9873 624.3101 628.019 123 3062.5 618.7147 651.7627 645.8133 647.9684 645.9715 633.8924 631.231 630.6835 623.1361 624.1709 627.5728 124 3087.5 618.7709 650.8671 646.1551 647.8987 645.9778 633.2025 631.0823 631.2373 623.6076 624.2215 628.1139 125 3112.5 618.1167 650.9684 645.4209 647.4019 645.7152 632.7785 630.1962 630.2089 623.1013 624.269 626.9462 126 3137.5 619.3078 650.8892 646.0063 648.2184 646.6835 633.5791 631.3386 631.4082 624.3576 625.7184 627.8513 127 3162.5 618.7881 651.2532 646.2215 647.6044 646.0981 633.3608 631.0728 631.1741 624.212 624.3038 627.6709 128 3187.5 618.6196 651.038 646.038 647.6551 646.3449 632.8797 630.9367 630.9842 623.3006 624.0854 627.0696 129 3212.5 618.9873 650.8449 646.25 648.231 645.9525 633.9747 630.7278 631.4209 623.8576 624.1234 628.2215 130 
3237.5 618.4587 650.8386 645.557 647.6329 645.3861 632.9937 631.0601 631.4209 623.6709 623.3576 627.019 131 3262.5 618.0786 649.6329 644.2437 646.9494 644.5601 631.712 630.1835 630.5918 622.6804 623.6013 626.8449 132 3287.5 618.625 650.6044 644.8892 648.2184 645.5316 632.8481 630.5665 631.3133 623.3101 623.9019 627.019 133 3312.5 618.6231 651.6424 645.8829 647.1234 645.5253 633.9019 630.7152 631.0538 623.3987 623.1551 627.6424 134 3337.5 618.6622 650.5 645.1044 647.9177 645.3101 632.9304 630.6677 630.3259 623.7247 623.7943 627.4146 135 3362.5 618.5464 650.6044 644.7089 646.8038 645.3924 632.6329 630.481 630.7278 624.0443 624.2785 626.9589 136 3387.5 618.7836 650.3671 645.6835 647.2595 645.2563 633.4177 630.693 631.1835 623.3386 624.3418 626.9241 137 3412.5 618.8434 651.3323 644.9684 647.3671 645.3449 633.3449 630.943 630.3418 623.5759 623.9525 628.3797 138 3437.5 618.6767 650.7437 645.8133 646.9082 644.5981 633.2911 630.2785 631.0918 623.8639 624.1139 626.5791 139 3462.5 618.2843 649.8323 644.7595 647.7943 645.6392 632.7405 629.9304 630.1329 622.5886 623.2247 626.7437 140 3487.5 618.3255 651.6203 644.9146 647.5854 645.3481 632.6614 630.1424 630.0601 623.7057 623.8829 627.0348 141 3512.5 618.8023 650.8291 644.9462 647.7184 645.6804 632.4146 630.9209 631.0063 623.8133 624.1456 627.5791 142 3537.5 618.2801 650.3734 644.4146 647.8165 645.1361 632.9715 630.4462 630.1962 623.2563 624.462 626.481 143 3562.5 618.6223 650.7215 644.8703 647.1392 645.5538 632.7816 630.5601 629.9873 623.5665 623.4209 626.8544 No. 
time (sec) aliquot grain 1 grain 2 grain 3 grain 4 grain 5 grain 6 grain 7 grain 8 grain 9 grain 10 1 12.5 616.0312 658.6906 649.9153 658.2347 655.7046 633.8033 631.1412 632.9204 621.5427 622.2058 629.0102 2 37.5 612.2725 655.4048 646.6503 655.4416 652.5029 629.4791 626.8322 628.7589 618.7269 618.154 625.8077 3 62.5 613.7485 656.4286 647.5412 656.7557 653.8102 631.0449 628.7583 629.5886 619.1664 619.6345 626.887 4 87.5 614.9674 658.2303 648.6834 657.0633 653.7187 632.5811 629.3318 631.163 620.3468 620.5988 628.0899 5 112.5 615.9991 657.9084 648.8509 657.479 654.627 633.0045 632.493 631.985 622.7405 623.5552 629.6421 6 137.5 616.2784 658.4475 649.7241 658.8042 656.2519 634.8908 631.4959 632.1283 621.2177 622.3616 629.4942 7 162.5 616.1318 659.4131 649.8941 658.4598 656.1778 633.8129 631.2871 632.5899 620.932 622.107 628.9506 8 187.5 616.8854 659.6138 650.2727 658.8553 656.225 634.6475 632.155 632.6959 622.0203 622.6792 629.8228 9 212.5 617.3767 660.7341 651.209 659.1971 656.6876 634.6072 632.0881 633.5808 622.4895 623.2277 629.7101 10 237.5 617.4728 659.6334 650.7071 658.6893 656.4377 635.3214 632.2872 632.9481 622.7366 623.164 630.1439 11 262.5 618.0988 660.6009 651.5431 660.2137 658.0555 635.9507 632.8362 633.7608 623.0455 623.0344 630.3384 12 287.5 618.1591 660.6857 651.6801 660.0739 657.2581 636.1234 633.2454 634.4526 623.7032 624.5858 630.8197 13 312.5 618.4908 661.368 652.1233 660.3118 657.2619 636.2012 633.1976 634.0285 624.1963 624.7369 631.0497 14 337.5 618.6913 661.0175 652.3968 661.1084 658.283 636.5828 633.3447 634.4143 623.9483 624.9399 631.358 15 362.5 618.5761 660.6205 652.3064 660.9544 658.1209 636.4211 633.6617 634.9462 623.7633 624.4986 631.2697 16 387.5 619.0878 661.6962 652.0899 660.2787 657.1431 636.2823 634.0592 635.3468 624.1583 625.4925 631.0609 17 412.5 619.1123 661.0516 653.4275 661.4284 658.7825 636.6452 633.8941 635.0008 624.4952 625.1524 631.2235 18 437.5 619.1219 661.83 652.6724 660.51 657.6139 637.0458 634.0824 635.157 625.0518 
625.1468 631.8565 19 462.5 619.1186 660.596 652.6746 660.2996 657.7479 636.0863 634.0373 635.409 625.2233 626.069 631.0531 20 487.5 618.6514 660.9863 652.2574 659.3774 657.0476 635.8082 633.5955 634.8518 624.9103 624.4519 631.1783 21 512.5 618.6185 661.108 652.826 660.109 657.1883 636.7313 633.7887 634.8854 623.9059 624.4369 631.8754 22 537.5 619.0751 660.9557 653.2617 660.274 657.0657 636.7279 633.791 635.2694 624.6487 625.6795 630.7416 23 562.5 618.8382 660.4928 651.8307 659.4779 656.3989 636.0958 633.3189 634.919 624.4194 624.9968 631.0872 24 587.5 619.1382 660.9742 652.1005 660.8257 657.4648 636.3843 633.3951 635.4573 624.4644 624.8242 631.8506 25 612.5 619.7459 661.0917 652.6333 660.569 658.3732 637.754 634.7651 635.4046 624.9538 626.0424 631.7531 26 637.5 619.3593 661.2368 652.5708 660.5365 657.9042 636.8119 634.2966 634.7033 624.1106 625.0724 631.2318 27 662.5 619.4168 661.4003 652.8548 660.5254 658.0511 636.8973 633.3523 635.1406 624.1972 625.0206 631.5518 28 687.5 619.8258 660.9017 652.6172 660.6988 658.0219 637.6791 634.7789 635.3681 624.8823 626.8548 631.3864 29 712.5 619.4522 661.8692 651.9301 660.1876 657.8309 637.6165 634.5131 635.77 624.4221 625.0042 631.6815 30 737.5 619.6236 661.1969 653.3451 660.4172 657.922 636.145 634.2412 635.0599 625.5943 625.7517 632.543 31 762.5 619.3767 660.958 652.1418 660.562 658.2986 636.2887 634.3318 635.3714 625.2461 624.884 631.8345 32 787.5 619.1879 660.5759 652.2561 659.0396 656.5601 635.7813 634.2179 635.4272 624.7261 625.9287 632.3679 33 812.5 619.5383 660.4396 652.2926 659.4839 657.0866 636.5578 633.9843 635.2885 624.8658 625.742 631.2716 34 837.5 619.8293 661.4764 652.6684 660.3092 658.0901 636.7865 634.6386 635.0527 625.0525 626.3872 632.4097 35 862.5 619.3868 661.2335 652.5278 659.1183 656.4801 637.0851 634.5441 634.3174 624.1022 624.6214 630.8086 36 887.5 619.5994 660.5377 652.3172 660.0516 657.437 636.587 634.1476 635.7901 625.9885 626.2344 632.9402 37 912.5 619.0942 659.3482 652.157 658.9576 655.6765 
637.0414 633.3473 633.5141 624.4448 623.8679 630.0886 38 937.5 619.4612 659.5308 652.6359 660.1829 656.9747 637.1701 634.1015 636.1454 625.1366 626.2227 631.9286 39 962.5 619.23 660.2986 651.4306 659.4451 656.2312 636.3953 633.2892 634.6489 624.569 625.2839 631.4595 40 987.5 619.5328 660.9764 651.7374 659.2391 656.6079 636.4839 633.713 635.7279 624.8239 626.4778 631.2861 41 1012.5 619.5093 659.6945 651.6841 659.2604 655.8882 635.5746 633.858 635.6404 625.4522 625.3535 631.8243 42 1037.5 618.6795 659.061 651.4754 658.0793 655.6121 636.4216 632.8933 633.8865 624.0987 624.48 630.4105 43 1062.5 619.2866 659.7778 652.5493 659.2619 656.6086 637.068 633.6919 634.7811 624.4517 624.8592 631.1715 44 1087.5 619.1471 660.3686 652.1764 658.2279 656.1815 636.1857 634.2407 634.8808 625.1851 625.1814 630.8224 45 1112.5 619.6001 660.9443 652.8683 658.8397 656.3722 636.2745 634.5092 635.2982 624.781 624.8203 631.3737 46 1137.5 619.6578 660.1256 652.3762 659.4315 656.9237 636.7025 634.0145 635.1175 625.2248 626.2819 631.1542 47 1162.5 619.195 659.4838 651.4366 658.5505 655.7848 636.5201 633.5235 634.7034 624.5456 625.142 631.4184 48 1187.5 619.4215 659.7769 651.7305 657.9934 655.585 636.2133 633.8861 635.2471 624.3509 625.1392 631.3784 49 1212.5 619.4365 660.417 651.6828 658.4544 656.0292 636.839 633.8779 636.0436 624.8158 625.3572 631.5027 50 1237.5 619.2843 659.2626 652.1786 658.4305 656.2508 636.8049 632.9646 634.1576 624.554 625.3334 631.2679 51 1262.5 619.6429 659.0053 651.6919 658.7217 656.6364 636.5588 634.3722 635.2047 624.8602 625.6231 631 52 1287.5 619.0538 659.3861 651.2924 657.7921 655.3807 635.633 633.1669 634.139 624.5008 625.173 631.1169 53 1312.5 620.032 660.536 652.2995 660.0736 657.1688 637.2071 634.6034 634.9736 625.1411 626.5192 631.8403 54 1337.5 619.4964 659.4269 651.6734 658.4676 656.1291 636.8711 633.8782 634.7606 624.4274 625.0013 630.9104 55 1362.5 619.7869 660.1535 652.2897 658.3939 655.9227 637.2328 633.873 635.0491 625.0588 626.2417 631.6512 56 1387.5 
619.4803 660.4408 651.9966 658.3815 656.0329 637.2461 633.9087 634.7836 624.2137 625.2806 631.6511 57 1412.5 619.6992 660.2073 651.504 658.0831 655.4986 636.1798 634.1972 635.0368 625.1055 626.2759 631.6616 58 1437.5 619.5442 658.662 651.3918 657.9064 655.5442 635.9153 633.7881 634.1599 624.1967 625.1631 631.4295 59 1462.5 619.4614 659.7977 651.5216 658.4922 655.9853 636.4371 634.2494 634.2409 625.1905 625.6924 632.02 60 1487.5 619.6884 659.9299 651.3604 658.3508 655.3531 636.6494 634.5504 634.9116 624.3557 625.6612 631.0877 61 1512.5 619.321 658.5944 651.0599 658.6518 655.6142 636.0016 633.4034 634.5415 624.6537 625.0472 630.9542 62 1537.5 618.9828 658.6687 651.6243 657.6624 655.6001 635.7524 633.4592 634.2011 624.1516 624.5758 631.3773 63 1562.5 618.8885 658.6872 650.5297 657.4567 654.5753 635.5946 632.2904 634.1618 623.8654 625.468 630.3673 64 1587.5 619.4941 658.6496 651.5327 657.9368 655.829 636.1975 633.2635 634.4595 625.245 625.7107 631.5946 65 1612.5 619.3935 659.1844 650.7135 657.4629 654.9711 636.492 634.176 635.4589 624.9972 624.9031 630.5326 66 1637.5 620.0406 659.5483 651.796 658.3489 655.9309 637.283 634.1377 635.8601 624.9593 625.9175 632.1007 67 1662.5 619.3584 658.9118 651.6015 657.316 655.3936 636.2646 633.7975 633.9978 625.6658 625.9268 630.9313 68 1687.5 619.5718 659.0942 651.0267 656.9502 654.5683 636.1923 633.5457 634.6133 624.7929 625.713 631.9673 69 1712.5 619.2193 658.7454 651.3887 657.2803 655.3566 635.1547 634.0597 634.3789 624.0603 624.6939 631.1956 70 1737.5 618.9174 657.8332 651.453 657.6417 654.7302 635.399 634.1921 634.3673 624.1435 624.8887 630.1608 71 1762.5 619.4594 658.7201 651.0777 657.3186 654.5621 636.7052 633.9346 634.8716 624.0879 625.0527 631.0306 72 1787.5 619.7125 659.3703 651.2185 656.8614 654.7953 636.302 633.8636 634.1315 624.8647 626.2475 630.7097 73 1812.5 619.5472 657.7522 651.5681 657.1853 654.6893 635.6448 633.6509 634.3233 624.8097 625.5929 631.1763 74 1837.5 619.0432 657.8746 651.3865 656.5779 655.0572 635.7624 
633.8879 634.907 624.4179 625.2225 630.7489 75 1862.5 619.6406 659.345 651.2913 657.4556 655.0584 635.8297 633.7887 634.3467 625.2603 625.3803 630.8645 76 1887.5 619.2242 657.9095 651.259 656.5765 654.3004 636.3673 633.3679 634.3054 624.8213 625.1158 629.9752 77 1912.5 619.1395 657.4696 650.5891 656.6981 653.9891 635.7321 633.3386 634.003 623.9899 624.5865 629.89 78 1937.5 619.5915 659.051 651.9002 656.2175 653.737 636.5006 633.1654 634.2198 625.5754 625.6681 630.9369 79 1962.5 619.1232 658.8884 651.6986 656.3629 654.0432 636.0789 633.3622 634.2651 624.0047 625.3912 630.3894 80 1987.5 619.1797 657.7236 650.7777 656.6434 654.3553 636.203 633.3624 633.9448 624.7588 625.7231 631.2615 81 2012.5 619.4284 657.9564 650.2802 657.3472 654.7189 635.5213 632.7538 633.3559 624.2177 625.1658 630.4095 82 2037.5 619.8186 659.4537 650.7789 656.8068 653.9533 635.7599 634.2697 634.5317 625.5351 625.7355 630.6639 83 2062.5 619.0361 658.4525 650.6979 655.9353 654.8402 636.3408 633.0554 634.0612 625.0946 625.2953 630.1426 84 2087.5 619.0679 658.2028 650.7546 656.263 653.9579 635.3997 632.9751 633.8175 624.7005 625.0064 630.2469 85 2112.5 619.5036 657.6676 650.9825 656.0741 653.8184 635.8177 633.756 634.3187 625.1032 625.7711 631.028 86 2137.5 619.5838 658.24 651.1151 656.7514 654.7543 636.0051 633.4436 635.0968 624.5704 625.4066 630.8816 87 2162.5 619.7774 658.8586 650.8417 656.5809 653.8432 636.021 633.6121 635.1417 625.2152 625.9074 631.0059 88 2187.5 619.3818 658.008 651.2315 655.5812 653.7966 635.8189 633.3187 634.5345 625.5258 625.5007 629.6714 89 2212.5 619.4099 657.9034 650.5906 656.2516 654.5323 635.8962 633.5433 634.6133 624.7552 624.6984 630.9105 90 2237.5 619.4089 657.8727 650.5439 656.8112 653.9223 636.1951 633.3068 634.0905 624.7773 625.0253 629.772 91 2262.5 619.0846 657.3841 650.6034 654.7351 652.6919 635.3334 633.3407 634.7331 625.0086 624.9161 630.2352 92 2287.5 619.1711 657.5069 650.5812 655.9594 653.3334 635.3078 633.4448 634.1535 624.0512 624.5332 630.9288 93 2312.5 
619.6704 657.979 650.9181 656.2473 654.2735 635.9193 634.164 634.3943 625.4596 625.8996 630.9707 94 2337.5 618.9883 657.2547 649.8742 655.3017 653.7401 634.9956 633.05 633.1128 624.573 625.1108 630.315 95 2362.5 618.77 656.8524 650.6318 655.4716 652.9324 635.1069 632.2976 633.7103 623.8329 624.6676 630.0484 96 2387.5 619.18 656.759 649.9029 654.9803 653.013 635.6059 633.0676 634.4104 624.1588 625.0173 630.1128 97 2412.5 619.5943 657.0053 650.5318 655.9102 653.5507 636.4233 633.3031 634.4716 625.0143 626.319 630.4698 98 2437.5 619.5089 657.0871 650.8551 655.4628 653.8055 635.4576 633.061 634.1545 624.5815 625.6028 630.2713 99 2462.5 619.1695 656.976 650.3881 654.7235 652.6026 635.3256 633.1645 633.4862 624.8209 624.662 630.4411 100 2487.5 618.513 656.6107 649.1075 654.3781 652.9325 634.9442 632.8046 633.1775 623.6661 624.6586 629.0236 101 2512.5 619.2534 657.1351 650.5383 656.098 653.0957 635.2848 633.0267 634.5299 624.5957 624.9733 629.6232 102 2537.5 619.2428 656.8518 649.9549 655.1723 652.813 635.4614 633.5816 633.4532 624.7441 625.315 630.2144 103 2562.5 619.3259 657.2634 650.6989 655.4452 652.9216 635.4495 632.8607 633.0796 624.5442 625.4914 629.8364 104 2587.5 618.8588 655.8009 649.4842 654.8497 652.251 635.2849 633.4936 633.4721 624.7792 624.7345 629.9124 105 2612.5 619.1994 656.7192 650.2401 655.0245 652.8849 635.9081 633.3239 633.1811 624.7036 625.19 630.1405 106 2637.5 619.9097 656.9048 650.4774 655.2567 653.4062 635.5823 633.2125 634.011 625.769 625.796 631.052 107 2662.5 619.4985 657.6028 650.6427 655.4614 653.269 635.1971 633.4538 633.2507 624.8212 625.5746 630.1575 108 2687.5 619.5252 657.6984 649.8065 655.6652 653.447 635.831 633.2355 633.1174 624.3204 625.6367 629.853 109 2712.5 618.6822 656.1635 650.075 653.9919 651.5348 634.6455 633.0949 633.5289 624.6241 623.9609 629.9132 110 2737.5 619.5611 657.4272 650.3853 655.1591 653.2736 634.8469 633.1436 634.2127 624.3819 625.4382 629.7827 111 2762.5 619.2262 656.3097 649.845 654.0503 652.607 635.4814 
633.1084 633.1744 624.3562 625.4302 629.8924 112 2787.5 619.0123 656.8555 650.4595 654.914 652.7917 634.7565 632.5618 633.6617 624.5954 625.0725 629.9964 113 2812.5 619.0788 655.4473 649.5705 654.366 652.6426 635.1044 633.3604 633.324 623.7475 624.5335 629.8858 114 2837.5 618.9793 656.731 649.5823 654.2256 652.1266 634.853 633.2974 633.3168 624.1936 624.96 629.4152 115 2862.5 618.7719 655.694 649.4038 654.2175 652.1053 635.1691 633.1611 632.8076 623.1571 624.4319 630.0259 116 2887.5 619.1117 656.9854 650.0139 653.9536 651.8239 635.6695 632.1369 632.7 623.9512 624.8383 629.6973 117 2912.5 619.1237 656.0197 650.2586 654.5489 652.2038 635.4597 632.4124 633.5215 624.9163 625.3797 629.4735 118 2937.5 618.9059 656.6491 648.8929 654.4913 651.8942 635.3355 631.9433 633.5877 623.8877 624.385 629.1411 119 2962.5 619.3233 656.8432 649.3856 654.7177 653.3717 635.5564 633.6213 633.4085 625.1093 624.9929 630.0352 120 2987.5 619.8558 656.845 650.6094 654.5986 652.7007 636.4113 633.2379 633.598 625.1809 626.0924 630.5962 121 3012.5 618.9967 655.3859 649.56 654.115 652.3905 635.3777 632.0422 633.4079 624.9945 624.5787 629.6779 122 3037.5 619.3723 656.248 649.6002 654.6798 653.4212 635.6108 632.0718 633.4992 625.0539 624.9248 629.7259 123 3062.5 619.1507 656.0902 649.1019 653.3824 651.2278 635.6783 633.2289 633.1657 623.6423 624.6499 630.4444 124 3087.5 619.0089 656.5217 649.2501 653.7768 651.6482 634.6413 632.5473 633.2417 623.4923 623.9783 629.4027 125 3112.5 619.3791 656.786 649.4983 654.3871 652.0517 635.4073 632.6579 632.9998 624.485 624.9732 629.9129 126 3137.5 618.9518 655.0029 649.4529 653.5898 651.7109 634.7494 632.6919 632.8519 623.8593 624.4421 629.7089 127 3162.5 618.9306 654.9382 649.198 653.5625 652.1641 635.0823 632.6989 633.2711 623.9617 625.4174 630.0145 128 3187.5 619.4031 656.3798 649.6674 654.3949 652.3793 635.1739 632.3031 633.898 624.6795 625.2214 630.1257 129 3212.5 619.3513 655.6458 648.8299 653.5794 651.51 635.39 633.0969 632.8097 624.3643 625.5809 629.4613 
130 3237.5 619.3712 655.9213 650.0245 654.0766 651.2707 635.3219 632.8387 633.6629 624.7969 625.624 629.8997 131 3262.5 619.7244 656.536 649.5171 653.7458 652.416 635.3601 633.2222 633.986 625.4069 625.3191 630.0709 132 3287.5 619.252 655.9579 649.5582 653.9075 651.657 635.4694 633.4507 633.4692 624.5755 625.2649 630.2173 133 3312.5 618.7681 655.9811 649.3099 652.7297 650.7115 634.4634 632.3627 633.4456 624.0453 623.7238 629.3844 134 3337.5 618.5923 655.5339 649.1084 653.0037 651.1682 634.2581 632.8341 632.6062 623.3144 624.769 629.4238 135 3362.5 618.7292 655.2741 648.5827 653.3296 650.7324 635.1216 631.6936 632.1359 624.1303 624.507 629.8434 136 3387.5 618.8453 655.0162 649.3634 653.23 650.9911 634.3368 632.558 633.0361 624.3333 625.1946 629.0829 137 3412.5 619.0023 655.6075 648.5613 653.0687 650.2505 634.5349 632.2745 633.4133 624.0704 624.4851 630.5593 138 3437.5 619.4083 656.1359 649.5212 653.6081 650.8799 635.3503 632.8908 633.652 624.937 626.2503 630.021 139 3462.5 619.1464 655.9159 650.2939 652.9551 651.3079 635.4584 632.5818 633.2102 625.1875 625.5281 629.0551 140 3487.5 618.6135 654.8158 648.183 651.9918 650.1188 634.7103 631.5634 632.6345 624.2616 624.8559 628.3536 141 3512.5 618.9261 655.6843 648.6419 653.0123 650.9351 634.9509 631.7626 632.6395 623.7726 624.7664 629.5252 142 3537.5 619.0118 655.2039 649.4776 653.4562 651.3888 634.5172 632.2866 632.4602 624.7291 624.9897 629.2447 143 3562.5 618.9974 656.1514 649.4982 653.2829 650.7573 635.6186 632.8572 632.0904 624.5361 625.1963 629.7383 144 3587.5 619.2582 654.5657 649.0555 653.6096 651.5511 634.9013 632.5776 633.134 624.4961 624.8664 629.3546 145 3612.5 618.9818 654.2458 648.5193 652.3381 650.2084 634.7433 631.8828 632.7137 624.1804 625.0461 629.5779 146 3637.5 618.885 655.1888 648.4858 653.0299 650.7826 634.6405 633.4996 632.4273 623.8651 624.1743 628.95 147 3662.5 618.738 654.3687 649.2513 651.7636 649.9919 634.0802 631.0274 632.905 624.3196 625.1019 627.5411 148 3687.5 619.1882 655.8884 648.9971 
652.8715 651.0577 635.1871 632.2535 633.0031 624.3674 624.9991 629.8081 149 3712.5 619.0847 655.2343 649.6264 653.1473 651.5538 635.2048 632.7107 633.1395 625.5345 624.3397 629.1202 150 3737.5 619.2066 655.369 648.4612 652.5426 650.9735 635.1124 632.6933 632.6564 624.7077 624.9398 629.0221 151 3762.5 618.6994 654.3894 648.0961 652.2881 649.7615 635.0301 631.8474 632.3641 623.9357 624.5118 628.5999 152 3787.5 618.9899 654.8033 648.2224 652.3129 649.9764 634.7707 632.9237 632.5437 623.5759 624.7902 629.6734 153 3812.5 619.3187 654.9825 649.1985 652.5379 650.9844 634.6611 633.0142 633.3398 624.703 625.1121 629.1493 154 3837.5 618.8222 654.5908 648.3707 652.4884 650.0787 634.7839 632.0598 632.7098 624.1325 624.4341 629.077 155 3862.5 618.3551 654.2616 647.2054 650.4855 648.6326 634.2486 631.8976 632.4435 622.8922 623.8339 628.2844 156 3887.5 618.9439 654.8196 648.0153 652.6733 650.5895 634.6042 632.3701 632.7489 624.1487 624.2561 628.8858 157 3912.5 619.2987 655.654 648.8183 652.8049 650.9519 635.0246 632.7168 633.0453 624.6938 625.5101 629.5049 158 3937.5 619.1912 654.6866 648.7767 651.8879 649.8744 634.7485 632.1456 632.5525 625.0925 625.234 629.4699 159 3962.5 619.1578 655.1568 648.922 652.4542 649.8878 634.1395 632.115 632.5593 624.7185 624.8243 629.4701 160 3987.5 618.954 653.5426 648.1742 652.4137 650.4536 635.095 632.2903 632.9483 624.0543 625.6944 628.8961 161 4012.5 619.0859 654.4974 648.9592 652.6077 650.5737 634.72 632.2438 632.2209 624.3072 625.0119 628.9657 162 4037.5 618.9742 654.8866 649.5078 651.8472 649.6595 634.9389 632.4768 632.697 624.3441 625.2724 628.9327 163 4062.5 618.5499 654.163 648.7347 651.0778 649.625 634.4678 632.9044 631.8673 623.7386 623.8049 628.7251 164 4087.5 618.9236 654.6388 647.8996 652.0784 649.914 634.8287 631.9617 632.2116 624.1101 625.0873 628.792 165 4112.5 618.4588 653.8673 647.7265 651.611 648.9233 633.7383 631.6205 631.6542 624.0559 624.7463 628.8468 166 4137.5 618.7164 654.4114 648.5582 651.5499 649.8556 634.1824 632.1235 
631.708 623.8893 623.9264 628.8113 167 4162.5 618.8752 654.1756 647.7811 651.4162 649.5315 634.7821 632.1283 632.8556 624.2156 624.1171 628.1944 168 4187.5 619.3569 654.6552 648.099 651.7625 650.9024 634.5983 632.9384 633.0293 624.1294 624.8762 629.8321 169 4212.5 618.4404 653.5798 647.4559 652.1242 649.8545 634.6389 632.2939 631.8735 623.4035 624.022 628.4769 170 4237.5 618.6147 653.7813 647.3801 650.9257 648.8427 633.3048 631.7706 633.0439 623.0475 624.9094 628.6247 171 4262.5 618.5714 652.9237 646.916 651.2937 648.8039 634.0328 632.0201 632.2848 623.6776 624.8949 628.707 172 4287.5 618.4561 654.454 648.1065 651.0211 649.0695 633.6688 631.2935 631.864 623.6754 624.7348 629.1651 173 4312.5 619.0053 654.5872 648.0999 652.0448 649.8237 634.4566 631.8808 632.6611 624.0995 624.6749 628.3811 174 4337.5 618.7321 654.0076 647.7945 651.0182 649.4733 634.1226 631.7433 632.5805 624.262 624.6751 628.9124 175 4362.5 618.8796 654.0749 648.2349 651.6931 649.5934 634.7741 632.4299 632.2821 623.5638 624.6331 629.2547 176 4387.5 618.9158 653.4054 648.0452 651.3552 649.3086 634.5742 632.0158 632.6057 623.9611 625.1525 629.1208 177 4412.5 618.8451 655.1949 647.7965 651.2479 649.4624 634.492 631.9341 632.1602 624.2743 623.874 627.9982 178 4437.5 619.0385 654.3906 647.7725 651.0811 649.1565 634.1971 632.6773 632.0087 624.7515 624.6455 628.8649 179 4462.5 618.9516 654.4443 648.1627 650.8967 649.2179 634.1522 632.333 631.9168 623.8803 624.6349 628.3874 180 4487.5 619.0969 654.2161 648.8333 652.1127 650.1702 634.502 631.7016 632.349 623.9839 624.3378 628.9979 181 4512.5 619.0494 653.2785 647.5071 651.651 650.4825 634.0314 632.3306 633.0096 623.6339 624.9298 629.3816 182 4537.5 618.364 653.3339 647.7706 650.8002 649.297 633.0787 630.4672 631.3637 624.3894 624.2077 628.7537 183 4562.5 619.2493 653.2704 648.3747 651.6715 649.2655 634.2729 631.8156 632.6542 624.8062 625.5303 630.5464 184 4587.5 618.8602 653.9763 648.3001 650.8425 649.372 634.2168 631.493 631.8857 623.7743 624.0267 628.5333 
185 4612.5 618.8645 653.8773 648.1708 651.5908 649.8723 634.3427 632.026 632.8272 623.6715 624.4646 628.6919 186 4637.5 618.9328 653.9892 648.087 651.1106 649.2112 634.2443 632.2555 632.2057 623.7227 624.9694 629.0577 187 4662.5 618.8254 653.7014 647.6111 650.5595 648.8868 633.9651 631.9907 631.7869 624.0993 624.759 627.9094 188 4687.5 618.9911 653.4959 648.0767 651.2509 648.958 634.2695 631.7548 632.4656 624.4515 624.6573 628.4349 189 4712.5 619.1516 653.4524 647.6667 650.73 648.4824 634.3106 632.5616 632.5456 624.0686 624.596 628.713 190 4737.5 618.5762 653.2439 647.0638 650.0874 648.158 635.021 631.304 631.6893 623.7921 623.4843 627.5759 191 4762.5 618.6338 653.1327 647.8505 650.8884 648.4176 633.7389 631.458 631.6995 623.4723 623.624 628.1091 192 4787.5 618.7654 652.8275 648.0166 650.9777 649.6025 633.3308 631.7606 631.9956 623.6587 624.4399 627.9926 193 4812.5 618.4063 653.5255 647.7662 650.8618 648.3132 633.7052 631.0402 631.5484 623.2467 624.2804 627.402 194 4837.5 618.1688 652.8664 646.6762 650.8303 648.195 633.496 630.8218 631.3971 623.5202 623.6875 627.8834 195 4862.5 619.1283 654.2303 647.4854 650.7154 649.0818 634.702 632.3014 632.281 623.2738 624.3089 628.6441 196 4887.5 619.2227 653.2196 648.4142 651.187 649.3789 634.2461 631.2062 632.2326 624.1006 625.3618 629.5652 197 4912.5 618.7722 653.8734 647.7479 650.426 648.2083 633.5101 631.7288 631.7939 623.9939 624.3515 627.9678 198 4937.5 619.203 653.266 647.6406 651.2157 649.2556 634.3605 632.2576 632.7565 624.7757 624.7714 628.4938 199 4962.5 618.6023 652.9386 647.7863 650.4492 648.3712 633.7404 631.6671 631.9933 623.9734 624.5115 628.193 200 4987.5 618.5643 653.054 646.8557 649.6625 647.9863 633.8609 631.5303 631.7036 623.9881 624.3636 627.8617 201 5012.5 618.9945 653.9163 647.5579 650.4969 648.5955 633.7961 632.3524 631.8314 624.0592 625.0209 628.7744 202 5037.5 618.8036 653.3631 647.6549 650.2506 647.6292 633.5085 631.7846 632.1385 623.4423 624.4778 627.9361 203 5062.5 618.9353 653.2382 647.4943 
649.9781 648.3486 634.2175 631.3141 632.1607 623.8845 624.994 628.1153 204 5087.5 618.8769 652.2405 647.3362 649.9139 648.6146 634.112 632.1768 631.6975 624.1771 625.1053 628.9446 205 5112.5 618.9629 652.2856 648.2336 651.5798 649.2323 633.871 631.3724 632.2623 624.4001 624.2176 628.3257 206 5137.5 618.5888 653.0329 646.4013 650.6897 648.7911 633.3618 631.4572 631.302 623.522 623.9393 627.7615 207 5162.5 618.5332 652.966 647.0297 650.0679 648.0988 632.9859 630.8757 631.049 623.7867 624.8207 627.6892 208 5187.5 619.2727 653.5433 648.2781 650.5863 649.1801 633.7943 632.6711 632.0327 624.2103 624.7424 629.4621 209 5212.5 618.7765 652.9074 647.416 649.6592 647.4137 634.0584 631.0725 632.0022 623.3719 624.4958 627.8934 210 5237.5 618.9615 652.582 647.2526 650.5398 648.5964 633.5985 632.227 631.8234 624.2246 624.4247 628.0643 211 5262.5 619.0009 652.7223 647.438 650.2225 647.9529 634.3578 632.3067 632.3832 624.3578 624.3336 628.4965 212 5287.5 618.9458 652.6265 647.3358 649.77 647.63 633.5897 632.2106 631.3565 624.2521 625.0172 628.9265 213 5312.5 618.8072 652.4581 646.9609 649.2367 647.822 633.8555 632.4647 631.2376 624.2442 624.3329 628.6963 214 5337.5 618.5354 652.092 647.0957 649.1379 647.9707 633.4178 631.472 631.2896 623.2217 624.1828 628.4373 215 5362.5 618.7768 652.0957 647.1739 649.217 648.299 634.1184 631.7813 631.9216 623.8468 624.4162 628.0313 216 5387.5 618.9748 652.3557 646.764 649.8083 647.7198 634.2619 632.4328 631.9969 624.6183 625.6374 628.4755 217 5412.5 619.439 653.6304 648.3546 650.8978 648.4926 634.5473 632.1081 632.3116 624.7527 625.4262 628.7746 218 5437.5 618.7111 653.4909 647.0508 649.2834 646.8163 633.4722 631.3457 631.6077 624.0118 623.8951 627.7093 219 5462.5 618.8112 652.3801 647.352 649.8355 647.8631 633.4998 631.3351 632.343 623.451 624.0698 628.0892 220 5487.5 618.2131 652.578 646.3652 648.7379 647.3924 634.0418 631.1596 631.0277 623.2672 623.6799 627.7253 221 5512.5 618.5854 653.149 646.8393 649.188 646.6886 633.166 631.5174 631.1911 
623.5634 623.8946 627.9638 222 5537.5 618.5213 652.0917 645.9027 649.1051 646.8119 634.7473 632.009 631.2304 623.8827 624.8598 627.0569 223 5562.5 619.0323 652.4625 646.7866 650.3601 647.4203 633.6518 632.1259 631.6863 624.6704 624.6772 628.4052 224 5587.5 618.6252 652.0872 646.4479 648.502 646.9255 633.5339 632.1697 630.8464 623.5466 624.4852 627.8798 225 5612.5 618.5673 652.2964 647.0101 649.1359 647.6871 633.5874 630.7991 632.2396 624.2537 624.4285 627.2764 226 5637.5 618.8451 652.1103 647.0721 649.4681 647.1686 634.5487 631.163 631.5285 624.3839 624.7169 629.0421 227 5662.5 618.9258 652.987 647.0167 650.0562 647.9343 634.1851 632.0204 631.5921 623.5986 624.1024 628.602 228 5687.5 619.0433 652.563 647.48 649.4209 647.8456 633.9053 631.3552 631.7692 623.9901 625.3299 628.6911 229 5712.5 618.9093 653.0607 647.2772 649.6101 648.1616 633.8955 631.5607 631.7954 623.5102 624.4634 628.29 230 5737.5 618.2741 651.683 646.157 649.5037 647.0727 633.3031 630.8218 631.244 623.6108 624.7005 627.5605 231 5762.5 619.143 652.87 647.2256 650.0595 647.7934 634.1636 631.401 632.8653 624.1015 624.1788 627.9962 232 5787.5 618.9545 652.5454 646.8469 649.2721 647.5413 634.1884 631.5245 631.0145 624.1489 624.9394 628.0456 233 5812.5 618.8908 652.3326 647.5903 649.397 648.1425 634.0223 631.2271 631.0431 623.513 624.3311 627.9605 234 5837.5 618.9236 652.8054 646.764 648.8079 647.2703 633.6219 631.3885 630.6707 624.1685 624.5713 627.8141 235 5862.5 618.811 651.6793 646.3536 649.6607 647.9751 634.0148 631.5314 631.5793 624.4696 624.8814 627.7988 236 5887.5 619.0164 652.2158 646.9457 649.9317 647.8622 633.8169 630.9459 632.0796 624.1904 624.4197 627.5697 237 5912.5 618.739 652.3353 647.0415 648.8077 646.7442 633.8036 630.6798 631.9432 623.8518 624.3821 627.8839 238 5937.5 618.4722 652.3319 646.5427 648.6056 646.5049 633.3466 630.897 630.6872 624.8246 624.0144 627.0977 239 5962.5 618.4676 651.7141 645.9844 648.5367 646.2891 633.38 631.3219 631.1309 623.3089 624.2309 627.3183 240 5987.5 
618.5736 651.9366 646.8758 648.7231 646.8516 634.081 631.132 631.3326 622.8598 624.5669 627.4215 241 6012.5 618.5775 652.3941 646.6208 648.9467 647.336 633.8886 631.0278 631.8381 623.9088 624.679 627.7326 242 6037.5 618.4216 651.8822 645.9825 648.4448 646.5095 633.8057 630.9413 631.1101 623.8846 624.1879 627.4334 243 6062.5 618.093 651.1863 645.6364 648.0116 645.9717 632.7636 630.8826 630.7844 623.6428 623.76 628.4916 244 6087.5 618.011 650.7627 645.1145 647.7979 645.8697 632.7751 630.0246 629.9156 622.7402 623.46 627.2273 245 6112.5 618.393 651.7139 645.8012 648.4682 646.4808 633.1117 630.7762 630.6297 623.1604 623.7627 627.5273 246 6137.5 618.4089 651.1519 645.9778 647.8734 646.1238 633.5215 631.1628 630.885 624.1536 624.0203 627.4756 247 6162.5 618.4086 650.6174 646.2551 647.4361 645.6442 633.1803 630.8233 631.2525 623.3068 623.3222 628.213 248 6187.5 618.4941 650.7368 646.2873 648.206 645.8069 633.2143 631.2049 630.8191 624.0545 624.7554 627.856 249 6212.5 618.3587 651.5488 646.0264 648.4196 646.4547 633.1145 630.4532 630.44 623.2864 624.6866 626.9993 250 6237.5 618.8981 652.0637 646.6025 648.0815 646.7123 632.9885 631.2276 632.0408 623.7819 624.257 628.1196 251 6262.5 618.2112 651.0093 645.9306 647.6297 645.5979 632.6874 630.309 631.9392 622.8574 623.9678 626.5563 252 6287.5 618.3797 650.8463 645.115 648.2466 646.4703 632.9657 631.0185 630.9376 623.6062 624.1074 627.3731 253 6312.5 618.6171 651.0803 646.4533 647.7855 645.7398 632.955 630.9232 631.6985 623.9085 623.8961 627.4292 254 6337.5 618.6821 651.1905 646.9589 648.7276 646.8346 633.3911 631.5856 631.1552 623.1318 624.2305 627.5624 255 6362.5 618.6308 651.8107 646.0864 648.1322 646.6225 633.5327 631.3765 631.0332 623.7023 623.9898 627.2907 256 6387.5 618.6265 651.64 646.1128 648.3339 646.3668 632.7732 630.5766 630.7189 623.9275 625.5066 627.7556 257 6412.5 618.4316 651.2918 647.3736 646.9403 645.4584 633.2405 630.9283 631.7579 623.6013 623.7031 627.3668 258 6437.5 618.8508 651.4332 647.1757 648.4575 
646.2968 634.4756 631.2028 631.7496 624.1447 625.1056 628.1667 259 6462.5 618.2028 651.6175 645.2017 648.1664 646.5113 632.6745 631.1601 630.247 622.4272 623.6615 626.9515 260 6487.5 618.1785 650.8184 645.2308 647.8696 645.9977 632.9228 630.7756 630.3073 622.8306 623.8459 626.6337 261 6512.5 618.2871 651.1972 646.1493 648.7333 646.6203 633.7166 630.508 630.387 623.4867 623.6911 626.775 262 6537.5 618.9505 651.377 646.7259 648.7837 646.4083 633.7539 631.0143 631.2833 625.0178 624.33 627.725 263 6562.5 618.5302 650.8953 645.8485 648.3132 646.1617 633.0473 631.371 630.3833 623.5478 623.6528 627.4871 264 6587.5 618.5667 650.5818 645.5633 647.6813 645.8256 632.8955 631.3222 631.2744 624.285 624.4284 627.361 265 6612.5 618.1587 650.5808 646.0063 647.2793 645.5067 633.5839 630.3443 630.6163 622.3919 623.8034 626.9281 266 6637.5 618.4789 651.6074 646.1382 647.9849 646.1978 632.0409 631.0717 630.9895 623.8639 624.0661 627.6185 267 6662.5 618.6895 651.1743 645.0934 648.2558 646.2116 632.8265 630.8822 630.2263 623.8095 623.5925 627.3405 268 6687.5 618.824 652.0283 646.1471 647.5767 645.7159 634.2792 630.9498 631.313 623.7428 624.2681 628.0645 269 6712.5 618.6317 650.9717 645.122 648.1999 645.7176 633.7947 631.0049 631.0455 623.8011 623.8533 626.908 270 6737.5 618.5133 651.2345 645.0101 647.6289 646.5774 632.9885 631.0599 631.5557 624.08 623.6674 626.7621 271 6762.5 618.4715 650.5145 646.1657 646.9936 645.7424 632.5369 630.9801 631.683 623.3716 623.7228 627.4534 272 6787.5 618.512 651.0828 645.5219 646.6826 644.795 633.5704 630.6304 630.1678 624.1887 624.2213 626.3114 273 6812.5 618.6452 651.2954 645.7911 647.2936 645.9355 632.8019 631.0129 630.7956 623.8557 624.532 626.6105 274 6837.5 618.8978 651.0166 646.9765 648.2441 645.8021 633.5285 631.0142 631.1585 623.8816 624.6734 628.1299 275 6862.5 618.3586 650.302 644.2611 648.542 645.8646 632.945 630.6007 630.6872 623.4219 623.7298 626.6519 276 6887.5 618.4875 650.8549 645.873 647.2126 645.4785 632.9577 630.4811 630.9634 623.2826 
623.3281 627.4324 277 6912.5 618.6757 650.3267 645.4689 647.3181 645.8541 632.8485 631.4462 631.3532 623.8546 624.0416 628.1203 278 6937.5 618.2115 650.3977 645.1668 646.0031 644.3501 632.735 630.4579 631.429 623.5469 624.2447 626.2979 279 6962.5 618.3628 650.3816 644.6562 646.7577 645.142 633.5831 630.3956 630.2956 622.966 623.9322 626.9581 280 6987.5 618.5374 650.4845 645.6938 647.8725 646.2198 633.1813 631.1056 630.7955 623.8157 623.9563 626.9682 281 7012.5 618.5039 650.4199 645.5979 647.872 645.4787 632.1809 630.9974 630.6805 623.1477 624.2579 627.2339 282 7037.5 618.7678 651.0885 645.2875 647.7262 645.6397 632.7452 631.1904 630.6738 623.8352 624.3883 627.5483 283 7062.5 618.2261 650.6754 645.1489 647.1713 645.9271 632.7379 630.2042 630.4537 622.8319 623.7862 626.3857 284 7087.5 618.5986 650.8426 646.1051 646.8343 645.2433 633.3667 630.7269 630.8247 623.623 623.9239 627.6721 285 7112.5 618.3443 651.1782 645.5043 646.5859 645.2304 633.4053 630.373 631.0143 623.5604 623.8627 627.244 286 7137.5 618.7396 650.7602 645.9398 648.3223 645.9961 633.6356 630.7913 630.7576 623.138 624.1686 627.8974 287 7162.5 619.0795 651.3266 645.8681 647.5622 645.5512 634.4992 631.1298 631.4903 623.7069 624.4254 628.2348 288 7187.5 618.3279 650.2048 644.8116 646.5444 644.5427 632.7031 630.5645 630.8341 623.2977 624.2077 627.2958 289 7212.5 618.2973 650.6044 645.1635 646.8084 644.6295 632.4833 630.0304 630.7622 623.752 624.2131 627.506 290 7237.5 618.859 650.8688 645.9185 647.6035 645.3818 633.1843 631.3684 631.0757 623.5035 624.3592 626.7896 291 7262.5 618.2963 650.1216 645.4653 646.7793 645.3006 633.4122 630.8581 630.724 623.6299 623.5449 626.261 292 7287.5 618.7335 650.2448 646.2549 646.7894 644.9746 632.6145 631.1034 630.9787 623.3859 624.0314 627.3662 293 7312.5 618.4626 650.4178 645.0825 646.8085 645.4283 632.6787 630.565 630.6512 623.6212 623.6796 626.8558 294 7337.5 618.6748 649.8851 645.3406 647.0794 644.8849 632.8644 631.1923 631.0913 623.8293 625.2235 627.555 295 7362.5 
618.2332 648.9786 644.2948 646.8331 644.4878 633.0242 629.955 630.1825 623.5684 623.5459 626.9243 296 7387.5 618.4091 650.1669 644.8131 646.3815 644.3585 632.6979 630.2908 629.8451 623.6335 624.1096 626.9751 297 7412.5 618.4987 650.7976 644.4052 646.1215 644.2645 633.0426 630.2771 630.1676 623.1582 623.245 627.1333 298 7437.5 618.4745 650.0045 644.9857 646.6431 645.1226 632.4447 630.7097 630.8005 622.7082 623.7896 627.8796 299 7462.5 618.7395 650.1959 645.7572 646.3635 644.1454 632.8343 630.291 630.8889 623.9828 623.597 626.7385 300 7487.5 618.7043 650.5124 645.9306 647.0941 645.1926 633.5556 630.5643 630.8277 623.6326 624.196 627.044 301 7512.5 619.0649 650.7897 645.3888 647.4891 645.7593 633.4466 631.4775 630.8706 624.4205 624.4989 627.328 302 7537.5 618.5242 650.8239 645.1647 646.4609 644.4305 632.8471 631.4025 630.4154 623.3036 623.7999 627.6354 303 7562.5 618.4352 650.4218 645.1364 646.7873 645.2778 632.3872 630.5753 630.4445 623.0562 624.3973 626.6241 304 7587.5 618.4319 649.9424 644.7924 646.772 645.3847 633.2489 630.5264 630.5124 622.8138 624.1622 627.0306 305 7612.5 618.4504 650.152 645.4666 646.6695 644.4017 632.5882 630.2798 630.1396 623.9076 623.6975 627.1513 306 7637.5 618.3089 650.7886 644.414 647.0801 645.5416 632.6067 630.0021 630.5348 623.5274 623.8616 626.629 307 7662.5 618.6721 650.4858 645.5833 646.2829 644.9763 633.039 630.5968 630.9221 623.7247 624.3492 626.4539 308 7687.5 618.3895 650.6616 644.9204 646.2094 644.0641 632.6111 630.8601 630.9184 623.2283 623.6508 626.9643 309 7712.5 619.0706 651.3523 645.5729 646.3172 644.9272 633.3322 630.9194 630.8345 623.827 624.3725 627.6029 310 7737.5 618.3439 649.728 645.1075 646.5331 644.3375 632.4734 630.283 629.9038 623.1606 623.4886 627.0319 311 7762.5 618.9544 650.4042 645.4888 646.8574 645.0176 633.0137 631.1789 630.9664 623.8693 624.6666 627.8776 312 7787.5 618.1977 649.8716 644.6861 645.3126 644.4462 631.8128 630.6642 629.7904 623.3611 623.0477 626.8131 313 7812.5 618.1473 649.7101 643.9476 
645.6463 644.142 632.8172 630.344 630.3991 622.835 622.895 626.627 314 7837.5 618.862 650.1162 644.3231 647.1564 645.4676 632.6408 631.6468 630.9702 623.8692 623.9261 626.4991 315 7862.5 618.4185 649.6833 645.1225 646.0168 644.3929 632.5783 630.7495 630.5514 622.6778 624.2944 626.9491 316 7887.5 618.3152 649.4984 644.7363 645.7152 644.3641 632.9649 629.771 630.5899 623.5342 624.5155 626.9422 317 7912.5 618.2918 649.4324 644.4608 647.1509 644.4112 633.0677 629.9365 629.7813 623.2294 622.9217 626.7955 318 7937.5 618.4807 650.8046 645.1009 645.7318 644.1698 632.419 630.4016 630.3552 624.068 623.7154 626.8045 319 7962.5 618.2901 649.5621 644.9576 645.4758 644.1941 632.5734 630.2385 629.8744 623.7524 623.7316 626.7106 320 7987.5 618.364 649.5074 643.8624 646.2436 644.4735 632.7982 630.3678 630.9499 623.7981 624.6382 626.9544 321 8012.5 618.8354 650.1159 645.4016 647.2214 645.6133 632.8678 630.6392 630.8544 623.2534 624.1601 626.9893 322 8037.5 618.6215 649.928 644.6645 645.8989 643.6923 633.2473 630.9215 630.2488 623.0786 624.1518 626.9227 323 8062.5 618.9782 650.3453 644.4418 646.5901 644.9668 633.245 630.9773 630.8305 624.1211 624.282 627.9896 324 8087.5 618.0019 648.5852 644.6289 645.2636 643.9906 632.0613 630.5516 629.8139 622.9913 623.1181 626.175 325 8112.5 618.2685 649.8962 645.0746 646.1486 643.9857 631.9631 631.4108 630.1446 623.9561 623.8182 626.7123 326 8137.5 618.2903 649.6125 644.0575 644.9006 643.1916 631.7471 630.3981 629.9039 623.3989 623.994 627.0982 327 8162.5 618.5256 649.6985 644.9219 646.5496 644.2649 632.31 630.8333 630.2354 623.1154 623.4292 626.9045 328 8187.5 618.6263 649.13 644.3508 645.7005 643.6443 631.9247 630.8098 630.7128 624.0049 623.5818 627.8002 329 8212.5 618.565 651.0052 644.5112 645.7753 644.4512 632.8293 630.2637 630.5492 623.4625 623.5892 626.6317 330 8237.5 618.3657 649.8342 644.8589 645.9157 643.9619 633.1589 630.0736 630.5122 622.9489 623.7293 627.0794 331 8262.5 618.3354 649.5816 643.9188 645.6946 643.9122 632.5254 630.2308 
630.3366 623.895 623.3626 626.2339 332 8287.5 618.489 650.1469 644.9995 646.0249 643.9678 632.7201 630.8447 630.2843 623.1595 623.6675 627.4992 333 8312.5 618.4103 649.2427 643.7491 645.1183 643.585 632.7074 630.0562 630.8584 623.9041 624.5068 627.1074 334 8337.5 618.775 649.9411 645.0695 646.5077 644.542 633.5217 630.9005 631.5416 623.9577 623.5901 627.5317 335 8362.5 618.6649 649.6666 645.0583 647.0481 644.9319 632.4419 630.9266 630.2653 624.0309 624.2949 627.7756 336 8387.5 618.5972 650.51 644.6694 645.3269 643.7284 632.702 630.8527 630.397 623.556 623.7085 626.4101 337 8412.5 618.2018 649.096 644.4631 645.6769 643.7876 632.2838 630.0569 629.9414 623.4953 623.3194 626.4029 338 8437.5 618.5864 650.9217 644.8315 645.6452 643.2927 632.428 630.5806 630.6544 624.0461 623.5732 627.5798 339 8462.5 618.0637 648.3583 644.2733 645.4712 643.3705 632.1783 630.677 630.224 622.2156 623.6408 625.8602 340 8487.5 618.3464 650.4567 644.1441 646.0295 643.9643 631.7144 630.3444 631.081 623.825 624.0898 627.4506 341 8512.5 618.6875 649.3386 645.7851 646.3093 643.873 632.8727 630.5329 630.3559 623.0921 623.7292 627.4044 342 8537.5 618.226 648.528 644.6844 645.4502 643.583 631.4378 629.3295 630.3208 623.9322 623.3319 625.8648 343 8562.5 618.3907 649.3694 643.7564 645.5451 643.8227 632.4523 630.2999 629.6681 623.0829 623.3485 625.5932 344 8587.5 618.6668 649.3559 644.3465 645.8364 643.7917 632.6209 630.7156 630.521 623.3915 624.6564 626.7504 345 8612.5 618.4699 649.3436 644.804 646.1493 644.0891 633.1641 630.395 630.5449 623.3689 623.4048 626.454 346 8637.5 618.5243 648.9337 644.5682 645.1041 643.015 633.1662 630.1375 629.7279 622.9173 623.6332 627.0146 347 8662.5 618.2064 648.5491 643.8893 645.4478 643.2366 632.1683 630.1043 629.596 622.4739 624.0199 626.508 348 8687.5 618.4275 649.0491 644.4008 645.3 643.4925 632.7113 630.6609 630.2512 623.1657 623.8705 626.3277 349 8712.5 618.5667 650.2475 643.9579 645.3668 643.8265 633.2668 631.1766 630.0999 623.5194 623.6311 626.7078 350 8737.5 
618.5017 649.0488 644.7107 645.0626 644.102 631.778 630.7302 630.2522 623.2993 624.086 626.9363 351 8762.5 618.4543 648.3395 644.4734 645.1038 642.6601 632.8849 630.4172 630.5424 623.0777 623.0308 627.3421 352 8787.5 618.8428 649.9798 644.0939 645.4286 643.7637 632.8143 630.5114 630.8691 624.4572 623.7593 627.2918 353 8812.5 618.4342 649.3737 644.246 645.4604 642.9952 632.1868 630.2811 629.83 623.5446 624.1014 626.0372 354 8837.5 618.7335 649.1455 645.1523 645.6904 643.6562 632.4996 630.3286 630.1065 623.2224 624.2629 627.9404 355 8862.5 618.8819 649.1021 644.8431 645.9183 643.7847 632.6263 630.8952 630.333 623.681 624.6149 627.1219 356 8887.5 618.7337 648.9698 645.845 645.7788 644.5348 632.7433 629.8293 630.2963 623.6382 624.24 626.7588 357 8912.5 618.5966 649.8865 644.7819 645.2094 643.2773 631.7107 630.7093 630.3165 623.5106 623.9584 626.8737 358 8937.5 618.2953 648.5237 643.2282 644.8661 642.9077 632.307 630.0069 629.4535 623.3817 624.2337 626.9178 359 8962.5 618.4522 648.8473 643.9328 644.5106 642.81 632.3212 630.2171 630.0487 624.0746 623.7379 625.8119 360 8987.5 618.1987 649.5801 644.1971 644.4061 642.3501 631.8334 630.0769 629.7371 623.4826 623.7143 625.7715 361 9012.5 618.6604 648.8025 644.5723 644.9618 643.9131 633.0034 631.1027 630.4953 623.3564 624.161 626.3954 362 9037.5 618.4766 648.3843 643.907 645.296 642.8569 632.3137 629.6657 629.796 622.7613 623.72 626.1989 363 9062.5 617.9552 647.5717 643.8892 644.3449 642.853 632.3802 629.3721 629.4203 622.4843 623.1292 626.0959 364 9087.5 618.0586 648.6588 643.6195 644.3138 642.8918 632.3866 630.0131 629.6002 623.2503 624.1777 625.8431 365 9112.5 618.2538 648.8638 644.0906 644.6384 643.2183 632.2125 629.9573 630.113 623.1203 623.9502 625.9283 366 9137.5 618.3348 649.3889 643.5032 644.5769 642.7786 633.0311 630.1023 630.3598 622.764 623.3851 626.3935 367 9162.5 618.2285 648.3715 643.6462 643.8999 643.0072 631.9955 630.0289 630.4701 622.5838 624.1422 626.8893 368 9187.5 618.3764 648.4641 644.1332 644.8917 
642.5606 631.5427 630.1089 629.5101 623.274 623.6395 626.2112 369 9212.5 618.4925 648.7271 644.0112 644.3022 642.6092 632.4091 630.4191 630.0229 623.7411 624.1999 626.5547 370 9237.5 617.9371 648.4581 643.4852 644.2721 642.51 631.1778 630.1696 628.9105 622.242 623.0448 626.217 371 9262.5 618.0937 649.1332 644.2218 644.5459 642.7458 631.5524 630.086 629.4436 622.7374 623.4835 626.7291 372 9287.5 617.9835 648.7028 643.4191 644.2373 642.3337 631.7539 629.1179 630.0249 622.7879 622.9156 625.9305 373 9312.5 618.6342 649.1277 643.5998 644.5541 643.1012 632.5915 629.2926 630.784 623.463 623.4691 626.2484 374 9337.5 617.7979 648.6783 643.8123 644.7457 642.3512 631.7824 629.779 628.7636 622.6756 623.4227 625.9601 375 9362.5 618.1734 647.8283 644.1797 644.8148 642.7339 631.7168 630.1532 629.9965 622.914 623.3928 626.1645 376 9387.5 618.7312 649.4527 644.3163 644.5857 643.1813 632.8515 630.3759 629.651 623.5558 624.5426 626.368 377 9412.5 618.0063 648.1059 643.4379 643.7064 642.2692 631.7398 629.3768 629.355 622.5813 623.0003 626.8682 378 9437.5 618.5759 648.2842 643.6792 644.917 643.1413 632.4457 630.8254 630.3641 623.4134 623.9466 626.8719 379 9462.5 618.3521 648.3616 644.0281 644.0362 642.2481 631.0841 630.0707 629.6455 622.8475 623.1573 626.0066 380 9487.5 618.157 647.7067 643.0498 644.0736 642.7409 631.2069 629.6264 629.6725 622.9569 622.7905 625.7662 381 9512.5 617.9725 648.8606 643.9327 644.3573 642.2167 631.1654 628.9784 629.2826 623.0738 623.7036 625.817 382 9537.5 618.7489 648.3842 644.2837 645.1919 643.1051 632.1657 631.0128 630.867 623.4777 623.5722 627.5416 383 9562.5 617.8265 646.8775 643.2375 644.2315 642.5984 631.5545 629.6814 629.7804 623.1103 622.591 625.656 384 9587.5 618.4409 648.4052 643.6649 643.5457 642.6738 632.0575 629.8965 629.6938 622.908 623.8966 626.826 385 9612.5 619.0416 649.5977 643.6657 644.7118 643.0229 632.8607 631.5474 630.3487 624.4663 623.8067 627.0891 386 9637.5 618.3461 648.2963 642.905 644.1572 642.5398 631.8768 629.5719 630.1785 
622.8795 623.7967 626.035 387 9662.5 618.3498 648.3428 643.8818 644.5497 642.6002 631.9827 629.6941 629.5495 623.1228 622.9446 626.1307 388 9687.5 618.1572 648.089 643.369 644.2119 642.1496 631.1705 629.9102 629.8906 622.5808 623.9903 626.354 389 9712.5 618.3647 648.6657 644.0529 644.183 642.9127 632.0826 629.4771 630.1224 623.7083 623.5812 626.0413 390 9737.5 618.5168 648.5977 643.9052 645.2474 643.2351 631.804 630.3215 629.216 623.5666 624.1424 626.3207 391 9762.5 618.0275 648.2418 643.6906 643.7616 642.0941 631.4929 629.4654 629.28 622.3514 623.2917 625.2658 392 9787.5 618.2667 647.3649 643.4853 644.7407 643.2198 632.41 629.6619 630.2525 623.24 623.6723 626.3983 393 9812.5 618.3468 648.5295 644.1379 643.5131 642.0088 632.3274 628.9454 630.5922 623.2969 624.2035 625.8965 394 9837.5 618.4358 649.0129 643.9139 644.2854 642.763 631.7873 629.3355 629.2727 623.1277 623.571 625.6473 395 9862.5 618.1271 647.406 643.7066 642.863 642.0241 632.1009 629.909 629.0679 622.7357 623.1508 626.1223 396 9887.5 617.9229 647.6817 642.7045 643.7678 642.1103 631.4221 628.8112 628.9842 622.3607 622.786 626.744 397 9912.5 618.6469 648.8355 643.3469 644.6775 643.2816 631.9663 630.1039 629.7807 623.4054 624.1391 626.7914 398 9937.5 617.7221 646.9633 642.0548 643.6641 642.0682 631.4566 629.7685 628.4507 622.8747 623.107 626.0303 399 9962.5 618.2958 648.0546 642.8984 644.578 642.1019 631.8155 629.3374 628.9635 623.1402 622.9276 625.9091 400 9987.5 618.0993 647.0377 642.5144 644.368 642.0623 631.8915 629.7869 629.003 622.5751 623.6693 625.8073 401 10012.5 618.1272 647.2462 642.4072 644.4166 641.7524 631.1989 629.263 629.419 622.7052 623.7405 625.7961 402 10037.5 618.4154 647.6229 643.4176 644.0985 643.03 631.8913 629.5855 630.1053 623.6872 623.5963 626.1135 403 10062.5 618.5835 648.9072 644.0621 644.5265 642.5212 633.0004 630.1475 629.7055 622.766 624.4352 625.7347 404 10087.5 618.0216 647.6785 642.9856 643.8038 642.4717 631.2042 629.8713 629.7814 622.5835 623.238 626.5525 405 10112.5 
618.2201 647.6977 643.0132 644.4261 642.548 631.6777 629.1582 629.689 623.3602 623.2825 625.6037 406 10137.5 618.2251 647.7091 642.9163 643.212 641.877 631.297 629.2489 629.2729 623.52 623.3742 626.0713 407 10162.5 618.3507 648.5285 643.1431 643.9415 642.2181 631.7845 630.0111 630.0578 622.9511 623.6954 625.783 408 10187.5 618.1182 647.6229 642.7609 642.9742 641.2529 631.3859 629.7497 629.6435 623.1985 624.1358 625.8631 409 10212.5 617.9782 647.1648 643.171 643.8479 641.2091 631.2998 628.9912 629.2563 623.5774 623.1912 625.8413 410 10237.5 617.6732 647.3444 642.3775 642.8249 641.534 630.1176 629.1761 628.7322 622.1397 622.1891 625.0741 411 10262.5 618.1717 647.6369 643.7163 644.1544 642.0439 631.5784 628.9904 629.7674 622.5278 623.7442 626.3526 412 10287.5 618.6397 647.2312 643.4321 644.5102 642.5814 632.5985 629.7049 629.5013 624.2129 623.7569 626.4116 413 10312.5 618.3368 648.2129 643.3842 643.9775 642.4043 631.991 630.7045 628.701 623.5927 624.3752 626.6277 414 10337.5 618.0742 647.8487 643.1686 643.7808 642.3062 631.3772 629.9242 629.0876 623.2576 623.6289 625.4375 415 10362.5 617.7497 647.013 641.7815 643.2179 641.4163 630.8726 629.2208 629.2222 622.5629 622.5882 625.5398 416 10387.5 618.4549 647.6987 643.0436 643.9575 642.4734 631.6887 629.8469 629.3193 623.7414 623.7411 625.8074 417 10412.5 618.2528 647.8874 643.2693 643.1774 642.4411 631.8712 629.4941 629.2646 623.7571 624.1682 625.9747 418 10437.5 618.131 647.8884 642.6342 643.031 641.5251 631.7709 629.5714 629.9105 623.192 623.973 625.5489 419 10462.5 618.3785 647.001 643.1022 643.6928 642.6541 631.5855 629.5517 629.547 622.94 624.0887 626.5947 420 10487.5 617.9627 647.6881 643.2336 642.816 641.5233 631.1578 628.8576 629.158 622.453 622.9434 625.2868 421 10512.5 618.729 647.9235 643.2214 643.8734 642.0082 631.4508 630.1806 629.7437 623.5755 623.8893 626.5532 422 10537.5 617.5241 646.6207 642.648 642.2259 641.1804 630.6105 628.5996 628.8971 621.6964 622.9705 624.9856 423 10562.5 618.4024 648.6393 642.9753 
643.8009 642.6087 632.1996 629.7879 629.7999 623.0841 623.8905 626.3713 424 10587.5 618.0944 647.4147 642.4924 642.7857 641.5279 632.8795 630.2167 629.3568 622.8895 623.4556 625.6895 425 10612.5 618.331 647.5699 642.7685 644.1743 641.4024 631.2979 629.5014 628.7688 623.1797 622.6307 626.3287 426 10637.5 618.2501 648.0763 643.1176 643.3727 641.4119 631.1455 629.911 628.9497 623.8503 623.3407 625.3356 427 10662.5 618.2344 646.9236 643.4489 643.5155 641.561 631.5596 629.6456 629.5593 623.126 623.1235 625.3179 428 10687.5 617.8233 646.361 642.6951 643.7252 641.8952 631.7112 628.3871 628.7961 623.5494 623.0217 626.3651 429 10712.5 617.9861 647.0634 643.0438 642.6142 640.8954 632.15 629.1689 629.1661 622.4247 622.9066 625.6096 430 10737.5 618.3168 647.869 643.2796 644.0039 642.3156 631.5433 629.3045 629.8434 624.0207 624.1194 625.8442 431 10762.5 618.2615 648.145 643.1503 644.2762 642.096 631.206 629.7108 629.457 623.3217 623.8368 626.7375 Luminescence/inst/extdata/Daybreak_TestFile.DAT0000644000176200001440000015422013231137116021150 0ustar liggesusers_ c=@??Z# Gy/min u-2/secnone  Sr-90 Am-241b s@A@B@H@A@H@t@&@@ج@t@@.@H@P@z@Щ@@@^@@@@l@@J@@ @@@@@@@@@Ф@@@@ڣ@@@@@Ң@@@@2@@@v@@L@r@@@@4@@F@T@ȟ@0@@Ȟ@@@@@؜@H@h@4@@l@@@8@@@@@T@h@0@@ؚ@ܙ@Ԙ@L@p@@@x@_ c=@??Z# Gy/min u-2/secnone  Sr-90 Am-241b t@H@F@D@G@r@.@"@@@@@@@@f@@@0@@L@D@@Ψ@t@ @8@2@@@r@,@@@@r@@Ԥ@"@t@@@N@@(@@@@Ң@@.@&@@@@@@@ @@@@@@J@ȟ@`@D@H@L@@@@$@x@̝@@@8@t@d@\@ܛ@@@X@X@\@ě@@0@p@x@@ @_ c=@??Z# Gy/min u-2/secnone  Sr-90 Am-241b pt@A@A@D@C@@ @@.@J@@֪@@@ة@:@̨@@@p@@@ܦ@@@Ȧ@<@ @@@@x@@@@@t@&@\@@Ƣ@*@̡@H@.@@:@ơ@@j@@@@@@@J@@ğ@@(@@@@@@D@D@@@ܜ@@@@@|@@@H@4@@l@@ @|@H@@@x@@@X@,@@Ę@_ c=@??Z# Gy/min u-2/secnone  Sr-90 Am-241b q@A@G@A@E@֨@-@\@@X@D@x@ڬ@$@̫@@|@@@2@ک@h@0@$@@ا@@@@@@`@F@@@@@@8@h@J@V@(@أ@p@@@j@@@@P@@@@@v@b@P@@@@@F@@P@@@4@@@p@@@,@ܝ@\@@؝@@P@@@@H@@@t@x@@`@@x@ę@l@@_ c=@??Z# Gy/min u-2/secnone  Sr-90 Am-241b pp@@@A@G@?@t@@@d@@&@@@@@R@:@N@@@@ @̦@b@@L@v@d@ڤ@T@@X@t@@@Σ@F@@F@D@@ޡ@.@@@ܡ@r@@@@d@ @@@@@@ @T@$@@@ @@,@@@d@@8@@@D@ @@d@̚@@@@ @l@@\@@@4@x@(@ 
@@x@@@ @@_ c=@??Z# Gy/min u-2/secnone  Sr-90 Am-241b r@=@B@@@A@@@@n@L@<@Ȩ@.@,@@@@p@@D@@<@@<@T@ @4@@.@@n@@@Ƣ@L@@@@@^@@@ @T@^@@p@@@@@ܞ@$@l@d@@4@H@@@|@@@@@$@\@@T@@@@`@@ؘ@Ę@t@@@@X@H@@ @@@З@Ė@ė@@@@@@P@ @_ c=@??Z# Gy/min u-2/secnone  Sr-90 Am-241b s@@@@@<@F@@6@@h@@@@ҩ@v@$@<@@ @v@|@ڦ@@@4@@>@@@@6@@d@@@@8@Ģ@x@@@4@(@@@h@0@Ԡ@Ơ@ @p@ؠ@@(@@@@@@@L@@@@@|@,@@@t@@D@@@@@X@@@@H@d@P@(@@И@ @@@@@(@@d@0@0@_ c=@??Z# Gy/min u-2/secnone  Sr-90 Am-241b s@@@<@D@B@ @@ܪ@R@@:@L@r@@@@@@x@ʦ@ @@2@@@Z@@X@@ܣ@@@ң@@f@@@D@,@@@@@t@:@@ܝ@ʠ@r@@@@@*@@@,@ȝ@L@@8@@@Ĝ@\@@H@ě@@@h@d@@p@T@@ԙ@@@p@p@@\@@Ę@@@4@ @@x@ @@X@0@@_ c=@??Z# Gy/min u-2/secnone  Sr-90 Am-241b s@B@B@D@G@@ @@t@B@@@H@@$@ @L@@d@6@Ĩ@@@@@@@֦@&@d@@@P@B@ @t@@f@,@@@@@@@8@@|@@@@ @@^@$@@ @0@|@Ġ@D@@Ƞ@@0@@\@`@|@@D@@ܝ@T@\@@М@X@d@ @@@̚@ܛ@@Ԛ@H@К@@ԙ@@ؙ@@l@t@_  c=@??Z# Gy/min u-2/secnone  Sr-90 Am-241b q@D@@@A@C@@ګ@«@@P@B@@X@>@v@*@̧@x@@^@0@&@@@n@Ф@>@Ф@@֣@У@@@@ @|@<@@@@@t@@ء@@ @J@@j@@@0@@P@ @h@<@@@@T@؝@@@@$@T@@p@@@@`@@p@@@@@ @$@@@|@8@@@ @4@̖@@@,@@`@@_  c=@??I@Z# Gy/min u-2/secnone  Sr-90 Am-241b x@E@H@N@E@@d@S@@^@@@T@R@@@@L@@#@@P@R@ȭ@@|@V@2@B@^@@@n@Ы@@@@ʩ@$@b@@@*@Χ@&@6@@@@&@v@@f@@@@֤@ԥ@@t@@x@p@ң@D@2@@@ģ@@@"@¢@f@R@B@J@@@@@@@X@@@@@ʠ@@@@ @@ @,@_  c=@??I@Z# Gy/min u-2/secnone  Sr-90 Am-241b `y@H@G@I@G@@Q@ȳ@@B@@˲@h@@ޱ@o@b@@@g@@@w@ʮ@X@@@b@.@@@\@@~@Ϋ@h@@ث@^@(@`@@:@@F@@@"@@T@@@@B@4@@¦@6@@v@6@ȥ@>@@*@f@@@h@l@@`@X@@@n@@ܢ@¢@@@B@@@¡@@r@@@@,@ڠ@@@@ܠ@:@@@_  c=@??I@Z# Gy/min u-2/secnone  Sr-90 Am-241b pu@F@J@F@F@@@@@W@@@@@#@[@B@ @@@@&@@ȭ@t@4@l@@ī@@H@֪@@@@z@@@p@@`@@ @4@&@P@@@"@Х@ƥ@@@@@Ĥ@@@ܤ@@@@V@@h@΢@̢@¢@@@P@ʡ@@D@@@:@@N@@Р@p@b@Ơ@D@@(@@@Ԟ@|@@@@@ܞ@_  c=@??I@Z# Gy/min u-2/secnone  Sr-90 Am-241b x@K@I@K@J@x@@@@@ϲ@{@'@@@ @ް@@E@G@n@@`@~@@@@@L@Ҭ@v@`@@$@@@l@@@@@@@l@B@h@@@F@ @@@@t@@`@ȥ@إ@@@@ؤ@¤@@ڣ@@Σ@@*@L@@D@@X@@@.@:@ڡ@n@^@@@@F@@ @Z@@Ҡ@@@d@,@p@@_ c=@??I@Z# Gy/min u-2/secnone  Sr-90 Am-241b y@D@I@F@G@\@@ײ@@@ű@S@ܰ@@b@_@@f@Į@t@<@(@@:@@@@$@@@@v@@@|@v@j@@J@6@@@Ч@@,@@Ʀ@@0@@¥@@@J@@Ҥ@|@\@0@@\@@@,@R@0@@̢@@ @z@P@6@@@@0@֡@Ġ@@@^@r@@@@X@H@d@؞@@@̞@t@\@ @_ c=@??Y@Z# Gy/min u-2/secnone  Sr-90 Am-241b 
P@L@F@F@L@@@@&@ @@V@@ٲ@@@@3@@@@m@@!@~@@@@@@@b@@@ȫ@@ @Ϊ@v@p@@z@@ީ@@@ʧ@@ʧ@@@ڦ@4@x@(@@2@ĥ@@@@Ĥ@@@¤@6@@Z@@r@x@&@ @L@@@4@j@@T@@@ڡ@@@V@@@~@@Ơ@h@@H@6@_ c=@??Y@Z# Gy/min u-2/secnone  Sr-90 Am-241b `|@K@K@P@L@n@A@@@4@Y@@R@@i@@v@A@@`@@@@v@ձ@@@@:@گ@@b@@@b@@@@6@@x@*@@@|@Ϋ@@@@@@b@@@T@@@@*@@@Z@"@F@@@<@@r@@@@Τ@@@0@@@(@֣@@@@@B@ @@ @\@@T@N@@@|@@_ c=@??Y@Z# Gy/min u-2/secnone  Sr-90 Am-241b e@J@M@H@O@1@@E@@@͵@@0@@@(@8@=@@w@@@@)@Y@@j@'@@@@@@ί@9@@@ޮ@<@@@@ @`@,@^@@l@@Z@@@@@ܩ@@@P@@@@P@|@@6@@8@@@@ڥ@@@ƥ@*@@(@@@@Ԥ@l@8@@R@@ @@ @ʣ@@X@Ģ@¢@@@_ c=@??Y@Z# Gy/min u-2/secnone  Sr-90 Am-241b 0v@J@K@J@L@V@@@ݵ@@J@@}@@@|@Y@@~@b@@@l@h@@@@@@%@@@@@@B@@ޭ@ڬ@@8@,@\@@@F@F@@x@@@@h@@@@Ҩ@@L@@J@ @@Ħ@@@ئ@@@j@@:@@@@@b@f@@@@@v@D@@@̣@@6@ڢ@V@@t@¢@@@_ c=@??Y@Z# Gy/min u-2/secnone  Sr-90 Am-241b ~@Q@L@N@K@@@ö@b@@@@e@´@@@R@@@g@@@w@x@ڱ@@˰@w@{@@@z@̮@<@J@@Ҭ@x@@@@ȫ@֪@z@@b@ܪ@P@V@@\@@@@*@Ƨ@"@@b@x@̦@@@2@$@@@إ@0@@8@^@@ʤ@>@@ @L@x@@Z@4@Т@8@\@r@>@,@@@@@@@@_ c=@??@i@Z# Gy/min u-2/secnone  Sr-90 Am-241b P@@S@S@@T@@R@¶@@@C@@@t@+@]@@@w@@T@@_@@۶@¶@@͵@C@}@-@Ӵ@@@@@d@(@W@@@@W@;@@@@X@@h@@@[@!@ʯ@@ԯ@@@@@ @@@Ь@@@l@֫@r@@2@ƪ@Ҫ@l@Ī@&@@@J@D@<@f@@b@6@@@@@@ @f@N@.@@>@@_ c=@??@i@Z# Gy/min u-2/secnone  Sr-90 Am-241b @Q@S@Q@R@@2@@ϻ@@g@@@@@ܸ@]@$@@@@Ѷ@^@J@ϵ@h@@@F@ϴ@´@@@@Y@d@>@Բ@@|@@@@(@@S@@y@ɰ@@-@_@<@@@>@ޮ@@@@@@`@*@,@h@@@z@t@@V@@b@@@T@@@@R@v@@0@@,@X@h@ا@2@>@@H@@@_ c=@??@i@Z# Gy/min u-2/secnone  Sr-90 Am-241b @R@Q@Q@@T@S@@ݻ@6@d@@B@ڹ@@O@@j@@@N@@@$@j@1@@[@@@@@@ó@@@@@@6@m@W@>@y@@@ @ٰ@ް@@G@X@0@ @@F@@Ү@@>@@8@@@ު@@2@B@n@f@@@.@@,@@@@J@@@^@@@4@@@@J@@F@@@ʦ@r@@@@@_ c=@??@i@Z# Gy/min u-2/secnone  Sr-90 Am-241b Ȁ@O@@Q@O@Q@0@@d@Y@@'@@ѷ@G@@@B@]@@_@@@@\@J@@@:@ @@g@W@@/@@@0@X@)@°@/@@@f@@Я@:@.@@@ԭ@@̭@p@n@@ެ@@ʬ@@@ī@0@@֪@@f@@@Ʃ@b@L@@ި@@$@$@֧@@@֧@>@@l@\@@~@Z@0@*@ @@@@@_ c=@??@i@Z# Gy/min u-2/secnone  Sr-90 Am-241b @T@T@S@Q@d@2@@׺@ @3@@p@@@V@@@@ڶ@@b@5@@@@@n@D@@@@/@@@@q@p@I@@U@@ɰ@@@@@Я@@@@@V@L@@@@@@ޭ@@@@@Ȭ@@֫@@J@.@ @ڪ@"@4@P@X@\@T@@@L@x@ڧ@@@<@@T@<@ڦ@@@@@@_ c=@??@y@Z# Gy/min u-2/secnone  Sr-90 Am-241b @\@]@@]@^@n@u@L@@ @@@ 
@@E@1@@G@e@@@Q@@@@@/@@@x@@g@=@ѽ@޼@@@)@Ļ@g@e@غ@@@@@@@Ը@V@^@@ڷ@v@L@ @@@ֶ@>@_@ߵ@@@D@@@z@T@>@@@@y@_@(@;@6@v@@@y@@D@@@߱@@@Q@@@@@E@_ c=@??@y@Z# Gy/min u-2/secnone  Sr-90 Am-241b X@[@@Z@W@Z@M@@@@z@q@@}@@@@|@;@I@(@¿@@5@ν@m@@@@`@׻@@9@@I@@r@@@T@@&@(@@@J@+@@߶@@@@ǵ@o@U@@@\@T@@@@@9@@A@[@?@}@@@@ @@̱@ϱ@@@v@@@@ڰ@ɰ@I@R@@@@ޯ@@|@®@®@:@&@_ c=@??@y@Z# Gy/min u-2/secnone  Sr-90 Am-241b H@W@U@@X@@V@W@P@/@@X@P@@7@T@@@@H@@t@@@>@4@ߺ@]@u@@A@@@<@@9@`@\@?@@g@B@ص@̵@@e@U@@ƴ@o@@@@@_@;@Dz@@c@ܲ@@@@@@?@ڱ@@@\@߰@@@@@;@@0@|@@¯@®@@@@@@ʭ@@~@.@2@@@@(@@_ c=@??@y@Z# Gy/min u-2/secnone  Sr-90 Am-241b @@[@Z@@[@Z@@v@*@}@F@@@L@@|@J@@@@g@@@ھ@@@B@@i@@@x@@r@'@@@@@ù@1@@@@0@[@@@@@J@@A@?@F@~@ @@ߴ@@@G@W@ @@b@@@@@@g@@"@@@@@@)@԰@@v@ڰ@G@1@@̯@ @5@|@®@X@v@l@@6@_ c=@??@y@Z# Gy/min u-2/secnone  Sr-90 Am-241b @Y@\@\@@\@@@|@B@@M@j@@U@(@@@S@@@b@@@@p@@;@@@6@@Ž@ҽ@,@@@m@@»@@@ں@@H@@ǹ@K@U@"@@n@ݷ@@5@ @@,@@@Ķ@@@@@@@[@Ҵ@޴@S@@ҳ@ݳ@@@7@@@@˲@@|@8@@@̱@@R@"@Ͱ@@@2@@@_ c=@??@Z# Gy/min u-2/secnone  Sr-90 Am-241b @\@]@@^@_@.@E@@@d@@A@$@@]@@@p@)@@@^@@@@s@/@@@T@@@@@@,@@@T@/@Ӽ@}@@̻@@T@@@@@@@@@@@@Y@@@2@@ж@@g@@A@ @M@@@D@H@|@@@@4@˳@س@@K@R@@@@@@j@i@9@@@@@_  c=@??@Z# Gy/min u-2/secnone  Sr-90 Am-241b @\@[@@[@X@l@Q@@@d@@M@@@}@&@@@s@@@^@R@@@q@@@@D@@@@r@@μ@ż@6@b@I@ں@@@}@_@*@@@߸@ɸ@@@@׷@*@2@@^@z@8@@@@w@δ@@@Ŵ@K@@$@@@@@B@@@Բ@k@L@0@K@Ա@@α@@@Q@@@@װ@ǰ@@_ ! c=@??@Z# Gy/min u-2/secnone  Sr-90 Am-241b @\@_@`@@^@@@T@@@j@@q@*@@w@&@@@q@@@a@H@@@G@b@3@B@T@E@@e@2@@@c@j@ @@]@@}@7@@@@@@\@@@@@@C@@m@)@@͵@͵@@d@`@P@#@̴@ô@r@1@Z@o@@h@@J@@t@@@@@@@@@u@@;@$@@@ư@_!" 
c=@??@Z# Gy/min u-2/secnone  Sr-90 Am-241b @a@]@\@`@N@@@o@Y@@@@@/@@@@@@@p@2@@@I@2@@@w@A@4@@S@ξ@~@@˽@@@a@@@\@ @W@@@@@$@@ @˹@&@@x@@3@'@@ܷ@l@@ж@@5@\@4@#@@@}@ܴ@K@@@G@@2@̳@@@@s@l@IJ@@@ @y@@@:@B@_"# c=@??@Z# Gy/min u-2/secnone  Sr-90 Am-241b @@Z@]@@_@]@$@w@@@z@"@@@@@@ @@@i@@@@*@@@@9@@@i@@@ӽ@f@G@E@ @m@f@ @k@b@@>@@;@@@k@˸@ȸ@U@@@\@@w@g@@ƶ@@@@@@Z@@@޴@ @@6@i@@+@@@{@j@@@@@@x@.@ @@@@n@@u@@@_#$ cf@=@??I@Z# Gy/min u-2/secnone  Sr-90 Am-241b \@(@,@0@*@@@@H@@@T@<@\@@d@t@@@l@@@@@@P@@@@@@@@@p@@X@@@@@ @`@P@@@@`@@@ȉ@@x@@@@@@@@@ @@І@@@@@`@@@@(@p@X@p@@@X@@`@@؄@@ȃ@@X@8@p@X@@P@@@@@@@X@_$% cf@=@??I@Z# Gy/min u-2/secnone  Sr-90 Am-241b U@"@.@$@.@@@x@@@@đ@@X@@@(@`@@@@h@h@@T@@P@0@@H@P@@@@x@ @0@x@ @ȉ@؈@ȉ@H@@@@@0@Ї@P@@@@h@؅@؇@@(@P@؅@@@@@X@p@X@@@@@(@h@ @8@@@@@@@@x@@@Ё@@@@؀@@h@@X@@@@0@@_%& cf@=@??I@Z# Gy/min u-2/secnone  Sr-90 Am-241b \@*@0@,@0@@|@@@@@@̓@h@@̒@@@@@@@@h@@p@@@@0@T@@@ @@ȍ@@@H@0@@@p@x@@@x@@@ @@@X@@P@@@@@@@@@@@@@P@@@@@@@p@ @Ѕ@@h@Є@@@@@@P@@@(@@@@0@@@@@_&( cf@=@??Y@Z# Gy/min u-2/secnone  Sr-90 Am-241b h@5@<@9@8@,@ʢ@@@@r@2@@0@|@@@x@@@<@@@@L@@4@@@x@h@8@@ؙ@|@̚@8@Ę@@̘@Й@@@@4@@@`@L@@@@@@@@Ԗ@L@\@@@ē@<@L@@@(@ē@@T@l@@T@@<@@<@ؑ@@@@@@0@ @ @D@@@@@<@l@D@@@_') cf@=@??Y@Z# Gy/min u-2/secnone  Sr-90 Am-241b `i@5@<@A@>@~@@@2@b@@*@T@|@"@V@ޢ@֢@@ @Ρ@n@"@@@@x@@0@؟@@@@@ @T@P@@@ě@8@@<@@@Ě@@ @p@@@@ȗ@h@d@@ܗ@ @h@@@\@P@`@@t@@t@P@x@@@@$@@ @@@L@ @4@X@x@0@ @@@@@@@@8@0@0@_(* cf@=@??Y@Z# Gy/min u-2/secnone  Sr-90 Am-241b o@<@6@<@:@@@@@ܢ@ @&@j@@0@@@@@&@@@@d@p@Ԟ@`@P@@@@@X@p@ԛ@@x@L@@$@@@@$@,@ @8@ȗ@@ė@|@ܖ@@P@ԕ@@@@@(@4@ܔ@Д@@@ؔ@@8@@ @@D@@@@L@@@\@@@@@đ@đ@@@x@@@@T@@@@@X@@ @@_)+ cf@=@??c@Z# Gy/min u-2/secnone  Sr-90 Am-241b o@>@B@C@E@@@@@<@P@ĩ@t@@@D@d@@0@\@@B@@@ĥ@@@@@@n@@2@~@@֢@ʢ@D@ @Ģ@@r@֠@@@H@6@r@p@@r@<@D@@(@@@\@@@@@؜@P@<@@@t@@@(@p@d@@t@Й@@$@0@ԙ@@@@З@@l@D@ؗ@@З@@ @@X@@_*, cf@=@??c@Z# Gy/min u-2/secnone  Sr-90 Am-241b 0r@A@D@@@C@@Ь@@n@ @@ @@@@@~@@ @.@X@@x@x@@l@8@@<@@@@p@T@@@\@@آ@ʢ@@@@@@@@t@N@ڠ@\@@R@@@ @@؟@؟@@@@@H@@@Ԝ@@$@@l@@@@@@@@ܚ@@Ț@@p@@|@T@@@@@Ę@@@`@(@T@_+- cf@=@??c@Z# Gy/min u-2/secnone  Sr-90 Am-241b 
@w@C@B@C@B@\@,@J@@>@@@@@@@@F@@,@@@@@@ @@@@|@(@(@B@@"@4@@أ@@ @¢@@@@~@@ȡ@.@@@@ @@Ƞ@@R@2@@>@,@̞@@p@ܞ@L@@@Н@ @@`@@P@D@H@|@@@ؚ@p@@@\@,@@<@@X@\@Ԙ@@ @@@@\@H@_,. cf@=@??@i@Z# Gy/min u-2/secnone  Sr-90 Am-241b 0y@H@H@G@L@R@@@@±@o@@\@@@@7@@ʯ@\@"@@@ƭ@l@@j@.@@ @&@F@Ԫ@@Ω@@ީ@@@$@D@8@ @ @`@@@8@@@6@@Х@z@@@@@@6@@~@ܣ@@@v@|@@d@@p@~@@@@ҡ@@@N@t@l@@@Р@@@@@~@*@@(@ @@@_-/ cf@=@??@i@Z# Gy/min u-2/secnone  Sr-90 Am-241b u@I@G@H@E@@@(@@ٰ@?@2@b@j@@@@B@@j@L@0@@@@@8@@L@@@ڨ@@@r@@@2@@@x@@@@t@,@@@@X@@ @j@*@@v@@0@@@@@ȡ@@@(@ġ@ޡ@t@@`@@Ԡ@~@@>@X@X@H@@@@@@؞@\@X@T@h@Н@H@@\@@@ț@_.0 cf@=@??@i@Z# Gy/min u-2/secnone  Sr-90 Am-241b w@G@H@F@G@:@y@"@@*@հ@ް@@@@2@@@X@@@@F@@@@V@.@@ @H@:@̨@r@ب@@@@v@Z@>@:@@n@@@@@P@f@@T@n@@B@ң@z@@@@0@@J@\@@@@@R@L@H@@:@@p@@@@@@ȟ@@&@@@|@$@@d@@@ @@@l@̝@_/1 cf@=@??r@Z# Gy/min u-2/secnone  Sr-90 Am-241b @R@P@Q@P@@I@M@@ȹ@ȸ@@@@v@@I@ @@9@@N@@@@g@@@ij@ʳ@ @@@,@J@@ұ@@1@@@A@@@&@\@.@@n@b@(@R@@`@ڭ@^@Ԭ@r@@D@~@@@N@@Ԫ@@t@@@@*@@@P@X@@>@֨@F@X@@Ħ@Z@@ʦ@ @@@@@Х@@Х@@*@_02 cf@=@??r@Z# Gy/min u-2/secnone  Sr-90 Am-241b @K@F@N@O@װ@@@@@>@޳@@@@Y@@߱@@P@b@ @۰@@M@ޯ@!@ @ܮ@h@@@@@@@@$@@@@>@@$@@@P@@2@&@F@@@@~@@@@@ڦ@"@$@@n@ @@@@8@z@@X@@j@,@\@Ң@@@@@~@@@@@4@`@@@^@@^@@@@_13 cf@=@??r@Z# Gy/min u-2/secnone  Sr-90 Am-241b (@P@N@@P@P@@@_@@@@@@@@϶@g@@@@@մ@@@#@@@c@@m@z@5@D@ȱ@@G@ @U@@@@v@<@@@\@ @@`@<@@@ @H@$@@>@Ϋ@ҫ@"@J@0@@@N@j@@ܩ@D@L@|@\@@@Ԩ@ܧ@ʧ@@ȧ@@|@4@@Φ@Ҧ@Ҧ@@@,@Υ@ҥ@@@4@@F@@_24 c=@??Z# Gy/min u-2/secnone  Sr-90 Am-241b c@2@;@;@4@@ޡ@@t@ҡ@@@@D@@H@l@@@@̜@d@@@@@@@@@@@@p@D@@@@@@@@4@h@@T@ؕ@@<@l@$@@@ܓ@0@@@@@x@@<@@@@@@@@@@@@@А@p@̐@@@@@@p@@X@@@@@@H@@@@x@@@_35 c=@??Z# Gy/min u-2/secnone  Sr-90 Am-241b @h@:@@@3@2@ؙ@$@@@@@(@$@@l@ @ @؜@@@ԛ@@@@h@@@L@\@,@@@@@@@Е@d@8@@@0@@@@ؓ@@@@@@ @@T@ @@@@@ @@`@@@@\@H@@@H@0@@0@Џ@(@@@@@@@@@@`@@0@@@@@P@@@@@_46 c=@??Z# Gy/min u-2/secnone  Sr-90 Am-241b d@4@8@1@2@@6@@@@@f@@@@4@О@<@@@d@d@@@(@@@@@@h@@<@|@@@@@@p@@4@p@@@\@`@@|@0@@P@P@ԓ@ @\@|@ @@@@@@@ @|@ @ @@@l@@Ȑ@@ @<@Џ@T@8@8@@\@H@4@@@@@Ȍ@0@@@x@h@P@@_57 c=@??Z# Gy/min u-2/secnone  Sr-90 Am-241b 
e@;@8@=@8@@@@@f@@*@|@@@@ĝ@(@@@P@@@@`@8@@@̙@ܙ@0@@@@@@$@@@@@@@@@`@h@`@@@@@@ @@\@ȓ@@@H@̒@@(@@@@@x@@Б@@0@@8@@̐@@@l@@@(@@x@@@@@@X@@ @@Ѝ@@_68 c=@??Z# Gy/min u-2/secnone  Sr-90 Am-241b f@6@3@6@8@D@r@@ܟ@P@T@@@ @@@ @ț@@@l@@@@@@T@$@@|@D@,@@ė@t@\@@@@@8@\@@@p@@@@@@@@P@@@@P@P@@`@@@@@@ؐ@\@Ȑ@@@l@X@ȏ@@@@h@P@@@؎@@0@@@ȋ@X@@X@`@Ћ@@@@Њ@@@@@_79 c=@??Z# Gy/min u-2/secnone  Sr-90 Am-241b h@8@8@<@8@H@@@x@֡@ @@@,@@B@@"@ @@@@@@ @@D@@؛@@@̚@ܙ@X@@t@\@$@@@ @@@@ܖ@8@P@@@|@x@@x@ @@@4@,@H@,@@p@@@8@P@ @d@@đ@,@ܑ@ؑ@@,@@@Đ@@@@@x@@0@H@@@@@Ѝ@Ѝ@(@X@@@Luminescence/inst/extdata/DorNie_0016.psl0000644000176200001440000003675613231137116017660 0ustar liggesusers ========================================================================================= Run Name: ALU Sample no: 0016 Sequence Name: Praktikum2016 Filename: Praktikum2016 Dark Count: 15 c/s Light Count: 0 c/s Dark Count Correction: OFF 256 Offset Subtract: ON Datafile Path: D:\Results\DORNIE\ALU\ALU0016.psl Summary Path : D:\Results\DORNIE\ALU\summary\ALU.sum Run Sequence : Praktikum2016 ----------------------------------------------------------------------------------------- \ L11 @ 5/19/2016 4:45:12 PM / '--------------------------' ---------------------------------------------------------------------------------------- Measurement : DARK 15s | Stim 0 | On/Off(us) 15,15 | Cycle(ms),No 1000, 15 ---------------------------------------------------------------------------------------- Time (s) Total Count Counts per Cycle -------- --------------------- --------------------- 1 0 +/- 4 0 +/- 4 2 -2 +/- 6 -2 +/- 4 3 12 +/- 8 14 +/- 5 4 19 +/- 9 7 +/- 5 5 25 +/- 10 6 +/- 5 6 18 +/- 10 -7 +/- 5 7 17 +/- 11 -1 +/- 4 8 23 +/- 12 6 +/- 5 9 15 +/- 12 -8 +/- 5 10 18 +/- 13 3 +/- 4 11 16 +/- 13 -2 +/- 4 12 22 +/- 14 6 +/- 5 13 22 +/- 15 0 +/- 4 14 28 +/- 15 6 +/- 5 15 25 +/- 16 -3 +/- 4 Terminal Count = 25 +/- 16 ---------------------------------------------------------------------------------------- Measurement : S1 15 0 100s | Stim 1 | On/Off(us) 15, 0 | 
Cycle(ms),No 1000, 100 ---------------------------------------------------------------------------------------- Time (s) Total Count Counts per Cycle -------- --------------------- --------------------- 1 16660 +/- 129 16660 +/- 129 2 32789 +/- 181 16129 +/- 127 3 48649 +/- 221 15860 +/- 126 4 64422 +/- 254 15773 +/- 126 5 77931 +/- 279 13509 +/- 116 6 93102 +/- 305 15171 +/- 123 7 108127 +/- 329 15025 +/- 123 8 122880 +/- 351 14753 +/- 122 9 137318 +/- 371 14438 +/- 120 10 151523 +/- 389 14205 +/- 119 11 165571 +/- 407 14048 +/- 119 12 179457 +/- 424 13886 +/- 118 13 193226 +/- 440 13769 +/- 117 14 206845 +/- 455 13619 +/- 117 15 220210 +/- 470 13365 +/- 116 16 233057 +/- 483 12847 +/- 113 17 246024 +/- 496 12967 +/- 114 18 258717 +/- 509 12693 +/- 113 19 271443 +/- 521 12726 +/- 113 20 284106 +/- 533 12663 +/- 113 21 296846 +/- 545 12740 +/- 113 22 309240 +/- 556 12394 +/- 111 23 321382 +/- 567 12142 +/- 110 24 333521 +/- 578 12139 +/- 110 25 345488 +/- 588 11967 +/- 109 26 357358 +/- 598 11870 +/- 109 27 369194 +/- 608 11836 +/- 109 28 380867 +/- 617 11673 +/- 108 29 392406 +/- 627 11539 +/- 107 30 403919 +/- 636 11513 +/- 107 31 415362 +/- 645 11443 +/- 107 32 426753 +/- 654 11391 +/- 107 33 438016 +/- 662 11263 +/- 106 34 449144 +/- 671 11128 +/- 106 35 460170 +/- 679 11026 +/- 105 36 471161 +/- 687 10991 +/- 105 37 481959 +/- 695 10798 +/- 104 38 492740 +/- 702 10781 +/- 104 39 503494 +/- 710 10754 +/- 104 40 513949 +/- 717 10455 +/- 102 41 524654 +/- 725 10705 +/- 104 42 535111 +/- 732 10457 +/- 102 43 545469 +/- 739 10358 +/- 102 44 556047 +/- 746 10578 +/- 103 45 566590 +/- 753 10543 +/- 103 46 576850 +/- 760 10260 +/- 101 47 587113 +/- 767 10263 +/- 101 48 597322 +/- 773 10209 +/- 101 49 607258 +/- 780 9936 +/- 100 50 617201 +/- 786 9943 +/- 100 51 626983 +/- 792 9782 +/- 99 52 636819 +/- 798 9836 +/- 99 53 646695 +/- 805 9876 +/- 99 54 655509 +/- 810 8814 +/- 94 55 665363 +/- 816 9854 +/- 99 56 674934 +/- 822 9571 +/- 98 57 684515 +/- 828 9581 +/- 98 58 
693948 +/- 834 9433 +/- 97 59 703516 +/- 839 9568 +/- 98 60 712982 +/- 845 9466 +/- 97 61 722344 +/- 850 9362 +/- 97 62 731611 +/- 856 9267 +/- 96 63 740851 +/- 861 9240 +/- 96 64 750276 +/- 867 9425 +/- 97 65 759405 +/- 872 9129 +/- 96 66 768537 +/- 877 9132 +/- 96 67 777573 +/- 882 9036 +/- 95 68 786739 +/- 888 9166 +/- 96 69 795598 +/- 893 8859 +/- 94 70 804571 +/- 898 8973 +/- 95 71 813485 +/- 903 8914 +/- 94 72 822406 +/- 907 8921 +/- 95 73 831225 +/- 912 8819 +/- 94 74 840160 +/- 917 8935 +/- 95 75 848871 +/- 922 8711 +/- 93 76 857733 +/- 927 8862 +/- 94 77 866580 +/- 932 8847 +/- 94 78 875191 +/- 936 8611 +/- 93 79 883976 +/- 941 8785 +/- 94 80 892559 +/- 945 8583 +/- 93 81 900899 +/- 950 8340 +/- 91 82 909380 +/- 954 8481 +/- 92 83 917905 +/- 959 8525 +/- 92 84 926191 +/- 963 8286 +/- 91 85 934738 +/- 967 8547 +/- 93 86 943030 +/- 972 8292 +/- 91 87 951445 +/- 976 8415 +/- 92 88 959676 +/- 980 8231 +/- 91 89 967730 +/- 984 8054 +/- 90 90 976219 +/- 989 8489 +/- 92 91 984294 +/- 993 8075 +/- 90 92 992370 +/- 997 8076 +/- 90 93 1000418 +/- 1001 8048 +/- 90 94 1008331 +/- 1005 7913 +/- 89 95 1016283 +/- 1009 7952 +/- 89 96 1024276 +/- 1013 7993 +/- 89 97 1032190 +/- 1017 7914 +/- 89 98 1040099 +/- 1021 7909 +/- 89 99 1047962 +/- 1024 7863 +/- 89 100 1055928 +/- 1028 7966 +/- 89 Terminal Count = 1055928 +/- 1028 ---------------------------------------------------------------------------------------- Measurement : DARK 15s | Stim 0 | On/Off(us) 15,15 | Cycle(ms),No 1000, 15 ---------------------------------------------------------------------------------------- Time (s) Total Count Counts per Cycle -------- --------------------- --------------------- 1 4 +/- 4 4 +/- 4 2 14 +/- 7 10 +/- 5 3 9 +/- 7 -5 +/- 4 4 8 +/- 8 -1 +/- 4 5 0 +/- 9 -8 +/- 5 6 -6 +/- 10 -6 +/- 5 7 7 +/- 11 13 +/- 5 8 12 +/- 11 5 +/- 4 9 -12 +/- 12 -24 +/- 6 10 -1 +/- 12 11 +/- 5 11 8 +/- 13 9 +/- 5 12 15 +/- 14 7 +/- 5 13 7 +/- 14 -8 +/- 5 14 -5 +/- 15 -12 +/- 5 15 -12 +/- 15 -7 +/- 5 Terminal 
Count = -12 +/- 15 ---------------------------------------------------------------------------------------- Measurement : S2 15 0 100s | Stim 2 | On/Off(us) 15, 0 | Cycle(ms),No 1000, 100 ---------------------------------------------------------------------------------------- Time (s) Total Count Counts per Cycle -------- --------------------- --------------------- 1 88023 +/- 297 88023 +/- 297 2 173608 +/- 417 85585 +/- 293 3 257002 +/- 507 83394 +/- 289 4 337842 +/- 581 80840 +/- 284 5 416668 +/- 646 78826 +/- 281 6 493191 +/- 702 76523 +/- 277 7 567633 +/- 753 74442 +/- 273 8 640003 +/- 800 72370 +/- 269 9 710432 +/- 843 70429 +/- 265 10 779934 +/- 883 69502 +/- 264 11 847323 +/- 921 67389 +/- 260 12 912894 +/- 956 65571 +/- 256 13 977444 +/- 989 64550 +/- 254 14 1040046 +/- 1020 62602 +/- 250 15 1101576 +/- 1050 61530 +/- 248 16 1161549 +/- 1078 59973 +/- 245 17 1220325 +/- 1105 58776 +/- 242 18 1277027 +/- 1130 56702 +/- 238 19 1333490 +/- 1155 56463 +/- 238 20 1389034 +/- 1179 55544 +/- 236 21 1443594 +/- 1202 54560 +/- 234 22 1497249 +/- 1224 53655 +/- 232 23 1548759 +/- 1245 51510 +/- 227 24 1600292 +/- 1265 51533 +/- 227 25 1650904 +/- 1285 50612 +/- 225 26 1700538 +/- 1304 49634 +/- 223 27 1749403 +/- 1323 48865 +/- 221 28 1796880 +/- 1341 47477 +/- 218 29 1844491 +/- 1358 47611 +/- 218 30 1890984 +/- 1375 46493 +/- 216 31 1937212 +/- 1392 46228 +/- 215 32 1982618 +/- 1408 45406 +/- 213 33 2027030 +/- 1424 44412 +/- 211 34 2071016 +/- 1439 43986 +/- 210 35 2113676 +/- 1454 42660 +/- 207 36 2156344 +/- 1469 42668 +/- 207 37 2198478 +/- 1483 42134 +/- 205 38 2240192 +/- 1497 41714 +/- 204 39 2281314 +/- 1511 41122 +/- 203 40 2321794 +/- 1524 40480 +/- 201 41 2361658 +/- 1537 39864 +/- 200 42 2401167 +/- 1550 39509 +/- 199 43 2439834 +/- 1562 38667 +/- 197 44 2474234 +/- 1573 34400 +/- 186 45 2511724 +/- 1585 37490 +/- 194 46 2549656 +/- 1597 37932 +/- 195 47 2586736 +/- 1609 37080 +/- 193 48 2623506 +/- 1620 36770 +/- 192 49 2659445 +/- 1631 35939 +/- 190 
50 2695350 +/- 1642 35905 +/- 190 51 2730486 +/- 1653 35136 +/- 187 52 2765285 +/- 1663 34799 +/- 187 53 2799384 +/- 1673 34099 +/- 185 54 2833349 +/- 1683 33965 +/- 184 55 2866981 +/- 1693 33632 +/- 183 56 2900327 +/- 1703 33346 +/- 183 57 2933060 +/- 1713 32733 +/- 181 58 2965878 +/- 1722 32818 +/- 181 59 2997817 +/- 1732 31939 +/- 179 60 3029628 +/- 1741 31811 +/- 178 61 3061474 +/- 1750 31846 +/- 178 62 3092488 +/- 1759 31014 +/- 176 63 3123846 +/- 1768 31358 +/- 177 64 3154486 +/- 1776 30640 +/- 175 65 3184857 +/- 1785 30371 +/- 174 66 3215039 +/- 1793 30182 +/- 174 67 3244570 +/- 1802 29531 +/- 172 68 3274321 +/- 1810 29751 +/- 173 69 3303533 +/- 1818 29212 +/- 171 70 3332132 +/- 1826 28599 +/- 169 71 3360314 +/- 1833 28182 +/- 168 72 3388958 +/- 1841 28644 +/- 169 73 3417112 +/- 1849 28154 +/- 168 74 3444699 +/- 1856 27587 +/- 166 75 3472236 +/- 1864 27537 +/- 166 76 3499511 +/- 1871 27275 +/- 165 77 3526575 +/- 1878 27064 +/- 165 78 3553717 +/- 1885 27142 +/- 165 79 3580372 +/- 1893 26655 +/- 163 80 3606875 +/- 1899 26503 +/- 163 81 3633171 +/- 1906 26296 +/- 162 82 3658384 +/- 1913 25213 +/- 159 83 3684158 +/- 1920 25774 +/- 161 84 3708882 +/- 1926 24724 +/- 157 85 3734219 +/- 1933 25337 +/- 159 86 3759580 +/- 1939 25361 +/- 159 87 3784542 +/- 1946 24962 +/- 158 88 3808953 +/- 1952 24411 +/- 156 89 3833380 +/- 1958 24427 +/- 156 90 3857979 +/- 1965 24599 +/- 157 91 3882107 +/- 1971 24128 +/- 155 92 3906050 +/- 1977 23943 +/- 155 93 3927964 +/- 1982 21914 +/- 148 94 3951776 +/- 1988 23812 +/- 154 95 3975292 +/- 1994 23516 +/- 153 96 3998638 +/- 2000 23346 +/- 153 97 4021603 +/- 2006 22965 +/- 152 98 4044558 +/- 2011 22955 +/- 152 99 4067386 +/- 2017 22828 +/- 151 100 4089938 +/- 2023 22552 +/- 150 Terminal Count = 4089938 +/- 2023 ---------------------------------------------------------------------------------------- Measurement : DARK 15s | Stim 0 | On/Off(us) 15,15 | Cycle(ms),No 1000, 15 
---------------------------------------------------------------------------------------- Time (s) Total Count Counts per Cycle -------- --------------------- --------------------- 1 -22 +/- 6 -22 +/- 6 2 -23 +/- 7 -1 +/- 4 3 2 +/- 7 25 +/- 6 4 2 +/- 8 0 +/- 4 5 14 +/- 9 12 +/- 5 6 18 +/- 10 4 +/- 4 7 30 +/- 12 12 +/- 5 8 17 +/- 12 -13 +/- 5 9 43 +/- 13 26 +/- 6 10 27 +/- 13 -16 +/- 6 11 1 +/- 13 -26 +/- 6 12 -11 +/- 14 -12 +/- 5 13 17 +/- 15 28 +/- 7 14 9 +/- 15 -8 +/- 5 15 -1 +/- 15 -10 +/- 5 Terminal Count = -1 +/- 15 Luminescence/inst/extdata/Daybreak_TestFile.txt0000644000176200001440000003151313231137116021356 0ustar liggesusersScriptFile=C:\Program Files\Daybreak\FL Console\Scripts\active\QSAR23c.TXT ScriptName=Single aliquot regenerative dose method Sample= ReaderID=TLINFO.DTA ReaderType=2200 SingleAliquot=true [NewRecord] Operation=1 SampleType=Disk Disk=0 DataType=PREHEAT TL MaxTemp=200 HoldTime=10 RampRate=3 Started=6/4/2015 3:58:46 PM Stopped=6/4/2015 4:00:52 PM Points=40 0; 5.000; 30.000; valid 1; 10.000; 32.000; valid 2; 15.000; 30.000; valid 3; 20.000; 28.000; valid 4; 25.000; 32.000; valid 5; 30.000; 33.000; valid 6; 35.000; 37.000; valid 7; 40.000; 31.000; valid 8; 45.000; 29.000; valid 9; 50.000; 25.000; valid 10; 55.000; 37.000; valid 11; 60.000; 37.000; valid 12; 65.000; 36.000; valid 13; 70.000; 34.000; valid 14; 75.000; 28.000; valid 15; 80.000; 36.000; valid 16; 85.000; 35.000; valid 17; 90.000; 32.000; valid 18; 95.000; 31.000; valid 19; 100.000; 31.000; valid 20; 105.000; 31.000; valid 21; 110.000; 41.000; valid 22; 115.000; 28.000; valid 23; 120.000; 39.000; valid 24; 125.000; 28.000; valid 25; 130.000; 39.000; valid 26; 135.000; 28.000; valid 27; 140.000; 31.000; valid 28; 145.000; 36.000; valid 29; 150.000; 43.000; valid 30; 155.000; 34.000; valid 31; 160.000; 39.000; valid 32; 165.000; 31.000; valid 33; 170.000; 35.000; valid 34; 175.000; 41.000; valid 35; 180.000; 45.000; valid 36; 185.000; 46.000; valid 37; 190.000; 50.000; valid 38; 
195.000; 70.000; valid 39; 200.000; 82.000; valid [NewRecord] Operation=2 SampleType=Disk Disk=0 DataType=OSL SampleTemp=125 Power=95.00 Started=6/4/2015 4:01:23 PM Stopped=6/4/2015 4:03:03 PM Points=325 0; 0.100; 303.000; valid 1; 0.200; 335.000; valid 2; 0.300; 344.000; valid 3; 0.400; 332.000; valid 4; 0.500; 304.000; valid 5; 0.600; 325.000; valid 6; 0.700; 324.000; valid 7; 0.800; 292.000; valid 8; 0.900; 290.000; valid 9; 1.000; 301.000; valid 10; 1.100; 310.000; valid 11; 1.200; 279.000; valid 12; 1.300; 288.000; valid 13; 1.400; 298.000; valid 14; 1.500; 272.000; valid 15; 1.600; 292.000; valid 16; 1.700; 323.000; valid 17; 1.800; 273.000; valid 18; 1.900; 290.000; valid 19; 2.000; 315.000; valid 20; 2.100; 296.000; valid 21; 2.200; 286.000; valid 22; 2.300; 280.000; valid 23; 2.400; 293.000; valid 24; 2.500; 299.000; valid 25; 2.600; 260.000; valid 26; 2.700; 275.000; valid 27; 2.800; 267.000; valid 28; 2.900; 252.000; valid 29; 3.000; 278.000; valid 30; 3.100; 276.000; valid 31; 3.200; 268.000; valid 32; 3.300; 278.000; valid 33; 3.400; 293.000; valid 34; 3.500; 266.000; valid 35; 3.600; 279.000; valid 36; 3.700; 257.000; valid 37; 3.800; 272.000; valid 38; 3.900; 264.000; valid 39; 4.000; 279.000; valid 40; 4.100; 263.000; valid 41; 4.200; 264.000; valid 42; 4.300; 246.000; valid 43; 4.400; 285.000; valid 44; 4.500; 252.000; valid 45; 4.600; 238.000; valid 46; 4.700; 257.000; valid 47; 4.800; 280.000; valid 48; 4.900; 264.000; valid 49; 5.000; 242.000; valid 50; 5.100; 256.000; valid 51; 5.200; 257.000; valid 52; 5.300; 238.000; valid 53; 5.400; 234.000; valid 54; 5.500; 239.000; valid 55; 5.600; 252.000; valid 56; 5.700; 220.000; valid 57; 5.800; 239.000; valid 58; 5.900; 234.000; valid 59; 6.000; 233.000; valid 60; 6.100; 227.000; valid 61; 6.200; 215.000; valid 62; 6.300; 252.000; valid 63; 6.400; 214.000; valid 64; 6.500; 231.000; valid 65; 6.600; 232.000; valid 66; 6.700; 214.000; valid 67; 6.800; 242.000; valid 68; 6.900; 216.000; valid 69; 7.000; 
216.000; valid 70; 7.100; 225.000; valid 71; 7.200; 204.000; valid 72; 7.300; 224.000; valid 73; 7.400; 235.000; valid 74; 7.500; 227.000; valid 75; 7.600; 207.000; valid 76; 7.700; 209.000; valid 77; 7.800; 221.000; valid 78; 7.900; 207.000; valid 79; 8.000; 221.000; valid 80; 8.100; 200.000; valid 81; 8.200; 214.000; valid 82; 8.300; 228.000; valid 83; 8.400; 207.000; valid 84; 8.500; 200.000; valid 85; 8.600; 195.000; valid 86; 8.700; 193.000; valid 87; 8.800; 187.000; valid 88; 8.900; 212.000; valid 89; 9.000; 197.000; valid 90; 9.100; 206.000; valid 91; 9.200; 227.000; valid 92; 9.300; 202.000; valid 93; 9.400; 180.000; valid 94; 9.500; 186.000; valid 95; 9.600; 193.000; valid 96; 9.700; 203.000; valid 97; 9.800; 204.000; valid 98; 9.900; 189.000; valid 99; 10.000; 202.000; valid 100; 10.100; 186.000; valid 101; 10.200; 174.000; valid 102; 10.300; 180.000; valid 103; 10.400; 208.000; valid 104; 10.500; 194.000; valid 105; 10.600; 185.000; valid 106; 10.700; 194.000; valid 107; 10.800; 184.000; valid 108; 10.900; 178.000; valid 109; 11.000; 170.000; valid 110; 11.100; 181.000; valid 111; 11.200; 181.000; valid 112; 11.300; 183.000; valid 113; 11.400; 192.000; valid 114; 11.500; 167.000; valid 115; 11.600; 179.000; valid 116; 11.700; 175.000; valid 117; 11.800; 161.000; valid 118; 11.900; 145.000; valid 119; 12.000; 175.000; valid 120; 12.100; 192.000; valid 121; 12.200; 178.000; valid 122; 12.300; 164.000; valid 123; 12.400; 162.000; valid 124; 12.500; 188.000; valid 125; 12.600; 173.000; valid 126; 12.700; 170.000; valid 127; 12.800; 191.000; valid 128; 12.900; 156.000; valid 129; 13.000; 159.000; valid 130; 13.100; 173.000; valid 131; 13.200; 154.000; valid 132; 13.300; 155.000; valid 133; 13.400; 151.000; valid 134; 13.500; 165.000; valid 135; 13.600; 161.000; valid 136; 13.700; 150.000; valid 137; 13.800; 161.000; valid 138; 13.900; 161.000; valid 139; 14.000; 168.000; valid 140; 14.100; 152.000; valid 141; 14.200; 157.000; valid 142; 14.300; 171.000; valid 
143; 14.400; 154.000; valid 144; 14.500; 162.000; valid 145; 14.600; 162.000; valid 146; 14.700; 173.000; valid 147; 14.800; 162.000; valid 148; 14.900; 154.000; valid 149; 15.000; 155.000; valid 150; 15.100; 139.000; valid 151; 15.200; 134.000; valid 152; 15.300; 148.000; valid 153; 15.400; 148.000; valid 154; 15.500; 155.000; valid 155; 15.600; 140.000; valid 156; 15.700; 151.000; valid 157; 15.800; 140.000; valid 158; 15.900; 147.000; valid 159; 16.000; 146.000; valid 160; 16.100; 146.000; valid 161; 16.200; 145.000; valid 162; 16.300; 127.000; valid 163; 16.400; 156.000; valid 164; 16.500; 144.000; valid 165; 16.600; 145.000; valid 166; 16.700; 137.000; valid 167; 16.800; 131.000; valid 168; 16.900; 130.000; valid 169; 17.000; 146.000; valid 170; 17.100; 133.000; valid 171; 17.200; 121.000; valid 172; 17.300; 114.000; valid 173; 17.400; 146.000; valid 174; 17.500; 122.000; valid 175; 17.600; 131.000; valid 176; 17.700; 144.000; valid 177; 17.800; 134.000; valid 178; 17.900; 147.000; valid 179; 18.000; 126.000; valid 180; 18.100; 131.000; valid 181; 18.200; 120.000; valid 182; 18.300; 133.000; valid 183; 18.400; 138.000; valid 184; 18.500; 127.000; valid 185; 18.600; 140.000; valid 186; 18.700; 125.000; valid 187; 18.800; 133.000; valid 188; 18.900; 128.000; valid 189; 19.000; 114.000; valid 190; 19.100; 131.000; valid 191; 19.200; 113.000; valid 192; 19.300; 123.000; valid 193; 19.400; 128.000; valid 194; 19.500; 127.000; valid 195; 19.600; 132.000; valid 196; 19.700; 120.000; valid 197; 19.800; 109.000; valid 198; 19.900; 107.000; valid 199; 20.000; 112.000; valid 200; 20.100; 108.000; valid 201; 20.200; 124.000; valid 202; 20.300; 110.000; valid 203; 20.400; 120.000; valid 204; 20.500; 117.000; valid 205; 20.600; 115.000; valid 206; 20.700; 122.000; valid 207; 20.800; 102.000; valid 208; 20.900; 128.000; valid 209; 21.000; 105.000; valid 210; 21.100; 114.000; valid 211; 21.200; 114.000; valid 212; 21.300; 107.000; valid 213; 21.400; 118.000; valid 214; 
21.500; 118.000; valid 215; 21.600; 120.000; valid 216; 21.700; 113.000; valid 217; 21.800; 126.000; valid 218; 21.900; 119.000; valid 219; 22.000; 108.000; valid 220; 22.100; 92.000; valid 221; 22.200; 92.000; valid 222; 22.300; 94.000; valid 223; 22.400; 107.000; valid 224; 22.500; 117.000; valid 225; 22.600; 92.000; valid 226; 22.700; 105.000; valid 227; 22.800; 99.000; valid 228; 22.900; 123.000; valid 229; 23.000; 111.000; valid 230; 23.100; 102.000; valid 231; 23.200; 109.000; valid 232; 23.300; 96.000; valid 233; 23.400; 90.000; valid 234; 23.500; 94.000; valid 235; 23.600; 103.000; valid 236; 23.700; 98.000; valid 237; 23.800; 101.000; valid 238; 23.900; 108.000; valid 239; 24.000; 101.000; valid 240; 24.100; 100.000; valid 241; 24.200; 108.000; valid 242; 24.300; 93.000; valid 243; 24.400; 107.000; valid 244; 24.500; 107.000; valid 245; 24.600; 92.000; valid 246; 24.700; 94.000; valid 247; 24.800; 89.000; valid 248; 24.900; 97.000; valid 249; 25.000; 95.000; valid 250; 26.000; 892.000; valid 251; 27.000; 890.000; valid 252; 28.000; 841.000; valid 253; 29.000; 850.000; valid 254; 30.000; 807.000; valid 255; 31.000; 791.000; valid 256; 32.000; 753.000; valid 257; 33.000; 728.000; valid 258; 34.000; 703.000; valid 259; 35.000; 651.000; valid 260; 36.000; 640.000; valid 261; 37.000; 612.000; valid 262; 38.000; 550.000; valid 263; 39.000; 539.000; valid 264; 40.000; 524.000; valid 265; 41.000; 501.000; valid 266; 42.000; 503.000; valid 267; 43.000; 478.000; valid 268; 44.000; 468.000; valid 269; 45.000; 438.000; valid 270; 46.000; 434.000; valid 271; 47.000; 441.000; valid 272; 48.000; 388.000; valid 273; 49.000; 381.000; valid 274; 50.000; 372.000; valid 275; 51.000; 380.000; valid 276; 52.000; 349.000; valid 277; 53.000; 326.000; valid 278; 54.000; 317.000; valid 279; 55.000; 297.000; valid 280; 56.000; 299.000; valid 281; 57.000; 294.000; valid 282; 58.000; 258.000; valid 283; 59.000; 260.000; valid 284; 60.000; 264.000; valid 285; 61.000; 271.000; valid 
286; 62.000; 250.000; valid 287; 63.000; 241.000; valid 288; 64.000; 239.000; valid 289; 65.000; 241.000; valid 290; 66.000; 220.000; valid 291; 67.000; 215.000; valid 292; 68.000; 217.000; valid 293; 69.000; 203.000; valid 294; 70.000; 196.000; valid 295; 71.000; 201.000; valid 296; 72.000; 198.000; valid 297; 73.000; 188.000; valid 298; 74.000; 183.000; valid 299; 75.000; 173.000; valid 300; 76.000; 166.000; valid 301; 77.000; 167.000; valid 302; 78.000; 187.000; valid 303; 79.000; 169.000; valid 304; 80.000; 162.000; valid 305; 81.000; 145.000; valid 306; 82.000; 163.000; valid 307; 83.000; 151.000; valid 308; 84.000; 154.000; valid 309; 85.000; 135.000; valid 310; 86.000; 134.000; valid 311; 87.000; 133.000; valid 312; 88.000; 133.000; valid 313; 89.000; 141.000; valid 314; 90.000; 134.000; valid 315; 91.000; 133.000; valid 316; 92.000; 120.000; valid 317; 93.000; 128.000; valid 318; 94.000; 114.000; valid 319; 95.000; 147.000; valid 320; 96.000; 139.000; valid 321; 97.000; 114.000; valid 322; 98.000; 128.000; valid 323; 99.000; 113.000; valid 324; 100.000; 127.000; valid [NewRecord] Operation=3 SampleType=Disk Disk=0 DataType=NORM Irrad SampleTemp=20 IrradTime=257 IrradDose=20.02 Started=6/4/2015 4:03:51 PM Stopped=6/4/2015 4:08:15 PM Luminescence/inst/CITATION0000644000176200001440000000677113521300134015001 0ustar liggesuserscitHeader("To cite the R package 'Luminescence' please use the first two entries, and apply the rest if applicable:") citation(auto = meta) bibentry(bibtype = "Article", title = "Introducing an R package for luminescence dating analysis", author = "Sebastian Kreutzer, Christoph Schmidt, Margret C. Fuchs, Michael Dietze, Manfred Fischer, Markus Fuchs", year = "2012", journal = "Ancient TL", volume = "30", number = "1", pages = "1-8") bibentry(bibtype = "Article", title = "A practical guide to the R package Luminescence", author = "Michael Dietze, Sebastian Kreutzer, Margret C. 
Fuchs, Christoph Burow, Manfred Fischer, Christoph Schmidt", year = "2013", journal = "Ancient TL", volume = "31", number = "1", pages = "11-18") bibentry(bibtype = "Article", title = "Data processing in luminescence dating analysis: An exemplary workflow using the R package 'Luminescence'", author = "Margret C. Fuchs, Sebastian Kreutzer, Christoph Burow, Michael Dietze, Manfred Fischer, Christoph Schmidt, Markus Fuchs", year = "2015", journal = "Quaternary International", volume = "362", pages = "8-13", doi = "10.1016/j.quaint.2014.06.034") bibentry(bibtype = "Article", title = "A new R function for the Internal External Uncertainty (IEU) model", author = "Rachel K Smedley", journal = "Ancient TL", year = "2015", volume = "33", number = "1", pages = "16-21") bibentry(bibtype = "Article", title = "The abanico plot: visualising chronometric data with individual standard errors", author = "Michael Dietze, Sebastian Kreutzer, Christoph Burow, Margret C. Fuchs, Manfred Fischer, Christoph Schmidt", year = "2016", journal = "Quaternary Geochronology", volume = "31", pages = "12-18", doi = "10.1016/j.quageo.2015.09.003") bibentry(bibtype = "Article", title = "Bayesian statistics in luminescence dating: The baSAR-model and its implementation in the R package 'Luminescence'", author = "Norbert Mercier and Sebastian Kreutzer and Claire Christophe and Guillaume Guerin and Pierre Guibert and Christelle Lahaye and Philippe Lanos and Anne Philippe and Chantal Tribolo", year = "2016", journal = "Ancient TL", volume = "34", number = "2", pages = "14-21") bibentry(bibtype = "Article", title = "Software in the context of luminescence dating: status, concepts and suggestions exemplified by the R package 'Luminescence'", author = "Sebastian Kreutzer and Christoph Burow and Michael Dietze and Margret C. 
Fuchs and Manfred Fischer and Christoph Schmidt", year = "2017", journal = "Ancient TL", volume = "35", number = "2", pages = "1-11") bibentry(bibtype = "Article", title = "Environmental Dose Rate Determination Using a Passive Dosimeter: Techniques and Workflow for alpha-Al2O3:C Chips", author = "Sebastian Kreutzer and Loic Martin and Guillaume Guerin and Chantal Tribolo and Pierre Selva and Norbert Mercier", year = "2018", journal = "Geochronometria", volume = "45", pages = "56-67", doi = "10.1515/geochr-2015-0086") bibentry(bibtype = "Article", title = "Age determination using feldspar: Evaluating fading-correction model performance", author = "Georgina E.King and Christoph Burow and Helen M.Roberts and Nicholas J.G.Pearce", year = "2018", journal = "Radiation Measurements", volume = "119", pages = "58-73", doi = "10.1016/j.radmeas.2018.07.013")