diff --git a/CMakeLists.txt b/CMakeLists.txt index 65c330d..45d5d34 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -31,6 +31,15 @@ install (TARGETS jbdiagnose.x PERMISSIONS OWNER_EXECUTE OWNER_WRITE OWNER_READ GROUP_EXECUTE GROUP_READ DESTINATION libexec) +function (install_Rscripts) + foreach (file ${ARGV}) + get_filename_component (name_without_extension ${file} NAME_WE) + install (FILES ${file} + DESTINATION libexec) + endforeach () +endfunction() +install_Rscripts (src/preparing_conv.R) + add_subdirectory (scripts) add_subdirectory (share) add_subdirectory (tests) diff --git a/doc/tools/ObsTool.md b/doc/tools/ObsTool.md new file mode 100644 index 0000000..23aeaa6 --- /dev/null +++ b/doc/tools/ObsTool.md @@ -0,0 +1,249 @@ +# Documentation about How to use Obstool + + 1. Get the ascii file from odb: see HoWtogetINPUTfiles_obstool + 1. Set and Run scripts/1prepare_obstool.sh + 1. Set and Run scripts/2monitor_obstool.sh + +## How to get INPUT files_obstool: + +HOW to get the input files: From ODB files : examples for gnss ztd, radar, and sat obs + +## gnss ztd obs +The SQL from ODB2 +```bash +odb sql 'select statid,lat,lon,an_depar,fg_depar,obsvalue where varno=128' -i ccma.odb > ${DIROUT}/gnss_odb_${yyyy}${mm}${dd}${hh} +grep -v "obsvalue" ${DIROUT}/gnss_odb_${yyyy}${mm}${dd}${hh} > ${DIROUT}/ccma_mon_${yyyy}${mm}${dd}${hh} +``` + +Using conv.40h1.sql like: +``` +CREATE VIEW mondb AS +SELECT + type@desc , + expver@desc , + class@desc , + stream@desc , + andate@desc , + antime@desc , + reportype@hdr , + numtsl@desc , + timeslot@timeslot_index , + seqno@hdr , + bufrtype@hdr , + subtype@hdr , + groupid@hdr , + obstype@hdr , + codetype@hdr , + sensor@hdr , + statid@hdr , + date@hdr , + time@hdr , + report_status@hdr , + report_event1@hdr , + report_rdbflag@hdr , + degrees(lat) as lat@hdr , + degrees(lon) as lon@hdr , + lsm@modsurf , + orography@modsurf , + stalt@hdr , + flight_phase@conv , + anemoht@conv , + baroht@conv , + station_type@conv , + sonde_type@conv , + entryno@body , + obsvalue@body , + varno@body , + vertco_type@body , + vertco_reference_1@body , + datum_anflag@body , + datum_status@body , + datum_event1@body , + datum_rdbflag@body , + biascorr@body , + biascorr_fg@body , + an_depar@body , + fg_depar@body , + obs_error@errstat , + repres_error@errstat , + pers_error@errstat , + fg_error@errstat , + eda_spread@errstat , + mf_vertco_type@body , + mf_log_p@body , + mf_stddev@body +FROM desc,timeslot_index,hdr,modsurf,conv,body,errstat +WHERE obstype>=1 AND obstype<=6; +``` + +## Radar RH and DOW + +```bash +odb sql 'select statid,varno,lat,lon,vertco,an_depar,fg_depar,obsvalue where obstype=13 and varno=29' -i ccma.odb > ${DIROUT}/radar_RH_${yyyy}${mm}${dd}${hh} +odb sql 'select statid,varno,lat,lon,an_depar,fg_depar,obsvalue where obstype=13 and varno=195' -i ccma.odb > ${DIROUT}/radar_DOW_${yyyy}${mm}${dd}${hh} +grep -v "obsvalue" ${DIROUT}/radar_RH_${yyyy}${mm}${dd}${hh} > ${DIROUT}/ccma_mon_${yyyy}${mm}${dd}${hh} +# or +grep -v "obsvalue" ${DIROUT}/radar_DOW_${yyyy}${mm}${dd}${hh} > ${DIROUT}/ccma_mon_${yyyy}${mm}${dd}${hh} +``` + +Using radarv.cy40.sql like: +``` +CREATE VIEW radarv +SELECT 
+obstype@hdr,codetype@hdr,statid,varno,lat@hdr,lon@hdr,vertco_type@body,vertco_reference_1@body,sensor@hdr,date,time@hdr,report_status.active@hdr,report_status.blacklisted@hdr,report_status.passive@hdr,report_status.rejected@hdr,datum_status.active@body,datum_status.blacklisted@body,datum_status.passive@body,datum_status.rejected@body,an_depar,fg_depar,obsvalue,final_obs_error@errstat,elevation@radar_body,distance@radar_body,azimuth@radar_body +FROM hdr,desc,body,errstat,sat,radar,radar_body +WHERE (varno /= 91 ) AND (an_depar is not NULL) +``` + +## Satem obs + +``` +odbsql -T -q 'SELECT statid,vertco_reference_1,sensor,rad2deg(lat),rad2deg(lon),an_depar,tdiff(date,time,andate,antime)/60,fg_depar,obsvalue FROM hdr,body,desc where obstype=7' > ${DIROUT}/ccma_mon_${yyyy}${mm}${dd}${hh} +``` + +using satem.40h1.sql like: +``` +CREATE VIEW mondb AS +SELECT + type@desc , + expver@desc , + class@desc , + stream@desc , + andate@desc , + antime@desc , + numtsl@desc , + timeslot@timeslot_index , + latlon_rad@desc , + enda_member@desc , + seqno@hdr , + subseqno@hdr , + bufrtype@hdr , + subtype@hdr , + subsetno@hdr , + groupid@hdr , + reportype@hdr , + obstype@hdr , + codetype@hdr , + sensor@hdr , + retrtype@hdr , + instrument_type@hdr , + stalt@hdr , + date@hdr , + time@hdr , + numlev@hdr , + numactiveb@hdr , + checksum@hdr , + sortbox@hdr , + areatype@hdr , + report_status@hdr , + report_event1@hdr , + report_rdbflag@hdr , + report_blacklist@hdr , + report_event2@hdr , + thinningkey_1@hdr , + thinningkey_2@hdr , + thinningkey_3@hdr , + thinningtimekey@hdr , + sitedep@hdr , + source@hdr , + degrees(lat) as lat@hdr , + degrees(lon) as lon@hdr , + statid@hdr , + seaice@modsurf , + orography@modsurf , + snow_depth@modsurf , + t2m@modsurf , + albedo@modsurf , + windspeed10m@modsurf , + u10m@modsurf , + v10m@modsurf , + surface_class@modsurf , + tsfc@modsurf , + satellite_identifier@sat , + satellite_instrument@sat , + zenith@sat , + azimuth@sat , + solar_zenith@sat , + solar_azimuth@sat , + gen_centre@sat , + gen_subcentre@sat , + datastream@sat , + scanline@radiance , + scanpos@radiance , + orbit@radiance , + typesurf@radiance , + corr_version@radiance , + cldcover@radiance , + cldptop_1@radiance , + cldptop_2@radiance , + cldptop_3@radiance , + cldne_1@radiance , + cldne_2@radiance , + cldne_3@radiance , + skintemper@radiance , + skintemp_1@radiance , + skintemp_2@radiance , + skintemp_3@radiance , + scatterindex_89_157@radiance, + scatterindex_23_89@radiance, + scatterindex_23_165@radiance, + lwp_obs@radiance , + asr_pclear@radiance , + asr_pcloudy@radiance , + asr_pcloudy_low@radiance , + asr_pcloudy_middle@radiance, + asr_pcloudy_high@radiance , + csr_pclear@radiance_body , + cld_fg_depar@radiance_body , + rank_cld@radiance_body , + tausfc@radiance_body , + skintemp_retr@radiance_body, + emis_rtin@radiance_body , + emis_atlas@radiance_body , + emis_retr@radiance_body , + emis_fg@radiance_body , + entryno@body , + obsvalue@body , + varno@body , + vertco_type@body , + vertco_reference_1@body , + vertco_reference_2@body , + datum_anflag@body , + datum_status@body , + datum_event1@body , + datum_rdbflag@body , + datum_blacklist@body , + datum_event2@body , + varbc_ix@body , + biascorr@body , + biascorr_fg@body , + tbcorr@body , + wdeff_bcorr@body , + an_depar@body , + fg_depar@body , + actual_depar@body , + qc_a@body , + qc_l@body , + qc_pge@body , + fc_sens_obs@body , + an_sens_obs@body , + jacobian_peak@body , + jacobian_peakl@body , + jacobian_hpeak@body , + jacobian_hpeakl@body , + 
mf_vertco_type@body , + mf_log_p@body , + mf_stddev@body , + nlayer@body , + final_obs_error@errstat , + obs_error@errstat , + repres_error@errstat , + pers_error@errstat , + fg_error@errstat , + eda_spread@errstat , + obs_ak_error@errstat , + obs_corr_ev_1@errstat , + obs_corr_mask@errstat , + obs_corr_diag_1@errstat +FROM desc,timeslot_index,hdr,modsurf,sat,radiance,radiance_body,body,errstat WHERE obstype@hdr=7; +``` diff --git a/scripts/1prepare_obstool.sh b/scripts/1prepare_obstool.sh new file mode 100755 index 0000000..7c4c050 --- /dev/null +++ b/scripts/1prepare_obstool.sh @@ -0,0 +1,130 @@ +#!/bin/bash + +#set -xe +set -e + +#1prepare_obstool.sh +#Once we have ascii files with odbsql we start with thi script to prepare the data. + +PROGNAME=`basename $0` + +# Define a usage function +usage() { + +bold=$(tput bold) +normal=$(tput sgr0) +unline=$(tput smul) + +cat << USAGE + +${bold}NAME${normal} + ${PROGNAME} - estimate observation errors + +${bold}USAGE${normal} + ${PROGNAME} -i -o + -s -e + [ -h ] + +${bold}DESCRIPTION${normal} + Prepare input data for ObsTool error covariance estimation + +${bold}OPTIONS${normal} + -i ${unline}input-directory${normal} + input directory + + -o ${unline}output-directory${normal} + output directory + + -s ${unline}start-dtg${normal} + + -e ${unline}end-dtg${normal} + + -c ${unline}forecast-cycle${normal} + + -n ${unline}expt-name${normal} + Experiment name + + -h Help! Print usage information. + +USAGE +} + +SDTG=DUMMY +EDTG=DUMMY +FINT=3 +NEXP="DUMMY" + +while getopts i:o:s:e:c:n:h option +do + case $option in + i) + OTIN=$OPTARG + ;; + o) + OTOUT=$OPTARG +#EW if [ -d $OTOUT ]; then +#EW echo "Directory ${OTOUT} already exists. Please choose another name" +#EW exit 1 +#EW else +#EW mkdir -p ${OTOUT} +#EW fi + ;; + s) + SDTG=$OPTARG + ;; + e) + EDTG=$OPTARG + ;; + c) + FINT=$OPTARG + ;; + n) + NEXP=$OPTARG + ;; + h) + usage + exit 0 + ;; + *) + echo + echo "Try '${PROGNAME} -h' for more information" + ;; + esac +done + +# +# Where am I? +# +this_script_loc="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +appdir=$(dirname ${this_script_loc}) +bindir=${appdir}/bin +libdir=${appdir}/lib +exedir=${appdir}/libexec +levdir=${appdir}/share/levdef + +#For gnss ztd, radar, radar_dow and also for ascat: +exe=preparing_conv.R + +#========================================================== +# PREPARE STATISTICS +#========================================================== +CDTG=$SDTG +while [[ $CDTG -le $EDTG ]]; do + echo + echo "Processing CCMA data for ${CDTG} ..." + if [ ! -s ${OTIN}/ccma_mon_conv_${CDTG} ]; then + echo " ${PROGNAME}: Input ${OTIN}/ccma_mon_conv_${CDTG} does not exist." + echo " ${PROGNAME}: Aborting ..." + exit 1 + fi + sed -e "s/XXYYMMDDNTXX/${CDTG}/" ${exedir}/$exe > ${CDTG}_${exe} + + # Start visualization in R + echo "R CMD BATCH ${CDTG}_${exe}" + R CMD BATCH ${CDTG}_${exe} && rm -f ${CDTG}_${exe} + # increment by cycle h + DSTR="${CDTG:0:4}-${CDTG:4:2}-${CDTG:6:2} ${CDTG:8:2}:00:00" + CDTG=$(date -d "$DSTR $FINT hour" +%Y%m%d%H) +done + +exit 0 diff --git a/scripts/2monitor_obstool.sh b/scripts/2monitor_obstool.sh new file mode 100755 index 0000000..ecc016f --- /dev/null +++ b/scripts/2monitor_obstool.sh @@ -0,0 +1,74 @@ +#!/bin/bash +module load R + +# Definitions + +#Charge .profile +#. 
./profile_obstool.h + +#or you can have data here: +#--------------------- +# Experiment Setting +#--------------------- +#lEXP="AIBr_fr" +lEXP="AIBr_fr2" +cEXP="oper" +#Be careful: also write exp name in Rscrits/preparing_gnss.R and monitoring_gnss.R +#--------------------- +# Setting Date +#--------------------- +sdate=2019021100 +edate=2019022821 +#sdate=2019031712 +#edate=2019032606 +cycle=3 +#Observation +OBS=gnss_ztd + +#........................ + + +echo ${lEXP} + +#READ=".\/Files\/" +DIRW=/AccordDaTools/src + +#For gnss ztd, radar, radar_dow etc : +exe=monitoring_conv.R +#For ascat: +exe=monitoring_ascat.R +#For other satellite data: +#exe=monitoring_sat.R + +echo $sdate +echo $edate +date=$sdate + + +#========================================================== +# PLOT STATISTICS +#========================================================== + +cd ${DIRW} + + llEXP=`echo ${lEXP} | sed "s/ /\",\"/g"` + ccEXP=`echo ${cEXP} | sed "s/ /\",\"/g"` + echo $llEXP + echo $ccEXP + + sed \ + -e "s/XXEXPXX/${llEXP}/" \ + -e "s/XXCEXPXX/${ccEXP}/" \ + -e "s/sdate=\"\"/sdate=\"${sdate}\"/" \ + -e "s/edate=\"\"/edate=\"${edate}\"/" \ + -e "s/cyc=\"\"/cyc=\"${cycle}\"/" \ + $exe > run2.x + +# Start visualization in R + R CMD BATCH run2.x + echo 'Result at ~/../$OBS' + echo 'Output at ~/../src/run2.x.Rout' + +# Cleaning + rm -f run*.x + diff --git a/scripts/CMakeLists.txt b/scripts/CMakeLists.txt index 9c9f5e4..d827a54 100644 --- a/scripts/CMakeLists.txt +++ b/scripts/CMakeLists.txt @@ -11,4 +11,5 @@ endfunction() install_scripts (jbdiagnose.sh plotjbbal.py plotjbdiag.py) install_scripts (varbcdiagnose.sh plotvarbcpred.py) install_scripts (diacov.sh) +install_scripts (1prepare_obstool.sh) #install_scripts (diacov.sh festat.sh) diff --git a/scripts/profile_obstool.sh b/scripts/profile_obstool.sh new file mode 100644 index 0000000..ce5003a --- /dev/null +++ b/scripts/profile_obstool.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +#--------------------- +# Experiment Setting +#--------------------- +#lEXP="AIBr_fr" +lEXP="AIBr_fr2" +cEXP="oper" +#Be careful: also write exp name in Rscrits/preparing_gnss.R and monitoring_gnss.R +#--------------------- +# Setting Date +#--------------------- +sdate=2019021100 +edate=2019022821 +#sdate=2019031712 +#edate=2019032606 +cycle=3 + +#Observation +OBS=gnss_ztd +#----------------------------------- +# Data preparation or Visualisation +#----------------------------------- +#Preparation +LPREP=TRUE +LMON=FALSE +#or visualization +#LPREP=FALSE +#LMON=TRUE diff --git a/src/get_satinfo.R b/src/get_satinfo.R new file mode 100755 index 0000000..218f1d0 --- /dev/null +++ b/src/get_satinfo.R @@ -0,0 +1,27 @@ +get.satName <- function(sats.in, sens.in) +{ + # NAMES DEFINITON + satID = c(1) + satName = c("gnss") + + sensID = c(0) + sensName = c("ztd") + + names(satName) = as.character(satID) + names(sensName) = as.character(sensID) + + return(list(satName[[as.character(sats.in)]], sensName[[as.character(sens.in)]])) +} + +get.satInfo <- function(sensor.in) +{ + #---------------------------------------- + # CHANNEL SELECTION + #---------------------------------------- + selChan = list() + selChan[["0"]] = c(0) + + outSelChan = selChan[[as.character(sensor.in)]] + + return(list(outSelChan)) +} diff --git a/src/get_statistics.R b/src/get_statistics.R new file mode 100755 index 0000000..e294a8b --- /dev/null +++ b/src/get_statistics.R @@ -0,0 +1,75 @@ +prepare_mean_stat <- function(data, lvar) +{ + rransum = aggregate(list(ANSUM = data$ansum ), by=lvar, FUN=sum ) + rrfgsum = 
aggregate(list(FGSUM = data$fgsum ), by=lvar, FUN=sum ) + rrfgncsum = aggregate(list(FGNCSUM = data$fgncsum), by=lvar, FUN=sum ) + rrfgsqr = aggregate(list(FGSQR = data$fgsqr ), by=lvar, FUN=sum ) + rrnum = aggregate(list(NOBS = data$num ), by=lvar, FUN=sum ) + + # Prepare statistics for plotting + out = rrnum + out$BSFG = rrfgsum$FGSUM/rrnum$NOBS + out$BSFGNC= rrfgncsum$FGNCSUM/rrnum$NOBS + out$RMSFG = sqrt(rrfgsqr$FGSQR/rrnum$NOBS) + out$SDFG = sqrt((out$RMSFG)^2 - (out$BSFG)^2) + + return(out) +} + +prepare_diag_cov <- function(data, lvar) +{ + # COV + ag1 = aggregate(list(ASUM1 = data$Asum1 ), by=lvar, FUN=sum ) + ag2 = aggregate(list(FGSUM1 = data$FGsum1), by=lvar, FUN=sum ) + ag3 = aggregate(list(FGSUM2 = data$FGsum2), by=lvar, FUN=sum ) + ag4 = aggregate(list(AFGSQR = data$AFGsqr), by=lvar, FUN=sum ) + ag5 = aggregate(list(FGSQR = data$FGsqr ), by=lvar, FUN=sum ) + ag6 = aggregate(list(NOBS = data$num ), by=lvar, FUN=sum ) + # STD + ag7 = aggregate(list(FGSQR1 = data$FGsqr1 ), by=lvar, FUN=sum ) + ag8 = aggregate(list(FGSQR2 = data$FGsqr2 ), by=lvar, FUN=sum ) + ag9 = aggregate(list(ASQR1 = data$Asqr1 ), by=lvar, FUN=sum ) + + # Prepare statistics for plotting + #--------------------------------- + # Covariance + cov = ag6 + cov$COV.HL = (ag5$FGSQR/ag6$NOBS) - ((ag2$FGSUM1 * ag3$FGSUM2)/(ag6$NOBS)^2) + cov$COV.DR.B = cov$COV.HL - ((ag4$AFGSQR/ag6$NOBS) - ((ag1$ASUM1 * ag3$FGSUM2)/(ag6$NOBS)^2)) + cov$COV.DR.R = (ag4$AFGSQR/ag6$NOBS) - ((ag1$ASUM1 * ag3$FGSUM2)/(ag6$NOBS)^2) + + # Standard deviation + cov$sigmaFG1 = sqrt( (ag7$FGSQR1/ag6$NOBS) - (ag2$FGSUM1/ag6$NOBS)^2 ) + cov$sigmaFG2 = sqrt( (ag8$FGSQR2/ag6$NOBS) - (ag3$FGSUM2/ag6$NOBS)^2 ) + cov$sigmaA1 = sqrt( (ag9$ASQR1/ag6$NOBS) - (ag1$ASUM1/ag6$NOBS)^2 ) + + # Correlation + cov$COR.HL = cov$COV.HL/(cov$sigmaFG1 * cov$sigmaFG2) + cov$COR.DR.R = cov$COV.DR.R/(cov$sigmaA1 * cov$sigmaFG2) + cov$COR.DR.B = cov$COV.DR.B/(cov$sigmaFG1 * cov$sigmaFG2) + + return(cov) +} + +prepare_obs_error <- function(data, lvar) +{ + rransum = aggregate(list(ANSUM = data$ansum ), by=lvar, FUN=sum ) + rrfgsum = aggregate(list(FGSUM = data$fgsum ), by=lvar, FUN=sum ) + rrfgsqr = aggregate(list(FGSQR = data$fgsqr ), by=lvar, FUN=sum ) + rrafsqr = aggregate(list(AFSQR = data$afsqr ), by=lvar, FUN=sum ) + rrnum = aggregate(list(NOBS = data$num ), by=lvar, FUN=sum ) + + # Prepare statistics for plotting + out = rrnum + + # STD OF FG.DEP + out$BSAN = rransum$ANSUM/rrnum$NOBS + out$BSFG = rrfgsum$FGSUM/rrnum$NOBS + out$RMSFG = sqrt(rrfgsqr$FGSQR/rrnum$NOBS) + out$SDFG = sqrt((out$RMSFG)^2 - (out$BSFG)^2) + + # STD OF DEROZIERS + out$SDDER = sqrt((rrafsqr$AFSQR/rrnum$NOBS) - (out$BSFG * out$BSAN)) + + return(out) +} diff --git a/src/monitoring_ascat.R b/src/monitoring_ascat.R new file mode 100755 index 0000000..a02da84 --- /dev/null +++ b/src/monitoring_ascat.R @@ -0,0 +1,141 @@ +#This script can be used for ascat obs. 
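+# Outline: read.ODB() collects the per-date stat_sats_*.Rdat files, prepare_diag_cov() aggregates
+# them per satellite/sensor/channel and separation-distance bin, and plot.horiz() draws the
+# Hollingsworth/Lonnberg and Desroziers covariance and correlation estimates as a function of distance.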
+#Be CAREFUL: Fill the path in read0 and also the name of the ouput file , and the obs + +# Load packages +source("get_statistics.R") +source("get_satinfo.R") +source("visualisation.R") +source("read_odb.R") + +# Inputs +lexp = c("XXEXPXX") +cexp = c("XXCEXPXX") +sdate="" +edate="" +cyc="" + +# Set binning interval (if NULL than default) +read0="/../$OBS" +read= paste(read0,"/",exp, sep="") + +dist_interval = NULL # [km] +max_distance = NULL # [km] + +# ----------------------------------------- +# DEFINITION +# ----------------------------------------- +cyc.txt = paste(cyc,"hours") + +# ----------------------------------------- +# READ DATA +# ----------------------------------------- +collEXP = paste(lexp, collapse="_") +dbname = paste("source_sats_", collEXP, sep="") + +# Read data from ODB +dfData = read.ODB(sdate, edate, cycle=cyc.txt, dbname, rerun=FALSE) + +# ----------------------------------------- +# VISUALISATION +# ----------------------------------------- +daterange = c(as.POSIXct(min(dfData$date),tz="MEST"),as.POSIXct(max(dfData$date),tz="MEST")) +lsat = unique(dfData$sat) + +for ( isat in lsat ) +{ + msSAT = subset(dfData, sat==isat) + lsens = unique(msSAT$sens) + + for ( isens in lsens ) + { + + # Get satellite channel selection for the instrument + channels = get.satInfo(isens)[[1]] + + msSENS = subset(msSAT, sat==isat & sens==isens & chan%in%channels) + + plchan = unique(msSENS$chan) + nchan = length(plchan) + + csat = get.satName(isat, isens)[1] + csens = get.satName(isat, isens)[2] + + print(paste("Plotting ", csat, "/", csens, sep="")) + + if (nrow(msSENS) > 0) + { + ########################################################### + # PREPARE MEAN STATISTICS + ########################################################### + if ( !is.null(max_distance) & !is.null(dist_interval) ) + { + print(paste("New separation bin", dist_interval, "km up to", max_distance,"km", sep="")) + lDint = seq(0, max_distance, dist_interval) + cDint = seq(dist_interval/2, max_distance-(dist_interval/2), dist_interval) + + ldist = cut(msSENS$dist, lDint, labels=cDint, include.lowest=FALSE) + monSENS = prepare_diag_cov(msSENS, list(CHAN=msSENS$chan, DIST=ldist, EXP=msSENS$Exp)) + monSENS$DIST = as.numeric(as.character(monSENS[,"DIST"])) + + }else{ + + # OUT: COV.DR (Desroziers), COV.HL (Hollingsworth/Lonnberg) ~ CHAN, DIST, EXP + monSENS = prepare_diag_cov(msSENS, list(CHAN=msSENS$chan, DIST=msSENS$dist, EXP=msSENS$Exp)) + } + + ########################################################### + # HORIZONTAL PLOTS + ########################################################### + #----------------------------- + # COVARIANCE + #----------------------------- + pdf(paste("diag_cov_",isat,"_",isens,".pdf",sep="")) + + for (ichan in plchan) + { + mondata = subset(monSENS, CHAN==ichan) + + attach(mondata) + + # plot.horizontal(data, x, y) + plot.horiz( mondata, c("DIST"), c("COV.DR.B", "COV.HL"), legx = c("Desroziers.B", "FG_depar"), + main=paste(csat," / ",csens, " / Channel ", ichan, sep=""), + xlab="Distance [km]", ylab=expression("Covariance [" ~ K^2 ~ "]"), + LCOL.EXP=c("black"), LLWD=c(2,2), LLTY=c(2,1), LPCH=c(18,20), INCL0=TRUE, FIRST=2) + + detach(mondata) + + } # ichan + + dev.off() + + #----------------------------- + # CORRELATIONS + #----------------------------- + pdf(paste("diag_cor_",isat,"_",isens,".pdf",sep="")) + + for (ichan in plchan) + { + mondata = subset(monSENS, CHAN==ichan & DIST>0) + + attach(mondata) + + # plot.horizontal(data, x, y) + plot.horiz( mondata, c("DIST"), c("COR.DR.R"), legx = 
c("Desroziers.R"), + main=paste(csat," / ",csens, " / Channel ", ichan, sep=""), + xlab="Distance [km]", ylab="Correlation", ylim=c(0,1), + LCOL.EXP=c("black"), LLWD=c(2), LLTY=c(2), LPCH=c(20), INCL0=TRUE) + + detach(mondata) + + } # ichan + + dev.off() + + }else{ + + print(paste("Data for ", csat,"/", csens," are not available.", sep="")) + + } # test obsnum + } # isensor +} # isat diff --git a/src/monitoring_conv.R b/src/monitoring_conv.R new file mode 100755 index 0000000..5368028 --- /dev/null +++ b/src/monitoring_conv.R @@ -0,0 +1,141 @@ +#This script can be used for gnssztd, radar, or any conv obs. +#Be CAREFUL: Fill the path in read0 and also the name of the ouput file at line 75 and 109, and the obs at line 93 and 122 + +# Load packages +source("get_statistics.R") +source("get_satinfo.R") +source("visualisation.R") +source("read_odb.R") + +# Inputs +lexp = c("XXEXPXX") +cexp = c("XXCEXPXX") +sdate="" +edate="" +cyc="" +exp="" + +# Set binning interval (if NULL than default) +#read="/scratch/ms/es/mdy/HARM_DiagTool/gnss_obs" +read0="/AccordDATools/scripts" +read= paste(read0,"/",exp, sep="") + + +dist_interval = NULL # [km] +max_distance = NULL # [km] + +# ----------------------------------------- +# DEFINITION +# ----------------------------------------- +cyc.txt = paste(cyc,"hours") + +# ----------------------------------------- +# READ DATA +# ----------------------------------------- +collEXP = paste(lexp, collapse="_") +dbname = paste("source_sats_", collEXP, sep="") + +# Read data from ODB +dfData = read.ODB(sdate, edate, cycle=cyc.txt, dbname, rerun=FALSE) + +# ----------------------------------------- +# VISUALISATION +# ----------------------------------------- +daterange = c(as.POSIXct(min(dfData$date),tz="MEST"),as.POSIXct(max(dfData$date),tz="MEST")) + + + # if (nrow(msSENS) > 0) + # { + ########################################################### + # PREPARE MEAN STATISTICS + ########################################################### + if ( !is.null(max_distance) & !is.null(dist_interval) ) + { + print(paste("New separation bin", dist_interval, "km up to", max_distance,"km", sep="")) + lDint = seq(0, max_distance, dist_interval) + cDint = seq(dist_interval/2, max_distance-(dist_interval/2), dist_interval) + + ldist = cut(dfData$dist, lDint, labels=cDint, include.lowest=FALSE) + monSENS = prepare_diag_cov(dfData, list(DIST=ldist, EXP=dfData$Exp)) + monSENS$DIST = as.numeric(as.character(monSENS[,"DIST"])) + + }else{ + + # OUT: COV.DR (Desroziers), COV.HL (Hollingsworth/Lonnberg) DIST, EXP + monSENS = prepare_diag_cov(dfData, list(DIST=dfData$dist, EXP=dfData$Exp)) + } + + ########################################################### + # HORIZONTAL PLOTS + ########################################################### + #----------------------------- + # COVARIANCE + #----------------------------- + ##pdf(paste("diag_cov_",isat,"_",isens,".pdf",sep="")) + #pdf(paste("diag_cov_gnss_ztd",".pdf",sep="")) + png(paste("diag_cov_OBS",".png",sep="")) + + + #for (ichan in plchan) + #{ + #mondata = subset(monSENS, CHAN==ichan) + mondata = monSENS + + attach(mondata) + csat=1 + csens=2 + ichan=3 + # plot.horizontal(data, x, y) + #plot.horiz( mondata, c("DIST"), c("COV.DR.B", "COV.HL"), legx = c("Desroziers.B", "FG_depar"), + # main=paste(csat," / ",csens, " / Channel ", ichan, sep=""), + # xlab="Distance [km]", ylab=expression("Covariance [" ~ K^2 ~ "]"), + # LCOL.EXP=c("black"), LLWD=c(2,2), LLTY=c(2,1), LPCH=c(18,20), INCL0=TRUE, FIRST=2) + plot.horiz( mondata, c("DIST"), 
c("COV.DR.B", "COV.HL"), legx = c("Desroziers.B", "FG_depar"), + main=paste(" OBS ", sep=""), + xlab="Distance [km]", ylab=expression("Covariance [" ~ K^2 ~ "]"), + LCOL.EXP=c("black"), LLWD=c(2,2), LLTY=c(2,1), LPCH=c(18,20), INCL0=TRUE, FIRST=2) + + + detach(mondata) + + #} # ichan + + dev.off() + + #----------------------------- + # CORRELATIONS + #----------------------------- + ##pdf(paste("diag_cor_",isat,"_",isens,".pdf",sep="")) + #pdf(paste("diag_cor_gnss_ztd",".pdf",sep="")) + png(paste("diag_cor_gnss_ztd",".png",sep="")) + + #mondata = subset(monSENS, CHAN==ichan & DIST>0) + mondata = monSENS + + attach(mondata) + + # plot.horizontal(data, x, y) + #plot.horiz( mondata, c("DIST"), c("COR.DR.R"), legx = c("Desroziers.R"), + # main=paste(csat," / ",csens, " / Channel ", ichan, sep=""), + # xlab="Distance [km]", ylab="Correlation", ylim=c(0,1), + # LCOL.EXP=c("black"), LLWD=c(2), LLTY=c(2), LPCH=c(20), INCL0=TRUE) + plot.horiz( mondata, c("DIST"), c("COR.DR.R"), legx = c("Desroziers.R"), + main=paste(" GNSS ZTD ", sep=""), + xlab="Distance [km]", ylab="Correlation", ylim=c(0,1), + LCOL.EXP=c("black"), LLWD=c(2), LLTY=c(2), LPCH=c(20), INCL0=TRUE) + + + detach(mondata) + + + + dev.off() + + #}else{ + + #print(paste("Data for ", csat,"/", csens," are not available.", sep="")) + print(paste("Data are not available.", sep="")) + + #} # test obsnum +# } # isensor +#} # isat diff --git a/src/monitoring_sat.R b/src/monitoring_sat.R new file mode 100755 index 0000000..20547c4 --- /dev/null +++ b/src/monitoring_sat.R @@ -0,0 +1,144 @@ +#This script can be used for satellite obs. +#Be CAREFUL: Fill the path in read0 and also the name of the ouput file + +# Load packages +source("get_statistics.R") +source("get_satinfo.R") +source("visualisation.R") +source("read_odb.R") + +# Inputs +lexp = c("XXEXPXX") +cexp = c("XXCEXPXX") +sdate="" +edate="" +cyc="" + +# Set binning interval (if NULL than default) +read="/scratch/ms/es/mdy/HARM_DiagTool/sat_obs" + +dist_interval = NULL # [km] +max_distance = NULL # [km] + +# ----------------------------------------- +# DEFINITION +# ----------------------------------------- +cyc.txt = paste(cyc,"hours") + +# ----------------------------------------- +# READ DATA +# ----------------------------------------- +collEXP = paste(lexp, collapse="_") +dbname = paste("source_sats_", collEXP, sep="") + +# Read data from ODB +dfData = read.ODB(sdate, edate, cycle=cyc.txt, dbname, rerun=FALSE) + +# ----------------------------------------- +# VISUALISATION +# ----------------------------------------- +daterange = c(as.POSIXct(min(dfData$date),tz="MEST"),as.POSIXct(max(dfData$date),tz="MEST")) +lsat = unique(dfData$sat) + +for ( isat in lsat ) +{ + msSAT = subset(dfData, sat==isat) + lsens = unique(msSAT$sens) + + for ( isens in lsens ) + { + + # Get satellite channel selection for the instrument + channels = get.satInfo(isens)[[1]] + + msSENS = subset(msSAT, sat==isat & sens==isens & chan%in%channels) + + plchan = unique(msSENS$chan) + nchan = length(plchan) + + csat = get.satName(isat, isens)[1] + csens = get.satName(isat, isens)[2] + + print(paste("Plotting ", csat, "/", csens, sep="")) + + if (nrow(msSENS) > 0) + { + ########################################################### + # PREPARE MEAN STATISTICS + ########################################################### + if ( !is.null(max_distance) & !is.null(dist_interval) ) + { + print(paste("New separation bin", dist_interval, "km up to", max_distance,"km", sep="")) + lDint = seq(0, max_distance, dist_interval) + 
cDint = seq(dist_interval/2, max_distance-(dist_interval/2), dist_interval) + + ldist = cut(msSENS$dist, lDint, labels=cDint, include.lowest=FALSE) + monSENS = prepare_diag_cov(msSENS, list(CHAN=msSENS$chan, DIST=ldist, EXP=msSENS$Exp)) + monSENS$DIST = as.numeric(as.character(monSENS[,"DIST"])) + + }else{ + + # OUT: COV.DR (Desroziers), COV.HL (Hollingsworth/Lonnberg) ~ CHAN, DIST, EXP + monSENS = prepare_diag_cov(msSENS, list(CHAN=msSENS$chan, DIST=msSENS$dist, EXP=msSENS$Exp)) + } + + ########################################################### + # HORIZONTAL PLOTS + ########################################################### + #----------------------------- + # COVARIANCE + #----------------------------- + #pdf(paste("diag_cov_",isat,"_",isens,".pdf",sep="")) + + + for (ichan in plchan) + { + png(paste("diag_cov_",isat,"_",isens,"_",ichan,".png",sep="")) + + mondata = subset(monSENS, CHAN==ichan) + + attach(mondata) + + # plot.horizontal(data, x, y) + plot.horiz( mondata, c("DIST"), c("COV.DR.B", "COV.HL"), legx = c("Desroziers.B", "FG_depar"), + main=paste(csat," / ",csens, " / Channel ", ichan, sep=""), + xlab="Distance [km]", ylab=expression("Covariance [" ~ K^2 ~ "]"), + LCOL.EXP=c("black"), LLWD=c(2,2), LLTY=c(2,1), LPCH=c(18,20), INCL0=TRUE, FIRST=2) + + detach(mondata) + + } # ichan + + dev.off() + + #----------------------------- + # CORRELATIONS + #----------------------------- + #pdf(paste("diag_cor_",isat,"_",isens,".pdf",sep="")) + + for (ichan in plchan) + { + png(paste("diag_cor_",isat,"_",isens,"_",ichan,".png",sep="")) + mondata = subset(monSENS, CHAN==ichan & DIST>0) + + attach(mondata) + + # plot.horizontal(data, x, y) + plot.horiz( mondata, c("DIST"), c("COR.DR.R"), legx = c("Desroziers.R"), + main=paste(csat," / ",csens, " / Channel ", ichan, sep=""), + xlab="Distance [km]", ylab="Correlation", ylim=c(0,1), + LCOL.EXP=c("black"), LLWD=c(2), LLTY=c(2), LPCH=c(20), INCL0=TRUE) + + detach(mondata) + + } # ichan + + dev.off() + + }else{ + + print(paste("Data for ", csat,"/", csens," are not available.", sep="")) + + } # test obsnum + } # isensor +} # isat diff --git a/src/preparing_conv.R b/src/preparing_conv.R new file mode 100755 index 0000000..57a2a08 --- /dev/null +++ b/src/preparing_conv.R @@ -0,0 +1,140 @@ + +######################################### +# SETTINGS +######################################### +#january 2022. Jana. Some cleaning from sat data. +#This script can be used for gnssztd, radar, or any conv obs.And ascat. 
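+# Outline: read the ascii departures (lat, lon, an_depar, fg_depar, obsvalue) for one date-time,
+# compute great-circle distances between all observation pairs with sp::spDists, keep pairs up to
+# bin_max_dist km, bin them by separation distance, and save the per-bin sums needed later for the
+# Hollingsworth/Lonnberg and Desroziers covariance estimates to stat_conv_<date>.Rdat.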
+#Be CAREFUL: Fill the path input in read0 and also the name of the file + +# Spatial separation bin for OmG +bin_int = 20 # [km] : binning interval for pairs +bin_max_dist = 200 # [km] : maximal distance for the bin_int + +# Time separation distance +time_btw_omg = 60# [+/- min] : time_btw_omg = time between OmG/OmA in pairs +dist=20 + +######################################### +#START SCRIPT +######################################### +# Load R-packages +library(sp) + +# INPUTS +yymmddnt="XXYYMMDDNTXX" + +lDint = c(0, 1, seq(bin_int, bin_max_dist, bin_int)) +cDint = c(0, seq(bin_int, bin_max_dist, bin_int)) + +# SET VARIABLES +read="input" +save= read + +date = strptime(as.character(yymmddnt), format="%Y%m%d%H") +#$~> odbsql -T -q 'SELECT rad2deg(lat),rad2deg(lon),an_depar,fg_depar,obsvalue FROM hdr,body,desc where obstype=1 AND an_depar IS NOT NULL' +lab_input.x = c("lat","lon","an_depar","fg_depar","obsvalue") + +# READ DATA +file = paste(read,"/ccma_mon_conv_", yymmddnt, sep="") +#------------------------------------ +# RUN PREPARATION +#------------------------------------ +if (file.exists(file)){ + + print(paste("File exists: ",file)) + print("Read data") + + data = read.table(paste(file,sep=""), col.names=lab_input.x) + newdata = data.frame() + + #------------------------------------ + # START DISTANCE CALCULATION + #------------------------------------ + # Distances between satellite pixels + # matrix(a,b) : diagonal(0 at points), non-diagonal(distance btw pixels) + # Distance is determined by spDists + + latlon <- SpatialPoints(coords = data[,c("lon","lat")], proj4string=CRS("+proj=longlat +datum=WGS84")) + matdist <- spDists(latlon, latlon, longlat=T) + print(paste("----> LatLon")) + + tabdist = as.table(matdist) + nobs = length(data$obsvalue) + + colnames(tabdist) = 1:nobs + rownames(tabdist) = 1:nobs + + rdfnewdata = as.data.frame(tabdist) + names(rdfnewdata) = c("n1","n2","dist") + + dfnewdata = subset(rdfnewdata, dist<=bin_max_dist) + + print(paste("----> Data Table")) + dfnewdata$OA1 = data[dfnewdata[,"n1"], "an_depar"] + dfnewdata$OA2 = data[dfnewdata[,"n2"], "an_depar"] + dfnewdata$FG1 = data[dfnewdata[,"n1"], "fg_depar"] + dfnewdata$FG2 = data[dfnewdata[,"n2"], "fg_depar"] + + print(paste("----> newdata")) + newdata = rbind(newdata, dfnewdata) + + print(paste("----> snewdata")) + snewdata = subset(newdata, dist<=bin_max_dist) + + print(paste("----> ldist")) + ldist = cut(snewdata$dist, lDint, labels=cDint, include.lowest=TRUE) + + print(paste("----> unique")) + print(unique(ldist)) + attach(snewdata) + + ########################################### + # SAVE SCORES + ########################################### + # Save scores for each day + # --> less demaning for memory + # --> to speed-up the diagnostic + #============================================================== + # COV(XY) = E[X*Yt] - E[X]*E[Yt] + # save: + # sum[X]; sum[Yt]; sum[X*Yt]; num[X] + # + # COV(OA1,FG2) = E[OA1*FG2] - E[OA1]*E[FG2] + # OA1 = an_depar + # FG2 = fg_depar + # save: + # sum[OA1]; sum[FG2] ;sum[OA1*FG2]; num[FG2] + #============================================================== + # STD(X) = sqrt( E[X^2] - (E[X])^2 ) + # save: + # sum[FG1] ; sum[FG2] ; sum[OA1] + # sum[FG1^2] ; sum[FG2^2] ; sum[OA1^2] + #============================================================== + # COVARIANCE, CORRELATIONS + print(paste("----> StatTab")) + StatTab = aggregate(list(Asum1 = OA1), list(dist=ldist), FUN=sum ) + StatTab$FGsum1 = aggregate(list(FGSUM1 = FG1), list(dist=ldist), FUN=sum )$FGSUM1 + StatTab$FGsum2 
= aggregate(list(FGSUM2 = FG2), list(dist=ldist), FUN=sum )$FGSUM2 + StatTab$AFGsqr = aggregate(list(AFGSQR = OA1*FG2), list(dist=ldist), FUN=sum )$AFGSQR + StatTab$FGsqr = aggregate(list(FGSQR = FG1*FG2), list(dist=ldist), FUN=sum )$FGSQR + StatTab$num = aggregate(list(NOBS = FG2), list(dist=ldist), FUN=length )$NOBS + + # STD + StatTab$FGsqr1 = aggregate(list(FGSQR1 = FG1*FG1), list(dist=ldist), FUN=sum )$FGSQR1 + StatTab$FGsqr2 = aggregate(list(FGSQR2 = FG2*FG2), list(dist=ldist), FUN=sum )$FGSQR2 + StatTab$Asqr1 = aggregate(list(ASQR1 = OA1*OA1), list(dist=ldist), FUN=sum )$ASQR1 + + detach(snewdata) + + # Additional informations + StatTab$date = date + StatTab$dist = as.numeric(as.character(StatTab[,"dist"])) + + # SAVE STATISTICS + save(StatTab, file=paste(save,"/stat_conv_",yymmddnt,".Rdat",sep="")) + +}else{ + print(paste("File",file,"does not exist!!")) +} + +rm(list = ls()) diff --git a/src/preparing_sat.R b/src/preparing_sat.R new file mode 100755 index 0000000..bcb9f42 --- /dev/null +++ b/src/preparing_sat.R @@ -0,0 +1,177 @@ + +######################################### +# SETTINGS +######################################### +# Set satellite/sensor ID +#mylsat = c(223) +#mylsens = c(3) +mylsat = c(3,223,209,73) +mylsens = c(3,4,15,29) + +#mylsat = c(209,209,223,223,3,3,4,4) +#mylsens = c(3,4,3,15,3,15,3,15) + +# Spatial separation bin for OmG +bin_int = 20 # [km] : binning interval for pairs +bin_max_dist = 200 # [km] : maximal distance for the bin_int + +# Time separation distance +time_btw_omg = 60 # [+/- min] : time_btw_omg = time between OmG/OmA in pairs + +######################################### +# START SCRIPT +######################################### +# Load R-packages +library(sp) + +# INPUTS +exp="" +yymmddnt="XXYYMMDDNTXX" + +lDint = c(0, 1, seq(bin_int, bin_max_dist, bin_int)) +cDint = c(0, seq(bin_int, bin_max_dist, bin_int)) + +# SET VARIABLES +read="/scratch/ms/es/mdy/HARM_DiagTool/sat_obs" +save="/scratch/ms/es/mdy/HARM_DiagTool/sat_obs" + +date = strptime(as.character(yymmddnt), format="%Y%m%d%H") +lab_input.x = c("satid", "press", "sensor", "lat", "lon", "an_depar", "tdiff", "fg_depar", "obsvalue") + + +# READ DATA +file = paste(read,"/",exp,"/ccma_mon_", yymmddnt, sep="") + +#------------------------------------ +# RUN PREPARATION +#------------------------------------ +if (file.exists(file)){ + + rdata = read.table(paste(file,sep=""), col.names=lab_input.x) + newdata = data.frame() + + # DATA SELECTION + data = subset(rdata, abs(tdiff) <= time_btw_omg) + + #------------------------------------ + # START DISTANCE CALCULATION + #------------------------------------ + lsat = intersect(unique(data$satid), mylsat) + + for (isat in lsat){ + + print(paste("Satellite:",isat)) + + data_sat = subset(data, satid==isat) + lsens = intersect(unique(data_sat$sensor), mylsens) + + for (isens in lsens){ + + print(paste("--> Sensor:",isens)) + + data_sens = subset(data_sat, sensor==isens) + lchan = unique(data_sens$press) + + for (ichan in lchan){ + + print(paste("----> Chan:",ichan)) + + data_chan = subset(data_sens, press==ichan) + + # Distances between satellite pixels + # matrix(a,b) : diagonal(0 at points), non-diagonal(distance btw pixels) + # Distance is determined by spDists + + latlon <- SpatialPoints(coords = data_chan[,c("lon","lat")], proj4string=CRS("+proj=longlat +datum=WGS84")) + matdist <- spDists(latlon, latlon, longlat=T) + + print(paste("----> LatLon")) + + tabdist = as.table(matdist) + + nobs = length(data_chan$obsvalue) + + colnames(tabdist) = 1:nobs + 
rownames(tabdist) = 1:nobs + + rdfnewdata = as.data.frame(tabdist) + names(rdfnewdata) = c("n1","n2","dist") + + #dfnewdata = subset(rdfnewdata, dist>0 & dist<=bin_max_dist) + dfnewdata = subset(rdfnewdata, dist<=bin_max_dist) + + print(paste("----> Data Table")) + + dfnewdata$OA1 = data_chan[dfnewdata[,"n1"], "an_depar"] + dfnewdata$OA2 = data_chan[dfnewdata[,"n2"], "an_depar"] + dfnewdata$FG1 = data_chan[dfnewdata[,"n1"], "fg_depar"] + dfnewdata$FG2 = data_chan[dfnewdata[,"n2"], "fg_depar"] + + dfnewdata$sat = isat + dfnewdata$sens = isens + dfnewdata$chan = ichan + + newdata = rbind(newdata, dfnewdata) + + } + } + } + + snewdata = subset(newdata, dist<=bin_max_dist) + ldist = cut(snewdata$dist, lDint, labels=cDint, include.lowest=TRUE) + + print(unique(ldist)) + + attach(snewdata) + + ########################################### + # SAVE SCORES + ########################################### + # Save scores for each day + # --> less demaning for memory + # --> to speed-up the diagnostic + #============================================================== + # COV(XY) = E[X*Yt] - E[X]*E[Yt] + # save: + # sum[X]; sum[Yt]; sum[X*Yt]; num[X] + # + # COV(OA1,FG2) = E[OA1*FG2] - E[OA1]*E[FG2] + # OA1 = an_depar + # FG2 = fg_depar + # save: + # sum[OA1]; sum[FG2] ;sum[OA1*FG2]; num[FG2] + #============================================================== + # STD(X) = sqrt( E[X^2] - (E[X])^2 ) + # save: + # sum[FG1] ; sum[FG2] ; sum[OA1] + # sum[FG1^2] ; sum[FG2^2] ; sum[OA1^2] + #============================================================== + # COVARIANCE, CORRELATIONS + StatTab = aggregate(list(Asum1 = OA1) , list(sat=sat, sens=sens, chan=chan, dist=ldist), FUN=sum ) + StatTab$FGsum1 = aggregate(list(FGSUM1 = FG1) , list(sat=sat, sens=sens, chan=chan, dist=ldist), FUN=sum )$FGSUM1 + StatTab$FGsum2 = aggregate(list(FGSUM2 = FG2) , list(sat=sat, sens=sens, chan=chan, dist=ldist), FUN=sum )$FGSUM2 + StatTab$AFGsqr = aggregate(list(AFGSQR = OA1*FG2) , list(sat=sat, sens=sens, chan=chan, dist=ldist), FUN=sum )$AFGSQR + StatTab$FGsqr = aggregate(list(FGSQR = FG1*FG2) , list(sat=sat, sens=sens, chan=chan, dist=ldist), FUN=sum )$FGSQR + StatTab$num = aggregate(list(NOBS = FG2) , list(sat=sat, sens=sens, chan=chan, dist=ldist), FUN=length )$NOBS + + # STD + StatTab$FGsqr1 = aggregate(list(FGSQR1 = FG1*FG1) , list(sat=sat, sens=sens, chan=chan, dist=ldist), FUN=sum )$FGSQR1 + StatTab$FGsqr2 = aggregate(list(FGSQR2 = FG2*FG2) , list(sat=sat, sens=sens, chan=chan, dist=ldist), FUN=sum )$FGSQR2 + StatTab$Asqr1 = aggregate(list(ASQR1 = OA1*OA1) , list(sat=sat, sens=sens, chan=chan, dist=ldist), FUN=sum )$ASQR1 + + detach(snewdata) + + # Additional informations + StatTab$date = date + StatTab$dist = as.numeric(as.character(StatTab[,"dist"])) + + # SAVE STATISTICS + save(StatTab, file=paste(save,"/",exp,"/stat_sats_",yymmddnt,".Rdat",sep="")) + +}else{ + + print(paste("File",file,"does not exist!!")) + +} + +rm(list = ls()) diff --git a/src/read_odb.R b/src/read_odb.R new file mode 100755 index 0000000..872f9ab --- /dev/null +++ b/src/read_odb.R @@ -0,0 +1,52 @@ +read.ODB <- function(sdate, edate, cycle, dbname, rerun=FALSE) +{ + + # Time initialization + days <- seq(from=strptime(sdate, format="%Y%m%d%H"), to=strptime(edate, format="%Y%m%d%H"), by=cycle) + + # Save collected date to database + database=paste(read, "/", dbname, "_", sdate, "_", edate, ".Rdat", sep="") + + if (file.exists(database) & rerun==FALSE) + { + print(paste("Reading file from ", database, " ...")) + load(database) + + }else{ + + dfData = 
data.frame()
+
+  # READ STATISTICS
+  for ( i in seq_along(days) )
+  {
+    idays = format(days[i],"%Y%m%d%H")
+
+    for (j in seq_along(lexp))
+    {
+      iexp = lexp[j]
+
+      subDir=paste(read,"/", iexp, "/stat_sats_", idays,".Rdat", sep="")
+
+      print(paste("Read file", subDir,"..."))
+
+      if (file.exists(subDir))
+      {
+        load(subDir)
+
+        StatTab$Exp = lexp[j]
+
+        # Merging data frames
+        dfData = rbind(dfData, StatTab)
+
+      }else{
+        print(paste("File", subDir,"does not exist."))
+      }
+
+    } # iexp
+  } # idays
+
+  save(dfData, file=database)
+  } # end if database exists
+
+  return(dfData)
+}
diff --git a/src/visualisation.R b/src/visualisation.R
new file mode 100755
index 0000000..d965010
--- /dev/null
+++ b/src/visualisation.R
@@ -0,0 +1,120 @@
+set.visual <- function()
+{
+  col.exp= c("black", "red3", "blue3", "green3", "yellow3")
+
+  list.setting = list(col.exp)
+  return(list.setting)
+}
+
+
+plot.vert <- function(data, cx, cy, CNT = NULL, YREV = FALSE, LCOL.EXP = col.exp, LLWD = 2, LLTY = 1, LPCH = NA, legx = cx, ...)
+{
+
+  # For now, for one statistic only
+  #col.exp= set.visual()[[1]]
+
+  cexp = unique(data$EXP)
+  nexp = length(cexp)
+  nobs = data$NOBS
+
+  nstat = length(cx)
+
+  x = data[,cx]
+  y = unique(data[,cy])
+  nx = dim(data)[1]
+  ny = length(y)
+
+  if (YREV==TRUE){ YLIM = rev(range(1:ny)) }else{ YLIM = range(1:ny) }
+
+  par(mfrow=c(1,2), oma=c(1,1,1,1), mar=c(4,4,3,1))
+
+  plot(range(x,na.rm=TRUE), range(1:ny), ylim=YLIM, type="n", axes=F, ...)
+
+  axis(2, at=c(1:ny), labels=y, las=2, cex=.1, tck=1, lwd=.5, col="grey", lty=3)
+  #axis(1)
+  axis(1, at=pretty(as.numeric(unlist(x))), labels=chartr(".", ",", as.character(pretty(as.numeric(unlist(x))))))
+  box()
+
+  for (iexp in 1:nexp)
+  {
+    for (istat in 1:nstat)
+    {
+      lines( subset(data, data$EXP==cexp[iexp])[,cx[istat]], c(1:ny), col=LCOL.EXP[istat], lwd=LLWD[istat], lty=LLTY[istat] )
+      if (sum(!(is.na(LPCH)))>0){ points(subset(data, data$EXP==cexp[iexp])[,cx[istat]], c(1:ny), col=LCOL.EXP[iexp], pch=LPCH[istat]) }
+    }
+  }
+
+  #----------------------------
+  # PLOT NUMBER OF OBSERVATION
+  #----------------------------
+  par(mar=c(4,1,3,4))
+
+  plot(range(c(0,nobs/1000)), range(1:ny), ylim=YLIM, type="n", axes=F, xlab=expression("Number of observations [" ~ x10^3 ~ "]"), ylab="", main=paste("NT:", CNT, "UTC"))
+
+  axis(1)
+  axis(2, at=c(1:ny), labels=FALSE, las=2, cex=.1, tck=1, lwd=.5, col="grey", lty=3)
+  box()
+
+  for (iexp in 1:nexp){ lines(subset(data, data$EXP==cexp[iexp])[,"NOBS"]/1000, c(1:ny), col="blue", lty=2, lwd=2) }
+
+  legend("bottomleft", legx, col=LCOL.EXP, lty=LLTY[1:nstat], lwd=LLWD[1:nstat], pch=LPCH[1:nstat], bg="white", cex=.8)
+
+}
+
+plot.horiz <- function(data, cx, cy, CNT = NULL, YREV = FALSE, LCOL.EXP = col.exp, LLWD = 2, LLTY = 1, LPCH = NA, INCL0 = FALSE, legx = cy, FIRST=1, ...)
+{
+  # For now, for one statistic only
+  col.exp = set.visual()[[1]]
+
+  cexp = unique(data$EXP)
+  nexp = length(cexp)
+  nobs = data$NOBS
+
+  nstat = length(cy)
+
+  x = unique(data[,cx])
+  y = data[,cy]
+  nx = length(x)
+  ny = dim(data)[1]
+
+  if (INCL0==TRUE){ yran = range(c(y,0),na.rm=T) }else{ yran = range(y,na.rm=T) }
+  if (cx=="DATE"){ xran = c(as.POSIXct(min(x),tz="MEST"),as.POSIXct(max(x),tz="MEST")) ; cycl="6 hours" }else{ xran = range(x) }
+
+  par(mfrow=c(2,1), oma=c(1,1,1,1), mar=c(4,4.2,3,1))
+
+  plot(xran, yran, type="n", axes=F, ...)
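+  # Axis handling below: POSIXct tick labels when plotting against DATE, otherwise ticks at the distance-bin values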
+ + if (cx=="DATE"){ axis.POSIXct(1, at=seq(xran[1], xran[2], by=cycl), format="%m%d%H", las=2, cex.axis=.8, tck=1, lwd=.5, col="grey", lty=3) }else{ + axis(1, at = x, cex=1, tck=1, lwd=.8, col="grey", lty=3) } + + axis(2, las=2, cex=1, tck=1, lwd=1, col="grey", lty=3) + box() + + for (iexp in 1:nexp) + { + for (istat in 1:nstat) + { + lines(x[FIRST:nx], subset(data, data$EXP==cexp[iexp])[FIRST:ny,cy[istat]], col=LCOL.EXP[iexp], lwd=LLWD[istat], lty=LLTY[istat] ) + if (sum(!(is.na(LPCH)))>0){ points(x, subset(data, data$EXP==cexp[iexp])[,cy[istat]], col=LCOL.EXP[iexp], pch=LPCH[istat]) } + } + } + + abline(h=0 , lty=2, col="grey") + + legend("topright", legx, col="black", lty=LLTY[1:nstat], lwd=LLWD[1:nstat], pch=LPCH[1:nstat], bg="white", cex=.8) + + #---------------------------- + # PLOT NUMBER OF OBSERVATION + #---------------------------- + par(mar=c(4,4.2,1,1)) + + visdata = rbind(subset(data, data$EXP==cexp[1])[,"NOBS"]/100) + + # BARPLOT + barplot( visdata, + axes=F, col=LCOL.EXP[1:nexp], space=c(1,1), ylab=expression("Observation number [" ~ x10^2 ~ "]"), xlab="", + legend.text = cexp, args.legend = list(x = "topright", bg = "white"), cex.names = x ) + + axis(2, las=2, cex=1, tck=1, lwd=1, col="grey", lty=3) + box() +}
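
Note (reviewer sketch, not part of the patch): the per-bin sums that preparing_conv.R and preparing_sat.R store in StatTab feed the covariance estimates in prepare_diag_cov() through COV(X,Y) = E[XY] - E[X]E[Y]. The minimal R illustration below uses synthetic departures; every name in it (n, fg1, fg2, oa1) is illustrative and not taken from the patch.

```r
# One distance bin with synthetic departures; mirrors the sums stored per bin in StatTab
set.seed(1)
n   <- 1000                     # number of observation pairs in the bin
fg1 <- rnorm(n, sd = 1.2)       # fg_depar at the first member of each pair
fg2 <- rnorm(n, sd = 1.2)       # fg_depar at the second member
oa1 <- rnorm(n, sd = 0.8)       # an_depar at the first member

# Per-bin sums (StatTab columns: FGsum1, FGsum2, Asum1, FGsqr, AFGsqr, num)
FGsum1 <- sum(fg1); FGsum2 <- sum(fg2); Asum1 <- sum(oa1)
FGsqr  <- sum(fg1 * fg2); AFGsqr <- sum(oa1 * fg2); num <- n

# COV(X,Y) = E[XY] - E[X]E[Y], as in prepare_diag_cov()
cov_hl   <- FGsqr  / num - (FGsum1 * FGsum2) / num^2   # Hollingsworth/Lonnberg (fg_depar pairs)
cov_dr_r <- AFGsqr / num - (Asum1  * FGsum2) / num^2   # Desroziers R term (an_depar x fg_depar)
cov_dr_b <- cov_hl - cov_dr_r                          # Desroziers B term
print(c(COV.HL = cov_hl, COV.DR.R = cov_dr_r, COV.DR.B = cov_dr_b))
```

With independent synthetic departures all three estimates come out close to zero; on real pairs it is the dependence of these quantities on separation distance that monitoring_conv.R and monitoring_sat.R plot.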