diff --git a/CRAN-SUBMISSION b/CRAN-SUBMISSION new file mode 100644 index 00000000..aa00d4f1 --- /dev/null +++ b/CRAN-SUBMISSION @@ -0,0 +1,3 @@ +Version: 1.3.4 +Date: 2025-07-30 07:50:37 UTC +SHA: 060a53dcc0b87a3b860f15860fd28dcb37dff29a diff --git a/DESCRIPTION b/DESCRIPTION index 14a8d3a7..ee17d83b 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,6 +1,6 @@ Package: REddyProc Type: Package -Version: 1.3.3 +Version: 1.3.4 Title: Post Processing of (Half-)Hourly Eddy-Covariance Measurements Authors@R: c( person( "Department for Biogeochemical Integration at MPI-BGC, Jena, Germany", role=c("cph"), email="REddyProc-help@bgc-jena.mpg.de" ) ,person( "Thomas", "Wutzler", role=c("aut","cre"), email="twutz@bgc-jena.mpg.de" ) @@ -31,7 +31,7 @@ Description: Standard and extensible Eddy-Covariance data post-processing based and day-time based approaches (Lasslop et al. (2010) ). URL: https://www.bgc-jena.mpg.de/bgi/index.php/Services/REddyProcWeb, - https://github.com/bgctw/REddyProc + https://github.com/EarthyScience/REddyProc License: GPL (>= 2) Encoding: UTF-8 LazyData: true @@ -67,4 +67,4 @@ Collate: 'logitnorm.R' 'variableNames.R' 'zzzDebugCode.R' -RoxygenNote: 7.3.0 +RoxygenNote: 7.3.1 diff --git a/NEWS.md b/NEWS.md index b286f908..d4e5244d 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,12 @@ +# REddyProc 1.3.4 +- Handles edge-case in daytime-partitioning where all temperature + sensitivities are estimated the same numerical value +- Updated documentation + - references to new github home: EarthySciences + - identify package in foreign package links + - document plotting diurnal cycle (sPlotDiurnalCycleM) +- Fixed unit of y-label in uStarThreshold plots + # REddyProc 1.3.3 - Fixed bug in plotting daily sums, where default scale (gC / day) was of by a factor nRecInDays. diff --git a/R/FileHandlingFormats.R b/R/FileHandlingFormats.R index ab7ea593..060b2c24 100644 --- a/R/FileHandlingFormats.R +++ b/R/FileHandlingFormats.R @@ -55,9 +55,9 @@ fLoadEuroFlux16 <- function( #' @param file_path scalar string: the path to the csv file #' @param additional_columns character vector of columns to #' read in addition of standard columns of \code{\link{read_from_fluxnet15}}. -#' Can be a character vector or a object return by \code{\link{cols}} +#' Can be a character vector or a object return by \code{\link[readr]{cols}} #' @param colname_NEE name (scalar string) of column that reports NEE observations -#' @param ... further arguments to \code{\link{read_csv}} +#' @param ... further arguments to \code{\link[readr]{read_csv}} #' #' @examples #' ds_fn15 <- Example_DETha98 %>% @@ -266,7 +266,7 @@ extract_FN15 <- function(EProc = .self, is_export_nonfilled = TRUE, keep_other_c #' it to \code{\link{read_from_ameriflux22}}. #' #' @param file_path scalar string: the path to the csv file -#' @param ... further arguments to \code{\link{read_csv}} +#' @param ... further arguments to \code{\link[readr]{read_csv}} #' #' @seealso \code{\link{read_from_ameriflux22}} \code{\link{help_export}} #' diff --git a/cran-comments.md b/cran-comments.md index e0a51098..fc9dd8c9 100644 --- a/cran-comments.md +++ b/cran-comments.md @@ -1,26 +1,14 @@ ## Notes Dear CRAN maintainers, -following your email that noted empty documentation for some of the function -arguments, we submit a new version of the REddyProc package. +following your email that required updating of foreign package links +in documentation, we submit a new version of the REddyProc package. 
-In addition to fixing the empty comments, we include a new features: -- exporting the results in an additional format requested by a users -- an experimental implementation of the gap-filling procedure by Vekuri et. al 2023 +In addition to fixing the links, some plotting and documetation was improved, +and some edge-cases are dealt with in processing. +Updated the URLs to the new github-organization home. ## Test environments -* local Linux-Mint, R 4.3.2 +* local Linux-Mint, R 4.3.3 * Github actions, current Ubuntu -* (win_builder was not reachable by curl from devtools::check_win_devel()) -* r_hub: all default platforms - -## R CMD check results -No warnings, nor errors. - -1 Note: -"checking for detritus in the temp directory ... NOTE - Found the following files/directories: - ‘REddyProcExamples’" -REddyProcExamples is there on purpose: It is created on a first call to -getExamplePath(exampleId) and then reused by several tests. - +* r_hub2: linux, mac, windows diff --git a/inst/WORDLIST b/inst/WORDLIST index 1d5788cd..589e84ed 100644 --- a/inst/WORDLIST +++ b/inst/WORDLIST @@ -1,9 +1,14 @@ +1001L +16cm +6cm +A1 AMM AVP Ameriflux Aubinet BGC Baldocchi +Barr13 Bayley BerkeleyJulianDateToPOSIXct Berkley @@ -11,35 +16,37 @@ Biogeochemical Biogeosciences CARBODATA CMD +CO2 CPT ChangePointDetection DEGebExample -DETha +DETha98 DJF DateTime DateTimes Dez DoY +E0 +EarthyScience EddyData Esat -Evapotranspiration ExtRad FC FLUXNET -FP -FW +FW1 +FW2 Falge FillAll FluxVar Fluxnet -Fw +Fluxnet15 +Fw2Binned GPP Gapfilling Gebesee GeoFunctions -JA +H20 JJA -Jena JulianDate Knauer LRC @@ -68,6 +75,8 @@ Papale PotRad Provding QFFluxVar +R4 +R5 README REco REddyProcNCDF @@ -81,7 +90,7 @@ Rg Rmd SVP Seasonfactor -Sigmoid +T0 TRangeDay TRef TW @@ -89,19 +98,21 @@ Tair Tharandt TimeDate TminOftheDay -ToAmerifluxVariableNameMapping Tsoil +U05 +U95 USTAR UStar Ust Ustar UstarScens +UstarScens’ VMF VPD +VPD0 VPDfromDew Vapour -VariableNameMapping -Vekuri +Vekuri23 VisRad YYYY YYYYMMDDHHMM @@ -109,17 +120,18 @@ YYYYMMddhhmm Zieba aggUncertainty aggregationMode -al antje +base10 +beta0 bg bgc -bgctw bigleaf bugfix calendaryear changepoint circ colour +coloured computeCost computeGPPGradient computeLRCGradient @@ -132,7 +144,6 @@ ctrlUstarEst cz czechglobe daymin -de degC degK dewpoint @@ -142,10 +153,8 @@ downweighs dplyr eSat eop -et eu europe -evapotranspiration fCalcAVPfromVMFandPress fCalcETfromLE fCalcExtRadiation @@ -158,20 +167,21 @@ fConvertCtoK fConvertGlobalToVisible fConvertKtoC fConvertTimeToPosix -fConvertVisibleWm +fConvertVisibleWm2toPhotons fKeepColumnAttributes -fLoadEuroFlux +fLoadEuroFlux16 fLoadFluxNCIntoDataframe -fLoadFluxnet +fLoadFluxnet15 fSplitDateTime fWriteDataframeToFile -fWriteFrench +fWriteFrench23 filterLongRuns filterLongRunsInVector fitLRC fixVPD fluxdata fluxnet +fluxnet2015 fmeth fnum formular @@ -184,10 +194,10 @@ gC gapfilled gapfilling geo -getAmerifluxToBGC -getBGC +getAmerifluxToBGC05VariableNameMapping +getBGC05ToAmerifluxVariableNameMapping getExamplePath -getFilledExampleDETha +getFilledExampleDETha98Data getOptimizedParameterPositions getParameterInitials getParameterNames @@ -204,7 +214,6 @@ https iOpt igut invlogit -irradiance isFilterMeteoQualityFlag isParameterInBounds isSplitDatetime @@ -215,6 +224,7 @@ jena kPa ksickel logitnorm +m2 maximise mbar md @@ -227,7 +237,6 @@ mmiglia mmol modelled moffat -mol mreichstein mumol nAggSeason @@ -259,7 +268,6 @@ ppfd pre predictGPP predictLRC -preprocessed pscore pvWave qc @@ -272,6 +280,7 @@ renameVariablesInDataframe 
replaceMissingSdNEEParms rlang rstudio +s41559 sApplyUStarScen sCalcPotRadiation sDATA @@ -305,6 +314,7 @@ sMRFluxPartitionUStarScens sPlotDailySums sPlotDailySumsY sPlotDiurnalCycle +sPlotDiurnalCycleM sPlotFingerprint sPlotFingerprintY sPlotHHFluxes @@ -318,25 +328,22 @@ sTKFluxPartition sTKFluxPartitionUStarScens sTemp sUSTAR -sd sdBeta sdE sdNEE seasonFactor seasonYear -sigmoid sigut sitename solartime stdDev strptime -subclasses tempdir testthat +theta0 thetaPrior tidyverse tmp -toPhotons twutz uStar uStarAggr @@ -349,7 +356,7 @@ uStarSeasonEst uStarTh uStarThreshold umol -umolCO +umolCO2 upscaling usControlUstarEst usControlUstarSubsetting @@ -358,7 +365,8 @@ usCreateSeasonFactorMonthWithinYear usCreateSeasonFactorYday usCreateSeasonFactorYdayYear usEstUstarThreshold -usEstUstarThresholdSingleFw +usEstUstarThresholdSingleFw1Binned +usEstUstarThresholdSingleFw2Binned usGetAnnualSeasonUStarMap usGetAnnualSeasonUStarMappingFromDistributionResult usGetSeasonalSeasonUStarMap @@ -374,7 +382,6 @@ varnames webservice whichValueGreaterEqualC withUstar -writeable www xlab yday @@ -382,3 +389,4 @@ yearDay yearday ylab Šigut +μ diff --git a/man/fKeepColumnAttributes.Rd b/man/fKeepColumnAttributes.Rd index d11d0b1d..a7f69b12 100644 --- a/man/fKeepColumnAttributes.Rd +++ b/man/fKeepColumnAttributes.Rd @@ -1,20 +1,20 @@ -\name{fKeepColumnAttributes} -\alias{fKeepColumnAttributes} -\title{fKeepColumnAttributes} -\description{Copy column attributes after processing a data.frame} -\usage{fKeepColumnAttributes(x, FUN, ...)} -\arguments{ - \item{x}{data.frame to be processed} - \item{FUN}{\code{function(x::data.frame, ...) -> data.frame} to be applied} - \item{\dots}{additional arguments to FUN} -} -\details{The columns of the resulting data.frame that match a column name in x -will get the same attributes as in x.} -\value{result of \code{function(x, ...)} with column attributes preserved} - -\author{Department for Biogeochemical Integration at MPI-BGC, Jena, Germany [cph], Thomas Wutzler [aut, cre], Markus Reichstein [aut], Antje Maria Moffat [aut, trl], Olaf Menzer [ctb], Mirco Migliavacca [aut], Kerstin Sickel [ctb, trl], Ladislav igut [ctb]} - - - - - +\name{fKeepColumnAttributes} +\alias{fKeepColumnAttributes} +\title{fKeepColumnAttributes} +\description{Copy column attributes after processing a data.frame} +\usage{fKeepColumnAttributes(x, FUN, ...)} +\arguments{ + \item{x}{data.frame to be processed} + \item{FUN}{\code{function(x::data.frame, ...) -> data.frame} to be applied} + \item{\dots}{additional arguments to FUN} +} +\details{The columns of the resulting data.frame that match a column name in x +will get the same attributes as in x.} +\value{result of \code{function(x, ...)} with column attributes preserved} + +\author{Department for Biogeochemical Integration at MPI-BGC, Jena, Germany [cph], Thomas Wutzler [aut, cre], Markus Reichstein [aut], Antje Maria Moffat [aut, trl], Olaf Menzer [ctb], Mirco Migliavacca [aut], Kerstin Sickel [ctb, trl], Ladislav igut [ctb]} + + + + + diff --git a/man/fLoadAmeriflux22.Rd b/man/fLoadAmeriflux22.Rd index b6e012d9..3e7bb20a 100644 --- a/man/fLoadAmeriflux22.Rd +++ b/man/fLoadAmeriflux22.Rd @@ -9,7 +9,7 @@ fLoadAmeriflux22(file_path, ...) 
\arguments{ \item{file_path}{scalar string: the path to the csv file} -\item{...}{further arguments to \code{\link{read_csv}}} +\item{...}{further arguments to \code{\link[readr]{read_csv}}} } \description{ Reads Variables from file into data.frame from file and passes diff --git a/man/fLoadFluxnet15.Rd b/man/fLoadFluxnet15.Rd index df73ff46..551310bc 100644 --- a/man/fLoadFluxnet15.Rd +++ b/man/fLoadFluxnet15.Rd @@ -16,11 +16,11 @@ fLoadFluxnet15( \item{additional_columns}{character vector of columns to read in addition of standard columns of \code{\link{read_from_fluxnet15}}. -Can be a character vector or a object return by \code{\link{cols}}} +Can be a character vector or a object return by \code{\link[readr]{cols}}} \item{colname_NEE}{name (scalar string) of column that reports NEE observations} -\item{...}{further arguments to \code{\link{read_csv}}} +\item{...}{further arguments to \code{\link[readr]{read_csv}}} } \description{ Assigns default units to the columns and keeps variable name attributes diff --git a/man/fWriteFrench23.Rd b/man/fWriteFrench23.Rd index a02e297d..646b3678 100644 --- a/man/fWriteFrench23.Rd +++ b/man/fWriteFrench23.Rd @@ -1,27 +1,27 @@ -\name{fWriteFrench23} -\alias{fWriteFrench23} -\title{fWriteFrench23} -\description{Write data frame to ASCII comma-separated text file} -\usage{fWriteFrench23(data, filename, isSplitDatetime = FALSE, - digits = 5)} -\arguments{ - \item{data}{Data frame to be exported, with unit attributes attached to columns} - \item{filename}{(string) name (including path) of the output file} - \item{isSplitDatetime}{set to TRUE to create columns Year, DoY and Hour} - \item{digits}{(integer) number of digits, i.e. precision, for numeric values} -} -\details{Writes data.frame as comma-separated file after two header rows. - -The first header row contains the column names, and the second units. - -Spaces in column names are replaced by underscore and \% is replaced by -the word percent.} - - -\author{TW -Department for Biogeochemical Integration at MPI-BGC, Jena, Germany [cph], Thomas Wutzler [aut, cre], Markus Reichstein [aut], Antje Maria Moffat [aut, trl], Olaf Menzer [ctb], Mirco Migliavacca [aut], Kerstin Sickel [ctb, trl], Ladislav igut [ctb]} - - - -\seealso{\code{\link{fWriteDataframeToFile}}} - +\name{fWriteFrench23} +\alias{fWriteFrench23} +\title{fWriteFrench23} +\description{Write data frame to ASCII comma-separated text file} +\usage{fWriteFrench23(data, filename, isSplitDatetime = FALSE, + digits = 5)} +\arguments{ + \item{data}{Data frame to be exported, with unit attributes attached to columns} + \item{filename}{(string) name (including path) of the output file} + \item{isSplitDatetime}{set to TRUE to create columns Year, DoY and Hour} + \item{digits}{(integer) number of digits, i.e. precision, for numeric values} +} +\details{Writes data.frame as comma-separated file after two header rows. + +The first header row contains the column names, and the second units. 
+ +Spaces in column names are replaced by underscore and \% is replaced by +the word percent.} + + +\author{TW +Department for Biogeochemical Integration at MPI-BGC, Jena, Germany [cph], Thomas Wutzler [aut, cre], Markus Reichstein [aut], Antje Maria Moffat [aut, trl], Olaf Menzer [ctb], Mirco Migliavacca [aut], Kerstin Sickel [ctb, trl], Ladislav igut [ctb]} + + + +\seealso{\code{\link{fWriteDataframeToFile}}} + diff --git a/man/filter_entire_days.Rd b/man/filter_entire_days.Rd index bd22ad0b..dc71dfc9 100644 --- a/man/filter_entire_days.Rd +++ b/man/filter_entire_days.Rd @@ -1,19 +1,19 @@ -\name{filter_entire_days} -\alias{filter_entire_days} -\title{filter entire days} -\description{Omit records before the start of the first full day and the end of the last full day} -\usage{filter_entire_days(df, col_time = "DateTime")} -\arguments{ - \item{df}{data.frame with column col_time of equidistant} - \item{col_time}{Name of the column with the equidistant timesteps} -} -\details{Column attributes such as 'units' are kept.} - - -\author{Department for Biogeochemical Integration at MPI-BGC, Jena, Germany [cph], Thomas Wutzler [aut, cre], Markus Reichstein [aut], Antje Maria Moffat [aut, trl], Olaf Menzer [ctb], Mirco Migliavacca [aut], Kerstin Sickel [ctb, trl], Ladislav igut [ctb]} - - - -\seealso{\code{\link{help_DateTimes}}, \code{\link{get_day_boundaries}} -\code{\link{fKeepColumnAttributes}}} - +\name{filter_entire_days} +\alias{filter_entire_days} +\title{filter entire days} +\description{Omit records before the start of the first full day and the end of the last full day} +\usage{filter_entire_days(df, col_time = "DateTime")} +\arguments{ + \item{df}{data.frame with column col_time of equidistant} + \item{col_time}{Name of the column with the equidistant timesteps} +} +\details{Column attributes such as 'units' are kept.} + + +\author{Department for Biogeochemical Integration at MPI-BGC, Jena, Germany [cph], Thomas Wutzler [aut, cre], Markus Reichstein [aut], Antje Maria Moffat [aut, trl], Olaf Menzer [ctb], Mirco Migliavacca [aut], Kerstin Sickel [ctb, trl], Ladislav igut [ctb]} + + + +\seealso{\code{\link{help_DateTimes}}, \code{\link{get_day_boundaries}} +\code{\link{fKeepColumnAttributes}}} + diff --git a/man/help_DateTimes.Rd b/man/help_DateTimes.Rd index e5091bd1..cfc14c32 100644 --- a/man/help_DateTimes.Rd +++ b/man/help_DateTimes.Rd @@ -1,31 +1,31 @@ -\name{help_DateTimes} -\alias{help_DateTimes} -\title{help DateTimes} -\description{Overview of functions helping with Timestamps and Dates} -\usage{help_DateTimes()} -\details{Functions helping with preparing and subsetting timestamps: -\itemize{ -\item Convert different time formats to POSIX: -\code{\link{fConvertTimeToPosix}} -\item Convert JulianDate format used in Berkeley release to POSIXct: -\code{\link{BerkeleyJulianDateToPOSIXct}} -\item Return the first timestamp at (end_of_first_record_in_day) and the -last at midnight: -\code{\link{get_day_boundaries}} -\item Omit records before the start of the first full day and the end of -the last full day: -\code{\link{filter_entire_days}} -\item Subset data.frame to given years respecting the end-of-period -convention: \code{\link{filter_years_eop}} -} - -Back to \link{REddyProc-package}.} - - -\author{TW -Department for Biogeochemical Integration at MPI-BGC, Jena, Germany [cph], Thomas Wutzler [aut, cre], Markus Reichstein [aut], Antje Maria Moffat [aut, trl], Olaf Menzer [ctb], Mirco Migliavacca [aut], Kerstin Sickel [ctb, trl], Ladislav igut [ctb]} - - - - - 
+\name{help_DateTimes} +\alias{help_DateTimes} +\title{help DateTimes} +\description{Overview of functions helping with Timestamps and Dates} +\usage{help_DateTimes()} +\details{Functions helping with preparing and subsetting timestamps: +\itemize{ +\item Convert different time formats to POSIX: +\code{\link{fConvertTimeToPosix}} +\item Convert JulianDate format used in Berkeley release to POSIXct: +\code{\link{BerkeleyJulianDateToPOSIXct}} +\item Return the first timestamp at (end_of_first_record_in_day) and the +last at midnight: +\code{\link{get_day_boundaries}} +\item Omit records before the start of the first full day and the end of +the last full day: +\code{\link{filter_entire_days}} +\item Subset data.frame to given years respecting the end-of-period +convention: \code{\link{filter_years_eop}} +} + +Back to \link{REddyProc-package}.} + + +\author{TW +Department for Biogeochemical Integration at MPI-BGC, Jena, Germany [cph], Thomas Wutzler [aut, cre], Markus Reichstein [aut], Antje Maria Moffat [aut, trl], Olaf Menzer [ctb], Mirco Migliavacca [aut], Kerstin Sickel [ctb, trl], Ladislav igut [ctb]} + + + + + diff --git a/man/help_export.Rd b/man/help_export.Rd index a8fc3175..de0a6920 100644 --- a/man/help_export.Rd +++ b/man/help_export.Rd @@ -1,56 +1,56 @@ -\name{help_export} -\alias{help_export} -\title{help export} -\description{Overview of functions helping with exporting Data and Results} -\usage{help_export()} -\details{Functions helping with exporting data -\itemize{ -\item Export Input data from REddyProc class: -\code{\link{sEddyProc_sExportData}} -\item Export Computed results from REddyProc class: -\code{\link{sEddyProc_sExportResults}} -\item Write data frame to ASCII tab-separated text file: -\code{\link{fWriteDataframeToFile}} -\item Write data frame to ASCII comma-separated text file with units in header row: -\code{\link{fWriteFrench23}} -} - -Writing a file that can be supplied to the REddyProc webservice at -MPI-BGC Jena can be done by exporting data from REddyProc class \code{EProc}. - -\preformatted{ -df <- EProc$sExportData() -fWriteDataframeToFile(df, "myfilename.txt", isSplitDatetime = TRUE) -} - -For preparing minimal working examples also consider -\itemize{ -\item Omit records before the start of the first full day and the end of -the last full day: -\code{df <- \link{filter_entire_days}(df)} -\item Subset data.frame to one or two years: -\code{df <- \link{filter_years_eop}(df, c(1998))} -} - -There are several functions that import from file of different formats. 
-\itemize{ -\item Load text file with one header and one unit row into data frame: -\code{\link{fLoadTXTIntoDataframe}} -\item Reads sequence of annual files in the format of Europe-fluxdata 2016: -\code{\link{fLoadEuroFlux16}} -\item Read basic variables from Ameriflux standard (as of 2022) files: -\code{\link{fLoadAmeriflux22}} -\item Read NetCDF files -> moved to separate package REddyProcNCDF -(https://github.com/bgctw/REddyProcNCDF) -} - -Back to \link{REddyProc-package}.} - - -\author{TW -Department for Biogeochemical Integration at MPI-BGC, Jena, Germany [cph], Thomas Wutzler [aut, cre], Markus Reichstein [aut], Antje Maria Moffat [aut, trl], Olaf Menzer [ctb], Mirco Migliavacca [aut], Kerstin Sickel [ctb, trl], Ladislav igut [ctb]} - - - - - +\name{help_export} +\alias{help_export} +\title{help export} +\description{Overview of functions helping with exporting Data and Results} +\usage{help_export()} +\details{Functions helping with exporting data +\itemize{ +\item Export Input data from REddyProc class: +\code{\link{sEddyProc_sExportData}} +\item Export Computed results from REddyProc class: +\code{\link{sEddyProc_sExportResults}} +\item Write data frame to ASCII tab-separated text file: +\code{\link{fWriteDataframeToFile}} +\item Write data frame to ASCII comma-separated text file with units in header row: +\code{\link{fWriteFrench23}} +} + +Writing a file that can be supplied to the REddyProc webservice at +MPI-BGC Jena can be done by exporting data from REddyProc class \code{EProc}. + +\preformatted{ +df <- EProc$sExportData() +fWriteDataframeToFile(df, "myfilename.txt", isSplitDatetime = TRUE) +} + +For preparing minimal working examples also consider +\itemize{ +\item Omit records before the start of the first full day and the end of +the last full day: +\code{df <- \link{filter_entire_days}(df)} +\item Subset data.frame to one or two years: +\code{df <- \link{filter_years_eop}(df, c(1998))} +} + +There are several functions that import from file of different formats. +\itemize{ +\item Load text file with one header and one unit row into data frame: +\code{\link{fLoadTXTIntoDataframe}} +\item Reads sequence of annual files in the format of Europe-fluxdata 2016: +\code{\link{fLoadEuroFlux16}} +\item Read basic variables from Ameriflux standard (as of 2022) files: +\code{\link{fLoadAmeriflux22}} +\item Read NetCDF files -> moved to separate package REddyProcNCDF +(https://github.com/EarthyScience/REddyProcNCDF) +} + +Back to \link{REddyProc-package}.} + + +\author{TW +Department for Biogeochemical Integration at MPI-BGC, Jena, Germany [cph], Thomas Wutzler [aut, cre], Markus Reichstein [aut], Antje Maria Moffat [aut, trl], Olaf Menzer [ctb], Mirco Migliavacca [aut], Kerstin Sickel [ctb, trl], Ladislav igut [ctb]} + + + + + diff --git a/man/sEddyProc_sPlotDiurnalCycleM.Rd b/man/sEddyProc_sPlotDiurnalCycleM.Rd index b8ef26ac..cc42d17a 100644 --- a/man/sEddyProc_sPlotDiurnalCycleM.Rd +++ b/man/sEddyProc_sPlotDiurnalCycleM.Rd @@ -1,39 +1,39 @@ -\name{sEddyProc_sPlotDiurnalCycleM} -\alias{sEddyProc_sPlotDiurnalCycleM} -\title{sEddyProc sPlotDiurnalCycleM} -\description{The diurnal cycles of a single month are plotted to the current device, -scaled to all data. 
Each year is plotted as a different (coloured) line.} -\usage{sEddyProc_sPlotDiurnalCycleM(Var = Var.s, - QFVar = if (!missing(QFVar.s)) QFVar.s else "none", - QFValue = if (!missing(QFValue.n)) QFValue.n else NA_real_, - Month = Month.i, Legend = if (!missing(Legend.b)) Legend.b else T, - data = cbind(sDATA, sTEMP), dts = sINFO$DTS, - Var.s, QFVar.s = "none", QFValue.n = NA_real_, - Month.i, Legend.b = T)} -\arguments{ - \item{Var}{Variable to plot} - \item{QFVar}{Quality -flag of variable to be filled} - \item{QFValue}{Value -of quality flag for data to plot} - \item{Month}{Month to plot} - \item{Legend}{Plot -with legend} - \item{data}{data.frame with variables to plot} - \item{dts}{numeric integer} - \item{Var.s}{Variable to plot} - \item{QFVar.s}{Quality flag of variable to be filled} - \item{QFValue.n}{Value of quality flag for data to plot} - \item{Month.i}{Month to plot} - \item{Legend.b}{Plot with legend} -} - - - -\author{AMM, KS -Department for Biogeochemical Integration at MPI-BGC, Jena, Germany [cph], Thomas Wutzler [aut, cre], Markus Reichstein [aut], Antje Maria Moffat [aut, trl], Olaf Menzer [ctb], Mirco Migliavacca [aut], Kerstin Sickel [ctb, trl], Ladislav igut [ctb]} - - - - - +\name{sEddyProc_sPlotDiurnalCycleM} +\alias{sEddyProc_sPlotDiurnalCycleM} +\title{sEddyProc sPlotDiurnalCycleM} +\description{The diurnal cycles of a single month are plotted to the current device, +scaled to all data. Each year is plotted as a different (coloured) line.} +\usage{sEddyProc_sPlotDiurnalCycleM(Var = Var.s, + QFVar = if (!missing(QFVar.s)) QFVar.s else "none", + QFValue = if (!missing(QFValue.n)) QFValue.n else NA_real_, + Month = Month.i, Legend = if (!missing(Legend.b)) Legend.b else T, + data = cbind(sDATA, sTEMP), dts = sINFO$DTS, + Var.s, QFVar.s = "none", QFValue.n = NA_real_, + Month.i, Legend.b = T)} +\arguments{ + \item{Var}{Variable to plot} + \item{QFVar}{Quality +flag of variable to be filled} + \item{QFValue}{Value +of quality flag for data to plot} + \item{Month}{Month to plot} + \item{Legend}{Plot +with legend} + \item{data}{data.frame with variables to plot} + \item{dts}{numeric integer} + \item{Var.s}{Variable to plot} + \item{QFVar.s}{Quality flag of variable to be filled} + \item{QFValue.n}{Value of quality flag for data to plot} + \item{Month.i}{Month to plot} + \item{Legend.b}{Plot with legend} +} + + + +\author{AMM, KS +Department for Biogeochemical Integration at MPI-BGC, Jena, Germany [cph], Thomas Wutzler [aut, cre], Markus Reichstein [aut], Antje Maria Moffat [aut, trl], Olaf Menzer [ctb], Mirco Migliavacca [aut], Kerstin Sickel [ctb, trl], Ladislav igut [ctb]} + + + + + diff --git a/vignettes/DEGebExample.md b/vignettes/DEGebExample.md index d60f48fc..aa37a780 100644 --- a/vignettes/DEGebExample.md +++ b/vignettes/DEGebExample.md @@ -1,5 +1,4 @@ -Crop example demonstrating multiple years and user defined uStar-Seasons -======================================================================== +# Crop example demonstrating multiple years and user defined uStar-Seasons #isDevelopMode <- TRUE if (!exists("isDevelopMode")) library(REddyProc) @@ -8,7 +7,7 @@ Crop example demonstrating multiple years and user defined uStar-Seasons First, the data is loaded. This example uses data that has been downloaded from and preprocessed by `fLoadEuroFlux16`, where the DateTime Column has been created, and the -variables renamed to the BGC-convention (e.g. Tair instead of Ta). +variables renamed to the BGC-convention (e.g. Tair instead of Ta). 
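This preprocessing is not repeated in the vignette; a minimal sketch of what it
could look like for similar annual Europe-fluxdata files is given below. The
directory path is hypothetical and the argument names of `fLoadEuroFlux16` are
assumptions, so consult `?fLoadEuroFlux16` before use. The example itself
continues with the bundled `DEGebExample` dataset.

    # sketch only (not run): read a site's annual Europe-fluxdata 2016 files;
    # the directory and argument names here are assumptions - see ?fLoadEuroFlux16
    # DEGebRaw <- fLoadEuroFlux16("DE-Geb", dirName = "path/to/eurofluxfiles")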
data(DEGebExample) summary(DEGebExample) @@ -37,8 +36,7 @@ and rH. EProc <- sEddyProc$new('DE-Geb', DEGebExample, c('NEE','Rg','Tair','VPD', 'Ustar')) EProc$sSetLocationInfo(LatDeg = 51.1, LongDeg = 10.9, TimeZoneHour = 1) #Location of Gebesee -Defining Seasons with different surface friction conditions ------------------------------------------------------------ +## Defining Seasons with different surface friction conditions The site is a crop site. The harvesting times are visible as sharp edges in the plots of NEE. @@ -126,8 +124,7 @@ threshold for each of the defined seasons, by calling EProc$sMDSGapFillAfterUstar('NEE', FillAll = FALSE, isVerbose = FALSE) -Uncertainty introduced by the uStar Threshold estimate: bootstrap ------------------------------------------------------------------ +## Uncertainty introduced by the uStar Threshold estimate: bootstrap With a lower estimate of uStar threshold, more records with lower NEE are kept in the dataset instead of marked as gaps. Therefore annual @@ -152,46 +149,18 @@ uStar Threshold. #)) #EProc$sSetUstarScenarios(uStarScens) EProc$useSeaonsalUStarThresholds() - - ## - ## Enter a frame number, or 0 to exit - ## - ## 1: twDev::genVigs("DEGebExample") - ## 2: packageTools.R#300: rmarkdown::render(fNameI, output_format = output_f - ## 3: render(input = input, output_format = format, output_file = NULL, outp - ## 4: knitr::knit(knit_input, knit_output, envir = envir, quiet = quiet, enc - ## 5: process_file(text, output) - ## 6: withCallingHandlers(if (tangle) process_tangle(group) else process_gro - ## 7: process_group(group) - ## 8: process_group.block(group) - ## 9: call_block(x) - ## 10: block_exec(params) - ## 11: in_dir(input_dir(), evaluate(code, envir = env, new_device = FALSE, ke - ## 12: evaluate(code, envir = env, new_device = FALSE, keep_warning = !isFALS - ## 13: evaluate::evaluate(...) - ## 14: evaluate_call(expr, parsed$src[[i]], envir = envir, enclos = enclos, d - ## 15: timing_fn(handle(ev <- withCallingHandlers(withVisible(eval(expr, envi - ## 16: handle(ev <- withCallingHandlers(withVisible(eval(expr, envir, enclos) - ## 17: withCallingHandlers(withVisible(eval(expr, envir, enclos)), warning = - ## 18: withVisible(eval(expr, envir, enclos)) - ## 19: eval(expr, envir, enclos) - ## 20: eval(expr, envir, enclos) - ## 21: EProc$useSeaonsalUStarThresholds() - ## - ## Enter an item from the menu, or 0 to exit - EProc$sGetUstarScenarios() ## season uStar U10 U90 - ## 5 2004001 0.13500000 0.10174000 0.16030000 - ## 6 2004070 0.12037500 0.09632500 0.12268889 - ## 7 2004210 0.08925000 0.10499293 0.19000727 - ## 8 2004320 0.16041176 0.09918212 0.17438050 - ## 9 2005070 0.12533333 0.11512857 0.17554643 - ## 10 2005180 0.13473214 0.08448929 0.14519000 - ## 11 2005320 0.04842361 0.05055000 0.08105208 - ## 12 2006120 0.06966667 0.06480909 0.08695350 - ## 13 2006305 0.25094444 0.10060806 0.26186553 + ## 5 2004001 0.13500000 0.11874009 0.15412045 + ## 6 2004070 0.12037500 0.08538636 0.12177250 + ## 7 2004210 0.08925000 0.09386250 0.16746306 + ## 8 2004320 0.16041176 0.09177434 0.17215564 + ## 9 2005070 0.12533333 0.10400556 0.16937143 + ## 10 2005180 0.13473214 0.08419405 0.13203875 + ## 11 2005320 0.04842361 0.04905486 0.10056250 + ## 12 2006120 0.06966667 0.06113059 0.09647238 + ## 13 2006305 0.25094444 0.12240556 0.25735196 After setting the uStar scenarios, they will be used by method `sMDSGapFillUStarScens`. @@ -202,7 +171,7 @@ Additional output columns are produced for each uStar quantile. 
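For reference, a minimal sketch of the gap-filling call that produces these
scenario-specific columns; the vignette may pass further arguments
(e.g. `FillAll = FALSE`) to reduce run time:

    # gap-fill NEE once per u* threshold scenario set above
    EProc$sMDSGapFillUStarScens('NEE')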
grep("^NEE.*_f$", colnames( EProc$sExportResults()), value = TRUE ) - ## character(0) + ## [1] "NEE_uStar_f" "NEE_U10_f" "NEE_U90_f" Several methods provide processing steps for all scenarios. In addition, method `sApplyUStarScen` calls a user-specified function repeatedly with @@ -215,10 +184,9 @@ several GPP estimates. #grep("U10", colnames(EProc$sExportResults()), value = TRUE) grep("^GPP.*_f$", colnames( EProc$sExportResults()), value = TRUE ) - ## [1] "GPP_uStar_f" "GPP_U10_f" "GPP_U90_f" + ## [1] "GPP_U10_f" "GPP_U90_f" "GPP_uStar_f" -Using change point detection instead of moving point method for UStar Threshold estimation ------------------------------------------------------------------------------------------- +## Using change point detection instead of moving point method for UStar Threshold estimation The package also provides another method of estimating the point where NEE saturates with increasing uStar. With the ChangePointDetection (CPT) @@ -239,41 +207,15 @@ as Gap. ) #(uStarThCP <- usGetSeasonalSeasonUStarMap(resUStar)) EProc$useSeaonsalUStarThresholds() - - ## - ## Enter a frame number, or 0 to exit - ## - ## 1: twDev::genVigs("DEGebExample") - ## 2: packageTools.R#300: rmarkdown::render(fNameI, output_format = output_f - ## 3: render(input = input, output_format = format, output_file = NULL, outp - ## 4: knitr::knit(knit_input, knit_output, envir = envir, quiet = quiet, enc - ## 5: process_file(text, output) - ## 6: withCallingHandlers(if (tangle) process_tangle(group) else process_gro - ## 7: process_group(group) - ## 8: process_group.block(group) - ## 9: call_block(x) - ## 10: block_exec(params) - ## 11: in_dir(input_dir(), evaluate(code, envir = env, new_device = FALSE, ke - ## 12: evaluate(code, envir = env, new_device = FALSE, keep_warning = !isFALS - ## 13: evaluate::evaluate(...) 
- ## 14: evaluate_call(expr, parsed$src[[i]], envir = envir, enclos = enclos, d - ## 15: timing_fn(handle(ev <- withCallingHandlers(withVisible(eval(expr, envi - ## 16: handle(ev <- withCallingHandlers(withVisible(eval(expr, envir, enclos) - ## 17: withCallingHandlers(withVisible(eval(expr, envir, enclos)), warning = - ## 18: withVisible(eval(expr, envir, enclos)) - ## 19: eval(expr, envir, enclos) - ## 20: eval(expr, envir, enclos) - ## 21: EProc$useSeaonsalUStarThresholds() - EProc$sGetUstarScenarios() ## season uStar - ## 5 2004001 0.5454669 - ## 6 2004070 0.1203274 - ## 7 2004210 0.2144359 - ## 8 2004320 0.1558058 - ## 9 2005070 0.1558058 - ## 10 2005180 0.1558058 - ## 11 2005320 0.3506363 - ## 12 2006120 0.3506363 - ## 13 2006305 0.3506363 + ## 5 2004001 0.4940012 + ## 6 2004070 0.3167250 + ## 7 2004210 0.2220000 + ## 8 2004320 0.4977500 + ## 9 2005070 0.3361000 + ## 10 2005180 0.3029803 + ## 11 2005320 0.3908000 + ## 12 2006120 0.2138062 + ## 13 2006305 0.3544000 diff --git a/vignettes/DEGebExample_files/figure-html/DEGeb_estUStar1a-1.png b/vignettes/DEGebExample_files/figure-html/DEGeb_estUStar1a-1.png index 2f9e015f..9bbc6947 100644 Binary files a/vignettes/DEGebExample_files/figure-html/DEGeb_estUStar1a-1.png and b/vignettes/DEGebExample_files/figure-html/DEGeb_estUStar1a-1.png differ diff --git a/vignettes/DEGebExample_files/figure-markdown_strict/DEGeb_estUStar1a-1.png b/vignettes/DEGebExample_files/figure-markdown_strict/DEGeb_estUStar1a-1.png index a82c7104..53f55c0b 100644 Binary files a/vignettes/DEGebExample_files/figure-markdown_strict/DEGeb_estUStar1a-1.png and b/vignettes/DEGebExample_files/figure-markdown_strict/DEGeb_estUStar1a-1.png differ diff --git a/vignettes/aggUncertainty.md b/vignettes/aggUncertainty.md index 144346e6..afb05a45 100644 --- a/vignettes/aggUncertainty.md +++ b/vignettes/aggUncertainty.md @@ -52,7 +52,7 @@ for the gap-filled records. NEE_orig_sd = ifelse( is.finite(.data$NEE_uStar_orig), .data$NEE_uStar_fsd, NA), NEE_uStar_fgood = ifelse( - is.finite(.data$NEE_uStar_fqc <= 1), .data$NEE_uStar_f, NA) + is.finite(.data$NEE_uStar_fqc) & (.data$NEE_uStar_fqc <= 1), .data$NEE_uStar_f, NA) ) If the aggregated mean should be computed excluding poor @@ -75,9 +75,9 @@ average relative uncertainty of the individual observations. With neglecting correlations among records, the uncertainty of the mean annual flux is computed by adding the variances. The mean is computed by *m* = ∑*x**i*/*n*. And hence its standard deviation by -$sd(m) = \\sqrt{Var(m)}= \\sqrt{\\sum{Var(x\_i)}/n^2} = \\sqrt{n \\bar{\\sigma^2}/n^2} = \\bar{\\sigma^2}/\\sqrt{n}$. +$sd(m) = \sqrt{Var(m)}= \sqrt{\sum{Var(x\_i)}/n^2} = \sqrt{n \bar{\sigma^2}/n^2} = \bar{\sigma^2}/\sqrt{n}$. This results in an approximate reduction of the average standard -deviation $\\bar{\\sigma^2}$ by $\\sqrt{n}$. +deviation $\bar{\sigma^2}$ by $\sqrt{n}$. results %>% summarise( nRec = sum(is.finite(NEE_orig_sd)) @@ -97,19 +97,19 @@ low. When observations are not independent of each other, the formulas now become *V**a**r*(*m*) = *s*2/*n**e**f**f* where -$s^2 = \\frac{n\_{eff}}{n(n\_{eff}-1)} \\sum\_{i=1}^n \\sigma\_i^2$, and +$s^2 = \frac{n\_{eff}}{n(n\_{eff}-1)} \sum\_{i=1}^n \sigma\_i^2$, and with the number of effective observations *n**e**f**f* decreasing with the autocorrelation among records (Bayley 1946, Zieba 2011). 
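Before the derivation continues, a self-contained sketch (not the package
implementation) of estimating the effective number of observations from the
empirical autocorrelation of the model-data residuals:

    # illustrative sketch, not the package code:
    # n_eff = n / (1 + 2 * sum_k rho_k) following Bayley & Hammersley (1946) and
    # Zieba (2011), with rho_k the empirical autocorrelation of the residuals
    compute_neff_sketch <- function(resid, max_lag = 50) {
      n <- sum(is.finite(resid))
      # keep gaps as NA so that lags refer to equidistant time steps
      rho <- as.vector(acf(resid, lag.max = max_lag, plot = FALSE,
                           na.action = na.pass)$acf)[-1]
      # truncate at the first non-positive coefficient (common practice)
      first_nonpos <- which(rho <= 0)[1]
      if (!is.na(first_nonpos)) rho <- rho[seq_len(first_nonpos - 1)]
      n / (1 + 2 * sum(rho))
    }

The `nEff` values reported in the daily aggregation further below follow the
same idea.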
-The average standard deviation $\\sqrt{\\bar{\\sigma^2\_i}}$ now -approximately decreases only by about $\\sqrt{n\_{eff}}$: +The average standard deviation $\sqrt{\bar{\sigma^2\_i}}$ now +approximately decreases only by about $\sqrt{n\_{eff}}$: $$ -Var(m) = \\frac{s^2}{n\_{eff}} -= \\frac{\\frac{n\_{eff}}{n(n\_{eff}-1)} \\sum\_{i=1}^n \\sigma\_i^2}{n\_{eff}} -= \\frac{1}{n(n\_{eff}-1)} \\sum\_{i=1}^n \\sigma\_i^2 \\\\ -= \\frac{1}{n(n\_{eff}-1)} n \\bar{\\sigma^2\_i} = \\frac{\\bar{\\sigma^2\_i}}{(n\_{eff}-1)} +Var(m) = \frac{s^2}{n\_{eff}} += \frac{\frac{n\_{eff}}{n(n\_{eff}-1)} \sum\_{i=1}^n \sigma\_i^2}{n\_{eff}} += \frac{1}{n(n\_{eff}-1)} \sum\_{i=1}^n \sigma\_i^2 \\ += \frac{1}{n(n\_{eff}-1)} n \bar{\sigma^2\_i} = \frac{\bar{\sigma^2\_i}}{(n\_{eff}-1)} $$ First we need to quantify the error terms, i.e. model-data residuals. @@ -118,8 +118,8 @@ For all the records of good quality, we have an original measured value `NEE_uStar_fall`. For computing autocorrelation, equidistant time steps are important. -Hence, instead of filtering, the residuals of bad-quality data are set -to missing. +Hence, instead of filtering, the residuals of non-observation, +i.e. gap-filled, data are set to missing. results <- EProc$sExportResults() %>% mutate( @@ -212,7 +212,7 @@ column with that name. ) aggDay - ## # A tibble: 365 x 7 + ## # A tibble: 365 × 7 ## DoY DateTime nEff nRec NEE sdNEE sdNEEuncorr ## ## 1 0 1998-01-01 00:30:00 11.0 21 0.124 0.760 0.536 @@ -225,7 +225,7 @@ column with that name. ## 8 7 1998-01-08 00:30:00 17.7 46 -0.139 0.525 0.320 ## 9 8 1998-01-09 00:30:00 17.5 45 0.614 0.474 0.290 ## 10 9 1998-01-10 00:30:00 15.4 36 0.242 0.641 0.411 - ## # … with 355 more rows + ## # ℹ 355 more rows ![](aggUncertainty_files/figure-markdown_strict/uncBand-1.png) @@ -235,10 +235,10 @@ neglecting correlations. ## u\* threshold uncertainty -There is also uncertainty due to unknown u\* threshold. Since, the same -threshold is used for all times in a given uStar scenario, the relative -uncertainty of this component does not decrease when aggregating across -time. +There is also flux uncertainty due to uncertainty in u\* threshold +estimation. Since the same threshold is used for all times in a given +uStar scenario, the relative uncertainty of this component does not +decrease when aggregating across time. The strategy is to @@ -258,8 +258,8 @@ this uncertainty is computationally expensive. 1\. First, we estimate many samples of the probability density of the unknown uStar threshold. - # for run-time of the vignette creation, here we use only few uStar quantiles - # For real-world applications, a larger sample is required. + # for run-time of the vignette creation, here we use only few (3) uStar quantiles + # For real-world applications, a larger sample (> 30) is required. nScen <- 3 # nScen <- 39 EddyDataWithPosix <- Example_DETha98 %>% @@ -294,14 +294,14 @@ column in the data exported from REddyProc. NEEagg ## uStar U2.5 U50 U97.5 - ## -1.616926 -1.630803 -1.618933 -1.646267 + ## -1.616926 -1.627199 -1.625671 -1.648635 4\. compute uncertainty across aggregated values sdNEEagg_ustar <- sd(NEEagg) sdNEEagg_ustar - ## [1] 0.01349296 + ## [1] 0.01346817 ## Combined aggregated uncertainty @@ -316,4 +316,4 @@ from the random uncertainty, the variances add. 
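A minimal sketch of that combination is given below. `sdNEEagg_ustar` is
computed above, while `sdNEE_rand` is an assumed name for the aggregated
random uncertainty from the first part of this vignette; substitute whatever
name was used there.

    # the two components are treated as independent, so their variances add and
    # the standard deviations combine in quadrature
    sdAnnual <- data.frame(
      sdRand  = sdNEE_rand,        # aggregated random uncertainty (name assumed)
      sdUstar = sdNEEagg_ustar,    # aggregated u*-threshold uncertainty from above
      sdComb  = sqrt(sdNEE_rand^2 + sdNEEagg_ustar^2)
    )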
sdAnnual ## sdRand sdUstar sdComb - ## 1 0.04798329 0.01349296 0.04984432 + ## 1 0.04798329 0.01346817 0.04983762 diff --git a/vignettes/aggUncertainty_files/figure-markdown_strict/uncBand-1.png b/vignettes/aggUncertainty_files/figure-markdown_strict/uncBand-1.png index 233f5699..f8cc14d5 100644 Binary files a/vignettes/aggUncertainty_files/figure-markdown_strict/uncBand-1.png and b/vignettes/aggUncertainty_files/figure-markdown_strict/uncBand-1.png differ diff --git a/vignettes/aggUncertainty_files/figure-markdown_strict/unnamed-chunk-3-1.png b/vignettes/aggUncertainty_files/figure-markdown_strict/unnamed-chunk-3-1.png index 70e6b72e..32f49ad0 100644 Binary files a/vignettes/aggUncertainty_files/figure-markdown_strict/unnamed-chunk-3-1.png and b/vignettes/aggUncertainty_files/figure-markdown_strict/unnamed-chunk-3-1.png differ diff --git a/vignettes/aggUncertainty_files/figure-markdown_strict/unnamed-chunk-7-1.png b/vignettes/aggUncertainty_files/figure-markdown_strict/unnamed-chunk-7-1.png index 17745197..f8a83af5 100644 Binary files a/vignettes/aggUncertainty_files/figure-markdown_strict/unnamed-chunk-7-1.png and b/vignettes/aggUncertainty_files/figure-markdown_strict/unnamed-chunk-7-1.png differ diff --git a/vignettes/bigleaf.md b/vignettes/bigleaf.md index 560caacd..000f0d6c 100644 --- a/vignettes/bigleaf.md +++ b/vignettes/bigleaf.md @@ -1,5 +1,4 @@ -bigleaf package replacing GeoFunctions of REddyProc -=================================================== +# bigleaf package replacing GeoFunctions of REddyProc `REddyProc` package included several utility functions that were somewhat out of the package scope. These functions are removed from @@ -12,12 +11,11 @@ package. "bigleaf package must be installed to create this vignette.") library(REddyProc) -Vapour pressure deficit (VPD) ------------------------------ +## Vapour pressure deficit (VPD) **Vapour pressure deficit (VPD)** was computed with REddyProc in *h**P**a* from relative humidity in % and air temperature in -***C*. +*C*. VPD0 <- fCalcVPDfromRHandTair(DEGebExample$rH, DEGebExample$Tair) @@ -70,8 +68,7 @@ relative humidity as fraction. ## [1] 0.0000 15.8225 31.6450 47.4675 63.2900 79.1125 94.9350 -Evapotranspiration from latent heat and air temperature -------------------------------------------------------- +## Evapotranspiration from latent heat and air temperature LE <- seq(300,500,by = 50) Tair <- 25 @@ -87,8 +84,7 @@ former `fCalcETfromLE`. ## [1] 6.819916 7.956569 9.093222 10.229874 11.366527 -Converting visible radiation from irradiance to photons flux ------------------------------------------------------------- +## Converting visible radiation from irradiance to photons flux Photon flux density (PPFD) of visible light can be computed from energy in incoming radiation @@ -104,7 +100,7 @@ functions `fConvertVisibleWm2toPhotons` and `fConvertGlobalToVisible`. ## [1] 460 -The PPFD of light including non-visible parts, i.e. former +The PPFD of light including non-visible parts, i.e. former `fConvertVisibleWm2toPhotons`, is obtained by setting argument `frac_PAR` to 1. @@ -112,8 +108,7 @@ The PPFD of light including non-visible parts, i.e. former ## [1] 920 -Potential and Extraterrestrial solar radiation ----------------------------------------------- +## Potential and Extraterrestrial solar radiation Potential radiation (*W**m*−2) depends on time and geo-location. 
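To illustrate that dependence, a self-contained first-principles sketch
(neither the REddyProc nor the bigleaf implementation; it takes local solar
time and ignores the longitude/time-zone correction that the package functions
handle):

    # illustrative sketch: potential (extraterrestrial) radiation in W m-2 on a
    # horizontal surface from day of year, local solar hour and latitude, using
    # the Cooper approximation for declination and an eccentricity-corrected
    # solar constant
    pot_rad_sketch <- function(doy, hour, lat_deg, solar_constant = 1361) {
      lat  <- lat_deg * pi / 180
      decl <- 23.45 * pi / 180 * sin(2 * pi * (284 + doy) / 365)  # declination (rad)
      hour_angle <- (hour - 12) * 15 * pi / 180                   # 0 at solar noon
      sin_elev <- sin(lat) * sin(decl) + cos(lat) * cos(decl) * cos(hour_angle)
      ecc <- 1 + 0.033 * cos(2 * pi * doy / 365)   # sun-earth distance correction
      pmax(solar_constant * ecc * sin_elev, 0)     # zero at night
    }
    pot_rad_sketch(160, seq(6, 18, by = 3), 51.0)  # mid-June diurnal course at 51 deg N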
diff --git a/vignettes/gapFilling.md b/vignettes/gapFilling.md index 5331d5ae..87d35017 100644 --- a/vignettes/gapFilling.md +++ b/vignettes/gapFilling.md @@ -1,5 +1,4 @@ -Extended usage of the gap filling algorithm -=========================================== +# Extended usage of the gap filling algorithm #+++ load libraries used in this vignette library(REddyProc) @@ -38,8 +37,7 @@ Extended usage of the gap filling algorithm ![](gapFilling_files/figure-markdown_strict/ex2b-1.png) -Explicit demonstration of MDS algorithm for NEE gap filling -=========================================================== +# Explicit demonstration of MDS algorithm for NEE gap filling EProcMDS <- sEddyProc$new( 'DE-Tha', EddyDataWithPosix, c('NEE', 'Rg', 'Tair', 'VPD')) @@ -73,17 +71,17 @@ Explicit demonstration of MDS algorithm for NEE gap filling FilledEddyData <- EProcMDS$sExportResults() head(FilledEddyData) - ## VAR_orig VAR_f VAR_fqc VAR_fall VAR_fall_qc VAR_fnum VAR_fsd - ## 1 -1.21 -1.210000 0 1.373704 1 54 2.531604 - ## 2 1.72 1.720000 0 1.396364 1 55 2.513678 - ## 3 NA 1.006569 1 1.006569 1 58 3.311918 - ## 4 NA 1.061343 1 1.061343 1 67 3.164355 - ## 5 2.55 2.550000 0 1.071176 1 68 3.141698 - ## 6 NA 1.205441 1 1.205441 1 68 2.884610 - ## VAR_fmeth VAR_fwin - ## 1 1 14 - ## 2 1 14 - ## 3 1 14 - ## 4 1 14 - ## 5 1 14 - ## 6 1 14 + ## VAR_orig VAR_f VAR_fqc VAR_fall VAR_fall_qc VAR_fnum VAR_fsd VAR_fmeth + ## 1 -1.21 -1.210000 0 1.373704 1 54 2.531604 1 + ## 2 1.72 1.720000 0 1.396364 1 55 2.513678 1 + ## 3 NA 1.006569 1 1.006569 1 58 3.311918 1 + ## 4 NA 1.061343 1 1.061343 1 67 3.164355 1 + ## 5 2.55 2.550000 0 1.071176 1 68 3.141698 1 + ## 6 NA 1.205441 1 1.205441 1 68 2.884610 1 + ## VAR_fwin + ## 1 14 + ## 2 14 + ## 3 14 + ## 4 14 + ## 5 14 + ## 6 14 diff --git a/vignettes/gapFilling_files/figure-markdown_strict/ex2b-1.png b/vignettes/gapFilling_files/figure-markdown_strict/ex2b-1.png new file mode 100644 index 00000000..f14b57c7 Binary files /dev/null and b/vignettes/gapFilling_files/figure-markdown_strict/ex2b-1.png differ diff --git a/vignettes/uStarCases.md b/vignettes/uStarCases.md index 79a06c3b..a29e793f 100644 --- a/vignettes/uStarCases.md +++ b/vignettes/uStarCases.md @@ -1,5 +1,4 @@ -Different treatments of uStar threshold -======================================= +# Different treatments of uStar threshold The recommended way of dealing with the uncertain uStar threshold for filtering the half-hourly data, is to repeat all the processing steps @@ -20,8 +19,7 @@ First, some setup. filterLongRuns(Example_DETha98, "NEE") , 'YDH', Year = 'Year', Day = 'DoY', Hour = 'Hour') -Not applying uStar filtering ----------------------------- +## Not applying uStar filtering Subsequent processing steps can be performed without further uStar filtering using `sEddyProc_sMDSGapFill`. Corresponding result columns @@ -34,15 +32,14 @@ then have no uStar specific suffix. ## [1] "NEE_f" -User-specified uStar threshold ------------------------------- +## User-specified uStar threshold The user can provide value for uStar-filtering before gapfilling, using `sEddyProc_sMDSGapFillAfterUstar`. Output columns for this uStar scenario use the suffix as specified by argument `uStarSuffix` which -defaults to "uStar". +defaults to “uStar”. -The friction velocity, uStar, needs to be in column named "Ustar" of the +The friction velocity, uStar, needs to be in column named “Ustar” of the input dataset. EProc <- sEddyProc$new( @@ -53,8 +50,7 @@ input dataset. 
## [1] "NEE_uStar_f" -Single uStar threshold estimate -------------------------------- +## Single uStar threshold estimate The uStar threshold can be estimated from the uStar-NEE relationship from the data without estimating its uncertainty by a bootstrap. @@ -78,20 +74,19 @@ from the data without estimating its uncertainty by a bootstrap. Next, the annual estimate is used as the default in gap-filling. Output columns use the suffix as specified by argument `uSstarSuffix` which -defaults to "uStar". +defaults to “uStar”. #EProc$useAnnualUStarThresholds() EProc$sMDSGapFillAfterUstar('NEE') - ## Warning in .self$sGetUstarScenarios(): uStar scenarios not set yet. Setting - ## to annual mapping. + ## Warning in .self$sGetUstarScenarios(): uStar scenarios not set yet. Setting to + ## annual mapping. grep("NEE.*_f$",names(EProc$sExportResults()), value = TRUE) ## [1] "NEE_uStar_f" -Scenarios across distribution of u\* threshold estimate -------------------------------------------------------- +## Scenarios across distribution of u\* threshold estimate Choosing a different u\* threshold effects filtering and the subsequent processing steps of gap-filling, and flux-partitioning. In order to @@ -113,8 +108,8 @@ bootstrap. ## ## Estimated UStar distribution of: - ## uStar 5% 50% 95% - ## 1 0.41625 0.378475 0.4524588 0.6209465 + ## uStar 5% 50% 95% + ## 1 0.41625 0.3637091 0.4392992 0.6196833 ## by using 100 bootstrap samples and controls: ## taClasses UstarClasses ## 7 20 @@ -128,12 +123,12 @@ bootstrap. # inspect the thresholds to be used by default EProc$sGetUstarScenarios() - ## season uStar U05 U50 U95 - ## 1 1998001 0.41625 0.378475 0.4524588 0.6209465 - ## 2 1998003 0.41625 0.378475 0.4524588 0.6209465 - ## 3 1998006 0.41625 0.378475 0.4524588 0.6209465 - ## 4 1998009 0.41625 0.378475 0.4524588 0.6209465 - ## 5 1998012 0.41625 0.378475 0.4524588 0.6209465 + ## season uStar U05 U50 U95 + ## 1 1998001 0.41625 0.3637091 0.4392992 0.6196833 + ## 2 1998003 0.41625 0.3637091 0.4392992 0.6196833 + ## 3 1998006 0.41625 0.3637091 0.4392992 0.6196833 + ## 4 1998009 0.41625 0.3637091 0.4392992 0.6196833 + ## 5 1998012 0.41625 0.3637091 0.4392992 0.6196833 By default the annually aggregated threshold estimates are used for each season within one year as in the original method publication. 
To see the @@ -142,22 +137,14 @@ estimates for different aggregation levels, use method (uStarThAgg <- EProc$sGetEstimatedUstarThresholdDistribution()) - ## aggregationMode seasonYear season uStar 5% 50% - ## 1 single NA 0.4162500 0.3784750 0.4524588 - ## 2 year 1998 0.4162500 0.3784750 0.4524588 - ## 3 season 1998 1998001 0.4162500 0.3784750 0.4524588 - ## 4 season 1998 1998003 0.4162500 0.3289559 0.4000000 - ## 5 season 1998 1998006 0.3520000 0.3318286 0.3864683 - ## 6 season 1998 1998009 0.3369231 0.2297673 0.4044653 - ## 7 season 1998 1998012 0.1740000 0.1849500 0.4325682 - ## 95% - ## 1 0.6209465 - ## 2 0.6209465 - ## 3 0.6209465 - ## 4 0.5890722 - ## 5 0.4483500 - ## 6 0.5360426 - ## 7 0.5890722 + ## aggregationMode seasonYear season uStar 5% 50% 95% + ## 1 single NA 0.4162500 0.3637091 0.4392992 0.6196833 + ## 2 year 1998 0.4162500 0.3637091 0.4392992 0.6196833 + ## 3 season 1998 1998001 0.4162500 0.3637091 0.4392992 0.6196833 + ## 4 season 1998 1998003 0.4162500 0.3157000 0.3980429 0.5443170 + ## 5 season 1998 1998006 0.3520000 0.3116583 0.3804048 0.4410750 + ## 6 season 1998 1998009 0.3369231 0.2165644 0.3752381 0.5689036 + ## 7 season 1998 1998012 0.1740000 0.2198500 0.4200000 0.6177607 In conjunction with method `usGetSeasonalSeasonUStarMap` and `sEddyProc_sSetUstarScenarios` this can be used to set seasonally @@ -171,13 +158,13 @@ different u\* threshold. However, this common case supported by method EProc$sGetUstarScenarios() ## season uStar U05 U50 U95 - ## 3 1998001 0.4162500 0.3784750 0.4524588 0.6209465 - ## 4 1998003 0.4162500 0.3289559 0.4000000 0.5890722 - ## 5 1998006 0.3520000 0.3318286 0.3864683 0.4483500 - ## 6 1998009 0.3369231 0.2297673 0.4044653 0.5360426 - ## 7 1998012 0.1740000 0.1849500 0.4325682 0.5890722 + ## 3 1998001 0.4162500 0.3637091 0.4392992 0.6196833 + ## 4 1998003 0.4162500 0.3157000 0.3980429 0.5443170 + ## 5 1998006 0.3520000 0.3116583 0.3804048 0.4410750 + ## 6 1998009 0.3369231 0.2165644 0.3752381 0.5689036 + ## 7 1998012 0.1740000 0.2198500 0.4200000 0.6177607 -Several function whose name ends with 'UstarScens' perform the +Several function whose name ends with ‘UstarScens’ perform the subsequent processing steps for all uStar scenarios. They operate and create columns that differ between threshold scenarios by a suffix. @@ -193,7 +180,7 @@ create columns that differ between threshold scenarios by a suffix. EProc$sMRFluxPartitionUStarScens() grep("GPP_.*_f$",names(EProc$sExportResults()), value = TRUE) - ## [1] "GPP_U95_f" "GPP_U50_f" "GPP_U05_f" "GPP_uStar_f" + ## [1] "GPP_U05_f" "GPP_U50_f" "GPP_U95_f" "GPP_uStar_f" if (FALSE) { # run only interactively, because it takes long @@ -202,12 +189,11 @@ create columns that differ between threshold scenarios by a suffix. } The argument `uStarScenKeep = "U50"` specifies that the outputs that are -not distinguished by the suffix, e.g. `FP_GPP2000`, should be reported +not distinguished by the suffix, e.g. `FP_GPP2000`, should be reported for the median u\* threshold scenario with suffix `U50`, instead of the default first scenario. -See also --------- +## See also A more advanced case of user-specified seasons for uStar threshold estimate is given in [`vignette('DEGebExample')`](DEGebExample.html). 
diff --git a/vignettes/useCase.md b/vignettes/useCase.md index 7f098646..d3273801 100644 --- a/vignettes/useCase.md +++ b/vignettes/useCase.md @@ -1,297 +1,227 @@ ---- -title: REddyProc typical workflow -output: - rmarkdown::html_vignette: - keep_md: true -vignette: > - %\VignetteEngine{knitr::rmarkdown_notangle} - %\VignetteIndexEntry{REddyProc typical workflow} - %\usepackage[UTF-8]{inputenc} ---- - - - - - - - - # REddyProc typical workflow ## Importing the half-hourly data -The workflow starts with importing the half-hourly data. The example, reads a -text file with data of the year 1998 from the Tharandt site and converts the -separate decimal columns year, day, and hour to a POSIX timestamp column. -Next, it initializes the `sEddyProc` class. - - -```r -#+++ load libraries used in this vignette -library(REddyProc) -library(dplyr) -#+++ Load data with 1 header and 1 unit row from (tab-delimited) text file -fileName <- getExamplePath('Example_DETha98.txt', isTryDownload = TRUE) -EddyData <- if (length(fileName)) fLoadTXTIntoDataframe(fileName) else - # or use example dataset in RData format provided with REddyProc - Example_DETha98 -#+++ Replace long runs of equal NEE values by NA -EddyData <- filterLongRuns(EddyData, "NEE") -#+++ Add time stamp in POSIX time format -EddyDataWithPosix <- fConvertTimeToPosix( - EddyData, 'YDH',Year = 'Year',Day = 'DoY', Hour = 'Hour') %>% - filterLongRuns("NEE") -#+++ Initalize R5 reference class sEddyProc for post-processing of eddy data -#+++ with the variables needed for post-processing later -EProc <- sEddyProc$new( - 'DE-Tha', EddyDataWithPosix, c('NEE','Rg','Tair','VPD', 'Ustar')) -``` - +The workflow starts with importing the half-hourly data. The example, +reads a text file with data of the year 1998 from the Tharandt site and +converts the separate decimal columns year, day, and hour to a POSIX +timestamp column. Next, it initializes the `sEddyProc` class. + + #+++ load libraries used in this vignette + library(REddyProc) + library(dplyr) + #+++ Load data with 1 header and 1 unit row from (tab-delimited) text file + fileName <- getExamplePath('Example_DETha98.txt', isTryDownload = TRUE) + EddyData <- if (length(fileName)) fLoadTXTIntoDataframe(fileName) else + # or use example dataset in RData format provided with REddyProc + Example_DETha98 + #+++ Replace long runs of equal NEE values by NA + EddyData <- filterLongRuns(EddyData, "NEE") + #+++ Add time stamp in POSIX time format + EddyDataWithPosix <- fConvertTimeToPosix( + EddyData, 'YDH',Year = 'Year',Day = 'DoY', Hour = 'Hour') %>% + filterLongRuns("NEE") + #+++ Initalize R5 reference class sEddyProc for post-processing of eddy data + #+++ with the variables needed for post-processing later + EProc <- sEddyProc$new( + 'DE-Tha', EddyDataWithPosix, c('NEE','Rg','Tair','VPD', 'Ustar')) A fingerprint-plot of the source half-hourly shows already several gaps. -A fingerprint-plot is a color-coded image of the half-hourly fluxes by daytime -on the x and and day of the year on the y axis. - -```r -EProc$sPlotFingerprintY('NEE', Year = 1998) -``` - -![](useCase_files/figure-html/fpNEEOrig-1.png) +A fingerprint-plot is a color-coded image of the half-hourly fluxes by +daytime on the x and and day of the year on the y axis. 
+ EProc$sPlotFingerprintY('NEE', Year = 1998) +![](useCase_files/figure-markdown_strict/fpNEEOrig-1.png) For writing plots of data of several years to pdf see also -* [`sEddyProc_sPlotFingerprint`](../html/sEddyProc_sPlotFingerprint.html), -* [`sEddyProc_sPlotHHFluxes`](../html/sEddyProc_sPlotHHFluxes.html), and -* [`sEddyProc_sPlotDiurnalCycle`](../html/sEddyProc_sPlotDiurnalCycle.html). +- [`sEddyProc_sPlotFingerprint`](../html/sEddyProc_sPlotFingerprint.html), +- [`sEddyProc_sPlotHHFluxes`](../html/sEddyProc_sPlotHHFluxes.html), + and +- [`sEddyProc_sPlotDiurnalCycle`](../html/sEddyProc_sPlotDiurnalCycle.html). ## Estimating the uStar threshold distribution -The second step, is the estimation of the distribution of uStar thresholds, to -identify periods of low friction velocity (uStar), where NEE is biased low. -Discarding periods with low uStar is one of the largest sources of uncertainty -in aggregated fluxes. Hence, several quantiles of the distribution of -the uncertain uStar threshold are estimated by a bootstrap. - -The friction velocity, uStar, needs to be in column named "Ustar" of the input -dataset. - - -```r -EProc$sEstimateUstarScenarios( - nSample = 100L, probs = c(0.05, 0.5, 0.95)) -EProc$sGetEstimatedUstarThresholdDistribution() -``` - -``` -## aggregationMode seasonYear season uStar 5% 50% 95% -## 1 single NA 0.4162500 0.3855867 0.4479808 0.5768934 -## 2 year 1998 0.4162500 0.3855867 0.4479808 0.5768934 -## 3 season 1998 1998001 0.4162500 0.3855867 0.4479808 0.5768934 -## 4 season 1998 1998003 0.4162500 0.3136443 0.4048836 0.5513104 -## 5 season 1998 1998006 0.3520000 0.3196000 0.3900000 0.4654333 -## 6 season 1998 1998009 0.3369231 0.2626420 0.3959722 0.5377187 -## 7 season 1998 1998012 0.1740000 0.2354000 0.4375556 0.5768934 -``` - - - -The output reports annually aggregated uStar estimates of -0.42 for -the original data and -0.39, 0.45, 0.58 -for lower, median, -and upper quantile of the estimated distribution. The threshold can vary between -periods of different surface roughness, e.g. before and after harvest. -Therefore, there are estimates for different time periods, called seasons. -These season-estimates are by default aggregated to entire years. - -The subsequent post processing steps will be repeated using the four $u_*$ threshold -scenarios (non-resampled and tree quantiles of the bootstrapped distribution). -They require to specify a $u_*$-threshold for each -season and a suffix to distinguish the outputs related to different thresholds. -By default the annually aggregated estimates are used for each season -within the year. - - -```r -EProc$sGetUstarScenarios() -``` - -``` -## season uStar U05 U50 U95 -## 1 1998001 0.41625 0.3855867 0.4479808 0.5768934 -## 2 1998003 0.41625 0.3855867 0.4479808 0.5768934 -## 3 1998006 0.41625 0.3855867 0.4479808 0.5768934 -## 4 1998009 0.41625 0.3855867 0.4479808 0.5768934 -## 5 1998012 0.41625 0.3855867 0.4479808 0.5768934 -``` -## Gap-filling -The second post-processing step is filling the gaps in NEE using information of the -valid data. Here, we decide to use the same annual $u_*$ threshold estimate -in each season, as obtained above, and decide to compute uncertainty also -for valid records (FillAll). - - -```r -EProc$sMDSGapFillUStarScens('NEE') -``` +The second step, is the estimation of the distribution of uStar +thresholds, to identify periods of low friction velocity (uStar), where +NEE is biased low. Discarding periods with low uStar is one of the +largest sources of uncertainty in aggregated fluxes. 
Hence, several
+quantiles of the distribution of the uncertain uStar threshold are
+estimated by a bootstrap.
+
+The friction velocity, uStar, needs to be in a column named “Ustar” of the
+input dataset.
+
+    EProc$sEstimateUstarScenarios(
+      nSample = 100L, probs = c(0.05, 0.5, 0.95))
+    EProc$sGetEstimatedUstarThresholdDistribution()
+
+    ##   aggregationMode seasonYear  season     uStar        5%       50%       95%
+    ## 1          single         NA         0.4162500 0.3795192 0.4500000 0.6368611
+    ## 2            year       1998         0.4162500 0.3795192 0.4500000 0.6368611
+    ## 3          season       1998 1998001 0.4162500 0.3795192 0.4500000 0.6368611
+    ## 4          season       1998 1998003 0.4162500 0.3394866 0.4130139 0.6201562
+    ## 5          season       1998 1998006 0.3520000 0.3313333 0.3874444 0.4673625
+    ## 6          season       1998 1998009 0.3369231 0.2265271 0.3811790 0.5159905
+    ## 7          season       1998 1998012 0.1740000 0.1924667 0.4331866 0.6368611
+
+The output reports annually aggregated uStar estimates of 0.42 for the
+original data and 0.38, 0.45, 0.64 for lower, median, and upper quantile
+of the estimated distribution. The threshold can vary between periods of
+different surface roughness, e.g. before and after harvest. Therefore,
+there are estimates for different time periods, called seasons. These
+season-estimates are by default aggregated to entire years.
+
+The subsequent post-processing steps will be repeated using the four
+*u*\* threshold scenarios (non-resampled and three quantiles
+of the bootstrapped distribution). They require specifying a
+*u*\*-threshold for each season and a suffix to distinguish
+the outputs related to different thresholds. By default the annually
+aggregated estimates are used for each season within the year.
+
+    EProc$sGetUstarScenarios()
+
+    ##    season   uStar       U05  U50       U95
+    ## 1 1998001 0.41625 0.3795192 0.45 0.6368611
+    ## 2 1998003 0.41625 0.3795192 0.45 0.6368611
+    ## 3 1998006 0.41625 0.3795192 0.45 0.6368611
+    ## 4 1998009 0.41625 0.3795192 0.45 0.6368611
+    ## 5 1998012 0.41625 0.3795192 0.45 0.6368611

 ## Gap-filling

 The second post-processing step is filling the gaps in NEE using
+information of the valid data. Here, we decide to use the same annual
+*u*\* threshold estimate in each season, as obtained above,
+and decide to compute uncertainty also for valid records (FillAll).

+    EProc$sMDSGapFillUStarScens('NEE')

 The screen output (not shown here) already shows that the
+*u*\*-filtering and gap-filling was repeated for each given
+estimate of the *u*\* threshold, i.e. column in
+`uStarThAnnual`, marking 22% to 38% of the data as gaps.
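As a quick cross-check of those reported gap fractions, one can count the
filled records per scenario. This is a minimal sketch, not part of the
original vignette; it assumes the `NEE_<suffix>_fqc` quality-flag columns
created by the gap-filling, where a value of 0 marks an original
(non-filled) record.

    # fraction of records that were filled, per u* scenario (assumed _fqc convention)
    res <- EProc$sExportResults()
    fqc_cols <- grep("NEE_.*_fqc$", names(res), value = TRUE)
    sapply(res[fqc_cols], function(qc) mean(qc > 0, na.rm = TRUE))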
For
+gap-filling without prior *u*\*-filtering using
+`sEddyProc_sMDSGapFill` or for applying single or user-specified
+*u*\* thresholds using `sEddyProc_sMDSGapFillAfterUstar` see
+`vignette("uStarCases")`.

 For each of the different *u*\* threshold estimates a
+separate set of output columns of filled NEE and its uncertainty is
+generated, distinguished by the suffixes given with `uStarSuffixes`.
+“\_f” denotes the filled value and “\_fsd” the estimated
+standard deviation of its uncertainty.

+    grep("NEE_.*_f$",names(EProc$sExportResults()), value = TRUE)
+    grep("NEE_.*_fsd$",names(EProc$sExportResults()), value = TRUE)

+    ## [1] "NEE_uStar_f"   "NEE_U05_f"   "NEE_U50_f"   "NEE_U95_f"
+    ## [1] "NEE_uStar_fsd" "NEE_U05_fsd" "NEE_U50_fsd" "NEE_U95_fsd"

 A fingerprint-plot of one of the new variables shows that gaps have been
+filled.

+    EProc$sPlotFingerprintY('NEE_U50_f', Year = 1998)

+![](useCase_files/figure-markdown_strict/fpNEEFilled-1.png)

 ## Partitioning net flux into GPP and Reco

 The third post-processing step is partitioning the net flux (NEE) into
+its gross components GPP and Reco. The partitioning needs to distinguish
+carefully between night-time and day-time. Therefore, it needs a
+specification of geographical coordinates and time zone to allow
+computing sunrise and sunset. Further, the missing values in the used
+meteorological data need to be filled.

 For VPD, which is important for daytime flux partitioning, an
+additional gap-filling of longer gaps based on minimum daily temperature
+(assumed dewpoint) is available.

+    EProc$sSetLocationInfo(LatDeg = 51.0, LongDeg = 13.6, TimeZoneHour = 1)
+    EProc$sMDSGapFill('Tair', FillAll = FALSE, minNWarnRunLength = NA)
+    EProc$sMDSGapFill('VPD', FillAll = FALSE, minNWarnRunLength = NA)
+    EProc$sFillVPDFromDew() # fill longer gaps still present in VPD_f

 Now we are ready to invoke the partitioning, here by the night-time
+approach, for each of the several filled NEE columns.

+    EProc$sMRFluxPartitionUStarScens()

+The results are stored in columns `Reco` and `GPP_f` modified by the
+respective *u*\* threshold suffix.
-

+    grep("GPP.*_f$|Reco",names(EProc$sExportResults()), value = TRUE)

+    ## [1] "Reco_U05"    "GPP_U05_f"  "Reco_U50"    "GPP_U50_f"  "Reco_U95"
+    ## [6] "GPP_U95_f"  "Reco_uStar" "GPP_uStar_f"

 A visualization of the results by a fingerprint plot gives a compact
+overview.

+    EProc$sPlotFingerprintY('GPP_U50_f', Year = 1998)

+![](useCase_files/figure-markdown_strict/fingerPrintGPP-1.png)

 For using daytime-based flux partitioning see
+[`sEddyProc_sGLFluxPartition`](../html/sEddyProc_sGLFluxPartition.html)
 computing columns `GPP_DT` and `Reco_DT`.

 ## Estimating the uncertainty of aggregated results

 The results of the different *u*\* threshold scenarios can be
+used for estimating the uncertainty due to not knowing the threshold.

 First, the mean GPP across the whole year is computed for each
+*u*\* scenario and converted from
+μmol CO<sub>2</sub> m<sup>-2</sup> s<sup>-1</sup> to
+gC m<sup>-2</sup> yr<sup>-1</sup>.

+    FilledEddyData <- EProc$sExportResults()
+    uStarSuffixes <- colnames(EProc$sGetUstarScenarios())[-1]
+    #suffix <- uStarSuffixes[2]
+    GPPAggCO2 <- sapply( uStarSuffixes, function(suffix) {
+      GPPHalfHour <- FilledEddyData[[paste0("GPP_",suffix,"_f")]]
+      mean(GPPHalfHour, na.rm = TRUE)
+    })
+    molarMass <- 12.011
+    GPPAgg <- GPPAggCO2 * 1e-6 * molarMass * 3600*24*365.25
+    print(GPPAgg)

+    ##    uStar      U05      U50      U95
+    ## 1919.176 1897.384 1958.870 1929.483

 The difference between those aggregated values is a first estimate of the
+uncertainty range in GPP due to uncertainty of the *u*\*
+threshold.
+
+    (max(GPPAgg) - min(GPPAgg)) / median(GPPAgg)
+
+In this run of the example a relative error of about 3.2% is inferred.
+
+For a better but more time-consuming uncertainty estimate, specify a
+larger sample of *u*\* threshold values, repeat the
+post-processing for each of them, and compute statistics from the larger
+sample of resulting GPP columns (a sketch of this final aggregation step
+is given at the end). This can be achieved by specifying a larger
+sequence of quantiles when calling `sEstimateUstarScenarios` in place of
+the command shown above.
+
+    EProc$sEstimateUstarScenarios(
+      nSample = 200, probs = seq(0.025,0.975,length.out = 39) )

 ## Storing the results in a csv-file

 The results still reside inside the `sEddyProc` class. We first export
+them to an R data.frame, append the columns to the original input data,
+and write this data.frame to a text file in a temporary directory.
+
+    FilledEddyData <- EProc$sExportResults()
+    CombinedData <- cbind(EddyData, FilledEddyData)
+    fWriteDataframeToFile(CombinedData, 'DE-Tha-Results.txt', Dir = tempdir())
+    # or without relying on data.frame EddyData
+    # with replacing column DateTime by Year, DoY, and Hour:
+    fWriteDataframeToFile(
+      cbind(EProc$sExportData(), EProc$sExportResults()), 'DE-Tha-Results_ydh.txt',
+      isSplitDatetime=TRUE, Dir = tempdir())
+    # tmp <- fLoadTXTIntoDataframe(file.path(tempdir(),'DE-Tha-Results_ydh.txt'))
diff --git a/vignettes/useCase_files/figure-html/fingerPrintGPP-1.png b/vignettes/useCase_files/figure-html/fingerPrintGPP-1.png
deleted file mode 100644
index 51b26aac..00000000
Binary files a/vignettes/useCase_files/figure-html/fingerPrintGPP-1.png and /dev/null differ
diff --git a/vignettes/useCase_files/figure-html/fpNEEFilled-1.png b/vignettes/useCase_files/figure-html/fpNEEFilled-1.png
deleted file mode 100644
index d5945678..00000000
Binary files a/vignettes/useCase_files/figure-html/fpNEEFilled-1.png and /dev/null differ
diff --git a/vignettes/useCase_files/figure-html/fpNEEOrig-1.png b/vignettes/useCase_files/figure-html/fpNEEOrig-1.png
deleted file mode 100644
index f18e8572..00000000
Binary files a/vignettes/useCase_files/figure-html/fpNEEOrig-1.png and /dev/null differ
diff --git a/vignettes/useCase_files/figure-markdown_strict/fingerPrintGPP-1.png b/vignettes/useCase_files/figure-markdown_strict/fingerPrintGPP-1.png
new file mode 100644
index 00000000..a35d89e2
Binary files /dev/null and b/vignettes/useCase_files/figure-markdown_strict/fingerPrintGPP-1.png differ
diff --git a/vignettes/useCase_files/figure-markdown_strict/fpNEEFilled-1.png b/vignettes/useCase_files/figure-markdown_strict/fpNEEFilled-1.png
new file mode 100644
index 00000000..baf66181
Binary files /dev/null and b/vignettes/useCase_files/figure-markdown_strict/fpNEEFilled-1.png differ
diff --git a/vignettes/useCase_files/figure-markdown_strict/fpNEEOrig-1.png
b/vignettes/useCase_files/figure-markdown_strict/fpNEEOrig-1.png new file mode 100644 index 00000000..5cd88355 Binary files /dev/null and b/vignettes/useCase_files/figure-markdown_strict/fpNEEOrig-1.png differ
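Relating to the larger-sample uncertainty estimate described in the vignette
above (the `nSample = 200` call with 39 quantiles): a minimal sketch of the
final aggregation step, assuming the gap-filling and partitioning have been
re-run for that larger scenario set so that one `GPP_<suffix>_f` column
exists per quantile. It reuses the unit conversion from the vignette's own
example; it is an illustration, not part of the package documentation.

    # after re-running sMDSGapFillUStarScens('NEE') and
    # sMRFluxPartitionUStarScens() for the larger scenario set
    FilledEddyData <- EProc$sExportResults()
    uStarSuffixes <- colnames(EProc$sGetUstarScenarios())[-1]
    GPPAggCO2 <- sapply(uStarSuffixes, function(suffix) {
      mean(FilledEddyData[[paste0("GPP_", suffix, "_f")]], na.rm = TRUE)
    })
    molarMass <- 12.011
    GPPAgg <- GPPAggCO2 * 1e-6 * molarMass * 3600*24*365.25  # gC m-2 yr-1
    c(mean = mean(GPPAgg), sd = sd(GPPAgg))  # central estimate and spread across scenarios

The standard deviation across the scenario columns then serves as the
uncertainty estimate of annual GPP due to the unknown *u*\* threshold.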