
Summary

The Ultraviolet Radiation pressure layer is generated from daily data on Local Noon Erythemal UV Irradiance (mW/m²) derived from satellite observations.

  1. Average the daily data into weekly means for each year and cell
  2. For each week of the year, calculate the mean and standard deviation across all years (2005-2015), so each cell's weekly climatology is built from ~572 (11*52) weekly values
  3. Flag a week as anomalous when its value exceeds the climatological mean plus one standard deviation (see the sketch after this list)
  4. Sum the weekly anomalies for each year/cell (for a total of 52 possible anomalies per year/cell)
  5. Calculate the total number of anomalies in the reference period (in our case, 2005-2009, for a total of 52*5 possible anomalies per cell)
  6. Calculate the total number of anomalies in the most recent 5 year period (2011-2015)
  7. For each cell, take the difference between current anomalies and reference anomalies
  8. Rescale the data to be between 0-1 by using the 99.99th quantile as a reference point
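The anomaly counting in steps 2-7 can be sketched for a single cell. This toy example uses hypothetical random numbers, not the real data, purely to illustrate the logic:

#toy illustration of steps 2-7 for one cell (hypothetical numbers)
set.seed(1)
uv     <- matrix(rnorm(11*52, mean = 50, sd = 5), nrow = 11,
                 dimnames = list(2005:2015, NULL)) #weekly mean UV, years x weeks
clim   <- colMeans(uv) + apply(uv, 2, sd)          #weekly climatology: mean + 1 sd
anom   <- sweep(uv, 2, clim, FUN = '>')            #TRUE where a week is anomalous
annual <- rowSums(anom)                            #anomalies per year (0-52)
sum(annual[as.character(2011:2015)]) -
  sum(annual[as.character(2005:2009)])             #current minus reference count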

Updates from previous assessment

Previous assessments used two different sets of data:

  1. EarthProbe/TOMS data from 1997 - 2001
  2. OMI/Aura data from 2008 - 2013

From Halpern et al. (2015):

Because the satellite values cannot easily be compared to each other, we chose to create two mean baseline UV values against which to look for anomalies. We calculated two separate baseline means and standard deviations for the two 5-year time periods shown above. Daily irradiance values were averaged into 60 monthly mean UV values for each five year period. Mean monthly UV values exceeding the baseline mean plus one standard deviation were labeled as anomalous pixels.

For OHI 2016 we changed our methods to use a single dataset that can be maintained long term and compared across time, rather than two separate datasets; the methods changed accordingly. This layer is now calculated in a very similar manner to the Sea Surface Temperature layer. The OMI/Aura data for 2014 and 2015 were also included in this assessment.


Data Source

Reference: The Ultraviolet Radiation pressure layer uses the Aura OMI Global Surface UVB Data Product.
Native Data Resolution: 1 degree
Values: Level-3 OMI Surface UV Irradiance and Erythemal Dose (OMUVBd)
Time Range: Daily data from 2005 - 2015
Format: HDF5
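
The internal path to the erythemal field used in the Methods below can be located by listing a granule's contents with rhdf5. A minimal sketch (the file name here is hypothetical; any OMUVBd .he5 granule works):

library(rhdf5)
#list the groups/datasets in one granule to find the data field of interest
h5ls('OMI-Aura_L3-OMUVBd_2005m0101_v003.he5')
#the field read in the Methods below lives at:
# /HDFEOS/GRIDS/OMI UVB PRODUCT/Data Fields/ErythemalDailyDose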


Methods

Setup

knitr::opts_chunk$set(message = FALSE, warning = FALSE)

source('~/github/ohiprep/src/R/common.R')
library(raster)
library(ncdf4)
library(rgdal)
library(rhdf5) #from bioconductor: http://bioconductor.org/packages/release/bioc/html/rhdf5.html
library(ggplot2)
library(RColorBrewer)

raw_data_dir = file.path(dir_M,'git-annex/globalprep/_raw_data/NASA_OMI_AURA_UV/d2016')

#years of data we are using for this data layer
yrs  <- c(2005:2015)
mths <- c("01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11", "12")
days <- seq(1,358,7) #first day of each of the 52 seven-day bins treated as weeks

#global ocean raster at 1km for resampling/projecting purposes
  ocean     <- raster(file.path(dir_M, 'model/GL-NCEAS-Halpern2008/tmp/ocean.tif'))
  ocean_shp <- readOGR(file.path(dir_M,'git-annex/globalprep/spatial/d2014/data'),layer = 'regions_gcs')
## OGR data source with driver: ESRI Shapefile 
## Source: "/home/shares/ohi/git-annex/globalprep/spatial/d2014/data", layer: "regions_gcs"
## with 551 features
## It has 7 fields
  land      <- ocean_shp%>%
                  subset(rgn_typ %in% c('land','land-disputed','land-noeez'))
  
#set mollweide projection CRS
  mollCRS = crs('+proj=moll +lon_0=0 +x_0=0 +y_0=0 +ellps=WGS84 +units=m +no_defs')
 
#define colors in spectral format for plotting rasters   
  cols    = rev(colorRampPalette(brewer.pal(9, 'Spectral'))(255)) # reversed Spectral color scheme

Create rasters from HDF files

Calculate weekly means and standard deviations across all years

#list all files from raw data folder

files = list.files(file.path(raw_data_dir,'test'),pattern='\\.he5$',full.names=T)

attribute <- "/HDFEOS/GRIDS/OMI UVB PRODUCT/Data Fields/ErythemalDailyDose"

#function to turn an HDF5 file into a raster 
he_to_ras <- function(x){

    h <- h5read(x,attribute)
    #the HDF5 grid is stored transposed, so build the raster with swapped x/y
    #extents, then transpose and flip to get a standard lon/lat orientation
    r <- raster(h,xmn=-90,ymn=-180,xmx = 90, ymx = 180, crs = "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0")%>%
            t()%>%
            flip(direction='y')
    
    r[r<0]<-NA #negative values are missing-data flags; set them to NA
    
    return(r)
  
}
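
#optional sanity check (a sketch, not part of the pipeline): rasterize the
#first granule and confirm a global extent with non-negative values
# r1 <- he_to_ras(files[1])
# plot(r1, col = cols)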

#for every week in each year in the time series, calculate the weekly mean and standard deviation.
for (i in yrs){
  
  print(i)
  
    l <- files[substr(files,96,99)==i] #files for year i (year parsed from a fixed position in the full path)
    
    #now grab weeks
    for (j in 1:length(days)){
      
      print(j)
    
      dys       <- l[days[j]:(days[j]+6)]              #get all files from the year for these 7 days
      rasters   <- lapply(dys[!is.na(dys)],he_to_ras)  #rasterize these .he5 files, skipping days with no file
    
      uv_week   <- stack(rasters)
  
      #calculate the mean
      week_mean    <-  calc(uv_week, fun=function(x){mean(x,na.rm=T)},
                         filename=paste0(file.path(dir_M),'/git-annex/globalprep/prs_uv/v2016/int/weekly_means/weekly_means_',i,'_',j,'.tif'),
                         overwrite=T)
      
      
      week_sd      <-  calc(uv_week, fun=function(x){sd(x,na.rm=T)},
                         filename=paste0(file.path(dir_M),'/git-annex/globalprep/prs_uv/v2016/int/weekly_sd/weekly_sd_',i,'_',j,'.tif'),
                         overwrite=T)
      
      week_mean_sd <-  overlay(week_mean,week_sd,fun=function(x,y){x+y},
                         filename=paste0(file.path(dir_M),'/git-annex/globalprep/prs_uv/v2016/int/weekly_mean_sd/weekly_mean_sd_',i,'_',j,'.tif'),
                         overwrite=T)
      
    }
}

#get weekly climatologies across all years in the time series

names_weekly <- list.files(file.path(dir_M,'git-annex/globalprep/prs_uv/v2016/int/weekly_means'),full.names=T)

for(i in 1:52){
  
  print(i)
  
  #get all rasters for week i
    if(i %in% c(1:9)){ #single-digit weeks need a different substring match in the file name
       w = names_weekly[(substr(names_weekly,87,91)==paste0(i,'.tif'))]%>%stack()
    }else{
      w  = names_weekly[(substr(names_weekly,87,88)==i)]%>%stack()
    }
  
  #mean
  m  = calc(w,fun=function(x){mean(x,na.rm=T)},filename=paste0(file.path(dir_M),
           '/git-annex/globalprep/prs_uv/v2016/int/weekly_climatologies/mean_week_',i,'.tif'),overwrite=T)
  
  #sd
  sd = calc(w,fun=function(x){sd(x,na.rm=T)},progress='text',filename=paste0(file.path(dir_M),
           '/git-annex/globalprep/prs_uv/v2016/int/weekly_climatologies/sd_week_',i,'.tif'),overwrite=T)
  
  #mean plus sd
  m_sd = overlay(m,sd,fun=function(x,y){x+y},filename = paste0(file.path(dir_M),
           '/git-annex/globalprep/prs_uv/v2016/int/weekly_climatologies/mean_sd_week_',i,'.tif'),overwrite=T)
  
}

Compare to climatology

Compare each week in each year to the climatology for that week, where the climatology is the weekly mean plus one standard deviation.
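
That is, for each cell, week w of year y is flagged as anomalous (1) when mean_UV(w, y) > clim_mean(w) + clim_sd(w), and 0 otherwise; the overlay() call below encodes exactly this comparison.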

# Second Loop to calculate annual positive anomalies


for (i in 2005:2015){
  
  print(i)
  
  s = stack()
  
  for (j in 1:52){
    
    print(j)

    w_mean = raster(paste0(file.path(dir_M),'/git-annex/globalprep/prs_uv/v2016/int/weekly_means/weekly_means_',i,'_',j,'.tif')) #mean UV for week i

    w_anom = raster(paste0(file.path(dir_M),'/git-annex/globalprep/prs_uv/v2016/int/weekly_climatologies/mean_sd_week_',j,'.tif')) #get the week climatology
    
    count = overlay(w_mean,w_anom,fun=function(x,y){ifelse(x>y,1,0)},progress='text') #compare to average anomaly for that week 
    
    s = stack(s,count)
    
  }
  
  year = calc(s,fun=function(x){sum(x,na.rm=T)},progress='text',filename=paste0('int/annual_pos_anomalies_',i,'.tif'),overwrite=T)
}

Calculate differences

Calculate the difference in the total number of anomalies in each rolling five-year window (2007 - 2011 through 2011 - 2015) compared to the first 5 years (2005 - 2009).
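
For each cell this is simply: diff = (total annual anomalies in the five-year window) - (total annual anomalies in 2005-2009).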

l   <- list.files('int',full.names=T)

ref <- list.files('int',full.names=T)[1:5]%>%stack()%>%
        calc(.,fun=function(x){sum(x,na.rm=T)})

library(doParallel) #provides registerDoParallel() and foreach/%dopar%

registerDoParallel(5)

foreach(i = c(2007:2011)) %dopar% { #each i starts a 5-year window ending in i+4

  yrs <- c(i:(i+4))
  
  s   <- stack(l[substr(l,26,29)%in%yrs])%>%sum(.,na.rm=T)

  diff = s - ref #calculate difference between total number of anomalies in recent and historical (2005-2009) time periods

  projection(diff) <- "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"
  
  out = mask(diff,land,inverse=T, filename =paste0('int/annual_diff_ocean_',yrs[1], '_', yrs[5],'.tif'),overwrite=T)
}

Rescale
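
Differences are rescaled to between 0 and 1 using the 99.99th quantile of the difference values across all years as the reference point: negative differences become 0, and anything at or above the reference point becomes 1. The logic of the calc() call further below, written as a standalone helper (hypothetical function name, for illustration only):

#sketch of the rescaling applied in calc() below (hypothetical helper name)
rescale_uv <- function(x, ref) ifelse(x > 0, pmin(x / ref, 1), 0)
rescale_uv(c(-5, 0, 20.5, 41, 60), ref = 41) #returns 0.0 0.0 0.5 1.0 1.0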

out_files <- list.files('int',full.names=T, pattern = 'ocean')

#get data across all years
vals <- c()

for(i in 1:5){
  
  print(i)
  m <- out_files[i]%>%
    raster()%>%
    getValues()
  
  m <- na.omit(m) #na.omit returns the cleaned vector; it does not modify m in place
  
  vals <- c(vals,m)
  
}

resc_num  <- quantile(vals,prob=0.9999,na.rm=T) #99.99th quantile reference point (~41 for this data)

foreach(i = c(2007:2011)) %dopar% { #each i starts a 5-year window ending in i+4
  
 yrs <- c(i:(i+4))
 
  out_rescale = out_files[substr(out_files,23,26)==i]%>%
                 raster()%>%
                  projectRaster(crs=mollCRS,progress='text',over=T)%>%
                  calc(fun=function(x){ifelse(x>0,ifelse(x>resc_num,1,x/resc_num),0)})%>%
                  resample(ocean,method='ngb', filename=paste0(dir_M,'/git-annex/globalprep/prs_uv/v2016/output/uv_',
                                                               min(yrs),'_',max(yrs),'-2005_2009.tif'),overwrite=T)
}

Results

out <- list.files(file.path(dir_M,'git-annex/globalprep/prs_uv/v2016/output'),full.names=T)[5]%>%raster()

plot(out,box=F,col=cols,axes=F,main = 'UV Pressure Layer \n OHI 2016')

Extract data for each region

setwd('globalprep/prs_uv/v2016')

source('../../../src/R/common.R')

library(raster)
library(rgdal)
library(dplyr)
library(tidyr) #for gather()

# raster/zonal data

zones <- raster(file.path(dir_M, "git-annex/globalprep/spatial/d2014/data/rgn_mol_raster_1km/sp_mol_raster_1km.tif"))  # raster data
rgn_data <- read.csv(file.path(dir_M, "git-annex/globalprep/spatial/d2014/data/rgn_mol_raster_1km/regionData.csv"))    # data for sp_id's used in raster



# get raster data:
rasts <- list.files(file.path(dir_M,'git-annex/globalprep/prs_uv/v2016/output'))

pressure_stack <- stack()
for(raster in rasts){ # raster = "uv_2007_2011-2005-2009_rescaled_moll_1km.tif"
  tmp <- raster(file.path(dir_M,'git-annex/globalprep/prs_uv/v2016/output', raster))
  pressure_stack <- stack(pressure_stack, tmp)
}

# extract data for each region:
regions_stats <- zonal(pressure_stack,  zones, fun="mean", na.rm=TRUE, progress="text")
regions_stats2 <- data.frame(regions_stats)
setdiff(regions_stats2$zone, rgn_data$ant_id) #raster zones with no matching region
setdiff(rgn_data$ant_id, regions_stats2$zone) #regions missing from the raster

data <- merge(rgn_data, regions_stats2, all.y=TRUE, by.x="rgn_id", by.y="zone") %>%
  gather("year", "pressure_score", starts_with("uv")) 
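
# after the gather, 'data' is long format: one row per region x output layer,
# with 'year' holding the raster layer name and 'pressure_score' the mean
# rescaled UV value for that region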

uv_data <- data %>%
  mutate(year = substring(year, 9, 12)) %>% #last year of the 5-year window, parsed from the layer name
  mutate(year = as.numeric(year)) %>%
  mutate(year = year + 1) %>%
  filter(ant_typ == "eez") %>%
  dplyr::select(rgn_id, rgn_nam, year, pressure_score)

write.csv(uv_data, "int/uv.csv", row.names=FALSE)

## save toolbox data for different years/regions

# function to extract data more easily
uv_data <- read.csv("int/uv.csv")

saveData <- function(newYear){ # newYear= 2012
  
  criteria_year <- ~year == newYear

    uv  <- uv_data %>%
      filter_(criteria_year) %>%
      dplyr::select(rgn_id, pressure_score) %>%
      arrange(rgn_id)
  
  write.csv(uv, sprintf('output/uv_%s.csv', newYear), row.names=FALSE)
}


### extract data 
for(newYear in (max(uv_data$year) - 4):(max(uv_data$year))){
  saveData(newYear)
}


### try visualizing the data using googleVis plot
library(googleVis)
plotData <- uv_data %>%
  dplyr::select(rgn_nam, year, pressure_score)

Motion=gvisMotionChart(plotData, 
                       idvar="rgn_nam", 
                       timevar="year")
plot(Motion)

print(Motion, file='uv.html')

Citation information

Jari Hovila, Antti Arola, and Johanna Tamminen, OMI/Aura Surface UVB Irradiance and Erythemal Dose Daily L3 Global 1.0x1.0 deg Grid, version 003, NASA Goddard Space Flight Center.