The Ultraviolet Radiation pressure layer is generated from daily data on Local Noon Erythemal UV Irradiance (mW/m²) derived from satellite observations.

Updates from previous assessment: one additional year of data (2016).

Reference: The Ultraviolet Radiation pressure layer uses the Aura OMI Global Surface UVB Data Product.
Native Data Resolution: 1 degree
Values: Level-3 OMI Surface UV Irradiance and Erythemal Dose (OMUVBd)
Time Range: Daily data from 2005 to 2016
Format: HDF5

Note: The first two weeks of June are missing from the 2016 raw data. The code accounts for this gap, but it does mean that two weeks of observations are absent that could have slightly influenced the most recent pressure layer.
knitr::opts_chunk$set(message = FALSE, warning = FALSE)
library(raster)
source('~/github/ohiprep/src/R/common.R')
library(ncdf4)
library(rgdal)
library(rhdf5) #from bioconductor: http://bioconductor.org/packages/release/bioc/html/rhdf5.html
library(ggplot2)
library(RColorBrewer)
library(foreach)
library(doParallel)
raw_data_dir = file.path(dir_M,'git-annex/globalprep/_raw_data/NASA_OMI_AURA_UV')
#years of data we are using for this data layer
yrs  <- c(2005:2016)
mths <- c("01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11", "12")
days <- seq(1,358,7) #index of the first daily file in each of the 52 weeks
#global ocean raster at 1km for resampling/projecting purposes
  ocean     <- raster(file.path(dir_M, 'model/GL-NCEAS-Halpern2008/tmp/ocean.tif'))
  ocean_shp <- readOGR(file.path(dir_M,'git-annex/globalprep/spatial/d2014/data'),layer = 'regions_gcs',verbose=F)
  land      <- ocean_shp%>%
                  subset(rgn_typ %in% c('land','land-disputed','land-noeez'))
  
#set mollweide projection CRS
  mollCRS = crs('+proj=moll +lon_0=0 +x_0=0 +y_0=0 +ellps=WGS84 +units=m +no_defs')
 
#define colors in spectral format for plotting rasters   
  cols    = rev(colorRampPalette(brewer.pal(9, 'Spectral'))(255)) #rainbow color scheme for plotting

Calculate weekly means and standard deviations across all years.
#list all files from raw data folder
files = c(list.files(file.path(raw_data_dir,'d2017/data'),pattern='*.he5$',full.names=T),
          list.files(file.path(raw_data_dir,'d2016/test'),pattern='*.he5$',full.names=T))
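Before processing, it is worth confirming exactly which days are absent from the 2016 record. A minimal sketch, assuming the standard OMI L3 filename convention in which the observation date appears as YYYYmMMDD (e.g. OMI-Aura_L3-OMUVBd_2016m0601_...):
#check for gaps in the 2016 raw data (the filename date format above is an assumption)
f2016    <- basename(grep('OMUVBd_2016m', files, value=TRUE))
obs      <- as.Date(regmatches(f2016, regexpr('[0-9]{4}m[0-9]{4}', f2016)), format='%Ym%m%d')
all_days <- seq(as.Date('2016-01-01'), as.Date('2016-12-31'), by='day')
all_days[!all_days %in% obs] #expect the first two weeks of June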
#function to turn an HDF5 file into a raster; `attribute` (the HDF5 dataset path) is assigned in the loop below
he_to_ras <- function(x){
    h <- h5read(x, attribute)
    #the HDF5 matrix is stored transposed, so swap the x/y bounds here, then transpose and flip
    r <- raster(h, xmn=-90, ymn=-180, xmx=90, ymx=180, crs = "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0")%>%
            t()%>%
            flip(direction='y')
    
    r[r<0] <- NA #negative values are fill values; set to NA
    
    return(r)
}
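To sanity-check the conversion on a single file, the dataset path can be set manually first (`attribute` is otherwise assigned inside the parallel loop below). This assumes the first file in `files` is a pre-2016 file, which uses the capitalized PRODUCT group name:
#example: rasterize and plot the first day in the time series
attribute <- "/HDFEOS/GRIDS/OMI UVB PRODUCT/Data Fields/ErythemalDailyDose"
plot(he_to_ras(files[1]), col = cols, main = "Erythemal Daily Dose, day 1 of time series")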
registerDoParallel(8)
#for every week in each year in the time series, calculate the weekly mean and standard deviation.
foreach (yr = yrs) %dopar%{
  
  #the HDF5 group name is capitalized differently in the 2016 files
  attribute = ifelse(yr == 2016,"/HDFEOS/GRIDS/OMI UVB Product/Data Fields/ErythemalDailyDose",
                "/HDFEOS/GRIDS/OMI UVB PRODUCT/Data Fields/ErythemalDailyDose")
  
    l <- files[substr(files,96,99)==yr] #files for this year (the year sits at characters 96-99 of the full path)
    
    if(yr == 2016){
      days <- c(seq(1,148,7),   #2 weeks in June are missing; account for that when assigning days
                seq(152,351,7))
    }
    #now grab weeks
    
    #week numbers corresponding to each entry of days
    if(yr == 2016){
      weeks <- c(1:22,25:52) #2 weeks in June are missing; drop those week numbers here
    }else{
      weeks <- 1:length(days)
    }
    
  for (j in 1:length(weeks)){
      week = weeks[j]
      if(yr == 2016 & j == 22){
        dys <- l[days[j]:(days[j]+3)] #week 22 of 2016 is truncated by the missing data
      }else{
        dys <- l[days[j]:(days[j]+6)] #get all files from the year for these 7 days
      }
      
      rasters   <- lapply(dys[!is.na(dys)],he_to_ras)  #rasterize each daily .he5 file, dropping missing days
    
      uv_week   <- stack(rasters)
  
      #calculate the mean
      week_mean    <-  calc(uv_week, fun=function(x){mean(x,na.rm=T)},
                         filename=paste0(file.path(dir_M),'/git-annex/globalprep/prs_uv/v2017/int/weekly_means/weekly_means_',yr,'_',week,'.tif'),
                         overwrite=T)
      
      
      week_sd      <-  calc(uv_week, fun=function(x){sd(x,na.rm=T)},
                         filename=paste0(file.path(dir_M),'/git-annex/globalprep/prs_uv/v2017/int/weekly_sd/weekly_sd_',yr,'_',week,'.tif'),
                         overwrite=T)
      
      week_mean_sd <-  overlay(week_mean,week_sd,fun=function(x,y){x+y},
                         filename=paste0(file.path(dir_M),'/git-annex/globalprep/prs_uv/v2017/int/weekly_mean_sd/weekly_mean_sd_',yr,'_',week,'.tif'),
                         overwrite=T)
    }
}
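As a quick sanity check, count the weekly rasters written per year; every year should have 52, except 2016 with 50 (the year sits at characters 14-17 of the weekly_means_YYYY_WW.tif filenames written above):
#count weekly mean files per year
means_files <- list.files(file.path(dir_M,'git-annex/globalprep/prs_uv/v2017/int/weekly_means'))
table(substr(means_files, 14, 17)) #expect 52 per year, 50 for 2016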
#get weekly climatologies across all years in the time series
names_weekly <- list.files(file.path(dir_M,'git-annex/globalprep/prs_uv/v2017/int/weekly_means'),full.names=T)
foreach(i = c(1:52)) %dopar% {
  
  #get all rasters for week i; the week number sits at the end of the filename
    if(i %in% c(1:9)){ #single-digit weeks need the trailing '.tif' so week 1 doesn't also match weeks 10-19, etc.
       w = names_weekly[(substr(names_weekly,87,91)==paste0(i,'.tif'))]%>%stack()
    }else{
      w  = names_weekly[(substr(names_weekly,87,88)==i)]%>%stack()
    }
    }
  
  #mean
  m  = calc(w,fun=function(x){mean(x,na.rm=T)},filename=paste0(file.path(dir_M),
           '/git-annex/globalprep/prs_uv/v2017/int/weekly_climatologies/mean_week_',i,'.tif'),overwrite=T)
  
  #sd
  sd = calc(w,fun=function(x){sd(x,na.rm=T)},progress='text',filename=paste0(file.path(dir_M),
           '/git-annex/globalprep/prs_uv/v2017/int/weekly_climatologies/sd_week_',i,'.tif'),overwrite=T)
  
  #mean plus sd
  m_sd = overlay(m,sd,fun=function(x,y){x+y},filename = paste0(file.path(dir_M),
           '/git-annex/globalprep/prs_uv/v2017/int/weekly_climatologies/mean_sd_week_',i,'.tif'),overwrite=T)
  
}

Compare each week in each year to the climatology for that week. The climatology is equal to the mean plus one standard deviation.
# second loop: calculate annual positive anomalies
foreach (i = c(2005:2016)) %dopar%{
  
  #2016 is missing 2 weeks in June, so skip those week numbers for that year
  wks <- if(i == 2016) c(1:22,25:52) else 1:52
  
  s = stack()
  for (j in wks){
    w_mean = raster(paste0(file.path(dir_M),'/git-annex/globalprep/prs_uv/v2017/int/weekly_means/weekly_means_',i,'_',j,'.tif')) #mean UV for week j of year i

    w_anom = raster(paste0(file.path(dir_M),'/git-annex/globalprep/prs_uv/v2017/int/weekly_climatologies/mean_sd_week_',j,'.tif')) #climatology (mean + sd) for week j
    
    count = overlay(w_mean,w_anom,fun=function(x,y){ifelse(x>y,1,0)},progress='text') #flag cells that exceed the climatology for that week
    
    s = stack(s,count)
  }
  
  #sum the weekly flags to get the total number of positive anomalies in year i
  year = calc(s,fun=function(x){sum(x,na.rm=T)},filename=paste0('int/annual_pos_anomalies_',i,'.tif'),overwrite=T)
}

Calculate the difference in the total number of anomalies in each 5-year window compared to the first 5 years (2005-2009).
l   <- list.files('int', full.names=T, pattern = "anomalies")

#reference period (2005-2009): total anomalies across the first five years
ref <- stack(l[1:5])%>%
        calc(., fun=function(x){sum(x, na.rm=T)})
plot(ref, col=cols, axes=F, main = "Total number of anomalies 2005-2009")
plot(land, add=T)

registerDoParallel(8)
foreach(i = c(2005:2012)) %dopar% { #i=2005
  yrs <- c(i:(i+4))
  
  s   <- stack(l[substr(l, 26, 29)%in%yrs])%>%
        sum(., na.rm=T)
  diff = s - ref #calculate difference between total number of anomalies in recent and historical (2005-2009) time periods
  projection(diff) <- "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"
  
  out = mask(diff,land,inverse=T, filename =paste0('int/annual_diff_ocean_',yrs[1], '_', yrs[5],'.tif'),overwrite=T)
}

out_files <- list.files('int', full.names=T, pattern = 'ocean')
resc_num  <- read.csv('~/github/ohiprep/globalprep/supplementary_information/v2016/reference_points_pressures.csv', stringsAsFactors=F)%>%
             filter(pressure == "Ultraviolet Radiation Anomalies")%>%
             .$ref_point%>%
             as.numeric()
registerDoParallel(5)
foreach(i = c(2005:2012)) %dopar% { #i=2005
  
 yrs <- c(i:(i+4))
 
  out_rescale = out_files[substr(out_files,23,26)==i]%>%
                 raster()%>%
                  projectRaster(crs=mollCRS,progress='text',over=T)%>%
                  calc(fun=function(x){ifelse(x>0,ifelse(x>resc_num,1,x/resc_num),0)})%>%
                  resample(ocean,method='ngb', filename=paste0(dir_M,'/git-annex/globalprep/prs_uv/v2017/output/uv_',
                                                                min(yrs),'_',max(yrs),'-2005_2009.tif'),overwrite=T)
}
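Before plotting, a quick range check on the rescaled outputs; every layer should fall within [0, 1]:
#min and max of each rescaled output layer
out_f <- list.files(file.path(dir_M,'git-annex/globalprep/prs_uv/v2017/output'), full.names=T)
sapply(out_f, function(f) cellStats(raster(f), stat='range'))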
out <- list.files(file.path(dir_M,'git-annex/globalprep/prs_uv/v2017/output'),full.names=T)[8]%>%raster()
plot(out, box=F, col=cols, axes=F, main = 'UV Pressure Layer \n OHI 2017')

# raster/zonal data
zones <- raster(file.path(dir_M, "git-annex/globalprep/spatial/d2014/data/rgn_mol_raster_1km/sp_mol_raster_1km.tif"))  # raster data
rgn_data <- read.csv(file.path(dir_M, "git-annex/globalprep/spatial/d2014/data/rgn_mol_raster_1km/regionData.csv"))    # data for sp_id's used in raster
# get raster data:
rasts <- list.files(file.path(dir_M,'git-annex/globalprep/prs_uv/v2017/output'))
pressure_stack <- stack()
for(raster in rasts){ # raster = "uv_2007_2011-2005-2009_rescaled_moll_1km.tif"
  tmp <- raster(file.path(dir_M,'git-annex/globalprep/prs_uv/v2017/output', raster))
  pressure_stack <- stack(pressure_stack, tmp)
}

library(animation)
saveGIF({
  for(i in 1:nlayers(pressure_stack)){
      #fix zlim so the color scale is consistent across all layers
      plot(pressure_stack[[i]], zlim = c(0,1),axes=F, col=cols,
           main=names(pressure_stack[[i]]))
      
  }
}, movie.name = 'uv.gif')

Note: this should be updated so that the output is just one .csv for all years (a sketch follows the per-year extraction below).
# extract data for each region:
regions_stats <- zonal(pressure_stack,  zones, fun="mean", na.rm=TRUE, progress="text")
regions_stats2 <- data.frame(regions_stats)
setdiff(regions_stats2$zone, rgn_data$ant_id) #zones in the raster that are missing from the region data
setdiff(rgn_data$ant_id, regions_stats2$zone) #regions in the region data that are missing from the raster
data <- merge(rgn_data, regions_stats, all.y=TRUE, by.x="rgn_id", by.y="zone") %>%
  gather("year", "pressure_score", starts_with("uv")) 
uv_data <- data %>%
  mutate(year = substring(year, 9, 12)) %>%
  mutate(year = as.numeric(year)) %>%
  mutate(year = year + 1) %>%
  filter(ant_typ == "eez") %>%
  dplyr::select(rgn_id, rgn_nam, year, pressure_score)
write.csv(uv_data, "int/uv.csv", row.names=FALSE)
## save toolbox data for different years/regions
# function to extract data more easily
uv_data <- read.csv("int/uv.csv")
saveData <- function(newYear){ # newYear = 2012
  
  criteria_year <- ~year == newYear
  
  uv <- uv_data %>%
    filter_(criteria_year) %>%
    dplyr::select(rgn_id, pressure_score) %>%
    arrange(rgn_id)
  
  write.csv(uv, sprintf('output/uv_%s.csv', newYear), row.names=FALSE)
}
### extract data 
for(newYear in (max(uv_data$year) - 4):(max(uv_data$year))){
  saveData(newYear)
}
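As noted above, the per-year files could eventually be replaced by a single long-format .csv covering all years. A minimal sketch ('output/uv_all_years.csv' is a placeholder filename):
#one .csv for all years (sketch; filename is a placeholder)
uv_all <- uv_data %>%
  dplyr::select(rgn_id, year, pressure_score) %>%
  arrange(rgn_id, year)
write.csv(uv_all, 'output/uv_all_years.csv', row.names=FALSE)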
### try visualizing the data using googleVis plot
library(googleVis)
plotData <- uv_data %>%
  dplyr::select(rgn_nam, year, pressure_score)
Motion=gvisMotionChart(plotData, 
                       idvar="rgn_nam", 
                       timevar="year")
plot(Motion)
print(Motion, file='uv.html')

Reference: Hovila, Jari, Antti Arola, and Johanna Tamminen (Oct ), OMI/Aura Surface UVB Irradiance and Erythemal Dose Daily L3 Global 1.0x1.0 deg Grid, Version 003, NASA Goddard Space Flight Center.