# Step 1: Load R Packages
### options(repos='http://cran.rstudio.com/')
#install.packages("astsa")
#install.packages('ggplot2')
#install.packages('forecast')
#install.packages('tseries')
#install.packages("data.table")
library(astsa)       # acf2() used in Step 5
library(forecast)    # tsclean(), ma(), seasadj(), auto.arima(), forecast(), tsdisplay()
library(tseries)     # adf.test() used in Step 4
library(zoo)
library(tseries)     # NOTE(review): duplicate -- tseries is already attached above
library(data.table)
library(dplyr)       # group_by(), summarize()
library(tidyr)
library(naniar)
library(stringr)     # str_pad() for zero-padding DATE
library(ggplot2)
library(DT)
library(lubridate)
library(ggpubr)
# Load the crash/speed dataset and derive speed and calendar columns.
# Data lives under FacilityBased/; per-facility-type outputs go under
# FacilityBased/<mytype>/.
mytype <- "RMD"
base_dir <- "/scratch/user/cma16/Task4_Deliverable2/OHprocess4/AllCrash/FacilityBased/"
setwd(base_dir)
load("./multi-lane_divided_OH_reduce_withCrash.rData")  # provides OH_mun_med
setwd(paste0(base_dir, mytype))
df_RMD <- OH_mun_med
# Speeds = distance / travel time, scaled by 3600 (per-second times to
# per-hour speeds; presumably DISTANCE is in miles so units are mph --
# TODO confirm against the data dictionary).
df_RMD$spd_av <- 3600 * df_RMD$DISTANCE / df_RMD$Travel_TIME_ALL_VEHICLES
df_RMD$spd_pv <- 3600 * df_RMD$DISTANCE / df_RMD$Travel_TIME_PASSENGER_VEHICLES
df_RMD$spd_ft <- 3600 * df_RMD$DISTANCE / df_RMD$Travel_TIME_FREIGHT_TRUCKS
### Remove outliers: speeds of 120+ are treated as implausible -> NA.
### (ifelse also leaves pre-existing NAs as NA.)
df_RMD$spd_av <- ifelse(df_RMD$spd_av < 120, df_RMD$spd_av, NA)
df_RMD$spd_pv <- ifelse(df_RMD$spd_pv < 120, df_RMD$spd_pv, NA)
df_RMD$spd_ft <- ifelse(df_RMD$spd_ft < 120, df_RMD$spd_ft, NA)
### Month, Day: DATE holds MDDYYYY/MMDDYYYY digits (e.g. 1012015);
### zero-pad to 8 chars so fixed positions give month/day/year.
### Fix: the original converted DATE with as.character() and then padded
### the raw DATE column again, making the conversion dead code -- pad the
### character form once instead.
df_RMD$date <- str_pad(as.character(df_RMD$DATE), 8, pad = "0")
df_RMD$Month <- substr(df_RMD$date, start = 1, stop = 2)
df_RMD$Day <- substr(df_RMD$date, start = 3, stop = 4)
df_RMD$Year <- substr(df_RMD$date, start = 5, stop = 8)
df_RMD$MonthDay <- paste0(df_RMD$Month, "_", df_RMD$Day)
head(df_RMD)
## TimeStamp TMC DATE EPOCH1h Travel_TIME_ALL_VEHICLES
## 1: 108N05217_0101_0 108N05217 1012015 0 NA
## 2: 108N05217_0101_1 108N05217 1012015 1 52
## 3: 108N05217_0101_10 108N05217 1012015 10 46
## 4: 108N05217_0101_11 108N05217 1012015 11 35
## 5: 108N05217_0101_12 108N05217 1012015 12 NA
## 6: 108N05217_0101_13 108N05217 1012015 13 NA
## Travel_TIME_PASSENGER_VEHICLES Travel_TIME_FREIGHT_TRUCKS ADMIN_LEVE
## 1: NA NA USA
## 2: NA 52 USA
## 3: NA 46 USA
## 4: 35 NA USA
## 5: NA NA USA
## 6: NA NA USA
## ADMIN_LE_1 ADMIN_LE_2 DISTANCE ROAD_NUMBE ROAD_NAME LATITUDE LONGITUDE
## 1: Ohio Ottawa 0.72476 OH-2 41.50337 -82.83927
## 2: Ohio Ottawa 0.72476 OH-2 41.50337 -82.83927
## 3: Ohio Ottawa 0.72476 OH-2 41.50337 -82.83927
## 4: Ohio Ottawa 0.72476 OH-2 41.50337 -82.83927
## 5: Ohio Ottawa 0.72476 OH-2 41.50337 -82.83927
## 6: Ohio Ottawa 0.72476 OH-2 41.50337 -82.83927
## ROAD_DIREC ORN_FID COUNTY divided SURF_TYP NHS_CDE HPMS ACCESS AADT_YR
## 1: Eastbound 21383.24 OTT D G N * F 12
## 2: Eastbound 21383.24 OTT D G N * F 12
## 3: Eastbound 21383.24 OTT D G N * F 12
## 4: Eastbound 21383.24 OTT D G N * F 12
## 5: Eastbound 21383.24 OTT D G N * F 12
## 6: Eastbound 21383.24 OTT D G N * F 12
## FED_FACI PK_LANES MED_TYPE FED_MEDW BEGMP ENDMP SEG_LNG cnty_rte
## 1: 2 NA 2.359726 47.44576 26.36 26.99 0.4302199 OTT0002R
## 2: 2 NA 2.359726 47.44576 26.36 26.99 0.4302199 OTT0002R
## 3: 2 NA 2.359726 47.44576 26.36 26.99 0.4302199 OTT0002R
## 4: 2 NA 2.359726 47.44576 26.36 26.99 0.4302199 OTT0002R
## 5: 2 NA 2.359726 47.44576 26.36 26.99 0.4302199 OTT0002R
## 6: 2 NA 2.359726 47.44576 26.36 26.99 0.4302199 OTT0002R
## rte_nbr aadt aadt_bc aadt_pt surf_wid no_lanes func_cls rodwycls Total
## 1: 0002R 17640 2340 15300 48 4 2 9 0
## 2: 0002R 17640 2340 15300 48 4 2 9 0
## 3: 0002R 17640 2340 15300 48 4 2 9 0
## 4: 0002R 17640 2340 15300 48 4 2 9 0
## 5: 0002R 17640 2340 15300 48 4 2 9 0
## 6: 0002R 17640 2340 15300 48 4 2 9 0
## K A B C O DAYMTH Crash spd_av spd_pv spd_ft date Month Day
## 1: 0 0 0 0 0 0101 0 NA NA NA 01012015 01 01
## 2: 0 0 0 0 0 0101 0 50.17569 NA 50.17569 01012015 01 01
## 3: 0 0 0 0 0 0101 0 56.72035 NA 56.72035 01012015 01 01
## 4: 0 0 0 0 0 0101 0 74.54674 74.54674 NA 01012015 01 01
## 5: 0 0 0 0 0 0101 0 NA NA NA 01012015 01 01
## 6: 0 0 0 0 0 0101 0 NA NA NA 01012015 01 01
## Year MonthDay
## 1: 2015 01_01
## 2: 2015 01_01
## 3: 2015 01_01
## 4: 2015 01_01
## 5: 2015 01_01
## 6: 2015 01_01
# Mean all-vehicle speed per calendar day (keyed "MM_DD"), pooling all
# TMC/hour records that share the same month-day.
# Fix: the original dropped column 1 (`df_RMD[,-c(1)]`) before grouping;
# that drop is dead work (only MonthDay and spd_av are used) and negative
# column indexing has different semantics for data.frame vs data.table,
# so it is removed.
day1 <- df_RMD %>%
  group_by(MonthDay) %>%
  summarize(Speed_All_Mean = mean(spd_av, na.rm = TRUE))
day1
## # A tibble: 365 x 2
## MonthDay Speed_All_Mean
## <chr> <dbl>
## 1 01_01 58.7
## 2 01_02 58.3
## 3 01_03 56.8
## 4 01_04 58.2
## 5 01_05 57.1
## 6 01_06 52.4
## 7 01_07 55.7
## 8 01_08 56.2
## 9 01_09 53.5
## 10 01_10 57.9
## # ... with 355 more rows
# Step 2: Examine Data ---------------------------------------------------
# Clean outliers/missing values out of the daily mean-speed series.
speed_clean <- tsclean(ts(day1$Speed_All_Mean))
plot.ts(speed_clean)
# ggplot() + geom_line(data = Q1, aes(x = TimeStamp, y = speed_clean)) + ylab('Cleaned Speed Records')
# Smooth the cleaned series with 7-day and 30-day moving averages.
day1$cnt_ma <- ma(speed_clean, order = 7)
day1$cnt_ma30 <- ma(speed_clean, order = 30)
# Step 3: Decompose Your Data --------------------------------------------
# Treat the cleaned daily series as having a 30-observation cycle and
# split it into seasonal / trend / remainder with STL, then remove the
# seasonal component.
count_ma <- ts(na.omit(speed_clean), frequency = 30)
decomp <- stl(count_ma, s.window = "periodic")
deseasonal_cnt <- seasadj(decomp)
plot(decomp)
# Step 4: Stationarity
# Augmented Dickey-Fuller tests: H0 = unit root (non-stationary); a small
# p-value rejects H0 in favor of stationarity.
adf.test(count_ma, alternative = "stationary")
##
## Augmented Dickey-Fuller Test
##
## data: count_ma
## Dickey-Fuller = -3.9428, Lag order = 7, p-value = 0.01213
## alternative hypothesis: stationary
# Same test on the seasonally adjusted series.
adf.test(deseasonal_cnt, alternative = "stationary")
##
## Augmented Dickey-Fuller Test
##
## data: deseasonal_cnt
## Dickey-Fuller = -3.8423, Lag order = 7, p-value = 0.01716
## alternative hypothesis: stationary
# First difference of the deseasonalized series; the warning below means
# the p-value hit adf.test's printed floor of 0.01.
d1 = diff(deseasonal_cnt)
adf.test(d1, alternative = "stationary")
## Warning in adf.test(d1, alternative = "stationary"): p-value smaller than
## printed p-value
##
## Augmented Dickey-Fuller Test
##
## data: d1
## Dickey-Fuller = -9.5137, Lag order = 7, p-value = 0.01
## alternative hypothesis: stationary
# Step 5: Autocorrelations and Choosing Model Order
# check ACF and PACF (acf2 plots and prints both). In the output below the
# ACF is elevated at roughly weekly lags (7, 14, 21, 28), suggesting
# residual weekly structure in the daily series.
acf2(count_ma)
## ACF PACF
## [1,] 0.60 0.60
## [2,] 0.32 -0.05
## [3,] 0.28 0.18
## [4,] 0.27 0.06
## [5,] 0.27 0.09
## [6,] 0.41 0.31
## [7,] 0.54 0.27
## [8,] 0.36 -0.15
## [9,] 0.18 -0.08
## [10,] 0.18 0.02
## [11,] 0.18 -0.01
## [12,] 0.23 0.11
## [13,] 0.44 0.28
## [14,] 0.58 0.22
## [15,] 0.42 -0.01
## [16,] 0.26 0.01
## [17,] 0.24 0.00
## [18,] 0.22 -0.05
## [19,] 0.24 0.00
## [20,] 0.40 0.06
## [21,] 0.52 0.14
## [22,] 0.42 0.10
## [23,] 0.22 -0.08
## [24,] 0.14 -0.09
## [25,] 0.12 -0.08
## [26,] 0.19 0.01
## [27,] 0.38 0.09
## [28,] 0.49 0.10
## [29,] 0.34 -0.09
## [30,] 0.16 -0.06
## [31,] 0.03 -0.25
## [32,] 0.04 -0.04
## [33,] 0.12 -0.04
## [34,] 0.24 -0.08
## [35,] 0.39 0.15
## [36,] 0.29 -0.02
## [37,] 0.11 -0.02
## [38,] 0.05 0.03
## [39,] 0.07 -0.04
## [40,] 0.16 0.01
## [41,] 0.24 -0.13
## [42,] 0.36 0.06
## [43,] 0.25 -0.10
## [44,] 0.07 0.00
## [45,] -0.04 -0.04
## [46,] -0.01 0.05
## [47,] 0.03 -0.05
## [48,] 0.14 0.02
## [49,] 0.29 0.06
## [50,] 0.20 -0.04
## [51,] 0.01 -0.01
## [52,] -0.06 0.00
## [53,] -0.05 -0.05
## [54,] 0.02 0.02
## [55,] 0.14 0.00
## [56,] 0.29 0.10
## [57,] 0.20 -0.02
## [58,] 0.00 0.06
## [59,] -0.08 0.00
## [60,] -0.05 0.03
## [61,] 0.00 0.02
## [62,] 0.12 -0.02
## [63,] 0.27 0.09
## [64,] 0.15 -0.04
## [65,] -0.01 0.04
## [66,] -0.08 0.00
## [67,] -0.05 0.06
## [68,] 0.00 -0.03
## [69,] 0.09 -0.02
## [70,] 0.22 0.03
## [71,] 0.12 -0.01
## [72,] -0.04 0.02
## [73,] -0.09 0.05
## [74,] -0.07 0.01
## [75,] -0.02 0.02
## [76,] 0.10 -0.04
## [77,] 0.23 0.01
## [78,] 0.15 -0.05
## [79,] 0.00 0.04
## [80,] -0.07 -0.01
## [81,] -0.07 0.04
## [82,] -0.02 -0.04
## [83,] 0.08 -0.01
## [84,] 0.21 -0.03
## [85,] 0.12 -0.05
## [86,] -0.02 0.02
## [87,] -0.11 -0.04
## [88,] -0.09 0.00
## [89,] -0.03 -0.02
## [90,] 0.08 -0.01
## [91,] 0.20 0.01
## [92,] 0.12 -0.07
## [93,] -0.03 -0.02
## [94,] -0.10 -0.01
## [95,] -0.06 0.02
## [96,] 0.01 0.03
## [97,] 0.09 0.00
## [98,] 0.20 0.03
## [99,] 0.12 -0.04
## [100,] -0.02 0.00
## [101,] -0.08 0.00
## [102,] -0.07 -0.03
## [103,] -0.01 -0.01
## [104,] 0.08 0.03
## [105,] 0.19 0.02
## [106,] 0.11 0.00
## [107,] -0.03 -0.02
## [108,] -0.10 0.01
## [109,] -0.09 -0.02
## [110,] -0.04 0.01
## [111,] 0.05 -0.02
## [112,] 0.14 -0.05
## [113,] 0.09 -0.02
## [114,] -0.04 0.00
## [115,] -0.12 -0.02
## [116,] -0.11 -0.01
## [117,] -0.05 -0.03
## [118,] 0.04 -0.01
## [119,] 0.14 -0.03
## [120,] 0.07 0.01
## ACF PACF
## [1,] 0.61 0.61
## [2,] 0.33 -0.07
## [3,] 0.29 0.18
## [4,] 0.29 0.08
## [5,] 0.28 0.08
## [6,] 0.42 0.33
## [7,] 0.56 0.27
## [8,] 0.38 -0.17
## [9,] 0.20 -0.06
## [10,] 0.18 0.00
## [11,] 0.19 -0.02
## [12,] 0.24 0.11
## [13,] 0.45 0.28
## [14,] 0.59 0.22
## [15,] 0.44 -0.01
## [16,] 0.27 0.01
## [17,] 0.25 0.01
## [18,] 0.23 -0.06
## [19,] 0.25 -0.01
## [20,] 0.41 0.07
## [21,] 0.55 0.16
## [22,] 0.43 0.09
## [23,] 0.23 -0.07
## [24,] 0.15 -0.09
## [25,] 0.12 -0.10
## [26,] 0.19 0.00
## [27,] 0.37 0.07
## [28,] 0.50 0.10
## [29,] 0.34 -0.09
## [30,] 0.15 -0.10
## [31,] 0.02 -0.24
## [32,] 0.04 -0.05
## [33,] 0.12 -0.03
## [34,] 0.25 -0.07
## [35,] 0.40 0.14
## [36,] 0.29 -0.04
## [37,] 0.11 -0.01
## [38,] 0.05 0.04
## [39,] 0.08 -0.04
## [40,] 0.16 0.03
## [41,] 0.25 -0.13
## [42,] 0.37 0.04
## [43,] 0.25 -0.09
## [44,] 0.07 0.01
## [45,] -0.04 -0.04
## [46,] -0.01 0.06
## [47,] 0.03 -0.04
## [48,] 0.14 0.02
## [49,] 0.29 0.07
## [50,] 0.21 -0.03
## [51,] 0.02 0.00
## [52,] -0.06 0.02
## [53,] -0.05 -0.03
## [54,] 0.02 0.03
## [55,] 0.15 -0.01
## [56,] 0.30 0.10
## [57,] 0.20 -0.03
## [58,] 0.01 0.06
## [59,] -0.09 -0.01
## [60,] -0.07 0.00
## [61,] -0.01 0.00
## [62,] 0.11 -0.03
## [63,] 0.27 0.10
## [64,] 0.16 -0.04
## [65,] -0.02 0.03
## [66,] -0.10 0.01
## [67,] -0.05 0.06
## [68,] 0.00 -0.01
## [69,] 0.09 -0.02
## [70,] 0.23 0.03
## [71,] 0.13 -0.01
## [72,] -0.04 0.00
## [73,] -0.10 0.06
## [74,] -0.07 0.01
## [75,] -0.02 0.02
## [76,] 0.10 -0.03
## [77,] 0.24 0.01
## [78,] 0.15 -0.06
## [79,] -0.01 0.04
## [80,] -0.07 0.00
## [81,] -0.06 0.04
## [82,] -0.02 -0.03
## [83,] 0.09 0.00
## [84,] 0.23 -0.03
## [85,] 0.13 -0.05
## [86,] -0.02 0.03
## [87,] -0.11 -0.04
## [88,] -0.09 0.01
## [89,] -0.03 -0.03
## [90,] 0.07 -0.05
## [91,] 0.20 0.01
## [92,] 0.11 -0.09
## [93,] -0.04 -0.01
## [94,] -0.10 0.00
## [95,] -0.07 0.00
## [96,] 0.00 0.03
## [97,] 0.09 0.00
## [98,] 0.22 0.03
## [99,] 0.12 -0.03
## [100,] -0.02 0.00
## [101,] -0.08 0.00
## [102,] -0.08 -0.05
## [103,] -0.01 0.01
## [104,] 0.08 0.03
## [105,] 0.19 0.02
## [106,] 0.11 0.02
## [107,] -0.03 -0.02
## [108,] -0.10 0.01
## [109,] -0.10 -0.02
## [110,] -0.04 0.02
## [111,] 0.06 -0.01
## [112,] 0.14 -0.06
## [113,] 0.09 0.02
## [114,] -0.04 0.00
## [115,] -0.12 -0.03
## [116,] -0.11 0.01
## [117,] -0.05 -0.04
## [118,] 0.05 0.02
## [119,] 0.13 -0.04
## [120,] 0.06 -0.02
# Seasonality Not in Consideration
# Step 6: Fitting an ARIMA model (seasonal terms disabled) ---------------
# Fix: the original ran the identical auto.arima() search twice (once to
# print, once to build `fit`), doubling the model-search cost. Fit once
# and reuse; a bare `fit` at top level auto-prints the same summary.
fit <- auto.arima(deseasonal_cnt, seasonal=FALSE)
fit
## Series: deseasonal_cnt
## ARIMA(2,1,1)
##
## Coefficients:
## ar1 ar2 ma1
## 0.4660 -0.2462 -0.9196
## s.e. 0.0537 0.0529 0.0219
##
## sigma^2 estimated as 0.3043: log likelihood=-299.27
## AIC=606.55 AICc=606.66 BIC=622.14
# Step 7: Evaluate and Iterate -- inspect residual ACF/PACF for leftover
# structure the model did not capture.
tsdisplay(residuals(fit), lag.max=45, main='Model Residuals [Seasonality not considered]')
# Step 8: Forecast 30 steps (days) ahead from the fitted model.
fcast <- forecast(fit, h=30)
plot(fcast)
# Seasonality in Consideration
# Step 6: Fitting an ARIMA model (seasonal terms enabled) ----------------
# Fix: as in the non-seasonal pass, the original ran the same
# auto.arima() search twice. Fit once and reuse; `fit` auto-prints.
fit <- auto.arima(deseasonal_cnt, seasonal=TRUE)
fit
## Series: deseasonal_cnt
## ARIMA(2,1,5)(2,0,0)[30] with drift
##
## Coefficients:
## ar1 ar2 ma1 ma2 ma3 ma4 ma5 sar1
## -0.4205 -0.9704 -0.0699 0.4321 -0.6598 -0.3054 -0.1752 0.0342
## s.e. 0.0178 0.0242 0.0614 0.0570 0.0446 0.0669 0.0664 0.0686
## sar2 drift
## -0.3722 0.0029
## s.e. 0.0671 0.0020
##
## sigma^2 estimated as 0.2562: log likelihood=-269.17
## AIC=560.35 AICc=561.1 BIC=603.22
# Step 7: Evaluate and Iterate -- inspect residual ACF/PACF for leftover
# structure the model did not capture.
tsdisplay(residuals(fit), lag.max=45, main='Model Residuals [Seasonality considered]')
# Step 8: Forecast 30 steps (days) ahead from the fitted model.
fcast <- forecast(fit, h=30)
plot(fcast)