diff --git a/.Rbuildignore b/.Rbuildignore index dd952bc..35629e1 100644 --- a/.Rbuildignore +++ b/.Rbuildignore @@ -10,3 +10,6 @@ ^Meta$ ^README\.Rmd$ ^vignettes/calculate_speed_delay_cache$ +^vignettes/data_cache/$ +^vignettes/data_cache$ +^vignettes/compare_congestion_metrics_cache$ diff --git a/.github/workflows/R-CMD-check.yaml b/.github/workflows/R-CMD-check.yaml index 9e29be3..c8347f5 100644 --- a/.github/workflows/R-CMD-check.yaml +++ b/.github/workflows/R-CMD-check.yaml @@ -1,10 +1,10 @@ +# Workflow derived from https://github.com/r-lib/actions/tree/v2/examples +# Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help on: push: - branches: - - main + branches: [main, master] pull_request: - branches: - - main + branches: [main, master] name: R-CMD-check @@ -18,61 +18,29 @@ jobs: fail-fast: false matrix: config: + - {os: macos-latest, r: 'release'} - {os: windows-latest, r: 'release'} - - {os: macOS-latest, r: 'release'} - - {os: ubuntu-latest, r: 'release', container: rocker/geospatial, rspm: "https://packagemanager.rstudio.com/cran/__linux__/xenial/latest"} env: - R_REMOTES_NO_ERRORS_FROM_WARNINGS: true + GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} + R_KEEP_PKG_SOURCE: yes steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - - uses: r-lib/actions/setup-r@master + - uses: r-lib/actions/setup-pandoc@v2 + + - uses: r-lib/actions/setup-r@v2 with: r-version: ${{ matrix.config.r }} + http-user-agent: ${{ matrix.config.http-user-agent }} + use-public-rspm: true - - uses: r-lib/actions/setup-pandoc@master - - - name: Query dependencies - run: | - install.packages('remotes') - saveRDS(remotes::dev_package_deps(dependencies = TRUE), ".github/depends.Rds", version = 2) - writeLines(sprintf("R-%i.%i", getRversion()$major, getRversion()$minor), ".github/R-version") - shell: Rscript {0} - - - name: Cache R packages - if: runner.os != 'Windows' - uses: actions/cache@v1 + - uses: r-lib/actions/setup-r-dependencies@v2 with: - path: ${{ env.R_LIBS_USER }} - key: ${{ runner.os }}-${{ hashFiles('.github/R-version') }}-1-${{ hashFiles('.github/depends.Rds') }} - restore-keys: ${{ runner.os }}-${{ hashFiles('.github/R-version') }}-1- - - - name: Install system dependencies - if: runner.os == 'Linux' - env: - RHUB_PLATFORM: linux-x86_64-ubuntu-gcc - run: | - Rscript -e "remotes::install_github('r-hub/sysreqs')" - sysreqs=$(Rscript -e "cat(sysreqs::sysreq_commands('DESCRIPTION'))") - sudo -s eval "$sysreqs" - - - name: Install dependencies - run: | - remotes::install_deps(dependencies = TRUE) - remotes::install_cran("rcmdcheck") - shell: Rscript {0} - - - name: Check - env: - _R_CHECK_CRAN_INCOMING_REMOTE_: false - run: rcmdcheck::rcmdcheck(args = c("--no-manual", "--as-cran"), error_on = "error", check_dir = "check") - shell: Rscript {0} + extra-packages: any::rcmdcheck + needs: check - - name: Upload check results - if: failure() - uses: actions/upload-artifact@main + - uses: r-lib/actions/check-r-package@v2 with: - name: ${{ runner.os }}-r${{ matrix.config.r }}-results - path: check + upload-snapshots: true diff --git a/.github/workflows/pkgdown.yaml b/.github/workflows/pkgdown.yaml index d1a1350..087f0b0 100644 --- a/.github/workflows/pkgdown.yaml +++ b/.github/workflows/pkgdown.yaml @@ -1,46 +1,46 @@ +# Workflow derived from https://github.com/r-lib/actions/tree/v2/examples +# Need help debugging build failures? 
Start at https://github.com/r-lib/actions#where-to-find-help on: push: - branches: main + branches: [main, master] + pull_request: + branches: [main, master] + release: + types: [published] + workflow_dispatch: name: pkgdown jobs: pkgdown: - runs-on: macOS-latest + runs-on: ubuntu-latest + # Only restrict concurrency for non-PR jobs + concurrency: + group: pkgdown-${{ github.event_name != 'pull_request' || github.run_id }} env: GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - - uses: r-lib/actions/setup-r@master + - uses: r-lib/actions/setup-pandoc@v2 - - uses: r-lib/actions/setup-pandoc@master - - - name: Query dependencies - run: | - install.packages('remotes') - saveRDS(remotes::dev_package_deps(dependencies = TRUE), ".github/depends.Rds", version = 2) - writeLines(sprintf("R-%i.%i", getRversion()$major, getRversion()$minor), ".github/R-version") - shell: Rscript {0} + - uses: r-lib/actions/setup-r@v2 + with: + use-public-rspm: true - - name: Cache R packages - uses: actions/cache@v1 + - uses: r-lib/actions/setup-r-dependencies@v2 with: - path: ${{ env.R_LIBS_USER }} - key: ${{ runner.os }}-${{ hashFiles('.github/R-version') }}-1-${{ hashFiles('.github/depends.Rds') }} - restore-keys: ${{ runner.os }}-${{ hashFiles('.github/R-version') }}-1- + extra-packages: any::pkgdown, local::. + needs: website - - name: Install dependencies - run: | - remotes::install_deps(dependencies = TRUE) - install.packages("pkgdown") + - name: Build site + run: pkgdown::build_site_github_pages(new_process = FALSE, install = FALSE) shell: Rscript {0} - - name: Install package - run: R CMD INSTALL . - - - name: Deploy package - run: | - git config --local user.email "actions@github.com" - git config --local user.name "GitHub Actions" - Rscript -e 'pkgdown::deploy_to_branch(new_process = FALSE)' + - name: Deploy to GitHub pages 🚀 + if: github.event_name != 'pull_request' + uses: JamesIves/github-pages-deploy-action@v4.4.1 + with: + clean: false + branch: gh-pages + folder: docs diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md deleted file mode 100644 index b36903f..0000000 --- a/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,128 +0,0 @@ -# Contributor Covenant Code of Conduct - -## Our Pledge - -We as members, contributors, and leaders pledge to make participation in our -community a harassment-free experience for everyone, regardless of age, body -size, visible or invisible disability, ethnicity, sex characteristics, gender -identity and expression, level of experience, education, socio-economic status, -nationality, personal appearance, race, religion, or sexual identity and -orientation. - -We pledge to act and interact in ways that contribute to an open, welcoming, -diverse, inclusive, and healthy community. 
- -## Our Standards - -Examples of behavior that contributes to a positive environment for our -community include: - -* Demonstrating empathy and kindness toward other people -* Being respectful of differing opinions, viewpoints, and experiences -* Giving and gracefully accepting constructive feedback -* Accepting responsibility and apologizing to those affected by our mistakes, -and learning from the experience -* Focusing on what is best not just for us as individuals, but for the overall -community - -Examples of unacceptable behavior include: - -* The use of sexualized language or imagery, and sexual attention or -advances of any kind -* Trolling, insulting or derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or email -address, without their explicit permission -* Other conduct which could reasonably be considered inappropriate in a -professional setting - -## Enforcement Responsibilities - -Community leaders are responsible for clarifying and enforcing our standards -of acceptable behavior and will take appropriate and fair corrective action in -response to any behavior that they deem inappropriate, threatening, offensive, -or harmful. - -Community leaders have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions that are -not aligned to this Code of Conduct, and will communicate reasons for moderation -decisions when appropriate. - -## Scope - -This Code of Conduct applies within all community spaces, and also applies -when an individual is officially representing the community in public spaces. -Examples of representing our community include using an official e-mail -address, posting via an official social media account, or acting as an appointed -representative at an online or offline event. - -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported to the community leaders responsible for enforcement at [INSERT CONTACT -METHOD]. All complaints will be reviewed and investigated promptly and fairly. - -All community leaders are obligated to respect the privacy and security of the -reporter of any incident. - -## Enforcement Guidelines - -Community leaders will follow these Community Impact Guidelines in determining -the consequences for any action they deem in violation of this Code of Conduct: - -### 1. Correction - -**Community Impact**: Use of inappropriate language or other behavior deemed -unprofessional or unwelcome in the community. - -**Consequence**: A private, written warning from community leaders, providing -clarity around the nature of the violation and an explanation of why the -behavior was inappropriate. A public apology may be requested. - -### 2. Warning - -**Community Impact**: A violation through a single incident or series of -actions. - -**Consequence**: A warning with consequences for continued behavior. No -interaction with the people involved, including unsolicited interaction with -those enforcing the Code of Conduct, for a specified period of time. This -includes avoiding interactions in community spaces as well as external channels -like social media. Violating these terms may lead to a temporary or permanent -ban. - -### 3. Temporary Ban - -**Community Impact**: A serious violation of community standards, including -sustained inappropriate behavior. 
- -**Consequence**: A temporary ban from any sort of interaction or public -communication with the community for a specified period of time. No public or -private interaction with the people involved, including unsolicited interaction -with those enforcing the Code of Conduct, is allowed during this period. -Violating these terms may lead to a permanent ban. - -### 4. Permanent Ban - -**Community Impact**: Demonstrating a pattern of violation of community -standards, including sustained inappropriate behavior, harassment of an -individual, or aggression toward or disparagement of classes of individuals. - -**Consequence**: A permanent ban from any sort of public interaction within the -community. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], -version 2.0, -available at https://www.contributor-covenant.org/version/2/0/ -code_of_conduct.html. - -Community Impact Guidelines were inspired by [Mozilla's code of conduct -enforcement ladder](https://github.com/mozilla/diversity). - -[homepage]: https://www.contributor-covenant.org - -For answers to common questions about this code of conduct, see the FAQ at -https://www.contributor-covenant.org/faq. Translations are available at https:// -www.contributor-covenant.org/translations. diff --git a/DESCRIPTION b/DESCRIPTION index 71c23d3..56f31ea 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,11 +1,12 @@ Type: Package Package: tc.sensors Title: Retrieve Loop Detector Data from the MnDOT JSON Feed -Version: 0.2.0.9002 -Date: 2021-12-14 +Version: 0.2.1 +Date: 2022-10-26 Authors@R: c( person("Metropolitan Council", role = "cph"), - person("Liz", "Roten", , "liz.roten@metc.state.mn.us", role = c("cre", "aut")), + person("Liz", "Roten", , "liz.roten@metc.state.mn.us", role = c("cre", "aut"), + comment = c(ORCID = "0000-0002-5346-3549")), person("Nicole", "Sullivan", , "nicole.sullivan@metc.state.mn.us", role = "aut"), person("Ashley", "Asmus", , "ashley.asmus@metc.state.mn.us", role = "ctb", comment = c(ORCID = "0000-0001-5505-1372")) @@ -18,12 +19,12 @@ Description: Process data collected from Minnesota Department of License: MIT + file LICENSE BugReports: https://github.com/Metropolitan-Council/tc.sensors/issues Imports: + cli (>= 3.3.0), curl (>= 4.3), data.table (>= 1.12.8), dplyr (>= 1.0.0), geosphere (>= 1.5.10), jsonlite (>= 1.6.1), - lwgeom (>= 0.2.5), magrittr (>= 1.5), purrr (>= 0.3.4), rlang (>= 0.4.6), @@ -35,8 +36,6 @@ Imports: xml2 (>= 1.3.2) Suggests: cowplot (>= 1.0.0), - doParallel (>= 1.0.15), - foreach (>= 1.5.0), furrr (>= 0.1.0), ggplot2 (>= 3.3.2), knitr (>= 1.28), @@ -52,4 +51,4 @@ VignetteBuilder: Encoding: UTF-8 LazyData: true Roxygen: list(markdown = TRUE, roclets = c("rd", "namespace", "collate")) -RoxygenNote: 7.1.2 +RoxygenNote: 7.2.1 diff --git a/LICENSE.md b/LICENSE.md index 5f32998..681e5e0 100644 --- a/LICENSE.md +++ b/LICENSE.md @@ -1,6 +1,6 @@ # MIT License -Copyright (c) 2021 Metropolitan Council +Copyright (c) 2022 Metropolitan Council Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/NAMESPACE b/NAMESPACE index 5142ef4..6aff0ae 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -15,7 +15,18 @@ export(pull_sensor_ids) export(replace_impossible) export(scrub_sensor) import(data.table) -importFrom(curl,has_internet) +importFrom(cli,cli_abort) +importFrom(cli,cli_alert) +importFrom(curl,nslookup) +importFrom(data.table,":=") +importFrom(data.table,.BY) 
+importFrom(data.table,.EACHI) +importFrom(data.table,.GRP) +importFrom(data.table,.I) +importFrom(data.table,.N) +importFrom(data.table,.NGRP) +importFrom(data.table,.SD) +importFrom(data.table,data.table) importFrom(data.table,fwrite) importFrom(dplyr,bind_cols) importFrom(dplyr,bind_rows) diff --git a/R/add_distance.R b/R/add_distance.R index 73690c8..3ac2163 100644 --- a/R/add_distance.R +++ b/R/add_distance.R @@ -50,18 +50,18 @@ add_distance <- function(config, # input checks --------------------------------------------------------------- if (nrow(.config) < 1) { - stop("There must be more than one sensor in the configuration") + cli::cli_abort("There must be more than one sensor in the configuration") } if (min(.config[, .(n = .N), keyby = .(corridor_route)][, n]) < 2) { - stop("There must be at least two sensors for each corridor in the configuration") + cli::cli_abort("There must be at least two sensors for each corridor in the configuration") } # Select stations (want distance between stations, not between ramps etc.) config_stations <- as.data.table(.config)[r_node_n_type == "Station", ] if (nrow(config_stations) == 0) { - stop("There must be station node types in the configuration") + cli::cli_abort("There must be station node types in the configuration") } diff --git a/R/add_weather.R b/R/add_weather.R index 20573c2..b300d12 100644 --- a/R/add_weather.R +++ b/R/add_weather.R @@ -26,12 +26,12 @@ #' @export #' #' @import data.table -#' @importFrom curl has_internet +#' @importFrom curl nslookup #' @importFrom tis day #' @importFrom utils read.csv +#' @importFrom cli cli_abort cli_alert #' #' @examples -#' #' \dontrun{ #' #' library(tc.sensors) @@ -59,12 +59,12 @@ add_weather <- function(sensor_data, interval_length = 1, station = "MSP", time_zone = "America%2FChicago") { - if (curl::has_internet() == FALSE) { - stop("You must be connected to the internet to access weather data") + if (curl::nslookup("metrocouncil.org") == FALSE) { + cli::cli_abort("You must be connected to the internet to access weather data") } if (interval_length < 1) { - stop("Choose an interval length of at least one hour") + cli::cli_abort("Choose an interval length of at least one hour") } min_date <- min(sensor_data$date) @@ -99,7 +99,7 @@ add_weather <- function(sensor_data, # meta <- str_c("https://mesonet.agron.iastate.edu/geojson/network/", network, ".geojson", sep="") # jdict <- fromJSON(url(meta)) - message(paste("Downloading:", station, "for", min_date, "to", max_date - 1, sep = " ")) + cli::cli_alert(paste("Downloading:", station, "for", min_date, "to", max_date - 1, sep = " ")) data <- data.table::data.table(utils::read.csv(request, na.strings = "null")) if (save_raw == TRUE) { diff --git a/R/aggregate_sensor.R b/R/aggregate_sensor.R index 6744719..0cb4c72 100644 --- a/R/aggregate_sensor.R +++ b/R/aggregate_sensor.R @@ -54,6 +54,7 @@ #' @export #' #' @import data.table +#' @importFrom cli cli_abort #' #' @examples #' \dontrun{ @@ -82,24 +83,24 @@ aggregate_sensor <- function(sensor_data, config, interval_length, occupancy_pct_threshold = 0.0020) { # input checks --------------------------------------------------------------- if (is.na(interval_length)) { - stop("No aggregation to do!") + cli::cli_abort("No aggregation to do!") } if (interval_length > 24) { - stop("Interval cannot exceed 24 hours.") + cli::cli_abort("Interval cannot exceed 24 hours.") if (length(unique(sensor_data$date)) <= 1) { - stop("For intervals greater than 24 hours, you must have data for more than one date") + 
cli::cli_abort("For intervals greater than 24 hours, you must have data for more than one date") } } if (nrow(sensor_data) != 2880 * length(unique(sensor_data$date))) { - stop("For multiple dates, you must have at least 2,880 rows for each date you want covered.") + cli::cli_abort("For multiple dates, you must have at least 2,880 rows for each date you want covered.") } if (length(unique(sensor_data$sensor)) > 1) { - stop("More than one sensor is in this dataset.") + cli::cli_abort("More than one sensor is in this dataset.") } # format data ---------------------------------------------------------------- diff --git a/R/pull_sensor.R b/R/pull_sensor.R index c109e38..766387d 100644 --- a/R/pull_sensor.R +++ b/R/pull_sensor.R @@ -1,8 +1,8 @@ #' @title Pull sensor volume and occupancy #' -#' -#' @description Create a tidy data frame, containing volume and occupancy, for a single date and sensor. -#' Use \code{\link{pull_sensor_ids}} to obtain metro sensor IDs. +#' @description Create a tidy data frame, containing volume and occupancy, +#' for a single date and sensor. +#' Use \code{\link{pull_sensor_ids}} to obtain metro sensor IDs. #' #' @param pull_date character, the date of data to pull. #' Needs to by in "YYYY-MM-DD" format. @@ -61,6 +61,7 @@ #' @importFrom jsonlite fromJSON #' @importFrom dplyr bind_cols rename #' @importFrom rlang .data +#' @importFrom cli cli_alert #' #' @family loop sensor functions #' @@ -83,7 +84,7 @@ pull_sensor <- function(sensor, pull_date, if (nrow(loop_date_sensor) == 1) { if (fill_gaps == TRUE) { if (.quiet == FALSE) { - message("Filling gaps...") + cli::cli_alert("Filling gaps...") } loop_date_sensor <- data.table::as.data.table( diff --git a/R/pull_sensor_ids.R b/R/pull_sensor_ids.R index ea717dd..8efd8fc 100644 --- a/R/pull_sensor_ids.R +++ b/R/pull_sensor_ids.R @@ -1,6 +1,6 @@ #' @title Function to pull all sensor IDs in the Twin Cities metro #' -#' Create a tidy dataframe containing sensor IDs for MnDOT metro district, mainly to be used with pull_sensor +#' @description Create a tidy dataframe containing sensor IDs for MnDOT metro district, mainly to be used with pull_sensor #' #' @inheritParams pull_configuration #' @return dataframe containing variable "detector" diff --git a/R/scrub.R b/R/scrub.R index 1ed0dd0..e05f4f9 100644 --- a/R/scrub.R +++ b/R/scrub.R @@ -6,6 +6,7 @@ #' @export #' scrub_sensor <- function(sensor_data, interval_length = NA) { + sensor_data[!duplicated(sensor_data, by = c("date", "hour", "min", "sensor"), fromLast = TRUE)] } @@ -19,6 +20,7 @@ scrub_sensor <- function(sensor_data, interval_length = NA) { #' @export #' #' @import data.table +#' @importFrom cli cli_abort #' #' @details #' ## Criteria @@ -36,18 +38,18 @@ scrub_sensor <- function(sensor_data, interval_length = NA) { replace_impossible <- function(sensor_data, interval_length = NA) { if (length(unique(sensor_data$sensor)) > 1) { - stop("More than one sensor is in this dataset.") + cli::cli_abort("More than one sensor is in this dataset.") } if (is.na(interval_length)) { if (nrow(sensor_data) != 2880 * length(unique(sensor_data$date))) { - stop("For multiple dates, you must have at least 2,880 rows for each date you want covered.") + cli::cli_abort("For multiple dates, you must have at least 2,880 rows for each date you want covered.") } sensor_data[, volume := ifelse(volume >= 20, NA, volume)][, occupancy := ifelse(occupancy >= 1800, NA, occupancy)] } else { if (interval_length > 24) { - stop("Interval cannot exceed 24 hours.") + cli::cli_abort("Interval cannot exceed 24 
hours.") } sensor_data[, volume.sum := ifelse(volume.sum >= (interval_length * 2300), NA, volume.sum)][, occupancy.sum := ifelse(occupancy.sum >= (interval_length * 216000), NA, occupancy.sum)][, volume.sum := ifelse(volume.pct.null >= 10, NA, volume.sum)][, occupancy.sum := ifelse(occupancy.pct.null >= 10, NA, occupancy.sum)][, speed := ifelse(is.na(volume.sum), NA, speed)][, speed := ifelse(is.na(occupancy.sum), NA, speed)] diff --git a/R/tc.sensors-package.R b/R/tc.sensors-package.R new file mode 100644 index 0000000..0a029b6 --- /dev/null +++ b/R/tc.sensors-package.R @@ -0,0 +1,15 @@ +#' @keywords internal +"_PACKAGE" + +## usethis namespace: start +#' @importFrom data.table := +#' @importFrom data.table .BY +#' @importFrom data.table .EACHI +#' @importFrom data.table .GRP +#' @importFrom data.table .I +#' @importFrom data.table .N +#' @importFrom data.table .NGRP +#' @importFrom data.table .SD +#' @importFrom data.table data.table +## usethis namespace: end +NULL diff --git a/R/utils-pipe.R b/R/utils-pipe.R index e79f3d8..fd0b1d1 100644 --- a/R/utils-pipe.R +++ b/R/utils-pipe.R @@ -8,4 +8,7 @@ #' @export #' @importFrom magrittr %>% #' @usage lhs \%>\% rhs +#' @param lhs A value or the magrittr placeholder. +#' @param rhs A function call using the magrittr semantics. +#' @return The result of calling `rhs(lhs)`. NULL diff --git a/README.Rmd b/README.Rmd index d6d093a..24ce9c0 100644 --- a/README.Rmd +++ b/README.Rmd @@ -54,9 +54,4 @@ Definitions come from MnDOT Data Extract [documentation](http://data.dot.state.m [@ashleyasmus](https://github.com/ashleyasmus), [@eroten](https://github.com/eroten), and [@sullivannicole](https://github.com/sullivannicole). -## Code of Conduct - -Please note that the `{tc.sensors}` project is released with a [Contributor Code of Conduct](https://contributor-covenant.org/version/2/0/CODE_OF_CONDUCT.html). By contributing to this project, you agree to abide by its terms. - -
diff --git a/README.md b/README.md index eb34fff..7d7f786 100644 --- a/README.md +++ b/README.md @@ -41,34 +41,33 @@ speeds, delay, and VMT from the resulting files. Definitions come from MnDOT Data Extract [documentation](http://data.dot.state.mn.us/datatools/dataextract.html) - - **Volume** The number of vehicles that pass through a detector in a +- **Volume** The number of vehicles that pass through a detector in a given time period. - - **Occupancy** The percentage of time a detector’s field is occupied +- **Occupancy** The percentage of time a detector’s field is occupied by a vehicle. - - **Flow** The number of vehicles that pass through a detector per +- **Flow** The number of vehicles that pass through a detector per hour (`Volume * Samples per Hour`). - - **Headway** The number of seconds between each vehicle +- **Headway** The number of seconds between each vehicle (`Seconds_per_Hour / Flow`). - - **Density** The number of vehicles per mile (`Flow / Speed`). See +- **Density** The number of vehicles per mile (`Flow / Speed`). See [full calculation method](http://data.dot.state.mn.us/datatools/Density.html) for additional context. - - **Speed** The average speed of the vehicles that pass in a sampling +- **Speed** The average speed of the vehicles that pass in a sampling period (`Flow / Density`). - - **Lost/Spare Capacity** The average flow that a roadway is losing, +- **Lost/Spare Capacity** The average flow that a roadway is losing, either due to low traffic or high congestion, throughout the sampling period. - - - `Flow > 1800: 0` - - `Density > 43: Spare Capacity: Flow - 1800` - - `Density <= 43: Lost Capacity: 1800 - Flow` + - `Flow > 1800: 0` + - `Density > 43: Spare Capacity: Flow - 1800` + - `Density <= 43: Lost Capacity: 1800 - Flow` ## Associated repositories and projects - - **[loop-sensor-trends](https://github.com/Metropolitan-Council/loop-sensor-trends)** +- **[loop-sensor-trends](https://github.com/Metropolitan-Council/loop-sensor-trends)** Data analysis and interactive R Shiny app for examining changes in regional traffic levels in response to the COVID-19 pandemic. - - **[Twin-Cities-Loop-Detectors](https://github.com/sullivannicole/Twin-Cities-Loop-Detectors)** +- **[Twin-Cities-Loop-Detectors](https://github.com/sullivannicole/Twin-Cities-Loop-Detectors)** A pre-cursor to `{tc.sensors}`. Contains extensive documentation and code samples that will be integrated into this package. @@ -80,13 +79,6 @@ Definitions come from MnDOT Data Extract [@eroten](https://github.com/eroten), and [@sullivannicole](https://github.com/sullivannicole). -## Code of Conduct - -Please note that the `{tc.sensors}` project is released with a -[Contributor Code of -Conduct](https://contributor-covenant.org/version/2/0/CODE_OF_CONDUCT.html). -By contributing to this project, you agree to abide by its terms. -
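The derived measures defined in the README hunk above all follow from raw volume and occupancy. A minimal sketch of those formulas in R, for orientation only — the variable names are hypothetical and this is not the package's implementation (`aggregate_sensor()` handles this internally); it assumes 30-second observations, i.e. 120 samples per hour:

```r
# Sketch of the README definitions; illustrative only, not package code.
volume <- 8  # vehicles observed in one 30-second period (hypothetical)
speed  <- 55 # average speed in mph (hypothetical)

samples_per_hour <- 3600 / 30        # 120 thirty-second samples per hour
flow    <- volume * samples_per_hour # vehicles per hour
headway <- 3600 / flow               # seconds between vehicles
density <- flow / speed              # vehicles per mile

# Lost/spare capacity relative to the 1,800 vehicles-per-hour benchmark,
# following the conditions listed in the README
lost_spare <- if (flow > 1800) {
  0
} else if (density > 43) {
  flow - 1800
} else {
  1800 - flow
}
```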
diff --git a/man/add_distance.Rd b/man/add_distance.Rd index 1da6642..2bdcf35 100644 --- a/man/add_distance.Rd +++ b/man/add_distance.Rd @@ -27,7 +27,9 @@ Find the distance between all sensors based on corridor and direction. Essential It is best to call this function on the entire sensor configuration data table; you can access the table with \code{pull_configuration()}. Non-station node types will be -\subsection{Interpolation}{\preformatted{Where upstream detector does not exist, or where distance is +\subsection{Interpolation}{ + +\if{html}{\out{
}}\preformatted{Where upstream detector does not exist, or where distance is beyond 3 miles, interpolate so as not to assume large VMT merely because vehicles crossed an isolated detector. @@ -39,7 +41,7 @@ missing (i.e. NA for entire corridor), or distance attributed is greater than 3 miles (want a conservative assumption for how many miles of travel volume at a particular sensor depicts), interpolate with metro-wide network median. -} +}\if{html}{\out{
}} } } \examples{ diff --git a/man/add_weather.Rd b/man/add_weather.Rd index bafd994..07e994c 100644 --- a/man/add_weather.Rd +++ b/man/add_weather.Rd @@ -48,7 +48,6 @@ For additional Minnesota station ID codes, see the \href{http://mesonet.agron.iastate.edu/sites/networks.php?network=MN_ASOS}{Mesonet station directory}. } \examples{ - \dontrun{ library(tc.sensors) diff --git a/man/aggregate_sensor.Rd b/man/aggregate_sensor.Rd index 1979171..a5ecc1f 100644 --- a/man/aggregate_sensor.Rd +++ b/man/aggregate_sensor.Rd @@ -51,7 +51,9 @@ values for the given measure Aggregate raw sensor data to a chosen level } \details{ -\subsection{Calculating speed}{\preformatted{There are 60 scans per second, which means there are 60*60 = 1,800 scans per +\subsection{Calculating speed}{ + +\if{html}{\out{
}}\preformatted{There are 60 scans per second, which means there are 60*60 = 1,800 scans per 30-second interval. The occupancy value in the 30-second interval data represents the number of scans that were occupied of the 1,800 scans in that interval. @@ -59,10 +61,12 @@ interval. With 60 scans per second, 60 seconds per minute there are 3,600 scans per minute. With 3,600 scans per minute, 60 minutes per hour there are 216,000 scans per hour. To find the number of scans in 15 minutes, we can multiply 0.25 * 216000 = 54,000 scans. -} +}\if{html}{\out{
}} } -\subsection{Impossible values}{\preformatted{Any observation with a volume that exceeds 20 vehicles or an occupancy that exceeds 1,800 scans +\subsection{Impossible values}{ + +\if{html}{\out{
}}\preformatted{Any observation with a volume that exceeds 20 vehicles or an occupancy that exceeds 1,800 scans will be replaced with `NA`. It is impossible for more than twenty vehicles to pass over a sensor in only 30 seconds, and the maximum number of scans in 30 seconds is 1,800 (60 scans/second * 30 seconds). @@ -72,7 +76,7 @@ in only 30 seconds, and the maximum number of scans in 30 seconds is 1,800 (60 s at the raw data level. The interpolated value for a given observation is the mean of the two observations on either side of the observation. This method preserves the variable's overall distribution. -} +}\if{html}{\out{
}} } } \examples{ diff --git a/man/pipe.Rd b/man/pipe.Rd index 0eec752..a648c29 100644 --- a/man/pipe.Rd +++ b/man/pipe.Rd @@ -6,6 +6,14 @@ \usage{ lhs \%>\% rhs } +\arguments{ +\item{lhs}{A value or the magrittr placeholder.} + +\item{rhs}{A function call using the magrittr semantics.} +} +\value{ +The result of calling \code{rhs(lhs)}. +} \description{ See \code{magrittr::\link[magrittr:pipe]{\%>\%}} for details. } diff --git a/man/pull_sensor.Rd b/man/pull_sensor.Rd index 644f083..1114a04 100644 --- a/man/pull_sensor.Rd +++ b/man/pull_sensor.Rd @@ -22,11 +22,14 @@ values. Default is \code{TRUE}} data frame containing variables volume, occupancy, sensor, date, time. } \description{ -Create a tidy data frame, containing volume and occupancy, for a single date and sensor. +Create a tidy data frame, containing volume and occupancy, +for a single date and sensor. Use \code{\link{pull_sensor_ids}} to obtain metro sensor IDs. } \details{ -\subsection{Output}{\preformatted{A complete year's worth of data for volume or occupancy for one sensor +\subsection{Output}{ + +\if{html}{\out{
}}\preformatted{A complete year's worth of data for volume or occupancy for one sensor usually results in a file that is around ~30-31KB. Approximate time to pull one sensor's and one extension's @@ -35,12 +38,14 @@ Approximate time to pull one sensor's and one extension's Also note that if you assign `pull_sensor()`'s output, the result is returned in-memory, and there must be sufficient space in-memory to do so. -} +}\if{html}{\out{
}} } -\subsection{Missing data}{\preformatted{Occupancy *can* be missing while volume data exists and vice versa. +\subsection{Missing data}{ + +\if{html}{\out{
}}\preformatted{Occupancy *can* be missing while volume data exists and vice versa. It is unknown how a loop could be monitoring volume and not occupancy. -} +}\if{html}{\out{
}} } } \examples{ diff --git a/man/pull_sensor_ids.Rd b/man/pull_sensor_ids.Rd index 21ede1b..9999128 100644 --- a/man/pull_sensor_ids.Rd +++ b/man/pull_sensor_ids.Rd @@ -2,9 +2,7 @@ % Please edit documentation in R/pull_sensor_ids.R \name{pull_sensor_ids} \alias{pull_sensor_ids} -\title{Function to pull all sensor IDs in the Twin Cities metro - -Create a tidy dataframe containing sensor IDs for MnDOT metro district, mainly to be used with pull_sensor} +\title{Function to pull all sensor IDs in the Twin Cities metro} \usage{ pull_sensor_ids(.quiet = TRUE) } @@ -15,8 +13,6 @@ pull_sensor_ids(.quiet = TRUE) dataframe containing variable "detector" } \description{ -Function to pull all sensor IDs in the Twin Cities metro - Create a tidy dataframe containing sensor IDs for MnDOT metro district, mainly to be used with pull_sensor } \examples{ diff --git a/man/replace_impossible.Rd b/man/replace_impossible.Rd index e0e697f..ea120e4 100644 --- a/man/replace_impossible.Rd +++ b/man/replace_impossible.Rd @@ -22,14 +22,16 @@ replaced with \code{NA}. Replace impossible volume and occupancy values with \code{NA} at given interval } \details{ -\subsection{Criteria}{\preformatted{- Hourly +\subsection{Criteria}{ + +\if{html}{\out{
}}\preformatted{- Hourly - total hourly occupancy exceeds 216,000 scans - total hourly volume exceeds 2,300 cars - 30-sec - total 30-second volume exceeds 20 cars - total 30-second occupancy exceed 1,800 scans - Percent nulls > 10. -} +}\if{html}{\out{
}} } } \author{ diff --git a/man/tc.sensors-package.Rd b/man/tc.sensors-package.Rd new file mode 100644 index 0000000..f82f88d --- /dev/null +++ b/man/tc.sensors-package.Rd @@ -0,0 +1,33 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/tc.sensors-package.R +\docType{package} +\name{tc.sensors-package} +\alias{tc.sensors} +\alias{tc.sensors-package} +\title{tc.sensors: Retrieve Loop Detector Data from the MnDOT JSON Feed} +\description{ +Process data collected from Minnesota Department of Transportation (MnDOT) loop detectors installed on the Minnesota Freeway system in 30-second interval measurements of occupancy and volume, data which are pushed daily to a public JSON feed. Occupancy and volume data can be used to calculate speed and delay. +} +\seealso{ +Useful links: +\itemize{ + \item Report bugs at \url{https://github.com/Metropolitan-Council/tc.sensors/issues} +} + +} +\author{ +\strong{Maintainer}: Liz Roten \email{liz.roten@metc.state.mn.us} (\href{https://orcid.org/0000-0002-5346-3549}{ORCID}) + +Authors: +\itemize{ + \item Nicole Sullivan \email{nicole.sullivan@metc.state.mn.us} +} + +Other contributors: +\itemize{ + \item Metropolitan Council [copyright holder] + \item Ashley Asmus \email{ashley.asmus@metc.state.mn.us} (\href{https://orcid.org/0000-0001-5505-1372}{ORCID}) [contributor] +} + +} +\keyword{internal} diff --git a/tests/testthat/test-add_weather.R b/tests/testthat/test-add_weather.R index 0eeced4..0314000 100644 --- a/tests/testthat/test-add_weather.R +++ b/tests/testthat/test-add_weather.R @@ -1,5 +1,6 @@ -testthat::skip_if_offline() +testthat::skip_if_offline(host = "metrocouncil.org") +testthat::skip_on_ci() test_that("Weather data functions as expected", { config <- pull_configuration() diff --git a/tests/testthat/test-replace_impossible.R b/tests/testthat/test-replace_impossible.R index 3c3ddd8..34f6b9d 100644 --- a/tests/testthat/test-replace_impossible.R +++ b/tests/testthat/test-replace_impossible.R @@ -1,6 +1,6 @@ -testthat::skip_if_offline() +testthat::skip_if_offline("metrocouncil.org") -test_that("Impossible values are replaced", { +testthat::test_that("Impossible values are replaced", { config <- pull_configuration() yesterday <- as.Date(Sys.Date() - 3) diff --git a/vignettes/pulling_sensors_in_parallel.Rmd b/vignettes/pulling_sensors_in_parallel.Rmd index c2e4bfc..5e782a7 100644 --- a/vignettes/pulling_sensors_in_parallel.Rmd +++ b/vignettes/pulling_sensors_in_parallel.Rmd @@ -11,51 +11,42 @@ vignette: > ```{r, eval = FALSE} library(tc.sensors) -library(doParallel) -library(foreach) library(dplyr) library(magrittr) -library(tictoc) # If you want to benchmark - but you shouldn't have to; I already did (: +library(furrr) +library(tictoc) +# pull sensor configuration and IDs sensor_config <- pull_configuration() sensor_ids <- pull_sensor_ids() -cores <- detectCores() -cl <- makeCluster(cores) -registerDoParallel(cl) tic() # Start the timer -foreach(j = sensor_ids[[1]][633:714]) %dopar% { - date_range <- seq(as.Date("2019/01/01"), as.Date("2019/12/31"), by = "days") - n <- length(date_range) - loops_ls <- vector("list", n) - - for (i in 1:n) { - loops_ls[[i]] <- tc.sensors::pull_sensor(j, date_range[[i]]) - } - - loops_df <- data.table::rbindlist(loops_ls) - data.table::fwrite(loops_df, paste0("../../MnDOT_Loop_Detectors/Volume-Occupancy/2019/Sensor ", j, ".csv")) -} -toc() -stopCluster(cl) - -``` - -## Save to Microsoft Access Database - -```{r, eval=FALSE} -library(RODBC) - -test_conn <- 
odbcConnectAccess2007("../LoopDBTest.accdb") -sqlTables(test_conn, tableType = "TABLE")$TABLE_NAME # Check available tables - -sqlSave(channel = test_conn, - dat = loops_df, - append = T, - tablename = "testtable", - rownames = F) # Prevents RStudio from crashing - -check <- RODBC::sqlQuery(test_conn, "SELECT * from testtable WHERE date = 20191231") +# set up parallel processing +future::plan(multisession) + +# define date range +date_range <- seq(as.Date("2019/01/01"), + as.Date("2019/12/31"), by = "days") + +# iterate through each sensor +sensor_data <- furrr::future_map_dfr( + sensor_ids[[1]][633:640], + function(x){ + + # iterate through each date + purrr::map_dfr(date_range, + function(d){ + pull_sensor( + sensor = x, + pull_date = d + ) + }) + + }) + +tictoc::toc() + +head(sensor_data) ```
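As a follow-on to the parallel pull above, the other functions touched in this changeset can clean and aggregate the result. This is a hedged sketch, not part of the vignette: it assumes `sensor_data` contains complete days (2,880 thirty-second rows per date) for the chosen sensor, which both `replace_impossible()` and `aggregate_sensor()` require.

```r
# Sketch only: clean one sensor's raw pull and aggregate it to hourly values.
library(data.table)

one_sensor <- as.data.table(sensor_data)[sensor == sensor_ids[[1]][633]]

# replace impossible 30-second volume/occupancy values with NA
one_sensor <- replace_impossible(one_sensor)

# aggregate to 1-hour intervals using the configuration pulled earlier
hourly <- aggregate_sensor(one_sensor,
  config = sensor_config,
  interval_length = 1
)

head(hourly)
```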