#' download_CLASS
#' Adapted from Sean's much more elegant batch_processing script.
#' 
#' Used for routine acquisition of GOES FDC files from a CLASS subscription. The function
#' (really a script) will open an FTP location, find all of the netCDF FDC files,
#' and download all of those files that are not already found in the download_path.
#'
#' @param url Subscription FTP location (e.g.,
#'   "ftp://ftp.class.ncdc.noaa.gov/sub/sraffuse1/52654/")
#' @param download_path Local path where netcdf files are stored
#' 
#' @examples download_CLASS("ftp://ftp.class.ncdc.noaa.gov/sub/susan.m.one1/52484/",
#'  "/Users/susan/GOES-16/CLASS/Download_CLASS/")
#url <- "ftp://ftp.class.ncdc.noaa.gov/sub/susan.m.one1/52484/"
#download_path <- "/Users/susan/GOES-16/CLASS/Download_CLASS/"
#download_CLASS(url, download_path)

download_CLASS <- function(url, download_path) {

  # List the remote directory. EPSV is disabled for compatibility with the
  # CLASS FTP server; dirlistonly returns bare file names, one per line.
  filenames <- RCurl::getURL(url, ftp.use.epsv = FALSE, dirlistonly = TRUE)
  # Split on "\r?\n" rather than "\n": CRLF-terminated listings would
  # otherwise leave a trailing "\r" on every name, breaking both the
  # file.exists() skip check and the local destination filename.
  filenames <- unlist(strsplit(filenames, "\r?\n"))
  files <- filenames[grepl(utils::glob2rx("*.gz"), filenames)] # gzip'd files only

  # Process CONUS files (FDC = Fire Detection and Characterization products).
  # download_path is expected to end with "/" (see @param docs), so plain
  # concatenation -- not file.path() -- is used to build paths.
  filesC <- files[grepl("FDC", files)]
  download_path_FDCC <- paste0(download_path, "Retrospective/")
  message("download_path_FDCC ", download_path_FDCC)

  # Make sure the destination directory exists; download.file() does not
  # create it and would fail otherwise. No-op if it is already there.
  dir.create(download_path_FDCC, recursive = TRUE, showWarnings = FALSE)

  for (filename in filesC) {
    # Only fetch the file if it doesn't exist locally (we've already done those)
    if (file.exists(paste0(download_path_FDCC, filename))) {
      message("Skipping ", filename)
    } else {
      message("Processing ", filename)
      # mode = "wb" is required for binary (gzip'd netCDF) content on Windows
      download.file(paste0(url, filename), paste0(download_path_FDCC, filename),
                    mode = "wb", quiet = TRUE)
    }
  }

  invisible(NULL)
}
