
R: from a text progress bar to a Shiny progress bar

I am using a function with a text progress bar (get_reddit()) in a Shiny app, and I would like the progress to be shown in the app rather than in the R console. Does anyone know how I can do this?

At the moment I get an empty progress bar in the app (because there is no incProgress() call inside withProgress()) and the active text bar in my RStudio console, so no surprise there.

library(shiny) 
library(RedditExtractoR) 

ui <- fluidPage(
  actionButton("go", "GO !"),
  tableOutput("reddit")
)

server <- function(input, output) {
  get_data <- eventReactive(input$go, {
    withProgress(message = 'Work in progress', value = 0, {
      df <- get_reddit(
        search_terms = "Lyon",
        regex_filter = "",
        subreddit = "france",
        cn_threshold = 1,
        page_threshold = 1,
        sort_by = "comments",
        wait_time = 2
      )
      df
    })
  })

  output$reddit <- renderTable({
    df <- get_data()
    df[1:5, 1:5]
  })
}

shinyApp(ui = ui, server = server) 

Thanks for your help!

Answer


A simple solution is to edit the function in the RedditExtractoR package that draws the progress bar, namely reddit_content. This function is called from within get_reddit, so that function has to be updated as well.
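The core change is simply to swap the console progress bar (utils::txtProgressBar()/setTxtProgressBar()) for Shiny's withProgress()/incProgress() around the loop that walks over the URLs. As a stand-alone illustration of that pattern (run_console(), run_shiny() and the Sys.sleep() placeholder are made up for this sketch; they are not part of RedditExtractoR):

library(shiny)

# Console version: progress is reported in the R console
run_console <- function(items) {
  pb <- utils::txtProgressBar(min = 0, max = length(items), style = 3)
  for (i in seq_along(items)) {
    Sys.sleep(0.1)                    # placeholder for the real work
    utils::setTxtProgressBar(pb, i)   # advance the console bar
  }
  close(pb)
}

# Shiny version: the same loop, but progress is reported to the app.
# Must be called from inside a reactive context (observer or reactive).
run_shiny <- function(items) {
  withProgress(message = "Work in progress", value = 0, {
    for (i in seq_along(items)) {
      Sys.sleep(0.1)                  # placeholder for the real work
      incProgress(1 / length(items))  # advance the Shiny bar
    }
  })
}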

library(shiny) 
library(RedditExtractoR) 
source("get_reddit2.R") # source the new get_reddit2 function (see below) 
source("reddit_content2.R") # source the new reddit_content2 function (see below) 

ui <- fluidPage(
  actionButton("go", "GO !"),
  tableOutput("reddit")
)

server <- function(input, output) {
  get_data <- eventReactive(input$go, {
    get_reddit2(
      search_terms = "science",
      subreddit = "science"
    )
  })

  output$reddit <- renderTable({
    df <- get_data()
    df[1:5, 1:5]
  })
}

shinyApp(ui = ui, server = server) 

Put the following function in a separate file called get_reddit2.R, which you source from the app (see above):

get_reddit2 <- function(search_terms = NA,
                        regex_filter = "",
                        subreddit = NA,
                        cn_threshold = 1,
                        page_threshold = 1,
                        sort_by = "comments",
                        wait_time = 2) {
  URL = unique(as.character(
    reddit_urls(
      search_terms,
      regex_filter,
      subreddit,
      cn_threshold,
      page_threshold,
      sort_by,
      wait_time
    )$URL
  ))
  retrieved_data = reddit_content2(URL, wait_time)
  return(retrieved_data)
}

Also put the following function in a separate file called reddit_content2.R (see above). The only change from the original reddit_content is that the utils::txtProgressBar() calls (left in as comments) are replaced by withProgress() around the loop and incProgress() inside it:

reddit_content2 <- function(URL, wait_time = 2) {
  if (is.null(URL) | length(URL) == 0 | !is.character(URL)) {
    stop("invalid URL parameter")
  }
  GetAttribute = function(node, feature) {
    Attribute = node$data[[feature]]
    replies = node$data$replies
    reply.nodes = if (is.list(replies)) replies$data$children else NULL
    return(list(Attribute, lapply(reply.nodes, function(x) {
      GetAttribute(x, feature)
    })))
  }
  get.structure = function(node, depth = 0) {
    if (is.null(node)) {
      return(list())
    }
    filter = is.null(node$data$author)
    replies = node$data$replies
    reply.nodes = if (is.list(replies)) replies$data$children else NULL
    return(list(
      paste0(filter, " ", depth),
      lapply(1:length(reply.nodes), function(x) {
        get.structure(reply.nodes[[x]], paste0(depth, "_", x))
      })
    ))
  }
  data_extract = data.frame(
    id = numeric(),
    structure = character(),
    post_date = as.Date(character()),
    comm_date = as.Date(character()),
    num_comments = numeric(),
    subreddit = character(),
    upvote_prop = numeric(),
    post_score = numeric(),
    author = character(),
    user = character(),
    comment_score = numeric(),
    controversiality = numeric(),
    comment = character(),
    title = character(),
    post_text = character(),
    link = character(),
    domain = character(),
    URL = character()
  )

  # pb = utils::txtProgressBar(min = 0, max = length(URL), style = 3)
  withProgress(message = 'Work in progress', value = 0, {

    for (i in seq(URL)) {
      if (!grepl("^https?://(.*)", URL[i]))
        URL[i] = paste0("https://www.", gsub("^.*(reddit\\..*$)", "\\1", URL[i]))
      if (!grepl("\\?ref=search_posts$", URL[i]))
        URL[i] = paste0(gsub("/$", "", URL[i]), "/?ref=search_posts")
      X = paste0(gsub("\\?ref=search_posts$", "", URL[i]), ".json?limit=500")
      raw_data = tryCatch(
        RJSONIO::fromJSON(readLines(X, warn = FALSE)),
        error = function(e) NULL
      )
      if (is.null(raw_data)) {
        Sys.sleep(min(1, wait_time))
        raw_data = tryCatch(
          RJSONIO::fromJSON(readLines(X, warn = FALSE)),
          error = function(e) NULL
        )
      }
      if (is.null(raw_data) == FALSE) {
        meta.node = raw_data[[1]]$data$children[[1]]$data
        main.node = raw_data[[2]]$data$children
        if (min(length(meta.node), length(main.node)) > 0) {
          structure = unlist(lapply(1:length(main.node), function(x) {
            get.structure(main.node[[x]], x)
          }))
          TEMP = data.frame(
            id = NA,
            structure = gsub("FALSE ", "", structure[!grepl("TRUE", structure)]),
            post_date = format(as.Date(
              as.POSIXct(meta.node$created_utc, origin = "1970-01-01")
            ), "%d-%m-%y"),
            comm_date = format(as.Date(
              as.POSIXct(unlist(lapply(main.node, function(x) {
                GetAttribute(x, "created_utc")
              })), origin = "1970-01-01")
            ), "%d-%m-%y"),
            num_comments = meta.node$num_comments,
            subreddit = ifelse(is.null(meta.node$subreddit), "UNKNOWN", meta.node$subreddit),
            upvote_prop = meta.node$upvote_ratio,
            post_score = meta.node$score,
            author = meta.node$author,
            user = unlist(lapply(main.node, function(x) {
              GetAttribute(x, "author")
            })),
            comment_score = unlist(lapply(main.node, function(x) {
              GetAttribute(x, "score")
            })),
            controversiality = unlist(lapply(main.node, function(x) {
              GetAttribute(x, "controversiality")
            })),
            comment = unlist(lapply(main.node, function(x) {
              GetAttribute(x, "body")
            })),
            title = meta.node$title,
            post_text = meta.node$selftext,
            link = meta.node$url,
            domain = meta.node$domain,
            URL = URL[i],
            stringsAsFactors = FALSE
          )
          TEMP$id = 1:nrow(TEMP)
          if (dim(TEMP)[1] > 0 & dim(TEMP)[2] > 0)
            data_extract = rbind(TEMP, data_extract)
          else
            print(paste("missed", i, ":", URL[i]))
        }
      }

      # utils::setTxtProgressBar(pb, i)
      incProgress()
      Sys.sleep(min(2, wait_time))
    }

    # close(pb)
  })
  return(data_extract)
}

The progress bar is now displayed in the Shiny app instead of the console.
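Note that, written this way, reddit_content2() only works when called from inside a Shiny reactive context, because withProgress()/incProgress() need a running session. If you also want to keep using the function from the console, one possible variant (a sketch only; scrape_urls() and its update_progress argument are invented here and are not part of RedditExtractoR) is to report progress through a callback and let the caller decide how to display it:

library(shiny)

# Hypothetical variant: report progress through a callback so the same loop
# works both in the console and inside a Shiny app.
scrape_urls <- function(URL, wait_time = 2, update_progress = NULL) {
  pb <- if (is.null(update_progress))
    utils::txtProgressBar(min = 0, max = length(URL), style = 3)
  for (i in seq_along(URL)) {
    Sys.sleep(min(2, wait_time))        # placeholder for the scraping step
    if (is.null(update_progress))
      utils::setTxtProgressBar(pb, i)   # console bar
    else
      update_progress(i / length(URL))  # caller-supplied reporter
  }
  if (!is.null(pb)) close(pb)
  invisible(URL)
}

# In the Shiny server the call would then look like:
# withProgress(message = "Work in progress", value = 0, {
#   df <- scrape_urls(URL, update_progress = function(p) setProgress(p))
# })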