How to manage parallel processing with an animated ggplot2 plot?

I'm trying to build an animated bar plot with ggplot2 and magick that grows on a day-by-day basis. Unfortunately, I have tens of thousands of entries in my dataset (dates for each day over several years, across different categories), which makes processing very slow. I'm therefore using the snow package to speed up processing. However, I ran into trouble when splitting my data and calling ggplot() in a cluster.

magick requires the data to be split per date for the animation, and snow requires it to be split per cluster for parallel processing. So I end up with a list of lists, which causes problems when calling ggplot() within clusterApply(). The structure of the nested list of course depends on the order in which I split the data (see versions 1 and 2 in the sample code), but neither version has worked so far. I suspect that accessing the list elements via data$date no longer works because the list now has an extra level of nesting.

So, my question is: is it possible to build an animated graph via ggplot2 by using parallel processing in this way?
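
To make the structure issue concrete, here is a tiny standalone illustration with dummy data (not my real dataset):

# dummy data: 2 dates x 2 cluster groups
d <- data.frame(date  = rep(Sys.Date() + 0:1, 2),
                group = rep(1:2, each = 2),
                value = 1:4)

# first split per cluster group, then per date (as in version 1 below)
nested <- lapply(split(d, d$group), function(g) split(g, g$date))
str(nested, max.level = 2)
# each top-level element is itself a list of per-date data.frames, so the
# function inside clusterApply() receives a list, not a data.frame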

Here's the sample code visualizing my problem (I've tried to keep it as structured as possible):

########################################################################
# setup
########################################################################
library(parallel)
library(snow)
library(ggplot2)
library(magick)

# creating some sample data for one year
# 4 categories; each category has a specific value per day
set.seed(1)
x <- data.frame(
  rep(as.Date((Sys.Date()-364):Sys.Date(), origin="1970-01-01"),4),
  c(rep("cat01",length.out=365),
    rep("cat02",length.out=365),
    rep("cat03",length.out=365),
    rep("cat04",length.out=365)),
  sample(0:50,365*4, replace=TRUE)
)
colnames(x) <- c("date", "category", "value")
x$category <- factor(x$category)

# creating a cumulative measure making the graphs appear "growing"
x$cumsum <- NA
for(i in levels(x$category)){
  x$cumsum[x$category == i] <- cumsum(x$value[x$category == i])
}
x <- x[order(x$date),]

# number of cores
cores <- detectCores()

# clustering
cl <- makeCluster(cores, type="SOCK")

# adding a grouping-variable to the data for each cluster
x$group <- rep(1:cores, length.out = nrow(x))

########################################################################
# splitting the data
########################################################################
# V1: worker first, plotting second
# splitting data for the worker
datasplit01 <- split(x, x$group)

# splitting data for plotting
datalist01 <- clusterApply(cl, datasplit01, function(x){split(x, x$date)})

########################################################################
# V2: plotting first, worker second
# splitting data for plotting
datasplit02 <- split(x, x$date)

# splitting data for the worker
datalist02 <- clusterApply(cl, datasplit02, function(x){split(x, x$group)})

########################################################################
# conventional plotting
########################################################################
# plotting the whole data works fine
ggplot(x)+
  geom_bar(aes(category, value), stat = "identity")

########################################################################
# conventional animation with ggplot2
########################################################################
# animation per date works, but pretty slowly

# opening magick-device
img <- image_graph(1000, 700, res = 96)

# plotting
  # if the full dataset runs too slowly and you just want an impression of what
  # the plot should look like, use the first (downscaled) call instead of the second
# out <- lapply(datasplit02[1:50], function(data){   # downscaled dataset
out <- lapply(datasplit02, function(data){           # full dataset
  plot <- ggplot(data)+
    geom_bar(aes(category, cumsum), stat = "identity")+
    # holding breaks and limits constant per plot
    scale_y_continuous(expand = c(0,0), 
                       breaks = seq(0,max(x$cumsum)+500,500), 
                       limits = c(0,max(x$cumsum)+500))+
    ggtitle(data$date)
  print(plot)
})
dev.off()

# animation
animation <- image_animate(img, fps = 5)
animation
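
# if needed, the finished animation can also be written to disk
# (the file name below is just an example)
# image_write(animation, "animation.gif")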

########################################################################
# parallel process plotting
########################################################################
# animation per date in parallel processing does not work, probably
# due to ggplot not working with a list of lists

# opening magick-device
img <- image_graph(1000, 700, res = 96)

# plotting
out <- clusterApply(cl, datalist01, function(data){
  plot <- ggplot(data)+
    geom_bar(aes(category, cumsum), stat = "identity")+
    # holding breaks and limits constant per plot
    scale_y_continuous(expand = c(0,0), 
                       breaks = seq(0,max(x$cumsum)+500,500), 
                       limits = c(0,max(x$cumsum)+500))+
    ggtitle(data$date)
  print(plot)
})
dev.off()

# animation
animation <- image_animate(img, fps = 5)
animation

Thank you for your suggestions!

UPDATE: using snowfall, the code is much shorter and I don't get the same errors, but the magick device still doesn't produce a plot.

########################################################################
# snowfall version
########################################################################
library(parallel)
library(snowfall)
library(ggplot2)
library(magick)

# creating some sample data for one year
# 4 categories; each category has a specific value per day
set.seed(1)
x <- data.frame(
  rep(as.Date((Sys.Date()-364):Sys.Date(), origin="1970-01-01"),4),
  c(rep("cat01",length.out=365),
    rep("cat02",length.out=365),
    rep("cat03",length.out=365),
    rep("cat04",length.out=365)),
  sample(0:50,365*4, replace=TRUE)
)
colnames(x) <- c("date", "category", "value")
x$category <- factor(x$category)

# creating a cumulative measure making the graphs appear "growing"
x$cumsum <- NA
for(i in levels(x$category)){
  x$cumsum[x$category == i] <- cumsum(x$value[x$category == i])
}
x <- x[order(x$date),]

# number of cores
cores <- detectCores()

# clustering
sfInit(parallel = TRUE, cpus = cores, type = "SOCK")

# splitting data for plotting
datalist <- split(x, x$date)

# making everything accessible in the cluster
sfExportAll()
sfLibrary(ggplot2)
sfLibrary(magick)

# opening magick-device
img <- image_graph(1000, 700, res = 96)

# plotting
out <- sfLapply(datalist, function(data){
  plot <- ggplot(data)+
    geom_bar(aes(category, cumsum), stat = "identity")+
    # holding breaks and limits constant per plot
    scale_y_continuous(expand = c(0,0), 
                       breaks = seq(0,max(x$cumsum)+500,500), 
                       limits = c(0,max(x$cumsum)+500))+
    ggtitle(data$date)
  plot
})
dev.off()

# animation
animation <- image_animate(img, fps = 5)
animation

When using

img <- image_graph(1000, 700, res = 96)
out
dev.off()
animation <- image_animate(img, fps = 5)
animation

the plot is produced. However, printing out this way is very slow, which is exactly what I'm trying to avoid by parallelizing in the first place.
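
As far as I can tell, the workers only hand back un-rendered ggplot objects, so nothing reaches the magick device on the master until they are printed there. A quick check (purely for illustration):

class(out[[1]])
# [1] "gg"     "ggplot"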

Redtop asked 5/6, 2018 at 10:12

Comments:

  • Stamina: How does the animation work? Can you give it a list of ggplots?
  • Redtop: Yes. When calling out from the code above, you get a list of plots: class(out) returns "list".
  • Stamina: Then why use print()? What is the problem here?
  • Redtop: You're right, print() isn't necessary. Still, this doesn't solve my problem: I need to build the plots with parallel processing for performance. I've updated the code and included a version using snowfall, which seems to run but doesn't produce a plot.
  • Trillium: See also #67321987.

So, my solution:

  • split the dates into ncores periods

  • render the plots for each period and save them as an animated GIF

  • read back all the GIFs and combine them


########################################################################
# setup
########################################################################

# creating some sample data for one year
# 4 categories; each category has a specific value per day
set.seed(1)
x <- data.frame(
  rep(as.Date((Sys.Date()-364):Sys.Date(), origin="1970-01-01"),4),
  c(rep("cat01",length.out=365),
    rep("cat02",length.out=365),
    rep("cat03",length.out=365),
    rep("cat04",length.out=365)),
  sample(0:50,365*4, replace=TRUE)
)
colnames(x) <- c("date", "category", "value")

# creating a cumulative measure making the graphs appear "growing"
library(dplyr)
x <- x %>%
  as_tibble() %>%
  arrange(date) %>%
  mutate(date = as.character(date)) %>%
  group_by(category) %>%
  mutate(cumsum = cumsum(value))

y_max <- max(x$cumsum) + 500

library(doParallel)

all_dates <- unique(x$date)
ncores <- detectCores() - 1
# assign consecutive dates to the same core, so each worker gets one contiguous period
ind_cluster <- sort(rep_len(1:ncores, length(all_dates)))
date_cluster <- split(all_dates, ind_cluster)
registerDoParallel(cl <- makeCluster(ncores))

tmp <- tempfile()

# each worker renders the plots for its block of dates into its own magick
# device, writes them out as an intermediate animated GIF, and returns the file path
files <- foreach(ic = 1:ncores, .packages = c("tidyverse", "magick")) %dopar% {

  img <- image_graph(1000, 700, res = 96)

  x %>%
    filter(date %in% date_cluster[[ic]]) %>%
    group_by(date) %>%
    do(
      plot = ggplot(.) +
        geom_col(aes(category, cumsum)) +
        scale_y_continuous(expand = c(0, 0), 
                           breaks = seq(0, y_max, 500), 
                           limits = c(0, y_max))
    ) %>%
  pmap(function(date, plot) {
    print(plot + ggtitle(date))
    NULL
  })

  dev.off()

  image_write(image_animate(img, fps = 5), paste0(tmp, ic, ".gif"))
}
stopCluster(cl)

# read the per-worker GIFs back in and concatenate their frames into one animation
test <- do.call(c, lapply(files, magick::image_read))
test
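
If you need a single file on disk, the combined frames can also be written out (the file name is just an example; re-running image_animate() keeps the frame delay consistent across the merged parts):

image_write(image_animate(test, fps = 5), "combined.gif")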
Toddler answered 5/6, 2018 at 14:47

I would do

library(tidyverse)
library(gganimate)
x %>% 
  as_tibble() %>% 
  arrange(date) %>%  
  group_by(category) %>% 
  mutate(Sum=cumsum(value)) %>% 
  ggplot(aes(category, Sum, fill = category)) +
  geom_col(position = 'identity') + 
  ggtitle("{frame_time}") +
  transition_time(date) +
  ease_aes('linear') 
anim_save("GIF.gif")  

[animated GIF of the growing cumulative bar plot]

If it's too much data, I recommend coarsening the transition to months instead of days.
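
For example, something along these lines should work (just a sketch: it assumes lubridate is available, keeps the cumulative total as of each month's last day, and the output file name is only an example):

library(tidyverse)
library(lubridate)
library(gganimate)
x %>% 
  as_tibble() %>% 
  arrange(date) %>% 
  group_by(category) %>% 
  mutate(Sum = cumsum(value)) %>% 
  # collapse to one frame per month, keeping the cumulative total at month end
  mutate(month = floor_date(date, "month")) %>% 
  group_by(category, month) %>% 
  summarise(Sum = max(Sum)) %>% 
  ungroup() %>% 
  ggplot(aes(category, Sum, fill = category)) +
  geom_col(position = 'identity') + 
  ggtitle("{frame_time}") +
  transition_time(month) +
  ease_aes('linear')
anim_save("GIF_monthly.gif")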

Tessy answered 6/11, 2018 at 13:48

Comment:

  • Redtop: Thanks also for your answer to my old question! I'll have to test its speed against the parallel version, but it might be a lot faster.
