tzhu-bio / cisDynet

An integrated platform for modeling gene-regulatory dynamics and networks
MIT License

Error when running "counts <- getCount(...)" in Chapter 10.1 #4

yashi99 opened this issue 1 year ago (status: Open)

yashi99 commented 1 year ago

Hi, when running "counts <- getCount(...)" I get the error "object 'X4' not found", as shown below. Could you please help me solve it?

Besides, I find that "m2g <- getMotif2Gene(...)" in Chapter 11 also reports an error when I provide only the "japser_motif.txt" file downloaded from https://jaspar.genereg.net/download/data/2022/CORE/JASPAR2022_CORE_non-redundant_pfms_meme.txt as input. Is it because the correct input motif file is not this one, but should instead be an output file computed from my own data?

counts <- getCount(sample_list=c("hESC_rep1","hESC_rep2"), 
                   cut_path="./cut_sites/", 
                   peak_path="./peaks",
                   save_file_path="./",
                   peak_suffix = "_peaks_unique.narrowPeak.bed")
Error in `summarize()`:
ℹ In argument: `sum = sum(X4)`.
ℹ In group 1: `.id.x = 1`.
Caused by error:
! object 'X4' not found
Traceback:

1. getCount(sample_list = c("hESC_rep1", "hESC_rep2", "iMeLC_rep1", 
 .     "iMeLC_rep2", "d1hPGCLC_rep1", "d1hPGCLC_rep2", "d2hPGCLC_rep1", 
 .     "d2hPGCLC_rep2", "d4hPGCLC_rep1", "d4hPGCLC_rep2", "d6hPGCLC_rep1", 
 .     "d6hPGCLC_rep2"), cut_path = "./atac4/cut_sites/", peak_path = "./atac4/peaks", 
 .     save_file_path = "./atac4_downstream_results/", peak_suffix = "_peaks_unique.narrowPeak.bed")
2. lapply(sample_list, function(x) {
 .     cut <- valr::read_bed(sprintf("%s/%s_q30_cut_sites.bed", 
 .         cut_path, x))
 .     res <- valr::bed_map(merged_peaks, cut, sum = sum(X4))[4]
 .     return(res)
 . })
3. FUN(X[[i]], ...)
4. valr::bed_map(merged_peaks, cut, sum = sum(X4))
5. summarize(res_int, !!!quos(...))
6. summarise.grouped_df(res_int, !!!quos(...))
7. summarise_cols(.data, dplyr_quosures(...), by, "summarise")
8. withCallingHandlers({
 .     for (i in seq_along(dots)) {
 .         poke_error_context(dots, i, mask = mask)
 .         context_poke("column", old_current_column)
 .         dot <- dots[[i]]
 .         dot <- expand_pick(dot, mask)
 .         quosures <- expand_across(dot)
 .         quosures_results <- map(quosures, summarise_eval_one, 
 .             mask = mask)
 .         for (k in seq_along(quosures)) {
 .             quo <- quosures[[k]]
 .             quo_data <- attr(quo, "dplyr:::data")
 .             quo_result <- quosures_results[[k]]
 .             if (is.null(quo_result)) {
 .                 next
 .             }
 .             types_k <- quo_result$types
 .             chunks_k <- quo_result$chunks
 .             results_k <- quo_result$results
 .             if (!quo_data$is_named && is.data.frame(types_k)) {
 .                 chunks_extracted <- .Call(dplyr_extract_chunks, 
 .                   chunks_k, types_k)
 .                 types_k_names <- names(types_k)
 .                 for (j in seq_along(chunks_extracted)) {
 .                   mask$add_one(name = types_k_names[j], chunks = chunks_extracted[[j]], 
 .                     result = results_k[[j]])
 .                 }
 .                 chunks <- append(chunks, chunks_extracted)
 .                 types <- append(types, as.list(types_k))
 .                 results <- append(results, results_k)
 .                 out_names <- c(out_names, types_k_names)
 .             }
 .             else {
 .                 name <- dplyr_quosure_name(quo_data)
 .                 mask$add_one(name = name, chunks = chunks_k, 
 .                   result = results_k)
 .                 chunks <- append(chunks, list(chunks_k))
 .                 types <- append(types, list(types_k))
 .                 results <- append(results, list(results_k))
 .                 out_names <- c(out_names, name)
 .             }
 .         }
 .     }
 .     sizes <- .Call(dplyr_summarise_recycle_chunks_in_place, chunks, 
 .         results)
 .     for (i in seq_along(chunks)) {
 .         result <- results[[i]] %||% vec_c(!!!chunks[[i]], .ptype = types[[i]])
 .         cols[[out_names[i]]] <- result
 .     }
 . }, error = function(cnd) {
 .     if (inherits(cnd, "dplyr:::summarise_incompatible_size")) {
 .         action <- "recycle"
 .         i <- cnd$dplyr_error_data$index
 .     }
 .     else {
 .         action <- "compute"
 .         i <- i
 .     }
 .     handler <- dplyr_error_handler(dots = dots, mask = mask, 
 .         bullets = summarise_bullets, error_call = error_call, 
 .         action = action)
 .     handler(cnd)
 . }, warning = dplyr_warning_handler(state = warnings_state, mask = mask, 
 .     error_call = error_call))
9. map(quosures, summarise_eval_one, mask = mask)
10. lapply(.x, .f, ...)
11. FUN(X[[i]], ...)
12. mask$eval_all_summarise(quo)
13. eval()
14. .handleSimpleError(function (cnd) 
  . {
  .     if (inherits(cnd, "dplyr:::summarise_incompatible_size")) {
  .         action <- "recycle"
  .         i <- cnd$dplyr_error_data$index
  .     }
  .     else {
  .         action <- "compute"
  .         i <- i
  .     }
  .     handler <- dplyr_error_handler(dots = dots, mask = mask, 
  .         bullets = summarise_bullets, error_call = error_call, 
  .         action = action)
  .     handler(cnd)
  . }, "object 'X4' not found", base::quote(NULL))
15. h(simpleError(msg, call))
16. handler(cnd)
17. abort(message, class = error_class, parent = parent, call = error_call)
18. signal_abort(cnd, .file)
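
For context, step 2 of the traceback above shows that getCount() reads each cut-sites BED file with valr::read_bed() and then sums its fourth column via valr::bed_map(merged_peaks, cut, sum = sum(X4)); the failure therefore means that no column named X4 exists after reading. Below is a minimal single-sample sketch of that same pattern (the file paths are hypothetical, mirroring the arguments in the call above), which can be used to inspect the column names the installed valr version produces:

library(valr)

# Hypothetical paths for one sample; getCount() builds these from its
# cut_path / peak_path / peak_suffix arguments.
cut   <- read_bed("./cut_sites/hESC_rep1_q30_cut_sites.bed")
peaks <- read_bed("./peaks/hESC_rep1_peaks_unique.narrowPeak.bed")

names(cut)  # check whether this valr version exposes a column named "X4"

# If the fourth column was read but given a different name, restoring the old
# name lets the bed_map() call from the traceback run unchanged.
if (ncol(cut) >= 4 && !"X4" %in% names(cut)) names(cut)[4] <- "X4"

per_peak <- bed_map(peaks, cut, sum = sum(X4))
head(per_peak)

If names(cut) shows fewer than four columns, a rename cannot help and the fix has to come from the package side, as in the reply below.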
tzhu-bio commented 1 year ago

This is the same quantification error that was reported previously; it was caused by the valr version, and I've fixed the bug. Please reinstall cisDynet!
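
A quick sketch of the reinstall, assuming cisDynet was installed from this GitHub repository (adjust if you installed it another way):

# Reinstall cisDynet from GitHub to pick up the fix; devtools::install_github()
# works the same way if you prefer devtools over remotes.
remotes::install_github("tzhu-bio/cisDynet")

# Restart R, then reload the package and re-run the failing getCount() call.
library(cisDynet)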

The motif file needed here is a bit different from the one used in the snakemake workflow: it should be in JASPAR format and can be downloaded from https://jaspar.genereg.net/download/data/2022/CORE/JASPAR2022_CORE_redundant_pfms_jaspar.txt.
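
To sanity-check which format a downloaded motif file is in before passing it to getMotif2Gene(): the JASPAR-format release starts with header lines like ">MA0004.1 Arnt" followed by A/C/G/T count rows, while the MEME release linked in the original post starts with a "MEME version" header. A small check (the local file name below is simply taken from the URL above):

# Peek at the first lines of the downloaded motif file to confirm it is the
# JASPAR-format release rather than the MEME one.
jaspar_url <- "https://jaspar.genereg.net/download/data/2022/CORE/JASPAR2022_CORE_redundant_pfms_jaspar.txt"
motif_file <- "JASPAR2022_CORE_redundant_pfms_jaspar.txt"
download.file(jaspar_url, destfile = motif_file)

head(readLines(motif_file), 5)  # JASPAR format: ">MA0004.1  Arnt" then A/C/G/T rows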

yashi99 commented 1 year ago

It works! Thank you so much!