Closed cron-weasley closed 2 years ago
I also tried installing STITCH with miniconda via the INSTALL guide, but when I run my case I still get the "memory not mapped" error:
[2022-08-19 11:24:34] downsample sample CAU06646 - 19 of 304799 reads removed
[2022-08-19 11:24:35] downsample sample CAU03790 - 6 of 478916 reads removed
[2022-08-19 11:24:35] downsample sample CAU02823 - 24 of 365368 reads removed
[2022-08-19 11:24:35] downsample sample CAU05108 - 13 of 310494 reads removed
[2022-08-19 11:24:35] downsample sample CAU05679 - 28 of 257980 reads removed
[2022-08-19 11:24:35] downsample sample CAU03281 - 21 of 255361 reads removed
caught segfault address 0x564180756004, cause 'memory not mapped'
Traceback: 1: cpp_read_reassign(ord = ord, qnameInteger_ord = qnameInteger_ord, bxtagInteger_ord = bxtagInteger_ord, bxtag_bad_ord = bxtag_bad_ord, qname = qname, bxtag = bxtag, strand = strand, sampleReadsRaw = sampleReadsRaw, readStart_ord = readStart_ord, readEnd_ord = readEnd_ord, readStart = readStart, readEnd = readEnd, iSizeUpperLimit = iSizeUpperLimit, bxTagUpperLimit = bxTagUpperLimit, use_bx_tag = use_bx_tag, save_sampleReadsInfo = save_sampleReadsInfo) 2: merge_reads_from_sampleReadsRaw(sampleReadsRaw = sampleReadsRaw, qname = qname, bxtag = bxtag, strand = strand, readStart = readStart, readEnd = readEnd, iSizeUpperLimit = iSizeUpperLimit, use_bx_tag = use_bx_tag, bxTagUpperLimit = bxTagUpperLimit, save_sampleReadsInfo = save_sampleReadsInfo, qname_all = qname_all, readStart_all = readStart_all, readEnd_all = readEnd_all) 3: loadBamAndConvert(iBam = iBam, L = L, pos = pos, nSNPs = nSNPs, bam_files = bam_files, cram_files = cram_files, reference = reference, iSizeUpperLimit = iSizeUpperLimit, bqFilter = bqFilter, chr = chr, N = N, downsampleToCov = downsampleToCov, sampleNames = sampleNames, inputdir = inputdir, useSoftClippedBases = useSoftClippedBases, regionName = regionName, tempdir = tempdir, chrStart = chrStart, chrEnd = chrEnd, chrLength = chrLength, save_sampleReadsInfo = save_sampleReadsInfo, use_bx_tag = use_bx_tag, bxTagUpperLimit = bxTagUpperLimit) 4: FUN(X[[i]], ...) 5: lapply(X = S, FUN = FUN, ...) 6: doTryCatch(return(expr), name, parentenv, handler) 7: tryCatchOne(expr, names, parentenv, handlers[[1L]]) 8: tryCatchList(expr, classes, parentenv, handlers) 9: tryCatch(expr, error = function(e) { call <- conditionCall(e) if (!is.null(call)) { if (identical(call[[1L]], quote(doTryCatch))) call <- sys.call(-4L) dcall <- deparse(call, nlines = 1L) prefix <- paste("Error in", dcall, ": ") LONG <- 75L sm <- strsplit(conditionMessage(e), "\n")[[1L]] w <- 14L + nchar(dcall, type = "w") + nchar(sm[1L], type = "w") if (is.na(w)) w <- 14L + nchar(dcall, type = "b") + nchar(sm[1L], type = "b") if (w > LONG) prefix <- paste0(prefix, "\n ") } else prefix <- "Error : " msg <- paste0(prefix, conditionMessage(e), "\n") .Internal(seterrmessage(msg[1L])) if (!silent && isTRUE(getOption("show.error.messages"))) { cat(msg, file = outFile) .Internal(printDeferredWarnings()) } invisible(structure(msg, class = "try-error", condition = e))}) 10: try(lapply(X = S, FUN = FUN, ...), silent = TRUE) 11: sendMaster(try(lapply(X = S, FUN = FUN, ...), silent = TRUE)) 12: FUN(X[[i]], ...) 
13: lapply(seq_len(cores), inner.do) 14: mclapply(1:length(sampleRanges), mc.cores = nCores, FUN = loadBamAndConvert_across_a_range, sampleRanges = sampleRanges, bundling_info = bundling_info, L = L, pos = pos, nSNPs = nSNPs, bam_files = bam_files, cram_files = cram_files, reference = reference, iSizeUpperLimit = iSizeUpperLimit, bqFilter = bqFilter, chr = chr, N = N, downsampleToCov = downsampleToCov, sampleNames = sampleNames, inputdir = inputdir, useSoftClippedBases = useSoftClippedBases, regionName = regionName, tempdir = tempdir, chrStart = chrStart, chrEnd = chrEnd, chrLength = chrLength, save_sampleReadsInfo = save_sampleReadsInfo, use_bx_tag = use_bx_tag, bxTagUpperLimit = bxTagUpperLimit) 15: generate_input(bundling_info = bundling_info, L = L, pos = pos, nSNPs = nSNPs, bam_files = bam_files, cram_files = cram_files, reference = reference, iSizeUpperLimit = iSizeUpperLimit, bqFilter = bqFilter, chr = chr, N = N, downsampleToCov = downsampleToCov, sampleNames = sampleNames, inputdir = inputdir, useSoftClippedBases = useSoftClippedBases, regionName = regionName, tempdir = tempdir, chrStart = chrStart, chrEnd = chrEnd, nCores = nCores, save_sampleReadsInfo = save_sampleReadsInfo, use_bx_tag = use_bx_tag, bxTagUpperLimit = bxTagUpperLimit) 16: generate_or_refactor_input(regenerateInput = regenerateInput, bundling_info = bundling_info, L = L, pos = pos, nSNPs = nSNPs, bam_files = bam_files, cram_files = cram_files, reference = reference, iSizeUpperLimit = iSizeUpperLimit, bqFilter = bqFilter, chr = chr, outputdir = outputdir, N = N, downsampleToCov = downsampleToCov, sampleNames = sampleNames, inputdir = inputdir, useSoftClippedBases = useSoftClippedBases, regionName = regionName, tempdir = tempdir, chrStart = chrStart, chrEnd = chrEnd, generateInputOnly = generateInputOnly, nCores = nCores, save_sampleReadsInfo = save_sampleReadsInfo, use_bx_tag = use_bx_tag, bxTagUpperLimit = bxTagUpperLimit) 17: STITCH(chr = opt$chr, posfile = opt$posfile, K = opt$K, S = opt$S, nGen = opt$nGen, outputdir = opt$outputdir, tempdir = opt$tempdir, bamlist = opt$bamlist, cramlist = opt$cramlist, sampleNames_file = opt$sampleNames_file, reference = opt$reference, genfile = opt$genfile, method = opt$method, output_format = opt$output_format, B_bit_prob = opt$B_bit_prob, outputInputInVCFFormat = opt$outputInputInVCFFormat, downsampleToCov = opt$downsampleToCov, downsampleFraction = opt$downsampleFraction, readAware = opt$readAware, chrStart = opt$chrStart, chrEnd = opt$chrEnd, regionStart = opt$regionStart, regionEnd = opt$regionEnd, buffer = opt$buffer, maxDifferenceBetweenReads = opt$maxDifferenceBetweenReads, maxEmissionMatrixDifference = opt$maxEmissionMatrixDifference, alphaMatThreshold = opt$alphaMatThreshold, emissionThreshold = opt$emissionThreshold, iSizeUpperLimit = opt$iSizeUpperLimit, bqFilter = opt$bqFilter, niterations = opt$niterations, shuffleHaplotypeIterations = eval(parse(text = opt$shuffleHaplotypeIterations)), splitReadIterations = eval(parse(text = opt$splitReadIterations)), nCores = opt$nCores, expRate = opt$expRate, maxRate = opt$maxRate, minRate = opt$minRate, Jmax = opt$Jmax, regenerateInput = opt$regenerateInput, originalRegionName = opt$originalRegionName, keepInterimFiles = opt$keepInterimFiles, keepTempDir = opt$keepTempDir, switchModelIteration = opt$switchModelIteration, generateInputOnly = opt$generateInputOnly, restartIterations = opt$restartIterations, refillIterations = eval(parse(text = opt$refillIterations)), downsampleSamples = opt$downsampleSamples, 
downsampleSamplesKeepList = opt$downsampleSamplesKeepList, subsetSNPsfile = opt$subsetSNPsfile, useSoftClippedBases = opt$useSoftClippedBases, outputBlockSize = opt$outputBlockSize, outputSNPBlockSize = opt$outputSNPBlockSize, inputBundleBlockSize = opt$inputBundleBlockSize, genetic_map_file = opt$genetic_map_file, reference_haplotype_file = opt$reference_haplotype_file, reference_legend_file = opt$reference_legend_file, reference_sample_file = opt$reference_sample_file, reference_populations = eval(parse(text = opt$reference_populations)), reference_phred = opt$reference_phred, reference_iterations = opt$reference_iterations, reference_shuffleHaplotypeIterations = eval(parse(text = opt$reference_shuffleHaplotypeIterations)), output_filename = opt$output_filename, initial_min_hapProb = opt$initial_min_hapProb, initial_max_hapProb = opt$initial_max_hapProb, regenerateInputWithDefaultValues = opt$regenerateInputWithDefaultValues, plotHapSumDuringIterations = opt$plotHapSumDuringIterations, plot_shuffle_haplotype_attempts = opt$plot_shuffle_haplotype_attempts, plotAfterImputation = opt$plotAfterImputation, save_sampleReadsInfo = opt$save_sampleReadsInfo, gridWindowSize = opt$gridWindowSize, shuffle_bin_nSNPs = opt$shuffle_bin_nSNPs, shuffle_bin_radius = opt$shuffle_bin_radius, keepSampleReadsInRAM = opt$keepSampleReadsInRAM, useTempdirWhileWriting = opt$useTempdirWhileWriting, output_haplotype_dosages = opt$output_haplotype_dosages, use_bx_tag = opt$use_bx_tag, bxTagUpperLimit = opt$bxTagUpperLimit) An irrecoverable exception occurred. R is aborting now ... [2022-08-19 11:24:36] downsample sample CAU04045 - 7 of 203157 reads removed [2022-08-19 11:24:37] downsample sample CAU06101 - 19 of 248204 reads removed [2022-08-19 11:24:37] downsample sample CAU04012 - 14 of 266510 reads removed [2022-08-19 11:24:37] downsample sample CAU05360 - 12 of 316159 reads removed [2022-08-19 11:24:38] downsample sample CAU05713 - 28 of 240640 reads removed [2022-08-19 11:24:39] downsample sample CAU03572 - 14 of 262813 reads removed [2022-08-19 11:24:39] downsample sample CAU06295 - 49 of 331666 reads removed [2022-08-19 11:24:39] downsample sample CAU05501 - 33 of 298722 reads removed [2022-08-19 11:24:39] downsample sample CAU02901 - 2 of 236043 reads removed [2022-08-19 11:24:40] downsample sample CAU03356 - 53 of 314702 reads removed [2022-08-19 11:24:40] downsample sample CAU06140 - 7 of 267272 reads removed [2022-08-19 11:24:40] downsample sample CAU03939 - 2 of 296245 reads removed [2022-08-19 11:24:41] downsample sample CAU05324 - 20 of 279044 reads removed [2022-08-19 11:24:41] downsample sample CAU03387 - 5 of 323809 reads removed [2022-08-19 11:24:42] downsample sample CAU06679 - 25 of 272406 reads removed [2022-08-19 11:24:42] downsample sample CAU02824 - 1 of 240400 reads removed [2022-08-19 11:24:42] downsample sample CAU03833 - 12 of 436946 reads removed [2022-08-19 11:24:44] downsample sample CAU06647 - 86 of 331483 reads removed [2022-08-19 11:24:45] downsample sample CAU04046 - 1 of 287713 reads removed [2022-08-19 11:24:45] downsample sample CAU06477 - 20 of 308812 reads removed [2022-08-19 11:24:45] downsample sample CAU05109 - 3 of 358965 reads removed [2022-08-19 11:24:45] downsample sample CAU02791 - 95 of 485145 reads removed [2022-08-19 11:24:45] downsample sample CAU05714 - 3 of 257422 reads removed [2022-08-19 11:24:46] downsample sample CAU03755 - 21 of 429736 reads removed [2022-08-19 11:24:46] downsample sample CAU05179 - 25 of 325599 reads removed [2022-08-19 11:24:47] downsample 
sample CAU06296 - 8 of 303108 reads removed [2022-08-19 11:24:47] downsample sample CAU03647 - 70 of 499109 reads removed [2022-08-19 11:24:48] downsample sample CAU02902 - 12 of 290958 reads removed [2022-08-19 11:24:48] downsample sample CAU06141 - 11 of 299109 reads removed [2022-08-19 11:24:48] downsample sample CAU06715 - 49 of 383336 reads removed [2022-08-19 11:24:49] downsample sample CAU03793 - 1 of 243180 reads removed [2022-08-19 11:24:49] downsample sample CAU06680 - 14 of 268248 reads removed [2022-08-19 11:24:50] downsample sample CAU03207 - 7 of 410334 reads removed [2022-08-19 11:24:51] downsample sample CAU03574 - 2 of 213170 reads removed [2022-08-19 11:24:51] downsample sample CAU03283 - 21 of 315305 reads removed [2022-08-19 11:24:51] downsample sample CAU06648 - 6 of 276579 reads removed [2022-08-19 11:24:51] downsample sample CAU03975 - 8 of 348414 reads removed [2022-08-19 11:24:51] downsample sample CAU05145 - 8 of 269954 reads removed [2022-08-19 11:24:54] downsample sample CAU05110 - 2 of 319695 reads removed [2022-08-19 11:24:54] downsample sample CAU03358 - 20 of 287283 reads removed [2022-08-19 11:24:55] downsample sample CAU02792 - 54 of 340730 reads removed [2022-08-19 11:24:55] downsample sample CAU03429 - 12 of 377804 reads removed [2022-08-19 11:24:55] downsample sample CAU03175 - 50 of 515694 reads removed [2022-08-19 11:24:56] downsample sample CAU05180 - 32 of 339240 reads removed [2022-08-19 11:24:57] downsample sample CAU06297 - 67 of 340670 reads removed [2022-08-19 11:24:57] downsample sample CAU06681 - 91 of 322724 reads removed [2022-08-19 11:24:58] downsample sample CAU03043 - 6 of 351827 reads removed [2022-08-19 11:24:59] downsample sample CAU03251 - 17 of 345758 reads removed [2022-08-19 11:24:59] downsample sample CAU06716 - 56 of 364692 reads removed [2022-08-19 11:24:59] downsample sample CAU03208 - 4 of 308825 reads removed [2022-08-19 11:24:59] downsample sample CAU06266 - 11 of 362226 reads removed [2022-08-19 11:25:02] downsample sample CAU05111 - 4 of 293375 reads removed [2022-08-19 11:25:02] downsample sample CAU03430 - 6 of 257955 reads removed [2022-08-19 11:25:04] downsample sample CAU02793 - 10 of 293176 reads removed [2022-08-19 11:25:04] downsample sample CAU06479 - 56 of 320150 reads removed [2022-08-19 11:25:06] downsample sample CAU06105 - 20 of 290975 reads removed [2022-08-19 11:25:06] downsample sample CAU06682 - 18 of 305636 reads removed [2022-08-19 11:25:06] downsample sample CAU03176 - 4 of 388108 reads removed [2022-08-19 11:25:06] downsample sample CAU05147 - 28 of 306066 reads removed [2022-08-19 11:25:06] downsample sample CAU03390 - 8 of 316986 reads removed [2022-08-19 11:25:06] downsample sample CAU03795 - 36 of 301905 reads removed [2022-08-19 11:25:06] Load and convert BAM 600 of 3401 [2022-08-19 11:25:09] downsample sample CAU06267 - 32 of 358403 reads removed [2022-08-19 11:25:10] downsample sample CAU03286 - 4 of 275238 reads removed [2022-08-19 11:25:12] downsample sample CAU06717 - 20 of 444953 reads removed [2022-08-19 11:25:13] downsample sample CAU06299 - 8 of 249937 reads removed [2022-08-19 11:25:13] Load and convert BAM 3400 of 3401 [2022-08-19 11:25:14] downsample sample CAU06683 - 12 of 319616 reads removed [2022-08-19 11:25:16] downsample sample CAU03210 - 6 of 287630 reads removed [2022-08-19 11:25:17] downsample sample CAU02828 - 9 of 321523 reads removed [2022-08-19 11:25:19] downsample sample CAU06301 - 2 of 204659 reads removed [2022-08-19 11:25:20] downsample sample CAU06718 - 1 of 270482 
reads removed [2022-08-19 11:25:20] downsample sample CAU06481 - 61 of 287915 reads removed [2022-08-19 11:25:26] downsample sample CAU05183 - 4 of 377447 reads removed [2022-08-19 11:25:26] downsample sample CAU06302 - 15 of 286667 reads removed [2022-08-19 11:25:33] downsample sample CAU03393 - 42 of 366037 reads removed [2022-08-19 11:25:33] downsample sample CAU06719 - 55 of 438977 reads removed [2022-08-19 11:25:35] downsample sample CAU03179 - 7 of 496057 reads removed [2022-08-19 11:25:35] downsample sample CAU03212 - 37 of 361168 reads removed [2022-08-19 11:25:57] downsample sample CAU03214 - 115 of 516477 reads removed [2022-08-19 11:26:07] downsample sample CAU03215 - 25 of 390080 reads removed [2022-08-19 11:26:08] Done generating inputs [2022-08-19 11:26:08] Copying files onto tempdir [2022-08-19 11:27:56] Done copying files onto tempdir [2022-08-19 11:27:56] Generate allele count [2022-08-19 11:29:24] Error in readChar(con, 5L, useBytes = TRUE) : cannot open the connection
Error in check_mclapply_OK(out2) : An error occured during STITCH. The first such error is above Calls: STITCH -> buildAlleleCount -> check_mclapply_OK In addition: Warning messages: 1: In mclapply(1:length(sampleRanges), mc.cores = nCores, FUN = loadBamAndConvert_across_a_range, : scheduled core 9 did not deliver a result, all values of the job will be affected 2: In mclapply(sampleRanges, mc.cores = nCores, FUN = buildAlleleCount_subfunction, : scheduled core 9 encountered error in user code, all values of the job will be affected Execution halted
Hi,
I ran into the same problem when running STITCH on some chromosomes.
The error message looks like this:
[2022-08-19 18:08:07] Generate inputs
caught segfault address 0x1d443000, cause 'memory not mapped'
By the way, I am trying to impute the human genome. I can run STITCH successfully on chr1, chr3, and so on, but it fails on chr2 as shown above.
I hope I can get some help. Thanks a lot!
Dear author, first, thanks a lot for writing this powerful software. When I build and run STITCH 1.6.6 on CentOS 7.9 with devtoolset-gcc8 and R 4.2.1, I get the "memory not mapped" error. Please see the picture. How can I debug and investigate this problem? Thanks!
Hi,
I ran STITCH with nCores = 10, and the error no longer appears. Maybe you can try setting a larger value of nCores when doing the imputation. I hope this solves your problem.
Best,
Hi,
I think this error message points to one of your .bam files. You could try to find the .bam file of the sample that causes the error and redo the imputation after removing that .bam file from your bam list (I tried this and it worked). In my case it was the biggest of all the .bam files, but I am not sure whether the file size is what causes the problem.
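For reference, a minimal R sketch of dropping a suspect BAM from the bam list before re-running STITCH; the file names below are placeholders, not taken from this thread:

# Hedged sketch: remove one suspect BAM path from the bam list and write a
# filtered list to pass to STITCH via the bamlist argument.
bams <- readLines("bamlist.txt")                  # placeholder path
bad  <- "path/to/suspect_sample.bam"              # placeholder path
writeLines(setdiff(bams, bad), "bamlist.filtered.txt")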
Best,
Hi,
Sorry for the late reply, I was on vacation.
The log files suggest a lot of reads, are you imputing entire human chromosomes? If so I recommend imputing in smaller chunks for RAM reasons. My suspicion is that your job is running out of RAM. Is this being run on a cluster? If so, you should be able to see the memory used by the job, and confirm if this is indeed the case.
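As a rough illustration of imputing in smaller chunks, here is a sketch of a STITCH() call restricted to a sub-region using the regionStart, regionEnd, and buffer arguments (these argument names appear in the logs in this thread; all values and paths below are placeholders):

library(STITCH)
# Impute only a chunk of the chromosome; the buffer on each side is used for
# the model, while the output is restricted to the region itself.
STITCH(
    chr = "2",                        # chromosome name as it appears in the BAMs
    bamlist = "bamlist.txt",          # placeholder
    posfile = "pos.chr2.txt",         # placeholder
    outputdir = "stitch_chr2_chunk1/",
    K = 5,
    nGen = 100,
    nCores = 10,
    regionStart = 1,
    regionEnd = 10000000,
    buffer = 250000
)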
Best, Robbie
Thanks for your reply Robbie! My server has 1.5 TB of memory. I use free -g to monitor the memory usage, and there is enough memory. I also collected the error log; could you please help me check the log file? Also, could you tell me how to debug STITCH when running it with R, so that I can collect more detailed log information?
Thanks! Log file: chr10.g06.basevar.v1.k5.220831183455.log (https://1drv.ms/u/s!AgFD4m0V4k2IgSzrYicDa-7uF09E)
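(Not an official STITCH recommendation, just a general R debugging sketch.) One way to see errors more directly is to call STITCH() from an interactive R session rather than through STITCH.R, ideally with nCores = 1 so failures are raised in the main process instead of being hidden inside mclapply workers. The argument names and most values below are taken from the log further down; the rest are placeholders:

library(STITCH)
options(warn = 1)   # print warnings as they occur, not at the end
STITCH(
    chr = "10",
    posfile = "./chr10/pos.basevar.v1.txt",
    bamlist = "bamlist.g06.txt",
    outputdir = "stitch_chr10_debug/",   # placeholder
    K = 5,
    S = 1,
    nGen = 100,
    nCores = 1    # single core: errors and crashes surface in the main session
)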
Thanks @B179895-2020 and @rwdavies, my newest error log is here:
[2022-08-31 18:34:55] Running STITCH(chr = 10, nGen = 100, posfile = ./chr10/pos.basevar.v1.txt, K = 5, S = 1, outputdir = /scratch/chr10.g06.basevar.v1.k5.220831183455/, nStarts = , tempdir = /scratch/, bamlist = bamlist.g06.txt, cramlist = , sampleNames_file = , reference = , genfile = , method = diploid, output_format = bgvcf, B_bit_prob = 16, outputInputInVCFFormat = FALSE, downsampleToCov = 50, downsampleFraction = 1, readAware = TRUE, chrStart = NA, chrEnd = NA, regionStart = NA, regionEnd = NA, buffer = NA, maxDifferenceBetweenReads = 1000, maxEmissionMatrixDifference = 1e+10, alphaMatThreshold = 1e-04, emissionThreshold = 1e-04, iSizeUpperLimit = 600, bqFilter = 17, niterations = 40, shuffleHaplotypeIterations = c(4, 8, 12, 16), splitReadIterations = 25, nCores = 96, expRate = 0.5, maxRate = 100, minRate = 0.1, Jmax = 1000, regenerateInput = TRUE, originalRegionName = NA, keepInterimFiles = FALSE, keepTempDir = FALSE, outputHaplotypeProbabilities = FALSE, switchModelIteration = NA, generateInputOnly = FALSE, restartIterations = NA, refillIterations = c(6, 10, 14, 18), downsampleSamples = 1, downsampleSamplesKeepList = NA, subsetSNPsfile = NA, useSoftClippedBases = FALSE, outputBlockSize = 1000, outputSNPBlockSize = 100000, inputBundleBlockSize = NA, genetic_map_file = , reference_haplotype_file = , reference_legend_file = , reference_sample_file = , reference_populations = NA, reference_phred = 20, reference_iterations = 40, reference_shuffleHaplotypeIterations = c(4, 8, 12, 16), output_filename = stitch.chr10.g06.basevar.v1.k5.vcf.gz, initial_min_hapProb = 0.2, initial_max_hapProb = 0.8, regenerateInputWithDefaultValues = FALSE, plotHapSumDuringIterations = FALSE, plot_shuffle_haplotype_attempts = FALSE, plotAfterImputation = TRUE, save_sampleReadsInfo = FALSE, gridWindowSize = NA, shuffle_bin_nSNPs = NULL, shuffle_bin_radius = 5000, keepSampleReadsInRAM = FALSE, useTempdirWhileWriting = FALSE, output_haplotype_dosages = FALSE, use_bx_tag = TRUE, bxTagUpperLimit = 50000) [2022-08-31 18:34:55] Program start [2022-08-31 18:34:55] Get and validate pos and gen [2022-08-31 18:34:56] Done get and validate pos and gen [2022-08-31 18:34:56] Get BAM sample names [2022-08-31 18:34:56] Done getting BAM sample names [2022-08-31 18:34:56] Generate inputs [2022-08-31 18:34:57] Load and convert BAM 2700 of 3702 [2022-08-31 18:34:57] Load and convert BAM 2200 of 3702
caught segfault address 0xa2b9000, cause 'memory not mapped'
Traceback: 1: cpp_read_reassign(ord = ord, qnameInteger_ord = qnameInteger_ord, bxtagInteger_ord = bxtagInteger_ord, bxtag_bad_ord = bxtag_bad_ord, qname = qname, bxtag = bxtag, strand = strand, sampleReadsRaw = sampleReadsRaw, readStart_ord = readStart_ord, readEnd_ord = readEnd_ord, readStart = readStart, readEnd = readEnd, iSizeUpperLimit = iSizeUpperLimit, bxTagUpperLimit = bxTagUpperLimit, use_bx_tag = use_bx_tag, save_sampleReadsInfo = save_sampleReadsInfo) 2: merge_reads_from_sampleReadsRaw(sampleReadsRaw = sampleReadsRaw, qname = qname, bxtag = bxtag, strand = strand, readStart = readStart, readEnd = readEnd, iSizeUpperLimit = iSizeUpperLimit, use_bx_tag = use_bx_tag, bxTagUpperLimit = bxTagUpperLimit, save_sampleReadsInfo = save_sampleReadsInfo, qname_all = qname_all, readStart_all = readStart_all, readEnd_all = readEnd_all) 3: loadBamAndConvert(iBam = iBam, L = L, pos = pos, nSNPs = nSNPs, bam_files = bam_files, cram_files = cram_files, reference = reference, iSizeUpperLimit = iSizeUpperLimit, bqFilter = bqFilter, chr = chr, N = N, downsampleToCov = downsampleToCov, sampleNames = sampleNames, inputdir = inputdir, useSoftClippedBases = useSoftClippedBases, regionName = regionName, tempdir = tempdir, chrStart = chrStart, chrEnd = chrEnd, chrLength = chrLength, save_sampleReadsInfo = save_sampleReadsInfo, use_bx_tag = use_bx_tag, bxTagUpperLimit = bxTagUpperLimit) 4: FUN(X[[i]], ...) 5: lapply(X = S, FUN = FUN, ...) 6: doTryCatch(return(expr), name, parentenv, handler) 7: tryCatchOne(expr, names, parentenv, handlers[[1L]]) 8: tryCatchList(expr, classes, parentenv, handlers) 9: tryCatch(expr, error = function(e) { call <- conditionCall(e) if (!is.null(call)) { if (identical(call[[1L]], quote(doTryCatch))) call <- sys.call(-4L) dcall <- deparse(call)[1L] prefix <- paste("Error in", dcall, ": ") LONG <- 75L sm <- strsplit(conditionMessage(e), "\n")[[1L]] w <- 14L + nchar(dcall, type = "w") + nchar(sm[1L], type = "w") if (is.na(w)) w <- 14L + nchar(dcall, type = "b") + nchar(sm[1L], type = "b") if (w > LONG) prefix <- paste0(prefix, "\n ") } else prefix <- "Error : " msg <- paste0(prefix, conditionMessage(e), "\n") .Internal(seterrmessage(msg[1L])) if (!silent && isTRUE(getOption("show.error.messages"))) { cat(msg, file = outFile) .Internal(printDeferredWarnings()) } invisible(structure(msg, class = "try-error", condition = e))}) 10: try(lapply(X = S, FUN = FUN, ...), silent = TRUE) 11: sendMaster(try(lapply(X = S, FUN = FUN, ...), silent = TRUE)) 12: FUN(X[[i]], ...) 
13: lapply(seq_len(cores), inner.do) 14: mclapply(1:length(sampleRanges), mc.cores = nCores, FUN = loadBamAndConvert_across_a_range, sampleRanges = sampleRanges, bundling_info = bundling_info, L = L, pos = pos, nSNPs = nSNPs, bam_files = bam_files, cram_files = cram_files, reference = reference, iSizeUpperLimit = iSizeUpperLimit, bqFilter = bqFilter, chr = chr, N = N, downsampleToCov = downsampleToCov, sampleNames = sampleNames, inputdir = inputdir, useSoftClippedBases = useSoftClippedBases, regionName = regionName, tempdir = tempdir, chrStart = chrStart, chrEnd = chrEnd, chrLength = chrLength, save_sampleReadsInfo = save_sampleReadsInfo, use_bx_tag = use_bx_tag, bxTagUpperLimit = bxTagUpperLimit) 15: generate_input(bundling_info = bundling_info, L = L, pos = pos, nSNPs = nSNPs, bam_files = bam_files, cram_files = cram_files, reference = reference, iSizeUpperLimit = iSizeUpperLimit, bqFilter = bqFilter, chr = chr, N = N, downsampleToCov = downsampleToCov, sampleNames = sampleNames, inputdir = inputdir, useSoftClippedBases = useSoftClippedBases, regionName = regionName, tempdir = tempdir, chrStart = chrStart, chrEnd = chrEnd, nCores = nCores, save_sampleReadsInfo = save_sampleReadsInfo, use_bx_tag = use_bx_tag, bxTagUpperLimit = bxTagUpperLimit) 16: generate_or_refactor_input(regenerateInput = regenerateInput, bundling_info = bundling_info, L = L, pos = pos, nSNPs = nSNPs, bam_files = bam_files, cram_files = cram_files, reference = reference, iSizeUpperLimit = iSizeUpperLimit, bqFilter = bqFilter, chr = chr, outputdir = outputdir, N = N, downsampleToCov = downsampleToCov, sampleNames = sampleNames, inputdir = inputdir, useSoftClippedBases = useSoftClippedBases, regionName = regionName, tempdir = tempdir, chrStart = chrStart, chrEnd = chrEnd, generateInputOnly = generateInputOnly, nCores = nCores, save_sampleReadsInfo = save_sampleReadsInfo, use_bx_tag = use_bx_tag, bxTagUpperLimit = bxTagUpperLimit) 17: STITCH(chr = opt$chr, posfile = opt$posfile, K = opt$K, S = opt$S, nGen = opt$nGen, outputdir = opt$outputdir, tempdir = opt$tempdir, bamlist = opt$bamlist, cramlist = opt$cramlist, sampleNames_file = opt$sampleNames_file, reference = opt$reference, genfile = opt$genfile, method = opt$method, output_format = opt$output_format, B_bit_prob = opt$B_bit_prob, outputInputInVCFFormat = opt$outputInputInVCFFormat, downsampleToCov = opt$downsampleToCov, downsampleFraction = opt$downsampleFraction, readAware = opt$readAware, chrStart = opt$chrStart, chrEnd = opt$chrEnd, regionStart = opt$regionStart, regionEnd = opt$regionEnd, buffer = opt$buffer, maxDifferenceBetweenReads = opt$maxDifferenceBetweenReads, maxEmissionMatrixDifference = opt$maxEmissionMatrixDifference, alphaMatThreshold = opt$alphaMatThreshold, emissionThreshold = opt$emissionThreshold, iSizeUpperLimit = opt$iSizeUpperLimit, bqFilter = opt$bqFilter, niterations = opt$niterations, shuffleHaplotypeIterations = eval(parse(text = opt$shuffleHaplotypeIterations)), splitReadIterations = eval(parse(text = opt$splitReadIterations)), nCores = opt$nCores, expRate = opt$expRate, maxRate = opt$maxRate, minRate = opt$minRate, Jmax = opt$Jmax, regenerateInput = opt$regenerateInput, originalRegionName = opt$originalRegionName, keepInterimFiles = opt$keepInterimFiles, keepTempDir = opt$keepTempDir, switchModelIteration = opt$switchModelIteration, generateInputOnly = opt$generateInputOnly, restartIterations = opt$restartIterations, refillIterations = eval(parse(text = opt$refillIterations)), downsampleSamples = opt$downsampleSamples, 
downsampleSamplesKeepList = opt$downsampleSamplesKeepList, subsetSNPsfile = opt$subsetSNPsfile, useSoftClippedBases = opt$useSoftClippedBases, outputBlockSize = opt$outputBlockSize, outputSNPBlockSize = opt$outputSNPBlockSize, inputBundleBlockSize = opt$inputBundleBlockSize, genetic_map_file = opt$genetic_map_file, reference_haplotype_file = opt$reference_haplotype_file, reference_legend_file = opt$reference_legend_file, reference_sample_file = opt$reference_sample_file, reference_populations = eval(parse(text = opt$reference_populations)), reference_phred = opt$reference_phred, reference_iterations = opt$reference_iterations, reference_shuffleHaplotypeIterations = eval(parse(text = opt$reference_shuffleHaplotypeIterations)), output_filename = opt$output_filename, initial_min_hapProb = opt$initial_min_hapProb, initial_max_hapProb = opt$initial_max_hapProb, regenerateInputWithDefaultValues = opt$regenerateInputWithDefaultValues, plotHapSumDuringIterations = opt$plotHapSumDuringIterations, plot_shuffle_haplotype_attempts = opt$plot_shuffle_haplotype_attempts, plotAfterImputation = opt$plotAfterImputation, save_sampleReadsInfo = opt$save_sampleReadsInfo, gridWindowSize = opt$gridWindowSize, shuffle_bin_nSNPs = opt$shuffle_bin_nSNPs, shuffle_bin_radius = opt$shuffle_bin_radius, keepSampleReadsInRAM = opt$keepSampleReadsInRAM, useTempdirWhileWriting = opt$useTempdirWhileWriting, output_haplotype_dosages = opt$output_haplotype_dosages, use_bx_tag = opt$use_bx_tag, bxTagUpperLimit = opt$bxTagUpperLimit) An irrecoverable exception occurred. R is aborting now ... [2022-08-31 18:34:59] Load and convert BAM 1700 of 3702 [2022-08-31 18:35:02] Load and convert BAM 1200 of 3702 [2022-08-31 18:35:04] Load and convert BAM 700 of 3702 [2022-08-31 18:35:06] Load and convert BAM 2900 of 3702 [2022-08-31 18:35:06] Load and convert BAM 2400 of 3702 [2022-08-31 18:35:06] Load and convert BAM 3400 of 3702 [2022-08-31 18:35:07] Load and convert BAM 200 of 3702 [2022-08-31 18:35:11] Load and convert BAM 1900 of 3702 [2022-08-31 18:35:15] Load and convert BAM 900 of 3702 [2022-08-31 18:35:17] Load and convert BAM 1400 of 3702 [2022-08-31 18:35:18] Load and convert BAM 2600 of 3702 [2022-08-31 18:35:18] Load and convert BAM 400 of 3702 [2022-08-31 18:35:18] Load and convert BAM 3600 of 3702 [2022-08-31 18:35:19] Load and convert BAM 3100 of 3702 [2022-08-31 18:35:23] Load and convert BAM 1600 of 3702 [2022-08-31 18:35:25] Load and convert BAM 2100 of 3702 [2022-08-31 18:35:26] Load and convert BAM 600 of 3702 [2022-08-31 18:35:27] Load and convert BAM 1100 of 3702 [2022-08-31 18:35:28] Load and convert BAM 2800 of 3702 [2022-08-31 18:35:34] Load and convert BAM 3300 of 3702 [2022-08-31 18:35:34] Load and convert BAM 1300 of 3702 [2022-08-31 18:35:36] Load and convert BAM 3000 of 3702 [2022-08-31 18:35:36] Load and convert BAM 100 of 3702 [2022-08-31 18:35:36] Load and convert BAM 2300 of 3702 [2022-08-31 18:35:37] Load and convert BAM 800 of 3702 [2022-08-31 18:35:39] Load and convert BAM 1800 of 3702 [2022-08-31 18:35:41] Load and convert BAM 300 of 3702 [2022-08-31 18:35:45] Load and convert BAM 2000 of 3702 [2022-08-31 18:35:47] Load and convert BAM 2500 of 3702 [2022-08-31 18:35:50] Load and convert BAM 3700 of 3702 [2022-08-31 18:35:52] Load and convert BAM 3500 of 3702 [2022-08-31 18:35:52] Load and convert BAM 3200 of 3702 [2022-08-31 18:35:55] Load and convert BAM 500 of 3702 [2022-08-31 18:35:56] Load and convert BAM 1500 of 3702 [2022-08-31 18:36:04] Load and convert BAM 1000 of 3702 [2022-08-31 18:36:09] 
Done generating inputs [2022-08-31 18:36:09] Copying files onto tempdir [2022-08-31 18:36:53] Done copying files onto tempdir [2022-08-31 18:36:53] Generate allele count [2022-08-31 18:37:03] Error in readChar(con, 5L, useBytes = TRUE) : cannot open the connection
Error in check_mclapply_OK(out2) : An error occured during STITCH. The first such error is above Calls: STITCH -> buildAlleleCount -> check_mclapply_OK In addition: Warning messages: 1: In mclapply(1:length(sampleRanges), mc.cores = nCores, FUN = loadBamAndConvert_across_a_range, : scheduled core 22 did not deliver a result, all values of the job will be affected 2: In mclapply(sampleRanges, mc.cores = nCores, FUN = buildAlleleCount_subfunction, : scheduled cores 22 encountered errors in user code, all values of the jobs will be affected Execution halted
Dear @rwdavies, some new updates: after doing some tests, I found that STITCH 1.6.3 does not produce the "memory not mapped" error, while STITCH 1.6.6 does, using the same R version and the same input files. Thanks!
Hi,
If it's not RAM, I don't have any obvious candidates for what the problem would be. Does it always crash on the same BAM file? Can you remove that file from the analysis? Are you able to narrow down the region you're imputing, to see if it's something about certain reads? If it is consistently the same BAM file, can you double check that it meets the specification, using a SAM or BAM validation procedure?
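For the validation step, one lightweight option (an assumption on my part: this uses the Rsamtools package, which is separate from STITCH, and it is not a full SAM/BAM spec validation) is to attempt a complete read-through of every BAM in the list and flag any file that fails to parse:

library(Rsamtools)
bams <- readLines("bamlist.txt")   # placeholder path to the bam list
for (b in bams) {
    # countBam() reads through the whole file; a parse failure shows up as an error
    res <- try(countBam(b), silent = TRUE)
    if (inherits(res, "try-error")) message("problem reading: ", b)
}

A dedicated validator such as Picard's ValidateSamFile would be a more thorough check.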
Robbie