Last-Order / Minyami

A lovely video downloader for HLS videos
GNU General Public License v3.0

No space left on device. #114

Closed: bodarycomforts closed this issue 6 months ago

bodarycomforts commented 1 year ago

[MINYAMI][INFO] M3U8 file fetched.
[MINYAMI][WARN] Unsupported encryption method: "SAMPLE-AES-CTR". Chunks will not be decrypted.
[MINYAMI][WARN] Unsupported encryption method: "SAMPLE-AES-CTR". Chunks will not be decrypted.
[MINYAMI][WARN] Unsupported encryption method: "SAMPLE-AES-CTR". Chunks will not be decrypted.
[MINYAMI][INFO] Ctrl+C pressed, waiting for tasks finished.
[MINYAMI][INFO] 2372 chunks downloaded. Start merging chunks.
[MINYAMI][MERGING] [███████████████████████████████████░░░░░] 88% | ETA: 21s | 2095/2372
node:events:498
      throw er; // Unhandled 'error' event
      ^

Error: ENOSPC: no space left on device, write
Emitted 'error' event on WriteStream instance at:
    at emitErrorNT (node:internal/streams/destroy:164:8)
    at emitErrorCloseNT (node:internal/streams/destroy:129:3)
    at processTicksAndRejections (node:internal/process/task_queues:83:21) {
  errno: -4055,
  code: 'ENOSPC',
  syscall: 'write'
}

Node.js v17.4.0

How can I merge them again?

Last-Order commented 1 year ago

You can merge the chunks in the temporary folder in sequence using copy /b (Windows) or cat (Linux).
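
For example (a rough sketch only; the chunk file names below are placeholders for the actual numbered files in the temporary folder, which must be concatenated in playback order):

Windows (cmd):
copy /b chunk_0.ts+chunk_1.ts+chunk_2.ts merged.ts

Linux/macOS:
cat chunk_0.ts chunk_1.ts chunk_2.ts > merged.ts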

fireattack commented 1 year ago

An overly complicated script I use personally (already simplified a bit):

import argparse
import json
from pathlib import Path
from shutil import copyfileobj

def load_json(filename):
    filename = Path(filename)
    with filename.open('r', encoding='utf-8') as f:
        data = json.load(f)
    return data

def concat(files, output):
    # Concatenate the chunk files into a single output file, in the given order.
    output = Path(output)
    output.parent.mkdir(parents=True, exist_ok=True)
    with output.open('wb') as out:
        for i, f in enumerate(files):
            with Path(f).open('rb') as fi:
                copyfileobj(fi, out)
            print(f'Merging.. finished {i + 1}/{len(files)}      ', end='\r')
    print('')

def main(args):
    p = Path(args.path)

    info_file = p / 'task.json'
    if not info_file.exists():
        print('ERROR: No task.json found!')
        return

    info = load_json(info_file)
    default_filename = Path(info['outputPath']).name
    if args.output:
        output = Path(args.output)
        if output.suffix.lower() != '.ts':
            output = output.with_name(output.name + '.ts')
    elif args.output_dir:
        output = Path(args.output_dir) / default_filename
    else:
        output = Path.cwd() / default_filename

    all_files = [p / Path(f).name for f in info['outputFileList']]
    # Group consecutive segments by whether their chunk file exists on disk;
    # each group is (first_index, last_index, downloaded).
    segment_groups = []
    for x, f in enumerate(all_files):
        current_stat = f.exists()
        if x == 0:
            prev_stat = current_stat
            prev_id = x
        elif current_stat != prev_stat:
            # Status flipped: close the previous run of segments.
            segment_groups.append((prev_id, x - 1, prev_stat))
            prev_id = x
            prev_stat = current_stat
        if x == len(all_files) - 1:
            # Close the final run (also covers a status flip on the last segment).
            segment_groups.append((prev_id, x, prev_stat))

    print('Segment status:')
    for start, end, has_file in segment_groups:
        print(f'* {start:05d} - {end:05d}: {"downloaded" if has_file else "missing"}')

    valid_segment_groups = [g for g in segment_groups if g[2]]
    if not valid_segment_groups:
        print('ERROR: No valid segments found!')
        return
    for idx, (start, end, has_file) in enumerate(valid_segment_groups, 1):
        ranges = list(range(start, end + 1))
        files = [all_files[k] for k in ranges] # files are ordered by id now.
        if len(valid_segment_groups) > 1:
            output_ = output.with_name(f'{output.stem} part{idx:02d} {ranges[0]}-{ranges[-1]}{output.suffix}')
        else:
            output_ = output
        if output_.exists():
            print('ERROR: Output file already exists!')
            continue
        print(f'INFO: {len(files)} segments will be merged into {output_}')
        if not args.info:
            temp = output_.with_name(output_.name + '.merging')
            concat(files, temp)
            temp.rename(output_)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("path", nargs='?', default='.', help='minyami temp file folder [default: cwd]')
    parser.add_argument('-O', '--output-dir', help='output parent folder [default: cwd]')
    parser.add_argument('-o', '--output', help='output file fullpath (overrides -O) [default: name in task.json]')
    parser.add_argument('-i', '--info', action='store_true', help='show info only, do not actually merge')
    args = parser.parse_args()
    main(args)

Usage:

python mergeminyami.py "/path/to/minyamitempfolder/minyami_1679206554259"
python mergeminyami.py "/path/to/minyamitempfolder/minyami_1679206554259" -O "path/to/folder/to/save"
python mergeminyami.py "/path/to/minyamitempfolder/minyami_1679206554259" -o "filename.ts"
python mergeminyami.py "/path/to/minyamitempfolder/minyami_1679206554259" -o "path/to/save/file/filename.ts"