NVlabs / nvdiffrecmc

Official code for the NeurIPS 2022 paper "Shape, Light, and Material Decomposition from Images using Monte Carlo Rendering and Denoising".

Question regarding light probe configurations of NeRF-Synthetic #36

Closed cmh1027 closed 6 months ago

cmh1027 commented 6 months ago

Based on your paper, I see that eight different light probes (city, courtyard, etc.) were used for the evaluation of the NeRFactor scenes. What, then, are the four light probes used in the evaluation of the NeRF-Synthetic dataset?

In addition, it seems that the code you provide here doesn't work with the GT mesh of the Blender dataset.

Error: Python: Traceback (most recent call last):
  File "/Users/cmh/Desktop/blender/nvdiffrecmc/relight_base.py", line 145, in <module>
    relight_scene(SCENE=s, PROBE="data/irrmaps")
  File "/Users/cmhDesktop/blender/nvdiffrecmc/relight_base.py", line 63, in relight_scene
    image_node_ks = bsdf.inputs["Specular"].links[0].from_node
IndexError: tuple index out of range

Maybe it's because the format of the mtl file is different? (I'm not familiar with Blender.) How did you set a different light probe on the GT Blender mesh?

jmunkberg commented 6 months ago

See the caption and probe names in Fig. 24 of the paper: "Figure 24: Relighting examples from Hotdog scene of the NeRF synthetic dataset. We show a single view relit by the four different high frequency light probes from Poly Haven (CC0)."

JHnvidia commented 6 months ago

Hi @cmh1027,

Links for the probes are below. We used the 2k x 2k versions:

https://polyhaven.com/a/aerodynamics_workshop
https://polyhaven.com/a/boiler_room
https://polyhaven.com/a/dreifaltigkeitsberg
https://polyhaven.com/a/music_hall_01
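
A minimal sketch for sanity-checking the downloaded probes before using them (the probes/ directory and the *_2k.hdr filenames are assumptions about how they were saved locally):

import os
import cv2  # OpenCV can decode Radiance .hdr images as float32

PROBE_DIR = "probes"  # assumed local download directory
PROBE_FILES = [
    "aerodynamics_workshop_2k.hdr",
    "boiler_room_2k.hdr",
    "dreifaltigkeitsberg_2k.hdr",
    "music_hall_01_2k.hdr",
]

for name in PROBE_FILES:
    img = cv2.imread(os.path.join(PROBE_DIR, name), cv2.IMREAD_UNCHANGED)
    assert img is not None, "could not read " + name
    print(name, img.shape, img.dtype)  # expect float32, equirectangular layout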

The script is hardcoded to load our exported materials, which are non-standard (the mtl format does not support our BSDF), so it's a bit hacky. For the reference renders we instead loaded and rendered the raw Blender (.blend) files, in which case you don't need to do anything about the materials; they load correctly. The script would look something like the following (untested).

import os
import bpy 
import json
import numpy as np
from mathutils import *
from math import *

RESOLUTION = 512
SAMPLES = 64
BOUNCES = 1
BASE_PATH = "<path_to_mesh_and_probes>"  # replace with your local data root

os.makedirs(os.path.join(BASE_PATH, "ref_relit"), exist_ok=True)

scene = bpy.context.scene
scene.world.use_nodes = True
scene.render.engine = 'CYCLES'
scene.render.film_transparent = True
scene.cycles.device = 'GPU'
scene.cycles.samples = SAMPLES
scene.cycles.max_bounces = BOUNCES
scene.cycles.diffuse_bounces = BOUNCES
scene.cycles.glossy_bounces = BOUNCES
scene.cycles.transmission_bounces = 0
scene.cycles.volume_bounces = 0
scene.cycles.transparent_max_bounces = 8
scene.cycles.use_denoising = True

scene.render.resolution_x = RESOLUTION
scene.render.resolution_y = RESOLUTION
scene.render.resolution_percentage = 100

# OpenEXR output, should not be tonemapped
scene.display_settings.display_device = 'None'
scene.view_settings.view_transform = 'Standard'
scene.view_settings.exposure = 0.0
scene.view_settings.gamma = 1.0
scene.render.image_settings.file_format = 'OPEN_EXR' 

scene.view_layers['ViewLayer'].use_pass_combined=True
scene.view_layers['ViewLayer'].use_pass_diffuse_direct=True
scene.view_layers['ViewLayer'].use_pass_diffuse_color=True
scene.view_layers['ViewLayer'].use_pass_glossy_direct=True
scene.view_layers['ViewLayer'].use_pass_glossy_color=True

def relight_scene(SCENE, POSES, alpha_transparency=False):
    MESH_PATH = os.path.join(BASE_PATH, "nerfactor_data", "blend_files")
    PROBE_PATH = os.path.join(BASE_PATH, "nerfactor_data", "light_probes", "test")
    RESULTS_PATH = os.path.join(BASE_PATH, "ref_relit", SCENE)

    os.makedirs(RESULTS_PATH, exist_ok=True)

    bpy.ops.object.select_all(action="SELECT")
    bpy.ops.object.delete()

    ################### Import mesh ###################

    bpy.ops.wm.open_mainfile(filepath=os.path.join(MESH_PATH, SCENE + ".blend"))

    ################### Load HDR probe ###################

    # Common setup
    val_cfg = json.load(open(os.path.join(BASE_PATH, "nerfactor_data", POSES)))

    camera_data   = bpy.data.cameras.new(name='Camera')
    camera_object = bpy.data.objects.new('Camera', camera_data)
    scene.collection.objects.link(camera_object)
    scene.camera  = camera_object
    camera_data.angle = val_cfg['camera_angle_x']

    # load all probes
    probes = [
        "city",
        "courtyard",
        "forest",
        "interior",
        "night",
        "studio",
        "sunrise",
        "sunset",
    ]

    for p in probes:
        envmap = scene.world.node_tree.nodes.new(type="ShaderNodeTexEnvironment")
        envmap.image = bpy.data.images.load(os.path.join(PROBE_PATH, p + ".hdr"))
        scene.world.node_tree.links.new(envmap.outputs[0], scene.world.node_tree.nodes['Background'].inputs['Color'])

        for i in range(8):
            mtx = np.array(val_cfg['frames'][i]['transform_matrix'], dtype=np.float32)
            scene.camera.matrix_world = Matrix(mtx)
            scene.render.filepath = os.path.join(RESULTS_PATH, p + '_r_{0:03d}'.format(int(i)))
            bpy.ops.render.render(write_still=True)  # render still 

relight_scene(SCENE="nerfactor_hotdog", POSES="hotdog_transforms_val.json")
relight_scene(SCENE="nerfactor_lego", POSES="lego_transforms_val.json")
relight_scene(SCENE="nerfactor_ficus", POSES="ficus_transforms_val.json", alpha_transparency=True)
relight_scene(SCENE="nerfactor_drums", POSES="drums_transforms_val.json", alpha_transparency=True)
cmh1027 commented 6 months ago

@JHnvidia

  File "/Users/cmh/Desktop/blender/nvdiffrecmc/relight_base.py", line 68, in relight_scene
    scene.collection.objects.link(camera_object)
ReferenceError: StructRNA of type Scene has been removed

It gives this error. Any ideas?
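
The likely cause: bpy.ops.wm.open_mainfile replaces everything in the currently loaded file, so any Python references held from before the call (such as the scene variable captured at the top of the script) become invalid. A minimal workaround sketch, re-acquiring the scene and re-applying the render settings right after the file is opened (untested; setup_render is only an illustrative helper):

def setup_render(scene):
    # Re-apply the Cycles and output settings to the freshly loaded scene
    scene.render.engine = 'CYCLES'
    scene.render.film_transparent = True
    scene.cycles.samples = SAMPLES
    scene.render.resolution_x = RESOLUTION
    scene.render.resolution_y = RESOLUTION
    scene.render.image_settings.file_format = 'OPEN_EXR'
    scene.world.use_nodes = True

# inside relight_scene, right after opening the .blend:
bpy.ops.wm.open_mainfile(filepath=os.path.join(MESH_PATH, SCENE + ".blend"))
scene = bpy.context.scene  # the old reference is stale after open_mainfile
setup_render(scene)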

cmh1027 commented 6 months ago

I successfully rendered the relit scenes with the original NeRF-Synthetic Blender script, with a slight modification.

import argparse, sys, os
import json
import bpy
import mathutils
import numpy as np

RESOLUTION = 512
RESULTS_PATH = 'results_500'
DEPTH_SCALE = 1.4
COLOR_DEPTH = 8
FORMAT = 'PNG'
CIRCLE_FIXED_START = (.3,0,0)
SCENE = sys.argv[-1]
PROBE = "data/irrmaps"

ROOT_DIR = "/Users/cmh/Desktop/blender/nvdiffrecmc"

def listify_matrix(matrix):
    matrix_list = []
    for row in matrix:
        matrix_list.append(list(row))
    return matrix_list

# Render Optimizations
bpy.context.scene.render.use_persistent_data = True

# Set up rendering of depth map.
bpy.context.scene.use_nodes = True
tree = bpy.context.scene.node_tree
links = tree.links

# Add passes for additionally dumping albedo and normals.
#bpy.context.scene.view_layers["RenderLayer"].use_pass_normal = True
bpy.context.scene.render.image_settings.file_format = str(FORMAT)
bpy.context.scene.render.image_settings.color_depth = str(COLOR_DEPTH)

# Create input render layer node.
render_layers = tree.nodes.new('CompositorNodeRLayers')

depth_file_output = tree.nodes.new(type="CompositorNodeOutputFile")
depth_file_output.label = 'Depth Output'
if FORMAT == 'OPEN_EXR':
    links.new(render_layers.outputs['Depth'], depth_file_output.inputs[0])
else:
    # Remap as other types can not represent the full range of depth.
    map = tree.nodes.new(type="CompositorNodeMapValue")
    # Size is chosen kind of arbitrarily, try out until you're satisfied with resulting depth map.
    map.offset = [-0.7]
    map.size = [DEPTH_SCALE]
    map.use_min = True
    map.min = [0]
    links.new(render_layers.outputs['Depth'], map.inputs[0])

    links.new(map.outputs[0], depth_file_output.inputs[0])

normal_file_output = tree.nodes.new(type="CompositorNodeOutputFile")
normal_file_output.label = 'Normal Output'
links.new(render_layers.outputs['Normal'], normal_file_output.inputs[0])

# Background
bpy.context.scene.render.dither_intensity = 0.0
bpy.context.scene.render.film_transparent = True

# Create collection for objects not to render with background

objs = [ob for ob in bpy.context.scene.objects if ob.type in ('EMPTY') and 'Empty' in ob.name]
bpy.ops.object.delete({"selected_objects": objs})

def parent_obj_to_camera(b_camera):
    origin = (0, 0, 0)
    b_empty = bpy.data.objects.new("Empty", None)
    b_empty.location = origin
    b_camera.parent = b_empty  # setup parenting

    scn = bpy.context.scene
    scn.collection.objects.link(b_empty)
    bpy.context.view_layer.objects.active = b_empty
    # scn.objects.active = b_empty
    return b_empty

scene = bpy.context.scene
scene.render.resolution_x = RESOLUTION
scene.render.resolution_y = RESOLUTION
scene.render.resolution_percentage = 100

cam = scene.objects['Camera']
cam.location = (0, 4.0, 0.5)
cam_constraint = cam.constraints.new(type='TRACK_TO')
cam_constraint.track_axis = 'TRACK_NEGATIVE_Z'
cam_constraint.up_axis = 'UP_Y'
b_empty = parent_obj_to_camera(cam)
cam_constraint.target = b_empty

scene.render.image_settings.file_format = 'PNG'  # set output format to .png

from glob import glob
from mathutils import Matrix
from math import *

with open(os.path.join(ROOT_DIR, f"data/nerf_synthetic/{SCENE}/transforms_val.json"), 'r') as f:
    val_cfg = json.load(f)

camera_data   = bpy.data.cameras.new(name='Camera')
camera_object = bpy.data.objects.new('Camera', camera_data)
scene.collection.objects.link(camera_object)
scene.camera  = camera_object
camera_data.angle = val_cfg['camera_angle_x']

for DATASET in ["nerf", "nerfactor"]:
    RESULTS_PATH = os.path.join(ROOT_DIR, "relight_gt", DATASET, SCENE)
    for file in glob(os.path.join(ROOT_DIR, PROBE, DATASET, "*.hdr")):
        envmap = scene.world.node_tree.nodes.new(type="ShaderNodeTexEnvironment")
        envmap.image = bpy.data.images.load(file)
        scene.world.node_tree.links.new(envmap.outputs[0], scene.world.node_tree.nodes['Background'].inputs['Color'])
        for i in range(8):
            mtx = np.array(val_cfg['frames'][i]['transform_matrix'], dtype=np.float32)
            scene.camera.matrix_world = Matrix(mtx)
            scene.render.filepath = os.path.join(RESULTS_PATH, os.path.basename(file)[:-4] + '_r_{0:03d}'.format(int(i)))
            bpy.ops.render.render(write_still=True)  # render still 

Execution can be done with a command like the following:

blender --background blend_files/drums.blend --python blend_files/relight.py drums
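
To relight all scenes in one go, a small driver along these lines could work (the scene list is taken from the relight calls above; the blender binary on PATH and the file layout are assumptions, untested):

import subprocess

SCENES = ["drums", "ficus", "hotdog", "lego"]

for scene in SCENES:
    # Mirrors the single-scene command above for each scene
    subprocess.run([
        "blender", "--background", f"blend_files/{scene}.blend",
        "--python", "blend_files/relight.py", scene,
    ], check=True)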