Distributed rendering

This test demonstrates the distributed rendering feature of Lightmetrica.

[1]:
# Automatically reload edited Python modules so code changes are picked up live
%load_ext autoreload
%autoreload 2
[2]:
import os
import imageio
import pandas as pd
import numpy as np
import multiprocessing as mp
%matplotlib inline
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
import lmfunctest as ft
import lmscene
import lightmetrica as lm
[3]:
os.getpid()
[3]:
715
[4]:
%load_ext lightmetrica_jupyter

Worker process

[5]:
%%writefile _run_worker_process.py
import os
import uuid
import traceback
import lightmetrica as lm
def run_worker_process():
    try:
        lm.init('user::default', {})
        lm.info()
        lm.log.setSeverity(1000)
        lm.log.log(lm.log.LogLevel.Err, lm.log.LogLevel.Info, '', 0, 'pid={}'.format(os.getpid()))
        lm.dist.worker.init('dist::worker::default', {
            'name': uuid.uuid4().hex,
            'address': 'localhost',
            'port': 5000,
            'numThreads': 1
        })
        lm.dist.worker.run()
        lm.dist.shutdown()
        lm.shutdown()
    except Exception:
        tr = traceback.print_exc()
        lm.log.log(lm.log.LogLevel.Err, lm.log.LogLevel.Info, '', 0, str(tr))
Writing _run_worker_process.py
[6]:
# Import the worker entry point explicitly (avoid wildcard import, which hides
# where names come from and pollutes the notebook namespace).
from _run_worker_process import run_worker_process
if __name__ == '__main__':
    # Spawn 4 worker processes; each runs run_worker_process as its initializer.
    pool = mp.Pool(4, run_worker_process)

Master process

[7]:
# Initialize the master process with Jupyter-friendly logger/progress reporters,
# then start listening for worker connections.
lm.init()
lm.log.init('logger::jupyter', {})
lm.progress.init('progress::jupyter', {})
lm.dist.init('dist::master::default', {
    'port': 5000  # workers connect to this port (see _run_worker_process.py)
})
lm.dist.printWorkerInfo()
[I|0.000|173@dist  ] Listening [port='5000']
[8]:
# Load the scene, build the acceleration structure, and configure the renderer.
lmscene.load(ft.env.scene_path, 'fireplace_room')
lm.build('accel::sahbvh', {})
# Output film at full-HD resolution
lm.asset('film_output', 'film::bitmap', {'w': 1920, 'h': 1080})
lm.renderer('renderer::raycast', {
    'output': lm.asset('film_output')
})
[I|0.023|48@assets ] Loading asset [name='camera_main']
[I|0.024|48@assets ] Loading asset [name='model_obj']
[I|0.024|29@objload]   Loading OBJ file [path='fireplace_room.obj']
[I|0.024|169@objloa]   Loading MTL file [path='fireplace_room.mtl']
[I|0.025|44@texture]   Loading texture [path='wood.ppm']
[I|0.139|44@texture]   Loading texture [path='leaf.ppm'][I|0.139|263@dist  ]   Connected worker [name='7a81c2ca81bb4ca0a31ce98c07e3a409']

[I|0.145|44@texture]   Loading texture [path='picture8.ppm']
[I|0.203|91@dist   ]   Accepted [name='rep', addr='tcp://0.0.0.0:5003']
[I|0.203|263@dist  ]   Connected worker [name='5cd11944a21c4e9d81e528321ff29119']
[I|0.204|263@dist  ]   Connected worker [name='7d09d0861d1f45d6aaeb90ec18e8cf1a']
[I|0.204|91@dist   ]   Accepted [name='rep', addr='tcp://0.0.0.0:5003']
[I|0.204|263@dist  ]   Connected worker [name='d58977b8419a4bb0bd3a38ad9ee6ea8b']
[I|0.204|91@dist   ]   Accepted [name='rep', addr='tcp://0.0.0.0:5003']
[I|0.204|91@dist   ]   Accepted [name='rep', addr='tcp://0.0.0.0:5003']
[I|0.210|44@texture]   Loading texture [path='wood5.ppm']
[I|2.471|246@scene ] Building acceleration structure [name='accel::sahbvh']
[I|2.471|131@accel_]   Flattening scene
[I|2.598|261@accel_]   Building
[I|5.586|48@assets ] Loading asset [name='film_output']
[9]:
# Distributed render. The call order matters:
# 1. disallow new worker connections (presumably to fix the worker set mid-render),
# 2. sync the scene state to the connected workers ("Saving state to stream"),
# 3. render, 4. gather the workers' films into the master's film asset,
# 5. re-allow connections for subsequent runs.
lm.dist.allowWorkerConnection(False)
lm.dist.sync()
lm.render()
lm.dist.gatherFilm(lm.asset('film_output'))
lm.dist.allowWorkerConnection(True)
[I|6.027|179@user  ] Saving state to stream
[I|8.879|151@user  ] Starting render [name='renderer::raycast']


[10]:
# Copy the gathered film into a numpy array, gamma-correct (gamma 2.2) and display.
img = np.copy(lm.buffer(lm.asset('film_output')))
fig, ax_img = plt.subplots(figsize=(15, 15))
# origin='lower' because the film's row 0 is the bottom scanline
ax_img.imshow(np.clip(img ** (1 / 2.2), 0, 1), origin='lower')
plt.show()
../_images/executed_functest_func_distributed_rendering_12_0.png
[11]:
# Termination of the worker process is necessary for Windows
# because fork() is not supported in Windows.
# cf. https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
pool.terminate()  # stop all worker processes immediately
pool.join()       # wait for them to fully exit
[I|29.302|103@dist  ] Disconnected [name='rep', addr='tcp://0.0.0.0:5003']
[I|29.307|103@dist  ] Disconnected [name='rep', addr='tcp://0.0.0.0:5003']
[I|29.308|103@dist  ] Disconnected [name='rep', addr='tcp://0.0.0.0:5003']
[I|29.313|103@dist  ] Disconnected [name='rep', addr='tcp://0.0.0.0:5003']