Commit 339b71e6 by songxinkai

analysis.py & nn_analysis.py

parent 62da0b21
@@ -6,4 +6,7 @@
*.npz
*.dae
data/*
logs/*
.*swp
ret-*
.nfs*
#!/workspace/S/songxinkai/local/anaconda3/bin/python
import os
import sys
import json
import random
import numpy as np
from struct import unpack, pack
import pandas as pd
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
H = 400              # image height
W = 400              # image width
IMAGE = 6            # number of images (used only in the commented-out per-image loops)
C_SAM = 64           # coarse samples per ray
F_SAM = 128          # fine samples per ray
RAY_BATCH = 32768    # rays per dumped chunk (matches the dump_data/<img>/<ray_id>/ directories)
EMB_LEN = 90         # length of one embedded sample vector
data_dir = "dump_data"
def read_data(filename, skip_lines):
    with open(filename, 'r') as f:
        lines = f.readlines()
    data = [[float(x) for x in line.strip().split("[")[-1].split("]")[0].split(", ")]
            for line_id, line in enumerate(lines) if line_id >= skip_lines]
    return data
def quant(input, quant_step, q_min = -128, q_max = 127):
    quant_step = float(quant_step)
    q_min = int(q_min)
    q_max = int(q_max)
    output = [round(x / quant_step) for x in input]
    for i in range(len(output)):
        output[i] = q_min if output[i] < q_min else output[i]
        output[i] = q_max if output[i] > q_max else output[i]
    return output
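## Example (not part of the original pipeline): a minimal sketch of how quant()
## maps raw embedded values to int8 levels, using the quant_step printed in the
## section below; the input values are illustrative only.
if False:
    example_step = 0.02062862738966942          # assumed from the print in the Quantization section
    example_vals = [-2.6404643058776855, 0.0, 0.0103, 2.193082332611084]
    print (quant(example_vals, example_step))   # -> [-128, 0, 0, 106], clipped to [-128, 127]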
## Quantization
if False:
    i_min = 1.
    i_max = -1.
    quant_bit = 8
    inputs = []
    # for im_id in range(IMAGE):
    for ray_id in range(0, H*W, RAY_BATCH):
        print ("Reading %d"%ray_id)
        n_ray = min(RAY_BATCH, H*W - ray_id)
        input = read_data(os.path.join(data_dir, "0", "%d"%ray_id, "embedded.txt"), C_SAM*n_ray)
        inputs += input
    idx = random.sample(range(len(inputs)), H*W*10)
    for i in idx:
        t_min = min(inputs[i])
        t_max = max(inputs[i])
        i_min = t_min if t_min < i_min else i_min
        i_max = t_max if t_max > i_max else i_max
    abs_max = i_max if i_max + i_min > 0 else -i_min
    quant_step = 2 * abs_max / 2**(quant_bit)
    print (i_min, i_max, quant_step) # -2.6404643058776855 2.193082332611084 0.02062862738966942
    # for im_id in range(IMAGE):
    with open(os.path.join(data_dir, "0", "embedded_quant.bin"), 'wb') as f:
        for i, input in enumerate(inputs):
            if i % (192 * 400) == 0:
                print ("Quantizing %d"%(i/(192*400)))
            i_q = quant(input, quant_step, -128, 127)
            f.write(pack("%dh"%(len(input)), *i_q))
## Delta & Stat
if False:
inputs = [[[[] for z in range(C_SAM+F_SAM)] for w in range(W)] for h in range(H)]
with open (os.path.join(data_dir, "0", "embedded_quant.bin"), 'rb') as f:
for h in range(H):
print ("Reading %d/%d"%(h, H))
for w in range(W):
for z in range(C_SAM + F_SAM):
inputs[h][w][z] += unpack("%dh"%(EMB_LEN), f.read(EMB_LEN * 2))
if sys.argv[1] == "h":
h_idx = sorted(random.sample(range(H-1), int(H / 2)))
h_stat = [0 for i in range(511)]
for h in h_idx:
w_idx = random.sample(range(W), int(W / 2))
print ("Delta H: %d/%d"%(h, H))
for w in w_idx:
for z in range(C_SAM + F_SAM):
for i in range(EMB_LEN):
delta = inputs[h+1][w][z][i] - inputs[h][w][z][i]
h_stat[delta] += 1
print ("h_stat:", h_stat)
with open (os.path.join(data_dir, "0", "embedded_quant_delta_h.bin"), 'wb') as f:
f.write(pack("%di"%(511), *h_stat))
elif sys.argv[1] == "w":
h_idx = sorted(random.sample(range(H), int(H / 2)))
w_stat = [0 for i in range(511)]
for h in h_idx:
w_idx = random.sample(range(W-1), int(W / 2))
print ("Delta W: %d/%d"%(h, H))
for w in w_idx:
for z in range(C_SAM + F_SAM):
for i in range(EMB_LEN):
delta = inputs[h][w+1][z][i] - inputs[h][w][z][i]
w_stat[delta] += 1
print ("w_stat:", w_stat)
with open(os.path.join(data_dir, "0", "embedded_quant_delta_w.bin"), 'wb') as f:
f.write(pack("%di"%(511), *w_stat))
elif sys.argv[1] == "z":
h_idx = sorted(random.sample(range(H), int(H / 2)))
z_stat = [0 for i in range(511)]
for h in h_idx:
print ("Delta Z: %d/%d"%(h, H))
w_idx = random.sample(range(W), int(W / 2))
for w in w_idx:
for z in range(C_SAM + F_SAM - 1):
for i in range(EMB_LEN):
delta = inputs[h][w][z+1][i] - inputs[h][w][z][i]
z_stat[delta] += 1
print ("z_stat:", z_stat)
with open(os.path.join(data_dir, "0", "embedded_quant_delta_z.bin"), 'wb') as f:
f.write(pack("%di"%(511), *z_stat))
if True:
    h_stat = []
    w_stat = []
    z_stat = []
    with open(os.path.join(data_dir, "0", "embedded_quant_delta_h.bin"), 'rb') as f:
        h_stat = unpack("%di"%(511), f.read(511*4))
    with open(os.path.join(data_dir, "0", "embedded_quant_delta_w.bin"), 'rb') as f:
        w_stat = unpack("%di"%(511), f.read(511*4))
    with open(os.path.join(data_dir, "0", "embedded_quant_delta_z.bin"), 'rb') as f:
        z_stat = unpack("%di"%(511), f.read(511*4))
    h_sum = sum(h_stat)
    w_sum = sum(w_stat)
    z_sum = sum(z_stat)
    res = {"h":{}, "w":{}, "z":{}, "all":{}}
    res['h'][0] = h_stat[255]/h_sum
    res['w'][0] = w_stat[255]/w_sum
    res['z'][0] = z_stat[255]/z_sum
    res['all'][0] = (h_stat[255]+w_stat[255]+z_stat[255])/(h_sum+w_sum+z_sum)
    for i in range(9):
        low = max(0, -2**i + 255)
        high = min(510, 2**i-1 + 255)
        h_count = sum(h_stat[low : high+1])
        w_count = sum(w_stat[low : high+1])
        z_count = sum(z_stat[low : high+1])
        res['h'][2**i] = h_count/h_sum
        res['w'][2**i] = w_count/w_sum
        res['z'][2**i] = z_count/z_sum
        res['all'][2**i] = (h_count+w_count+z_count)/(h_sum+w_sum+z_sum)
    print (res)
    plt.figure(dpi=300, figsize=(16,8))
    # plt.rcParams['font.sans-serif']=['SimHei']
    # plt.rcParams['axes.unicode_minus']=False
    idx = ["[0]"]# + ["%d"%2**i for i in range(9)]
    for i in range(9):
        low = max(-255, -2**i)
        high = min(255, 2**i-1)
        idx.append("[%d~%d]"%(low, high))
    h_count = [res['h'][0]] + [res['h'][2**i] for i in range(9)]
    w_count = [res['w'][0]] + [res['w'][2**i] for i in range(9)]
    z_count = [res['z'][0]] + [res['z'][2**i] for i in range(9)]
    all_count = [res['all'][0]] + [res['all'][2**i] for i in range(9)]
    x = np.arange(len(idx))
    width = 0.2
    plt.bar(x-width/2, h_count, label='H', alpha=0.6, width=width)
    plt.bar(x-3*width/2, w_count, label='W', alpha=0.6, width=width)
    plt.bar(x+width/2, z_count, label='Z', alpha=0.6, width=width)
    plt.bar(x+3*width/2, all_count, label='Average', alpha=0.6, width=width)
    plt.legend()
    plt.xlabel('Delta range')   # x axis lists delta ranges
    plt.ylabel('Density')       # y values are fractions of all deltas
    # plt.ylim(0, 1.0)
    # plt.yscale("log")
    # plt.xscale("log")
    print (idx)
    print (h_count)
    plt.xticks(x, idx)
    # for i in range(len(idx)):
    #     plt.text(i-0.4, count[i]+0.02, "%.3f"%count[i], va='center')
    # plt.title('Density')
    plt.tight_layout()
    plt.show()
    plt.savefig("h_delta.png")
dump_data/3/98304/outputs_flat.txt 8388608
dump_data/3/98304/pts.txt 6291456
dump_data/3/98304/z_samples.txt 32768
dump_data/3/98304/viewdirs.txt 65536
dump_data/3/98304/embedded.txt 8388608
dump_data/3/65536/outputs_flat.txt 8388608
dump_data/3/65536/pts.txt 6291456
dump_data/3/65536/z_samples.txt 32768
dump_data/3/65536/viewdirs.txt 65536
dump_data/3/65536/embedded.txt 8388608
dump_data/3/32768/outputs_flat.txt 8388608
dump_data/3/32768/pts.txt 6291456
dump_data/3/32768/z_samples.txt 32768
dump_data/3/32768/viewdirs.txt 65536
dump_data/3/32768/embedded.txt 8388608
dump_data/3/131072/outputs_flat.txt 7405568
dump_data/3/131072/pts.txt 5554176
dump_data/3/131072/z_samples.txt 28928
dump_data/3/131072/viewdirs.txt 57856
dump_data/3/131072/embedded.txt 7405568
dump_data/3/0/outputs_flat.txt 8388608
dump_data/3/0/pts.txt 6291456
dump_data/3/0/z_samples.txt 32768
dump_data/3/0/viewdirs.txt 65536
dump_data/3/0/embedded.txt 8388608
dump_data/1/98304/outputs_flat.txt 8388608
dump_data/1/98304/pts.txt 6291456
dump_data/1/98304/z_samples.txt 32768
dump_data/1/98304/viewdirs.txt 65536
dump_data/1/98304/embedded.txt 8388608
dump_data/1/65536/outputs_flat.txt 8388608
dump_data/1/65536/pts.txt 6291456
dump_data/1/65536/z_samples.txt 32768
dump_data/1/65536/viewdirs.txt 65536
dump_data/1/65536/embedded.txt 8388608
dump_data/1/32768/outputs_flat.txt 8388608
dump_data/1/32768/pts.txt 6291456
dump_data/1/32768/z_samples.txt 32768
dump_data/1/32768/viewdirs.txt 65536
dump_data/1/32768/embedded.txt 8388608
dump_data/1/131072/outputs_flat.txt 7405568
dump_data/1/131072/pts.txt 5554176
dump_data/1/131072/z_samples.txt 28928
dump_data/1/131072/viewdirs.txt 57856
dump_data/1/131072/embedded.txt 7405568
dump_data/1/0/outputs_flat.txt 8388608
dump_data/1/0/pts.txt 6291456
dump_data/1/0/z_samples.txt 32768
dump_data/1/0/viewdirs.txt 65536
dump_data/1/0/embedded.txt 8388608
dump_data/5/98304/outputs_flat.txt 8388608
dump_data/5/98304/pts.txt 6291456
dump_data/5/98304/z_samples.txt 32768
dump_data/5/98304/viewdirs.txt 65536
dump_data/5/98304/embedded.txt 8388608
dump_data/5/65536/outputs_flat.txt 8388608
dump_data/5/65536/pts.txt 6291456
dump_data/5/65536/z_samples.txt 32768
dump_data/5/65536/viewdirs.txt 65536
dump_data/5/65536/embedded.txt 8388608
dump_data/5/32768/outputs_flat.txt 8388608
dump_data/5/32768/pts.txt 6291456
dump_data/5/32768/z_samples.txt 32768
dump_data/5/32768/viewdirs.txt 65536
dump_data/5/32768/embedded.txt 8388608
dump_data/5/131072/outputs_flat.txt 7405568
dump_data/5/131072/pts.txt 5554176
dump_data/5/131072/z_samples.txt 28928
dump_data/5/131072/viewdirs.txt 57856
dump_data/5/131072/embedded.txt 7405568
dump_data/5/0/outputs_flat.txt 8388608
dump_data/5/0/pts.txt 6291456
dump_data/5/0/z_samples.txt 32768
dump_data/5/0/viewdirs.txt 65536
dump_data/5/0/embedded.txt 8388608
dump_data/6/98304/outputs_flat.txt 2097152
dump_data/6/98304/pts.txt 6291456
dump_data/6/98304/z_samples.txt 32768
dump_data/6/98304/viewdirs.txt 65536
dump_data/6/98304/embedded.txt 8388608
dump_data/6/65536/outputs_flat.txt 8388608
dump_data/6/65536/pts.txt 6291456
dump_data/6/65536/z_samples.txt 32768
dump_data/6/65536/viewdirs.txt 65536
dump_data/6/65536/embedded.txt 8388608
dump_data/6/32768/outputs_flat.txt 8388608
dump_data/6/32768/pts.txt 6291456
dump_data/6/32768/z_samples.txt 32768
dump_data/6/32768/viewdirs.txt 65536
dump_data/6/32768/embedded.txt 8388608
dump_data/6/0/outputs_flat.txt 8388608
dump_data/6/0/pts.txt 6291456
dump_data/6/0/z_samples.txt 32768
dump_data/6/0/viewdirs.txt 65536
dump_data/6/0/embedded.txt 8388608
dump_data/2/98304/outputs_flat.txt 8388608
dump_data/2/98304/pts.txt 6291456
dump_data/2/98304/z_samples.txt 32768
dump_data/2/98304/viewdirs.txt 65536
dump_data/2/98304/embedded.txt 8388608
dump_data/2/65536/outputs_flat.txt 8388608
dump_data/2/65536/pts.txt 6291456
dump_data/2/65536/z_samples.txt 32768
dump_data/2/65536/viewdirs.txt 65536
dump_data/2/65536/embedded.txt 8388608
dump_data/2/32768/outputs_flat.txt 8388608
dump_data/2/32768/pts.txt 6291456
dump_data/2/32768/z_samples.txt 32768
dump_data/2/32768/viewdirs.txt 65536
dump_data/2/32768/embedded.txt 8388608
dump_data/2/131072/outputs_flat.txt 7405568
dump_data/2/131072/pts.txt 5554176
dump_data/2/131072/z_samples.txt 28928
dump_data/2/131072/viewdirs.txt 57856
dump_data/2/131072/embedded.txt 7405568
dump_data/2/0/outputs_flat.txt 8388608
dump_data/2/0/pts.txt 6291456
dump_data/2/0/z_samples.txt 32768
dump_data/2/0/viewdirs.txt 65536
dump_data/2/0/embedded.txt 8388608
dump_data/0/98304/outputs_flat.txt 8388608
dump_data/0/98304/pts.txt 6291456
dump_data/0/98304/z_samples.txt 32768
dump_data/0/98304/viewdirs.txt 65536
dump_data/0/98304/embedded.txt 8388608
dump_data/0/65536/outputs_flat.txt 8388608
dump_data/0/65536/pts.txt 6291456
dump_data/0/65536/z_samples.txt 32768
dump_data/0/65536/viewdirs.txt 65536
dump_data/0/65536/embedded.txt 8388608
dump_data/0/32768/outputs_flat.txt 8388608
dump_data/0/32768/pts.txt 6291456
dump_data/0/32768/z_samples.txt 32768
dump_data/0/32768/viewdirs.txt 65536
dump_data/0/32768/embedded.txt 8388608
dump_data/0/131072/outputs_flat.txt 7405568
dump_data/0/131072/pts.txt 5554176
dump_data/0/131072/z_samples.txt 28928
dump_data/0/131072/viewdirs.txt 57856
dump_data/0/131072/embedded.txt 7405568
dump_data/0/0/outputs_flat.txt 8388608
dump_data/0/0/pts.txt 6291456
dump_data/0/0/z_samples.txt 32768
dump_data/0/0/viewdirs.txt 65536
dump_data/0/0/embedded.txt 8388608
dump_data/4/98304/outputs_flat.txt 8388608
dump_data/4/98304/pts.txt 6291456
dump_data/4/98304/z_samples.txt 32768
dump_data/4/98304/viewdirs.txt 65536
dump_data/4/98304/embedded.txt 8388608
dump_data/4/65536/outputs_flat.txt 8388608
dump_data/4/65536/pts.txt 6291456
dump_data/4/65536/z_samples.txt 32768
dump_data/4/65536/viewdirs.txt 65536
dump_data/4/65536/embedded.txt 8388608
dump_data/4/32768/outputs_flat.txt 8388608
dump_data/4/32768/pts.txt 6291456
dump_data/4/32768/z_samples.txt 32768
dump_data/4/32768/viewdirs.txt 65536
dump_data/4/32768/embedded.txt 8388608
dump_data/4/131072/outputs_flat.txt 7405568
dump_data/4/131072/pts.txt 5554176
dump_data/4/131072/z_samples.txt 28928
dump_data/4/131072/viewdirs.txt 57856
dump_data/4/131072/embedded.txt 7405568
dump_data/4/0/outputs_flat.txt 8388608
dump_data/4/0/pts.txt 6291456
dump_data/4/0/z_samples.txt 32768
dump_data/4/0/viewdirs.txt 65536
dump_data/4/0/embedded.txt 8388608
#!/bin/bash
#- Job parameters
# (TODO)
# Please modify job name
#SBATCH -J test # The job name
#SBATCH -o ret-%j.out # Write the standard output to file named 'ret-<job_number>.out'
#SBATCH -e ret-%j.err # Write the standard error to file named 'ret-<job_number>.err'
#- Needed resources
# (TODO)
# Please modify your requirements
#SBATCH -p nv-gpu#,nv-gpu-hw # Submit to the 'nv-gpu' partition ('nv-gpu-hw' is disabled by the inline '#')
#SBATCH -t 0-8:00:00 # Run for a maximum time of 0 days, 8 hours, 00 mins, 00 secs
#SBATCH --nodes=1 # Request 1 node
#SBATCH --gres=gpu:4 # Request 4 GPUs per node
#SBATCH --gres-flags=enforce-binding # CPU-GPU affinity
#SBATCH --constraint="Volta" # Request GPU type: Volta (V100 or V100S) or RTX8000
###
### The system will alloc 8 cores per gpu by default.
### If you need more or less, use following:
### #SBATCH --cpus-per-task=K # Request K cores
###
#SBATCH --qos=gpu-short # Request QOS Type
#- Operations
echo "Job start at $(date "+%Y-%m-%d %H:%M:%S")"
echo "Job run at:"
echo "$(hostnamectl)"
#- Load environments
source /tools/module_env.sh
module list # list modules loaded by default
##- tools
module load cluster-tools/v1.0
module load cmake/3.15.7
module load git/2.17.1
module load vim/8.1.2424
##- language
module load python3/3.6.8
##- cuda
module load cuda-cudnn/11.0-8.0.4
##- virtualenv
# source xxxxx/activate
#- Log information
echo $(module list) # list modules loaded
echo $(which gcc)
echo $(which python)
echo $(which python3)
cluster-quota # nas quota
nvidia-smi --format=csv --query-gpu=name,driver_version,power.limit # gpu info
echo "Use GPU ${CUDA_VISIBLE_DEVICES}$" # which gpus
#- Warning! Please do not change your CUDA_VISIBLE_DEVICES
#- in `.bashrc`, `env.sh`, or your job script
#- Job step
# sleep 28800
sleep 108000
#- End
echo "Job end at $(date "+%Y-%m-%d %H:%M:%S")"
/home/S/songxinkai/lustre/big_data/nerf/
Loaded blender (138, 400, 400, 4) torch.Size([40, 4, 4]) [400, 400, 555.5555155968841] ./data/nerf_synthetic/lego
Found ckpts ['./logs/blender_paper_lego/010000.tar', './logs/blender_paper_lego/020000.tar', './logs/blender_paper_lego/030000.tar', './logs/blender_paper_lego/040000.tar', './logs/blender_paper_lego/050000.tar', './logs/blender_paper_lego/060000.tar', './logs/blender_paper_lego/070000.tar', './logs/blender_paper_lego/080000.tar', './logs/blender_paper_lego/090000.tar', './logs/blender_paper_lego/100000.tar', './logs/blender_paper_lego/110000.tar', './logs/blender_paper_lego/120000.tar', './logs/blender_paper_lego/130000.tar', './logs/blender_paper_lego/140000.tar', './logs/blender_paper_lego/150000.tar', './logs/blender_paper_lego/160000.tar', './logs/blender_paper_lego/170000.tar', './logs/blender_paper_lego/180000.tar', './logs/blender_paper_lego/190000.tar', './logs/blender_paper_lego/200000.tar']
Reloading from ./logs/blender_paper_lego/200000.tar
Not ndc!
RENDER ONLY
test poses shape torch.Size([40, 4, 4])
0 0.004492044448852539
t_embed_fn: 0.005682706832885742
t_embedded_dirs: 0.0016887187957763672
t_batchify: 0.06836938858032227
ray2rgb: 0.2894630432128906
-----------------------------------------
t_embed_fn: 0.014403581619262695
t_embedded_dirs: 0.0024747848510742188
t_batchify: 0.7604551315307617
ray2rgb: 0.2795720100402832
-----------------------------------------
t_embed_fn: 0.0004591941833496094
t_embedded_dirs: 0.00024056434631347656
t_batchify: 0.06850290298461914
ray2rgb: 0.27981114387512207
-----------------------------------------
t_embed_fn: 0.0004658699035644531
t_embedded_dirs: 0.00024175643920898438
t_batchify: 0.7794499397277832
ray2rgb: 0.2809183597564697
-----------------------------------------
t_embed_fn: 0.0004684925079345703
t_embedded_dirs: 0.00025177001953125
t_batchify: 0.06871342658996582
ray2rgb: 0.2805180549621582
-----------------------------------------
t_embed_fn: 0.0004611015319824219
t_embedded_dirs: 0.0002410411834716797
t_batchify: 0.787761926651001
ray2rgb: 0.2691304683685303
-----------------------------------------
t_embed_fn: 0.000457763671875
t_embedded_dirs: 0.0002410411834716797
t_batchify: 0.06810331344604492
ray2rgb: 0.2812983989715576
-----------------------------------------
t_embed_fn: 0.00046372413635253906
t_embedded_dirs: 0.00024271011352539062
t_batchify: 0.7798621654510498
ray2rgb: 0.2809314727783203
-----------------------------------------
t_embed_fn: 0.0004799365997314453
t_embedded_dirs: 0.0002498626708984375
t_batchify: 0.034607887268066406
ray2rgb: 0.27315640449523926
-----------------------------------------
t_embed_fn: 0.0004665851593017578
t_embedded_dirs: 0.00025200843811035156
t_batchify: 0.658602237701416
ray2rgb: 0.27873849868774414
-----------------------------------------
torch.Size([400, 400, 3]) torch.Size([400, 400])
1 6.97077488899231
t_embed_fn: 0.00046634674072265625
t_embedded_dirs: 0.0002415180206298828
t_batchify: 0.06775736808776855
ray2rgb: 0.28147101402282715
-----------------------------------------
t_embed_fn: 0.0004673004150390625
t_embedded_dirs: 0.0002429485321044922
t_batchify: 0.779726505279541
ray2rgb: 0.28161001205444336
-----------------------------------------
t_embed_fn: 0.0004620552062988281
t_embedded_dirs: 0.000240325927734375
t_batchify: 0.06871604919433594
ray2rgb: 0.281984806060791
-----------------------------------------
t_embed_fn: 0.0004589557647705078
t_embedded_dirs: 0.0002524852752685547
t_batchify: 0.7820725440979004
ray2rgb: 0.28073787689208984
-----------------------------------------
t_embed_fn: 0.00045800209045410156
t_embedded_dirs: 0.0002396106719970703
t_batchify: 0.06891512870788574
ray2rgb: 0.2814192771911621
-----------------------------------------
t_embed_fn: 0.0004596710205078125
t_embedded_dirs: 0.00024271011352539062
t_batchify: 0.7825844287872314
ray2rgb: 0.28194379806518555
-----------------------------------------
t_embed_fn: 0.00046563148498535156
t_embedded_dirs: 0.0002410411834716797
t_batchify: 0.06768918037414551
ray2rgb: 0.2820413112640381
-----------------------------------------
t_embed_fn: 0.00046372413635253906
t_embedded_dirs: 0.0002422332763671875
t_batchify: 0.7856667041778564
ray2rgb: 0.2827110290527344
-----------------------------------------
t_embed_fn: 0.00047588348388671875
t_embedded_dirs: 0.00026035308837890625
t_batchify: 0.034928083419799805
ray2rgb: 0.2755904197692871
-----------------------------------------
t_embed_fn: 0.000476837158203125
t_embedded_dirs: 0.00024390220642089844
t_batchify: 0.6625657081604004
ray2rgb: 0.280637264251709
-----------------------------------------
2 6.978453636169434
t_embed_fn: 0.0004558563232421875
t_embedded_dirs: 0.0002455711364746094
t_batchify: 0.06855106353759766
ray2rgb: 0.28368067741394043
-----------------------------------------
t_embed_fn: 0.0004603862762451172
t_embedded_dirs: 0.00024127960205078125
t_batchify: 0.7885024547576904
ray2rgb: 0.2841043472290039
-----------------------------------------
t_embed_fn: 0.00046825408935546875
t_embedded_dirs: 0.00024127960205078125
t_batchify: 0.06924700736999512
ray2rgb: 0.2820124626159668
-----------------------------------------
t_embed_fn: 0.00046944618225097656
t_embedded_dirs: 0.00024318695068359375
t_batchify: 0.7875931262969971
ray2rgb: 0.28377556800842285
-----------------------------------------
t_embed_fn: 0.0004551410675048828
t_embedded_dirs: 0.000240325927734375
t_batchify: 0.06899261474609375
ray2rgb: 0.2845749855041504
-----------------------------------------
t_embed_fn: 0.0004611015319824219
t_embedded_dirs: 0.0002415180206298828
t_batchify: 0.7873523235321045
ray2rgb: 0.2839233875274658
-----------------------------------------
t_embed_fn: 0.00046443939208984375
t_embedded_dirs: 0.0002415180206298828
t_batchify: 0.06952071189880371
ray2rgb: 0.2837350368499756
-----------------------------------------
t_embed_fn: 0.000457763671875
t_embedded_dirs: 0.00025153160095214844
t_batchify: 0.7861335277557373
ray2rgb: 0.28296494483947754
-----------------------------------------
t_embed_fn: 0.00048542022705078125
t_embedded_dirs: 0.00025177001953125
t_batchify: 0.03527951240539551
ray2rgb: 0.2756001949310303
-----------------------------------------
t_embed_fn: 0.0004687309265136719
t_embedded_dirs: 0.00024366378784179688
t_batchify: 0.6655068397521973
ray2rgb: 0.28540825843811035
-----------------------------------------
3 7.068856716156006
t_embed_fn: 0.0005204677581787109
t_embedded_dirs: 0.0002620220184326172
t_batchify: 0.06601071357727051
ray2rgb: 0.28430628776550293
-----------------------------------------
t_embed_fn: 0.00031304359436035156
t_embedded_dirs: 0.00016808509826660156
t_batchify: 0.7924971580505371
ray2rgb: 0.2848021984100342
-----------------------------------------
t_embed_fn: 0.0003113746643066406
t_embedded_dirs: 0.0001652240753173828
t_batchify: 0.06985926628112793
ray2rgb: 0.28444910049438477
-----------------------------------------
t_embed_fn: 0.00031685829162597656
t_embedded_dirs: 0.00016808509826660156
t_batchify: 0.7881932258605957
ray2rgb: 0.2849085330963135
-----------------------------------------
t_embed_fn: 0.0003437995910644531
t_embedded_dirs: 0.0001804828643798828
t_batchify: 0.06979918479919434
ray2rgb: 0.28516101837158203
-----------------------------------------
t_embed_fn: 0.0003170967102050781
t_embedded_dirs: 0.0001678466796875
t_batchify: 0.7910106182098389
ray2rgb: 0.28624701499938965
-----------------------------------------
t_embed_fn: 0.0004642009735107422
t_embedded_dirs: 0.0011603832244873047
t_batchify: 0.06853938102722168
ray2rgb: 0.28592681884765625
-----------------------------------------
t_embed_fn: 0.0003268718719482422
t_embedded_dirs: 0.0001678466796875
t_batchify: 0.7878212928771973
ray2rgb: 0.2861330509185791
-----------------------------------------
t_embed_fn: 0.00038552284240722656
t_embedded_dirs: 0.0001900196075439453
t_batchify: 0.03547835350036621
ray2rgb: 0.2775297164916992
-----------------------------------------
t_embed_fn: 0.0003230571746826172
t_embedded_dirs: 0.000171661376953125
t_batchify: 0.668581485748291
ray2rgb: 0.2830989360809326
-----------------------------------------
4 7.0523011684417725
t_embed_fn: 0.00033593177795410156
t_embedded_dirs: 0.0001704692840576172
t_batchify: 0.06680417060852051
ray2rgb: 0.28603339195251465
-----------------------------------------
t_embed_fn: 0.0003075599670410156
t_embedded_dirs: 0.00017333030700683594
t_batchify: 0.7934117317199707
ray2rgb: 0.2856314182281494
-----------------------------------------
t_embed_fn: 0.0003457069396972656
t_embedded_dirs: 0.0001728534698486328
t_batchify: 0.06957268714904785
ray2rgb: 0.2863452434539795
-----------------------------------------
t_embed_fn: 0.0003075599670410156
t_embedded_dirs: 0.00016546249389648438
t_batchify: 0.7904298305511475
ray2rgb: 0.2852799892425537
-----------------------------------------
t_embed_fn: 0.0003097057342529297
t_embedded_dirs: 0.0001652240753173828
t_batchify: 0.06972670555114746
ray2rgb: 0.287151575088501
-----------------------------------------
t_embed_fn: 0.0003457069396972656
t_embedded_dirs: 0.00017118453979492188
t_batchify: 0.7930221557617188
ray2rgb: 0.2854950428009033
-----------------------------------------
t_embed_fn: 0.0003116130828857422
t_embedded_dirs: 0.0001697540283203125
t_batchify: 0.06962013244628906
ray2rgb: 0.28666019439697266
-----------------------------------------
t_embed_fn: 0.00031757354736328125
t_embedded_dirs: 0.00016689300537109375
t_batchify: 0.7920510768890381
ray2rgb: 0.28590869903564453
-----------------------------------------
t_embed_fn: 0.00032448768615722656
t_embedded_dirs: 0.00018215179443359375
t_batchify: 0.03541421890258789
ray2rgb: 0.2790720462799072
-----------------------------------------
t_embed_fn: 0.00031876564025878906
t_embedded_dirs: 0.00016832351684570312
t_batchify: 0.6691431999206543
ray2rgb: 0.28513097763061523
-----------------------------------------
5 7.071180582046509
t_embed_fn: 0.00032639503479003906
t_embedded_dirs: 0.0001773834228515625
t_batchify: 0.06896829605102539
ray2rgb: 0.2865118980407715
-----------------------------------------
t_embed_fn: 0.0003066062927246094
t_embedded_dirs: 0.0001659393310546875
t_batchify: 0.7945582866668701
ray2rgb: 0.2872772216796875
-----------------------------------------
t_embed_fn: 0.00031757354736328125
t_embedded_dirs: 0.0001666545867919922
t_batchify: 0.07000350952148438
ray2rgb: 0.2875864505767822
-----------------------------------------
t_embed_fn: 0.0003113746643066406
t_embedded_dirs: 0.00016736984252929688
t_batchify: 0.7962586879730225
ray2rgb: 0.28728818893432617
-----------------------------------------
t_embed_fn: 0.00030922889709472656
t_embedded_dirs: 0.00016355514526367188
t_batchify: 0.0678701400756836
ray2rgb: 0.2886185646057129
-----------------------------------------
t_embed_fn: 0.0003063678741455078
t_embedded_dirs: 0.00017523765563964844
t_batchify: 0.7986676692962646
ray2rgb: 0.2871408462524414
-----------------------------------------
t_embed_fn: 0.0003037452697753906
t_embedded_dirs: 0.00016498565673828125
t_batchify: 0.0703880786895752
ray2rgb: 0.28737354278564453
-----------------------------------------
t_embed_fn: 0.0003063678741455078
t_embedded_dirs: 0.0001666545867919922
t_batchify: 0.7944388389587402
ray2rgb: 0.2871971130371094
-----------------------------------------
t_embed_fn: 0.0003612041473388672
t_embedded_dirs: 0.000186920166015625
t_batchify: 0.035624027252197266
ray2rgb: 0.28030967712402344
-----------------------------------------
t_embed_fn: 0.0003294944763183594
t_embedded_dirs: 0.0001704692840576172
t_batchify: 0.6696650981903076
ray2rgb: 0.28522181510925293
-----------------------------------------
6 7.098287105560303
t_embed_fn: 0.0003135204315185547
t_embedded_dirs: 0.00016880035400390625
t_batchify: 0.06933403015136719
ray2rgb: 0.287503719329834
-----------------------------------------
t_embed_fn: 0.00031447410583496094
t_embedded_dirs: 0.0001685619354248047
t_batchify: 0.79813551902771
ray2rgb: 0.2878425121307373
-----------------------------------------
t_embed_fn: 0.00030612945556640625
t_embedded_dirs: 0.0001704692840576172
t_batchify: 0.07029604911804199
ray2rgb: 0.2875711917877197
-----------------------------------------
t_embed_fn: 0.0003070831298828125
t_embedded_dirs: 0.00017595291137695312
t_batchify: 0.7977397441864014
ray2rgb: 0.2881050109863281
-----------------------------------------
t_embed_fn: 0.0003063678741455078
t_embedded_dirs: 0.00016450881958007812
t_batchify: 0.06840014457702637
ray2rgb: 0.2889244556427002
-----------------------------------------
t_embed_fn: 0.00030684471130371094
t_embedded_dirs: 0.0001735687255859375
t_batchify: 0.7990615367889404
ray2rgb: 0.28751182556152344
-----------------------------------------
t_embed_fn: 0.0002994537353515625
t_embedded_dirs: 0.00016164779663085938
t_batchify: 0.07027292251586914
ray2rgb: 0.2886650562286377
-----------------------------------------
t_embed_fn: 0.0003058910369873047
t_embedded_dirs: 0.00016617774963378906
t_batchify: 0.7976260185241699
ray2rgb: 0.29089832305908203
-----------------------------------------
t_embed_fn: 0.0003304481506347656
t_embedded_dirs: 0.00017762184143066406
t_batchify: 0.03605341911315918
ray2rgb: 0.2863779067993164
-----------------------------------------
t_embed_fn: 0.00032067298889160156
t_embedded_dirs: 0.00016951560974121094
t_batchify: 0.6979985237121582
ray2rgb: 0.29709935188293457
-----------------------------------------
7 7.1637420654296875
t_embed_fn: 0.0003032684326171875
t_embedded_dirs: 0.000171661376953125
t_batchify: 0.06929159164428711
ray2rgb: 0.29379844665527344
-----------------------------------------
t_embed_fn: 0.0003535747528076172
t_embedded_dirs: 0.00017213821411132812
t_batchify: 0.8232057094573975
ray2rgb: 0.29676175117492676
-----------------------------------------
t_embed_fn: 0.0003032684326171875
t_embedded_dirs: 0.00017309188842773438
t_batchify: 0.07198190689086914
ray2rgb: 0.294921875
-----------------------------------------
t_embed_fn: 0.00030517578125
t_embedded_dirs: 0.0001723766326904297
t_batchify: 0.8284909725189209
ray2rgb: 0.30800342559814453
-----------------------------------------
t_embed_fn: 0.0003190040588378906
t_embedded_dirs: 0.00017070770263671875
t_batchify: 0.07243728637695312
ray2rgb: 0.30623435974121094
-----------------------------------------
t_embed_fn: 0.00031447410583496094
t_embedded_dirs: 0.0001671314239501953
t_batchify: 0.8256947994232178
ray2rgb: 0.30234646797180176
-----------------------------------------
t_embed_fn: 0.00031065940856933594
t_embedded_dirs: 0.00016498565673828125
t_batchify: 0.07234406471252441
ray2rgb: 0.30153822898864746
-----------------------------------------
t_embed_fn: 0.0003154277801513672
t_embedded_dirs: 0.00016641616821289062
t_batchify: 0.8374838829040527
ray2rgb: 0.3149149417877197
-----------------------------------------
t_embed_fn: 0.00032067298889160156
t_embedded_dirs: 0.00017333030700683594
t_batchify: 0.03859353065490723
ray2rgb: 0.3007385730743408
-----------------------------------------
t_embed_fn: 0.00031638145446777344
t_embedded_dirs: 0.00017762184143066406
t_batchify: 0.7090034484863281
ray2rgb: 0.3108022212982178
-----------------------------------------
8 7.446503162384033
t_embed_fn: 0.00030541419982910156
t_embedded_dirs: 0.0001633167266845703
t_batchify: 0.07162642478942871
ray2rgb: 0.3083181381225586
-----------------------------------------
t_embed_fn: 0.00030732154846191406
t_embedded_dirs: 0.00016641616821289062
t_batchify: 0.8556897640228271
ray2rgb: 0.3234529495239258
-----------------------------------------
t_embed_fn: 0.0003063678741455078
t_embedded_dirs: 0.00016307830810546875
t_batchify: 0.07731962203979492
ray2rgb: 0.30915403366088867
-----------------------------------------
t_embed_fn: 0.00030612945556640625
t_embedded_dirs: 0.00016570091247558594
t_batchify: 0.8618259429931641
ray2rgb: 0.3239734172821045
-----------------------------------------
t_embed_fn: 0.00031113624572753906
t_embedded_dirs: 0.00016379356384277344
t_batchify: 0.07597661018371582
ray2rgb: 0.33304476737976074
-----------------------------------------
t_embed_fn: 0.00031566619873046875
t_embedded_dirs: 0.00016736984252929688
t_batchify: 0.9094982147216797
ray2rgb: 0.3379640579223633
-----------------------------------------
t_embed_fn: 0.00030493736267089844
t_embedded_dirs: 0.0001633167266845703
t_batchify: 0.07642292976379395
ray2rgb: 0.33417654037475586
-----------------------------------------
t_embed_fn: 0.0003066062927246094
t_embedded_dirs: 0.0001659393310546875
t_batchify: 0.9133834838867188
ray2rgb: 0.3372185230255127
-----------------------------------------
t_embed_fn: 0.00033402442932128906
t_embedded_dirs: 0.00017547607421875
t_batchify: 0.04010748863220215
ray2rgb: 0.3241281509399414
-----------------------------------------
t_embed_fn: 0.000316619873046875
t_embedded_dirs: 0.00016999244689941406
t_batchify: 0.7709331512451172
ray2rgb: 0.3345928192138672
-----------------------------------------
9 7.987747430801392
t_embed_fn: 0.00034236907958984375
t_embedded_dirs: 0.0001685619354248047
t_batchify: 0.06984210014343262
ray2rgb: 0.3253021240234375
-----------------------------------------
t_embed_fn: 0.00030732154846191406
t_embedded_dirs: 0.00016641616821289062
t_batchify: 0.8980464935302734
ray2rgb: 0.3296833038330078
-----------------------------------------
t_embed_fn: 0.0003161430358886719
t_embedded_dirs: 0.00016760826110839844
t_batchify: 0.07489275932312012
ray2rgb: 0.3269312381744385
-----------------------------------------
t_embed_fn: 0.0003173351287841797
t_embedded_dirs: 0.0001685619354248047
t_batchify: 0.9006495475769043
ray2rgb: 0.33008360862731934
-----------------------------------------
t_embed_fn: 0.0003018379211425781
t_embedded_dirs: 0.0001723766326904297
t_batchify: 0.07682490348815918
ray2rgb: 0.320157527923584
-----------------------------------------
t_embed_fn: 0.0003218650817871094
t_embedded_dirs: 0.00017261505126953125
t_batchify: 0.8988299369812012
ray2rgb: 0.35580897331237793
-----------------------------------------
t_embed_fn: 0.0003097057342529297
t_embedded_dirs: 0.00016546249389648438
t_batchify: 0.07851052284240723
ray2rgb: 0.3561899662017822
-----------------------------------------
t_embed_fn: 0.00030684471130371094
t_embedded_dirs: 0.0001747608184814453
t_batchify: 0.9597134590148926
ray2rgb: 0.35683608055114746
-----------------------------------------
t_embed_fn: 0.00032806396484375
t_embedded_dirs: 0.00017642974853515625
t_batchify: 0.04261517524719238
ray2rgb: 0.34051513671875
-----------------------------------------
t_embed_fn: 0.0003273487091064453
t_embedded_dirs: 0.0001704692840576172
t_batchify: 0.810981273651123
ray2rgb: 0.353435754776001
-----------------------------------------
10 8.276435613632202
t_embed_fn: 0.0003058910369873047
t_embedded_dirs: 0.000164031982421875
t_batchify: 0.07105040550231934
ray2rgb: 0.34308481216430664
-----------------------------------------
t_embed_fn: 0.00031304359436035156
t_embedded_dirs: 0.0001735687255859375
t_batchify: 0.9458045959472656
ray2rgb: 0.3525683879852295
-----------------------------------------
t_embed_fn: 0.0003039836883544922
t_embedded_dirs: 0.00016355514526367188
t_batchify: 0.07841086387634277
ray2rgb: 0.34489870071411133
-----------------------------------------
t_embed_fn: 0.00030231475830078125
t_embedded_dirs: 0.00017213821411132812
t_batchify: 0.9426634311676025
ray2rgb: 0.34948253631591797
-----------------------------------------
t_embed_fn: 0.0003085136413574219
t_embedded_dirs: 0.0001633167266845703
t_batchify: 0.0780637264251709
ray2rgb: 0.3457372188568115
-----------------------------------------
t_embed_fn: 0.00031638145446777344
t_embedded_dirs: 0.00016760826110839844
t_batchify: 0.9429023265838623
ray2rgb: 0.34993553161621094
-----------------------------------------
t_embed_fn: 0.00030875205993652344
t_embedded_dirs: 0.0001652240753173828
t_batchify: 0.0770268440246582
ray2rgb: 0.35903477668762207
-----------------------------------------
t_embed_fn: 0.0003082752227783203
t_embedded_dirs: 0.00016736984252929688
t_batchify: 0.967620849609375
ray2rgb: 0.35944604873657227
-----------------------------------------
t_embed_fn: 0.0003209114074707031
t_embedded_dirs: 0.00017380714416503906
t_batchify: 0.0424342155456543
ray2rgb: 0.34340906143188477
-----------------------------------------
t_embed_fn: 0.00032019615173339844
t_embedded_dirs: 0.00016951560974121094
t_batchify: 0.8129065036773682
ray2rgb: 0.35634493827819824
-----------------------------------------
11 8.535687685012817
t_embed_fn: 0.0003170967102050781
t_embedded_dirs: 0.0001652240753173828
t_batchify: 0.07183122634887695
ray2rgb: 0.34652066230773926
-----------------------------------------
t_embed_fn: 0.00030612945556640625
t_embedded_dirs: 0.00016570091247558594
t_batchify: 0.9520416259765625
ray2rgb: 0.35430169105529785
-----------------------------------------
t_embed_fn: 0.0003101825714111328
t_embedded_dirs: 0.0001628398895263672
t_batchify: 0.07814192771911621
ray2rgb: 0.32391810417175293
-----------------------------------------
t_embed_fn: 0.0003085136413574219
t_embedded_dirs: 0.00016617774963378906
t_batchify: 1.111849069595337
ray2rgb: 0.3652315139770508
-----------------------------------------
t_embed_fn: 0.0003025531768798828
t_embedded_dirs: 0.0001633167266845703
t_batchify: 0.08061718940734863
ray2rgb: 0.36730027198791504
-----------------------------------------
t_embed_fn: 0.000308990478515625
t_embedded_dirs: 0.00017499923706054688
t_batchify: 0.9792819023132324
ray2rgb: 0.36707496643066406
-----------------------------------------
t_embed_fn: 0.00030612945556640625
t_embedded_dirs: 0.0001628398895263672
t_batchify: 0.07916784286499023
ray2rgb: 0.3696413040161133
-----------------------------------------
t_embed_fn: 0.0003323554992675781
t_embedded_dirs: 0.00016832351684570312
t_batchify: 0.9426934719085693
ray2rgb: 0.377269983291626
-----------------------------------------
t_embed_fn: 0.0003249645233154297
t_embedded_dirs: 0.00018215179443359375
t_batchify: 0.04540443420410156
ray2rgb: 0.32834672927856445
-----------------------------------------
t_embed_fn: 0.0003190040588378906
t_embedded_dirs: 0.0001747608184814453
t_batchify: 0.7918179035186768
ray2rgb: 0.4083676338195801
-----------------------------------------
12 8.813022136688232
t_embed_fn: 0.0003268718719482422
t_embedded_dirs: 0.0001678466796875
t_batchify: 0.07164263725280762
ray2rgb: 0.3878617286682129
-----------------------------------------
t_embed_fn: 0.0003075599670410156
t_embedded_dirs: 0.0001735687255859375
t_batchify: 0.9006540775299072
ray2rgb: 0.39325499534606934
-----------------------------------------
t_embed_fn: 0.0003044605255126953
t_embedded_dirs: 0.0001633167266845703
t_batchify: 0.0840613842010498
ray2rgb: 0.33348727226257324
-----------------------------------------
t_embed_fn: 0.00031828880310058594
t_embedded_dirs: 0.0001678466796875
t_batchify: 1.843245029449463
ray2rgb: 0.358107328414917
-----------------------------------------
t_embed_fn: 0.0003075599670410156
t_embedded_dirs: 0.00016498565673828125
t_batchify: 0.0766000747680664
ray2rgb: 0.33287858963012695
-----------------------------------------
t_embed_fn: 0.00031566619873046875
t_embedded_dirs: 0.00016808509826660156
t_batchify: 0.9112770557403564
ray2rgb: 0.3549027442932129
-----------------------------------------
t_embed_fn: 0.00030231475830078125
t_embedded_dirs: 0.00016450881958007812
t_batchify: 0.07921457290649414
ray2rgb: 0.3424961566925049
-----------------------------------------
t_embed_fn: 0.00030422210693359375
t_embedded_dirs: 0.00016546249389648438
t_batchify: 0.9493553638458252
ray2rgb: 0.42653989791870117
-----------------------------------------
t_embed_fn: 0.00033402442932128906
t_embedded_dirs: 0.0001785755157470703
t_batchify: 0.04487872123718262
ray2rgb: 0.39502716064453125
-----------------------------------------
t_embed_fn: 0.0003230571746826172
t_embedded_dirs: 0.00017142295837402344
t_batchify: 0.9359250068664551
ray2rgb: 0.4124300479888916
-----------------------------------------
13 9.704028844833374
t_embed_fn: 0.000316619873046875
t_embedded_dirs: 0.0001652240753173828
t_batchify: 0.06952452659606934
ray2rgb: 0.3729386329650879
-----------------------------------------
t_embed_fn: 0.0003039836883544922
t_embedded_dirs: 0.00017261505126953125
t_batchify: 1.020787000656128
ray2rgb: 0.38172101974487305
-----------------------------------------
t_embed_fn: 0.0003044605255126953
t_embedded_dirs: 0.00016379356384277344
t_batchify: 0.08098125457763672
ray2rgb: 0.3821535110473633
-----------------------------------------
t_embed_fn: 0.00031495094299316406
t_embedded_dirs: 0.00016760826110839844
t_batchify: 1.0216541290283203
ray2rgb: 0.42113351821899414
-----------------------------------------
t_embed_fn: 0.0003123283386230469
t_embedded_dirs: 0.0001647472381591797
t_batchify: 0.08346295356750488
ray2rgb: 0.42202329635620117
-----------------------------------------
t_embed_fn: 0.0003082752227783203
t_embedded_dirs: 0.00017452239990234375
t_batchify: 1.113844394683838
ray2rgb: 0.42231106758117676
-----------------------------------------
t_embed_fn: 0.0003039836883544922
t_embedded_dirs: 0.00016307830810546875
t_batchify: 0.08337044715881348
ray2rgb: 0.4184870719909668
-----------------------------------------
t_embed_fn: 0.00030541419982910156
t_embedded_dirs: 0.0001659393310546875
t_batchify: 1.118030071258545
ray2rgb: 0.41887569427490234
-----------------------------------------
t_embed_fn: 0.0003333091735839844
t_embedded_dirs: 0.00017714500427246094
t_batchify: 0.04864764213562012
ray2rgb: 0.3973050117492676
-----------------------------------------
t_embed_fn: 0.0003209114074707031
t_embedded_dirs: 0.00017023086547851562
t_batchify: 0.9371225833892822
ray2rgb: 0.41902685165405273
-----------------------------------------
14 9.702852487564087
t_embed_fn: 0.0003094673156738281
t_embedded_dirs: 0.0001647472381591797
t_batchify: 0.07248497009277344
ray2rgb: 0.40621042251586914
-----------------------------------------
t_embed_fn: 0.00030732154846191406
t_embedded_dirs: 0.00017571449279785156
t_batchify: 1.0759708881378174
ray2rgb: 0.40201854705810547
-----------------------------------------
t_embed_fn: 0.0003032684326171875
t_embedded_dirs: 0.00016379356384277344
t_batchify: 0.08355116844177246
ray2rgb: 0.39750027656555176
-----------------------------------------
t_embed_fn: 0.0003066062927246094
t_embedded_dirs: 0.0001678466796875
t_batchify: 0.9628357887268066
ray2rgb: 0.41431307792663574
-----------------------------------------
t_embed_fn: 0.0003108978271484375
t_embedded_dirs: 0.00016307830810546875
t_batchify: 0.08236956596374512
ray2rgb: 0.3660087585449219
-----------------------------------------
t_embed_fn: 0.0003070831298828125
t_embedded_dirs: 0.00016736984252929688
t_batchify: 1.0473155975341797
ray2rgb: 0.4288334846496582
-----------------------------------------
t_embed_fn: 0.00030231475830078125
t_embedded_dirs: 0.00016307830810546875
t_batchify: 0.08637404441833496
ray2rgb: 0.42935681343078613
-----------------------------------------
t_embed_fn: 0.0003082752227783203
t_embedded_dirs: 0.00016736984252929688
t_batchify: 1.1267740726470947
ray2rgb: 0.42778754234313965
-----------------------------------------
t_embed_fn: 0.0003230571746826172
t_embedded_dirs: 0.00017452239990234375
t_batchify: 0.04832792282104492
ray2rgb: 0.3998692035675049
-----------------------------------------
t_embed_fn: 0.00031638145446777344
t_embedded_dirs: 0.00017714500427246094
t_batchify: 0.850804328918457
ray2rgb: 0.413067102432251
-----------------------------------------
15 9.59234356880188
t_embed_fn: 0.0003027915954589844
t_embedded_dirs: 0.00016379356384277344
t_batchify: 0.07181262969970703
ray2rgb: 0.39942336082458496
-----------------------------------------
t_embed_fn: 0.000308990478515625
t_embedded_dirs: 0.00017333030700683594
t_batchify: 1.5814344882965088
ray2rgb: 0.3662440776824951
-----------------------------------------
t_embed_fn: 0.0003032684326171875
t_embedded_dirs: 0.0001633167266845703
t_batchify: 0.0783085823059082
ray2rgb: 0.35604310035705566
-----------------------------------------
t_embed_fn: 0.0003147125244140625
t_embedded_dirs: 0.00016689300537109375
t_batchify: 1.8474485874176025
ray2rgb: 0.35788679122924805
-----------------------------------------
t_embed_fn: 0.0003058910369873047
t_embedded_dirs: 0.0001633167266845703
t_batchify: 0.07698750495910645
ray2rgb: 0.3339042663574219
-----------------------------------------
t_embed_fn: 0.00033783912658691406
t_embedded_dirs: 0.0001766681671142578
t_batchify: 0.9345059394836426
ray2rgb: 0.39360523223876953
-----------------------------------------
t_embed_fn: 0.0003070831298828125
t_embedded_dirs: 0.0001647472381591797
t_batchify: 0.08297181129455566
ray2rgb: 0.39546728134155273
-----------------------------------------
t_embed_fn: 0.0003142356872558594
t_embedded_dirs: 0.00016760826110839844
t_batchify: 1.0490341186523438
ray2rgb: 0.4278421401977539
-----------------------------------------
t_embed_fn: 0.0003266334533691406
t_embedded_dirs: 0.00017547607421875
t_batchify: 0.04760026931762695
ray2rgb: 0.40371131896972656
-----------------------------------------
t_embed_fn: 0.000347137451171875
t_embedded_dirs: 0.00017333030700683594
t_batchify: 0.801677942276001
ray2rgb: 0.4377858638763428
-----------------------------------------
16 10.513818502426147
t_embed_fn: 0.00030493736267089844
t_embedded_dirs: 0.00016570091247558594
t_batchify: 0.07187175750732422
ray2rgb: 0.4239480495452881
-----------------------------------------
t_embed_fn: 0.0003154277801513672
t_embedded_dirs: 0.00016760826110839844
t_batchify: 1.567817211151123
ray2rgb: 0.37538838386535645
-----------------------------------------
t_embed_fn: 0.00030612945556640625
t_embedded_dirs: 0.0001709461212158203
t_batchify: 0.08059096336364746
ray2rgb: 0.37569332122802734
-----------------------------------------
t_embed_fn: 0.00031757354736328125
t_embedded_dirs: 0.0001685619354248047
t_batchify: 0.8691596984863281
ray2rgb: 0.42971348762512207
-----------------------------------------
t_embed_fn: 0.00031280517578125
t_embedded_dirs: 0.0001773834228515625
t_batchify: 0.08886218070983887
ray2rgb: 0.4012789726257324
-----------------------------------------
t_embed_fn: 0.0003173351287841797
t_embedded_dirs: 0.0001678466796875
t_batchify: 1.304344892501831
ray2rgb: 0.45015382766723633
-----------------------------------------
t_embed_fn: 0.0003044605255126953
t_embedded_dirs: 0.0001659393310546875
t_batchify: 0.08607316017150879
ray2rgb: 0.450885534286499
-----------------------------------------
t_embed_fn: 0.00030875205993652344
t_embedded_dirs: 0.00016617774963378906
t_batchify: 1.0507729053497314
ray2rgb: 0.46586132049560547
-----------------------------------------
t_embed_fn: 0.0003292560577392578
t_embedded_dirs: 0.0001747608184814453
t_batchify: 0.050965070724487305
ray2rgb: 0.43889403343200684
-----------------------------------------
t_embed_fn: 0.00032973289489746094
t_embedded_dirs: 0.00017070770263671875
t_batchify: 0.8982946872711182
ray2rgb: 0.4068446159362793
-----------------------------------------
17 10.355693101882935
t_embed_fn: 0.0003139972686767578
t_embedded_dirs: 0.00016832351684570312
t_batchify: 0.07274556159973145
ray2rgb: 0.3357841968536377
-----------------------------------------
t_embed_fn: 0.00030612945556640625
t_embedded_dirs: 0.00016570091247558594
t_batchify: 1.8235819339752197
ray2rgb: 0.36830806732177734
-----------------------------------------
t_embed_fn: 0.0003025531768798828
t_embedded_dirs: 0.0001633167266845703
t_batchify: 0.08041596412658691
ray2rgb: 0.3629612922668457
-----------------------------------------
t_embed_fn: 0.000308990478515625
t_embedded_dirs: 0.00016689300537109375
t_batchify: 1.562147617340088
ray2rgb: 0.3687446117401123
-----------------------------------------
t_embed_fn: 0.00030612945556640625
t_embedded_dirs: 0.00016450881958007812
t_batchify: 0.08125972747802734
ray2rgb: 0.34101176261901855
-----------------------------------------
t_embed_fn: 0.0003075599670410156
t_embedded_dirs: 0.00017380714416503906
t_batchify: 1.2374019622802734
ray2rgb: 0.42957210540771484
-----------------------------------------
t_embed_fn: 0.00029921531677246094
t_embedded_dirs: 0.0001628398895263672
t_batchify: 0.0839998722076416
ray2rgb: 0.43183255195617676
-----------------------------------------
t_embed_fn: 0.0003025531768798828
t_embedded_dirs: 0.0001647472381591797
t_batchify: 1.026641607284546
ray2rgb: 0.4141824245452881
-----------------------------------------
t_embed_fn: 0.00032806396484375
t_embedded_dirs: 0.00018262863159179688
t_batchify: 0.04698014259338379
ray2rgb: 0.33692336082458496
-----------------------------------------
t_embed_fn: 0.00031685829162597656
t_embedded_dirs: 0.00017833709716796875
t_batchify: 1.3538198471069336
ray2rgb: 0.3981764316558838
-----------------------------------------
18 11.222811222076416
t_embed_fn: 0.0003001689910888672
t_embedded_dirs: 0.00016260147094726562
t_batchify: 0.07196593284606934
ray2rgb: 0.3868873119354248
-----------------------------------------
t_embed_fn: 0.00030612945556640625
t_embedded_dirs: 0.00016546249389648438
t_batchify: 1.078948736190796
ray2rgb: 0.4099106788635254
-----------------------------------------
t_embed_fn: 0.0003094673156738281
t_embedded_dirs: 0.0001628398895263672
t_batchify: 0.08430099487304688
ray2rgb: 0.3780941963195801
-----------------------------------------
t_embed_fn: 0.00030493736267089844
t_embedded_dirs: 0.00016617774963378906
t_batchify: 1.0820419788360596
ray2rgb: 0.4712502956390381
-----------------------------------------
t_embed_fn: 0.0003039836883544922
t_embedded_dirs: 0.0001647472381591797
t_batchify: 0.08905220031738281
ray2rgb: 0.469282865524292
-----------------------------------------
t_embed_fn: 0.0003154277801513672
t_embedded_dirs: 0.00016641616821289062
t_batchify: 1.2632834911346436
ray2rgb: 0.45431041717529297
-----------------------------------------
t_embed_fn: 0.0003027915954589844
t_embedded_dirs: 0.0001621246337890625
t_batchify: 0.0866847038269043
ray2rgb: 0.45949625968933105
-----------------------------------------
t_embed_fn: 0.0003063678741455078
t_embedded_dirs: 0.00017499923706054688
t_batchify: 1.1884996891021729
ray2rgb: 0.4537782669067383
-----------------------------------------
t_embed_fn: 0.0003325939178466797
t_embedded_dirs: 0.0001780986785888672
t_batchify: 0.051630258560180664
ray2rgb: 0.4286079406738281
-----------------------------------------
t_embed_fn: 0.00031828880310058594
t_embedded_dirs: 0.00016880035400390625
t_batchify: 1.0044572353363037
ray2rgb: 0.4521009922027588
-----------------------------------------
19 10.432234048843384
t_embed_fn: 0.00032019615173339844
t_embedded_dirs: 0.00016760826110839844
t_batchify: 0.0718386173248291
ray2rgb: 0.4590582847595215
-----------------------------------------
t_embed_fn: 0.0003147125244140625
t_embedded_dirs: 0.00016832351684570312
t_batchify: 1.216007947921753
ray2rgb: 0.44516658782958984
-----------------------------------------
t_embed_fn: 0.0003044605255126953
t_embedded_dirs: 0.00016951560974121094
t_batchify: 0.08749151229858398
ray2rgb: 0.45087504386901855
-----------------------------------------
t_embed_fn: 0.00030684471130371094
t_embedded_dirs: 0.0001678466796875
t_batchify: 1.0379421710968018
ray2rgb: 0.45984458923339844
-----------------------------------------
t_embed_fn: 0.00030517578125
t_embedded_dirs: 0.0001704692840576172
t_batchify: 0.08708643913269043
ray2rgb: 0.47623586654663086
-----------------------------------------
t_embed_fn: 0.0003066062927246094
t_embedded_dirs: 0.00016617774963378906
t_batchify: 1.1228861808776855
ray2rgb: 0.4544377326965332
-----------------------------------------
t_embed_fn: 0.00030493736267089844
t_embedded_dirs: 0.00016498565673828125
t_batchify: 0.08877992630004883
ray2rgb: 0.45566606521606445
-----------------------------------------
t_embed_fn: 0.0003151893615722656
t_embedded_dirs: 0.00016641616821289062
t_batchify: 1.0447118282318115
ray2rgb: 0.43906450271606445
-----------------------------------------
t_embed_fn: 0.0003185272216796875
t_embedded_dirs: 0.00017189979553222656
t_batchify: 0.05033087730407715
ray2rgb: 0.40375518798828125
-----------------------------------------
t_embed_fn: 0.00031757354736328125
t_embedded_dirs: 0.0001761913299560547
t_batchify: 1.7432844638824463
ray2rgb: 0.3796427249908447
-----------------------------------------
20 11.039860725402832
t_embed_fn: 0.0003020763397216797
t_embedded_dirs: 0.00017189979553222656
t_batchify: 0.07193922996520996
ray2rgb: 0.3583087921142578
-----------------------------------------
t_embed_fn: 0.00031447410583496094
t_embedded_dirs: 0.0001666545867919922
t_batchify: 1.052156686782837
ray2rgb: 0.4788541793823242
-----------------------------------------
t_embed_fn: 0.0003180503845214844
t_embedded_dirs: 0.0001652240753173828
t_batchify: 0.09233498573303223
ray2rgb: 0.48058629035949707
-----------------------------------------
t_embed_fn: 0.0003159046173095703
t_embedded_dirs: 0.00016736984252929688
t_batchify: 1.093397617340088
ray2rgb: 0.4632449150085449
-----------------------------------------
t_embed_fn: 0.0003032684326171875
t_embedded_dirs: 0.0001647472381591797
t_batchify: 0.08951544761657715
ray2rgb: 0.4685835838317871
-----------------------------------------
t_embed_fn: 0.0003075599670410156
t_embedded_dirs: 0.0001728534698486328
t_batchify: 1.3145673274993896
ray2rgb: 0.4542214870452881
-----------------------------------------
t_embed_fn: 0.0003285408020019531
t_embedded_dirs: 0.00016999244689941406
t_batchify: 0.0887608528137207
ray2rgb: 0.4573071002960205
-----------------------------------------
t_embed_fn: 0.0003077983856201172
t_embedded_dirs: 0.0001671314239501953
t_batchify: 1.1038661003112793
ray2rgb: 0.4270288944244385
-----------------------------------------
t_embed_fn: 0.00032210350036621094
t_embedded_dirs: 0.0001742839813232422
t_batchify: 0.04938673973083496
ray2rgb: 0.45850563049316406
-----------------------------------------
t_embed_fn: 0.00031757354736328125
t_embedded_dirs: 0.0001690387725830078
t_batchify: 0.9918577671051025
ray2rgb: 0.44653868675231934
-----------------------------------------
21 10.50919222831726
t_embed_fn: 0.0003173351287841797
t_embedded_dirs: 0.00016546249389648438
t_batchify: 0.07106375694274902
ray2rgb: 0.3141024112701416
-----------------------------------------
t_embed_fn: 0.0003085136413574219
t_embedded_dirs: 0.00016736984252929688
t_batchify: 1.6724374294281006
ray2rgb: 0.3838052749633789
-----------------------------------------
t_embed_fn: 0.0003101825714111328
t_embedded_dirs: 0.00016427040100097656
t_batchify: 0.07928466796875
ray2rgb: 0.41469621658325195
-----------------------------------------
t_embed_fn: 0.0003147125244140625
Per-chunk timing records (the same four fields repeat for every network call of every chunk):
t_embed_fn:      ~0.0003 s
t_embedded_dirs: ~0.00017 s
t_batchify:      0.04 - 2.06 s
ray2rgb:         0.31 - 1.12 s
-----------------------------------------
Per-image lines (image index, seconds):
22 11.804641008377075
23 11.9886155128479
24 10.537854433059692
25 11.216758012771606
26 11.62030029296875
27 12.17283010482788
28 11.969962120056152
29 10.846221446990967
30 11.771389722824097
31 11.74666714668274
32 11.788119792938232
33 11.513911247253418
34 11.757067203521729
35 11.393942594528198
36 11.652038335800171
37 11.957654476165771
38 11.348690271377563
39 12.477950811386108
-----------------------------------------
Done rendering ./logs/blender_paper_lego/renderonly_path_199999
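For quick inspection, the per-chunk fields in the log above can be aggregated with a short script; a minimal sketch, assuming the log text is saved to a file named render_log.txt (hypothetical name):

import re
from collections import defaultdict

# Aggregate the per-chunk timings (t_embed_fn, t_embedded_dirs, t_batchify, ray2rgb)
# printed by the instrumented renderer into totals and means per field.
totals = defaultdict(float)
counts = defaultdict(int)
with open("render_log.txt") as f:
    for line in f:
        m = re.match(r"\s*(t_embed_fn|t_embedded_dirs|t_batchify|ray2rgb):\s+([0-9.eE+-]+)", line)
        if m:
            key, val = m.group(1), float(m.group(2))
            totals[key] += val
            counts[key] += 1

for key in totals:
    print("%s: total %.3f s over %d chunks (mean %.4f s)"
          % (key, totals[key], counts[key], totals[key] / counts[key]))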
@@ -35,7 +35,7 @@ def pose_spherical(theta, phi, radius):
 def load_blender_data(basedir, half_res=False, testskip=1):
-    splits = ['train', 'val', 'test']
+    splits = ['val', 'test', 'train']
     metas = {}
     for s in splits:
         with open(os.path.join(basedir, 'transforms_{}.json'.format(s)), 'r') as fp:
......
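The only functional change in the hunk above is the order of splits. In the reference loader (assumed here), images and poses are concatenated split by split and i_split is built from the per-split counts, so reordering changes which global indices belong to train/val/test. A toy illustration with made-up counts:

import numpy as np

# Toy counts (hypothetical); the real numbers come from the transforms_*.json files.
counts = {'train': 100, 'val': 100, 'test': 200}
for order in (['train', 'val', 'test'], ['val', 'test', 'train']):
    sizes = [counts[s] for s in order]
    offsets = np.cumsum([0] + sizes)
    i_split = [np.arange(offsets[k], offsets[k + 1]) for k in range(3)]
    print(order, [(s, int(ix[0]), int(ix[-1])) for s, ix in zip(order, i_split)])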
#!/workspace/S/songxinkai/local/anaconda3/bin/python
# Standalone timing benchmark for the NeRF MLP: loads the fine network from a checkpoint
# and runs it on random 90-dim embeddings to measure pure network inference time.
import time
import torch
import torch.nn as nn
import json
import torch.nn.functional as F
import numpy as np

class NeRF(nn.Module):
    def __init__(self, D=8, W=256, input_ch=3, input_ch_views=3, output_ch=4, skips=[4], use_viewdirs=False):
        """Standard NeRF MLP: embedded points (and optionally view directions) -> (rgb, alpha)."""
        super(NeRF, self).__init__()
        self.D = D
        self.W = W
        self.input_ch = input_ch
        self.input_ch_views = input_ch_views
        self.skips = skips
        self.use_viewdirs = use_viewdirs
        self.pts_linears = nn.ModuleList(
            [nn.Linear(input_ch, W)] + [nn.Linear(W, W) if i not in self.skips else nn.Linear(W + input_ch, W) for i in range(D-1)])
        ### Implementation according to the official code release (https://github.com/bmild/nerf/blob/master/run_nerf_helpers.py#L104-L105)
        self.views_linears = nn.ModuleList([nn.Linear(input_ch_views + W, W//2)])
        ### Implementation according to the paper
        # self.views_linears = nn.ModuleList(
        #     [nn.Linear(input_ch_views + W, W//2)] + [nn.Linear(W//2, W//2) for i in range(D//2)])
        if use_viewdirs:
            self.feature_linear = nn.Linear(W, W)
            self.alpha_linear = nn.Linear(W, 1)
            self.rgb_linear = nn.Linear(W//2, 3)
        else:
            self.output_linear = nn.Linear(W, output_ch)

    def forward(self, x):
        input_pts, input_views = torch.split(x, [self.input_ch, self.input_ch_views], dim=-1)
        h = input_pts
        for i, l in enumerate(self.pts_linears):
            h = self.pts_linears[i](h)
            h = F.relu(h)
            if i in self.skips:
                h = torch.cat([input_pts, h], -1)
        if self.use_viewdirs:
            alpha = self.alpha_linear(h)
            feature = self.feature_linear(h)
            h = torch.cat([feature, input_views], -1)
            for i, l in enumerate(self.views_linears):
                h = self.views_linears[i](h)
                h = F.relu(h)
            rgb = self.rgb_linear(h)
            outputs = torch.cat([rgb, alpha], -1)
        else:
            outputs = self.output_linear(h)
        return outputs

    def load_weights_from_keras(self, weights):
        assert self.use_viewdirs, "Not implemented if use_viewdirs=False"
        # Load pts_linears
        for i in range(self.D):
            idx_pts_linears = 2 * i
            self.pts_linears[i].weight.data = torch.from_numpy(np.transpose(weights[idx_pts_linears]))
            self.pts_linears[i].bias.data = torch.from_numpy(np.transpose(weights[idx_pts_linears+1]))
        # Load feature_linear
        idx_feature_linear = 2 * self.D
        self.feature_linear.weight.data = torch.from_numpy(np.transpose(weights[idx_feature_linear]))
        self.feature_linear.bias.data = torch.from_numpy(np.transpose(weights[idx_feature_linear+1]))
        # Load views_linears
        idx_views_linears = 2 * self.D + 2
        self.views_linears[0].weight.data = torch.from_numpy(np.transpose(weights[idx_views_linears]))
        self.views_linears[0].bias.data = torch.from_numpy(np.transpose(weights[idx_views_linears+1]))
        # Load rgb_linear
        idx_rgb_linear = 2 * self.D + 4
        self.rgb_linear.weight.data = torch.from_numpy(np.transpose(weights[idx_rgb_linear]))
        self.rgb_linear.bias.data = torch.from_numpy(np.transpose(weights[idx_rgb_linear+1]))
        # Load alpha_linear
        idx_alpha_linear = 2 * self.D + 6
        self.alpha_linear.weight.data = torch.from_numpy(np.transpose(weights[idx_alpha_linear]))
        self.alpha_linear.bias.data = torch.from_numpy(np.transpose(weights[idx_alpha_linear+1]))

if __name__ == "__main__":
    H = 400
    W = 400
    C_SAM = 64
    F_SAM = 128
    EMB_LEN = 90
    ckpt_path = "./logs/blender_paper_lego/200000.tar"
    quant_step = 0.02062862738966942
    device = torch.device("cuda")
    # 63 positional-encoding channels for points + 27 for view directions = EMB_LEN (90) inputs.
    model = NeRF(D=8, W=256,
                 input_ch=63, output_ch=5, skips=[4],
                 input_ch_views=27, use_viewdirs=True).to(device)
    ckpt = torch.load(ckpt_path)
    model.load_state_dict(ckpt['network_fine_state_dict'])
    # model = torch.nn.DataParallel(model.cuda(), device_ids=[0, 1, 2, 3])
    # inputs = [[[[] for z in range(C_SAM+F_SAM)] for w in range(W)] for h in range(H)]
    # with open (os.path.join(data_dir, "0", "embedded_quant.bin"), 'rb') as f:
    #     for h in range(H):
    #         print ("Reading %d/%d"%(h, H))
    #         for w in range(W):
    #             for z in range(C_SAM + F_SAM):
    #                 inputs[h][w][z] += torch.tensor(unpack("%dh"%(EMB_LEN), f.read(EMB_LEN * 2))) * quant_step
    batch_size = 4096
    N = 10000
    inputs = torch.rand((N*batch_size, 90)).to(device)
    torch.cuda.synchronize()
    start = time.time()
    print ("start inference")
    for i in range(N):
        output = model(inputs[i*batch_size:i*batch_size+batch_size])
        if i % 1000 == 0:
            print (i, output.shape)
    torch.cuda.synchronize()  # make sure queued GPU work is included in the measurement
    print ("time:", time.time() - start)
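As a usage note, the wall-clock time printed by the benchmark above can be converted into a throughput figure and an estimated per-image network cost; a minimal sketch in which elapsed is a placeholder for the printed value:

# Convert the printed wall-clock time into samples/s and per-image network cost for a
# 400x400 image with 64 coarse + 128 fine samples (the constants used above).
elapsed = 100.0          # seconds, placeholder for the value the script prints
batch_size, N = 4096, 10000
samples_per_s = batch_size * N / elapsed
pixels = 400 * 400
samples_per_pixel = 64 + 128
print("throughput: %.1f samples/s" % samples_per_s)
print("network time per image: %.2f s" % (pixels * samples_per_pixel / samples_per_s))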
@@ -19,7 +19,10 @@ from load_blender import load_blender_data
 from load_LINEMOD import load_LINEMOD_data
-device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+# device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+# os.environ["CUDA_VISIBLE_DEVICES"] = "0,1,2,3,4,5,6,7"
+device = torch.device("cuda")
 np.random.seed(0)
 DEBUG = False
@@ -34,35 +37,80 @@ def batchify(fn, chunk):
     return ret
-def run_network(inputs, viewdirs, fn, embed_fn, embeddirs_fn, netchunk=1024*64):
+def run_network(inputs, viewdirs, fn, dump_data, dump_data_dir, embed_fn, embeddirs_fn, netchunk=1024*64):
+    # print ("inputs:", inputs.shape)
+    # print ("viewdirs:", viewdirs.shape)
+    # print ("netchunk:", netchunk)
     """Prepares inputs and applies network 'fn'.
     """
     inputs_flat = torch.reshape(inputs, [-1, inputs.shape[-1]])
+    t_start_0 = time.time()
+    torch.cuda.synchronize()
+    t_start = time.time()
     embedded = embed_fn(inputs_flat)
+    torch.cuda.synchronize()
+    t_embed_fn = time.time()
     if viewdirs is not None:
         input_dirs = viewdirs[:,None].expand(inputs.shape)
         input_dirs_flat = torch.reshape(input_dirs, [-1, input_dirs.shape[-1]])
         embedded_dirs = embeddirs_fn(input_dirs_flat)
         embedded = torch.cat([embedded, embedded_dirs], -1)
+    torch.cuda.synchronize()
+    t_embedded_dirs = time.time()
+    # print ("inputs:", inputs.shape)
+    # print ("inputs_flat:", inputs_flat.shape)
+    # print ("viewdirs:", viewdirs.shape)
+    # print (" after embedding:", embedded.shape)
+    if dump_data:
+        with open(os.path.join(dump_data_dir, "viewdirs.txt"), 'a') as f:
+            for viewdir in viewdirs:
+                f.write(str(viewdir.tolist()))
+                f.write("\n")
+        with open(os.path.join(dump_data_dir, "embedded.txt"), 'a') as f:
+            for emb in embedded:
+                f.write(str(emb.tolist()))
+                f.write("\n")
     outputs_flat = batchify(fn, netchunk)(embedded)
+    if dump_data:
+        with open(os.path.join(dump_data_dir, "outputs_flat.txt"), 'a') as f:
+            for output_flat in outputs_flat:
+                f.write(str(output_flat.tolist()))
+                f.write("\n")
+    torch.cuda.synchronize()
+    t_batchify = time.time()
     outputs = torch.reshape(outputs_flat, list(inputs.shape[:-1]) + [outputs_flat.shape[-1]])
+    # print("t_before_start: ", t_start-t_start_0)
+    # print("t_embed_fn: ", t_embed_fn-t_start)
+    # print("t_embedded_dirs: ", t_embedded_dirs-t_embed_fn)
+    # print("t_batchify: ", t_batchify-t_embedded_dirs, " samples:", embedded.shape)
     return outputs
+glb_im_id = 0
 def batchify_rays(rays_flat, chunk=1024*32, **kwargs):
     """Render rays in smaller minibatches to avoid OOM.
     """
     all_ret = {}
+    # print ("rays_flat:", rays_flat.shape)
+    # print ("chunk:", chunk)
+    global glb_im_id
     for i in range(0, rays_flat.shape[0], chunk):
-        ret = render_rays(rays_flat[i:i+chunk], **kwargs)
+        dump_data_dir = os.path.join("dump_data", "%d"%glb_im_id, "%d"%i)
+        os.system("mkdir -p %s"%(dump_data_dir))
+        ret = render_rays(rays_flat[i:i+chunk], dump_data_dir=dump_data_dir, **kwargs)
+        # raw_input()
         for k in ret:
             if k not in all_ret:
                 all_ret[k] = []
             all_ret[k].append(ret[k])
     all_ret = {k : torch.cat(all_ret[k], 0) for k in all_ret}
+    glb_im_id += 1
     return all_ret
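A side note on the directory creation above: os.system("mkdir -p ...") spawns a shell for every chunk. An equivalent sketch using only the standard library (the ids below are example values):

import os

# Same effect as `mkdir -p`, without spawning a shell per chunk.
dump_data_dir = os.path.join("dump_data", "%d" % 0, "%d" % 0)  # example image id / ray offset
os.makedirs(dump_data_dir, exist_ok=True)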
@@ -70,6 +118,16 @@ def render(H, W, K, chunk=1024*32, rays=None, c2w=None, ndc=True,
            near=0., far=1.,
            use_viewdirs=False, c2w_staticcam=None,
            **kwargs):
+    # print ("H, W, K:", H, W, K)
+    # print ("chunk:", chunk)
+    # print ("rays:", rays)
+    # print ("c2w:", c2w)
+    # print ("ndc:", ndc)
+    # print ("near:", near)
+    # print ("far:", far)
+    # print ("use_viewdirs:", use_viewdirs)
+    # print ("c2w_staticcam:", c2w_staticcam)
+    # print ("kwargs:", kwargs)
     """Render rays
     Args:
       H: int. Height of image in pixels.
@@ -92,13 +150,16 @@ def render(H, W, K, chunk=1024*32, rays=None, c2w=None, ndc=True,
       acc_map: [batch_size]. Accumulated opacity (alpha) along a ray.
       extras: dict with everything returned by render_rays().
     """
+    t_start = time.time()
     if c2w is not None:
         # special case to render full image
         rays_o, rays_d = get_rays(H, W, K, c2w)
     else:
         # use provided ray batch
         rays_o, rays_d = rays
+    t_get_rays = time.time()
+    t_use_viewdirs_0 = time.time()
     if use_viewdirs:
         # provide ray directions as input
         viewdirs = rays_d
@@ -121,16 +182,42 @@ def render(H, W, K, chunk=1024*32, rays=None, c2w=None, ndc=True,
     rays = torch.cat([rays_o, rays_d, near, far], -1)
     if use_viewdirs:
         rays = torch.cat([rays, viewdirs], -1)
+    t_use_viewdirs_1 = time.time()
     # Render and reshape
+    t_render_0 = time.time()
+    # print ("==============================================================================")
+    # print ("rays:", rays.shape)
     all_ret = batchify_rays(rays, chunk, **kwargs)
+    # print ("==============================================================================")
+    t_render_1 = time.time()
     for k in all_ret:
         k_sh = list(sh[:-1]) + list(all_ret[k].shape[1:])
         all_ret[k] = torch.reshape(all_ret[k], k_sh)
+    t_render_2 = time.time()
     k_extract = ['rgb_map', 'disp_map', 'acc_map']
     ret_list = [all_ret[k] for k in k_extract]
     ret_dict = {k : all_ret[k] for k in all_ret if k not in k_extract}
+    t_end = time.time()
+    # print ("t_start:", t_start - t_start)
+    # print ("t_get_rays:", t_get_rays - t_start)
+    # print ("t_use_viewdirs_0:", t_use_viewdirs_0 - t_start)
+    # print ("t_use_viewdirs_1:", t_use_viewdirs_1 - t_start)
+    # print ("t_render_0:", t_render_0 - t_start)
+    # print ("t_render_1:", t_render_1 - t_start)
+    # print ("t_render_2:", t_render_2 - t_start)
+    # print ("t_end:", t_end - t_start)
+    times = []
+    times.append(t_start - t_start)
+    times.append(t_get_rays - t_start)
+    times.append(t_use_viewdirs_0 - t_start)
+    times.append(t_use_viewdirs_1 - t_start)
+    times.append(t_render_0 - t_start)
+    times.append(t_render_1 - t_start)
+    times.append(t_render_2 - t_start)
+    times.append(t_end - t_start)
     return ret_list + [ret_dict]
@@ -147,26 +234,27 @@ def render_path(render_poses, hwf, K, chunk, render_kwargs, gt_imgs=None, savedi
     rgbs = []
     disps = []
-    t = time.time()
+    psnr_list = []
+    # t = time.time()
     for i, c2w in enumerate(tqdm(render_poses)):
-        print(i, time.time() - t)
-        t = time.time()
+        # print(i, time.time() - t)
+        # t = time.time()
         rgb, disp, acc, _ = render(H, W, K, chunk=chunk, c2w=c2w[:3,:4], **render_kwargs)
         rgbs.append(rgb.cpu().numpy())
        disps.append(disp.cpu().numpy())
         if i==0:
             print(rgb.shape, disp.shape)
-        """
         if gt_imgs is not None and render_factor==0:
             p = -10. * np.log10(np.mean(np.square(rgb.cpu().numpy() - gt_imgs[i])))
-            print(p)
-        """
+            print(i, ", PSNR:", p)
+            psnr_list.append(p)
         if savedir is not None:
             rgb8 = to8b(rgbs[-1])
             filename = os.path.join(savedir, '{:03d}.png'.format(i))
             imageio.imwrite(filename, rgb8)
+    print ("Average PSNR:", sum(psnr_list)/len(psnr_list))
     rgbs = np.stack(rgbs, 0)
@@ -193,12 +281,17 @@ def create_nerf(args):
     model_fine = None
     if args.N_importance > 0:
+        print ("============= model_fine hyperparameters ==============")
+        print (args.netdepth_fine, args.netwidth_fine,
+               input_ch, output_ch, skips,
+               input_ch_views, args.use_viewdirs)
+        print ("============= model_fine hyperparameters ==============")
         model_fine = NeRF(D=args.netdepth_fine, W=args.netwidth_fine,
                           input_ch=input_ch, output_ch=output_ch, skips=skips,
                           input_ch_views=input_ch_views, use_viewdirs=args.use_viewdirs).to(device)
         grad_vars += list(model_fine.parameters())
-    network_query_fn = lambda inputs, viewdirs, network_fn : run_network(inputs, viewdirs, network_fn,
+    network_query_fn = lambda inputs, viewdirs, network_fn, dump_data, dump_data_dir: run_network(inputs, viewdirs, network_fn, dump_data, dump_data_dir,
                                                                 embed_fn=embed_fn,
                                                                 embeddirs_fn=embeddirs_fn,
                                                                 netchunk=args.netchunk)
@@ -244,6 +337,8 @@ def create_nerf(args):
         'use_viewdirs' : args.use_viewdirs,
         'white_bkgd' : args.white_bkgd,
         'raw_noise_std' : args.raw_noise_std,
+        'dump_data' : args.dump_data,
+        # 'dump_data_dir' : args.dump_data_dir,
     }
     # NDC only good for LLFF-style forward facing data
@@ -272,15 +367,41 @@ def raw2outputs(raw, z_vals, rays_d, raw_noise_std=0, white_bkgd=False, pytest=F
         weights: [num_rays, num_samples]. Weights assigned to each sampled color.
         depth_map: [num_rays]. Estimated distance to object.
     """
+    # print ("raw:", raw.shape)
+    # print ("z_vals:", z_vals.shape)
+    # print ("rays_d:", z_vals.shape)
+    ts=[]
+    ts.append(time.time()) # 0
+    torch.cuda.synchronize()
     raw2alpha = lambda raw, dists, act_fn=F.relu: 1.-torch.exp(-act_fn(raw)*dists)
+    torch.cuda.synchronize()
+    ts.append(time.time()) # 1
     dists = z_vals[...,1:] - z_vals[...,:-1]
+    # print ("dists:", dists.shape)
+    torch.cuda.synchronize()
+    ts.append(time.time()) # 2
+    # print ("dists:", dists.shape)
     dists = torch.cat([dists, torch.Tensor([1e10]).expand(dists[...,:1].shape)], -1)  # [N_rays, N_samples]
+    # print ("dists:", dists.shape)
+    # print ("T3:", time.time() - ts[-1])
+    torch.cuda.synchronize()
+    ts.append(time.time()) # 3
+    # print ("rays_d:", rays_d.shape)
     dists = dists * torch.norm(rays_d[...,None,:], dim=-1)
+    # print ("dists:", dists.shape)
+    torch.cuda.synchronize()
+    ts.append(time.time()) # 4
+    # print ("raw:", raw.shape)
     rgb = torch.sigmoid(raw[...,:3])  # [N_rays, N_samples, 3]
+    # print ("rgb:", rgb.shape)
+    torch.cuda.synchronize()
+    ts.append(time.time()) # 5
     noise = 0.
+    torch.cuda.synchronize()
+    ts.append(time.time()) # 6
     if raw_noise_std > 0.:
         noise = torch.randn(raw[...,3].shape) * raw_noise_std
@@ -289,11 +410,22 @@ def raw2outputs(raw, z_vals, rays_d, raw_noise_std=0, white_bkgd=False, pytest=F
             np.random.seed(0)
             noise = np.random.rand(*list(raw[...,3].shape)) * raw_noise_std
             noise = torch.Tensor(noise)
+    torch.cuda.synchronize()
+    ts.append(time.time()) # 7
     alpha = raw2alpha(raw[...,3] + noise, dists)  # [N_rays, N_samples]
+    torch.cuda.synchronize()
+    ts.append(time.time()) # 8
     # weights = alpha * tf.math.cumprod(1.-alpha + 1e-10, -1, exclusive=True)
     weights = alpha * torch.cumprod(torch.cat([torch.ones((alpha.shape[0], 1)), 1.-alpha + 1e-10], -1), -1)[:, :-1]
+    torch.cuda.synchronize()
+    ts.append(time.time()) # 9
     rgb_map = torch.sum(weights[...,None] * rgb, -2)  # [N_rays, 3]
+    torch.cuda.synchronize()
+    ts.append(time.time()) # 10
+    # for i in range(1, len(ts)):
+    #     print ("ray2rgb[%d]: %.5f"%(i, ts[i] - ts[i-1]), " samples:", dists.shape)
+    # print ("-----------------------------------------")
     depth_map = torch.sum(weights * z_vals, -1)
     disp_map = 1./torch.max(1e-10 * torch.ones_like(depth_map), depth_map / torch.sum(weights, -1))
@@ -317,7 +449,9 @@ def render_rays(ray_batch,
                 white_bkgd=False,
                 raw_noise_std=0.,
                 verbose=False,
-                pytest=False):
+                pytest=False,
+                dump_data=False,
+                dump_data_dir=""):
     """Volumetric rendering.
     Args:
       ray_batch: array of shape [batch_size, ...]. All information necessary
@@ -361,7 +495,9 @@ def render_rays(ray_batch,
         z_vals = 1./(1./near * (1.-t_vals) + 1./far * (t_vals))
     z_vals = z_vals.expand([N_rays, N_samples])
+    # print ("z_vals:", z_vals.shape)
+    # false
     if perturb > 0.:
         # get intervals between samples
         mids = .5 * (z_vals[...,1:] + z_vals[...,:-1])
@@ -378,11 +514,16 @@ def render_rays(ray_batch,
         z_vals = lower + (upper - lower) * t_rand
+    # print ("============== z_vals ================")
+    # print (z_vals)
     pts = rays_o[...,None,:] + rays_d[...,None,:] * z_vals[...,:,None]  # [N_rays, N_samples, 3]
     # raw = run_network(pts)
-    raw = network_query_fn(pts, viewdirs, network_fn)
+    # print ("pts:", pts.shape)
+    # print ("viewdirs:", viewdirs.shape)
+    raw = network_query_fn(pts, viewdirs, network_fn, dump_data, dump_data_dir)
     rgb_map, disp_map, acc_map, weights, depth_map = raw2outputs(raw, z_vals, rays_d, raw_noise_std, white_bkgd, pytest=pytest)
     if N_importance > 0:
@@ -391,14 +532,28 @@
         z_vals_mid = .5 * (z_vals[...,1:] + z_vals[...,:-1])
         z_samples = sample_pdf(z_vals_mid, weights[...,1:-1], N_importance, det=(perturb==0.), pytest=pytest)
+        if dump_data:
+            with open(os.path.join(dump_data_dir, "z_samples.txt"), 'a') as f:
+                for sample in z_samples:
+                    f.write(str(sample.tolist()))
+                    f.write("\n")
         z_samples = z_samples.detach()
         z_vals, _ = torch.sort(torch.cat([z_vals, z_samples], -1), -1)
         pts = rays_o[...,None,:] + rays_d[...,None,:] * z_vals[...,:,None]  # [N_rays, N_samples + N_importance, 3]
+        if dump_data:
+            with open(os.path.join(dump_data_dir, "pts.txt"), 'a') as f:
+                for ray_pts in pts:
+                    for pt in ray_pts:
+                        f.write(str(pt.tolist()))
+                        f.write("\n")
         run_fn = network_fn if network_fine is None else network_fine
         # raw = run_network(pts, fn=run_fn)
-        raw = network_query_fn(pts, viewdirs, run_fn)
+        # print ("N_importance:", N_importance)
+        # print ("pts:", pts.shape)
+        # print ("viewdirs:", viewdirs.shape)
+        raw = network_query_fn(pts, viewdirs, run_fn, dump_data, dump_data_dir)
         rgb_map, disp_map, acc_map, weights, depth_map = raw2outputs(raw, z_vals, rays_d, raw_noise_std, white_bkgd, pytest=pytest)
@@ -481,6 +636,10 @@ def config_parser():
                         help='render the test set instead of render_poses path')
     parser.add_argument("--render_factor", type=int, default=0,
                         help='downsampling factor to speed up rendering, set 4 or 8 for fast preview')
+    parser.add_argument("--dump_data", action='store_true',
+                        help='dump intermediate data')
+    # parser.add_argument("--dump_data_dir", type=str, default='dump_data',
+    #                     help='dump_data_dir')
     # training options
     parser.add_argument("--precrop_iters", type=int, default=0,
@@ -651,6 +810,7 @@ def train():
     render_poses = torch.Tensor(render_poses).to(device)
     # Short circuit if only rendering out from trained model
+    # print (i_split)
     if args.render_only:
         print('RENDER ONLY')
         with torch.no_grad():
@@ -663,8 +823,10 @@ def train():
             testsavedir = os.path.join(basedir, expname, 'renderonly_{}_{:06d}'.format('test' if args.render_test else 'path', start))
             os.makedirs(testsavedir, exist_ok=True)
-            print('test poses shape', render_poses.shape)
+            # print('test poses shape', render_poses.shape)
+            # print ("render_poses:", render_poses.shape)
+            # print ("hwf:", hwf)
+            # print ("K:", K)
             rgbs, _ = render_path(render_poses, hwf, K, args.chunk, render_kwargs_test, gt_imgs=images, savedir=testsavedir, render_factor=args.render_factor)
             print('Done rendering', testsavedir)
             imageio.mimwrite(os.path.join(testsavedir, 'video.mp4'), to8b(rgbs), fps=30, quality=8)
......
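The dump_data branches above write one Python list repr per line to viewdirs.txt, embedded.txt, outputs_flat.txt, z_samples.txt, and pts.txt. A minimal reader sketch (the example path follows the dump_data/<image id>/<ray offset>/ layout created in batchify_rays):

import ast

def read_dump(path):
    """Read a dump file written by the instrumented run_network: one list repr per line."""
    rows = []
    with open(path) as f:
        for line in f:
            line = line.strip()
            if line:
                rows.append(ast.literal_eval(line))
    return rows

# Example usage (path is illustrative):
# embedded = read_dump("dump_data/0/0/embedded.txt")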
 import torch
 # torch.autograd.set_detect_anomaly(True)
 import torch.nn as nn
+import json
 import torch.nn.functional as F
 import numpy as np
@@ -151,14 +152,29 @@ class NeRF(nn.Module):
 # Ray helpers
 def get_rays(H, W, K, c2w):
+    # print ("==== H, W, K, c2w =====")
+    # print (H, W, K, c2w)
     i, j = torch.meshgrid(torch.linspace(0, W-1, W), torch.linspace(0, H-1, H))  # pytorch's meshgrid has indexing='ij'
+    # print ("==== i,j =====")
+    # print (i.shape)
+    # print (j.shape)
+    # print (i, j)
     i = i.t()
     j = j.t()
+    # print ("==== i.t(), j.t() =====")
+    # print (i.shape)
+    # print (j.shape)
+    # print (i, j)
     dirs = torch.stack([(i-K[0][2])/K[0][0], -(j-K[1][2])/K[1][1], -torch.ones_like(i)], -1)
+    # print ("========= dirs.shape ========")
+    # print (dirs.shape)
     # Rotate ray directions from camera frame to the world frame
     rays_d = torch.sum(dirs[..., np.newaxis, :] * c2w[:3,:3], -1)  # dot product, equals to: [c2w.dot(dir) for dir in dirs]
     # Translate camera frame's origin to the world frame. It is the origin of all rays.
     rays_o = c2w[:3,-1].expand(rays_d.shape)
+    # print ("==== rays_o, rays_d ====")
+    # print (rays_o.shape)
+    # print (rays_d.shape)
     return rays_o, rays_d
@@ -195,6 +211,10 @@ def ndc_rays(H, W, focal, near, rays_o, rays_d):
 # Hierarchical sampling (section 5.2)
 def sample_pdf(bins, weights, N_samples, det=False, pytest=False):
     # Get pdf
+    # print ("=============== sample_pdf =====================")
+    # print ("bins", bins.shape, "weights", weights.shape, torch.sum(weights), N_samples, det, pytest)
+    # print (bins)
+    # print (weights)
     weights = weights + 1e-5  # prevent nans
     pdf = weights / torch.sum(weights, -1, keepdim=True)
     cdf = torch.cumsum(pdf, -1)
@@ -236,4 +256,8 @@ def sample_pdf(bins, weights, N_samples, det=False, pytest=False):
     t = (u-cdf_g[...,0])/denom
     samples = bins_g[...,0] + t * (bins_g[...,1]-bins_g[...,0])
+    # print (samples.shape)
+    # with open("tmp/samples.txt", 'a') as f:
+    #     json.dump(samples.tolist(), f)
+    #     f.write("\n")
     return samples
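sample_pdf above performs inverse-transform sampling of the per-ray weight histogram. A standalone sketch of the same idea for a single ray, not the repository's exact implementation:

import torch

def inverse_cdf_sample(bins, weights, n_samples):
    # bins: [M+1] bin edges, weights: [M] unnormalized weights for one ray.
    pdf = (weights + 1e-5) / torch.sum(weights + 1e-5)
    cdf = torch.cat([torch.zeros(1), torch.cumsum(pdf, 0)])          # [M+1], cdf[0]=0, cdf[-1]=1
    u = torch.rand(n_samples)                                        # uniform samples to invert
    idx = torch.searchsorted(cdf, u, right=True).clamp(1, len(bins) - 1)
    lo, hi = cdf[idx - 1], cdf[idx]
    t = (u - lo) / torch.clamp(hi - lo, min=1e-10)                   # position within the bin
    return bins[idx - 1] + t * (bins[idx] - bins[idx - 1])

# Example: resample 8 depths where the middle bins carry most of the weight.
print(inverse_cdf_sample(torch.linspace(2., 6., 9),
                         torch.tensor([0., 0., 1., 3., 3., 1., 0., 0.]), 8))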