Merge pull request #28 from akatz-ai/alex-dev
Added nodes for making depthmaps seamless and adjusting brightness ac…
Showing 4 changed files with 191 additions and 1 deletion.
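Only two of the changed files are reproduced below. For context, a ComfyUI custom-node package normally exposes node classes through a NODE_CLASS_MAPPINGS dictionary; the sketch below illustrates that convention only. The module paths, mapping keys, and display names are illustrative assumptions and are not taken from this commit.

# Hedged sketch of the usual ComfyUI registration convention; not part of this commit.
# Module paths and display names below are illustrative guesses.
from .ak_adjust_depthmap_brightness import AK_AdjustDepthmapBrightness  # assumed file name
from .ak_make_depthmap_seamless import AK_MakeDepthmapSeamless          # assumed file name

NODE_CLASS_MAPPINGS = {
    "AK_AdjustDepthmapBrightness": AK_AdjustDepthmapBrightness,
    "AK_MakeDepthmapSeamless": AK_MakeDepthmapSeamless,
}

NODE_DISPLAY_NAME_MAPPINGS = {
    "AK_AdjustDepthmapBrightness": "Adjust Depthmap Brightness",
    "AK_MakeDepthmapSeamless": "Make Depthmap Seamless",
}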
@@ -0,0 +1,72 @@
import torch
import numpy as np


class AK_AdjustDepthmapBrightness:
    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "depthmap_batch": ("IMAGE",),  # Expecting a tensor in [B, H, W, C] shape
            }
        }

    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "adjust_brightness_for_loop"
    CATEGORY = "💜Akatz Nodes/Utils"
    DESCRIPTION = """
    Adjusts the brightness of each frame in a depth map video batch to match the brightness of the first frame.
    - depthmap_batch: Batch of depth maps, shape [B, H, W, C].
    """

    def adjust_brightness_for_loop(self, depthmap_batch):
        """
        Adjusts the brightness of each frame in a depth map batch to match the brightness of the first frame.
        Args:
            depthmap_batch (torch.Tensor): Batch of depth maps, shape [B, H, W, C]
        Returns:
            torch.Tensor: The batch of adjusted depth maps, shape [B, H, W, C]
        """
        # Convert to numpy for processing
        depthmap_np = depthmap_batch.cpu().numpy().copy()

        # Handle single image input by adding a batch dimension
        if depthmap_np.ndim == 3:
            depthmap_np = np.expand_dims(depthmap_np, axis=0)

        num_frames = depthmap_np.shape[0]
        first_frame_np = depthmap_np[0]

        # Calculate average brightness for the first frame
        if first_frame_np.shape[-1] == 3:
            first_frame_gray_np = np.mean(first_frame_np, axis=-1)
        else:
            first_frame_gray_np = first_frame_np.squeeze(-1)
        first_frame_avg_brightness = first_frame_gray_np.mean()

        # Process each frame
        for i in range(num_frames):
            frame_np = depthmap_np[i]
            if frame_np.shape[-1] == 3:
                frame_gray_np = np.mean(frame_np, axis=-1)
            else:
                frame_gray_np = frame_np.squeeze(-1)
            frame_avg_brightness = frame_gray_np.mean()
            brightness_diff = first_frame_avg_brightness - frame_avg_brightness

            # Adjust the frame's brightness
            frame_adjusted_np = frame_gray_np + brightness_diff
            frame_adjusted_np = np.clip(frame_adjusted_np, 0, 1)
            frame_adjusted_np = np.stack([frame_adjusted_np] * 3, axis=-1)

            depthmap_np[i] = frame_adjusted_np

        # Remove batch dimension if it was a single image
        if depthmap_np.shape[0] == 1:
            depthmap_np = depthmap_np[0]

        # Convert back to PyTorch tensor
        depthmap_adjusted = torch.from_numpy(depthmap_np).to(depthmap_batch.device).type_as(depthmap_batch)

        return (depthmap_adjusted,)
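As a quick sanity check, here is a minimal usage sketch (assumed, not part of this commit): it builds a fake depth map video whose frames get progressively darker and runs it through the node directly, assuming the class above is importable.

# Hedged usage sketch: not part of this commit.
import torch

node = AK_AdjustDepthmapBrightness()
# Fake 4-frame "depth map video": frame i is uniformly dimmer by 0.1 * i.
frames = torch.clamp(
    torch.rand(1, 64, 64, 3).repeat(4, 1, 1, 1) - 0.1 * torch.arange(4).view(4, 1, 1, 1),
    0, 1,
)
(adjusted,) = node.adjust_brightness_for_loop(frames)
# Every adjusted frame should now have roughly the mean brightness of the first frame.
print(adjusted.mean(dim=(1, 2, 3)))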
@@ -0,0 +1,113 @@
import torch
import numpy as np


class AK_MakeDepthmapSeamless:
    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "depthmap_batch": ("IMAGE",),  # Batch of images (depth maps)
            },
        }

    RETURN_TYPES = ("IMAGE",)  # Output is a batch of images (seamless depth maps)
    CATEGORY = "💜Akatz Nodes/Utils"
    FUNCTION = "make_depthmap_seamless"
    DESCRIPTION = """
    # AK Make Depthmap Seamless
    Adjusts depth maps in the batch to become seamless by fitting and removing a plane using least-squares.
    - depthmap_batch: The input depth maps to be adjusted to become seamless.
    """

    def make_depthmap_seamless(self, depthmap_batch):
        """
        Adjusts depth maps in the batch to become seamless by fitting and removing a plane using least-squares.
        Args:
            depthmap_batch (torch.Tensor): Batch of depth maps, shape [B, H, W, C]
        Returns:
            torch.Tensor: The batch of adjusted depth maps with seamless edges, shape [B, H, W, C]
        """
        # Convert PyTorch tensor to NumPy array for processing
        # (copy so the input batch is not modified in place)
        depthmap_np = depthmap_batch.cpu().numpy().copy()  # [B, H, W, C]

        # Handle single image input
        if depthmap_np.ndim == 3:
            depthmap_np = np.expand_dims(depthmap_np, axis=0)  # Shape [1, H, W, C]

        # Compute the average depth map across all frames
        avg_depthmap_np = np.mean(depthmap_np, axis=0)  # Shape [H, W, C]

        # If it's 3 channels, average them into one channel
        if avg_depthmap_np.shape[-1] == 3:
            avg_depthmap_gray_np = np.mean(avg_depthmap_np, axis=-1)  # Shape [H, W]
        else:
            avg_depthmap_gray_np = avg_depthmap_np.squeeze(-1)  # Shape [H, W]

        # Fit plane to the average depth map
        plane = self.fit_plane_least_squares(avg_depthmap_gray_np)

        # Process each image in the batch using the same plane correction
        for i in range(depthmap_np.shape[0]):
            single_depthmap_np = depthmap_np[i]

            # If it's 3 channels, average them into one channel
            if single_depthmap_np.shape[-1] == 3:
                single_depthmap_gray_np = np.mean(single_depthmap_np, axis=-1)  # Shape [H, W]
            else:
                single_depthmap_gray_np = single_depthmap_np.squeeze(-1)  # Shape [H, W]

            # Subtract the plane from the depth map
            depthmap_seamless_np = single_depthmap_gray_np - plane

            # Normalize this frame back into the [0, 1] range
            depthmap_seamless_np = (depthmap_seamless_np - depthmap_seamless_np.min()) / (
                (depthmap_seamless_np.max() - depthmap_seamless_np.min()) or 1)

            # Ensure it's 3-channel again before inserting back into batch
            depthmap_seamless_np = np.stack([depthmap_seamless_np] * 3, axis=-1)  # Shape [H, W, 3]

            # Replace in batch
            depthmap_np[i] = depthmap_seamless_np

        # Remove batch dimension if input was a single image
        if depthmap_np.shape[0] == 1:
            depthmap_np = depthmap_np[0]

        # Convert back to PyTorch tensor
        depthmap_seamless = torch.from_numpy(depthmap_np).to(depthmap_batch.device).type_as(depthmap_batch)

        return (depthmap_seamless,)

    def fit_plane_least_squares(self, D):
        """
        Fits a plane to the depth map D using least squares.
        Args:
            D (numpy.ndarray): The input depth map as a 2D NumPy array.
        Returns:
            numpy.ndarray: The plane fitted to the depth map.
        """
        H, W = D.shape
        x = np.arange(W)
        y = np.arange(H)
        X, Y = np.meshgrid(x, y)
        Z = D

        # Flatten the arrays for linear regression
        X_flat = X.flatten()
        Y_flat = Y.flatten()
        Z_flat = Z.flatten()

        # Design matrix for plane fitting
        A = np.c_[X_flat, Y_flat, np.ones_like(X_flat)]

        # Perform least squares fitting
        C, _, _, _ = np.linalg.lstsq(A, Z_flat, rcond=None)

        # Construct the plane from coefficients
        plane = (C[0] * X + C[1] * Y + C[2])

        return plane
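For intuition, a minimal sketch (assumed usage, not part of this commit): the least-squares helper recovers a known plane almost exactly, and the node itself maps a tilted depth map batch back into the [0, 1] range with the tilt removed. The synthetic inputs and direct method calls are illustrative assumptions, and the class above is assumed to be importable.

# Hedged usage sketch: not part of this commit.
import numpy as np
import torch

node = AK_MakeDepthmapSeamless()

# 1) fit_plane_least_squares recovers a known plane almost exactly.
H, W = 48, 64
X, Y = np.meshgrid(np.arange(W), np.arange(H))
D = 0.01 * X + 0.02 * Y + 0.5
plane = node.fit_plane_least_squares(D)
print(np.abs(plane - D).max())  # effectively zero: the tilt is captured by the fitted plane

# 2) Running the node on a 2-frame batch: a tilted ramp with some periodic detail on top.
detail = 0.1 * np.sin(2 * np.pi * X / W) * np.sin(2 * np.pi * Y / H)
depth = (0.5 * X / W + 0.3 * Y / H + detail).astype(np.float32)
depth2 = np.roll(depth, W // 4, axis=1)  # second frame: same map, shifted horizontally
batch = torch.from_numpy(np.stack([np.stack([d] * 3, axis=-1) for d in (depth, depth2)]))  # [2, H, W, 3]
(seamless,) = node.make_depthmap_seamless(batch)
print(seamless.shape, float(seamless.min()), float(seamless.max()))  # [2, 48, 64, 3], values spanning [0, 1]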