Examples

Currently, this package provides two implementations for drawing heatmaps. They differ in the expected input format, and each is exposed as a separate function so that it can work efficiently with that format.

draw_heatmap

This implementation is designed for the concatenated input format. For details about the inputs, please refer to the packages/draw_heatmap/examples/input_data.py file.
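
The examples below import their inputs from that file. As orientation, here is a minimal, hypothetical sketch of what such a file could define; the names match the imports used in the examples, but all concrete values below are made up.

Hypothetical input_data.py sketch (illustrative values only)
# Hypothetical stand-in for input_data.py -- illustrative values only;
# the real file ships with the package and defines the same names.
import torch

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Per-sample bounding-box centers (x, y) and Gaussian radii, in heatmap pixels.
centers_list = [
    torch.tensor([[10, 20], [30, 40]], dtype=torch.int32, device=device),
    torch.tensor([[50, 60]], dtype=torch.int32, device=device),
]
radii_list = [
    torch.tensor([3, 5], dtype=torch.int32, device=device),
    torch.tensor([4], dtype=torch.int32, device=device),
]
# Per-sample class labels, used only by the class-wise batched example.
labels_list = [
    torch.tensor([0, 2], dtype=torch.int32, device=device),
    torch.tensor([1], dtype=torch.int32, device=device),
]

HEATMAP_SIZE = [128, 128]        # [H, W]
diameter_to_sigma_factor = 6.0   # made-up value; presumably maps a diameter to the Gaussian sigma
k_scale = 1.0                    # made-up value; presumably a global scale for the drawn peaks
MAX_NUM_TARGET = 8               # padding size used by the batched examples
MAX_NUM_CLASSES = 3              # number of class channels for the class-wise example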

Concatenated input example
# Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import torch
from accvlab.draw_heatmap import draw_heatmap
from input_data import (
    device,
    centers_list,
    radii_list,
    HEATMAP_SIZE,
    diameter_to_sigma_factor,
    k_scale,
)


def draw_heatmap_example():
    """
    This example shows how to draw heatmaps for concatenated inputs.

    Args:
        centers_list: List[torch.Tensor]
        radii_list: List[torch.Tensor]
        HEATMAP_SIZE: [H, W]
        diameter_to_sigma_factor: float
        k_scale: float
    """
    num_of_heatmaps = len(centers_list)

    centers = torch.cat(centers_list, dim=0)
    radii = torch.cat(radii_list, dim=0)
    heatmaps = torch.zeros(
        (num_of_heatmaps, HEATMAP_SIZE[0], HEATMAP_SIZE[1]), dtype=torch.float32, device=device
    )

    # Because all samples are concatenated, record the index of the destination heatmap for each bounding box
    heatmap_idxes = torch.tensor(
        [i for i, sublist in enumerate(centers_list) for _ in sublist],
        device=device,
        dtype=torch.int32,
    )
    draw_heatmap(heatmaps, centers, radii, heatmap_idxes, diameter_to_sigma_factor, k_scale)
    return heatmaps


if __name__ == "__main__":
    heatmaps = draw_heatmap_example()
    print(heatmaps.shape)
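
As a concrete, self-contained illustration of the index construction above: if the first sample contains two boxes and the second contains three, each concatenated box receives the index of the heatmap it is drawn into.

import torch

sizes = [2, 3]  # number of boxes in each sample
heatmap_idxes = torch.tensor(
    [i for i, n in enumerate(sizes) for _ in range(n)], dtype=torch.int32
)
print(heatmap_idxes)  # tensor([0, 0, 1, 1, 1], dtype=torch.int32)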

draw_heatmap_batched

This implementation is designed for the batched input format. For details about the inputs, please refer to the packages/draw_heatmap/examples/input_data.py file.
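
Compared to the concatenated format, the batched format pads each per-sample tensor to a fixed MAX_NUM_TARGET and wraps it in a RaggedBatch together with the number of valid entries per sample. For example, two samples with 2 and 3 boxes and MAX_NUM_TARGET = 4 yield a centers tensor of shape (2, 4, 2) whose unused rows are zero padding, with gt_nums_targets = [2, 3]. The examples below perform exactly this conversion.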

Note

It is common to draw all classes into a single heatmap. Another option, however, is to have one heatmap per class. Both modes are supported by this function; they are distinguished by whether or not the labels parameter (the class label of each bounding box) is passed.
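
In short: call the function without labels and a heatmap tensor of shape (batch_size, H, W) to draw all boxes of a sample into that sample's single heatmap, or pass labels=labels_rb together with a heatmap tensor that has an extra class dimension to get one heatmap per class. Both calls are shown in full below.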

One heatmap for all classes

Batched input, one heatmap for all classes example
# Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import torch
from accvlab.draw_heatmap import draw_heatmap_batched
from input_data import (
    device,
    centers_list,
    radii_list,
    HEATMAP_SIZE,
    diameter_to_sigma_factor,
    k_scale,
    MAX_NUM_TARGET,
)
from accvlab.batching_helpers import RaggedBatch


def draw_heatmap_example_batch():
    """
    This example shows how to draw heatmaps for batched inputs.

    Args:
        centers_list: List[torch.Tensor]
        radii_list: List[torch.Tensor]
        HEATMAP_SIZE: [H, W]
        diameter_to_sigma_factor: float
        k_scale: float
    """
    batch_size = len(centers_list)

    # To support batched input, convert the per-sample tensors into padded tensors wrapped in a RaggedBatch
    centers = torch.zeros((batch_size, MAX_NUM_TARGET, 2), dtype=torch.int32, device=device)
    radii = torch.zeros((batch_size, MAX_NUM_TARGET), dtype=torch.int32, device=device)
    gt_nums_targets = torch.zeros((batch_size,), dtype=torch.int32, device=device)
    # Copy over the valid boxes
    for i, sublist in enumerate(centers_list):
        centers[i, : len(sublist)] = centers_list[i]
        radii[i, : len(sublist)] = radii_list[i]
        gt_nums_targets[i] = len(sublist)

    centers_rb = RaggedBatch(centers, sample_sizes=gt_nums_targets)
    radii_rb = RaggedBatch(radii, sample_sizes=gt_nums_targets)
    heatmaps = torch.zeros((batch_size, HEATMAP_SIZE[0], HEATMAP_SIZE[1]), dtype=torch.float32, device=device)
    draw_heatmap_batched(heatmaps, centers_rb, radii_rb, diameter_to_sigma_factor, k_scale)
    return heatmaps


if __name__ == "__main__":
    heatmaps = draw_heatmap_example_batch()
    print(heatmaps.shape)
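
The printed shape here is (batch_size, H, W): one heatmap per sample, shared by all classes.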

One heatmap per class

Batched input, one heatmap for each class example
# Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import torch
from accvlab.draw_heatmap import draw_heatmap_batched
from input_data import (
    device,
    centers_list,
    radii_list,
    labels_list,
    HEATMAP_SIZE,
    diameter_to_sigma_factor,
    k_scale,
    MAX_NUM_TARGET,
    MAX_NUM_CLASSES,
)
from accvlab.batching_helpers import RaggedBatch


def draw_heatmap_classwise_example_batch():
    """
    This example shows how to draw class-wise heatmaps for batched inputs.

    Args:
        centers_list: List[torch.Tensor]
        radii_list: List[torch.Tensor]
        labels_list: List[torch.Tensor]
        HEATMAP_SIZE: [H, W]
        diameter_to_sigma_factor: float
        k_scale: float
    """
    batch_size = len(centers_list)

    # To support batched input, convert the per-sample tensors into padded tensors wrapped in a RaggedBatch
    centers = torch.zeros((batch_size, MAX_NUM_TARGET, 2), dtype=torch.int32, device=device)
    radii = torch.zeros((batch_size, MAX_NUM_TARGET), dtype=torch.int32, device=device)
    labels = torch.zeros((batch_size, MAX_NUM_TARGET), dtype=torch.int32, device=device)
    gt_nums_targets = torch.zeros((batch_size,), dtype=torch.int32, device=device)
    # Copy over the valid boxes
    for i, sublist in enumerate(centers_list):
        centers[i, : len(sublist)] = centers_list[i]
        radii[i, : len(sublist)] = radii_list[i]
        labels[i, : len(sublist)] = labels_list[i]
        gt_nums_targets[i] = len(sublist)

    centers_rb = RaggedBatch(centers, sample_sizes=gt_nums_targets)
    radii_rb = RaggedBatch(radii, sample_sizes=gt_nums_targets)
    labels_rb = RaggedBatch(labels, sample_sizes=gt_nums_targets)
    heatmaps = torch.zeros(
        (batch_size, MAX_NUM_CLASSES, HEATMAP_SIZE[0], HEATMAP_SIZE[1]), dtype=torch.float32, device=device
    )
    draw_heatmap_batched(heatmaps, centers_rb, radii_rb, diameter_to_sigma_factor, k_scale, labels=labels_rb)
    return heatmaps


if __name__ == "__main__":
    heatmaps = draw_heatmap_classwise_example_batch()
    print(heatmaps.shape)
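
The printed shape here is (batch_size, MAX_NUM_CLASSES, H, W): one heatmap per class per sample. Each box is presumably drawn into the channel selected by its label, so label values are expected to lie in [0, MAX_NUM_CLASSES).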