Coverage for cuda/core/_memory/_legacy.py: 91.67%
60 statements
« prev ^ index » next — coverage.py v7.13.4, created at 2026-03-08 01:07 +0000
1# SPDX-FileCopyrightText: Copyright (c) 2024-2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2#
3# SPDX-License-Identifier: Apache-2.0
5from __future__ import annotations
7from typing import TYPE_CHECKING
9if TYPE_CHECKING:
10 from cuda.core._memory._buffer import DevicePointerT
12from cuda.core._memory._buffer import Buffer, MemoryResource
13from cuda.core._utils.cuda_utils import (
14 _check_driver_error as raise_if_driver_error,
15)
16from cuda.core._utils.cuda_utils import (
17 driver,
18)
20__all__ = ["LegacyPinnedMemoryResource", "_SynchronousMemoryResource"]
class LegacyPinnedMemoryResource(MemoryResource):
    """Pinned (page-locked) host memory resource built on the legacy
    cuMemAllocHost/cuMemFreeHost driver APIs.
    """

    # TODO: support creating this MR with flags that are later passed to cuMemHostAlloc?

    def allocate(self, size, stream=None) -> Buffer:
        """Allocate a pinned host buffer of the requested size.

        Parameters
        ----------
        size : int
            Number of bytes to allocate.
        stream : Stream, optional
            Currently ignored; resolved to the default stream when omitted.

        Returns
        -------
        Buffer
            The allocated buffer object, accessible on both host and device.
        """
        if stream is None:
            from cuda.core._stream import default_stream

            stream = default_stream()
        # Zero-byte requests are represented by a null pointer; no driver call.
        ptr = 0
        if size:
            err, ptr = driver.cuMemAllocHost(size)
            raise_if_driver_error(err)
        return Buffer._init(ptr, size, self)

    def deallocate(self, ptr: DevicePointerT, size, stream):
        """Free a buffer previously handed out by :meth:`allocate`.

        Parameters
        ----------
        ptr : :obj:`~_memory.DevicePointerT`
            Pointer (or handle) to the buffer to release.
        size : int
            Size of the buffer being released, in bytes.
        stream : Stream
            Stream to synchronize with before freeing; tolerated as ``None``.
        """
        if stream is not None:
            stream.sync()
        # A zero-sized buffer never had a backing driver allocation.
        if not size:
            return
        (err,) = driver.cuMemFreeHost(ptr)
        raise_if_driver_error(err)

    @property
    def is_device_accessible(self) -> bool:
        """bool: buffers from this resource are device-accessible."""
        return True

    @property
    def is_host_accessible(self) -> bool:
        """bool: buffers from this resource are host-accessible."""
        return True

    @property
    def device_id(self) -> int:
        """This memory resource is not bound to any GPU."""
        raise RuntimeError("a pinned memory resource is not bound to any GPU")
class _SynchronousMemoryResource(MemoryResource):
    """Device memory resource whose ``deallocate`` synchronizes on the given
    stream before calling cuMemFree.
    """

    __slots__ = ("_device_id",)

    def __init__(self, device_id):
        from .._device import Device

        # Route through Device to normalize the identifier into a device ordinal.
        self._device_id = Device(device_id).device_id

    def allocate(self, size, stream=None) -> Buffer:
        if stream is None:
            from cuda.core._stream import default_stream

            stream = default_stream()
        # Zero-byte requests map to a null pointer without touching the driver.
        ptr = 0
        if size:
            err, ptr = driver.cuMemAlloc(size)
            raise_if_driver_error(err)
        return Buffer._init(ptr, size, self)

    def deallocate(self, ptr, size, stream):
        if stream is not None:
            stream.sync()
        # Nothing to free for a zero-sized (null-pointer) buffer.
        if not size:
            return
        (err,) = driver.cuMemFree(ptr)
        raise_if_driver_error(err)

    @property
    def is_device_accessible(self) -> bool:
        # Allocations come from cuMemAlloc, i.e. plain device memory.
        return True

    @property
    def is_host_accessible(self) -> bool:
        # Device allocations are not directly readable from the host.
        return False

    @property
    def device_id(self) -> int:
        # Ordinal of the device this resource was created for.
        return self._device_id