diff --git a/supervision/annotators/core.py b/supervision/annotators/core.py index b539573753..0322d8cdd6 100644 --- a/supervision/annotators/core.py +++ b/supervision/annotators/core.py @@ -51,17 +51,20 @@ def __init__( color: Union[Color, ColorPalette] = ColorPalette.DEFAULT, thickness: int = 2, color_lookup: ColorLookup = ColorLookup.CLASS, + opacity: float = 0.5, ): """ Args: color (Union[Color, ColorPalette]): The color or color palette to use for annotating detections. thickness (int): Thickness of the bounding box lines. + opacity (float): Opacity of the overlay mask. Must be between `0` and `1`. color_lookup (ColorLookup): Strategy for mapping colors to annotations. Options are `INDEX`, `CLASS`, `TRACK`. """ self.color: Union[Color, ColorPalette] = color self.thickness: int = thickness + self.opacity: float = opacity # Store opacity self.color_lookup: ColorLookup = color_lookup @ensure_cv2_image_for_annotation @@ -103,7 +106,11 @@ def annotate( ![bounding-box-annotator-example](https://media.roboflow.com/ supervision-annotator-examples/bounding-box-annotator-example-purple.png) """ + if scene is None: + raise ValueError("The 'scene' parameter cannot be None.") assert isinstance(scene, np.ndarray) + overlay = scene.copy() + for detection_idx in range(len(detections)): x1, y1, x2, y2 = detections.xyxy[detection_idx].astype(int) color = resolve_color( @@ -115,12 +122,15 @@ def annotate( else custom_color_lookup, ) cv2.rectangle( - img=scene, + img=overlay, pt1=(x1, y1), pt2=(x2, y2), - color=color.as_bgr(), + color=color.as_bgra(), thickness=self.thickness, ) + + # Blend the overlay with the original scene using opacity + cv2.addWeighted(overlay, self.opacity, scene, 1 - self.opacity, 0, dst=scene) return scene @@ -134,6 +144,7 @@ def __init__( color: Union[Color, ColorPalette] = ColorPalette.DEFAULT, thickness: int = 2, color_lookup: ColorLookup = ColorLookup.CLASS, + opacity: float = 0.5, ): """ Args: @@ -142,9 +153,11 @@ def __init__( thickness (int): 
Thickness of the bounding box lines. color_lookup (ColorLookup): Strategy for mapping colors to annotations. Options are `INDEX`, `CLASS`, `TRACK`. + opacity (float): Opacity of the overlay mask. Must be between 0 and 1. """ self.color: Union[Color, ColorPalette] = color self.thickness: int = thickness + self.opacity: float = opacity self.color_lookup: ColorLookup = color_lookup @ensure_cv2_image_for_annotation @@ -192,6 +205,7 @@ def annotate( if detections.data is None or ORIENTED_BOX_COORDINATES not in detections.data: return scene obb_boxes = np.array(detections.data[ORIENTED_BOX_COORDINATES]).astype(int) + overlay = scene.copy() for detection_idx in range(len(detections)): obb = obb_boxes[detection_idx] @@ -204,8 +218,10 @@ def annotate( else custom_color_lookup, ) - cv2.drawContours(scene, [obb], 0, color.as_bgr(), self.thickness) + cv2.drawContours(scene, [obb], 0, color.as_bgra(), self.thickness) + # Blend the overlay with the original scene using opacity + cv2.addWeighted(overlay, self.opacity, scene, 1 - self.opacity, 0, dst=scene) return scene @@ -275,6 +291,8 @@ def annotate( ![mask-annotator-example](https://media.roboflow.com/ supervision-annotator-examples/mask-annotator-example-purple.png) """ + if scene is None: + raise ValueError("The 'scene' parameter cannot be None.") assert isinstance(scene, np.ndarray) if detections.mask is None: return scene @@ -291,7 +309,7 @@ def annotate( else custom_color_lookup, ) mask = detections.mask[detection_idx] - colored_mask[mask] = color.as_bgr() + colored_mask[mask] = color.as_bgra() cv2.addWeighted( colored_mask, self.opacity, scene, 1 - self.opacity, 0, dst=scene @@ -313,6 +331,7 @@ def __init__( color: Union[Color, ColorPalette] = ColorPalette.DEFAULT, thickness: int = 2, color_lookup: ColorLookup = ColorLookup.CLASS, + opacity: float = 0.5, ): """ Args: @@ -321,9 +340,11 @@ def __init__( thickness (int): Thickness of the polygon lines. color_lookup (ColorLookup): Strategy for mapping colors to annotations. 
Options are `INDEX`, `CLASS`, `TRACK`. + opacity (float): Opacity of the overlay mask. Must be between 0 and 1. """ self.color: Union[Color, ColorPalette] = color self.thickness: int = thickness + self.opacity: float = opacity self.color_lookup: ColorLookup = color_lookup @ensure_cv2_image_for_annotation @@ -365,6 +386,8 @@ def annotate( ![polygon-annotator-example](https://media.roboflow.com/ supervision-annotator-examples/polygon-annotator-example-purple.png) """ + if scene is None: + raise ValueError("The 'scene' parameter cannot be None.") assert isinstance(scene, np.ndarray) if detections.mask is None: return scene @@ -387,6 +410,8 @@ def annotate( thickness=self.thickness, ) + # Blend the overlay with the original scene using opacity + cv2.addWeighted(scene, self.opacity, scene, 1 - self.opacity, 0, dst=scene) return scene @@ -452,6 +477,8 @@ def annotate( ![box-mask-annotator-example](https://media.roboflow.com/ supervision-annotator-examples/box-mask-annotator-example-purple.png) """ + if scene is None: + raise ValueError("The 'scene' parameter cannot be None.") assert isinstance(scene, np.ndarray) scene_with_boxes = scene.copy() for detection_idx in range(len(detections)): @@ -468,7 +495,7 @@ def annotate( img=scene_with_boxes, pt1=(x1, y1), pt2=(x2, y2), - color=color.as_bgr(), + color=color.as_bgra(), thickness=-1, ) @@ -548,6 +575,8 @@ def annotate( ![halo-annotator-example](https://media.roboflow.com/ supervision-annotator-examples/halo-annotator-example-purple.png) """ + if scene is None: + raise ValueError("The 'scene' parameter cannot be None.") assert isinstance(scene, np.ndarray) if detections.mask is None: return scene @@ -567,7 +596,7 @@ def annotate( ) mask = detections.mask[detection_idx] fmask = np.logical_or(fmask, mask) - color_bgr = color.as_bgr() + color_bgr = color.as_bgra() colored_mask[mask] = color_bgr colored_mask = cv2.blur(colored_mask, (self.kernel_size, self.kernel_size)) @@ -592,6 +621,7 @@ def __init__( start_angle: int = -45, 
end_angle: int = 235, color_lookup: ColorLookup = ColorLookup.CLASS, + opacity: float = 0.5, ): """ Args: @@ -608,6 +638,7 @@ def __init__( self.start_angle: int = start_angle self.end_angle: int = end_angle self.color_lookup: ColorLookup = color_lookup + self.opacity = opacity @ensure_cv2_image_for_annotation def annotate( @@ -648,6 +679,8 @@ def annotate( ![ellipse-annotator-example](https://media.roboflow.com/ supervision-annotator-examples/ellipse-annotator-example-purple.png) """ + if scene is None: + raise ValueError("The 'scene' parameter cannot be None.") assert isinstance(scene, np.ndarray) for detection_idx in range(len(detections)): x1, y1, x2, y2 = detections.xyxy[detection_idx].astype(int) @@ -668,7 +701,7 @@ def annotate( angle=0.0, startAngle=self.start_angle, endAngle=self.end_angle, - color=color.as_bgr(), + color=color.as_bgra(), thickness=self.thickness, lineType=cv2.LINE_4, ) @@ -686,6 +719,7 @@ def __init__( thickness: int = 4, corner_length: int = 15, color_lookup: ColorLookup = ColorLookup.CLASS, + opacity: float = 0.5, ): """ Args: @@ -700,6 +734,7 @@ def __init__( self.thickness: int = thickness self.corner_length: int = corner_length self.color_lookup: ColorLookup = color_lookup + self.opacity = opacity @ensure_cv2_image_for_annotation def annotate( @@ -740,6 +775,8 @@ def annotate( ![box-corner-annotator-example](https://media.roboflow.com/ supervision-annotator-examples/box-corner-annotator-example-purple.png) """ + if scene is None: + raise ValueError("The 'scene' parameter cannot be None.") assert isinstance(scene, np.ndarray) for detection_idx in range(len(detections)): x1, y1, x2, y2 = detections.xyxy[detection_idx].astype(int) @@ -756,12 +793,12 @@ def annotate( for x, y in corners: x_end = x + self.corner_length if x == x1 else x - self.corner_length cv2.line( - scene, (x, y), (x_end, y), color.as_bgr(), thickness=self.thickness + scene, (x, y), (x_end, y), color.as_bgra(), thickness=self.thickness ) y_end = y + self.corner_length 
if y == y1 else y - self.corner_length cv2.line( - scene, (x, y), (x, y_end), color.as_bgr(), thickness=self.thickness + scene, (x, y), (x, y_end), color.as_bgra(), thickness=self.thickness ) return scene @@ -776,6 +813,7 @@ def __init__( color: Union[Color, ColorPalette] = ColorPalette.DEFAULT, thickness: int = 2, color_lookup: ColorLookup = ColorLookup.CLASS, + opacity: float = 0.5, ): """ Args: @@ -789,6 +827,7 @@ def __init__( self.color: Union[Color, ColorPalette] = color self.thickness: int = thickness self.color_lookup: ColorLookup = color_lookup + self.opacity = opacity @ensure_cv2_image_for_annotation def annotate( @@ -830,6 +869,8 @@ def annotate( ![circle-annotator-example](https://media.roboflow.com/ supervision-annotator-examples/circle-annotator-example-purple.png) """ + if scene is None: + raise ValueError("The 'scene' parameter cannot be None.") assert isinstance(scene, np.ndarray) for detection_idx in range(len(detections)): x1, y1, x2, y2 = detections.xyxy[detection_idx].astype(int) @@ -847,10 +888,12 @@ def annotate( img=scene, center=center, radius=int(distance), - color=color.as_bgr(), + color=color.as_bgra(), thickness=self.thickness, ) + # Blend the overlay with the original scene using opacity + cv2.addWeighted(scene, self.opacity, scene, 1 - self.opacity, 0, dst=scene) return scene @@ -868,6 +911,7 @@ def __init__( color_lookup: ColorLookup = ColorLookup.CLASS, outline_thickness: int = 0, outline_color: Union[Color, ColorPalette] = Color.BLACK, + opacity: float = 0.5, ): """ Args: @@ -888,6 +932,7 @@ def __init__( self.color_lookup: ColorLookup = color_lookup self.outline_thickness = outline_thickness self.outline_color: Union[Color, ColorPalette] = outline_color + self.opacity = opacity @ensure_cv2_image_for_annotation def annotate( @@ -928,6 +973,8 @@ def annotate( ![dot-annotator-example](https://media.roboflow.com/ supervision-annotator-examples/dot-annotator-example-purple.png) """ + if scene is None: + raise ValueError("The 'scene' 
parameter cannot be None.") assert isinstance(scene, np.ndarray) xy = detections.get_anchors_coordinates(anchor=self.position) for detection_idx in range(len(detections)): @@ -941,7 +988,7 @@ def annotate( ) center = (int(xy[detection_idx, 0]), int(xy[detection_idx, 1])) - cv2.circle(scene, center, self.radius, color.as_bgr(), -1) + cv2.circle(scene, center, self.radius, color.as_bgra(), -1) if self.outline_thickness: outline_color = resolve_color( color=self.outline_color, @@ -955,7 +1002,7 @@ def annotate( scene, center, self.radius, - outline_color.as_bgr(), + outline_color.as_bgra(), self.outline_thickness, ) return scene @@ -1073,6 +1120,8 @@ def annotate( custom_color_lookup=custom_color_lookup, ) + cv2.addWeighted(scene, self.opacity, scene, 1 - self.opacity, 0, dst=scene) + return scene def _validate_labels(self, labels: Optional[List[str]], detections: Detections): @@ -1182,7 +1231,7 @@ def _draw_labels( self.draw_rounded_rectangle( scene=scene, xyxy=box_xyxy, - color=background_color.as_bgr(), + color=background_color.as_bgra(), border_radius=self.border_radius, ) @@ -1194,7 +1243,7 @@ def _draw_labels( org=(text_x, text_y), fontFace=CV2_FONT, fontScale=self.text_scale, - color=text_color.as_bgr(), + color=text_color.as_bgra(), thickness=self.text_thickness, lineType=cv2.LINE_AA, ) @@ -1459,14 +1508,14 @@ def _draw_labels( draw.rounded_rectangle( tuple(box_xyxy), radius=self.border_radius, - fill=background_color.as_rgb(), + fill=background_color.as_rgba(), outline=None, ) draw.text( xy=(label_x_position, label_y_position), text=labels[idx], font=self.font, - fill=text_color.as_rgb(), + fill=text_color.as_rgba(), ) @staticmethod @@ -1759,7 +1808,7 @@ def annotate( scene, [xy.astype(np.int32)], False, - color=color.as_bgr(), + color=color.as_bgra(), thickness=self.thickness, ) return scene @@ -1955,6 +2004,7 @@ def __init__( color_lookup: ColorLookup = ColorLookup.CLASS, outline_thickness: int = 0, outline_color: Union[Color, ColorPalette] = Color.BLACK, + 
opacity: float = 0.5, ): """ Args: @@ -1977,6 +2027,7 @@ def __init__( self.color_lookup: ColorLookup = color_lookup self.outline_thickness: int = outline_thickness self.outline_color: Union[Color, ColorPalette] = outline_color + self.opacity = opacity @ensure_cv2_image_for_annotation def annotate( @@ -2017,6 +2068,8 @@ def annotate( ![triangle-annotator-example](https://media.roboflow.com/ supervision-annotator-examples/triangle-annotator-example.png) """ + if scene is None: + raise ValueError("The 'scene' parameter cannot be None.") assert isinstance(scene, np.ndarray) xy = detections.get_anchors_coordinates(anchor=self.position) for detection_idx in range(len(detections)): @@ -2038,7 +2091,7 @@ def annotate( np.int32, ) - cv2.fillPoly(scene, [vertices], color.as_bgr()) + cv2.fillPoly(scene, [vertices], color.as_bgra()) if self.outline_thickness: outline_color = resolve_color( color=self.outline_color, @@ -2052,7 +2105,7 @@ def annotate( scene, [vertices], True, - outline_color.as_bgr(), + outline_color.as_bgra(), thickness=self.outline_thickness, ) return scene @@ -2070,6 +2123,7 @@ def __init__( thickness: int = 2, color_lookup: ColorLookup = ColorLookup.CLASS, roundness: float = 0.6, + opacity: float = 1.0, ): """ Args: @@ -2089,6 +2143,7 @@ def __init__( if not 0 < roundness <= 1.0: raise ValueError("roundness attribute must be float between (0, 1.0]") self.roundness: float = roundness + self.opacity: float = opacity @ensure_cv2_image_for_annotation def annotate( @@ -2130,6 +2185,8 @@ def annotate( ![round-box-annotator-example](https://media.roboflow.com/ supervision-annotator-examples/round-box-annotator-example-purple.png) """ + if scene is None: + raise ValueError("The 'scene' parameter cannot be None.") assert isinstance(scene, np.ndarray) for detection_idx in range(len(detections)): x1, y1, x2, y2 = detections.xyxy[detection_idx].astype(int) @@ -2175,7 +2232,7 @@ def annotate( angle=0, startAngle=start_angle, endAngle=end_angle, - color=color.as_bgr(), 
+ color=color.as_bgra(), thickness=self.thickness, ) @@ -2183,7 +2240,7 @@ def annotate( img=scene, pt1=line[0], pt2=line[1], - color=color.as_bgr(), + color=color.as_bgra(), thickness=self.thickness, ) @@ -2204,6 +2261,7 @@ def __init__( position: Position = Position.TOP_CENTER, color_lookup: ColorLookup = ColorLookup.CLASS, border_thickness: Optional[int] = None, + opacity: float = 1.0, ): """ Args: @@ -2216,6 +2274,7 @@ def __init__( color_lookup (ColorLookup): Strategy for mapping colors to annotations. Options are `INDEX`, `CLASS`, `TRACK`. border_thickness (Optional[int]): The thickness of the border lines. + opacity (float): Opacity of the overlay mask. Must be between `0` and `1`. """ self.height: int = height self.width: int = width @@ -2226,6 +2285,7 @@ def __init__( if border_thickness is None: self.border_thickness = int(0.15 * self.height) + self.opacity = opacity @ensure_cv2_image_for_annotation def annotate( @@ -2308,16 +2368,17 @@ def annotate( border_coordinates[0][0] + int(border_width * value), border_coordinates[1][1], ), - color=color.as_bgr(), + color=color.as_bgra(), thickness=-1, ) cv2.rectangle( img=scene, pt1=border_coordinates[0], pt2=border_coordinates[1], - color=self.border_color.as_bgr(), + color=self.border_color.as_bgra(), thickness=self.border_thickness, ) + return scene @staticmethod @@ -2388,6 +2449,7 @@ def __init__( border_color: Union[Color, ColorPalette] = ColorPalette.DEFAULT, border_thickness: int = 2, border_color_lookup: ColorLookup = ColorLookup.CLASS, + opacity: float = 1.0, ): """ Args: @@ -2401,12 +2463,14 @@ def __init__( border_thickness (int): The thickness of the border around the cropped area. border_color_lookup (ColorLookup): Strategy for mapping colors to annotations. Options are `INDEX`, `CLASS`, `TRACK`. + opacity (float): Opacity of the overlay mask. Must be between `0` and `1`. 
""" self.position: Position = position self.scale_factor: float = scale_factor self.border_color: Union[Color, ColorPalette] = border_color self.border_thickness: int = border_thickness self.border_color_lookup: ColorLookup = border_color_lookup + self.opacity = opacity @ensure_cv2_image_for_annotation def annotate( @@ -2450,6 +2514,8 @@ def annotate( ![crop-annotator-example](https://media.roboflow.com/ supervision-annotator-examples/crop-annotator-example.png) """ + if scene is None: + raise ValueError("The 'scene' parameter cannot be None.") assert isinstance(scene, np.ndarray) crops = [ crop_image(image=scene, xyxy=xyxy) for xyxy in detections.xyxy.astype(int) @@ -2477,7 +2543,7 @@ def annotate( img=scene, pt1=(x1, y1), pt2=(x2, y2), - color=color.as_bgr(), + color=color.as_bgra(), thickness=self.border_thickness, ) @@ -2590,7 +2656,7 @@ def annotate(self, scene: ImageType, detections: Detections) -> ImageType: supervision-annotator-examples/background-color-annotator-example-purple.png) """ assert isinstance(scene, np.ndarray) - colored_mask = np.full_like(scene, self.color.as_bgr(), dtype=np.uint8) + colored_mask = np.full_like(scene, self.color.as_bgra(), dtype=np.uint8) cv2.addWeighted( scene, 1 - self.opacity, colored_mask, self.opacity, 0, dst=colored_mask @@ -2716,9 +2782,9 @@ def annotate( mask_2 = mask_2 & ~mask_overlap color_layer = np.zeros_like(scene, dtype=np.uint8) - color_layer[mask_overlap] = self.color_overlap.as_bgr() - color_layer[mask_1] = self.color_1.as_bgr() - color_layer[mask_2] = self.color_2.as_bgr() + color_layer[mask_overlap] = self.color_overlap.as_bgra() + color_layer[mask_1] = self.color_1.as_bgra() + color_layer[mask_2] = self.color_2.as_bgra() scene[mask_overlap] = (1 - self.opacity) * scene[ mask_overlap diff --git a/supervision/draw/color.py b/supervision/draw/color.py index e52df5a00a..4e31827cb8 100644 --- a/supervision/draw/color.py +++ b/supervision/draw/color.py @@ -65,22 +65,23 @@ def _validate_color_hex(color_hex: str): 
@dataclass class Color: """ - Represents a color in RGB format. + Represents a color in RGBA format. This class provides methods to work with colors, including creating colors from hex - codes, converting colors to hex strings, RGB tuples, and BGR tuples. + codes, converting colors to hex strings, RGB tuples, BGR tuples, and RGBA tuples. Attributes: r (int): Red channel value (0-255). g (int): Green channel value (0-255). b (int): Blue channel value (0-255). + a (float): Alpha channel value (0.0-1.0), default is 1.0. Example: ```python import supervision as sv sv.Color.WHITE - # Color(r=255, g=255, b=255) + # Color(r=255, g=255, b=255, a=1.0) ``` | Constant | Hex Code | RGB | @@ -98,6 +99,7 @@ class Color: r: int g: int b: int + a: float = 1.0 @classmethod def from_hex(cls, color_hex: str) -> Color: @@ -108,7 +110,9 @@ def from_hex(cls, color_hex: str) -> Color: color_hex (str): The hex string representing the color. This string can start with '#' followed by either 3 or 6 hexadecimal characters. In case of 3 characters, each character is repeated to form the full - 6-character hex code. + 6-character hex code. If the string has 6 characters, it is assumed + to be in the format '#RRGGBB' or '#RRGGBBAA'. If the string has + 8 characters, the last two characters are treated as the alpha channel. Returns: Color: An instance representing the color. 
@@ -127,12 +131,16 @@ def from_hex(cls, color_hex: str) -> Color: _validate_color_hex(color_hex) color_hex = color_hex.lstrip("#") if len(color_hex) == 3: - color_hex = "".join(c * 2 for c in color_hex) - r, g, b = (int(color_hex[i : i + 2], 16) for i in range(0, 6, 2)) - return cls(r, g, b) + color_hex = ( + "".join(c * 2 for c in color_hex) + "ff" + ) # Default alpha value of 255 + elif len(color_hex) == 6: + color_hex += "ff" # Default alpha value of 255 if not provided + r, g, b, a = (int(color_hex[i : i + 2], 16) for i in range(0, 8, 2)) + return cls(r, g, b, a / 255.0) @classmethod - def from_rgb_tuple(cls, color_tuple: Tuple[int, int, int]) -> Color: + def from_rgb_tuple(cls, color_tuple: Tuple[int, int, int, float]) -> Color: """ Create a Color instance from an RGB tuple. @@ -151,11 +159,11 @@ def from_rgb_tuple(cls, color_tuple: Tuple[int, int, int]) -> Color: # Color(r=255, g=255, b=0) ``` """ - r, g, b = color_tuple - return cls(r=r, g=g, b=b) + r, g, b, *a = color_tuple + return cls(r=r, g=g, b=b, a=a[0] if a else 1.0) @classmethod - def from_bgr_tuple(cls, color_tuple: Tuple[int, int, int]) -> Color: + def from_bgr_tuple(cls, color_tuple: Tuple[int, int, int, float]) -> Color: """ Create a Color instance from a BGR tuple. @@ -174,10 +182,10 @@ def from_bgr_tuple(cls, color_tuple: Tuple[int, int, int]) -> Color: # Color(r=255, g=255, b=0) ``` """ - b, g, r = color_tuple - return cls(r=r, g=g, b=b) + b, g, r, *a = color_tuple + return cls(r=r, g=g, b=b, a=a[0] if a else 1.0) - def as_hex(self) -> str: + def as_hex(self, include_alpha: bool = False) -> str: """ Converts the Color instance to a hex string. 
@@ -188,11 +196,14 @@ def as_hex(self) -> str: ```python import supervision as sv + sv.Color(r=255, g=255, b=0, a=0.8).as_hex(include_alpha=True) + # '#ffff00cc' sv.Color(r=255, g=255, b=0).as_hex() - # '#ffff00' + # '#ffff00' ``` """ - return f"#{self.r:02x}{self.g:02x}{self.b:02x}" + alpha = f"{int(self.a * 255):02x}" if include_alpha else "" + return f"#{self.r:02x}{self.g:02x}{self.b:02x}{alpha}" def as_rgb(self) -> Tuple[int, int, int]: """ @@ -211,6 +222,23 @@ def as_rgb(self) -> Tuple[int, int, int]: """ return self.r, self.g, self.b + def as_rgba(self) -> Tuple[int, int, int, float]: + """ + Returns the color as an RGBA tuple. + + Returns: + Tuple[int, int, int, float]: RGBA tuple. + + Example: + ```python + import supervision as sv + + sv.Color(r=255, g=255, b=0).as_rgba() + # (255, 255, 0, 1.0) + ``` + """ + return self.r, self.g, self.b, self.a + def as_bgr(self) -> Tuple[int, int, int]: """ Returns the color as a BGR tuple. @@ -228,6 +256,23 @@ def as_bgr(self) -> Tuple[int, int, int]: """ return self.b, self.g, self.r + def as_bgra(self) -> Tuple[int, int, int, float]: + """ + Returns the color as a BGRA tuple. + + Returns: + Tuple[int, int, int, float]: BGRA tuple. + + Example: + ```python + import supervision as sv + + sv.Color(r=255, g=255, b=0).as_bgra() + # (0, 255, 255, 1.0) + ``` + """ + return self.b, self.g, self.r, self.a + @classproperty def WHITE(cls) -> Color: return Color.from_hex("#FFFFFF")