
Commit

Lint
Smartappli authored Aug 2, 2024
1 parent e7774f0 commit d980fa9
Showing 1 changed file with 7 additions and 7 deletions.
14 changes: 7 additions & 7 deletions llama_cpp/llama_cpp.py
@@ -1,22 +1,22 @@
 from __future__ import annotations
 
-import sys
-import os
 import ctypes
 import functools
+import os
 import pathlib
-
+import sys
 from typing import (
+    TYPE_CHECKING,
     Any,
     Callable,
+    Generic,
     List,
-    Union,
     NewType,
     Optional,
-    TYPE_CHECKING,
     TypeVar,
-    Generic,
+    Union,
 )
 
 from typing_extensions import TypeAlias

@@ -1767,7 +1767,7 @@ def llama_kv_cache_view_init(
 # // Free a KV cache view. (use only for debugging purposes)
 # LLAMA_API void llama_kv_cache_view_free(struct llama_kv_cache_view * view);
 @ctypes_function("llama_kv_cache_view_free", [llama_kv_cache_view_p], None)
-def llama_kv_cache_view_free(view: "ctypes.pointer[llama_kv_cache_view]", /):  # type: ignore
+def llama_kv_cache_view_free(view: ctypes.pointer[llama_kv_cache_view], /):  # type: ignore
     """Free a KV cache view. (use only for debugging purposes)"""
     ...

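The second hunk only drops the string quotes around the parameter annotation. Because the module begins with from __future__ import annotations (PEP 563), annotations are stored as strings and never evaluated at runtime, so the manual quoting is redundant and the unquoted form behaves the same. A minimal sketch illustrating this, using placeholder names (FakeView, fake_view_free) instead of the real llama_kv_cache_view symbols:

# A minimal sketch (placeholder names, not from llama_cpp) showing why the
# quotes are redundant: under "from __future__ import annotations" (PEP 563),
# annotations are kept as strings and never evaluated at runtime.
from __future__ import annotations

import ctypes


class FakeView(ctypes.Structure):
    """Placeholder standing in for llama_kv_cache_view."""
    _fields_ = [("n_cells", ctypes.c_int32)]


def fake_view_free(view: ctypes.pointer[FakeView], /) -> None:  # type: ignore
    """The annotation above is stored verbatim as a string; it is never executed."""
    ...


# The annotation round-trips as the literal text, quoted or not:
print(fake_view_free.__annotations__["view"])  # ctypes.pointer[FakeView]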
