Enable full type checking on search engine

Now it utilizes the type stub for the PySocks library, and all search engine Python code is
properly type checked.

Note that a `cast` is required because there aren't enough hints in PySocks to let the type
checker understand that the classes are supposed to be compatible.

PR #23183.
This commit is contained in:
Chocobo1
2025-08-31 22:10:30 +08:00
committed by GitHub
parent dffd27a879
commit becfd19e34
2 changed files with 11 additions and 8 deletions

View File

@@ -69,9 +69,12 @@ jobs:
- name: Check typings (search engine) - name: Check typings (search engine)
run: | run: |
curl \
-L \
-o src/searchengine/nova3/socks.pyi "https://github.com/python/typeshed/raw/refs/heads/main/stubs/PySocks/socks.pyi"
MYPYPATH="src/searchengine/nova3" \ MYPYPATH="src/searchengine/nova3" \
mypy \ mypy \
--follow-imports skip \ --explicit-package-bases \
--strict \ --strict \
$PY_FILES $PY_FILES
pyright \ pyright \

View File

@@ -1,4 +1,4 @@
# VERSION: 1.54 # VERSION: 1.55
# Author: # Author:
# Christophe DUMEZ (chris@qbittorrent.org) # Christophe DUMEZ (chris@qbittorrent.org)
@@ -40,7 +40,7 @@ import urllib.error
import urllib.parse import urllib.parse
import urllib.request import urllib.request
from collections.abc import Mapping from collections.abc import Mapping
from typing import Any, Optional from typing import Any, Optional, cast
import socks import socks
@@ -61,7 +61,7 @@ def _getBrowserUserAgent() -> str:
return f"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:{nowVersion}.0) Gecko/20100101 Firefox/{nowVersion}.0" return f"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:{nowVersion}.0) Gecko/20100101 Firefox/{nowVersion}.0"
_headers: dict[str, Any] = {'User-Agent': _getBrowserUserAgent()} _headers: dict[str, str] = {'User-Agent': _getBrowserUserAgent()}
_original_socket = socket.socket _original_socket = socket.socket
@@ -73,10 +73,10 @@ def enable_socks_proxy(enable: bool) -> None:
resolveHostname = (parts.scheme == "socks4a") or (parts.scheme == "socks5h") resolveHostname = (parts.scheme == "socks4a") or (parts.scheme == "socks5h")
if (parts.scheme == "socks4") or (parts.scheme == "socks4a"): if (parts.scheme == "socks4") or (parts.scheme == "socks4a"):
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS4, parts.hostname, parts.port, resolveHostname) socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS4, parts.hostname, parts.port, resolveHostname)
socket.socket = socks.socksocket # type: ignore[misc] socket.socket = cast(type[socket.socket], socks.socksocket) # type: ignore[misc]
elif (parts.scheme == "socks5") or (parts.scheme == "socks5h"): elif (parts.scheme == "socks5") or (parts.scheme == "socks5h"):
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, parts.hostname, parts.port, resolveHostname, parts.username, parts.password) socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, parts.hostname, parts.port, resolveHostname, parts.username, parts.password)
socket.socket = socks.socksocket # type: ignore[misc] socket.socket = cast(type[socket.socket], socks.socksocket) # type: ignore[misc]
else: else:
# the following code provide backward compatibility for older qbt versions # the following code provide backward compatibility for older qbt versions
# TODO: scheduled be removed with qbt >= 5.3 # TODO: scheduled be removed with qbt >= 5.3
@@ -85,7 +85,7 @@ def enable_socks_proxy(enable: bool) -> None:
legacySocksURL = f"socks5h://{legacySocksURL.strip()}" legacySocksURL = f"socks5h://{legacySocksURL.strip()}"
parts = urllib.parse.urlsplit(legacySocksURL) parts = urllib.parse.urlsplit(legacySocksURL)
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, parts.hostname, parts.port, True, parts.username, parts.password) socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, parts.hostname, parts.port, True, parts.username, parts.password)
socket.socket = socks.socksocket # type: ignore[misc] socket.socket = cast(type[socket.socket], socks.socksocket) # type: ignore[misc]
else: else:
socket.socket = _original_socket # type: ignore[misc] socket.socket = _original_socket # type: ignore[misc]
@@ -94,7 +94,7 @@ def enable_socks_proxy(enable: bool) -> None:
htmlentitydecode = html.unescape htmlentitydecode = html.unescape
def retrieve_url(url: str, custom_headers: Mapping[str, Any] = {}, request_data: Optional[Any] = None, ssl_context: Optional[ssl.SSLContext] = None, unescape_html_entities: bool = True) -> str: def retrieve_url(url: str, custom_headers: Mapping[str, str] = {}, request_data: Optional[Any] = None, ssl_context: Optional[ssl.SSLContext] = None, unescape_html_entities: bool = True) -> str:
""" Return the content of the url page as a string """ """ Return the content of the url page as a string """
request = urllib.request.Request(url, request_data, {**_headers, **custom_headers}) request = urllib.request.Request(url, request_data, {**_headers, **custom_headers})