Skip to content

Commit e5d0fdf

Browse files
authored
chore: Bump minimum Python version to 3.10 (apify#1281)
- Bump minimum Python version to 3.10. - Remove `eval-type-backport` as no longer needed. - Closes: apify#1124
1 parent 59b6fbf commit e5d0fdf

36 files changed

Lines changed: 126 additions & 563 deletions

.github/workflows/run_code_checks.yaml

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,16 +20,22 @@ jobs:
2020
lint_check:
2121
name: Lint check
2222
uses: apify/workflows/.github/workflows/python_lint_check.yaml@main
23+
with:
24+
python-version: '["3.10", "3.11", "3.12", "3.13"]'
2325

2426
type_check:
2527
name: Type check
2628
uses: apify/workflows/.github/workflows/python_type_check.yaml@main
29+
with:
30+
python-version: '["3.10", "3.11", "3.12", "3.13"]'
2731

2832
unit_tests:
2933
name: Unit tests
3034
uses: apify/workflows/.github/workflows/python_unit_tests.yaml@main
3135
secrets:
3236
httpbin_url: ${{ secrets.APIFY_HTTPBIN_TOKEN && format('https://httpbin.apify.actor?token={0}', secrets.APIFY_HTTPBIN_TOKEN) || 'https://httpbin.org'}}
37+
with:
38+
python-version: '["3.10", "3.11", "3.12", "3.13"]'
3339

3440
docs_check:
3541
name: Docs check

CONTRIBUTING.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ Here you'll find a contributing guide to get started with development.
44

55
## Environment
66

7-
For local development, it is required to have Python 3.9 (or a later version) installed.
7+
For local development, it is required to have Python 3.10 (or a later version) installed.
88

99
We use [uv](https://docs.astral.sh/uv/) for project management. Install it and set up your IDE accordingly.
1010

docs/guides/code_examples/session_management/multi_sessions_http.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import asyncio
2+
from collections.abc import Callable
23
from datetime import timedelta
34
from itertools import count
4-
from typing import Callable
55

66
from crawlee import ConcurrencySettings, Request
77
from crawlee.crawlers import BasicCrawlingContext, HttpCrawler, HttpCrawlingContext

docs/introduction/01_setting_up.mdx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ This guide will help you get started with Crawlee by setting it up on your compu
1414

1515
Before installing Crawlee itself, make sure that your system meets the following requirements:
1616

17-
- **Python 3.9 or higher**: Crawlee requires Python 3.9 or a newer version. You can download Python from the [official website](https://python.org/downloads/).
17+
- **Python 3.10 or higher**: Crawlee requires Python 3.10 or a newer version. You can download Python from the [official website](https://python.org/downloads/).
1818
- **Python package manager**: While this guide uses [pip](https://pip.pypa.io/) (the most common package manager), you can also use any package manager you want. You can download pip from the [official website](https://pip.pypa.io/en/stable/installation/).
1919

2020
### Verifying prerequisites

docs/quick-start/index.mdx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ Crawlee offers the following main crawler classes: <ApiLink to="class/BeautifulS
2323

2424
:::caution Minimum Python version
2525

26-
Crawlee requires Python 3.9 or later.
26+
Crawlee requires Python 3.10 or higher.
2727

2828
:::
2929

pyproject.toml

Lines changed: 6 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -9,13 +9,12 @@ description = "Crawlee for Python"
99
authors = [{ name = "Apify Technologies s.r.o.", email = "[email protected]" }]
1010
license = { file = "LICENSE" }
1111
readme = "README.md"
12-
requires-python = ">=3.9"
12+
requires-python = ">=3.10"
1313
classifiers = [
1414
"Development Status :: 4 - Beta",
1515
"Intended Audience :: Developers",
1616
"License :: OSI Approved :: Apache Software License",
1717
"Operating System :: OS Independent",
18-
"Programming Language :: Python :: 3.9",
1918
"Programming Language :: Python :: 3.10",
2019
"Programming Language :: Python :: 3.11",
2120
"Programming Language :: Python :: 3.12",
@@ -37,7 +36,6 @@ dependencies = [
3736
"browserforge>=1.2.3",
3837
"cachetools>=5.5.0",
3938
"colorama>=0.4.0",
40-
"eval-type-backport>=0.2.0",
4139
"httpx[brotli,http2,zstd]>=0.27.0",
4240
"more-itertools>=10.2.0",
4341
"protego>=0.4.0",
@@ -64,15 +62,13 @@ all = [
6462
"jaro-winkler>=2.0.3",
6563
"parsel>=1.10.0",
6664
"playwright>=1.27.0",
67-
"scikit-learn==1.5.2; python_version == '3.9'",
68-
"scikit-learn>=1.6.0; python_version >= '3.10'",
69-
'typer>=0.12.0',
65+
"scikit-learn>=1.6.0",
66+
"typer>=0.12.0",
7067
]
7168
adaptive-crawler = [
7269
"jaro-winkler>=2.0.3",
7370
"playwright>=1.27.0",
74-
"scikit-learn==1.5.2; python_version == '3.9'",
75-
"scikit-learn>=1.6.0; python_version >= '3.10'",
71+
"scikit-learn>=1.6.0",
7672
]
7773
beautifulsoup = ["beautifulsoup4[lxml]>=4.12.0", "html5lib>=1.0"]
7874
cli = ["cookiecutter>=2.6.0", "inquirer>=3.3.0", "rich>=13.9.0", "typer>=0.12.0"]
@@ -215,7 +211,7 @@ markers = [
215211
]
216212

217213
[tool.mypy]
218-
python_version = "3.9"
214+
python_version = "3.10"
219215
plugins = ["pydantic.mypy"]
220216
exclude = [
221217
"src/crawlee/project_template",
@@ -261,7 +257,7 @@ module = [
261257
disable_error_code = ["misc"]
262258

263259
[tool.basedpyright]
264-
pythonVersion = "3.9"
260+
pythonVersion = "3.10"
265261
typeCheckingMode = "standard"
266262
include = ["src", "tests", "docs", "website"]
267263

src/crawlee/_autoscaling/_types.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
from dataclasses import dataclass, field
44
from datetime import datetime, timedelta, timezone
5-
from typing import TYPE_CHECKING, Union
5+
from typing import TYPE_CHECKING
66

77
if TYPE_CHECKING:
88
from crawlee._utils.byte_size import ByteSize
@@ -152,4 +152,4 @@ def is_overloaded(self) -> bool:
152152
return self.new_error_count > self.max_error_count
153153

154154

155-
Snapshot = Union[MemorySnapshot, CpuSnapshot, EventLoopSnapshot, ClientSnapshot]
155+
Snapshot = MemorySnapshot | CpuSnapshot | EventLoopSnapshot | ClientSnapshot

src/crawlee/_autoscaling/autoscaled_pool.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,14 +7,14 @@
77
from contextlib import suppress
88
from datetime import timedelta
99
from logging import getLogger
10-
from typing import TYPE_CHECKING, Callable
10+
from typing import TYPE_CHECKING
1111

1212
from crawlee._types import ConcurrencySettings
1313
from crawlee._utils.docs import docs_group
1414
from crawlee._utils.recurring_task import RecurringTask
1515

1616
if TYPE_CHECKING:
17-
from collections.abc import Awaitable
17+
from collections.abc import Awaitable, Callable
1818

1919
from crawlee._autoscaling import SystemStatus
2020

src/crawlee/_types.py

Lines changed: 9 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,16 @@
11
from __future__ import annotations
22

33
import dataclasses
4-
from collections.abc import Iterator, Mapping
4+
from collections.abc import Callable, Iterator, Mapping
55
from dataclasses import dataclass
66
from typing import (
77
TYPE_CHECKING,
88
Annotated,
99
Any,
10-
Callable,
1110
Literal,
12-
Optional,
1311
Protocol,
1412
TypedDict,
1513
TypeVar,
16-
Union,
1714
cast,
1815
overload,
1916
)
@@ -28,7 +25,7 @@
2825
import re
2926
from collections.abc import Callable, Coroutine, Sequence
3027

31-
from typing_extensions import NotRequired, Required, TypeAlias, Unpack
28+
from typing_extensions import NotRequired, Required, Unpack
3229

3330
from crawlee import Glob, Request
3431
from crawlee._request import RequestOptions
@@ -41,30 +38,22 @@
4138

4239
# Workaround for https://github.com/pydantic/pydantic/issues/9445
4340
J = TypeVar('J', bound='JsonSerializable')
44-
JsonSerializable: TypeAlias = Union[
45-
list[J],
46-
dict[str, J],
47-
str,
48-
bool,
49-
int,
50-
float,
51-
None,
52-
]
41+
JsonSerializable = list[J] | dict[str, J] | str | bool | int | float | None
5342
else:
5443
from pydantic import JsonValue as JsonSerializable
5544

5645
T = TypeVar('T')
5746

58-
HttpMethod: TypeAlias = Literal['GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'CONNECT', 'OPTIONS', 'TRACE', 'PATCH']
47+
HttpMethod = Literal['GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'CONNECT', 'OPTIONS', 'TRACE', 'PATCH']
5948

60-
HttpPayload: TypeAlias = bytes
49+
HttpPayload = bytes
6150

62-
RequestTransformAction: TypeAlias = Literal['skip', 'unchanged']
51+
RequestTransformAction = Literal['skip', 'unchanged']
6352

64-
EnqueueStrategy: TypeAlias = Literal['all', 'same-domain', 'same-hostname', 'same-origin']
53+
EnqueueStrategy = Literal['all', 'same-domain', 'same-hostname', 'same-origin']
6554
"""Enqueue strategy to be used for determining which links to extract and enqueue."""
6655

67-
SkippedReason: TypeAlias = Literal['robots_txt']
56+
SkippedReason = Literal['robots_txt']
6857

6958

7059
def _normalize_headers(headers: Mapping[str, str]) -> dict[str, str]:
@@ -264,7 +253,7 @@ def __init__(self, *, key_value_store_getter: GetKeyValueStoreFunction) -> None:
264253
self._key_value_store_getter = key_value_store_getter
265254
self.add_requests_calls = list[AddRequestsKwargs]()
266255
self.push_data_calls = list[PushDataFunctionCall]()
267-
self.key_value_store_changes = dict[tuple[Optional[str], Optional[str]], KeyValueStoreChangeRecords]()
256+
self.key_value_store_changes = dict[tuple[str | None, str | None], KeyValueStoreChangeRecords]()
268257

269258
async def add_requests(
270259
self,

src/crawlee/_utils/context.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,9 @@
11
from __future__ import annotations
22

33
import asyncio
4+
from collections.abc import Callable
45
from functools import wraps
5-
from typing import Any, Callable, TypeVar
6+
from typing import Any, TypeVar
67

78
T = TypeVar('T', bound=Callable[..., Any])
89

0 commit comments

Comments
 (0)