Movatterモバイル変換


[0]ホーム

URL:


Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Sign up
Appearance settings

Deprecate send_catch_log_deferred(). #7161

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wRAR wants to merge 6 commits into scrapy:master
base:master
Choose a base branch
Loading
from wRAR:send_catch_log_async
Draft
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Previous commit
Next commit
More engine start/stop robustness.
  • Loading branch information
@wRAR
wRAR committed Nov 28, 2025
commit 5c8b3f3dd32173cb0f3cc4dde6dc469864f27f53
27 changes: 21 additions & 6 deletions scrapy/core/engine.py
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up
@@ -115,6 +115,8 @@ def __init__(
self._slot: _Slot | None = None
self.spider: Spider | None = None
self.running: bool = False
self._starting: bool = False
self._stopping: bool = False
self.paused: bool = False
self._spider_closed_callback: Callable[
[Spider], Coroutine[Any, Any, None] | Deferred[None] | None
Expand Down
Expand Up
@@ -172,10 +174,14 @@ async def start_async(self, *, _start_request_processing: bool = True) -> None:

.. versionadded:: VERSION
"""
if self.running:
if self._starting:
raise RuntimeError("Engine already running")
self.start_time = time()
self._starting = True
await self.signals.send_catch_log_async(signal=signals.engine_started)
if self._stopping:
# band-aid until https://github.com/scrapy/scrapy/issues/6916
return
if _start_request_processing and self.spider is None:
# require an opened spider when not run in scrapy shell
return
Expand Down
Expand Up
@@ -205,12 +211,21 @@ async def stop_async(self) -> None:
.. versionadded:: VERSION
"""

if not self.running:
if not self._starting:
raise RuntimeError("Engine not running")

self.running = False
self.running = self._starting = False
self._stopping = True
if self._start_request_processing_awaitable is not None:
self._start_request_processing_awaitable.cancel()
if (
not is_asyncio_available()
or self._start_request_processing_awaitable
is not asyncio.current_task()
):
# If using the asyncio loop and stop_async() was called from
# start() itself, we can't cancel it, and _start_request_processing()
# will exit via the self.running check.
self._start_request_processing_awaitable.cancel()
self._start_request_processing_awaitable = None
if self.spider is not None:
await self.close_spider_async(reason="shutdown")
Expand Down
Expand Up
@@ -285,15 +300,15 @@ async def _start_request_processing(self) -> None:
self._slot.nextcall.schedule()
self._slot.heartbeat.start(self._SLOT_HEARTBEAT_INTERVAL)

while self._start and self.spider:
while self._start and self.spider and self.running:
await self._process_start_next()
if not self.needs_backout():
# Give room for the outcome of self._process_start_next() to be
# processed before continuing with the next iteration.
self._slot.nextcall.schedule()
await self._slot.nextcall.wait()
except (asyncio.exceptions.CancelledError, CancelledError):
# self.stop() has cancelled us, nothing to do
# self.stop_async() has cancelled us, nothing to do
return
except Exception:
# an error happened, log it and stop the engine
Expand Down

[8]ページ先頭

©2009-2025 Movatter.jp