python-3.x - FSM doesn't work when I run aiogram with FastAPI

I have a problem with FSM when using aiogram together with FastAPI. I took the code from the aiogram_fsm_example, but changed long polling to a FastAPI implementation. This is the code I ended up with:

import logging
from fastapi import FastAPI, Request
import aiogram.utils.markdown as md
from aiogram import Bot, Dispatcher, types
from aiogram.contrib.fsm_storage.memory import MemoryStorage
from aiogram.dispatcher import FSMContext
from aiogram.dispatcher.filters import Text
from aiogram.dispatcher.filters.state import State, StatesGroup
from aiogram.types import ParseMode
from aiogram.utils import executor

logging.basicConfig(level=logging.INFO)

API_TOKEN = "here's the bot token"


bot = Bot(token=API_TOKEN)

# For example use simple MemoryStorage for Dispatcher.
storage = MemoryStorage()
dp = Dispatcher(bot, storage=storage)


# States
class Form(StatesGroup):
    name = State()  # Will be represented in storage as 'Form:name'
    age = State()  # Will be represented in storage as 'Form:age'
    gender = State()  # Will be represented in storage as 'Form:gender'


@dp.message_handler(commands='start')
async def cmd_start(message: types.Message):
"""
Conversation's entry point
"""
# Set state
await Form.name.set()

await message.reply("Hi there! What's your name?")


@dp.message_handler(state=Form.name)
async def process_name(message: types.Message, state: FSMContext):
"""
Process user name
"""
async with state.proxy() as data:
data['name'] = message.text

await Form.next()
await message.reply("How old are you?")


# Check age. Age gotta be digit
@dp.message_handler(lambda message: not message.text.isdigit(), state=Form.age)
async def process_age_invalid(message: types.Message):
"""
If age is invalid
"""
return await message.reply("Age gotta be a number.\nHow old are you? (digits only)")


@dp.message_handler(lambda message: message.text.isdigit(), state=Form.age)
async def process_age(message: types.Message, state: FSMContext):
    # Update state and data
    await Form.next()
    await state.update_data(age=int(message.text))

    # Configure ReplyKeyboardMarkup
    markup = types.ReplyKeyboardMarkup(resize_keyboard=True, selective=True)
    markup.add("Male", "Female")
    markup.add("Other")

    await message.reply("What is your gender?", reply_markup=markup)


@dp.message_handler(lambda message: message.text not in ["Male", "Female", "Other"], state=Form.gender)
async def process_gender_invalid(message: types.Message):
"""
In this example gender has to be one of: Male, Female, Other.
"""
return await message.reply("Bad gender name. Choose your gender from the keyboard.")


@dp.message_handler(state=Form.gender)
async def process_gender(message: types.Message, state: FSMContext):
    async with state.proxy() as data:
        data['gender'] = message.text

        # Remove keyboard
        markup = types.ReplyKeyboardRemove()

        # And send message
        await bot.send_message(
            message.chat.id,
            md.text(
                md.text('Hi! Nice to meet you,', md.bold(data['name'])),
                md.text('Age:', md.code(data['age'])),
                md.text('Gender:', data['gender']),
                sep='\n',
            ),
            reply_markup=markup,
            parse_mode=ParseMode.MARKDOWN,
        )

    # Finish conversation
    await state.finish()


# my changes
app = FastAPI()


@app.get("/")
async def root():
return "ok"


@app.post("/")
async def process_update(request: Request):
    update = await request.json()
    update = types.Update(**update)
    print("incoming", update)
    await dp.process_update(update)
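
One thing the snippet above does not show is the webhook registration itself: Telegram only delivers updates to the POST endpoint if the bot's webhook points at a publicly reachable HTTPS URL that forwards to this app. A minimal sketch of how that could be wired up, assuming a hypothetical WEBHOOK_URL and startup/shutdown hooks that are not part of the original code:

WEBHOOK_URL = "https://example.com/"  # assumption: a public HTTPS URL (e.g. an ngrok tunnel) routed to this app


@app.on_event("startup")
async def on_startup():
    # Point Telegram at the FastAPI endpoint instead of long polling.
    webhook = await bot.get_webhook_info()
    if webhook.url != WEBHOOK_URL:
        await bot.set_webhook(WEBHOOK_URL)


@app.on_event("shutdown")
async def on_shutdown():
    # Drop the webhook and close the FSM storage cleanly.
    await bot.delete_webhook()
    await dp.storage.close()
    await dp.storage.wait_closed()
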
But when I run it with uvicorn (uvicorn main:app) and send the /start command to the bot, the backend throws this error:
ERROR:    Exception in ASGI application
Traceback (most recent call last):
File "/home/oleh/projects/tg_bot_test_fsm/.venv/lib/python3.8/site-packages/uvicorn/protocols/http/h11_impl.py", line 373, in run_asgi
result = await app(self.scope, self.receive, self.send)
File "/home/oleh/projects/tg_bot_test_fsm/.venv/lib/python3.8/site-packages/uvicorn/middleware/proxy_headers.py", line 75, in __call__
return await self.app(scope, receive, send)
File "/home/oleh/projects/tg_bot_test_fsm/.venv/lib/python3.8/site-packages/fastapi/applications.py", line 208, in __call__
await super().__call__(scope, receive, send)
File "/home/oleh/projects/tg_bot_test_fsm/.venv/lib/python3.8/site-packages/starlette/applications.py", line 112, in __call__
await self.middleware_stack(scope, receive, send)
File "/home/oleh/projects/tg_bot_test_fsm/.venv/lib/python3.8/site-packages/starlette/middleware/errors.py", line 181, in __call__
raise exc
File "/home/oleh/projects/tg_bot_test_fsm/.venv/lib/python3.8/site-packages/starlette/middleware/errors.py", line 159, in __call__
await self.app(scope, receive, _send)
File "/home/oleh/projects/tg_bot_test_fsm/.venv/lib/python3.8/site-packages/starlette/exceptions.py", line 82, in __call__
raise exc
File "/home/oleh/projects/tg_bot_test_fsm/.venv/lib/python3.8/site-packages/starlette/exceptions.py", line 71, in __call__
await self.app(scope, receive, sender)
File "/home/oleh/projects/tg_bot_test_fsm/.venv/lib/python3.8/site-packages/starlette/routing.py", line 656, in __call__
await route.handle(scope, receive, send)
File "/home/oleh/projects/tg_bot_test_fsm/.venv/lib/python3.8/site-packages/starlette/routing.py", line 259, in handle
await self.app(scope, receive, send)
File "/home/oleh/projects/tg_bot_test_fsm/.venv/lib/python3.8/site-packages/starlette/routing.py", line 61, in app
response = await func(request)
File "/home/oleh/projects/tg_bot_test_fsm/.venv/lib/python3.8/site-packages/fastapi/routing.py", line 226, in app
raw_response = await run_endpoint_function(
File "/home/oleh/projects/tg_bot_test_fsm/.venv/lib/python3.8/site-packages/fastapi/routing.py", line 159, in run_endpoint_function
return await dependant.call(**values)
File "/home/oleh/projects/tg_bot_test_fsm/./main.py", line 124, in process_update
await dp.process_update(update)
File "/home/oleh/projects/tg_bot_test_fsm/.venv/lib/python3.8/site-packages/aiogram/dispatcher/dispatcher.py", line 257, in process_update
return await self.message_handlers.notify(update.message)
File "/home/oleh/projects/tg_bot_test_fsm/.venv/lib/python3.8/site-packages/aiogram/dispatcher/handler.py", line 116, in notify
response = await handler_obj.handler(*args, **partial_data)
File "/home/oleh/projects/tg_bot_test_fsm/./main.py", line 38, in cmd_start
await Form.name.set()
File "/home/oleh/projects/tg_bot_test_fsm/.venv/lib/python3.8/site-packages/aiogram/dispatcher/filters/state.py", line 56, in set
state = Dispatcher.get_current().current_state()
AttributeError: 'NoneType' object has no attribute 'current_state'
As I understand it: the dispatcher holds a state that is not created when I use the dp.process_update() function.
When I run it with long polling, everything works fine, but I need to run it with FastAPI.
Is there a way to set the state manually? Or am I just not handling it correctly?
P.S. I am running this in WSL, Ubuntu 20.04 LTS. Python version is 3.8.10, aiogram 2.15, uvicorn 0.15.0 and FastAPI 0.70.0.
SOLVED: if you use FastAPI together with aiogram and want FSM, you need to set the state in another way, via state.set_state(Form.name). The working code of my start handler looks like this:
@dp.message_handler(commands='start', state="*")
async def cmd_start(message: types.Message, state: FSMContext):
"""
Conversation's entry point
"""
# Set state
await state.set_state(Form.name)

await message.reply("Hi there! What's your name?")

Best answer

It is enough to set the current context for the Dispatcher:

dp = Dispatcher(bot, storage=storage)
Dispatcher.set_current(dp)
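
For reference, a minimal sketch of how this fix could be applied per request inside the FastAPI handler from the question; setting the bot context as well is an extra precaution for context-based helpers such as message.answer(), not something the accepted answer requires:

@app.post("/")
async def process_update(request: Request):
    update = types.Update(**(await request.json()))
    # Make the dispatcher and bot visible to aiogram's context helpers
    # (Form.name.set(), FSMContext lookups, message.answer(), ...); without
    # this, Dispatcher.get_current() returns None, which is exactly the
    # AttributeError from the traceback above.
    Dispatcher.set_current(dp)
    Bot.set_current(bot)
    await dp.process_update(update)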

Regarding "python-3.x - FSM doesn't work when I run aiogram with FastAPI", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/69502039/
