Remove asyncio: convert the async/await methods in the ASR, NLP and TTS modules to synchronous calls

jiegeaiai 2024-11-16 10:13:11 +08:00
parent aa554b1209
commit 4e98ee0e76
6 changed files with 32 additions and 32 deletions

View File

@@ -58,7 +58,7 @@ class SherpaNcnnAsr(AsrBase):
     def _recognize_loop(self):
         segment_id = 0
-        time.sleep(9)
+        time.sleep(3)
         last_result = ""
         logger.info(f'_recognize_loop')
         print(f'_recognize_loop')

View File

@@ -42,10 +42,10 @@ class NLPBase(AsrObserver):
         if self._callback is not None and self._is_running:
             self._callback.on_message(txt)

-    async def _request(self, question):
+    def _request(self, question):
         pass

-    async def _on_close(self):
+    def _on_close(self):
         pass

     def process(self, message: str):
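With the `async` variants removed, `_request` and `_on_close` become ordinary methods that subclasses override and that `process` can invoke directly. A minimal standalone sketch of that shape (the class name, the echo body, and the callback wiring are illustrative, not the repository's code):

# Standalone sketch of the pattern NLPBase settles into after this commit:
# recognized text goes to a blocking _request hook, results go to a callback.
class MiniNLP:
    def __init__(self, callback):
        self._callback = callback
        self._is_running = True

    def _request(self, question):          # plain method, was `async def`
        answer = f'echo: {question}'       # stand-in for a real model call
        if self._callback is not None and self._is_running:
            self._callback(answer)

    def _on_close(self):                   # plain method, was `async def`
        self._is_running = False

    def process(self, message: str):
        self._request(message)             # direct call, no event loop

MiniNLP(print).process('hello')            # prints: echo: hello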

View File

@@ -17,13 +17,13 @@ class DouBaoSDK:
         self.__client = AsyncArk(api_key=token)
         self._stream = None

-    async def request(self, question, handle, callback):
+    def request(self, question, handle, callback):
         if self.__client is None:
             self.__client = AsyncArk(api_key=self._token)

         t = time.time()
         logger.info(f'-------dou_bao ask:{question}')
         try:
-            self._stream = await self.__client.chat.completions.create(
+            self._stream = self.__client.chat.completions.create(
                 model="ep-20241008152048-fsgzf",
                 messages=[
                     {"role": "system", "content": "你是测试客服,是由字节跳动开发的 AI 人工智能助手"},
@@ -33,7 +33,7 @@ class DouBaoSDK:
             )
             sec = ''
-            async for completion in self._stream:
+            for completion in self._stream:
                 sec = sec + completion.choices[0].delta.content
                 sec, message = handle.handle(sec)
                 if len(message) > 0:
@@ -41,19 +41,19 @@ class DouBaoSDK:
                     callback(message)
             callback(sec)
             # logger.info(f'-------dou_bao nlp time:{time.time() - t:.4f}s')
-            await self._stream.close()
+            self._stream.close()
             self._stream = None
         except Exception as e:
             logger.error(f'-------dou_bao error:{e}')
         # logger.info(f'-------dou_bao nlp time:{time.time() - t:.4f}s')

-    async def close(self):
+    def close(self):
         if self._stream is not None:
-            await self._stream.close()
+            self._stream.close()
             self._stream = None
             logger.info('AsyncArk close')

         if self.__client is not None and not self.__client.is_closed():
-            await self.__client.close()
+            self.__client.close()
             self.__client = None

     def aclose(self):
@@ -88,7 +88,7 @@ class DouBaoHttp:
         response = requests.post(url, headers=headers, json=data, stream=True)
         return response

-    async def request(self, question, handle, callback):
+    def request(self, question, handle, callback):
         t = time.time()
         self._requesting = True
         logger.info(f'-------dou_bao ask:{question}')
@@ -126,7 +126,7 @@ class DouBaoHttp:
         self._requesting = False
         logger.info(f'-------dou_bao nlp time:{time.time() - t:.4f}s')

-    async def close(self):
+    def close(self):
         if self._response is not None and self._requesting:
             self._response.close()
@@ -155,12 +155,12 @@ class DouBao(NLPBase):
         self.__token = 'c9635f9e-0f9e-4ca1-ac90-8af25a541b74'
         self._dou_bao = DouBaoHttp(self.__token)

-    async def _request(self, question):
-        await self._dou_bao.request(question, self._split_handle, self._on_callback)
+    def _request(self, question):
+        self._dou_bao.request(question, self._split_handle, self._on_callback)

-    async def _on_close(self):
+    def _on_close(self):
         if self._dou_bao is not None:
-            await self._dou_bao.close()
+            self._dou_bao.close()
             logger.info('AsyncArk close')

     def on_clear_cache(self, *args, **kwargs):
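`DouBaoHttp` is already built on the blocking `requests` client (`requests.post(url, headers=headers, json=data, stream=True)` above), so dropping `async` here is mostly a signature change. For reference, a rough sketch of how such a synchronous streaming request can be consumed; the SSE-style `data:` line format, the sentence-splitting stand-in, and all names below are assumptions, not code from the repository:

import json
import requests

def stream_chat(url, headers, payload, on_sentence):
    # Blocking, SSE-style read of a streaming chat completion.
    buffer = ''
    response = requests.post(url, headers=headers, json=payload, stream=True)
    try:
        for line in response.iter_lines(decode_unicode=True):
            if not line or not line.startswith('data:'):
                continue  # skip keep-alive and non-data lines
            body = line[len('data:'):].strip()
            if body == '[DONE]':
                break
            delta = json.loads(body)['choices'][0]['delta'].get('content') or ''
            buffer += delta
            # Stand-in for the sentence splitting that handle.handle() performs above.
            if buffer.endswith(('。', '！', '？', '.', '!', '?')):
                on_sentence(buffer)
                buffer = ''
    finally:
        response.close()
    if buffer:
        on_sentence(buffer)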

View File

@@ -69,6 +69,7 @@ class TTSAudioSplitHandle(TTSAudioHandle):
             logger.info('TTSAudioSplitHandle::on_handle is not running')
             return
+        logger.info(f'TTSAudioSplitHandle::on_handle {index}')
         s, txt = stream
         current = 0
         with self._lock:
@@ -76,7 +77,6 @@ class TTSAudioSplitHandle(TTSAudioHandle):
                 current = self._priority_queue[0][0]
             if current == 0:
                 self._current = 0
                 self._priority_queue.clear()
             if s is None:
                 heapq.heappush(self._priority_queue, (index, None))
@@ -101,9 +101,9 @@ class TTSAudioSplitHandle(TTSAudioHandle):
             if chunks is not None:
                 for chunk in chunks:
-                    logger.info(f'TTSAudioSplitHandle::on_handle push')
+                    # logger.info(f'TTSAudioSplitHandle::on_handle push')
                     self.on_next_handle((chunk, txt), 0)
-            logger.info(f'TTSAudioSplitHandle::on_handle push finish')
+            # logger.info(f'TTSAudioSplitHandle::on_handle push finish')

     def stop(self):
         self._is_running = False
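The handle's heap keyed by chunk index (`heapq.heappush(self._priority_queue, (index, None))` above) exists so that audio chunks finishing out of order are still released in order. A condensed, standalone sketch of that release rule, not the class's full logic:

import heapq

class OrderedRelease:
    # Release (index, payload) pairs strictly in index order.
    def __init__(self):
        self._heap = []
        self._next = 0            # next index we are allowed to emit

    def push(self, index, payload, emit):
        heapq.heappush(self._heap, (index, payload))
        # Drain everything that is now contiguous with what was already emitted.
        while self._heap and self._heap[0][0] == self._next:
            _, ready = heapq.heappop(self._heap)
            if ready is not None:  # None marks an empty/filler segment, as above
                emit(ready)
            self._next += 1

r = OrderedRelease()
r.push(1, 'b', print)   # held back: index 0 has not arrived yet
r.push(0, 'a', print)   # prints 'a' then 'b'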

View File

@@ -37,30 +37,30 @@ class TTSBase(NLPCallback):
     def handle(self, value):
         self._handle = value

-    async def _request(self, txt: str, index):
+    def _request(self, txt: str, index):
         if not self._is_running:
             logger.info('TTSBase::_request is not running')
             return

         t = time.time()
-        stream = await self._on_request(txt)
+        stream = self._on_request(txt)
         logger.info(f'-------tts request time:{time.time() - t:.4f}s, txt:{txt}')
         if stream is None or self._is_running is False:
             logger.warning(f'-------stream is None or is_running {self._is_running}')
             return

         if self._handle is not None and self._is_running:
-            await self._on_handle((stream, txt), index)
+            self._on_handle((stream, txt), index)
         else:
             logger.info(f'handle is None, running:{self._is_running}')
         logger.info(f'-------tts finish time:{time.time() - t:.4f}s, txt:{txt}')

-    async def _on_request(self, text: str):
+    def _on_request(self, text: str):
         pass

-    async def _on_handle(self, stream, index):
+    def _on_handle(self, stream, index):
         pass

-    async def _on_close(self):
+    def _on_close(self):
         pass

     def on_message(self, txt: str):
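Since `_request` is no longer a coroutine, whatever previously scheduled it on an event loop now has to call it directly, typically from a worker thread so synthesis does not block the caller. A hedged sketch of one way the calling side could be wired; the queue-and-thread setup is an assumption about usage, not the repository's scheduling code:

import queue
import threading

def tts_worker(tts, jobs):
    # Drain (txt, index) jobs and run the now-blocking TTSBase._request.
    while True:
        item = jobs.get()
        if item is None:           # sentinel used to stop the worker
            break
        txt, index = item
        tts._request(txt, index)   # plain call; previously awaited on an event loop

# Usage sketch (tts is a TTSBase subclass instance):
# jobs = queue.Queue()
# threading.Thread(target=tts_worker, args=(tts, jobs), daemon=True).start()
# jobs.put(('hello world', 0))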

View File

@@ -24,12 +24,12 @@ class TTSEdgeHttp(TTSBase):
         logger.info(f"TTSEdge init, {voice}")
         self._response_list = []

-    async def _on_async_request(self, data):
-        async with aiohttp.ClientSession() as session:
-            async with session.post(self._url, json=data) as response:
+    def _on_async_request(self, data):
+        with aiohttp.ClientSession() as session:
+            with session.post(self._url, json=data) as response:
                 print('TTSEdgeHttp, _on_request, response:', response)
                 if response.status == 200:
-                    stream = BytesIO(await response.read())
+                    stream = BytesIO(response.read())
                     return stream
                 else:
                     byte_stream = None
@@ -44,7 +44,7 @@ class TTSEdgeHttp(TTSBase):
             self._response_list.remove(response)
         return stream

-    async def _on_request(self, txt: str):
+    def _on_request(self, txt: str):
         logger.info(f'TTSEdgeHttp, _on_request, txt:{txt}')
         data = {
             "model": "tts-1",
@@ -57,7 +57,7 @@ class TTSEdgeHttp(TTSBase):
         # return self._on_async_request(data)
         return self._on_sync_request(data)

-    async def _on_handle(self, stream, index):
+    def _on_handle(self, stream, index):
         st, txt = stream
         try:
             st.seek(0)
@@ -92,7 +92,7 @@ class TTSEdgeHttp(TTSBase):
         return stream

-    async def _on_close(self):
+    def _on_close(self):
         print('TTSEdge close')
         # if self._byte_stream is not None and not self._byte_stream.closed:
         #     self._byte_stream.close()
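One caveat in `_on_async_request` after this change: `aiohttp.ClientSession` is asyncio-only, so a plain `with session.post(...)` and a bare `response.read()` will not work outside an event loop. The live path above sidesteps this by returning `self._on_sync_request(data)` instead. A synchronous equivalent built on `requests` could look roughly like this (`_on_sync_request` itself is not shown in the diff, so the body below is an assumption):

from io import BytesIO
import requests

def on_sync_request(url, data):
    # Blocking counterpart of _on_async_request: POST the TTS payload and
    # return the audio bytes wrapped in a BytesIO, or None on failure.
    try:
        response = requests.post(url, json=data, timeout=30)
        if response.status_code == 200:
            return BytesIO(response.content)
        print('TTSEdgeHttp, sync request failed, status:', response.status_code)
        return None
    except requests.RequestException as e:
        print('TTSEdgeHttp, sync request error:', e)
        return None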