Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 14 additions & 3 deletions moshi/moshi/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,9 +136,20 @@ def _on_audio_output(self, out_data, frames, time_, status) -> None:

async def run(self) -> None:
    """Run the client audio pipeline until any of its loops finishes.

    Starts the receive, decoder and queue loops concurrently inside the
    audio stream context managers. As soon as one loop completes (or
    fails), the remaining loops are cancelled so the client shuts down
    promptly instead of hanging on the still-running loops — this is the
    reason `asyncio.wait(FIRST_COMPLETED)` is used rather than
    `asyncio.gather`, which would wait for all three.

    Raises:
        Whatever exception terminated the first-finishing loop; it is
        re-raised here instead of being silently discarded.
    """
    with self._in_stream, self._out_stream:
        tasks = [
            asyncio.create_task(self._recv_loop()),
            asyncio.create_task(self._decoder_loop()),
            asyncio.create_task(self._queue_loop()),
        ]
        done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)

        # Cancel the loops that are still running.
        for task in pending:
            task.cancel()

        # Wait for the cancelled tasks so their cleanup runs to completion.
        # asyncio.wait does not raise, so the CancelledError each task ends
        # with is absorbed here rather than propagated.
        if pending:
            await asyncio.wait(pending)

        # Propagate any exception from the task(s) that finished first;
        # without this, a crash in one loop would be silently swallowed.
        for task in done:
            task.result()


async def run(printer: AnyPrinter, args):
Expand Down
17 changes: 15 additions & 2 deletions moshi/moshi/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,20 @@ async def send_loop():
self.lm_gen.reset_streaming()
# Send the handshake.
await ws.send_bytes(b"\x00")
await asyncio.gather(opus_loop(), recv_loop(), send_loop())
tasks = [
asyncio.create_task(opus_loop()),
asyncio.create_task(recv_loop()),
asyncio.create_task(send_loop())
]
done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)

# Cancel any remaining tasks
for task in pending:
task.cancel()

# Wait for cancelled tasks to complete
if pending:
await asyncio.wait(pending, return_when=asyncio.ALL_COMPLETED)
log("info", "done with connection")
return ws

Expand Down Expand Up @@ -284,7 +297,7 @@ async def handle_root(_):
tunnel = setup_tunnel('localhost', args.port, tunnel_token, None, **tunnel_kwargs)
log("info", f"Tunnel started, if executing on a remote GPU, you can use {tunnel}.")
log("info", "Note that this tunnel goes through the US and you might experience high latency in Europe.")
web.run_app(app, host=args.host , port=args.port, ssl_context=ssl_context)
web.run_app(app, host=args.host, port=args.port, ssl_context=ssl_context)


with torch.no_grad():
Expand Down