Compare commits


No commits in common. "15b93e3cfbc167da0d59a70c831847c0c0d755a2" and "5ee4b3b7207194a010ada4825b99b378f381768b" have entirely different histories.

20 changed files with 272 additions and 19 deletions

1
.gitignore vendored

@@ -1,3 +1,2 @@
 .venv
 __*
-.pypirc

BIN
dist/yura-14.3.7-py3-none-any.whl vendored Normal file

Binary file not shown.

BIN
dist/yura-14.3.7.tar.gz vendored Normal file

Binary file not shown.

42
dist/yura-14.3.7/PKG-INFO vendored Normal file

@@ -0,0 +1,42 @@
Metadata-Version: 2.1
Name: yura
Version: 14.3.7
Summary: Yura async AI client
Author: retoor
Author-email: retoor@retoor.io
License: MIT
Requires-Python: >=3.7
Description-Content-Type: text/markdown
Requires-Dist: websockets
# Yura LLM Client for Katya server
Part of a project that aims to replace the native ollama protocol. The protocol supports streaming, works over HTTPS, and allows a web client to attach directly to the backend.
## Install
```bash
pip install -e .
```
## Build
```bash
make build
```
## Command line usage
```bash
yura ws://[host]:[port]/[path]/
```
## Python
```python
import asyncio

from yura.client import AsyncClient


async def communicate():
    client = AsyncClient("ws://[host]:[port]/[path]/")
    async for response in client.chat("Your prompt"):
        print(response)


asyncio.run(communicate())
```
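
A note on the stream shape (editor's sketch, not part of this package): as `client.py` later in this diff shows, every response yielded by `AsyncClient.chat` is a JSON-decoded dict carrying a `content` chunk and a `done` flag. The chunks can be assembled into a full reply like this; the `collect` helper is hypothetical:

```python
import asyncio

from yura.client import AsyncClient


async def collect(prompt):
    # Hypothetical helper: gather streamed chunks into one string.
    # ws://127.0.0.1:8470 is the default URL from client.py.
    client = AsyncClient("ws://127.0.0.1:8470")
    parts = []
    async for response in client.chat(prompt):
        parts.append(response["content"])
        if response["done"]:  # final message closes the stream
            break
    return "".join(parts)


print(asyncio.run(collect("Your prompt")))
```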

31
dist/yura-14.3.7/README.md vendored Normal file

@@ -0,0 +1,31 @@
# Yura LLM Client for Katya server
Part of a project that aims to replace the native ollama protocol. The protocol supports streaming, works over HTTPS, and allows a web client to attach directly to the backend.
## Install
```bash
pip install -e .
```
## Build
```bash
make build
```
## Command line usage
```bash
yura ws://[host]:[port]/[path]/
```
## Python
```python
import asyncio

from yura.client import AsyncClient


async def communicate():
    client = AsyncClient("ws://[host]:[port]/[path]/")
    async for response in client.chat("Your prompt"):
        print(response)


asyncio.run(communicate())
```

3
dist/yura-14.3.7/pyproject.toml vendored Normal file

@@ -0,0 +1,3 @@
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
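
For context (editor's note, not part of the diff): this `pyproject.toml` selects the setuptools backend, and the `make` script further down builds with `python -m build .`. The wheel and sdist shown as binary files above can also be produced programmatically; a minimal sketch, assuming the `build` package's `ProjectBuilder` API:

```python
# Sketch only: this repo shells out to "python -m build ." instead.
from build import ProjectBuilder

builder = ProjectBuilder(".")    # project root containing pyproject.toml
builder.build("sdist", "dist")   # -> dist/yura-14.3.7.tar.gz
builder.build("wheel", "dist")   # -> dist/yura-14.3.7-py3-none-any.whl
```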

25
dist/yura-14.3.7/setup.cfg vendored Normal file

@@ -0,0 +1,25 @@
[metadata]
name = yura
version = 14.3.7
description = Yura async AI client
author = retoor
author_email = retoor@retoor.io
license = MIT
long_description = file: README.md
long_description_content_type = text/markdown

[options]
packages = find:
package_dir =
    = src
python_requires = >=3.7
install_requires =
    websockets

[options.packages.find]
where = src

[egg_info]
tag_build =
tag_date = 0

dist/yura-14.3.7/src/yura.egg-info/PKG-INFO vendored Normal file

@@ -0,0 +1,42 @@
Metadata-Version: 2.1
Name: yura
Version: 14.3.7
Summary: Yura async AI client
Author: retoor
Author-email: retoor@retoor.io
License: MIT
Requires-Python: >=3.7
Description-Content-Type: text/markdown
Requires-Dist: websockets
# Yura LLM Client for Katya server
Part of a project that aims to replace the native ollama protocol. The protocol supports streaming, works over HTTPS, and allows a web client to attach directly to the backend.
## Install
```bash
pip install -e .
```
## Build
```bash
make build
```
## Command line usage
```bash
yura ws://[host]:[port]/[path]/
```
## Python
```python
import asyncio

from yura.client import AsyncClient


async def communicate():
    client = AsyncClient("ws://[host]:[port]/[path]/")
    async for response in client.chat("Your prompt"):
        print(response)


asyncio.run(communicate())
```

dist/yura-14.3.7/src/yura.egg-info/SOURCES.txt vendored Normal file

@@ -0,0 +1,11 @@
README.md
pyproject.toml
setup.cfg
src/yura/__init__.py
src/yura/__main__.py
src/yura/client.py
src/yura.egg-info/PKG-INFO
src/yura.egg-info/SOURCES.txt
src/yura.egg-info/dependency_links.txt
src/yura.egg-info/requires.txt
src/yura.egg-info/top_level.txt

dist/yura-14.3.7/src/yura.egg-info/dependency_links.txt vendored Normal file

@@ -0,0 +1 @@

dist/yura-14.3.7/src/yura.egg-info/requires.txt vendored Normal file

@@ -0,0 +1 @@
websockets

dist/yura-14.3.7/src/yura.egg-info/top_level.txt vendored Normal file

@@ -0,0 +1 @@
yura

106
dist/yura-14.3.7/src/yura/client.py vendored Normal file

@@ -0,0 +1,106 @@
import asyncio
import json
import sys

import websockets


class AsyncClient:
    def __init__(self, url="ws://127.0.0.1:8470"):
        self.url = url
        self.ws = None
        self.queue_in = asyncio.Queue()   # responses from the server
        self.queue_out = asyncio.Queue()  # prompts waiting to be sent
        self.communication_task = None

    async def ensure_connection(self):
        if not self.ws:
            self.ws = await websockets.connect(self.url)
        return self.ws

    async def ensure_communication(self):
        if not self.communication_task:
            self.communication_task = asyncio.create_task(self.communicate())
        return self.communication_task

    async def chat(self, message):
        await self.ensure_communication()
        await self.queue_out.put(message)
        while True:
            # Poll the inbound queue until the next response arrives.
            while True:
                try:
                    response = await asyncio.wait_for(self.queue_in.get(), 0.1)
                except asyncio.TimeoutError:
                    continue
                break
            yield response
            if response["done"]:
                break

    async def communicate(self):
        async with websockets.connect(self.url) as websocket:
            while True:
                # Wait for the next outbound message.
                message_content = None
                while not message_content:
                    try:
                        message_content = await asyncio.wait_for(
                            self.queue_out.get(), 0.1
                        )
                    except asyncio.TimeoutError:
                        continue
                await websocket.send(json.dumps(message_content))
                # Relay streamed responses until the server signals "done",
                # then forward the final message as well.
                while True:
                    response = json.loads(await websocket.recv())
                    if response["done"]:
                        break
                    await self.queue_in.put(response)
                await self.queue_in.put(response)


async def cli_client(url="ws://127.0.0.1:8470"):
    loop = asyncio.get_event_loop()
    async_client = AsyncClient(url)
    while True:
        sys.stdout.write("> ")
        sys.stdout.flush()
        # Read stdin in an executor so the event loop stays responsive.
        message_content = await loop.run_in_executor(None, sys.stdin.readline)
        async for response in async_client.chat(message_content):
            print(response["content"], end="", flush=True)
            if response["done"]:
                break
        print("")


def main():
    url = "ws://127.0.0.1:8470"
    try:
        url = sys.argv[1]
    except IndexError:
        pass
    asyncio.run(cli_client(url))


if __name__ == "__main__":
    main()
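
Editor's note: `client.py` implies a small wire protocol: the client sends a JSON-encoded prompt, then reads JSON messages until one carries `"done": true`, with each intermediate message holding a `content` chunk. A mock server for local testing might look like the sketch below; it is hypothetical, not part of this diff, and assumes a websockets version whose handlers take a single argument:

```python
# mock_server.py (hypothetical): speaks the framing client.py expects,
# streaming {"content": ..., "done": false} chunks and a final "done" message.
import asyncio
import json

import websockets


async def handler(websocket):
    async for raw in websocket:
        prompt = json.loads(raw)
        # Echo the prompt back word by word to exercise streaming.
        for word in str(prompt).split():
            await websocket.send(json.dumps({"content": word + " ", "done": False}))
        await websocket.send(json.dumps({"content": "", "done": True}))


async def main():
    async with websockets.serve(handler, "127.0.0.1", 8470):
        await asyncio.Future()  # run until cancelled


if __name__ == "__main__":
    asyncio.run(main())
```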

Binary file not shown.

Binary file not shown.

19
make

@@ -9,15 +9,6 @@ args_string = " ".join(args)
 def install():
     os.system("./.venv/bin/python -m pip install -e .")
-def build():
-    os.system("./.venv/bin/python -m pip install build")
-    os.system("rm -r dist")
-    os.system("./.venv/bin/python -m build .")
-    os.system("./.venv/bin/python -m pip install black")
-    os.system("./.venv/bin/python -m black .")
 if not pathlib.Path(".venv").exists():
     os.system("python3 -m venv .venv")
     install()
@@ -26,12 +17,10 @@ if "install" in args:
     install()
 if "build" in sys.argv:
-    build()
-if "publish" in sys.argv:
-    build()
-    os.system("./.venv/bin/python -m pip install twine")
-    os.system("./.venv/bin/python -m twine upload --repository gitea dist/*")
+    os.system("./.venv/bin/python -m pip install build")
+    os.system("./.venv/bin/python -m build .")
+    os.system("./.venv/bin/python -m pip install black")
+    os.system("./.venv/bin/python -m black .")
 if "run" in sys.argv:
     os.system("./.venv/bin/yura " + args_string)

setup.cfg

@@ -1,6 +1,6 @@
 [metadata]
 name = yura
-version = 14.3.9
+version = 14.3.7
 description = Yura async AI client
 author = retoor
 author_email = retoor@retoor.io

src/yura.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: yura
-Version: 14.3.9
+Version: 14.3.7
 Summary: Yura async AI client
 Author: retoor
 Author-email: retoor@retoor.io

src/yura.egg-info/SOURCES.txt

@@ -1,6 +1,8 @@
 README.md
 pyproject.toml
 setup.cfg
+src/yura/__init__.py
+src/yura/__main__.py
 src/yura/cli.py
 src/yura/client.py
 src/yura.egg-info/PKG-INFO

src/yura.egg-info/top_level.txt

@@ -1 +1 @@
yura