author    jwansek <eddie.atten.ea29@gmail.com>  2024-02-27 12:43:15 +0000
committer jwansek <eddie.atten.ea29@gmail.com>  2024-02-27 12:43:15 +0000
commit    b5e7a1d7d2adb3670824d726573cc08157a02b69 (patch)
tree      fa2f8f00435468a36e6705cd696648991b599d89 /noetic-llama/src/ollamawrapper
parent    a398252b0dfe3c112e5643104aaf22c411a15b1d (diff)
download  noetic-llama-b5e7a1d7d2adb3670824d726573cc08157a02b69.tar.gz
          noetic-llama-b5e7a1d7d2adb3670824d726573cc08157a02b69.zip
Added the ROS service call API
Diffstat (limited to 'noetic-llama/src/ollamawrapper')
-rw-r--r--               noetic-llama/src/ollamawrapper/CMakeLists.txt                26
-rwxr-xr-x [-rw-r--r--]  noetic-llama/src/ollamawrapper/package.xml                    0
-rw-r--r--               noetic-llama/src/ollamawrapper/src/capabilities/weather.py    6
-rw-r--r--               noetic-llama/src/ollamawrapper/src/ollamawrapper.py          21
-rwxr-xr-x               noetic-llama/src/ollamawrapper/srv/OllamaCall.srv             8
5 files changed, 38 insertions, 23 deletions
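
For illustration, the new service can be called from any other ROS node once the wrapper server is running. The sketch below is an assumption, not part of this commit: the service name "ollama_call" does not appear in the visible hunks, and the OllamaCall type is taken from the ollamamessages package referenced by the changed import further down.

    #!/usr/bin/env python3
    # Hypothetical client sketch; the service name "ollama_call" is assumed.
    import rospy
    from ollamamessages.srv import OllamaCall

    rospy.init_node("ollama_client_example")
    rospy.wait_for_service("ollama_call")
    ollama_call = rospy.ServiceProxy("ollama_call", OllamaCall)
    resp = ollama_call(input="What's the weather like in Lincoln right now?")
    print(resp.total_duration, resp.eval_count)
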
diff --git a/noetic-llama/src/ollamawrapper/CMakeLists.txt b/noetic-llama/src/ollamawrapper/CMakeLists.txt
index c94424b..4533887 100644
--- a/noetic-llama/src/ollamawrapper/CMakeLists.txt
+++ b/noetic-llama/src/ollamawrapper/CMakeLists.txt
@@ -55,10 +55,10 @@ find_package(catkin REQUIRED COMPONENTS
# )
## Generate services in the 'srv' folder
-add_service_files(
- FILES
- OllamaCall.srv
-)
+#add_service_files(
+# FILES
+# OllamaCall.srv
+#)
## Generate actions in the 'action' folder
# add_action_files(
@@ -104,9 +104,9 @@ generate_messages(
## DEPENDS: system dependencies of this project that dependent projects also need
catkin_package(
# INCLUDE_DIRS include
-# LIBRARIES ollamawrapper
-# CATKIN_DEPENDS roscpp rospy std_msgs
-# DEPENDS system_lib
+ LIBRARIES ollamawrapper
+ CATKIN_DEPENDS roscpp rospy std_msgs message_runtime
+ DEPENDS system_lib
)
###########
@@ -164,6 +164,18 @@ catkin_install_python(PROGRAMS
DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
)
+install(
+ DIRECTORY src/capabilities/
+ DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}/capabilities
+)
+
+install(FILES
+ src/Modelfile.jinja2
+ src/ollamafunctiongrammar.ppeg
+ src/parser.py
+ DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+)
+
## Mark executables for installation
## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html
# install(TARGETS ${PROJECT_NAME}_node
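
Because Modelfile.jinja2, ollamafunctiongrammar.ppeg, parser.py and the capabilities/ directory are installed alongside the node under ${CATKIN_PACKAGE_BIN_DESTINATION}, the node can keep resolving them relative to its own location whether it runs from the source tree or the install space. A minimal lookup sketch (the actual loading code is not part of this diff):

    import os

    # Support files sit next to the installed script, so resolve them from __file__.
    BASE_DIR = os.path.dirname(os.path.abspath(__file__))
    MODELFILE_TEMPLATE = os.path.join(BASE_DIR, "Modelfile.jinja2")
    GRAMMAR_FILE = os.path.join(BASE_DIR, "ollamafunctiongrammar.ppeg")
    CAPABILITIES_DIR = os.path.join(BASE_DIR, "capabilities")
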
diff --git a/noetic-llama/src/ollamawrapper/package.xml b/noetic-llama/src/ollamawrapper/package.xml
index cfad2a4..cfad2a4 100644..100755
--- a/noetic-llama/src/ollamawrapper/package.xml
+++ b/noetic-llama/src/ollamawrapper/package.xml
diff --git a/noetic-llama/src/ollamawrapper/src/capabilities/weather.py b/noetic-llama/src/ollamawrapper/src/capabilities/weather.py
index 0108ee5..4d6e073 100644
--- a/noetic-llama/src/ollamawrapper/src/capabilities/weather.py
+++ b/noetic-llama/src/ollamawrapper/src/capabilities/weather.py
@@ -3,8 +3,10 @@ import dotenv
import geocoder
import requests
-if os.path.exists(os.path.join(os.path.dirname(__file__), "..", "..", "..", "apikeys.env")):
- dotenv.load_dotenv(os.path.join(os.path.dirname(__file__), "..", "..", "..", "apikeys.env"))
+APIKEYS_PATH = "/home/eden/Documents/noetic-llama/noetic-llama/apikeys.env"
+# APIKEYS_PATH = os.path.join(os.path.dirname(__file__), "..", "..", "..", "apikeys.env")
+if os.path.exists(APIKEYS_PATH):
+ dotenv.load_dotenv(APIKEYS_PATH)
if "BINGMAPS" not in os.environ:
raise Exception("'BINGMAPS' API key environment variable not found")
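
Note that this change pins apikeys.env to an absolute path on one machine and keeps the old relative lookup only as a comment. Purely as a suggestion, not what the commit does, a more portable variant could let an environment variable override the in-tree default:

    import os
    import dotenv

    # "APIKEYS_ENV_PATH" is a hypothetical override variable, not used by the package.
    APIKEYS_PATH = os.environ.get(
        "APIKEYS_ENV_PATH",
        os.path.join(os.path.dirname(__file__), "..", "..", "..", "apikeys.env"),
    )
    if os.path.exists(APIKEYS_PATH):
        dotenv.load_dotenv(APIKEYS_PATH)
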
diff --git a/noetic-llama/src/ollamawrapper/src/ollamawrapper.py b/noetic-llama/src/ollamawrapper/src/ollamawrapper.py
index f7abec1..f344299 100644
--- a/noetic-llama/src/ollamawrapper/src/ollamawrapper.py
+++ b/noetic-llama/src/ollamawrapper/src/ollamawrapper.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
from dataclasses import dataclass
-from ollamawrapper.srv import OllamaCall, OllamaCallResponse
+from ollamamessages.srv import OllamaCall, OllamaCallResponse
import inspect
import typing
import jinja2
@@ -53,7 +53,7 @@ def getfunctioncapabilities():
def get_functions(ollama_output):
return [f.strip() for f in ollama_output[8:].strip().split(";") if f != ""]
-def main():
+def main(prompt):
functioncapabilities = getfunctioncapabilities()
modelfile = functioncapabilities.to_modelfile("nexusraven:13b-v2-q3_K_S")
@@ -63,18 +63,27 @@ def main():
# with open("Modelfile", "r") as f:
# ollama.create(model = "temp", modelfile= f.read())
- ollama_output = client.generate(model='temp', prompt='What\'s the weather like in Lincoln right now? What\'s 2 + 2?', options={"stop": ["Thought:"]})
- print(ollama_output)
+ ollama_output = client.generate(model='temp', prompt = prompt, options={"stop": ["Thought:"]})
+ #print(ollama_output)
for func_str in get_functions(ollama_output["response"]):
print(func_str + ":")
exec(func_str)
client.delete("temp")
+ return ollama_output
def handle_ollama_call(req):
- print("Recieved ollama request %s" % req.input)
- return OllamaCallResponse(1, 2, 3, 4, 5, 6)
+ print("Received ollama request '%s'" % req.input)
+ o = main(req.input)
+ # print(o.keys())
+ return OllamaCallResponse(
+ o["total_duration"],
+ o["load_duration"],
+ o["prompt_eval_duration"],
+ o["eval_count"],
+ o["eval_duration"]
+ )
def handle_ollama_server():
rospy.init_node("ollama_wrapper_server")
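
The rest of handle_ollama_server falls outside this hunk. A rough sketch of how the handler above would typically be advertised (again, the service name "ollama_call" is an assumption):

    import rospy
    from ollamamessages.srv import OllamaCall

    def handle_ollama_server():
        rospy.init_node("ollama_wrapper_server")
        # Advertise the service and block; the name is assumed, not shown in this diff.
        rospy.Service("ollama_call", OllamaCall, handle_ollama_call)
        rospy.spin()
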
diff --git a/noetic-llama/src/ollamawrapper/srv/OllamaCall.srv b/noetic-llama/src/ollamawrapper/srv/OllamaCall.srv
deleted file mode 100755
index 872a74b..0000000
--- a/noetic-llama/src/ollamawrapper/srv/OllamaCall.srv
+++ /dev/null
@@ -1,8 +0,0 @@
-text input
----
-int64 total_duration
-int64 load_duration
-int64 prompt_eval_count
-int64 prompt_eval_duration
-int64 eval_count
-int64 eval_duration
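
With this file deleted, the service definition presumably lives in the separate ollamamessages package named by the new import. Its exact contents are not part of this diff; based on the fields the handler populates, it would look roughly like:

    # Assumed ollamamessages/srv/OllamaCall.srv -- not shown in this commit.
    string input
    ---
    int64 total_duration
    int64 load_duration
    int64 prompt_eval_duration
    int64 eval_count
    int64 eval_duration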