
1. Connect to llama3 in Ollama from Python
2. llama3 test passed

Air committed 4 weeks ago · commit c4da9ca281
2 changed files with 10 additions and 0 deletions:
  src/kg_construction/llm_construct_kg.py  +5 −0
  test/ollama_test.py  +5 −0

src/kg_construction/llm_construct_kg.py (+5 −0)

@@ -0,0 +1,5 @@
+import ollama
+
+def get_response_vicuna(prompt):
+    content = ollama.generate(model='llama3:latest', prompt=prompt)
+    return content['response']
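
For readers new to the Ollama Python client: ollama.generate blocks until the whole completion is available and returns the generated text under the 'response' key, which is what get_response_vicuna unpacks above. A streaming variant is sketched below; the stream_response helper name is mine, it assumes the client's stream=True option (not used in this commit), and it is illustrative only.

    import ollama

    def stream_response(prompt):
        # With stream=True, generate yields partial results as they arrive;
        # each chunk carries its piece of text under the 'response' key.
        # (Assumed behaviour of the ollama client's streaming mode, not part of the commit.)
        for chunk in ollama.generate(model='llama3:latest', prompt=prompt, stream=True):
            yield chunk['response']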

test/ollama_test.py (+5 −0)

@@ -0,0 +1,5 @@
+from src.kg_construction.llm_construct_kg import get_response_vicuna
+
+if __name__ == "__main__":
+    respond = get_response_vicuna("你好")
+    print(respond)
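
The test imports get_response_vicuna through the src.kg_construction package, so it is normally run from the repository root so that the import resolves (for example with python -m test.ollama_test, if test is a package), with the Ollama server running and the llama3 model already pulled. A slightly more defensive version of the same smoke test is sketched below; the error handling is an assumption, not part of the commit.

    from src.kg_construction.llm_construct_kg import get_response_vicuna

    if __name__ == "__main__":
        try:
            # "你好" means "Hello"; any short prompt works for a smoke test.
            print(get_response_vicuna("你好"))
        except Exception as exc:
            # Typically fails if the Ollama server is not running
            # or llama3:latest has not been pulled yet.
            print(f"Ollama request failed: {exc}")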