@@ -0,0 +1,5 @@
+import ollama
+
def get_response_vicuna(prompt, model='llama3:latest'):
    """Send *prompt* to a locally served Ollama model and return its text reply.

    Args:
        prompt: The prompt string forwarded verbatim to the model.
        model: Ollama model tag to query. Defaults to 'llama3:latest' for
            backward compatibility — NOTE(review): the function name says
            "vicuna" but the historical default is llama3; callers can now
            pass the model explicitly instead of relying on the mismatch.

    Returns:
        The generated text, taken from the 'response' field of the
        ollama.generate() result.
    """
    # ollama.generate returns a mapping that includes the generated text
    # under the 'response' key.
    content = ollama.generate(model=model, prompt=prompt)
    return content['response']
+from src.kg_construction.llm_construct_kg import get_response_vicuna
if __name__ == "__main__":
    # Smoke-test the LLM wrapper with a simple greeting prompt.
    reply = get_response_vicuna("你好")
    print(reply)