Integrate Hugging Face models with Cognitora using the Hugging Face Inference API: the model generates Python code and Cognitora executes it in an isolated session.
pip install huggingface_hub cognitora
from huggingface_hub import InferenceClient
from cognitora import Cognitora
import re

cognitora = Cognitora(api_key="your-cognitora-api-key")

def extract_code_block(llm_response):
    # Pull the first ```python ... ``` block out of the model's Markdown response.
    pattern = re.compile(r'```python\n(.*?)\n```', re.DOTALL)
    match = pattern.search(llm_response)
    if match:
        return match.group(1)
    return ""

async def run_huggingface_code_interpreter(user_query: str):
    # Create a persistent Cognitora session to execute the generated code in.
    session = cognitora.sessions.create(
        image="docker.io/library/python:3.11-slim",
        timeout=300,
        persistent=True
    )

    system_prompt = """You are a helpful coding assistant that can execute Python code in a Jupyter notebook.
You are given tasks to complete and you run Python code to solve them.
Generally, you follow these rules:
- ALWAYS FORMAT YOUR RESPONSE IN MARKDOWN
- ALWAYS RESPOND ONLY WITH CODE IN A CODE BLOCK LIKE THIS:
```python
{code}
```"""

    # Initialize the Hugging Face Inference client
    client = InferenceClient(
        provider="hf-inference",
        api_key="HF_INFERENCE_API_KEY"  # replace with your Hugging Face access token
    )

    # Ask the model to solve the task with Python code
    completion = client.chat.completions.create(
        model="Qwen/Qwen2.5-72B-Instruct",
        messages=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_query},
        ]
    )

    content = completion.choices[0].message.content
    code = extract_code_block(content)

    if code:
        # Run the generated code inside the Cognitora session
        execution = cognitora.compute.execute(
            session_id=session.id,
            command=["python", "-c", code]
        )

        return {
            "code": code,
            "result": execution.stdout,
            "error": execution.stderr,
            "exit_code": execution.exit_code
        }

    return {"error": "No code block found in response"}
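One way to drive the helper end to end is with asyncio.run; this is a minimal sketch, and the query string below is purely illustrative.

import asyncio

# Illustrative query; any task the model can solve with Python code works here.
result = asyncio.run(run_huggingface_code_interpreter(
    "Calculate the first 10 Fibonacci numbers and print them as a list."
))
print(result)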