Direct Python integration for maximum flexibility and control.
pip install cognitora openai
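The client reads its credentials from the COGNITORA_API_KEY environment variable, as the example below shows. A quick guard like this (a minimal sketch, not part of the SDK) can fail fast with a clear message if the key is missing:

import os

# Fail fast if the API key has not been exported in the environment
if not os.getenv("COGNITORA_API_KEY"):
    raise RuntimeError(
        "COGNITORA_API_KEY is not set; export it before running the examples below."
    )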
import os
from cognitora import Cognitora

# Initialize the client
cognitora = Cognitora(
    api_key=os.getenv("COGNITORA_API_KEY"),
    base_url="https://api.cognitora.dev"
)

class CodeInterpreter:
    def __init__(self):
        # Create a persistent interpreter session with dedicated resources
        self.session = cognitora.code_interpreter.create_session(
            language="python",
            timeout_minutes=30,
            resources={
                "cpu_cores": 1.0,
                "memory_mb": 512,
                "storage_gb": 5
            }
        )

    def execute_code(self, code: str, language: str = "python") -> dict:
        """Run a code snippet inside the persistent session."""
        try:
            execution = cognitora.code_interpreter.execute(
                code=code,
                language=language,
                session_id=self.session.session_id,
                timeout_seconds=30
            )

            return {
                "success": execution.data.status == "completed",
                "outputs": execution.data.outputs,
                "execution_time_ms": execution.data.execution_time_ms,
                "session_id": execution.data.session_id
            }
        except Exception as e:
            return {
                "success": False,
                "error": str(e),
                "outputs": [],
                "execution_time_ms": 0
            }

    def execute_compute(self, image: str, command: list, **kwargs) -> dict:
        """Execute code using the compute API for containerized workloads."""
        try:
            execution = cognitora.containers.create_container(
                image=image,
                command=command,
                cpu_cores=kwargs.get('cpu_cores', 1.0),
                memory_mb=kwargs.get('memory_mb', 512),
                max_cost_credits=kwargs.get('max_cost_credits', 10),
                storage_gb=kwargs.get('storage_gb', 5),
                networking=kwargs.get('networking', False)  # Secure by default for containers
            )

            return {
                "success": True,
                "execution_id": execution.id,
                "status": execution.status,
                "image": execution.image,
                "command": execution.command
            }
        except Exception as e:
            return {
                "success": False,
                "error": str(e)
            }

# Example usage
interpreter = CodeInterpreter()

# Execute Python code in a session
result = interpreter.execute_code("print('Hello from Cognitora!')")
print("Session result:", result)

# Execute a containerized workload
compute_result = interpreter.execute_compute(
    image="docker.io/library/python:3.11-slim",
    command=["python", "-c", "print('Hello from container!')"],
    cpu_cores=1.0,
    memory_mb=512,
    max_cost_credits=10
)
print("Compute result:", compute_result)
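Because execute_code reuses the same session_id for every call, state created in one execution should remain available to later ones (a minimal sketch, assuming the session persists interpreter state as the create_session parameters suggest):

# Define data in one execution ...
interpreter.execute_code("data = [1, 2, 3, 4, 5]")

# ... and reuse it in a later execution within the same session
stats = interpreter.execute_code(
    "import statistics\n"
    "print('mean:', statistics.mean(data))"
)
print("Stateful result:", stats)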
Get started with the Cognitora Python SDK in minutes. Secure, scalable, and ready for anything.