
🔌 Kernel Module

The kernel/ module provides Semantic Kernel-style service management for enterprise applications.


Module Overview


File Structure

src/openstackai/kernel/
├── __init__.py
├── kernel.py # Main Kernel class
├── services.py # Service registry
├── filters.py # Filter system
└── context.py # Execution context

Kernel

Central orchestration layer for AI applications.

Basic Usage

from openstackai.kernel import Kernel

# Create kernel
kernel = Kernel()

# Add services
kernel.add_service(openai_provider, service_id="gpt4", is_default=True)
kernel.add_service(azure_provider, service_id="azure")
kernel.add_service(redis_memory, service_id="memory")

# Add plugins
kernel.add_plugin(WeatherPlugin())
kernel.add_plugin(SearchPlugin())

# Invoke function
result = await kernel.invoke("weather", "get_forecast", city="NYC")

Kernel Architecture


KernelBuilder

Fluent API for kernel construction.

from openstackai.kernel import KernelBuilder

kernel = (KernelBuilder()
    # Add LLM services
    .add_llm(
        openai_client,
        name="gpt4",
        is_default=True
    )
    .add_llm(
        azure_client,
        name="azure-gpt"
    )

    # Add memory
    .add_memory(
        redis_memory,
        name="cache"
    )

    # Add vector store
    .add_vector_store(
        chroma_db,
        name="vectors"
    )

    # Add plugins
    .add_plugin(WeatherPlugin())
    .add_plugin(SearchPlugin())

    # Add filters
    .add_filter(LoggingFilter())
    .add_filter(ValidationFilter())

    .build())

ServiceRegistry

Manage and access services.

Service Types

Registry Operations

from openstackai.kernel import ServiceRegistry, Service, ServiceType

registry = ServiceRegistry()

# Register service
registry.register(Service(
    service_id="gpt4",
    service_type=ServiceType.LLM,
    instance=openai_client,
    is_default=True
))

# Get service
llm = registry.get("gpt4")
default_llm = registry.get_default(ServiceType.LLM)

# List services
all_services = registry.list_services()
llm_services = registry.list_by_type(ServiceType.LLM)

FilterRegistry

Middleware for request/response processing.

Filter Types

Creating Filters

import time

from openstackai.kernel import Filter, FilterContext, FilterType

class LoggingFilter(Filter):
    """Log all kernel invocations."""

    filter_type = FilterType.PRE_AND_POST

    async def on_pre_invoke(self, context: FilterContext) -> FilterContext:
        print(f"Starting: {context.function_name}")
        context.start_time = time.time()
        return context

    async def on_post_invoke(self, context: FilterContext) -> FilterContext:
        elapsed = time.time() - context.start_time
        print(f"Completed: {context.function_name} in {elapsed:.2f}s")
        return context

class ValidationFilter(Filter):
    """Validate inputs before execution."""

    filter_type = FilterType.PRE

    async def on_pre_invoke(self, context: FilterContext) -> FilterContext:
        if not context.arguments.get("required_param"):
            raise ValueError("Missing required_param")
        return context

Registering Filters

kernel.add_filter(LoggingFilter())
kernel.add_filter(ValidationFilter())
kernel.add_filter(AuthenticationFilter(api_key="..."))

KernelContext

Execution context for kernel operations.

from openstackai.kernel import KernelContext

# Create context
context = KernelContext(
    user_id="user123",
    session_id="session456",
    metadata={"source": "api"}
)

# Execute with context
result = await kernel.invoke(
    "weather",
    "get_forecast",
    context=context,
    city="NYC"
)

Plugin System

Creating Plugins

from openstackai.kernel import KernelPlugin
from openstackai.skills import tool

class WeatherPlugin(KernelPlugin):
    """Weather information plugin."""

    name = "weather"
    description = "Get weather information"

    @tool(description="Get current weather")
    async def get_current(self, city: str) -> dict:
        return {"city": city, "temp": 72, "conditions": "Sunny"}

    @tool(description="Get weather forecast")
    async def get_forecast(self, city: str, days: int = 7) -> list:
        return [{"day": i, "temp": 70 + i} for i in range(days)]

Plugin Registration

# Register plugin
kernel.add_plugin(WeatherPlugin())

# Invoke plugin function
current = await kernel.invoke("weather", "get_current", city="NYC")
forecast = await kernel.invoke("weather", "get_forecast", city="NYC", days=5)

Creating Agents with Kernel

from openstackai.kernel import KernelBuilder

# Create kernel with services
kernel = (KernelBuilder()
    .add_llm(azure_client, name="azure", is_default=True)
    .add_memory(redis_memory)
    .add_plugin(WeatherPlugin())
    .add_plugin(SearchPlugin())
    .build())

# Create agent using kernel
agent = kernel.create_agent(
    name="Assistant",
    instructions="You are a helpful assistant.",
    plugins=["weather", "search"]
)

# Agent uses kernel services automatically
result = await agent.run("What's the weather in NYC?")

Full Example

import asyncio

from openstackai.kernel import Kernel, KernelBuilder
from openstackai.core.llm import OpenAIProvider
from openstackai.sessions import RedisSessionStore

# Initialize services
openai = OpenAIProvider(api_key="...")
azure = OpenAIProvider(endpoint="...", deployment="...")
redis = RedisSessionStore(url="redis://localhost:6379")

# Build kernel
kernel = (KernelBuilder()
    .add_llm(openai, name="openai", is_default=True)
    .add_llm(azure, name="azure")
    .add_memory(redis, name="sessions")
    .add_plugin(WeatherPlugin())
    .add_plugin(SearchPlugin())
    .add_plugin(DatabasePlugin())
    .add_filter(LoggingFilter())
    .add_filter(AuthFilter(api_key="..."))
    .build())

# Use kernel
async def main():
    # Direct invocation
    weather = await kernel.invoke("weather", "get_current", city="NYC")

    # Create and run agent
    agent = kernel.create_agent(
        name="DataAssistant",
        instructions="Help with data queries",
        plugins=["database"]
    )

    result = await agent.run("What customers are in NYC?")
    print(result.final_output)

asyncio.run(main())

Execution Flow


➡️ [[Sessions-Module]] | [[Skills-Module]] | [[Home]]