Commit e597b8a

docs(py/plugins): convert docstring examples to >>> format
Converts markdown and :: code blocks in docstrings to Python doctest format (>>>) to avoid false positives from in-function import checks. The one remaining warning, in microsoft-foundry, comes from a legitimate try/except import of optional dependencies, which is an acceptable pattern.
1 parent 88eab08 · commit e597b8a
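For context, a minimal sketch of the conversion this commit applies (illustrative code, not taken from any of the changed files): inside a :: literal block or a fenced python block, indented "from ... import ..." lines can look like in-function imports to a text-based check, while doctest-style >>> / ... prefixes mark them unambiguously as example input.

    # Before: a '::' literal block; the indented import below may be flagged
    # by a text-based in-function-import check (class and example are illustrative).
    class BeforePlugin:
        """Example plugin.

        Example::

            from genkit import Genkit

            ai = Genkit(plugins=[BeforePlugin()])
        """

    # After: the same example in doctest format; the '>>>' prefix keeps the
    # line from matching as an import statement inside a function or class body.
    class AfterPlugin:
        """Example plugin.

        Example:
            >>> from genkit import Genkit
            >>>
            >>> ai = Genkit(plugins=[AfterPlugin()])
        """

The microsoft-foundry warning the message leaves in place refers to the usual guarded import of an optional dependency, roughly this shape (module name hypothetical):

    try:
        import some_optional_sdk  # optional dependency; may not be installed
    except ImportError:
        some_optional_sdk = None  # plugin stays importable without the extra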

File tree

    py/plugins/cloudflare-workers-ai/src/genkit/plugins/cloudflare_workers_ai/plugin.py
    py/plugins/google-genai/src/genkit/plugins/google_genai/evaluators/__init__.py
    py/plugins/google-genai/src/genkit/plugins/google_genai/google.py
    py/plugins/google-genai/src/genkit/plugins/google_genai/rerankers/__init__.py
    py/plugins/huggingface/src/genkit/plugins/huggingface/plugin.py
    py/plugins/mcp/src/genkit/plugins/mcp/server.py
    py/plugins/mistral/src/genkit/plugins/mistral/plugin.py

7 files changed: 118 additions & 129 deletions


py/plugins/cloudflare-workers-ai/src/genkit/plugins/cloudflare_workers_ai/plugin.py

Lines changed: 10 additions & 11 deletions
@@ -81,17 +81,16 @@ class CloudflareWorkersAI(Plugin):
     This plugin provides access to Cloudflare Workers AI models for text
     generation and embeddings.

-    Example::
-
-        from genkit import Genkit
-        from genkit.plugins.cloudflare_workers_ai import CloudflareWorkersAI, cloudflare_model
-
-        ai = Genkit(
-            plugins=[CloudflareWorkersAI()],
-            model=cloudflare_model('@cf/meta/llama-3.1-8b-instruct'),
-        )
-
-        response = await ai.generate(prompt='Hello, world!')
+    Example:
+        >>> from genkit import Genkit
+        >>> from genkit.plugins.cloudflare_workers_ai import CloudflareWorkersAI, cloudflare_model
+        >>>
+        >>> ai = Genkit(
+        ...     plugins=[CloudflareWorkersAI()],
+        ...     model=cloudflare_model('@cf/meta/llama-3.1-8b-instruct'),
+        ... )
+        >>>
+        >>> response = await ai.generate(prompt='Hello, world!')

     Attributes:
         account_id: Cloudflare account ID.

py/plugins/google-genai/src/genkit/plugins/google_genai/evaluators/__init__.py

Lines changed: 27 additions & 27 deletions
@@ -96,33 +96,33 @@
     +-----------------------------+-------------------------------------------+

 Example:
-    Running evaluations::
-
-        from genkit import Genkit
-        from genkit.plugins.google_genai import VertexAI
-        from genkit.plugins.google_genai.evaluators import VertexAIEvaluationMetricType
-
-        ai = Genkit(plugins=[VertexAI(project='my-project')])
-
-        # Prepare test dataset
-        dataset = [
-            {
-                'input': 'Summarize this article about AI...',
-                'output': 'AI is transforming industries...',
-                'reference': 'The article discusses how AI impacts...',
-                'context': ['Article content here...'],
-            }
-        ]
-
-        # Run fluency evaluation
-        results = await ai.evaluate(
-            evaluator='vertexai/fluency',
-            dataset=dataset,
-        )
-
-        for result in results:
-            print(f'Score: {result.evaluation.score}')
-            print(f'Reasoning: {result.evaluation.details.get("reasoning")}')
+    Running evaluations:
+
+        >>> from genkit import Genkit
+        >>> from genkit.plugins.google_genai import VertexAI
+        >>> from genkit.plugins.google_genai.evaluators import VertexAIEvaluationMetricType
+        >>>
+        >>> ai = Genkit(plugins=[VertexAI(project='my-project')])
+        >>>
+        >>> # Prepare test dataset
+        >>> dataset = [
+        ...     {
+        ...         'input': 'Summarize this article about AI...',
+        ...         'output': 'AI is transforming industries...',
+        ...         'reference': 'The article discusses how AI impacts...',
+        ...         'context': ['Article content here...'],
+        ...     }
+        ... ]
+        >>>
+        >>> # Run fluency evaluation
+        >>> results = await ai.evaluate(
+        ...     evaluator='vertexai/fluency',
+        ...     dataset=dataset,
+        ... )
+        >>>
+        >>> for result in results:
+        ...     print(f'Score: {result.evaluation.score}')
+        ...     print(f'Reasoning: {result.evaluation.details.get("reasoning")}')

 Caveats:
     - Requires Google Cloud project with Vertex AI API enabled

py/plugins/google-genai/src/genkit/plugins/google_genai/google.py

Lines changed: 1 addition & 2 deletions
@@ -95,15 +95,14 @@
 import os
 from typing import Any

-from genkit.blocks.background_model import BackgroundAction
-
 from google import genai
 from google.auth.credentials import Credentials
 from google.genai.client import DebugConfig
 from google.genai.types import HttpOptions, HttpOptionsDict

 import genkit.plugins.google_genai.constants as const
 from genkit.ai import GENKIT_CLIENT_HEADER, Plugin
+from genkit.blocks.background_model import BackgroundAction
 from genkit.blocks.document import Document
 from genkit.blocks.embedding import EmbedderOptions, EmbedderSupports, embedder_action_metadata
 from genkit.blocks.model import model_action_metadata

py/plugins/google-genai/src/genkit/plugins/google_genai/rerankers/__init__.py

Lines changed: 38 additions & 38 deletions
@@ -87,44 +87,44 @@
     +--------------------------------+-----------------------------------------+

 Example:
-    Basic reranking::
-
-        from genkit import Genkit
-        from genkit.plugins.google_genai import VertexAI
-
-        ai = Genkit(plugins=[VertexAI(project='my-project')])
-
-        # Rerank documents after retrieval
-        ranked_docs = await ai.rerank(
-            reranker='vertexai/semantic-ranker-default@latest',
-            query='What is machine learning?',
-            documents=retrieved_docs,
-            options={'top_n': 5},
-        )
-
-    Full RAG pipeline with reranking::
-
-        # 1. Retrieve initial candidates
-        candidates = await ai.retrieve(
-            retriever='my-retriever',
-            query='How do neural networks learn?',
-            options={'limit': 50},
-        )
-
-        # 2. Rerank for quality
-        ranked = await ai.rerank(
-            reranker='vertexai/semantic-ranker-default@latest',
-            query='How do neural networks learn?',
-            documents=candidates,
-            options={'top_n': 5},
-        )
-
-        # 3. Generate with top results
-        response = await ai.generate(
-            model='vertexai/gemini-2.0-flash',
-            prompt='Explain how neural networks learn.',
-            docs=ranked,
-        )
+    Basic reranking:
+
+        >>> from genkit import Genkit
+        >>> from genkit.plugins.google_genai import VertexAI
+        >>>
+        >>> ai = Genkit(plugins=[VertexAI(project='my-project')])
+        >>>
+        >>> # Rerank documents after retrieval
+        >>> ranked_docs = await ai.rerank(
+        ...     reranker='vertexai/semantic-ranker-default@latest',
+        ...     query='What is machine learning?',
+        ...     documents=retrieved_docs,
+        ...     options={'top_n': 5},
+        ... )
+
+    Full RAG pipeline with reranking:
+
+        >>> # 1. Retrieve initial candidates
+        >>> candidates = await ai.retrieve(
+        ...     retriever='my-retriever',
+        ...     query='How do neural networks learn?',
+        ...     options={'limit': 50},
+        ... )
+        >>>
+        >>> # 2. Rerank for quality
+        >>> ranked = await ai.rerank(
+        ...     reranker='vertexai/semantic-ranker-default@latest',
+        ...     query='How do neural networks learn?',
+        ...     documents=candidates,
+        ...     options={'top_n': 5},
+        ... )
+        >>>
+        >>> # 3. Generate with top results
+        >>> response = await ai.generate(
+        ...     model='vertexai/gemini-2.0-flash',
+        ...     prompt='Explain how neural networks learn.',
+        ...     docs=ranked,
+        ... )

 Caveats:
     - Requires Google Cloud project with Discovery Engine API enabled

py/plugins/huggingface/src/genkit/plugins/huggingface/plugin.py

Lines changed: 14 additions & 17 deletions
@@ -41,25 +41,22 @@ class HuggingFace(Plugin):
     enabling the use of 1,000,000+ models within the Genkit framework.

     Example:
-        ```python
-        from genkit import Genkit
-        from genkit.plugins.huggingface import HuggingFace
-
-        ai = Genkit(
-            plugins=[HuggingFace()],
-            model='huggingface/meta-llama/Llama-3.3-70B-Instruct',
-        )
-
-        response = await ai.generate(prompt='Hello!')
-        ```
+        >>> from genkit import Genkit
+        >>> from genkit.plugins.huggingface import HuggingFace
+        >>>
+        >>> ai = Genkit(
+        ...     plugins=[HuggingFace()],
+        ...     model='huggingface/meta-llama/Llama-3.3-70B-Instruct',
+        ... )
+        >>>
+        >>> response = await ai.generate(prompt='Hello!')

     Using Inference Providers for faster inference:
-        ```python
-        ai = Genkit(
-            plugins=[HuggingFace(provider='groq')],  # Use Groq for speed
-            model='huggingface/meta-llama/Llama-3.3-70B-Instruct',
-        )
-        ```
+
+        >>> ai = Genkit(
+        ...     plugins=[HuggingFace(provider='groq')],  # Use Groq for speed
+        ...     model='huggingface/meta-llama/Llama-3.3-70B-Instruct',
+        ... )
     """

     name = HUGGINGFACE_PLUGIN_NAME

py/plugins/mcp/src/genkit/plugins/mcp/server.py

Lines changed: 19 additions & 23 deletions
@@ -450,28 +450,24 @@ def create_mcp_server(ai: Genkit, options: McpServerOptions) -> McpServer:
         GenkitMcpServer instance.

     Example:
-        ```python
-        from genkit.ai import Genkit
-        from genkit.plugins.mcp import create_mcp_server, McpServerOptions
-
-        ai = Genkit()
-
-
-        # Define some tools and resources
-        @ai.tool()
-        def add(a: int, b: int) -> int:
-            return a + b
-
-
-        ai.define_resource(
-            name='my_resource',
-            uri='my://resource',
-            fn=lambda req: {'content': [{'text': 'resource content'}]},
-        )
-
-        # Create and start MCP server
-        server = create_mcp_server(ai, McpServerOptions(name='my-server'))
-        await server.start()
-        ```
+        >>> from genkit.ai import Genkit
+        >>> from genkit.plugins.mcp import create_mcp_server, McpServerOptions
+        >>>
+        >>> ai = Genkit()
+        >>>
+        >>> # Define some tools and resources
+        >>> @ai.tool()
+        ... def add(a: int, b: int) -> int:
+        ...     return a + b
+        >>>
+        >>> ai.define_resource(
+        ...     name='my_resource',
+        ...     uri='my://resource',
+        ...     fn=lambda req: {'content': [{'text': 'resource content'}]},
+        ... )
+        >>>
+        >>> # Create and start MCP server
+        >>> server = create_mcp_server(ai, McpServerOptions(name='my-server'))
+        >>> await server.start()
     """
     return McpServer(ai, options)

py/plugins/mistral/src/genkit/plugins/mistral/plugin.py

Lines changed: 9 additions & 11 deletions
@@ -41,17 +41,15 @@ class Mistral(Plugin):
     enabling the use of Mistral models within the Genkit framework.

     Example:
-        ```python
-        from genkit import Genkit
-        from genkit.plugins.mistral import Mistral
-
-        ai = Genkit(
-            plugins=[Mistral()],
-            model='mistral/mistral-large-latest',
-        )
-
-        response = await ai.generate(prompt='Hello!')
-        ```
+        >>> from genkit import Genkit
+        >>> from genkit.plugins.mistral import Mistral
+        >>>
+        >>> ai = Genkit(
+        ...     plugins=[Mistral()],
+        ...     model='mistral/mistral-large-latest',
+        ... )
+        >>>
+        >>> response = await ai.generate(prompt='Hello!')
     """

     name = MISTRAL_PLUGIN_NAME
