Python: Update Notebooks to version 0.2.6.dev0; Fix bug in 0.2.5.dev0 (#761)

### Motivation and Context
- New release version 0.2.6
- Fix bug in 0.2.5.dev0 pip package
- Update notebooks to the latest pip package (0.2.6)

### Description of Changes
- fixed a param name error in the creation of a reference memory_record
- renamed text_service -> text_completion_service (consistency with .NET)
- renamed text_embedding_service -> text_embedding_generation_service (consistency with .NET); see the migration sketch below
- Updated notebooks accordingly
- bumped pip versions to 0.2.6.dev0
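
For existing code, a minimal migration sketch follows. The registration calls themselves come from the updated README and end-to-end tests in this commit; the connector import path, model names, and service ids are illustrative assumptions, and a `.env` file with OpenAI credentials is assumed.

```python
import semantic_kernel as sk
# NOTE: assumed import path for the OpenAI connectors; the end-to-end tests refer to
# this module via the `sk_oai` alias.
import semantic_kernel.connectors.ai.open_ai as sk_oai

kernel = sk.Kernel()
api_key, org_id = sk.openai_settings_from_dot_env()

# Before (<= 0.2.5.dev0):
#   kernel.config.add_text_service("dv", ...)
#   kernel.config.add_embedding_service("ada", ...)

# After (0.2.6.dev0):
kernel.config.add_text_completion_service(
    "dv", sk_oai.OpenAITextCompletion("text-davinci-003", api_key, org_id)
)
kernel.config.add_text_embedding_generation_service(
    "ada", sk_oai.OpenAITextEmbedding("text-embedding-ada-002", api_key, org_id)
)
```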
awharrison-28 authored May 2, 2023
1 parent 80ef70f commit 04206c6
Showing 27 changed files with 119 additions and 109 deletions.
4 changes: 2 additions & 2 deletions python/README.md
@@ -33,11 +33,11 @@ kernel = sk.Kernel()

# Prepare OpenAI service using credentials stored in the `.env` file
api_key, org_id = sk.openai_settings_from_dot_env()
kernel.config.add_text_service("dv", OpenAITextCompletion("text-davinci-003", api_key, org_id))
kernel.config.add_text_completion_service("dv", OpenAITextCompletion("text-davinci-003", api_key, org_id))

# Alternative using Azure:
# deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()
# kernel.config.add_text_service("dv", AzureTextCompletion(deployment, endpoint, api_key))
# kernel.config.add_text_completion_service("dv", AzureTextCompletion(deployment, endpoint, api_key))

# Wrap your prompt in a function
prompt = kernel.create_semantic_function("""
2 changes: 1 addition & 1 deletion python/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "semantic-kernel"
version = "0.2.5.dev"
version = "0.2.6.dev"
description = ""
authors = ["Microsoft <[email protected]>"]
readme = "README.md"
128 changes: 69 additions & 59 deletions python/semantic_kernel/kernel_config.py
@@ -25,19 +25,19 @@

class KernelConfig:
def __init__(self) -> None:
self._text_services: Dict[
self._text_completion_services: Dict[
str, Callable[["KernelBase"], TextCompletionClientBase]
] = {}
self._chat_services: Dict[
str, Callable[["KernelBase"], ChatCompletionClientBase]
] = {}
self._embedding_services: Dict[
self._text_embedding_generation_services: Dict[
str, Callable[["KernelBase"], EmbeddingGeneratorBase]
] = {}

self._default_text_service: Optional[str] = None
self._default_text_completion_service: Optional[str] = None
self._default_chat_service: Optional[str] = None
self._default_embedding_service: Optional[str] = None
self._default_text_embedding_generation_service: Optional[str] = None

self._retry_mechanism: RetryMechanism = PassThroughWithoutRetry()

@@ -46,14 +46,14 @@ def get_ai_service(
) -> Callable[["KernelBase"], T]:
matching_type = {}
if type == TextCompletionClientBase:
service_id = service_id or self._default_text_service
matching_type = self._text_services
service_id = service_id or self._default_text_completion_service
matching_type = self._text_completion_services
elif type == ChatCompletionClientBase:
service_id = service_id or self._default_chat_service
matching_type = self._chat_services
elif type == EmbeddingGeneratorBase:
service_id = service_id or self._default_embedding_service
matching_type = self._embedding_services
service_id = service_id or self._default_text_embedding_generation_service
matching_type = self._text_embedding_generation_services
else:
raise ValueError(f"Unknown AI service type: {type.__name__}")

@@ -64,16 +64,16 @@ def get_ai_service(

return matching_type[service_id]

def all_text_services(self) -> List[str]:
return list(self._text_services.keys())
def all_text_completion_services(self) -> List[str]:
return list(self._text_completion_services.keys())

def all_chat_services(self) -> List[str]:
return list(self._chat_services.keys())

def all_embedding_services(self) -> List[str]:
return list(self._embedding_services.keys())
def all_text_embedding_generation_services(self) -> List[str]:
return list(self._text_embedding_generation_services.keys())

def add_text_service(
def add_text_completion_service(
self,
service_id: str,
service: Union[
@@ -83,16 +83,16 @@ def add_text_service(
) -> "KernelConfig":
if not service_id:
raise ValueError("service_id must be a non-empty string")
if not overwrite and service_id in self._text_services:
if not overwrite and service_id in self._text_completion_services:
raise ValueError(
f"Text service with service_id '{service_id}' already exists"
)

self._text_services[service_id] = (
self._text_completion_services[service_id] = (
service if isinstance(service, Callable) else lambda _: service
)
if self._default_text_service is None:
self._default_text_service = service_id
if self._default_text_completion_service is None:
self._default_text_completion_service = service_id

return self

@@ -118,13 +118,13 @@ def add_chat_service(
self._default_chat_service = service_id

if isinstance(service, TextCompletionClientBase):
self.add_text_service(service_id, service)
if self._default_text_service is None:
self._default_text_service = service_id
self.add_text_completion_service(service_id, service)
if self._default_text_completion_service is None:
self._default_text_completion_service = service_id

return self

def add_embedding_service(
def add_text_embedding_generation_service(
self,
service_id: str,
service: Union[
@@ -134,28 +134,28 @@ def add_embedding_service(
) -> "KernelConfig":
if not service_id:
raise ValueError("service_id must be a non-empty string")
if not overwrite and service_id in self._embedding_services:
if not overwrite and service_id in self._text_embedding_generation_services:
raise ValueError(
f"Embedding service with service_id '{service_id}' already exists"
)

self._embedding_services[service_id] = (
self._text_embedding_generation_services[service_id] = (
service if isinstance(service, Callable) else lambda _: service
)
if self._default_embedding_service is None:
self._default_embedding_service = service_id
if self._default_text_embedding_generation_service is None:
self._default_text_embedding_generation_service = service_id

return self

# TODO: look harder at retry stuff

def set_default_text_service(self, service_id: str) -> "KernelConfig":
if service_id not in self._text_services:
def set_default_text_completion_service(self, service_id: str) -> "KernelConfig":
if service_id not in self._text_completion_services:
raise ValueError(
f"AI service with service_id '{service_id}' does not exist"
)

self._default_text_service = service_id
self._default_text_completion_service = service_id
return self

def set_default_chat_service(self, service_id: str) -> "KernelConfig":
@@ -167,20 +167,24 @@ def set_default_chat_service(self, service_id: str) -> "KernelConfig":
self._default_chat_service = service_id
return self

def set_default_embedding_service(self, service_id: str) -> "KernelConfig":
if service_id not in self._embedding_services:
def set_default_text_embedding_generation_service(
self, service_id: str
) -> "KernelConfig":
if service_id not in self._text_embedding_generation_services:
raise ValueError(
f"AI service with service_id '{service_id}' does not exist"
)

self._default_embedding_service = service_id
self._default_text_embedding_generation_service = service_id
return self

def get_text_service_service_id(self, service_id: Optional[str] = None) -> str:
if service_id is None or service_id not in self._text_services:
if self._default_text_service is None:
def get_text_completion_service_service_id(
self, service_id: Optional[str] = None
) -> str:
if service_id is None or service_id not in self._text_completion_services:
if self._default_text_completion_service is None:
raise ValueError("No default text service is set")
return self._default_text_service
return self._default_text_completion_service

return service_id

@@ -192,23 +196,27 @@ def get_chat_service_service_id(self, service_id: Optional[str] = None) -> str:

return service_id

def get_embedding_service_id(self, service_id: Optional[str] = None) -> str:
if service_id is None or service_id not in self._embedding_services:
if self._default_embedding_service is None:
def get_text_embedding_generation_service_id(
self, service_id: Optional[str] = None
) -> str:
if service_id is None or service_id not in self._text_embedding_generation_services:
if self._default_text_embedding_generation_service is None:
raise ValueError("No default embedding service is set")
return self._default_embedding_service
return self._default_text_embedding_generation_service

return service_id

def remove_text_service(self, service_id: str) -> "KernelConfig":
if service_id not in self._text_services:
def remove_text_completion_service(self, service_id: str) -> "KernelConfig":
if service_id not in self._text_completion_services:
raise ValueError(
f"AI service with service_id '{service_id}' does not exist"
)

del self._text_services[service_id]
if self._default_text_service == service_id:
self._default_text_service = next(iter(self._text_services), None)
del self._text_completion_services[service_id]
if self._default_text_completion_service == service_id:
self._default_text_completion_service = next(
iter(self._text_completion_services), None
)
return self

def remove_chat_service(self, service_id: str) -> "KernelConfig":
@@ -222,39 +230,41 @@ def remove_chat_service(self, service_id: str) -> "KernelConfig":
self._default_chat_service = next(iter(self._chat_services), None)
return self

def remove_embedding_service(self, service_id: str) -> "KernelConfig":
if service_id not in self._embedding_services:
def remove_text_embedding_generation_service(self, service_id: str) -> "KernelConfig":
if service_id not in self._text_embedding_generation_services:
raise ValueError(
f"AI service with service_id '{service_id}' does not exist"
)

del self._embedding_services[service_id]
if self._default_embedding_service == service_id:
self._default_embedding_service = next(iter(self._embedding_services), None)
del self._text_embedding_generation_services[service_id]
if self._default_text_embedding_generation_service == service_id:
self._default_text_embedding_generation_service = next(
iter(self._text_embedding_generation_services), None
)
return self

def clear_all_text_services(self) -> "KernelConfig":
self._text_services = {}
self._default_text_service = None
def clear_all_text_completion_services(self) -> "KernelConfig":
self._text_completion_services = {}
self._default_text_completion_service = None
return self

def clear_all_chat_services(self) -> "KernelConfig":
self._chat_services = {}
self._default_chat_service = None
return self

def clear_all_embedding_services(self) -> "KernelConfig":
self._embedding_services = {}
self._default_embedding_service = None
def clear_all_text_embedding_generation_services(self) -> "KernelConfig":
self._text_embedding_generation_services = {}
self._default_text_embedding_generation_service = None
return self

def clear_all_services(self) -> "KernelConfig":
self._text_services = {}
self._text_completion_services = {}
self._chat_services = {}
self._embedding_services = {}
self._text_embedding_generation_services = {}

self._default_text_service = None
self._default_text_completion_service = None
self._default_chat_service = None
self._default_embedding_service = None
self._default_text_embedding_generation_service = None

return self
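
Taken together, the renamed registry behaves as follows: the first service added in a category becomes its default, the `get_*_service_id` helpers fall back to that default, and removing the current default promotes the next registered service. A rough sketch of that behaviour; the connector import path, model names, and service ids are assumptions, while the method names come from the file above.

```python
import semantic_kernel as sk
import semantic_kernel.connectors.ai.open_ai as sk_oai  # assumed import path

kernel = sk.Kernel()
api_key, org_id = sk.openai_settings_from_dot_env()

kernel.config.add_text_completion_service(
    "dv", sk_oai.OpenAITextCompletion("text-davinci-003", api_key, org_id)
)
kernel.config.add_text_completion_service(
    "curie", sk_oai.OpenAITextCompletion("text-curie-001", api_key, org_id)
)

print(kernel.config.all_text_completion_services())            # ['dv', 'curie']
print(kernel.config.get_text_completion_service_service_id())  # 'dv' (first added is the default)

kernel.config.set_default_text_completion_service("curie")
print(kernel.config.get_text_completion_service_service_id())  # 'curie'

# Removing the current default promotes the next registered service.
kernel.config.remove_text_completion_service("curie")
print(kernel.config.get_text_completion_service_service_id())  # 'dv'
```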
@@ -19,7 +19,7 @@ def use_memory(
kernel = self.kernel()

if embeddings_generator is None:
service_id = kernel.config.get_embedding_service_id()
service_id = kernel.config.get_text_embedding_generation_service_id()
if not service_id:
raise ValueError("The embedding service id cannot be `None` or empty")

2 changes: 1 addition & 1 deletion python/semantic_kernel/memory/semantic_text_memory.py
@@ -90,7 +90,7 @@ async def save_reference_async(

embedding = await self._embeddings_generator.generate_embeddings_async([text])
data = MemoryRecord.reference_record(
id=external_id,
external_id=external_id,
source_name=external_source_name,
description=description,
embedding=embedding,
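
The fix above only changes the keyword name passed to `MemoryRecord.reference_record`. A minimal sketch of the corrected call, assuming the factory takes no other required arguments and lives at the import path shown; the id, source, description, and embedding values are illustrative.

```python
import numpy as np
from semantic_kernel.memory.memory_record import MemoryRecord  # assumed import path

record = MemoryRecord.reference_record(
    external_id="doc-001",        # previously passed (incorrectly) as `id=`
    source_name="github.com/microsoft/semantic-kernel",
    description="Sample external document reference",
    embedding=np.array([0.1, 0.2, 0.3]),
)
```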
2 changes: 1 addition & 1 deletion python/tests/end-to-end/chat.py
@@ -18,7 +18,7 @@
kernel = sk.Kernel()

api_key, org_id = sk.openai_settings_from_dot_env()
kernel.config.add_text_service(
kernel.config.add_text_completion_service(
"davinci-003", sk_oai.OpenAITextCompletion("text-davinci-003", api_key, org_id)
)

4 changes: 2 additions & 2 deletions python/tests/end-to-end/memory.py
@@ -108,10 +108,10 @@ async def main() -> None:
kernel = sk.Kernel()

api_key, org_id = sk.openai_settings_from_dot_env()
kernel.config.add_text_service(
kernel.config.add_text_completion_service(
"dv", sk_oai.OpenAITextCompletion("text-davinci-003", api_key, org_id)
)
kernel.config.add_embedding_service(
kernel.config.add_text_embedding_generation_service(
"ada", sk_oai.OpenAITextEmbedding("text-embedding-ada-002", api_key, org_id)
)

4 changes: 2 additions & 2 deletions python/tests/end-to-end/skills_from_dir.py
@@ -15,12 +15,12 @@
# Configure AI service used by the kernel
if useAzureOpenAI:
api_key, endpoint = sk.azure_openai_settings_from_dot_env()
kernel.config.add_text_service(
kernel.config.add_text_completion_service(
service_id, sk_oai.AzureTextCompletion(model, api_key, endpoint)
)
else:
api_key, org_id = sk.openai_settings_from_dot_env()
kernel.config.add_text_service(
kernel.config.add_text_completion_service(
service_id, sk_oai.OpenAITextCompletion(model, api_key, org_id)
)

@@ -24,8 +24,8 @@ async def test_azure_chat_completion_with_skills():
deployment_name = "gpt-35-turbo"

# Configure LLM service
kernel.config.add_text_service(
"text_service", sk_oai.AzureChatCompletion(deployment_name, endpoint, api_key)
kernel.config.add_text_completion_service(
"text_completion", sk_oai.AzureChatCompletion(deployment_name, endpoint, api_key)
)

await e2e_text_completion.summarize_function_test(kernel)
@@ -24,8 +24,8 @@ async def test_azure_text_completion_with_skills():
deployment_name = "text-davinci-003"

# Configure LLM service
kernel.config.add_text_service(
"text_service", sk_oai.AzureTextCompletion(deployment_name, endpoint, api_key)
kernel.config.add_text_completion_service(
"text_completion", sk_oai.AzureTextCompletion(deployment_name, endpoint, api_key)
)

await e2e_text_completion.summarize_function_test(kernel)
@@ -14,7 +14,7 @@ async def test_hf_local_text2text_generation_service_with_skills():
kernel = sk.Kernel()

# Configure LLM service
kernel.config.add_text_service(
kernel.config.add_text_completion_service(
"google/flan-t5-base",
sk_hf.HuggingFaceTextCompletion(
"google/flan-t5-base", task="text2text-generation"
@@ -14,7 +14,7 @@ async def test_hf_local_text_generation_service_with_skills():
kernel = sk.Kernel()

# Configure LLM service
kernel.config.add_text_service(
kernel.config.add_text_completion_service(
"gpt2", sk_hf.HuggingFaceTextCompletion("gpt2", task="text-generation")
)

@@ -14,7 +14,7 @@ async def test_hf_local_summarization_service_with_skills():
kernel = sk.Kernel()

# Configure LLM service
kernel.config.add_text_service(
kernel.config.add_text_completion_service(
"facebook/bart-large-cnn",
sk_hf.HuggingFaceTextCompletion(
"facebook/bart-large-cnn", task="summarization"