33 | 33 | "source": [
34 | 34 | "// Usual setup: importing Semantic Kernel SDK and SkiaSharp, used to display images inline.\n",
35 | 35 | "\n",
36 |    | - "#r \"nuget: Microsoft.SemanticKernel, 1.0.0-rc3\"\n",
   | 36 | + "#r \"nuget: Microsoft.SemanticKernel, 1.0.0-rc4\"\n",
37 | 37 | "#r \"nuget: SkiaSharp, 2.88.3\"\n",
38 | 38 | "\n",
39 | 39 | "#!import config/Settings.cs\n",
40 | 40 | "#!import config/Utils.cs\n",
41 | 41 | "#!import config/SkiaUtils.cs\n",
42 | 42 | "\n",
43 | 43 | "using Microsoft.SemanticKernel;\n",
44 |    | - "using Microsoft.SemanticKernel.AI.TextToImage;\n",
45 |    | - "using Microsoft.SemanticKernel.AI.Embeddings;\n",
46 |    | - "using Microsoft.SemanticKernel.Connectors.AI.OpenAI;\n",
   | 44 | + "using Microsoft.SemanticKernel.TextToImage;\n",
   | 45 | + "using Microsoft.SemanticKernel.Embeddings;\n",
   | 46 | + "using Microsoft.SemanticKernel.Connectors.OpenAI;\n",
47 | 47 | "using System.Numerics.Tensors;"
48 | 48 | ]
49 | 49 | },
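For readability, here is the updated setup cell assembled from the rows above, unescaped from the notebook JSON (nothing added beyond what the diff shows):

    // Usual setup: importing Semantic Kernel SDK and SkiaSharp, used to display images inline.

    #r "nuget: Microsoft.SemanticKernel, 1.0.0-rc4"
    #r "nuget: SkiaSharp, 2.88.3"

    #!import config/Settings.cs
    #!import config/Utils.cs
    #!import config/SkiaUtils.cs

    using Microsoft.SemanticKernel;
    using Microsoft.SemanticKernel.TextToImage;
    using Microsoft.SemanticKernel.Embeddings;
    using Microsoft.SemanticKernel.Connectors.OpenAI;
    using System.Numerics.Tensors;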
78 | 78 | },
79 | 79 | "outputs": [],
80 | 80 | "source": [
   | 81 | + "using Kernel = Microsoft.SemanticKernel.Kernel;\n",
   | 82 | + "\n",
81 | 83 | "#pragma warning disable SKEXP0001, SKEXP0002, SKEXP0011, SKEXP0012\n",
82 | 84 | "\n",
83 | 85 | "// Load OpenAI credentials from config/settings.json\n",
84 | 86 | "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n",
85 | 87 | "\n",
86 | 88 | "// Configure the three AI features: text embedding (using Ada), text completion (using DaVinci 3), image generation (DALL-E 2)\n",
87 |    | - "var builder = new KernelBuilder();\n",
   | 89 | + "var builder = Kernel.CreateBuilder();\n",
88 | 90 | "\n",
89 | 91 | "if(useAzureOpenAI)\n",
90 | 92 | "{\n",
91 |    | - " builder.AddAzureOpenAITextEmbeddingGeneration(\"text-embedding-ada-002\", \"model-id\", azureEndpoint, apiKey);\n",
92 |    | - " builder.AddAzureOpenAIChatCompletion(model, \"model-id\", azureEndpoint, apiKey);\n",
93 |    | - " builder.AddAzureOpenAITextToImage(azureEndpoint, \"model-id\", apiKey);\n",
   | 93 | + " builder.AddAzureOpenAITextEmbeddingGeneration(\"text-embedding-ada-002\", azureEndpoint, apiKey);\n",
   | 94 | + " builder.AddAzureOpenAIChatCompletion(model, azureEndpoint, apiKey);\n",
   | 95 | + " builder.AddAzureOpenAITextToImage(azureEndpoint, apiKey);\n",
94 | 96 | "}\n",
95 | 97 | "else\n",
96 | 98 | "{\n",

105 | 107 | "var dallE = kernel.GetRequiredService<ITextToImageService>();\n",
106 | 108 | "\n",
107 | 109 | "// Get AI service instance used to extract embedding from a text\n",
108 |     | - "var textEmbedding = kernel.GetRequiredService<ITextEmbeddingGeneration>();"
    | 110 | + "var textEmbedding = kernel.GetRequiredService<ITextEmbeddingGenerationService>();"
109 | 111 | ]
110 | 112 | },
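Assembled the same way, the Azure OpenAI path of the updated configuration cell reads roughly as below. The non-Azure branch and the line that builds the kernel fall outside the visible hunk, so the builder.Build() call and the empty else block are assumptions based on the surrounding code, not part of the diff:

    using Kernel = Microsoft.SemanticKernel.Kernel;

    #pragma warning disable SKEXP0001, SKEXP0002, SKEXP0011, SKEXP0012

    // Load OpenAI credentials from config/settings.json
    var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();

    // Configure the three AI features: text embedding, chat completion, image generation
    var builder = Kernel.CreateBuilder();

    if (useAzureOpenAI)
    {
        builder.AddAzureOpenAITextEmbeddingGeneration("text-embedding-ada-002", azureEndpoint, apiKey);
        builder.AddAzureOpenAIChatCompletion(model, azureEndpoint, apiKey);
        builder.AddAzureOpenAITextToImage(azureEndpoint, apiKey);
    }
    else
    {
        // OpenAI (non-Azure) registrations are hidden in the collapsed part of the diff
    }

    // Assumed: the kernel is built from the configured services (not shown in the hunk)
    var kernel = builder.Build();

    // Get AI service instance used to generate images (DALL-E)
    var dallE = kernel.GetRequiredService<ITextToImageService>();

    // Get AI service instance used to extract embedding from a text
    var textEmbedding = kernel.GetRequiredService<ITextEmbeddingGenerationService>();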
111 | 113 | {

154 | 156 | "var genImgDescription = kernel.CreateFunctionFromPrompt(prompt, executionSettings);\n",
155 | 157 | "\n",
156 | 158 | "var random = new Random().Next(0, 200);\n",
157 |     | - "var imageDescriptionResult = await kernel.InvokeAsync(genImgDescription, new KernelArguments($\"{random}\"));\n",
    | 159 | + "var imageDescriptionResult = await kernel.InvokeAsync(genImgDescription, new() { [\"input\"] = random });\n",
158 | 160 | "var imageDescription = imageDescriptionResult.ToString();\n",
159 | 161 | "\n",
160 | 162 | "// Use DALL-E 2 to generate an image. OpenAI in this case returns a URL (though you can ask to return a base64 image)\n",
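The rc4 invocation passes the input as a named KernelArguments entry instead of a positional string, so the prompt template is assumed to reference an {{$input}} variable. A minimal sketch of the call sequence follows; the DALL-E call itself sits outside the visible hunk, and GenerateImageAsync(description, width, height) is the assumed rc4 signature:

    // Create the prompt function that turns a random number into an image description
    var genImgDescription = kernel.CreateFunctionFromPrompt(prompt, executionSettings);

    var random = new Random().Next(0, 200);

    // rc4 style: arguments are a dictionary keyed by the template variable name ({{$input}} assumed)
    var imageDescriptionResult = await kernel.InvokeAsync(genImgDescription, new() { ["input"] = random });
    var imageDescription = imageDescriptionResult.ToString();

    // Use DALL-E 2 to generate an image; OpenAI returns a URL here
    // (this call is not part of the visible diff; signature assumed)
    var imageUrl = await dallE.GenerateImageAsync(imageDescription.Trim(), 256, 256);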