diff --git a/.devcontainer/Ollama/devcontainer.json b/.devcontainer/Ollama/devcontainer.json
index 74d91a9..81bc14b 100644
--- a/.devcontainer/Ollama/devcontainer.json
+++ b/.devcontainer/Ollama/devcontainer.json
@@ -37,7 +37,7 @@
 	},
 
 	// Use 'postCreateCommand' to run commands after the container is created.
-	"postCreateCommand": "sudo dotnet workload update && sudo dotnet workload install aspire && sudo dotnet workload list && ollama pull all-minilm && ollama pull llama3.2 && ollama pull phi3.5",
+	"postCreateCommand": "sudo dotnet workload update && sudo dotnet workload install aspire && sudo dotnet workload list && ollama pull all-minilm && ollama pull llama3.2 && ollama pull phi4-mini",
 	"postStartCommand": "",
 
 	// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
diff --git a/02-SetupDevEnvironment/getting-started-ollama.md b/02-SetupDevEnvironment/getting-started-ollama.md
index bfab5a9..2bc4e46 100644
--- a/02-SetupDevEnvironment/getting-started-ollama.md
+++ b/02-SetupDevEnvironment/getting-started-ollama.md
@@ -63,7 +63,7 @@ Once your Codespace is fully loaded and configured, let's run a sample app to ve
 
 ## Swap out the model in Ollama
 
-One of the cool things about Ollama is that it's easy to change models. The current app uses the "**llama3.2**" model. Let’s switch it up and try the "**phi3.5**" model instead.
+One of the cool things about Ollama is that it's easy to change models. The sample apps use models like "**phi4-mini**" or "**llama3.2**". Let’s switch it up and try the "**phi3.5**" model instead.
 
 1. Download the Phi3.5 model by running the command from the terminal:
 
@@ -93,8 +93,6 @@ One of the cool things about Ollama is that it's easy to change models. The curr
     1. **Narrow AI** – Designed for specific tasks, such as facial recognition software, voice assistants like Siri or Alexa, autonomous vehicles, etc., which operate under a limited preprogrammed set of behaviors and rules but excel within their domain when compared to humans in these specialized areas.
     2. **General AI** – Capable of understanding, learning, and applying intelligence broadly across various domains like human beings do (natural language processing, problem-solving at a high level). General AIs are still largely theoretical as we haven't yet achieved this form to the extent necessary for practical applications beyond narrow tasks.
-
-
     ...
     ```
 
 > 🙋 **Need help?**: Something not working? [Open an issue](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/issues/new?template=Blank+issue) and we'll help you out.
diff --git a/02-SetupDevEnvironment/readme.md b/02-SetupDevEnvironment/readme.md
index b3e466e..802dc6f 100644
--- a/02-SetupDevEnvironment/readme.md
+++ b/02-SetupDevEnvironment/readme.md
@@ -37,16 +37,16 @@ Here's a quick rundown of the services:
 
 The Ollama Codespace will provision all the necessary models that you need. However, if you are working in local mode, once you have installed Ollama, you need to pull the models for the lessons you want to run.
 
-- For lesson "**02 - Setting Up for .NET Development with Generative AI**" and project [MEAIFunctionsOllama](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/tree/main/02-SetupDevEnvironment/src/BasicChat-03Ollama) you need to pull model [llama3.2](https://ollama.com/library/llama3.2) by entering in terminal
+- For lesson "**02 - Setting Up for .NET Development with Generative AI**" and project [MEAIFunctionsOllama](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/tree/main/02-SetupDevEnvironment/src/BasicChat-03Ollama) you need to pull a model like [phi4-mini](https://ollama.com/library/phi4-mini) or [llama3.2](https://ollama.com/library/llama3.2) by entering the following in the terminal:
 
 ```bash
-ollama pull llama3.2
+ollama pull phi4-mini
 ```
 
-- For lesson "**03 - Core Generative AI Techniques with .NET**", when running the ollama projects like [RAGSimple-10SKOllama](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/tree/main/03-CoreGenerativeAITechniques/src/RAGSimple-10SKOllama), you need to pull the models [all-minilm](https://ollama.com/library/all-minilm) and [phi3.5](https://ollama.com/library/phi3.5) by entering in terminal:
+- For lesson "**03 - Core Generative AI Techniques with .NET**", when running the Ollama projects like [RAGSimple-10SKOllama](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/tree/main/03-CoreGenerativeAITechniques/src/RAGSimple-10SKOllama), you need to pull the models [all-minilm](https://ollama.com/library/all-minilm) and [phi4-mini](https://ollama.com/library/phi4-mini) by entering the following in the terminal:
 
 ```bash
-ollama pull phi3.5
+ollama pull phi4-mini
 ollama pull all-minilm
 ```
diff --git a/02-SetupDevEnvironment/src/BasicChat-03Ollama/Program.cs b/02-SetupDevEnvironment/src/BasicChat-03Ollama/Program.cs
index 17a2084..a55215b 100644
--- a/02-SetupDevEnvironment/src/BasicChat-03Ollama/Program.cs
+++ b/02-SetupDevEnvironment/src/BasicChat-03Ollama/Program.cs
@@ -1,10 +1,10 @@
 using Microsoft.Extensions.AI;
 
-// you can test with the models "llama3.2" and "phi3.5"
+// you can test with the models "phi4-mini" or "llama3.2"
 // to test other models you can download them with the command "ollama pull "
-// in example: "ollama pull deepseek-r1" or "ollama pull phi4-mini" (for the phi4-mini model which is still being tested)
+// for example: "ollama pull deepseek-r1" or "ollama pull phi3.5"
 IChatClient client =
-    new OllamaChatClient(new Uri("http://localhost:11434/"), "llama3.2");
+    new OllamaChatClient(new Uri("http://localhost:11434/"), "phi4-mini");
 
 var response = client.GetStreamingResponseAsync("What is AI?");
 await foreach (var item in response)
diff --git a/03-CoreGenerativeAITechniques/src/BasicChat-03Ollama/Program.cs b/03-CoreGenerativeAITechniques/src/BasicChat-03Ollama/Program.cs
index a3e1220..d248aa4 100644
--- a/03-CoreGenerativeAITechniques/src/BasicChat-03Ollama/Program.cs
+++ b/03-CoreGenerativeAITechniques/src/BasicChat-03Ollama/Program.cs
@@ -2,11 +2,11 @@
 using System.Text;
 
 IChatClient client =
-    new OllamaChatClient(new Uri("http://localhost:11434/"), "llama3.2");
+    new OllamaChatClient(new Uri("http://localhost:11434/"), "phi4-mini");
 
 // here we're building the prompt
 StringBuilder prompt = new StringBuilder();
-prompt.AppendLine("You will analyze the sentiment of the following product reviews. Each line is its own review. Output the sentiment of each review in a bulleted list and then provide a generate sentiment of all reviews. ");
+prompt.AppendLine("You will analyze the sentiment of the following product reviews. Each line is its own review. Output the sentiment of each review in a bulleted list including the original text and the sentiment, and then provide a general sentiment of all reviews. ");
 prompt.AppendLine("I bought this product and it's amazing. I love it!");
 prompt.AppendLine("This product is terrible. I hate it.");
 prompt.AppendLine("I'm not sure about this product. It's okay.");
diff --git a/03-CoreGenerativeAITechniques/src/BasicChat-04OllamaSK/Program.cs b/03-CoreGenerativeAITechniques/src/BasicChat-04OllamaSK/Program.cs
index 7929897..de18094 100644
--- a/03-CoreGenerativeAITechniques/src/BasicChat-04OllamaSK/Program.cs
+++ b/03-CoreGenerativeAITechniques/src/BasicChat-04OllamaSK/Program.cs
@@ -5,7 +5,7 @@ using Microsoft.SemanticKernel.Connectors.Ollama;
 using OllamaSharp;
 
-var modelId = "llama3.2";
+var modelId = "phi4-mini";
 
 var uri = "http://localhost:11434/";
diff --git a/03-CoreGenerativeAITechniques/src/RAGSimple-10SKOllama/Program.cs b/03-CoreGenerativeAITechniques/src/RAGSimple-10SKOllama/Program.cs
index 75e067a..f852d9c 100644
--- a/03-CoreGenerativeAITechniques/src/RAGSimple-10SKOllama/Program.cs
+++ b/03-CoreGenerativeAITechniques/src/RAGSimple-10SKOllama/Program.cs
@@ -25,13 +25,12 @@
 #pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052, SKEXP0070
 
-using DocumentFormat.OpenXml.Bibliography;
 using Microsoft.KernelMemory;
 using Microsoft.KernelMemory.AI.Ollama;
 using Microsoft.SemanticKernel;
 
 var ollamaEndpoint = "http://localhost:11434";
-var modelIdChat = "phi3.5";
+var modelIdChat = "phi4-mini";
 var modelIdEmbeddings = "all-minilm";
 
 // questions
diff --git a/03-CoreGenerativeAITechniques/src/RAGSimple-15Ollama-DeepSeekR1/Program.cs b/03-CoreGenerativeAITechniques/src/RAGSimple-15Ollama-DeepSeekR1/Program.cs
index 6e88c94..68fa6b7 100644
--- a/03-CoreGenerativeAITechniques/src/RAGSimple-15Ollama-DeepSeekR1/Program.cs
+++ b/03-CoreGenerativeAITechniques/src/RAGSimple-15Ollama-DeepSeekR1/Program.cs
@@ -30,7 +30,6 @@
 using Microsoft.SemanticKernel;
 
 var ollamaEndpoint = "http://localhost:11434";
-//var modelIdChat = "phi";
 var modelIdChat = "deepseek-r1";
 var modelIdEmbeddings = "all-minilm";
diff --git a/README.md b/README.md
index 9a71ca4..b8d7b9d 100644
--- a/README.md
+++ b/README.md
@@ -26,6 +26,16 @@ Don't forget to [star (🌟) this repo](https://docs.github.com/en/get-started/e
 
 ➡️Get your own copy by [Forking this repo](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/fork) and find it next in your own repositories.
 
+## ✨ What's New!
+
+We're constantly improving this course with the latest AI tools and models:
+
+- **phi4-mini model support**: The [Ollama Codespace](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/blob/main/02-SetupDevEnvironment/getting-started-ollama.md) now automatically downloads the [phi4-mini model](https://ollama.com/library/phi4-mini) - Microsoft's compact yet powerful LLM. Try it in samples like:
+  - [Chat Application](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/blob/main/03-CoreGenerativeAITechniques/src/BasicChat-03Ollama/Program.cs) - Experience fast responses with this efficient model
+  - [RAG Implementation](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/blob/main/03-CoreGenerativeAITechniques/src/RAGSimple-10SKOllama/Program.cs) - See how phi4-mini handles retrieval-augmented generation tasks
+  - Learn more about the model in the [Phi Cookbook](https://aka.ms/phicookbook)
+
+
 ## 🚀 Introduction
 
 Generative AI is transforming software development, and .NET is no exception. This course aims to simplify the journey by offering:
@@ -124,5 +134,6 @@ We have a lot of other content to help your learning journey. Check out:
 - [Mastering GitHub Copilot for Paired Programming](https://github.com/microsoft/Mastering-GitHub-Copilot-for-Paired-Programming)
 - [Mastering GitHub Copilot for C#/.NET Developers](https://github.com/microsoft/mastering-github-copilot-for-dotnet-csharp-developers)
 - [Choose Your Own Copilot Adventure](https://github.com/microsoft/CopilotAdventures)
+- [Phi Cookbook: Hands-On Examples with Microsoft's Phi Models](https://aka.ms/phicookbook)
 
 [Let's start learning Generative AI and .NET!](02-SetupDevEnvironment/readme.md) 🚀
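
For quick reference, the pattern these changes standardize on is the one in `02-SetupDevEnvironment/src/BasicChat-03Ollama/Program.cs`: point an `OllamaChatClient` at the local Ollama endpoint and stream a response from `phi4-mini`. The sketch below is a minimal, self-contained version of that sample; it assumes Ollama is running on the default port 11434, that `ollama pull phi4-mini` has already been run, and that the project references the `Microsoft.Extensions.AI` Ollama client package used throughout the repo. The body of the `await foreach` loop is not part of the hunk above, so the `Console.Write` call is an illustrative completion rather than the sample's exact code.

```csharp
using Microsoft.Extensions.AI;

// Connect to the local Ollama endpoint and select the phi4-mini model.
// Swap the model name (e.g. "llama3.2") to compare responses from other pulled models.
IChatClient client =
    new OllamaChatClient(new Uri("http://localhost:11434/"), "phi4-mini");

// Stream the reply so tokens are printed as they arrive.
var response = client.GetStreamingResponseAsync("What is AI?");
await foreach (var item in response)
{
    Console.Write(item);
}
```

Switching any of the samples touched in this diff to another model is the same one-line change: pull the model with `ollama pull <model-name>` and update the model id string passed to the client.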