{"id":6276,"date":"2024-08-02T08:25:50","date_gmt":"2024-08-02T08:25:50","guid":{"rendered":"https:\/\/www.mixtile.com\/?page_id=6276"},"modified":"2024-08-05T08:51:10","modified_gmt":"2024-08-05T08:51:10","slug":"deploy-an-llm-based-chatbot","status":"publish","type":"page","link":"https:\/\/www.mixtile.com\/ja\/deploy-an-llm-based-chatbot\/","title":{"rendered":"LLM\u30d9\u30fc\u30b9\u306e\u30c1\u30e3\u30c3\u30c8\u30dc\u30c3\u30c8\u3092\u5c55\u958b\u3059\u308b"},"content":{"rendered":"<section class=\"section\" id=\"section_763452048\">\n\t\t<div class=\"bg section-bg fill bg-fill\" >\n\n\t\t\t\n\t\t\t\n\t\t\t\n\t<div class=\"is-border\"\n\t\tstyle=\"border-color:rgb(235, 235, 235);border-width:1px 0px 0px 0px;\">\n\t<\/div>\n\n\t\t<\/div>\n\n\t\t\n\n\t\t<div class=\"section-content relative\">\n\t\t\t\n\n<div class=\"row\"  id=\"row-1504831634\">\n\n\n\t<div id=\"col-858615314\" class=\"col medium-12 small-12 large-11\"  >\n\t\t\t\t<div class=\"col-inner\"  >\n\t\t\t\n\t\t\t\n\n\t<div id=\"text-3718027976\" class=\"text\">\n\t\t\n\n<h1 style=\"text-align: left;\"><span style=\"font-size: 160%;\"><strong><span style=\"font-weight: 600; color: #ffffff;\">Run a Large Language Model locally<br>\non Mixtile Blade 3 NPU<\/span><\/strong><\/span><\/h1>\n<p>&nbsp;<\/p>\n<p><span class=\"inline-comment-marker valid active\" style=\"color: #ffffff; font-size: 115%;\">Learn how to deploy a chatbot using a large language model (LLM) on the Mixtile Blade 3 (RK3588) NPU<br>\n<\/span><\/p>\n\t\t\n<style>\n#text-3718027976 {\n  font-size: 0.85rem;\n}\n<\/style>\n\t<\/div>\n\t\n\n\t\t<\/div>\n\t\t\t\t\n<style>\n#col-858615314 > .col-inner {\n  padding: 0px 0px 0px 0px;\n  margin: 10px 0px -68px 0px;\n}\n@media (min-width:550px) {\n  #col-858615314 > .col-inner {\n    padding: 0px 0px 0px 15px;\n  }\n}\n<\/style>\n\t<\/div>\n\n\t\n\n<\/div>\n\n\t\t<\/div>\n\n\t\t\n<style>\n#section_763452048 {\n  padding-top: 0px;\n  padding-bottom: 0px;\n  min-height: 400px;\n  background-color: 
rgb(246, 246, 246);\n}\n#section_763452048 .section-bg.bg-loaded {\n  background-image: url(https:\/\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240805-Evaluate-Banner-BG-Q6.jpg);\n}\n#section_763452048 .section-bg {\n  background-position: 50% 50%;\n}\n#section_763452048 .ux-shape-divider--top svg {\n  height: 150px;\n  --divider-top-width: 100%;\n}\n#section_763452048 .ux-shape-divider--bottom svg {\n  height: 150px;\n  --divider-width: 100%;\n}\n<\/style>\n\t<\/section>\n\t\n\t<section class=\"section\" id=\"section_1721993438\">\n\t\t<div class=\"bg section-bg fill bg-fill  bg-loaded\" >\n\n\t\t\t\n\t\t\t\n\t\t\t\n\n\t\t<\/div>\n\n\t\t\n\n\t\t<div class=\"section-content relative\">\n\t\t\t\n\n<div class=\"row align-center\"  id=\"row-1065072072\">\n\n\n\t<div id=\"col-569980461\" class=\"col medium-11 small-12 large-11\"  >\n\t\t\t\t<div class=\"col-inner text-center\" style=\"background-color:rgb(247, 247, 247);\" >\n\t\t\t\n\t\t\t\n\n<div class=\"row\"  id=\"row-1040533802\">\n\n\n\t<div id=\"col-1027640428\" class=\"col small-12 large-12\"  >\n\t\t\t\t<div class=\"col-inner\"  >\n\t\t\t\n\t\t\t\n\n<h3>\u00a0<\/h3>\n<h3 style=\"font-weight: 600;\"><span style=\"font-size: 120%; color: #000000;\">\u3053\u306e\u30d7\u30ed\u30b8\u30a7\u30af\u30c8\u3067\u4f7f\u7528\u3057\u305f\u3082\u306e<\/span><\/h3>\n<hr \/>\n<p>\u00a0<\/p>\n\n\t\t<\/div>\n\t\t\t\t\n<style>\n#col-1027640428 > .col-inner {\n  margin: 0px 0px -67px 0px;\n}\n<\/style>\n\t<\/div>\n\n\t\n\n\t<div id=\"col-1046540276\" class=\"col medium-6 small-12 large-6\"  >\n\t\t\t\t<div class=\"col-inner\"  >\n\t\t\t\n\t\t\t\n\n\t<div id=\"text-1182689532\" class=\"text\">\n\t\t\n\n<h3 class=\"hckui__typography__h3\"><strong>\u30cf\u30fc\u30c9\u30a6\u30a7\u30a2\u30fb\u30b3\u30f3\u30dd\u30fc\u30cd\u30f3\u30c8<\/strong><\/h3>\n<p><a href=\"https:\/\/www.mixtile.com\/ja\/store\/sbc\/blade-3\/\"><span style=\"color: #00aae7;\">Mixtile Blade 3<\/span><\/a> \u00d71<br \/><a 
href=\"https:\/\/www.mixtile.com\/ja\/store\/accessory\/blade-3-case\/\"><span style=\"color: #00aae7;\">Mixtile Blade 3 Case<\/span><\/a> \u00d71<br \/><span style=\"color: #00aae7;\"><a style=\"color: #00aae7;\" href=\"https:\/\/www.asus.com\/displays-desktops\/monitors\/zenscreen\/zenscreen-touch-mb16amt\/\">Asus ZenScreen Touch<\/a><\/span> \u00d71<\/p>\n\t\t\n<style>\n#text-1182689532 {\n  text-align: center;\n}\n<\/style>\n\t<\/div>\n\t\n\n\t\t<\/div>\n\t\t\t\t\n<style>\n#col-1046540276 > .col-inner {\n  margin: 0px 0px -20px 0px;\n}\n<\/style>\n\t<\/div>\n\n\t\n\n\t<div id=\"col-1000699004\" class=\"col medium-6 small-12 large-6\"  >\n\t\t\t\t<div class=\"col-inner\"  >\n\t\t\t\n\t\t\t\n\n<h3 class=\"hckui__typography__h3\"><strong>\u30bd\u30d5\u30c8\u30a6\u30a7\u30a2\u30fb\u30a2\u30d7\u30ea\u3068\u30aa\u30f3\u30e9\u30a4\u30f3\u30fb\u30b5\u30fc\u30d3\u30b9<\/strong><\/h3>\n<p><a href=\"https:\/\/github.com\/mixtile-rockchip\/ubuntu-rockchip\"><span style=\"color: #00aae7;\">Ubuntu 22.04 (Rockchip)<\/span><\/a><br \/><span style=\"color: #00aae7;\"><a style=\"color: #00aae7;\" href=\"https:\/\/huggingface.co\/models\">Hugging Face<\/a><\/span><br \/><a href=\"https:\/\/github.com\/airockchip\/rknn-llm\"><span style=\"color: #00aae7;\">RK-LLM<\/span><\/a><\/p>\n\n\t\t<\/div>\n\t\t\t\t\n<style>\n#col-1000699004 > .col-inner {\n  padding: 0px 20px 0px 30px;\n}\n<\/style>\n\t<\/div>\n\n\t\n\n<\/div>\n\n\t\t<\/div>\n\t\t\t\t\n<style>\n#col-569980461 > .col-inner {\n  padding: 0px 20px 0px 30px;\n}\n<\/style>\n\t<\/div>\n\n\t\n\n\t<div id=\"col-390837448\" class=\"col small-12 large-12\"  >\n\t\t\t\t<div class=\"col-inner text-center\"  >\n\t\t\t\n\t\t\t\n\n<h3 style=\"font-weight: 600; text-align: left;\"><span style=\"font-size: 200%; color: #000000;\">\u30b9\u30c8\u30fc\u30ea\u30fc<\/span><\/h3>\n<hr \/>\n<p style=\"text-align: left;\"><span style=\"color: #000000;\">This project aims to implement a local chatbot application using 
a large language model (LLM) on the Rockchip NPU. An NPU (Neural Processing Unit) is a specialized processor that speeds up neural network computations. Utilizing the NPU&#8217;s capabilities, the chatbot will provide real-time, efficient, and privacy-focused interactions without relying on cloud services. Key steps include selecting and optimizing an LLM for the Rockchip NPU, integrating it into the chatbot application, and ensuring robust performance.<\/span><\/p>\n<p>\u00a0<\/p>\n\n\t\t<\/div>\n\t\t\t\t\n<style>\n#col-390837448 > .col-inner {\n  margin: 0px 0px -40px 0px;\n}\n<\/style>\n\t<\/div>\n\n\t\n\n\t<div id=\"col-390475482\" class=\"col small-12 large-12\"  >\n\t\t\t\t<div class=\"col-inner text-center\"  >\n\t\t\t\n\t\t\t\n\n<h3 style=\"font-weight: 600; text-align: left;\"><span style=\"font-size: 160%; color: #000000;\">What is a Large Language model?<\/span><\/h3>\n<p style=\"text-align: left;\"><span style=\"color: #000000;\">Large language models (LLMs) are advanced AI systems that understand and generate human-like text. They are trained on vast amounts of data, enabling them to perform various tasks such as answering questions, translating languages, and creating content. LLMs, like GPT-4, leverage deep learning techniques to predict and generate coherent and contextually relevant text, making them powerful tools for applications in customer service, content creation, and more. 
Their ability to process and generate natural language makes them invaluable in enhancing human-computer interactions.<\/span><\/p>\n<p style=\"text-align: left;\"><span style=\"color: #000000;\">The network architecture of a large language model (LLM) typically involves several key components:<\/span><\/p>\n\t<div class=\"img has-hover hidden x md-x lg-x y md-y lg-y\" id=\"image_1891599141\">\n\t\t\t\t\t\t\t\t<div class=\"img-inner dark\" >\n\t\t\t<img width=\"1020\" height=\"676\" src=\"https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/05\/Article-Transfer-01_c.webp?resize=1020%2C676&#038;ssl=1\" class=\"attachment-original size-original\" alt=\"\" loading=\"lazy\" srcset=\"https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/05\/Article-Transfer-01_c.webp?w=1375&amp;ssl=1 1375w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/05\/Article-Transfer-01_c.webp?resize=604%2C400&amp;ssl=1 604w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/05\/Article-Transfer-01_c.webp?resize=1207%2C800&amp;ssl=1 1207w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/05\/Article-Transfer-01_c.webp?resize=768%2C509&amp;ssl=1 768w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/05\/Article-Transfer-01_c.webp?resize=50%2C33&amp;ssl=1 50w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/05\/Article-Transfer-01_c.webp?resize=18%2C12&amp;ssl=1 18w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/05\/Article-Transfer-01_c.webp?resize=600%2C398&amp;ssl=1 600w\" sizes=\"(max-width: 1020px) 100vw, 1020px\" data-recalc-dims=\"1\" \/>\t\t\t\t\t\t\n\t\t\t\t\t<\/div>\n\t\t\t\t\t\t\t\t\n<style>\n#image_1891599141 {\n  width: 100%;\n}\n@media (min-width:550px) {\n  #image_1891599141 {\n    width: 50%;\n  }\n}\n<\/style>\n\t<\/div>\n\t\n\n\t<div id=\"text-614848028\" class=\"text\">\n\t\t\n\n<ul>\n<li><span style=\"color: 
#000000;\"><strong>Input Layer<\/strong>: This layer processes the input text data, converting it into a format that the model can understand, usually through tokenization.<\/span><\/li>\n<\/ul>\n<ul class=\"hckui__typography__bodyL\">\n<li><span style=\"color: #000000;\"><strong>Embedding Layer<\/strong>: Converts tokens into dense vectors that capture semantic meanings.<\/span><\/li>\n<li><span style=\"color: #000000;\"><strong>Transformer Blocks<\/strong>: The core of the LLM, consisting of multiple layers of transformers. Each transformer block includes a\u00a0<strong>Multi-Head Attention<\/strong>\u00a0to allow the model to focus on different parts of the input sequence simultaneously, a\u00a0<strong>Feed-Forward Neural Network<\/strong>\u00a0that processes the output from the attention mechanism, and a\u00a0<strong>Layer Normalization<\/strong>\u00a0that stabilizes and accelerates training by normalizing the inputs.<\/span><\/li>\n<li><span style=\"color: #000000;\"><strong>Output Layer<\/strong>: Converts the processed data back into a human-readable format, generating the final text output.<\/span><\/li>\n<\/ul>\n\t\t\n<style>\n#text-614848028 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\n\t\t<\/div>\n\t\t\t\t\t<\/div>\n\n\t\n\n\t<div id=\"col-1436103919\" class=\"col small-12 large-12\"  >\n\t\t\t\t<div class=\"col-inner text-center\"  >\n\t\t\t\n\t\t\t\n\n<h3 style=\"font-weight: 600; text-align: left;\"><span style=\"font-size: 160%; color: #000000;\">Hardware Setup<\/span><\/h3>\n<p style=\"text-align: left;\"><span style=\"color: #000000;\">We will use a Mixtile Blade 3, a low-power SBC based on the 8nm Rockchip RK3588 processor. 
The RK3588 features an NPU (Neural Process Unit) with a maximum performance of 6 TOPS.<\/span><\/p>\n\t<div class=\"img has-hover x md-x lg-x y md-y lg-y\" id=\"image_467624452\">\n\t\t\t\t\t\t\t\t<div class=\"img-inner dark\" >\n\t\t\t<img width=\"800\" height=\"600\" src=\"https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/230802-B3-\u6838\u5fc3\u9762.webp?resize=800%2C600&#038;ssl=1\" class=\"attachment-original size-original\" alt=\"\" loading=\"lazy\" srcset=\"https:\/\/i0.wp.com\/www.mixtile.com\/app\/uploads\/2024\/08\/230802-B3-\u6838\u5fc3\u9762.webp?w=800&amp;ssl=1 800w, https:\/\/i0.wp.com\/www.mixtile.com\/app\/uploads\/2024\/08\/230802-B3-\u6838\u5fc3\u9762.webp?resize=533%2C400&amp;ssl=1 533w, https:\/\/i0.wp.com\/www.mixtile.com\/app\/uploads\/2024\/08\/230802-B3-\u6838\u5fc3\u9762.webp?resize=768%2C576&amp;ssl=1 768w, https:\/\/i0.wp.com\/www.mixtile.com\/app\/uploads\/2024\/08\/230802-B3-\u6838\u5fc3\u9762.webp?resize=50%2C38&amp;ssl=1 50w, https:\/\/i0.wp.com\/www.mixtile.com\/app\/uploads\/2024\/08\/230802-B3-\u6838\u5fc3\u9762.webp?resize=16%2C12&amp;ssl=1 16w, https:\/\/i0.wp.com\/www.mixtile.com\/app\/uploads\/2024\/08\/230802-B3-\u6838\u5fc3\u9762.webp?resize=600%2C450&amp;ssl=1 600w\" sizes=\"(max-width: 800px) 100vw, 800px\" data-recalc-dims=\"1\" \/>\t\t\t\t\t\t\n\t\t\t\t\t<\/div>\n\t\t\t\t\t\t\t\t\n<style>\n#image_467624452 {\n  width: 100%;\n}\n@media (min-width:550px) {\n  #image_467624452 {\n    width: 40%;\n  }\n}\n<\/style>\n\t<\/div>\n\t\n\n<p style=\"text-align: left;\"><span style=\"color: #000000;\">We will utilize a Mixtile Blade 3 Case with a built-in fan, and the case also functions as a heatsink to maintain a cool temperature.<br \/>\n<\/span><\/p>\n\t<div class=\"img has-hover x md-x lg-x y md-y lg-y\" id=\"image_115763520\">\n\t\t\t\t\t\t\t\t<div class=\"img-inner dark\" >\n\t\t\t<img width=\"800\" height=\"600\" 
src=\"https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/230802-B3-Case.webp?fit=800%2C600&amp;ssl=1\" class=\"attachment-large size-large\" alt=\"\" loading=\"lazy\" srcset=\"https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/230802-B3-Case.webp?w=800&amp;ssl=1 800w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/230802-B3-Case.webp?resize=533%2C400&amp;ssl=1 533w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/230802-B3-Case.webp?resize=768%2C576&amp;ssl=1 768w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/230802-B3-Case.webp?resize=50%2C38&amp;ssl=1 50w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/230802-B3-Case.webp?resize=16%2C12&amp;ssl=1 16w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/230802-B3-Case.webp?resize=600%2C450&amp;ssl=1 600w\" sizes=\"(max-width: 800px) 100vw, 800px\" \/>\t\t\t\t\t\t\n\t\t\t\t\t<\/div>\n\t\t\t\t\t\t\t\t\n<style>\n#image_115763520 {\n  width: 100%;\n}\n@media (min-width:550px) {\n  #image_115763520 {\n    width: 45%;\n  }\n}\n<\/style>\n\t<\/div>\n\t\n\n<p class=\"hckui__typography__bodyL\" style=\"text-align: left;\"><span style=\"color: #282828;\">For the initial setup, we will need a monitor and a keyboard.<\/span><\/p>\n\t<div class=\"img has-hover x md-x lg-x y md-y lg-y\" id=\"image_1605160685\">\n\t\t\t\t\t\t\t\t<div class=\"img-inner dark\" >\n\t\t\t<img width=\"740\" height=\"555\" src=\"https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-3.webp?fit=740%2C555&amp;ssl=1\" class=\"attachment-large size-large\" alt=\"\" loading=\"lazy\" srcset=\"https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-3.webp?w=740&amp;ssl=1 740w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-3.webp?resize=533%2C400&amp;ssl=1 533w, 
https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-3.webp?resize=50%2C38&amp;ssl=1 50w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-3.webp?resize=16%2C12&amp;ssl=1 16w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-3.webp?resize=600%2C450&amp;ssl=1 600w\" sizes=\"(max-width: 740px) 100vw, 740px\" \/>\t\t\t\t\t\t\n\t\t\t\t\t<\/div>\n\t\t\t\t\t\t\t\t\n<style>\n#image_1605160685 {\n  width: 100%;\n}\n@media (min-width:550px) {\n  #image_1605160685 {\n    width: 60%;\n  }\n}\n<\/style>\n\t<\/div>\n\t\n\n\n\t\t<\/div>\n\t\t\t\t\t<\/div>\n\n\t\n\n<\/div>\n<div class=\"row\"  id=\"row-888180717\">\n\n\n\t<div id=\"col-1994453302\" class=\"col small-12 large-12\"  >\n\t\t\t\t<div class=\"col-inner text-center\"  >\n\t\t\t\n\t\t\t\n\n<h3 style=\"font-weight: 600; text-align: left;\"><span style=\"font-size: 160%; color: #000000;\">Model Conversion<\/span><\/h3>\n<p style=\"text-align: left;\"><span style=\"color: #000000;\">We will be utilizing the advanced <strong>RKLLM<\/strong> software stack to expedite the deployment of sophisticated AI models onto the Rockchip NPU, ensuring seamless integration and optimal performance. 
This comprehensive framework offers a streamlined approach to AI deployment.<\/span><\/p>\n\t<div class=\"box has-hover   has-hover box-text-bottom\" >\n\n\t\t<div class=\"box-image\" style=\"width:60%;\">\n\t\t\t\t\t\t<div class=\"\" >\n\t\t\t\t<img width=\"1020\" height=\"574\" src=\"https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-4.webp?resize=1020%2C574&#038;ssl=1\" class=\"attachment- size-\" alt=\"\" loading=\"lazy\" srcset=\"https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-4.webp?w=1280&amp;ssl=1 1280w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-4.webp?resize=711%2C400&amp;ssl=1 711w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-4.webp?resize=768%2C432&amp;ssl=1 768w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-4.webp?resize=50%2C28&amp;ssl=1 50w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-4.webp?resize=18%2C10&amp;ssl=1 18w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-4.webp?resize=600%2C338&amp;ssl=1 600w\" sizes=\"(max-width: 1020px) 100vw, 1020px\" data-recalc-dims=\"1\" \/>\t\t\t\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\n\t\t<div class=\"box-text text-center\" >\n\t\t\t<div class=\"box-text-inner\">\n\t\t\t\t\n\n<h4>Image courtesy of Rockchip<br \/>\n<\/h4>\n\n\t\t\t<\/div>\n\t\t<\/div>\n\t<\/div>\n\t\n<p style=\"text-align: left;\"><span style=\"color: #000000;\">To convert and quantize a Hugging Face trained model to an RKLLM format, we first need to install an\u00a0<strong>RKLLM Toolkit<\/strong>\u00a0on an x86 Linux machine. Once the conversion process is completed, we can then proceed to perform inference on the Mixtile Blade 3 using the\u00a0<strong>RKLLM C API<\/strong>. 
The\u00a0<strong>RKLLM Runtime<\/strong>\u00a0provides an API for the Rockchip NPU, allowing for the deployment of RKLLM models and acceleration of LLM applications. The runtime utilizes the\u00a0<strong>RKNPU Kernel Driver<\/strong>\u00a0to interact with the NPU hardware.<\/span><\/p>\n<p style=\"text-align: left;\"><span style=\"color: #000000;\">First, clone the RKNN-LLM GitHub repository, and then create a virtual environment using the provided commands.<br \/><\/span><\/p>\n\t<div id=\"text-4109852671\" class=\"text\">\n\t\t\n\n<pre class=\"hljs awk\"><span style=\"font-size: 110%;\"><code>$ git clone <span style=\"color: #00aae7;\">https\uff1a<span class=\"hljs-regexp\">\/\/\u30b8<\/span>\u30c4\u30d6\u30c9\u30c3\u30c8\u30b3\u30e0<span class=\"hljs-regexp\">\/airockchip\/<\/span>rknn-llm.git<\/span><br \/>$ cd rknn-llm<span class=\"hljs-regexp\">\/<\/span><br \/><span class=\"hljs-regexp\">$ virtualenv <span style=\"color: #993366;\">--python=python3.8 .<\/span><\/span><br \/><span class=\"hljs-regexp\">$ source <span style=\"color: #993366;\">bin\/<\/span><\/span><span style=\"color: #993366;\">activate<\/span><\/code><\/span><\/pre>\n\t\t\n<style>\n#text-4109852671 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-4230843830\" class=\"text\">\n\t\t\n\n<p class=\"hckui__typography__bodyL\" style=\"text-align: left;\"><span style=\"color: #000000;\">Execute the following commands to install the\u00a0<strong>RKLLM Toolkit.<\/strong><\/span><\/p>\n\t\t\n<style>\n#text-4230843830 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-3677914888\" class=\"text\">\n\t\t\n\n<pre class=\"hljs smali\"><span style=\"font-size: 110%;\"><code>$ pip3 install pytz <br \/>$ pip3 install .\/rkllm-toolkit\/packages\/rkllm_toolkit-1.0.1-cp38-cp38-linux_x86_64.whl<\/code><\/span><\/pre>\n\t\t\n<style>\n#text-3677914888 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-3714647419\" class=\"text\">\n\t\t\n\n<p 
class=\"hckui__typography__bodyL\"><span style=\"color: #000000;\">We will be utilizing the Microsoft\u00a0<strong>Phi-3-Mini-4K-Instruct<\/strong>\u00a0model for this project. This model comprises 3.8 billion parameters and is trained using the Phi-3 datasets, which encompass synthetic data as well as filtered publicly available website data. The model is part of the Phi-3 family with a context length of 4K tokens.<\/span><\/p>\n<p class=\"hckui__typography__bodyL\"><span style=\"color: #000000;\">To download the model, run the following command.<\/span><\/p>\n\t\t\n<style>\n#text-3714647419 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-1777178713\" class=\"text\">\n\t\t\n\n<pre class=\"hljs awk\"><span style=\"font-size: 110%;\"><code>$ git clone <span style=\"color: #00aae7;\">https:\/\/huggingface.co\/microsoft\/Phi-3-mini-4k-instruct<\/span><br \/>$ cd <span style=\"color: #993366;\">~\/rknn-llm\/rkllm-runtime\/examples\/rkllm_api_demo<\/span><\/code><\/span><\/pre>\n\t\t\n<style>\n#text-1777178713 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-3363834029\" class=\"text\">\n\t\t\n\n<p class=\"hckui__typography__bodyL\" style=\"text-align: left;\"><span style=\"color: #000000;\">We used the following Python script to convert the model to the RKLLM format so that it can be deployed to the Rockchip RK3588 NPU.<\/span><\/p>\n\t\t\n<style>\n#text-3363834029 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-2429084475\" class=\"text\">\n\t\t\n\n<pre class=\"hljs awk\"><span style=\"font-size: 110%;\"><code>from rkllm.api import RKLLM<br 
\/><br \/>modelpath = <span class=\"hljs-string\" style=\"color: #f99b1c;\">'\/home\/user\/Phi-3-mini-4k-instruct'<\/span><br \/>llm = RKLLM()<br \/><br \/>ret = llm.load_huggingface(model = modelpath)<br \/><span class=\"hljs-keyword\" style=\"color: #ff0000;\">if<\/span> ret != <span class=\"hljs-number\" style=\"color: #993366;\">0<\/span>:<br \/>    print(<span class=\"hljs-string\" style=\"color: #f99b1c;\">'Load model failed!'<\/span>)<br \/>    <span class=\"hljs-keyword\" style=\"color: #ff0000;\">exit<\/span>(ret)<br \/><br \/>ret = llm.build(do_quantization=True, optimization_level=<span class=\"hljs-number\">1<\/span>, quantized_dtype=<span class=\"hljs-string\">'<span style=\"color: #f99b1c;\">w8a8<\/span>'<\/span>, target_platform=<span class=\"hljs-string\">'<span style=\"color: #f99b1c;\">rk3588<\/span>'<\/span>)<br \/><span class=\"hljs-keyword\" style=\"color: #ff0000;\">if<\/span> ret != <span class=\"hljs-number\" style=\"color: #993366;\">0<\/span>:<br \/>    print(<span class=\"hljs-string\" style=\"color: #f99b1c;\">'Build model failed!'<\/span>)<br \/>    <span class=\"hljs-keyword\" style=\"color: #ff0000;\">exit<\/span>(ret)<br \/><br \/>ret = llm.export_rkllm(<span class=\"hljs-string\" style=\"color: #f99b1c;\">\".\/Phi-3-mini-4k-instruct.rkllm\"<\/span>)<br \/><span class=\"hljs-keyword\" style=\"color: #ff0000;\">if<\/span> ret != <span class=\"hljs-number\" style=\"color: #993366;\">0<\/span>:<br \/>    print(<span class=\"hljs-string\" style=\"color: #f99b1c;\">'Export model failed!'<\/span>)<br \/>    <span class=\"hljs-keyword\" style=\"color: #ff0000;\">exit<\/span>(ret)<\/code><\/span><\/pre>\n\t\t\n<style>\n#text-2429084475 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-2348557067\" class=\"text\">\n\t\t\n\n<p class=\"hckui__typography__bodyL\" style=\"text-align: left;\"><span style=\"color: #000000;\">The Mixtile Blade 3\u2019s default OS installation has an outdated NPU driver version, but the RKLLM 
requires the NPU driver version 0.9.6, or above. To resolve this, we need to recompile the kernel from the source and install it. Please follow the instructions below to build and install it.<\/span><\/p>\n\t\t\n<style>\n#text-2348557067 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-1162212510\" class=\"text\">\n\t\t\n\n<pre class=\"hljs awk\"><span style=\"font-size: 110%;\"><code>$ git clone <span style=\"color: #00aae7;\">https\uff1a<span class=\"hljs-regexp\">\/\/\u30b8<\/span>\u30c4\u30d6\u30c9\u30c3\u30c8\u30b3\u30e0<span class=\"hljs-regexp\">\/mixtile-rockchip\/u<\/span>buntu-rockchip.git<\/span><br \/>$ cd ubuntu-rockchip<br \/>$ git checkout mixtile-blade3<\/code><\/span><\/pre>\n\t\t\n<style>\n#text-1162212510 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-2189939203\" class=\"text\">\n\t\t\n\n<p class=\"hckui__typography__bodyL\" style=\"text-align: left;\"><span style=\"color: #000000;\">There are a few missing function definitions. 
To fix this, apply the following changes: append the following code to the build\/linux-rockchip\/include\/linux\/mm.h file.<\/span><\/p>\n\t\t\n<style>\n#text-2189939203 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-2922275207\" class=\"text\">\n\t\t\n\n<pre class=\"hljs reasonml\"><span style=\"font-size: 110%;\"><code>static inline void vm<span class=\"hljs-constructor\">_flags_set()<span class=\"hljs-params\">\u69cb\u9020\u4f53<\/span> <span class=\"hljs-params\">vm_area_struct<\/span> <span class=\"hljs-operator\">*<\/span><span class=\"hljs-params\">\u30d6\u30a4\u30a8\u30e0\u30a8\u30fc<\/span>, <span class=\"hljs-params\">vm_flags_t<\/span> <span class=\"hljs-params\">\u30d5\u30e9\u30c3\u30b0<\/span>)<\/span><br \/>{<br \/>    vma-&gt;vm_flags <span class=\"hljs-pattern-match\">|= flags\uff1b<\/span><br \/><span class=\"hljs-pattern-match\">}<\/span><br \/><br \/><span class=\"hljs-pattern-match\">static inline void vm<span class=\"hljs-constructor\">_flags_clear()<span class=\"hljs-params\">\u69cb\u9020\u4f53<\/span> <span class=\"hljs-params\">vm_area_struct<\/span> <span class=\"hljs-operator\">*<\/span><span class=\"hljs-params\">\u30d6\u30a4\u30a8\u30e0\u30a8\u30fc<\/span>,<span class=\"hljs-params\">vm_flags_t<\/span> <span class=\"hljs-params\">\u30d5\u30e9\u30c3\u30b0<\/span>)<\/span><\/span><br \/><span class=\"hljs-pattern-match\">{<\/span><br \/><span class=\"hljs-pattern-match\">    vma-&gt;vm<span class=\"hljs-constructor\">\u30d5\u30e9\u30b0<\/span> &amp;= ~flags\uff1b<\/span><br \/><span class=\"hljs-pattern-match\">}<\/span><\/code><\/span><\/pre>\n\t\t\n<style>\n#text-2922275207 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-1180273554\" class=\"text\">\n\t\t\n\n<p class=\"hckui__typography__bodyL\" style=\"text-align: left;\"><span style=\"color: #000000;\">At the beginning of the\u00a0<em>build\/linux-rockchip\/drivers\/rknpu\/rknpu_devfreq.c<\/em>\u00a0file, add the definition of the function 
as below.<\/span><\/p>\n\t\t\n<style>\n#text-1180273554 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-1771546455\" class=\"text\">\n\t\t\n\n<pre class=\"hljs reasonml\"><span style=\"font-size: 110%;\"><code>static inline void \u30ed\u30c3\u30af\u30c1\u30c3\u30d7<span class=\"hljs-constructor\">_uninit_opp_table()\u3002<span class=\"hljs-params\">\u69cb\u9020\u4f53<\/span> <span class=\"hljs-params\">\u88c5\u7f6e<\/span> <span class=\"hljs-operator\">*<\/span><span class=\"hljs-params\">\u30c7\u30f4<\/span>,<\/span><br \/><span class=\"hljs-constructor\">   <span class=\"hljs-params\">\u69cb\u9020\u4f53<\/span> <span class=\"hljs-params\">\u30ed\u30c3\u30af\u30c1\u30c3\u30d7_opp_info<\/span> <span class=\"hljs-operator\">*<\/span><span class=\"hljs-params\">\u30a4\u30f3\u30d5\u30a9\u30e1\u30fc\u30b7\u30e7\u30f3<\/span>)<\/span><br \/>{<br \/>}<\/code><\/span><\/pre>\n\t\t\n<style>\n#text-1771546455 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-569712294\" class=\"text\">\n\t\t\n\n<p class=\"hckui__typography__bodyL\" style=\"text-align: left;\"><span style=\"color: #000000;\">To build the kernel image, execute the following command.<\/p>\n<p><\/span><\/p>\n\t\t\n<style>\n#text-569712294 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-2588292191\" class=\"text\">\n\t\t\n\n<pre class=\"hljs jboss-cli\"><span style=\"font-size: 110%;\"><code>$ sudo <span class=\"hljs-string\" style=\"color: #f99b1c;\">.\/build.sh<\/span> <span class=\"hljs-params\">--board=mixtile-blade3<\/span> -k<\/code><\/span><\/pre>\n\t\t\n<style>\n#text-2588292191 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\n\t\t<\/div>\n\t\t\t\t\t<\/div>\n\n\t\n\n\t<div id=\"col-285189977\" class=\"col small-12 large-12\"  >\n\t\t\t\t<div class=\"col-inner text-center\"  >\n\t\t\t\n\t\t\t\n\n<h3 style=\"font-weight: 600; text-align: left;\"><span style=\"font-size: 160%; color: #000000;\">Compile demo application<\/span><\/h3>\n<p 
style=\"text-align: left;\"><span style=\"color: #000000;\">First, download the cross-compilation toolchain\u00a0<span style=\"color: #00aae7;\"><a class=\"hckui__typography__linkBlue\" style=\"color: #00aae7;\" href=\"https:\/\/developer.arm.com\/downloads\/-\/gnu-a\/10-2-2020-11\" rel=\"nofollow\" data-ha=\"{&quot;eventName&quot;:&quot;Clicked link&quot;,&quot;customProps&quot;:{&quot;value&quot;:&quot;gcc-arm-10.2-2020.11-x86_64-aarch64-none-linux-gnu&quot;,&quot;href&quot;:&quot;https:\/\/developer.arm.com\/downloads\/-\/gnu-a\/10-2-2020-11&quot;,&quot;type&quot;:&quot;story&quot;,&quot;location&quot;:&quot;story&quot;},&quot;clickOpts&quot;:{&quot;delayRedirect&quot;:true}}\">gcc-arm-10.2-2020.11-x86_64-aarch64-none-linux-gnu<\/a><\/span>\u00a0and go to the rkllm-runtime\/examples\/rkllm_api_demo directory.<\/span><\/p>\n\t<div id=\"text-3104598777\" class=\"text\">\n\t\t\n\n<pre class=\"hljs awk\"><span style=\"font-size: 110%;\"><code>$ cd rknn-llm<span class=\"hljs-regexp\" style=\"color: #993366;\">\/rkllm-runtime\/<\/span>\u4f8b\/rkllm_api_demo<\/code><\/span><\/pre>\n\t\t\n<style>\n#text-3104598777 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n<p class=\"hckui__typography__bodyL\" style=\"text-align: left;\"><span style=\"color: #000000;\">Modify the src\/main.cpp as shown in the diff below:<\/span><\/p>\n\t<div id=\"text-2342059267\" class=\"text\">\n\t\t\n\n<pre class=\"hljs awk\"><span style=\"font-size: 110%;\"><code>--- a<span class=\"hljs-regexp\" style=\"color: #993366;\">\/rkllm-runtime\/<\/span>\u4f8b<span class=\"hljs-regexp\" style=\"color: #993366;\">\/rkllm_api_demo\/<\/span>\u30bd\u30fc\u30b9<span class=\"hljs-regexp\" style=\"color: #993366;\">\/main.cpp<\/span><br \/><span class=\"hljs-regexp\">+++ b<span style=\"color: #993366;\">\/<\/span><\/span><span style=\"color: #993366;\">rkllm-runtime<\/span><span class=\"hljs-regexp\"><span style=\"color: #993366;\">\/<\/span>\u4f8b<span style=\"color: #993366;\">\/<\/span><\/span><span 
style=\"color: #993366;\">rkllm_api_demo<\/span><span class=\"hljs-regexp\"><span style=\"color: #993366;\">\/<\/span>\u30bd\u30fc\u30b9<span style=\"color: #993366;\">\/m<\/span><\/span><span style=\"color: #993366;\">ain.cpp<\/span><br \/>@@ -<span class=\"hljs-number\" style=\"color: #993366;\">71<\/span>,<span class=\"hljs-number\" style=\"color: #993366;\">7<\/span> +<span class=\"hljs-number\" style=\"color: #993366;\">71<\/span>,<span class=\"hljs-number\" style=\"color: #993366;\">7<\/span> int main(int argc, char **argv)<br \/><span class=\"hljs-regexp\">\/\/<\/span>\u8bbe\u7f6e\u53c2\u6570\u53ca\u521d\u59cb\u5316<br \/>RKLLMParam param = rkllm_createDefaultParam()\uff1b<br \/>param.model_path = rkllm_model.c_str()\uff1b<br \/>- param.num_npu_core=\u3002 <span class=\"hljs-number\" style=\"color: #993366;\">2<\/span>;<br \/>+ param.num_npu_core = <span class=\"hljs-number\" style=\"color: #993366;\">3<\/span>;<br \/>param.top_k = <span class=\"hljs-number\" style=\"color: #993366;\">1<\/span>;<br \/>param.max_new_tokens=\u3002 <span class=\"hljs-number\" style=\"color: #993366;\">256<\/span>;<br \/>param.max_context_len = <span class=\"hljs-number\" style=\"color: #993366;\">512<\/span>;<br \/>@@ -<span class=\"hljs-number\" style=\"color: #993366;\">115<\/span>,<span class=\"hljs-number\" style=\"color: #993366;\">8<\/span> +<span class=\"hljs-number\" style=\"color: #993366;\">115<\/span>,<span class=\"hljs-number\" style=\"color: #993366;\">8<\/span> int main(int argc, char **argv)<br \/>cout &lt;&lt; input_str &lt;&lt; endl\uff1b<br \/>}<br \/>}<br \/>- <span class=\"hljs-regexp\">\/\/<\/span> string text = PROMPT_TEXT_PREFIX + input_str + PROMPT_TEXT_POSTFIX\uff1b<br \/>- \u6587\u5b57\u5217 text = input_str\uff1b<br \/>+ \u6587\u5b57\u5217 text = PROMPT_TEXT_PREFIX + input_str + PROMPT_TEXT_POSTFIX\uff1b<br \/>+ <span class=\"hljs-regexp\">\/\/<\/span>\u6587\u5b57\u5217 text = input_str\uff1b<\/code><\/span><\/pre>\n\t\t\n<style>\n#text-2342059267 {\n 
 text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-2327010120\" class=\"text\">\n\t\t\n\n<p><span style=\"color: #000000;\">Modify the GCC_COMPILER_PATH in the\u00a0<em>build-linux.sh\u00a0<\/em>compilation script.<\/span><\/p>\n\t\t\n<style>\n#text-2327010120 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-1247209317\" class=\"text\">\n\t\t\n\n<pre class=\"hljs apache\"><span style=\"font-size: 110%;\"><code><span class=\"hljs-attribute\" style=\"color: #ff0000;\">GCC_COMPILER_PATH<\/span>=~\/rknn-llm\/<span style=\"color: #993366;\">gcc-arm-<span class=\"hljs-number\">10<\/span>.<span class=\"hljs-number\">2<\/span>-<span class=\"hljs-number\">2020<\/span>.<span class=\"hljs-number\">11<\/span>-x<span class=\"hljs-number\">86<\/span>_<span class=\"hljs-number\">64<\/span>-aarch<span class=\"hljs-number\">64<\/span><\/span>-none-linux-gnu\/bin\/aarch<span class=\"hljs-number\">64<\/span>-none-linux-gnu<\/code><\/span><\/pre>\n\t\t\n<style>\n#text-1247209317 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-2729107485\" class=\"text\">\n\t\t\n\n<p><span style=\"color: #000000;\">To compile the application, execute the following command.<\/span><\/p>\n\t\t\n<style>\n#text-2729107485 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-4026717913\" class=\"text\">\n\t\t\n\n<pre class=\"hljs mipsasm\"><span style=\"font-size: 110%;\"><code>$ <span style=\"color: #ff0000;\"><span class=\"hljs-keyword\">bash <\/span><span class=\"hljs-keyword\">build-linux.sh <\/span><\/span><\/code><\/span><\/pre>\n\t\t\n<style>\n#text-4026717913 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-2224477570\" class=\"text\">\n\t\t\n\n<p><span style=\"color: #000000;\">The executable file created can be located at the following path: build\/build_linux_aarch64_Release\/llm_demo<\/span><\/p>\n\t\t\n<style>\n#text-2224477570 {\n  text-align: 
left;\n}\n<\/style>\n\t<\/div>\n\t\n\n\t\t<\/div>\n\t\t\t\t\t<\/div>\n\n\t\n\n\t<div id=\"col-700944822\" class=\"col small-12 large-12\"  >\n\t\t\t\t<div class=\"col-inner text-center\"  >\n\t\t\t\n\t\t\t\n\n<h3 style=\"font-weight: 600; text-align: left;\"><span style=\"font-size: 160%; color: #000000;\">Model Deployment<\/span><\/h3>\n<p class=\"hckui__typography__bodyL\" style=\"text-align: left;\"><span style=\"color: #000000;\">Copy the kernel image file\u00a0<em>linux-image-5.10.160-rockchip_5.10.160-21_arm64.deb,<\/em>\u00a0the converted model file\u00a0<em>Phi-3-mini-4k-instruct.rkllm,<\/em>\u00a0and application executable file\u00a0<em>llm_demo<\/em>\u00a0to the Mixtile Blade 3.<\/span><\/p>\n<p class=\"hckui__typography__bodyL\" style=\"text-align: left;\"><span style=\"color: #000000;\">To install the kernel, execute the following command on the Mixtile Blade 3.<\/span><\/p>\n\t<div id=\"text-2532320611\" class=\"text\">\n\t\t\n\n<pre class=\"hljs dns\"><span style=\"font-size: 110%;\"><code>$ sudo dpkg -i linux-image-<span class=\"hljs-number\" style=\"color: #993366;\">5.10.160<\/span>-rockchip_<span class=\"hljs-number\" style=\"color: #993366;\">5.10.160<\/span>-<span class=\"hljs-number\">21<\/span>_arm64.deb<\/code><\/span><\/pre>\n\t\t\n<style>\n#text-2532320611 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n<p class=\"hckui__typography__bodyL\" style=\"text-align: left;\"><span style=\"color: #000000;\">To verify the NPU driver version, execute the following command.<\/span><\/p>\n\t<div id=\"text-3343469777\" class=\"text\">\n\t\t\n\n<pre class=\"hljs awk\"><span style=\"font-size: 110%;\"><code>$ sudo cat <span class=\"hljs-regexp\" style=\"color: #993366;\">\/sys\/<\/span>kernel<span class=\"hljs-regexp\" style=\"color: #993366;\">\/debug\/<\/span>rknpu\/version<br \/><br \/>RKNPU driver: <span style=\"color: #993366;\">v0.<span class=\"hljs-number\">9.6<\/span><\/span><\/code><\/span><\/pre>\n\t\t\n<style>\n#text-3343469777 {\n  
 text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\n\t\t<\/div>\n\t\t\t\t\t<\/div>\n\n\t\n\n<\/div>\n<div class=\"row\"  id=\"row-926672687\">\n\n\n\t<div id=\"col-2094793823\" class=\"col small-12 large-12\"  >\n\t\t\t\t<div class=\"col-inner text-center\"  >\n\t\t\t\n\t\t\t\n\n<h3 style=\"font-weight: 600; text-align: left;\"><span style=\"font-size: 160%; color: #000000;\">Run Application<\/span><\/h3>\n<p class=\"hckui__typography__bodyL\" style=\"text-align: left;\"><span style=\"color: #000000;\">First, execute the commands below to set up the RKLLM runtime library on the Mixtile Blade 3.<\/span><\/p>\n\t<div id=\"text-470884169\" class=\"text\">\n\t\t\n\n<pre class=\"hljs awk\"><span style=\"font-size: 110%;\"><code>$ git clone <span style=\"color: #00aae7;\">https:<span class=\"hljs-regexp\">\/\/<\/span>github.com<span class=\"hljs-regexp\">\/airockchip\/<\/span>rknn-llm.git<\/span><br \/>$ export LD_LIBRARY_PATH=rknn-llm<span class=\"hljs-regexp\" style=\"color: #993366;\">\/rkllm-runtime\/<\/span>runtime<span class=\"hljs-regexp\" style=\"color: #993366;\">\/Linux\/<\/span>librkllm_api\/aarch64:<span class=\"hljs-variable\">$LD_LIBRARY_PATH<\/span><\/code><\/span><\/pre>\n\t\t\n<style>\n#text-470884169 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n<p class=\"hckui__typography__bodyL\" style=\"text-align: left;\"><span style=\"color: #000000;\">Before running the application, we need to set the user limit using the following command; otherwise, the NPU memory allocation will fail.<\/span><\/p>\n\t<div id=\"text-40708175\" class=\"text\">\n\t\t\n\n<pre class=\"hljs shell\"><span style=\"font-size: 110%;\"><code><span class=\"hljs-meta\">$<\/span><span class=\"bash\"><span class=\"hljs-built_in\" style=\"color: #f99b1c;\"> ulimit<\/span> -n 102400<\/span><\/code><\/span><\/pre>\n\t\t\n<style>\n#text-40708175 {\n  text-align: 
left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-1992240173\" class=\"text\">\n\t\t\n\n<p class=\"hckui__typography__bodyL\"><span style=\"color: #282828;\">To run the application, execute the following command.<\/span><\/p>\n\t\t\n<style>\n#text-1992240173 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-1272101340\" class=\"text\">\n\t\t\n\n<pre class=\"hljs subunit\"><span style=\"font-size: 110%;\"><code>$ <span style=\"color: #f99b1c;\">.\/llm_demo<\/span> Phi<span class=\"hljs-string\">-3<\/span>-mini<span class=\"hljs-string\">-4<\/span>k-instruct.rkllm<\/code><\/span><\/pre>\n\t\t\n<style>\n#text-1272101340 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div id=\"text-3741267243\" class=\"text\">\n\t\t\n\n<p class=\"hckui__typography__bodyL\"><span style=\"color: #000000;\">When the application starts, it will take a few seconds to load the model. Once ready, it will present a user prompt, allowing us to input any questions or instructions.<\/span><\/p>\n\t\t\n<style>\n#text-3741267243 {\n  text-align: left;\n}\n<\/style>\n\t<\/div>\n\t\n\t<div class=\"img has-hover x md-x lg-x y md-y lg-y\" id=\"image_839677855\">\n\t\t\t\t\t\t\t\t<div class=\"img-inner dark\" >\n\t\t\t<img width=\"1020\" height=\"621\" src=\"https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-2.webp?fit=1020%2C621&amp;ssl=1\" class=\"attachment-large size-large\" alt=\"\" loading=\"lazy\" srcset=\"https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-2.webp?w=1280&amp;ssl=1 1280w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-2.webp?resize=657%2C400&amp;ssl=1 657w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-2.webp?resize=768%2C467&amp;ssl=1 768w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-2.webp?resize=50%2C30&amp;ssl=1 50w, 
https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-2.webp?resize=18%2C12&amp;ssl=1 18w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/240802-B3-NPU-2.webp?resize=600%2C365&amp;ssl=1 600w\" sizes=\"(max-width: 1020px) 100vw, 1020px\" \/>\t\t\t\t\t\t\n\t\t\t\t\t<\/div>\n\t\t\t\t\t\t\t\t\n<style>\n#image_839677855 {\n  width: 66%;\n}\n<\/style>\n\t<\/div>\n\t\n\n\n\t\t<\/div>\n\t\t\t\t\t<\/div>\n\n\t\n\n\t<div id=\"col-1147396081\" class=\"col small-12 large-12\"  >\n\t\t\t\t<div class=\"col-inner text-center\"  >\n\t\t\t\n\t\t\t\n\n<h3 style=\"font-weight: 600; text-align: left;\"><span style=\"font-size: 160%; color: #000000;\">Demo<br \/><\/span><\/h3>\n<div class=\"video video-fit mb\" style=\"padding-top:56.25%;\"><iframe loading=\"lazy\" title=\"Run a Large Language Model locally on a Mixtile Blade 3 NPU\" width=\"1020\" height=\"574\" src=\"https:\/\/www.youtube.com\/embed\/iETIMzbt-yM?feature=oembed\" frameborder=\"0\" allow=\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share\" referrerpolicy=\"strict-origin-when-cross-origin\" allowfullscreen><\/iframe><\/div>\n\n\n\t\t<\/div>\n\t\t\t\t\t<\/div>\n\n\t\n\n\t<div id=\"col-375333621\" class=\"col small-12 large-12\"  >\n\t\t\t\t<div class=\"col-inner text-center\"  >\n\t\t\t\n\t\t\t\n\n<h3 style=\"font-weight: 600; text-align: left;\"><span style=\"font-size: 160%; color: #000000;\">Conclusion<br \/><\/span><\/h3>\n<p class=\"hckui__typography__bodyL\" style=\"text-align: left;\"><span style=\"color: #282828;\">This project demonstrates an advanced conversational AI solution using a low-power edge device. The solution is highly responsive and efficient, though sometimes not accurate without relying on cloud-based services. 
It also demonstrates the advantages of running sophisticated AI applications locally by making use of Rockchip NPU\u2019s capabilities.<\/span><\/p>\n<hr \/>\n<p>\u00a0<\/p>\n\n\t\t<\/div>\n\t\t\t\t\n<style>\n#col-375333621 > .col-inner {\n  margin: 0px 0px -40px 0px;\n}\n<\/style>\n\t<\/div>\n\n\t\n\n\t<div id=\"col-565430804\" class=\"col small-12 large-12\"  >\n\t\t\t\t<div class=\"col-inner text-center\"  >\n\t\t\t\n\t\t\t\n\n<h3 style=\"font-weight: 600; text-align: left;\"><span style=\"font-size: 200%; color: #000000;\">\u30af\u30ec\u30b8\u30c3\u30c8<br \/><\/span><\/h3>\n<hr>\n<div class=\"row\"  id=\"row-1817384352\">\n\n\n\t<div id=\"col-1801556516\" class=\"col medium-10 small-12 large-10\"  >\n\t\t\t\t<div class=\"col-inner\"  >\n\t\t\t\n\t\t\t\n\n  <div class=\"icon-box testimonial-box icon-box-left text-left is-large\">\n                <div class=\"icon-box-img testimonial-image circle\" style=\"width: 90px\">\n              <img width=\"280\" height=\"280\" src=\"https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/Naveen-Kumar-400.webp?resize=280%2C280&amp;ssl=1\" class=\"attachment-thumbnail size-thumbnail\" alt=\"\" loading=\"lazy\" srcset=\"https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/Naveen-Kumar-400.webp?w=400&amp;ssl=1 400w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/Naveen-Kumar-400.webp?resize=280%2C280&amp;ssl=1 280w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/Naveen-Kumar-400.webp?resize=50%2C50&amp;ssl=1 50w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/Naveen-Kumar-400.webp?resize=12%2C12&amp;ssl=1 12w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/Naveen-Kumar-400.webp?resize=300%2C300&amp;ssl=1 300w, https:\/\/i0.wp.com\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2024\/08\/Naveen-Kumar-400.webp?resize=100%2C100&amp;ssl=1 100w\" sizes=\"(max-width: 280px) 100vw, 
280px\" \/>        <\/div>\n                <div class=\"icon-box-text p-last-0\">\n            \t\t\t\t<div class=\"testimonial-text line-height-small italic test_text first-reset last-reset is-italic\">\n            \n\n<h3 class=\"hckui__typography__h3\"><span style=\"color: #00aae7;\"><strong>Naveen Kumar<\/strong><\/span><\/h3>\n<p>Bioinformatician, Researcher, Programmer, Maker, Tinkerer, Community contributor Machine Learning Tokyo<\/p>\n\n          <\/div>\n          <div class=\"testimonial-meta pt-half\">\n             <strong class=\"testimonial-name test_name\"><\/strong>\n                          <span class=\"testimonial-company test_company\"><\/span>\n          <\/div>\n        <\/div>\n  <\/div>\n\n  \n\n\t\t<\/div>\n\t\t\t\t\t<\/div>\n\n\t\n\n<\/div>\n\n\t\t<\/div>\n\t\t\t\t\t<\/div>\n\n\t\n\n<\/div>\n\n\t\t<\/div>\n\n\t\t\n<style>\n#section_1721993438 {\n  padding-top: 30px;\n  padding-bottom: 30px;\n  min-height: 600px;\n}\n#section_1721993438 .ux-shape-divider--top svg {\n  height: 150px;\n  --divider-top-width: 100%;\n}\n#section_1721993438 .ux-shape-divider--bottom svg {\n  height: 150px;\n  --divider-width: 100%;\n}\n<\/style>\n\t<\/section>\n\t\n\n\n<style>@import url('https:\/\/fonts.googleapis.com\/css2?family=Poppins:wght@300;600;900&display=swap');<\/style>","protected":false},"excerpt":{"rendered":"","protected":false},"author":9,"featured_media":0,"parent":0,"menu_order":0,"comment_status":"closed","ping_status":"closed","template":"page-blank.php","meta":{"spay_email":""},"yoast_head":"<!-- This site is optimized with the Yoast SEO plugin v17.8 - https:\/\/yoast.com\/wordpress\/plugins\/seo\/ -->\n<title>Deploy an LLM-Based Chatbot | Mixtile<\/title>\n<meta name=\"robots\" content=\"index, follow, max-snippet:-1, max-image-preview:large, max-video-preview:-1\" \/>\n<link rel=\"canonical\" href=\"https:\/\/www.mixtile.com\/ja\/deploy-an-llm-based-chatbot\/\" \/>\n<meta property=\"og:locale\" content=\"ja_JP\" \/>\n<meta 
property=\"og:type\" content=\"article\" \/>\n<meta property=\"og:title\" content=\"Deploy an LLM-Based Chatbot | Mixtile\" \/>\n<meta property=\"og:url\" content=\"https:\/\/www.mixtile.com\/ja\/deploy-an-llm-based-chatbot\/\" \/>\n<meta property=\"og:site_name\" content=\"Mixtile\" \/>\n<meta property=\"article:modified_time\" content=\"2024-08-05T08:51:10+00:00\" \/>\n<meta name=\"twitter:card\" content=\"summary_large_image\" \/>\n<meta name=\"twitter:label1\" content=\"Est. reading time\" \/>\n\t<meta name=\"twitter:data1\" content=\"8 minutes\" \/>\n<script type=\"application\/ld+json\" class=\"yoast-schema-graph\">{\"@context\":\"https:\/\/schema.org\",\"@graph\":[{\"@type\":\"Organization\",\"@id\":\"https:\/\/www.mixtile.com\/ja\/#organization\",\"name\":\"Mixtile Limited\",\"url\":\"https:\/\/www.mixtile.com\/ja\/\",\"sameAs\":[],\"logo\":{\"@type\":\"ImageObject\",\"@id\":\"https:\/\/www.mixtile.com\/ja\/#logo\",\"inLanguage\":\"ja\",\"url\":\"https:\/\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2022\/02\/logo.svg\",\"contentUrl\":\"https:\/\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2022\/02\/logo.svg\",\"caption\":\"Mixtile Limited\"},\"image\":{\"@id\":\"https:\/\/www.mixtile.com\/ja\/#logo\"}},{\"@type\":\"WebSite\",\"@id\":\"https:\/\/www.mixtile.com\/ja\/#website\",\"url\":\"https:\/\/www.mixtile.com\/ja\/\",\"name\":\"Mixtile\",\"description\":\"Hardware for IoT Solutions\",\"publisher\":{\"@id\":\"https:\/\/www.mixtile.com\/ja\/#organization\"},\"potentialAction\":[{\"@type\":\"SearchAction\",\"target\":{\"@type\":\"EntryPoint\",\"urlTemplate\":\"https:\/\/www.mixtile.com\/ja\/?s={search_term_string}\"},\"query-input\":\"required name=search_term_string\"}],\"inLanguage\":\"ja\"},{\"@type\":\"WebPage\",\"@id\":\"https:\/\/www.mixtile.com\/ja\/deploy-an-llm-based-chatbot\/#webpage\",\"url\":\"https:\/\/www.mixtile.com\/ja\/deploy-an-llm-based-chatbot\/\",\"name\":\"Deploy an LLM-Based Chatbot | 
Mixtile\",\"isPartOf\":{\"@id\":\"https:\/\/www.mixtile.com\/ja\/#website\"},\"datePublished\":\"2024-08-02T08:25:50+00:00\",\"dateModified\":\"2024-08-05T08:51:10+00:00\",\"breadcrumb\":{\"@id\":\"https:\/\/www.mixtile.com\/ja\/deploy-an-llm-based-chatbot\/#breadcrumb\"},\"inLanguage\":\"ja\",\"potentialAction\":[{\"@type\":\"ReadAction\",\"target\":[\"https:\/\/www.mixtile.com\/ja\/deploy-an-llm-based-chatbot\/\"]}]},{\"@type\":\"BreadcrumbList\",\"@id\":\"https:\/\/www.mixtile.com\/ja\/deploy-an-llm-based-chatbot\/#breadcrumb\",\"itemListElement\":[{\"@type\":\"ListItem\",\"position\":1,\"name\":\"Home\",\"item\":\"https:\/\/www.mixtile.com\/\"},{\"@type\":\"ListItem\",\"position\":2,\"name\":\"Deploy an LLM-Based Chatbot\"}]}]}<\/script>\n<!-- \/ Yoast SEO plugin. -->","yoast_head_json":{"title":"Deploy an LLM-Based Chatbot | Mixtile","robots":{"index":"index","follow":"follow","max-snippet":"max-snippet:-1","max-image-preview":"max-image-preview:large","max-video-preview":"max-video-preview:-1"},"canonical":"https:\/\/www.mixtile.com\/ja\/deploy-an-llm-based-chatbot\/","og_locale":"ja_JP","og_type":"article","og_title":"Deploy an LLM-Based Chatbot | Mixtile","og_url":"https:\/\/www.mixtile.com\/ja\/deploy-an-llm-based-chatbot\/","og_site_name":"Mixtile","article_modified_time":"2024-08-05T08:51:10+00:00","twitter_card":"summary_large_image","twitter_misc":{"Est. 
reading time":"8 minutes"},"schema":{"@context":"https:\/\/schema.org","@graph":[{"@type":"Organization","@id":"https:\/\/www.mixtile.com\/ja\/#organization","name":"Mixtile Limited","url":"https:\/\/www.mixtile.com\/ja\/","sameAs":[],"logo":{"@type":"ImageObject","@id":"https:\/\/www.mixtile.com\/ja\/#logo","inLanguage":"ja","url":"https:\/\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2022\/02\/logo.svg","contentUrl":"https:\/\/dh19rycdk230a.cloudfront.net\/app\/uploads\/2022\/02\/logo.svg","caption":"Mixtile Limited"},"image":{"@id":"https:\/\/www.mixtile.com\/ja\/#logo"}},{"@type":"WebSite","@id":"https:\/\/www.mixtile.com\/ja\/#website","url":"https:\/\/www.mixtile.com\/ja\/","name":"Mixtile","description":"Hardware for IoT Solutions","publisher":{"@id":"https:\/\/www.mixtile.com\/ja\/#organization"},"potentialAction":[{"@type":"SearchAction","target":{"@type":"EntryPoint","urlTemplate":"https:\/\/www.mixtile.com\/ja\/?s={search_term_string}"},"query-input":"required name=search_term_string"}],"inLanguage":"ja"},{"@type":"WebPage","@id":"https:\/\/www.mixtile.com\/ja\/deploy-an-llm-based-chatbot\/#webpage","url":"https:\/\/www.mixtile.com\/ja\/deploy-an-llm-based-chatbot\/","name":"Deploy an LLM-Based Chatbot | Mixtile","isPartOf":{"@id":"https:\/\/www.mixtile.com\/ja\/#website"},"datePublished":"2024-08-02T08:25:50+00:00","dateModified":"2024-08-05T08:51:10+00:00","breadcrumb":{"@id":"https:\/\/www.mixtile.com\/ja\/deploy-an-llm-based-chatbot\/#breadcrumb"},"inLanguage":"ja","potentialAction":[{"@type":"ReadAction","target":["https:\/\/www.mixtile.com\/ja\/deploy-an-llm-based-chatbot\/"]}]},{"@type":"BreadcrumbList","@id":"https:\/\/www.mixtile.com\/ja\/deploy-an-llm-based-chatbot\/#breadcrumb","itemListElement":[{"@type":"ListItem","position":1,"name":"Home","item":"https:\/\/www.mixtile.com\/"},{"@type":"ListItem","position":2,"name":"Deploy an LLM-Based 
Chatbot"}]}]}},"_links":{"self":[{"href":"https:\/\/www.mixtile.com\/ja\/wp-json\/wp\/v2\/pages\/6276"}],"collection":[{"href":"https:\/\/www.mixtile.com\/ja\/wp-json\/wp\/v2\/pages"}],"about":[{"href":"https:\/\/www.mixtile.com\/ja\/wp-json\/wp\/v2\/types\/page"}],"author":[{"embeddable":true,"href":"https:\/\/www.mixtile.com\/ja\/wp-json\/wp\/v2\/users\/9"}],"replies":[{"embeddable":true,"href":"https:\/\/www.mixtile.com\/ja\/wp-json\/wp\/v2\/comments?post=6276"}],"version-history":[{"count":16,"href":"https:\/\/www.mixtile.com\/ja\/wp-json\/wp\/v2\/pages\/6276\/revisions"}],"predecessor-version":[{"id":6305,"href":"https:\/\/www.mixtile.com\/ja\/wp-json\/wp\/v2\/pages\/6276\/revisions\/6305"}],"wp:attachment":[{"href":"https:\/\/www.mixtile.com\/ja\/wp-json\/wp\/v2\/media?parent=6276"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}