{ "nbformat": 4, "nbformat_minor": 0, "metadata": { "colab": { "provenance": [], "gpuType": "T4" }, "kernelspec": { "name": "python3", "display_name": "Python 3" }, "language_info": { "name": "python" }, "accelerator": "GPU" }, "cells": [ { "cell_type": "markdown", "source": [ "# Multiple-choice question" ], "metadata": { "id": "bjbqHUTuNFyd" } }, { "cell_type": "markdown", "source": [ "# 1. Install Litelines" ], "metadata": { "id": "B8jIr1KAKQWF" } }, { "cell_type": "code", "execution_count": 1, "metadata": { "id": "cvgtiC0Kcl-z" }, "outputs": [], "source": [ "%pip install --quiet --upgrade litelines" ] }, { "cell_type": "markdown", "source": [ "## 2. Download a model and its tokenizer" ], "metadata": { "id": "-dLJS16ZNy97" } }, { "cell_type": "code", "source": [ "# Use cuda for faster inference\n", "import torch\n", "\n", "device = torch.device(\"cuda\") if torch.cuda.is_available() else torch.device(\"cpu\")\n", "assert device == torch.device(\"cuda\"), \"In the Runtime tab, please Change runtime type to GPU\"" ], "metadata": { "id": "68zKdmOQcqLo" }, "execution_count": 2, "outputs": [] }, { "cell_type": "code", "source": [ "from transformers import AutoModelForCausalLM, AutoTokenizer\n", "\n", "MODEL_ID = \"Qwen/Qwen2.5-0.5B-Instruct\"\n", "tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)\n", "model = AutoModelForCausalLM.from_pretrained(MODEL_ID).to(device)" ], "metadata": { "id": "U-a-w5jwc6Xg" }, "execution_count": 3, "outputs": [] }, { "cell_type": "markdown", "source": [ "## 3. Prepare the inputs to the LLM" ], "metadata": { "id": "ohUnAx9NJ7AV" } }, { "cell_type": "code", "source": [ "user_input = \"\"\"Which key combination exits Vim?\n", "\n", "A) Ctrl+X\n", "B) Esc then :q!\n", "C) Alt+F4\n", "\"\"\"\n", "messages = [{\"role\": \"user\", \"content\": user_input}]\n", "inputs = tokenizer.apply_chat_template(\n", " messages,\n", " add_generation_prompt=True,\n", " return_tensors=\"pt\",\n", " return_dict=True\n", ").to(model.device)" ], "metadata": { "id": "Y8RtiZ7UefUx" }, "execution_count": 4, "outputs": [] }, { "cell_type": "markdown", "source": [ "## 4. Define a processor through a regular expression" ], "metadata": { "id": "9LaJrh0PKkcn" } }, { "cell_type": "code", "source": [ "from litelines.transformers import SchemaProcessor\n", "\n", "processor = SchemaProcessor(response_format=r\"A\\.|B\\.|C\\.\", tokenizer=tokenizer)\n", "processor.show_graph()" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 331 }, "id": "sVx6IzInKHoL", "outputId": "6b6a373e-91cd-48ce-ed31-e198d2584dce" }, "execution_count": 5, "outputs": [ { "output_type": "display_data", "data": { "text/plain": [ "" ], "image/svg+xml": "\n\n%3\n\nAllowed Paths\nRegular expression: A\\.|B\\.|C\\.\n\n\n0\n\n0\n\n\n\n1\n\n1\n\n\n\n0->1\n\n\n\n\nid\n\n\ntoken\n\n34\n\nC\n\n\n\n2\n\n2\n\n\n\n0->2\n\n\n\n\nid\n\n\ntoken\n\n33\n\nB\n\n\n\n3\n\n3\n\n\n\n0->3\n\n\n\n\nid\n\n\ntoken\n\n32\n\nA\n\n\n\n4\n\n\n4\n\n\n\n1->4\n\n\n\n\nid\n\n\ntoken\n\n13\n\n.\n\n\n\n2->4\n\n\n\n\nid\n\n\ntoken\n\n13\n\n.\n\n\n\n3->4\n\n\n\n\nid\n\n\ntoken\n\n13\n\n.\n\n\n\n\n\n\n->0\n\n\n\n\n" }, "metadata": {} } ] }, { "cell_type": "markdown", "source": [ "## 5. 
{ "cell_type": "markdown", "source": [ "## 5. Generate a structured response" ], "metadata": { "id": "UUkc7wXcOQlN" } }, { "cell_type": "code", "source": [ "generated = model.generate(**inputs, logits_processor=[processor], temperature=0.1)\n", "print(f\"Response: {tokenizer.decode(generated[0][inputs['input_ids'].shape[-1]:-1])}\")" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "4oIHOm-rMdG0", "outputId": "d64d1505-aa01-4498-85fe-acc0112126c5" }, "execution_count": 6, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Response: B.\n" ] } ] }, { "cell_type": "markdown", "source": [ "## 6. Visualize the selected path" ], "metadata": { "id": "7ANqIJQ_M9fH" } }, { "cell_type": "code", "source": [ "processor.show_graph()" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 331 }, "id": "Pqo5AVexMnJ6", "outputId": "471d79a3-b43a-4f12-c756-32eecfb596e4" }, "execution_count": 7, "outputs": [ { "output_type": "display_data", "data": { "text/plain": [ "[Graph: the same automaton for A\\.|B\\.|C\\., rendered after generation to show the path taken for the response 'B.']" ] }, "metadata": {} } ] } ] }