{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "c09118cb-71ca-484d-a58c-2565c30a8923",
   "metadata": {
    "tags": []
   },
   "source": [
    "## Conversation example using pure API"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "f1f84818-bef3-401a-ac48-c73bcddc1313",
   "metadata": {},
   "source": [
    "You must have a service account with the following roles:\n",
    "* Cloud Datastore User\n",
    "* Service Usage Consumer\n",
    "* Storage Admin\n",
    "* Vertex AI Administrator\n",
    "\n",
    "You'll need the key credential for your service account installed on this instance in a known location.\n",
    "\n",
    "Install packages"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "4948cefd-533e-4782-828a-ed2a2c0201cd",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "# %pip (rather than !pip) installs into this kernel's own environment.\n",
    "%pip install google-cloud-firestore google-cloud-aiplatform"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "115e1223-76da-43ee-93f4-16b12f715b8f",
   "metadata": {},
   "source": [
    "## Test Model for being alive"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "eec81a5f-4fed-44fb-8ddd-9e8415a9cc48",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "import vertexai\n",
    "from vertexai.generative_models import GenerativeModel\n",
    "\n",
    "# init() with no arguments falls back to Application Default Credentials;\n",
    "# pass project=/location= explicitly if your environment has no default project.\n",
    "vertexai.init()\n",
    "model = GenerativeModel(\"gemini-pro\")\n",
    "response = model.generate_content(\"Hello Gemini\")\n",
    "print(\"Model response:\", response.text)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "8c554d54-4a95-469f-9ce1-818e1f0100ca",
   "metadata": {},
   "source": [
    "## Example with Conversational logic: Pure Vertex AI API"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "4fb37138-15b5-4c9a-9140-27ff2b4c2809",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "import os\n",
    "import numpy as np\n",
    "from google.cloud import firestore\n",
    "from vertexai.generative_models import GenerativeModel, ChatSession\n",
    "from vertexai.language_models import TextEmbeddingModel\n",
    "import vertexai\n",
    "\n",
    "# === Project Setup ===\n",
    "PROJECT_ID = \"YOUR PROJECT ID\"\n",
    "LOCATION = \"YOUR REGION\"\n",
    "FIRESTORE_DB = \"YOUR FIRESTORE DATABASE NAME\"\n",
    "SERVICE_ACCOUNT = \"YOUR SERVICE ACCOUNT KEY FILE LOCATION\"\n",
    "\n",
    "EMBEDDING_MODEL = \"textembedding-gecko@latest\"\n",
    "CHAT_MODEL = \"gemini-pro\"\n",
    "\n",
    "os.environ[\"GOOGLE_APPLICATION_CREDENTIALS\"] = SERVICE_ACCOUNT\n",
    "\n",
    "vertexai.init(project=PROJECT_ID, location=LOCATION)\n",
    "db = firestore.Client(project=PROJECT_ID, database=FIRESTORE_DB)\n",
    "\n",
    "# === Memory ===\n",
    "def get_conversation_history(session_id):\n",
    "    \"\"\"Return the stored message list for a session, or [] if none exists.\"\"\"\n",
    "    doc = db.collection(\"conversations\").document(session_id).get()\n",
    "    return doc.to_dict().get(\"messages\", []) if doc.exists else []\n",
    "\n",
    "def save_message(session_id, role, content):\n",
    "    \"\"\"Append one {role, content} message to the session document.\n",
    "\n",
    "    NOTE(review): read-modify-write is not atomic; concurrent writers could\n",
    "    drop messages. firestore.ArrayUnion would be atomic, but it deduplicates\n",
    "    identical entries, so the simple approach is kept for a single user.\n",
    "    \"\"\"\n",
    "    messages = get_conversation_history(session_id)\n",
    "    messages.append({\"role\": role, \"content\": content})\n",
    "    db.collection(\"conversations\").document(session_id).set({\"messages\": messages})\n",
    "\n",
    "# === Embedding ===\n",
    "_embedding_model = None  # loaded lazily, once per kernel (was reloaded per call)\n",
    "\n",
    "def generate_embedding(text):\n",
    "    \"\"\"Embed a single string, caching the embedding model across calls.\"\"\"\n",
    "    global _embedding_model\n",
    "    if _embedding_model is None:\n",
    "        _embedding_model = TextEmbeddingModel.from_pretrained(EMBEDDING_MODEL)\n",
    "    return _embedding_model.get_embeddings([text])[0].values\n",
    "\n",
    "def cosine_similarity(a, b):\n",
    "    \"\"\"Cosine similarity of two vectors; returns 0.0 when either has zero norm.\"\"\"\n",
    "    denom = np.linalg.norm(a) * np.linalg.norm(b)\n",
    "    return float(np.dot(a, b) / denom) if denom else 0.0\n",
    "\n",
    "def search_embeddings(query, top_n=3):\n",
    "    \"\"\"Return up to top_n stored docs most similar to the query embedding.\n",
    "\n",
    "    NOTE(review): streams the whole 'embeddings' collection per query —\n",
    "    fine for small collections; use Firestore vector search at scale.\n",
    "    \"\"\"\n",
    "    query_embedding = generate_embedding(query)\n",
    "    results = []\n",
    "    for doc in db.collection(\"embeddings\").stream():\n",
    "        data = doc.to_dict()\n",
    "        embedding_field = data.get(\"embedding\")\n",
    "        # Stored embeddings may be a plain list or a {\"values\": [...]} dict.\n",
    "        if isinstance(embedding_field, list):\n",
    "            embedding = embedding_field\n",
    "        elif isinstance(embedding_field, dict) and \"values\" in embedding_field:\n",
    "            embedding = embedding_field[\"values\"]\n",
    "        else:\n",
    "            continue\n",
    "        score = cosine_similarity(query_embedding, embedding)\n",
    "        results.append((score, data))\n",
    "    results.sort(reverse=True, key=lambda x: x[0])\n",
    "    return [doc for score, doc in results[:top_n]]\n",
    "\n",
    "# === Ask The Model ===\n",
    "def ask_model(session_id, user_input):\n",
    "    \"\"\"Answer user_input using retrieved context plus conversation memory,\n",
    "    then persist both sides of the turn to Firestore.\"\"\"\n",
    "    # Load history\n",
    "    history_data = get_conversation_history(session_id)\n",
    "    history_text = \"\\n\".join(f\"{m['role'].capitalize()}: {m['content']}\" for m in history_data)\n",
    "\n",
    "    # Search relevant context\n",
    "    relevant_docs = search_embeddings(user_input)\n",
    "    context = \"\\n\".join(doc.get(\"combined_text\", \"\") for doc in relevant_docs)\n",
    "\n",
    "    # Compose prompt (trailing spaces keep the concatenated sentences apart)\n",
    "    system_prompt = (\n",
    "        \"You are Model, an AI singularity with a dry sense of humor. \"\n",
    "        \"Be concise, and informative. Use context and memory to answer. \"\n",
    "        \"If unsure, say so. \"\n",
    "        \"**You must respond in 100 words or fewer.** \"\n",
    "    )\n",
    "\n",
    "    full_prompt = f\"\"\"{system_prompt}\n",
    "\n",
    "Relevant Info:\n",
    "{context}\n",
    "\n",
    "Conversation so far:\n",
    "{history_text}\n",
    "\n",
    "User: {user_input}\n",
    "Model (100 words or less):\"\"\"\n",
    "\n",
    "    # Chat with Gemini\n",
    "    model = GenerativeModel(CHAT_MODEL)\n",
    "    chat = model.start_chat()\n",
    "    response = chat.send_message(full_prompt)\n",
    "\n",
    "    # Save turn\n",
    "    save_message(session_id, \"user\", user_input)\n",
    "    save_message(session_id, \"model\", response.text.strip())\n",
    "\n",
    "    return response.text.strip()\n",
    "\n",
    "# === Example Usage ===\n",
    "if __name__ == \"__main__\":\n",
    "    session_id = \"test_session\"\n",
    "    query1 = \"YOUR CONTENT RELEVANT QUESTION HERE\"\n",
    "    print(\"User:\", query1)\n",
    "    print(\"Model:\", ask_model(session_id, query1))\n",
    "\n",
    "    query2 = \"YOUR FOLLOWUP QUESTION HERE\"\n",
    "    print(\"\\nUser:\", query2)\n",
    "    print(\"Model:\", ask_model(session_id, query2))"
   ]
  }
 ],
 "metadata": {
  "environment": {
   "kernel": "conda-base-py",
   "name": "workbench-notebooks.m129",
   "type": "gcloud",
   "uri": "us-docker.pkg.dev/deeplearning-platform-release/gcr.io/workbench-notebooks:m129"
  },
  "kernelspec": {
   "display_name": "Python 3 (ipykernel) (Local)",
   "language": "python",
   "name": "conda-base-py"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.16"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}