diff --git a/src/ol_openedx_chat/README.rst b/src/ol_openedx_chat/README.rst
index 70ee2428..20dbb5ab 100644
--- a/src/ol_openedx_chat/README.rst
+++ b/src/ol_openedx_chat/README.rst
@@ -1,7 +1,7 @@
 ol-openedx-chat
 ###############
 
-An xBlock aside to add MIT Open Learning chat into xBlocks
+An xBlock aside to add MIT Open Learning chat into xBlocks.
 
 Purpose
@@ -9,15 +9,73 @@ Purpose
 
 MIT's AI chatbot for Open edX
 
-Getting Started with Development
-********************************
+Setup
+=====
 
+1) Add OL chat as a dependency
+------------------------------
 
-Deploying
-*********
+For local development, you can use one of the following options to add
+this as a dependency in the ``edx-platform`` repo:
+
+- **Install directly via pip.**
+
+  ::
+
+      # From the devstack directory, run bash in a running LMS container...
+      make dev.shell.lms
+
+      # In bash, install the package
+      source /edx/app/edxapp/edxapp_env && pip install ol-openedx-chat==<version>
+
+      # Do the same for Studio
+      make dev.shell.studio
+
+      # In bash, install the package
+      source /edx/app/edxapp/edxapp_env && pip install ol-openedx-chat==<version>
+
+- **Build the package locally and install it.**
+
+  Follow these steps in a terminal on your machine:
+
+  1. Navigate to the ``open-edx-plugins`` directory.
+
+  2. If you haven't done so already, run ``pants build``.
+
+  3. Run ``pants package ::``. This creates a ``dist`` directory inside the
+     ``open-edx-plugins`` directory with ``.whl`` and ``.tar.gz`` packages for
+     ``ol_openedx_chat`` and the other ``ol_openedx_*`` plugins in
+     ``open-edx-plugins/src``.
+
+  4. Move/copy any of the ``.whl`` or ``.tar.gz`` files for this plugin that
+     were generated in the above step to the machine/container running Open edX
+     (NOTE: if running devstack via Docker, you can use ``docker cp`` to copy
+     these files into your LMS or CMS containers).
+
+  5. Run a shell in the machine/container running Open edX, and install this
+     plugin using pip.
+
+Configuration
+=============
+
+1. edx-platform configuration
+-----------------------------
+
+Add the following configuration values to the config file in Open edX. For any
+release after Juniper, that config file is ``/edx/etc/lms.yml``. These should be
+added at the top level. **Ask a fellow developer or devops for these values.**
+
+.. code-block:: yaml
+
+    OL_CHAT_SETTINGS: {<llm_model_name>: <api_token>, <llm_model_name_2>: <api_token_2>}
+
+2. Add database records
+-----------------------
+
+- Create a record for the ``XBlockAsidesConfig`` model
+  (LMS admin URL: ``/admin/lms_xblock/xblockasidesconfig/``).
+
+- Create a record in the ``StudioConfig`` model
+  (CMS admin URL: ``/admin/xblock_config/studioconfig/``).
-Getting Help
-************
 
 Documentation
 =============
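Once the package is installed and ``OL_CHAT_SETTINGS`` is in place, a quick
sanity check is to read the setting back from a Django shell. A minimal sketch,
assuming a devstack-style deployment where ``python manage.py lms shell`` is
available::

    # Run inside an LMS (or Studio) shell, e.g. `python manage.py lms shell`
    from django.conf import settings

    # Should print the dict of LLM model names -> API tokens from lms.yml
    print(settings.OL_CHAT_SETTINGS)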
diff --git a/src/ol_openedx_chat/apps.py b/src/ol_openedx_chat/apps.py
index 96fc2f31..9297c53b 100644
--- a/src/ol_openedx_chat/apps.py
+++ b/src/ol_openedx_chat/apps.py
@@ -18,10 +18,14 @@ class OLOpenedxChatConfig(AppConfig):
         PluginSettings.CONFIG: {
             ProjectType.LMS: {
                 SettingsType.COMMON: {PluginSettings.RELATIVE_PATH: "settings.common"},
+                SettingsType.DEVSTACK: {
+                    PluginSettings.RELATIVE_PATH: "settings.devstack"
+                },
             },
             ProjectType.CMS: {
-                SettingsType.COMMON: {
-                    PluginSettings.RELATIVE_PATH: "settings.cms_settings"
+                SettingsType.COMMON: {PluginSettings.RELATIVE_PATH: "settings.common"},
+                SettingsType.DEVSTACK: {
+                    PluginSettings.RELATIVE_PATH: "settings.devstack"
                 },
             },
         },
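The ``PluginSettings.CONFIG`` mapping above tells edx-platform's plugin
machinery which settings module to load for each service (LMS/CMS) and settings
flavor (common/devstack). Roughly, the platform resolves each ``RELATIVE_PATH``
against the plugin package and calls its ``plugin_settings`` hook. A sketch of
that resolution with a hypothetical ``load_plugin_settings`` helper (the real
loader lives in the edx-platform plugin framework)::

    import importlib

    def load_plugin_settings(settings, relative_path):
        """Hypothetical stand-in for the edx-platform plugin settings loader."""
        module = importlib.import_module(f"ol_openedx_chat.{relative_path}")
        # Every plugin settings module exposes a plugin_settings(settings) hook
        module.plugin_settings(settings)

    # For example, the LMS devstack flavor resolves to:
    # load_plugin_settings(settings, "settings.devstack")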
diff --git a/src/ol_openedx_chat/block.py b/src/ol_openedx_chat/block.py
index a6f56f04..8f8d911a 100644
--- a/src/ol_openedx_chat/block.py
+++ b/src/ol_openedx_chat/block.py
@@ -1,10 +1,14 @@
 import pkg_resources
+from django.conf import settings
 from django.template import Context, Template
+from django.utils.translation import gettext_lazy as _
+from ol_openedx_chat.utils import is_aside_applicable_to_block
+from rest_framework import status as api_status
 from web_fragments.fragment import Fragment
-from xblock.core import XBlockAside
-
-BLOCK_PROBLEM_CATEGORY = "problem"
-MULTIPLE_CHOICE_TYPE = "multiplechoiceresponse"
+from webob.response import Response
+from xblock.core import XBlock, XBlockAside
+from xblock.fields import Boolean, Scope, String
+from xmodule.x_module import AUTHOR_VIEW, STUDENT_VIEW
 
 
 def get_resource_bytes(path):
@@ -36,22 +40,72 @@ class OLChatAside(XBlockAside):
     XBlock aside that enables OL AI Chat functionality for an XBlock
     """
 
-    @XBlockAside.aside_for("student_view")
-    def student_view_aside(self, block, context=None):  # noqa: ARG002
+    enabled = Boolean(
+        display_name=_("Open Learning Chat enabled status"),
+        default=False,
+        scope=Scope.content,
+        help=_("Indicates whether or not Open Learning chat is enabled for a block"),
+    )
+    chat_prompts = String(
+        display_name=_("Open Learning Chat prompt text"),
+        default="",
+        scope=Scope.content,
+        help=_("Prompt hint text for chat in a block"),
+    )
+    additional_solution = String(
+        display_name=_("Additional solution for problem"),
+        default="",
+        scope=Scope.content,
+        help=_("Additional solution for the problem in the context of chat"),
+    )
+    llm_model = String(
+        display_name=_("Open Learning Chat selected LLM model"),
+        default="",
+        scope=Scope.content,
+        help=_("Selected LLM model to be used for a block"),
+    )
+
+    @XBlockAside.aside_for(STUDENT_VIEW)
+    def student_view_aside(self, block, context=None):
         """
         Renders the aside contents for the student view
         """  # noqa: D401
+        # Workaround for blocks that do not set has_author_view=True: when a
+        # block does not define has_author_view=True, Studio renders
+        # student_view in place of the author view.
+        if getattr(self.runtime, "is_author_mode", False):
+            return self.author_view_aside(block, context)
+
         fragment = Fragment("")
         fragment.add_content(render_template("static/html/student_view.html"))
         return fragment
 
-    @XBlockAside.aside_for("author_view")
+    @XBlockAside.aside_for(AUTHOR_VIEW)
     def author_view_aside(self, block, context=None):  # noqa: ARG002
         """
         Renders the aside contents for the author view
         """  # noqa: D401
         fragment = Fragment("")
-        fragment.add_content(render_template("static/html/studio_view.html"))
+        fragment.add_content(
+            render_template(
+                "static/html/studio_view.html",
+                {
+                    "is_enabled": self.enabled,
+                    "chat_prompts": self.chat_prompts,
+                    "selected_llm_model": self.llm_model,
+                    "additional_solution": self.additional_solution,
+                    # Converting dict keys into a list of model names
+                    "llm_models_list": list(settings.OL_CHAT_SETTINGS),
+                    # Passed along as a unique key for checkboxes
+                    "block_id": block.location.block_id,
+                },
+            )
+        )
+        fragment.add_css(get_resource_bytes("static/css/studio.css"))
+        fragment.add_javascript(get_resource_bytes("static/js/studio.js"))
+        fragment.initialize_js("OLChatInit")
         return fragment
 
     @classmethod
@@ -64,16 +118,20 @@ def should_apply_to_block(cls, block):
         instances, the problem type of the given block needs to be retrieved in
         different ways.
         """  # noqa: D401
-        if getattr(block, "category", None) != BLOCK_PROBLEM_CATEGORY:
-            return False
-        block_problem_types = None
-        # LMS passes in the block instance with `problem_types` as a property of
-        # `descriptor`
-        if hasattr(block, "descriptor"):
-            block_problem_types = getattr(block.descriptor, "problem_types", None)
-        # Studio passes in the block instance with `problem_types` as a
-        # top-level property
-        elif hasattr(block, "problem_types"):
-            block_problem_types = block.problem_types
-        # We only want this aside to apply to the block if the problem is
-        # multiple choice AND there are not multiple problem types.
-        return block_problem_types == {MULTIPLE_CHOICE_TYPE}
+        return is_aside_applicable_to_block(block=block)
+
+    @XBlock.handler
+    def update_chat_config(self, request, suffix=""):  # noqa: ARG002
+        """Update the chat configuration"""
+        try:
+            posted_data = request.json
+        except ValueError:
+            return Response(
+                "Invalid request body", status=api_status.HTTP_400_BAD_REQUEST
+            )
+
+        self.chat_prompts = posted_data.get("chat_prompts", "")
+        self.llm_model = posted_data.get("selected_llm_model", "")
+        self.enabled = posted_data.get("is_enabled", False)
+        self.additional_solution = posted_data.get("additional_solution", "")
+        return Response()
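The new ``update_chat_config`` handler accepts a JSON body whose keys mirror the
aside fields. A minimal sketch of that contract using ``unittest.mock``
stand-ins (hypothetical test scaffolding; a real test would build the aside
through the XBlock test utilities, and the import only works inside an
edx-platform environment)::

    from unittest import mock

    from ol_openedx_chat.block import OLChatAside

    # Stand-ins for a configured aside instance and the incoming webob request
    aside = mock.Mock()
    request = mock.Mock()
    request.json = {
        "chat_prompts": "Explain this problem step by step",
        "selected_llm_model": "GPT1",
        "is_enabled": True,
        "additional_solution": "",
    }

    # Call the handler unbound, with the stand-in instance as `self`
    response = OLChatAside.update_chat_config(aside, request)
    assert response.status_code == 200
    assert aside.chat_prompts == "Explain this problem step by step"
    assert aside.enabled is True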
diff --git a/src/ol_openedx_chat/constants.py b/src/ol_openedx_chat/constants.py
new file mode 100644
index 00000000..8e8670ba
--- /dev/null
+++ b/src/ol_openedx_chat/constants.py
@@ -0,0 +1,3 @@
+# This list should contain all the block types for which the chat should be
+# applicable. If a block has sub-blocks or a sub-category, add those here too.
+CHAT_APPLICABLE_BLOCKS = ["problem", "video"]
diff --git a/src/ol_openedx_chat/settings/cms_settings.py b/src/ol_openedx_chat/settings/cms_settings.py
deleted file mode 100644
index c268b6eb..00000000
--- a/src/ol_openedx_chat/settings/cms_settings.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# noqa: INP001
-"""Settings to provide to edX"""
-
-
-def plugin_settings(settings):  # noqa: ARG001
-    """
-    Populate CMS settings
-    """
-
-
-DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
diff --git a/src/ol_openedx_chat/settings/common.py b/src/ol_openedx_chat/settings/common.py
index cc7f6433..23a0ce68 100644
--- a/src/ol_openedx_chat/settings/common.py
+++ b/src/ol_openedx_chat/settings/common.py
@@ -1,11 +1,21 @@
 # noqa: INP001
+
 """Settings to provide to edX"""
 
 
-def plugin_settings(settings):  # noqa: ARG001
+def plugin_settings(settings):
     """
-    Populate settings
+    Populate common settings
     """
+    env_tokens = getattr(settings, "ENV_TOKENS", {})
+    # .. setting_name: OL_CHAT_SETTINGS
+    # .. setting_default: {}
+    # .. setting_description: A dictionary containing the LLM model names as
+    #    dictionary keys and model API tokens/keys as values. The dictionary
+    #    keys are shown as LLM model names in the chat settings form in the CMS.
+    #
+    #    A sample setting would look like:
+    #    {"MODEL_NAME1": API_KEY, "MODEL_NAME2": API_KEY}
 
-DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
+    settings.OL_CHAT_SETTINGS = env_tokens.get("OL_CHAT_SETTINGS", {})
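The common settings hook above pulls ``OL_CHAT_SETTINGS`` out of
``ENV_TOKENS``, which is how edx-platform exposes the parsed YAML config to
plugins. A minimal sketch of that flow, with ``FakeSettings`` as a hypothetical
stand-in for the Django settings object edx-platform passes in::

    from ol_openedx_chat.settings.common import plugin_settings

    class FakeSettings:
        """Stand-in for the Django settings object."""
        # edx-platform loads /edx/etc/lms.yml into ENV_TOKENS
        ENV_TOKENS = {
            "OL_CHAT_SETTINGS": {"MODEL_NAME1": "api-key-1", "MODEL_NAME2": "api-key-2"},
        }

    fake_settings = FakeSettings()
    plugin_settings(fake_settings)
    assert fake_settings.OL_CHAT_SETTINGS == FakeSettings.ENV_TOKENS["OL_CHAT_SETTINGS"]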
{"MODEL_NAME1": API_KEY, "MODEL_NAME2": API_KEY} -DEFAULT_AUTO_FIELD = "django.db.models.AutoField" + settings.OL_CHAT_SETTINGS = env_tokens.get("OL_CHAT_SETTINGS", {}) diff --git a/src/ol_openedx_chat/settings/devstack.py b/src/ol_openedx_chat/settings/devstack.py new file mode 100644 index 00000000..84291adf --- /dev/null +++ b/src/ol_openedx_chat/settings/devstack.py @@ -0,0 +1,10 @@ +# noqa: INP001 + +"""Settings to provide to edX""" + + +def plugin_settings(settings): + """ + Populate devstack settings + """ + settings.OL_CHAT_SETTINGS = {"GPT1": "TEST", "GPT2": "123", "GPT3": ""} diff --git a/src/ol_openedx_chat/static/css/studio.css b/src/ol_openedx_chat/static/css/studio.css new file mode 100644 index 00000000..a400aa3e --- /dev/null +++ b/src/ol_openedx_chat/static/css/studio.css @@ -0,0 +1,55 @@ +.form-container { + margin: 20px auto; + padding: 20px; + background-color: #f9f9f9; +} +.form-container label { + display: block; + margin: 18px 0 8px; + font-weight: 400; + font-size: 14px; + font-style: normal; +} +.form-container input, +.form-container select, +.form-container textarea { + width: 100%; + min-height: 96px; + font-style: normal; + font-size: 16px; + font-weight: 400; +} +.form-container button { + width: 100%; + padding: 8px; + margin-bottom: 10px; +} + +.checkbox-container { + display: flex; + align-items: center; + padding: 16px 0; +} +.enabled-check { + min-height: 16px !important; + width: auto !important; + margin-right: 8px; +} +.enabled-label { + margin: 0 !important; +} + +.sub-label { + font-size: 12px !important; + font-weight: 400 !important; + margin: 2px 0 8px !important; + +} + +.llm-dropdown { + height: 48px !important; + min-height: 48px !important; +} +.enabled-checkbox { + min-height: auto !important; +} diff --git a/src/ol_openedx_chat/static/html/studio_view.html b/src/ol_openedx_chat/static/html/studio_view.html index a5265085..68d98f25 100644 --- a/src/ol_openedx_chat/static/html/studio_view.html +++ b/src/ol_openedx_chat/static/html/studio_view.html @@ -1,25 +1,32 @@ -
diff --git a/src/ol_openedx_chat/static/html/studio_view.html b/src/ol_openedx_chat/static/html/studio_view.html
index a5265085..68d98f25 100644
--- a/src/ol_openedx_chat/static/html/studio_view.html
+++ b/src/ol_openedx_chat/static/html/studio_view.html
@@ -1,25 +1,32 @@
+<div class="form-container">
+  <form id="ol-chat-form" data-block-id="{{ block_id }}">
+    <div class="checkbox-container">
+      <input type="checkbox" id="is-enabled-{{ block_id }}" class="enabled-check" {% if is_enabled %}checked{% endif %} />
+      <label for="is-enabled-{{ block_id }}" class="enabled-label">Enable Open Learning Chat</label>
+    </div>
+
+    <label for="chat-prompts">Chat prompts</label>
+    <textarea id="chat-prompts">{{ chat_prompts }}</textarea>
+
+    <label for="additional-solution">Additional solution</label>
+    <textarea id="additional-solution">{{ additional_solution }}</textarea>
+
+    <label for="llm-model-dropdown">LLM model</label>
+    <select id="llm-model-dropdown" class="llm-dropdown">
+      {% for llm_model in llm_models_list %}
+      <option value="{{ llm_model }}" {% if llm_model == selected_llm_model %}selected{% endif %}>{{ llm_model }}</option>
+      {% endfor %}
+    </select>
+
+    <button type="submit">Save</button>
+  </form>
+</div>
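The template above is rendered server-side by the ``render_template`` helper in
``block.py`` using Django's template engine. A standalone sketch of the same
render, with placeholder context values and a minimal Django configuration
(inside edx-platform the engine is already configured; the file path assumes
the repo root as the working directory)::

    import django
    from django.conf import settings
    from django.template import Context, Template

    settings.configure(
        TEMPLATES=[{"BACKEND": "django.template.backends.django.DjangoTemplates"}]
    )
    django.setup()

    with open("src/ol_openedx_chat/static/html/studio_view.html") as f:
        template = Template(f.read())

    print(template.render(Context({
        "is_enabled": True,
        "chat_prompts": "",
        "selected_llm_model": "GPT1",
        "additional_solution": "",
        "llm_models_list": ["GPT1", "GPT2", "GPT3"],
        "block_id": "0123456789abcdef",  # placeholder block id
    })))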
diff --git a/src/ol_openedx_chat/static/js/BUILD b/src/ol_openedx_chat/static/js/BUILD
index 3afebf49..85d95faf 100644
--- a/src/ol_openedx_chat/static/js/BUILD
+++ b/src/ol_openedx_chat/static/js/BUILD
@@ -1,4 +1,4 @@
 resources(
     name="ol_chat_js",
-    sources=["src_js/*.js","lib/*.js"],
+    sources=["*.js"],
 )
diff --git a/src/ol_openedx_chat/static/js/studio.js b/src/ol_openedx_chat/static/js/studio.js
new file mode 100644
index 00000000..55e25188
--- /dev/null
+++ b/src/ol_openedx_chat/static/js/studio.js
@@ -0,0 +1,43 @@
+(function($) {
+  'use strict';
+
+  function OpenLearningChatView(runtime, element) {
+    // Sometimes the element is a jQuery object instead of a DOM object,
+    // which leads to a broken chat form reference
+    if (element instanceof jQuery) {
+      element = element[0];
+    }
+    const chatForm = element.querySelector("#ol-chat-form");
+    chatForm.addEventListener("submit", function(event) {
+      event.preventDefault();
+      var studioRuntime = new window.StudioRuntime.v1();
+
+      const chatPromptsField = element.querySelector("#chat-prompts");
+      const llmModelDropdown = element.querySelector("#llm-model-dropdown");
+      const additionalSolutionField = element.querySelector("#additional-solution");
+      const enabledCheck = element.querySelector("#is-enabled-" + chatForm.dataset.blockId);
+
+      // Get the handler URL
+      const handlerUrl = studioRuntime.handlerUrl(element, 'update_chat_config');
+      var dataToPost = {
+        "chat_prompts": chatPromptsField.value,
+        "selected_llm_model": llmModelDropdown.value,
+        "is_enabled": enabledCheck.checked,
+        "additional_solution": additionalSolutionField.value
+      };
+
+      $.ajax({
+        url: handlerUrl,
+        method: 'POST',
+        data: JSON.stringify(dataToPost),
+        contentType: 'application/json; charset=utf-8',
+        success: function(response) {
+          alert("Saved successfully!");
+        },
+        error: function(xhr, status, error) {
+          alert("There was an error saving the details. Please try again.");
+        }
+      });
+    });
+  }
+
+  function initializeOLChat(runtime, element) {
+    return new OpenLearningChatView(runtime, element);
+  }
+
+  window.OLChatInit = initializeOLChat;
+}($));
diff --git a/src/ol_openedx_chat/utils.py b/src/ol_openedx_chat/utils.py
new file mode 100644
index 00000000..e4c70884
--- /dev/null
+++ b/src/ol_openedx_chat/utils.py
@@ -0,0 +1,8 @@
+"""Utility methods for the AI chat"""
+
+from ol_openedx_chat.constants import CHAT_APPLICABLE_BLOCKS
+
+
+def is_aside_applicable_to_block(block):
+    """Check if the xBlock should support AI chat"""
+    return getattr(block, "category", None) in CHAT_APPLICABLE_BLOCKS
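With ``should_apply_to_block`` now delegating to this helper, applicability
reduces to a category membership test against ``CHAT_APPLICABLE_BLOCKS``. A
small sketch with stand-in block objects::

    from types import SimpleNamespace

    from ol_openedx_chat.utils import is_aside_applicable_to_block

    assert is_aside_applicable_to_block(SimpleNamespace(category="problem"))
    assert is_aside_applicable_to_block(SimpleNamespace(category="video"))
    assert not is_aside_applicable_to_block(SimpleNamespace(category="html"))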