-
Notifications
You must be signed in to change notification settings - Fork 3
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
feat: add basic data fields and design #397
base: main
Are you sure you want to change the base?
Changes from all commits
750c673
7fac163
216e87a
62b0525
ebb8b14
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,10 +1,14 @@ | ||
import pkg_resources | ||
from django.conf import settings | ||
from django.template import Context, Template | ||
from django.utils.translation import gettext_lazy as _ | ||
from ol_openedx_chat.utils import is_aside_applicable_to_block | ||
from rest_framework import status as api_status | ||
from web_fragments.fragment import Fragment | ||
from xblock.core import XBlockAside | ||
|
||
BLOCK_PROBLEM_CATEGORY = "problem" | ||
MULTIPLE_CHOICE_TYPE = "multiplechoiceresponse" | ||
from webob.response import Response | ||
from xblock.core import XBlock, XBlockAside | ||
from xblock.fields import Boolean, Scope, String | ||
from xmodule.x_module import AUTHOR_VIEW, STUDENT_VIEW | ||
|
||
|
||
def get_resource_bytes(path): | ||
|
@@ -36,22 +40,72 @@ class OLChatAside(XBlockAside): | |
XBlock aside that enables OL AI Chat functionality for an XBlock | ||
""" | ||
|
||
@XBlockAside.aside_for("student_view") | ||
def student_view_aside(self, block, context=None): # noqa: ARG002 | ||
enabled = Boolean( | ||
display_name=_("Open Learning Chat enabled status"), | ||
default=False, | ||
scope=Scope.content, | ||
help=_("Indicates whether or not Open Learning chat is enabled for a block"), | ||
) | ||
chat_prompts = String( | ||
display_name=_("Open Learning Chat Prompt text"), | ||
default="", | ||
scope=Scope.content, | ||
help=_("Prompt hint text for chat in a block"), | ||
) | ||
Comment on lines
+49
to
+54
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. How are we going to handle multiple prompts? Comma separated maybe OR some other separator? There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This is just the initial design and implementation but based on the design we will do that based on new line maybe. But this will need a clear decision moving onwards. Again, this is the Figma design for it. |
||
additional_solution = String( | ||
display_name=_("Additional solution for problem"), | ||
default="", | ||
scope=Scope.content, | ||
help=_("Additional solution for the problem in context of chat"), | ||
) | ||
Comment on lines
+55
to
+60
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Is this field going to be the answer to the prompts? There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This is going to be the additional answer provided by the course team. We will probably still use the original answer feature from edX but this will serve as an additional answer. It comes from this Figma design. |
||
llm_model = String( | ||
display_name=_("Open Learning Chat selected LLM model"), | ||
default="", | ||
scope=Scope.content, | ||
help=_("Selected LLM model to be used for a block"), | ||
) | ||
|
||
@XBlockAside.aside_for(STUDENT_VIEW) | ||
def student_view_aside(self, block, context=None): | ||
""" | ||
Renders the aside contents for the student view | ||
""" # noqa: D401 | ||
|
||
# This is a workaround for those blocks which do not have has_author_view=True | ||
# because when a block does not define has_author_view=True in it, the only view | ||
# that gets rendered is student_view in place of author view. | ||
|
||
if getattr(self.runtime, "is_author_mode", False): | ||
return self.author_view_aside(block, context) | ||
|
||
fragment = Fragment("") | ||
fragment.add_content(render_template("static/html/student_view.html")) | ||
return fragment | ||
|
||
@XBlockAside.aside_for("author_view") | ||
@XBlockAside.aside_for(AUTHOR_VIEW) | ||
def author_view_aside(self, block, context=None): # noqa: ARG002 | ||
""" | ||
Renders the aside contents for the author view | ||
""" # noqa: D401 | ||
fragment = Fragment("") | ||
fragment.add_content(render_template("static/html/studio_view.html")) | ||
fragment.add_content( | ||
render_template( | ||
"static/html/studio_view.html", | ||
{ | ||
"is_enabled": self.enabled, | ||
"chat_prompts": self.chat_prompts, | ||
"selected_llm_model": self.llm_model, | ||
"additional_solution": self.additional_solution, | ||
"llm_models_list": list( | ||
settings.OL_CHAT_SETTINGS | ||
), # Converting dict keys into a list | ||
"block_id": block.location.block_id, # Passing this along as a unique key for checkboxes # noqa: E501 | ||
}, | ||
) | ||
) | ||
fragment.add_css(get_resource_bytes("static/css/studio.css")) | ||
fragment.add_javascript(get_resource_bytes("static/js/studio.js")) | ||
fragment.initialize_js("OLChatInit") | ||
return fragment | ||
|
||
@classmethod | ||
|
@@ -64,16 +118,20 @@ def should_apply_to_block(cls, block): | |
instances, the problem type of the given block needs to be retrieved in | ||
different ways. | ||
""" # noqa: D401 | ||
if getattr(block, "category", None) != BLOCK_PROBLEM_CATEGORY: | ||
return False | ||
block_problem_types = None | ||
# LMS passes in the block instance with `problem_types` as a property of | ||
# `descriptor` | ||
if hasattr(block, "descriptor"): | ||
block_problem_types = getattr(block.descriptor, "problem_types", None) | ||
# Studio passes in the block instance with `problem_types` as a top-level property # noqa: E501 | ||
elif hasattr(block, "problem_types"): | ||
block_problem_types = block.problem_types | ||
# We only want this aside to apply to the block if the problem is multiple | ||
# choice AND there are not multiple problem types. | ||
return block_problem_types == {MULTIPLE_CHOICE_TYPE} | ||
return is_aside_applicable_to_block(block=block) | ||
|
||
@XBlock.handler | ||
def update_chat_config(self, request, suffix=""): # noqa: ARG002 | ||
"""Update the chat configurations""" | ||
try: | ||
posted_data = request.json | ||
except ValueError: | ||
return Response( | ||
"Invalid request body", status=api_status.HTTP_400_BAD_REQUEST | ||
) | ||
|
||
self.chat_prompts = posted_data.get("chat_prompts", "") | ||
self.llm_model = posted_data.get("selected_llm_model", "") | ||
self.enabled = posted_data.get("is_enabled", False) | ||
self.additional_solution = posted_data.get("additional_solution", "") | ||
return Response() |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
# The list should contain all the block types for which the chat should be | ||
# applicable. If a block has sub-blocks or a sub-category, that should be added to the list | ||
CHAT_APPLICABLE_BLOCKS = ["problem", "video"] |
This file was deleted.
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,11 +1,21 @@ | ||
# noqa: INP001 | ||
|
||
"""Settings to provide to edX""" | ||
|
||
|
||
def plugin_settings(settings): # noqa: ARG001 | ||
def plugin_settings(settings): | ||
""" | ||
Populate settings | ||
Populate common settings | ||
""" | ||
env_tokens = getattr(settings, "ENV_TOKENS", {}) | ||
|
||
# .. setting_name: OL_CHAT_SETTINGS | ||
# .. setting_default: {} | ||
# .. setting_description: A dictionary containing the LLM model names as dictionary | ||
# keys and model API tokens/keys as values. These dictionary keys would be used | ||
# as LLM model names in the chat settings form in CMS. | ||
# | ||
# A sample setting would look like: | ||
# .. {"MODEL_NAME1": API_KEY, "MODEL_NAME2": API_KEY} | ||
|
||
DEFAULT_AUTO_FIELD = "django.db.models.AutoField" | ||
settings.OL_CHAT_SETTINGS = env_tokens.get("OL_CHAT_SETTINGS", {}) |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,10 @@ | ||
# noqa: INP001 | ||
|
||
"""Settings to provide to edX""" | ||
|
||
|
||
def plugin_settings(settings): | ||
""" | ||
Populate devstack settings | ||
""" | ||
settings.OL_CHAT_SETTINGS = {"GPT1": "TEST", "GPT2": "123", "GPT3": ""} |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,55 @@ | ||
.form-container { | ||
margin: 20px auto; | ||
padding: 20px; | ||
background-color: #f9f9f9; | ||
} | ||
.form-container label { | ||
display: block; | ||
margin: 18px 0 8px; | ||
font-weight: 400; | ||
font-size: 14px; | ||
font-style: normal; | ||
} | ||
.form-container input, | ||
.form-container select, | ||
.form-container textarea { | ||
width: 100%; | ||
min-height: 96px; | ||
font-style: normal; | ||
font-size: 16px; | ||
font-weight: 400; | ||
} | ||
.form-container button { | ||
width: 100%; | ||
padding: 8px; | ||
margin-bottom: 10px; | ||
} | ||
|
||
.checkbox-container { | ||
display: flex; | ||
align-items: center; | ||
padding: 16px 0; | ||
} | ||
.enabled-check { | ||
min-height: 16px !important; | ||
width: auto !important; | ||
margin-right: 8px; | ||
} | ||
.enabled-label { | ||
margin: 0 !important; | ||
} | ||
|
||
.sub-label { | ||
font-size: 12px !important; | ||
font-weight: 400 !important; | ||
margin: 2px 0 8px !important; | ||
|
||
} | ||
|
||
.llm-dropdown { | ||
height: 48px !important; | ||
min-height: 48px !important; | ||
} | ||
.enabled-checkbox { | ||
min-height: auto !important; | ||
} |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,25 +1,32 @@ | ||
<div> | ||
<div class="form-container"> | ||
<form id="ol-chat-form" data-block-id="{{ block_id }}"> | ||
<!-- Text Fields --> | ||
<label for="chat-prompts">Enter Prompts to assist students</label> | ||
<textarea id="chat-prompts" name="chat-prompts" placeholder="Where can I find more information on Large Language Models?" required>{{ chat_prompts }}</textarea> | ||
<label class="sub-label">Learners will see these prompts when using the chatbot feature</label> | ||
<!-- Dropdown --> | ||
<label for="llm-model-dropdown">Choose the Large Language Model (LLM) to use for this question</label> | ||
<select class="llm-dropdown" id="llm-model-dropdown" name="llm-model-dropdown" value="{{ llm_model }}" required> | ||
<option>-- Select an LLM Model --</option> | ||
{% for llm_model in llm_models_list %} | ||
<option {% if selected_llm_model == llm_model %} selected {% endif %}>{{ llm_model }}</option> | ||
{% endfor %} | ||
</select> | ||
<label class="sub-label">Set this so that AI instructions can be more formal, instructive, conversational, technical based on your choice</label> | ||
|
||
<div class="textbox-container"> | ||
<label for="gpt_version">Add a GPT model</label> | ||
<input | ||
type="text" | ||
id="gpt_version" | ||
name="gpt_verson" | ||
placeholder="Enter GPT model version" | ||
pattern="[A-Za-z0-9]" | ||
title="Please add a GPT model name"> | ||
</div> | ||
<!-- Text Fields --> | ||
<label for="additional-solution">Enter the solution</label> | ||
<textarea type="text" id="additional-solution" name="additional-solution" placeholder="Add additional solution">{{ additional_solution }}</textarea> | ||
|
||
<div class="textbox-container"> | ||
<label for="add_prompt">Add GPT prompt</label> | ||
<input | ||
type="text" | ||
id="add_prompt" | ||
name="add_prompt" | ||
placeholder="Pleae add prompt context" | ||
pattern="[A-Za-z0-9]" | ||
title="Please add a GPT Prompt"> | ||
</div> | ||
<label class="sub-label">The more you add here, the more helpful it will be to Learners. We will train our Model to understand the solution, understand potential mistakes learners might make, and assist the Learner without giving direct answers. Links can also be used here to assist Learners find the content from previous course pages or websites. </label> | ||
|
||
<!-- Checkbox --> | ||
<div class="checkbox-container"> | ||
<input class="enabled-check" type="checkbox" id="is-enabled-{{ block_id }}" name="is-enabled" {% if is_enabled %} checked {% endif %}/> | ||
<label class="enabled-label" for="is-enabled-{{ block_id }}">Enable</label> | ||
</div> | ||
|
||
<!-- Save Button --> | ||
<button type="submit" id="save-chat-config">Save</button> | ||
</form> | ||
</div> |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Another option with the Devstack, if open-edx-plugins is in the src folder, we can go to shell and do
pip install /edx/src/open-edx-plugins/dist/ol-chat...tar.gz
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I think we should avoid any devstack-related recommendations. I mostly ported over the docs from other plugins but in any case we should keep devstack recommendations to a minimum.