Load local yaml first, then look in resources.
dylanhogg committed Jan 31, 2024
1 parent 54bb279 commit 6a185e6
Showing 8 changed files with 38 additions and 14 deletions.
README.md (8 changes: 4 additions & 4 deletions)
@@ -9,7 +9,7 @@ GPTAuthor is a tool for writing long form, multi-chapter stories and novels usin

## Installation

You can install gptauthor using pip:
You can install [gptauthor](https://pypi.org/project/gptauthor/) using pip:

```bash
pip install gptauthor
@@ -23,20 +23,20 @@ This example reads the story prompt from the example file [prompts-openai-drama.

```bash
export OPENAI_API_KEY=sk-<your key>
gptauthor --story openai-drama --total-chapters 3 --llm-model gpt-3.5-turbo --llm-temperature 0.1
gptauthor --story prompts-openai-drama --total-chapters 3 --llm-model gpt-3.5-turbo --llm-temperature 0.1
```

### Required Arguments

- `--story TEXT`: The name within the yaml file name defining the story [default: openai-drama]
- `--story TEXT`: The name of the yaml file defining the story and prompts

### Optional Arguments

- `--llm-model TEXT`: The model name [default: gpt-3.5-turbo]
- `--llm-temperature FLOAT`: LLM temperature value (0 to 2, OpenAI default is 1) [default: 1]
- `--llm-top-p FLOAT`: LLM top_p probability value (0 to 2, OpenAI default is 1) [default: 1]
- `--llm-use-localhost INTEGER`: LLM use localhost:8081 instead of openai [default: 0]
- `--total-chapters INTEGER`: Total chapters to write [default: 5]
- `--total-chapters INTEGER`: Total chapters to write [default: 3]
- `--allow-user-input / --no-allow-user-input`: Allow command line user input [default: allow-user-input]
- `--version`: Display gptauthor version
- `--install-completion`: Install completion for the current shell.
gptauthor/console.py (9 changes: 5 additions & 4 deletions)
@@ -15,12 +15,13 @@
def version_callback(value: bool):
if value:
print(f"{consts.package_name} version: {consts.version}")
print("Please visit https://github.com/dylanhogg/gptauthor for more info.")
raise typer.Exit()


@typer_app.command()
def run(
story: Annotated[str, typer.Option(help="The name within the yaml file name defining the story")],
story: Annotated[str, typer.Option(help="The name of the yaml file defining the story and prompts")],
llm_model: Annotated[str, typer.Option(help="The model name")] = consts.default_llm_model,
llm_temperature: Annotated[
float, typer.Option(help="LLM temperature value (0 to 2, OpenAI default is 1)")
@@ -44,7 +45,7 @@ def run(

try:
log.configure()
example_usage = f"Example usage: [bold green]{consts.package_name} --story openai-drama --total-chapters 3 --llm-model gpt-3.5-turbo --llm-temperature 0.1 --llm-top-p 1.0[/bold green]"
example_usage = f"Example usage: [bold green]{consts.package_name} --story prompts-openai-drama --total-chapters 3 --llm-model gpt-3.5-turbo --llm-temperature 0.1 --llm-top-p 1.0[/bold green]"

llm_api_key = env.get("OPENAI_API_KEY", "")
if not llm_use_localhost and not llm_api_key:
@@ -54,7 +55,7 @@ def run(
"\nAlternatively you can use the '--llm_use_localhost 1' argument to use a local LLM server."
)

story_file = f"prompts-{story}.yaml"
story_file = f"{story}.yaml"
llm_config = OmegaConf.create(
{
"version": consts.version,
@@ -84,7 +85,7 @@ def run(
if ex.exit_code == 0:
print()
print(
"[bold green]Good bye and thanks for using gptauthor! Please visit https://github.com/dylanhogg/gptauthor for more info.[/bold green]"
"[bold green]Goodbye and thanks for using gptauthor! Please visit https://github.com/dylanhogg/gptauthor for more info.[/bold green]"
)
return
print(example_usage)
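A consequence of the `story_file = f"{story}.yaml"` change above is that the `--story` value is now used as the yaml file stem directly, with no `prompts-` prefix prepended. A minimal sketch of the mapping, assuming the option value from the updated README (the `my-story` name is hypothetical):

```python
# How console.py now derives the yaml file name from the --story option.
story = "prompts-openai-drama"   # the bundled example shipped with the package
story_file = f"{story}.yaml"     # -> "prompts-openai-drama.yaml"
print(story_file)

# A user's own file follows the same rule: `--story my-story` (hypothetical)
# resolves to "my-story.yaml", and the new lookup in prompts.get_yaml_file()
# shown in the prompts.py diff below checks the working directory before
# falling back to the yaml files bundled in the package resources.
```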
gptauthor/library/consts.py (2 changes: 1 addition & 1 deletion)
@@ -4,7 +4,7 @@
version = pkg_resources.get_distribution(package_name).version

default_output_folder = "./_output/"
default_write_total_chapters = 5
default_write_total_chapters = 3

# https://platform.openai.com/docs/api-reference/chat/create
default_llm_use_localhost = 0
gptauthor/library/engine.py (4 changes: 4 additions & 0 deletions)
@@ -45,6 +45,8 @@ def do_writing(llm_config):
p(
f"Start {consts.package_name} {consts.version}, {llm_config.total_chapters=}, {llm_config.story_file=}, {llm_config.allow_user_input=}..."
)
p(f"Full story prompt file path: {prompts.get_yaml_file(llm_config).resolve()}")
print()

# ------------------------------------------------------------------------------
# Create synopsis
@@ -256,3 +258,5 @@ def do_writing(llm_config):
return
else:
print("Invalid input. Please try again.")

return output_folder
gptauthor/library/prompts.py (23 changes: 21 additions & 2 deletions)
@@ -1,21 +1,23 @@
import importlib.resources
from pathlib import Path

from loguru import logger
from omegaconf import DictConfig, OmegaConf

from gptauthor.library import consts
from gptauthor.library.classes import AppUsageException


def _get_common(key: str, llm_config: DictConfig):
yaml_file = importlib.resources.files(consts.package_name).joinpath(llm_config.story_file)
yaml_file = get_yaml_file(llm_config)
conf_file = OmegaConf.load(yaml_file)
if key not in conf_file:
raise Exception(f"{key} not in conf_file")
return conf_file[key]


def _get_conf(prompt_type: str, llm_config) -> (str, dict):
yaml_file = importlib.resources.files(consts.package_name).joinpath(llm_config.story_file)
yaml_file = get_yaml_file(llm_config)
conf_file = OmegaConf.load(yaml_file)
if prompt_type not in conf_file:
valid_prompt_types = sorted(
@@ -28,6 +30,23 @@ def _get_conf(prompt_type: str, llm_config) -> (str, dict):
return conf


def get_yaml_file(llm_config: DictConfig):
local_yaml_file = Path(llm_config.story_file)
if local_yaml_file.is_file():
logger.info(f"Using local yaml file: {local_yaml_file}")
return local_yaml_file

resources_yaml_file = Path(importlib.resources.files(consts.package_name).joinpath(llm_config.story_file))
if resources_yaml_file.is_file():
logger.info(f"Using resources yaml file: {resources_yaml_file}")
return resources_yaml_file

raise Exception(
f"Could not find yaml file: {llm_config.story_file} either locally or in the resources folder. "
"See here for an example: https://github.com/dylanhogg/gptauthor/blob/main/gptauthor/prompts-openai-drama.yaml"
)


def get_prompt(prompt_type: str, llm_config: DictConfig):
return _get_conf(prompt_type, llm_config).prompt

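The new `get_yaml_file()` is the core of this commit: a yaml file in the working directory wins over the copy bundled in the package resources. A minimal sketch of exercising the lookup, assuming the bundled example file name; the config below carries only the single key this function reads, whereas the real `llm_config` built in console.py has many more fields:

```python
from omegaconf import OmegaConf

from gptauthor.library import prompts

# Only story_file is read by get_yaml_file(); the real config also carries
# model, temperature, prompt settings, etc.
llm_config = OmegaConf.create({"story_file": "prompts-openai-drama.yaml"})

# If ./prompts-openai-drama.yaml exists in the current directory it is used;
# otherwise the copy shipped inside the gptauthor package resources is used.
# A name found in neither place raises the exception shown in the diff above.
yaml_path = prompts.get_yaml_file(llm_config)
print(yaml_path.resolve())
```

The resolved path is also what engine.py now prints at startup, so it is visible in the console output either way.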
gptauthor/library/utils.py (2 changes: 1 addition & 1 deletion)
@@ -47,7 +47,7 @@ def synopsis_processer(synopsis_response):
return title, chapters


def get_folder(synopsis_title: str, synopsis_chapters: list[str], llm_config: dict):
def get_folder(synopsis_title: str, synopsis_chapters: list[str], llm_config: dict) -> Path:
now = datetime.now()
safe_title = _make_safe_filename(synopsis_title)
num_chapters = len(synopsis_chapters)
gptauthor/prompts-openai-drama.yaml (2 changes: 1 addition & 1 deletion)
@@ -65,7 +65,7 @@ synopsis:
The final chapter has a twist that is unexpected, but makes sense in hindsight.
First, give the title of the book.
Then give each of the {total_chapters} chapters an outline, in the format "Chapter N: <title>", followed by 4 to 6 bullet points identifing the key chapter elements contributing to the overall story arc. Ensure that the story and chapters flow.
Then give each of the {total_chapters} chapters an outline, in the format "Chapter N: <title>", followed by 4 to 6 bullet points that describe key chapter elements contributing to the overall story arc. Ensure that the story and chapters flow.
expand-chapter-first:
system: You are a clever and creative story book author. You are skilled at weaving stories that are coherent, and fun to read. You are skilled at creating characters that are engaging and believable.
pyproject.toml (2 changes: 1 addition & 1 deletion)
@@ -1,6 +1,6 @@
[tool.poetry]
name = "gptauthor"
version = "0.3.0"
version = "0.4.0"
description = "GPTAuthor is a tool for writing long form stories using AI"
authors = ["Dylan Hogg <[email protected]>"]
license = "MIT"
