from collections.abc import Collection
import json
import os

import google.generativeai as genai


GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")

genai.configure(api_key=GOOGLE_API_KEY)

# TODO: Improve on this prompt. Just adding something simple for testing.
_GENERATE_PROMPT = """
Write a detailed prompt to help complete the task given between the <task></task> tags.

<task>
{task}
</task>

For custom user input, you can leave placeholder variables. For example, if you have
a variable named EMAIL, it would look like {{{{EMAIL}}}} in the resulting prompt.
""".strip()

_GENERATE_VARIABLES_PROMPT = """
Your job is to generate data for the given placeholders: {placeholders}.

The generated data should reflect the name of the placeholder.

Render the output as JSON.

Here is an example output for these placeholders: STORY, FEEDBACK_TYPE

{{
  "STORY": "Generated data for a story",
  "FEEDBACK_TYPE": "Type of feedback to provide on the story"
}}

Again, please generate outputs for these placeholders: {placeholders}
""".strip()


def _make_model(
  model_name: str, system_instruction: str = "", temperature: float = 1.0
) -> genai.GenerativeModel:
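  """Builds a GenerativeModel with the shared decoding config and optional system instruction."""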
  return genai.GenerativeModel(
    model_name,
    system_instruction=system_instruction if system_instruction else None,
    generation_config={
      "temperature": temperature,
      "top_p": 0.95,
      "top_k": 64,
      "max_output_tokens": 16384,
    },
  )


def generate_prompt(task_description: str, model_name: str, temperature: float) -> str:
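  """Asks the model to write a detailed prompt (with {{NAME}} placeholders) for the given task."""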
  model = _make_model(model_name, temperature=temperature)
  prompt = _GENERATE_PROMPT.format(task=task_description)
  return model.generate_content(prompt).text


def generate_variables(
  prompt: str, variable_names: Collection[str], model_name: str, temperature: float
) -> dict[str, str]:
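  """Generates sample values for the given placeholder names and returns them as a dict."""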
  model = _make_model(model_name, temperature=temperature)
  # Strip surrounding whitespace and the markdown code fence the model may wrap
  # around its JSON output before parsing.
  output = (
    model.generate_content(
      _GENERATE_VARIABLES_PROMPT.format(placeholders=", ".join(variable_names))
    )
    .text.strip()
    .removeprefix("```json")
    .removesuffix("```")
  )
  return json.loads(output)


def run_prompt(
  prompt_with_variables: str, system_instruction: str, model_name: str, temperature: float
) -> str:
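  """Runs the filled-in prompt with the given system instruction and returns the response text."""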
  model = _make_model(model_name, temperature=temperature, system_instruction=system_instruction)
  return model.generate_content(prompt_with_variables, request_options={"timeout": 120}).text
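

# Minimal usage sketch (not part of the original module). The model name
# "gemini-1.5-flash", the sample task, and the {{NAME}} substitution format are
# assumptions for illustration; adjust them to match your setup.
if __name__ == "__main__":
  _MODEL = "gemini-1.5-flash"  # assumed model name

  # 1. Generate a reusable prompt containing {{NAME}} placeholder variables.
  generated = generate_prompt(
    "Summarize a customer feedback email", model_name=_MODEL, temperature=1.0
  )
  print(generated)

  # 2. Generate sample data for the placeholders.
  variables = generate_variables(
    generated, ["EMAIL"], model_name=_MODEL, temperature=1.0
  )

  # 3. Substitute the generated values and run the resulting prompt.
  filled = generated
  for name, value in variables.items():
    filled = filled.replace("{{" + name + "}}", value)
  print(run_prompt(filled, system_instruction="", model_name=_MODEL, temperature=0.7))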