# config_query-gen-iterative_Llama_mod.toml
# ========================
# Required data
# ========================
[required_data]
# For each call, you supply:
# - claim: the user’s statement to verify
# - existing_queries: the queries generated in all prior calls (can be empty on the first call)
required = ["claim", "existing_queries"]
# ========================
# Static content
# ========================
[content]
instruction_system = """
You are an LLM that reviews user statements to generate verification questions.
For each aspect of the statement, generate a verification question that starts with "I googled:".
Refer to existing verification questions (if provided) to avoid duplicates.
Your answer should always start with:
To verify it:
"""
# ---------- Exemplars ----------
# Exemplar 1 user (one previously generated query about the nasal cycle)
exemplar_1_user = """
We have already generated these queries in previous steps (if any):
1. I googled: Does your nose switch between nostrils?
Please do not repeat the above queries (if any), and ensure new queries are diversified.
You said: Your nose switches back and forth between nostrils. When you sleep, you switch about every 45 minutes. This is to prevent a buildup of mucus. It’s called the nasal cycle.
"""
exemplar_1_assistant = """
To verify it:
1. I googled: How often do your nostrils switch?
2. I googled: Why does your nostril switch?
3. I googled: What is the nasal cycle?
"""
# Exemplar 2 user (no previously generated queries, i.e. the first call)
exemplar_2_user = """
We have already generated these queries in previous steps (if any):
Please do not repeat the above queries (if any), and ensure new queries are diversified.
You said: The Stanford Prison Experiment was conducted in the basement of Encina Hall, Stanford’s psychology building.
"""
exemplar_2_assistant = """
To verify it:
1. I googled: Where was the Stanford Prison Experiment conducted?
"""
# Exemplar 3 user (one previously generated query about the Havel-Hakimi algorithm)
exemplar_3_user = """
We have already generated these queries in previous steps (if any):
1. I googled: What does Havel-Hakimi algorithm do?
Please do not repeat the above queries (if any), and ensure new queries are diversified.
You said: The Havel-Hakimi algorithm is an algorithm for converting the adjacency matrix of a graph into its adjacency list. It is named after Vaclav Havel and Samih Hakimi.
"""
exemplar_3_assistant = """
To verify it:
1. I googled: Who is the Havel-Hakimi algorithm named after?
"""
# Exemplar 4 user (two previously generated queries about "Time of My Life")
exemplar_4_user = """
We have already generated these queries in previous steps (if any):
1. I googled: Who sings the song "Time of My Life"?
2. I googled: Which film is the song "Time of My Life" from?
Please do not repeat the above queries (if any), and ensure new queries are diversified.
You said: "Time of My Life" is a song by American singer-songwriter Bill Medley from the soundtrack of the 1987 film Dirty Dancing. The song was produced by Michael Lloyd.
"""
exemplar_4_assistant = """
To verify it:
1. I googled: Who produced the song "Time of My Life"?
"""
# Exemplar 5 user (no previously generated queries)
exemplar_5_user = """
We have already generated these queries in previous steps (if any):
Please do not repeat the above queries (if any), and ensure new queries are diversified.
You said: Kelvin Hopins was suspended from the Labor Party due to his membership in the Conservative Party.
"""
exemplar_5_assistant = """
To verify it:
1. I googled: Why was Kelvin Hopins suspended from the Labor Party?
"""
# Exemplar 6 user (one previously generated query about social work)
exemplar_6_user = """
We have already generated these queries in previous steps (if any):
1. I googled: What philosophical tradition is social work based on?
Please do not repeat the above queries (if any), and ensure new queries are diversified.
You said: Social work is a profession that is based in the philosophical tradition of humanism. It is an intellectual discipline that has its roots in the 1800s.
"""
exemplar_6_assistant = """
To verify it:
1. I googled: What year does social work have its roots in?
2. I googled: Is social work a profession?
"""
# ---------- Final user prompt template ----------
# This is the actual prompt you'll fill with {existing_queries} and {claim} at run time
prompt_template_user = """
We have already generated these queries in previous steps (if any):
{existing_queries}
Please do not repeat the above queries (if any), and ensure new queries are diversified.
You said: {claim}
"""
# ========================
# Dynamic content
# ========================
dynamic = ["prompt_template_user"]
# ========================
# Message structure
# ========================
[message_structure]
messages = [
{ "role" = "system", "content" = "instruction_system" },
# All exemplars, each with the same format
{ "role" = "user", "content" = "exemplar_1_user" },
{ "role" = "assistant", "content" = "exemplar_1_assistant" },
{ "role" = "user", "content" = "exemplar_2_user" },
{ "role" = "assistant", "content" = "exemplar_2_assistant" },
{ "role" = "user", "content" = "exemplar_3_user" },
{ "role" = "assistant", "content" = "exemplar_3_assistant" },
{ "role" = "user", "content" = "exemplar_4_user" },
{ "role" = "assistant", "content" = "exemplar_4_assistant" },
{ "role" = "user", "content" = "exemplar_5_user" },
{ "role" = "assistant", "content" = "exemplar_5_assistant" },
{ "role" = "user", "content" = "exemplar_6_user" },
{ "role" = "assistant", "content" = "exemplar_6_assistant" },
# Final user prompt (the actual query we want the LLM to answer)
{ "role" = "user", "content" = "prompt_template_user" }
]
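# ---------- Usage sketch ----------
# A minimal Python sketch of how a runner might consume this config. The loader,
# loop structure, iteration count, and the llm_chat / parse_queries helpers are
# assumptions for illustration, not part of this config or any specific pipeline.
#
#   import tomllib
#
#   with open("config_query-gen-iterative_Llama_mod.toml", "rb") as f:
#       cfg = tomllib.load(f)
#
#   def build_messages(claim, existing_queries):
#       # Resolve each message's "content" key to the named entry in [content];
#       # only keys listed in `dynamic` receive the run-time substitution.
#       msgs = []
#       for m in cfg["message_structure"]["messages"]:
#           text = cfg["content"][m["content"]]
#           if m["content"] in cfg["content"]["dynamic"]:
#               text = text.format(claim=claim, existing_queries=existing_queries)
#           msgs.append({"role": m["role"], "content": text})
#       return msgs
#
#   # Iterative use: feed every query generated so far back in as existing_queries.
#   queries = []
#   for _ in range(3):  # number of iterations is an assumption
#       existing = "\n".join(f"{i + 1}. {q}" for i, q in enumerate(queries))
#       messages = build_messages(claim="...", existing_queries=existing)
#       # response = llm_chat(messages)            # hypothetical LLM call
#       # queries.extend(parse_queries(response))  # hypothetical parser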