Skip to content

Commit

Permalink
Add citations support
Browse files Browse the repository at this point in the history
  • Loading branch information
hex committed Nov 19, 2024
1 parent bfeb067 commit a935fd9
Show file tree
Hide file tree
Showing 2 changed files with 22 additions and 3 deletions.
23 changes: 21 additions & 2 deletions llm_perplexity.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

@llm.hookimpl
def register_models(register):
    """Register the Perplexity sonar models with the LLM plugin system.

    Each model is registered under its full Perplexity model id plus a
    short convenience alias (e.g. ``sonar-small``).
    """
    # Model names documented at:
    # https://docs.perplexity.ai/guides/model-cards
    register(Perplexity("llama-3.1-sonar-small-128k-online"), aliases=("sonar-small",))
    register(Perplexity("llama-3.1-sonar-large-128k-online"), aliases=("sonar-large",))
    register(Perplexity("llama-3.1-sonar-huge-128k-online"), aliases=("sonar-huge",))
Expand Down Expand Up @@ -52,6 +52,11 @@ class PerplexityOptions(llm.Options):
default=None,
)

return_citations: Optional[bool] = Field(
description="Determines whether or not a request to an online model should return citations",
default=False,
)

@field_validator("temperature")
@classmethod
def validate_temperature(cls, temperature):
Expand Down Expand Up @@ -115,7 +120,7 @@ def execute(self, prompt, stream, response, conversation):
kwargs = {
"model": self.model_id,
"messages": self.build_messages(prompt, conversation),
"stream": prompt.options.stream,
"stream": stream,
"max_tokens": prompt.options.max_tokens or None,
}

Expand All @@ -126,14 +131,28 @@ def execute(self, prompt, stream, response, conversation):

if prompt.options.top_k:
kwargs["top_k"] = prompt.options.top_k

if prompt.options.return_citations:
kwargs["return_citations"] = prompt.options.return_citations

if stream:
with client.chat.completions.create(**kwargs) as stream:
for text in stream:
yield text.choices[0].delta.content

if hasattr(text, 'citations') and text.citations:
yield "\n\nCitations:\n"
for i, citation in enumerate(text.citations, 1):
yield f"[{i}] {citation}\n"

else:
completion = client.chat.completions.create(**kwargs)
yield completion.choices[0].message.content
if hasattr(completion, 'citations') and completion.citations:
yield "\n\nCitations:\n"
for i, citation in enumerate(completion.citations, 1):
yield f"[{i}] {citation}\n"


def __str__(self):
    """Human-readable label for this model, e.g. ``Perplexity: sonar-small``."""
    return "Perplexity: {}".format(self.model_id)
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "llm-perplexity"
version = "0.9"
version = "2024.11.0"
description = "LLM access to pplx-api 3 by Perplexity Labs"
readme = "README.md"
authors = [{name = "hex"}]
Expand Down

0 comments on commit a935fd9

Please sign in to comment.