-
Notifications
You must be signed in to change notification settings - Fork 402
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #11 from decodingml/module-4
Module 4
- Loading branch information
Showing
20 changed files
with
1,160 additions
and
675 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -159,4 +159,7 @@ cython_debug/ | |
.vscode | ||
|
||
# MacOS | ||
.DS_Store | ||
.DS_Store | ||
|
||
# Ruff | ||
.ruff_cache |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,10 @@ | ||
from langchain.chains.llm import LLMChain | ||
from langchain.prompts import PromptTemplate | ||
|
||
|
||
class GeneralChain:
    """Factory for assembling generic LangChain LLM chains."""

    @staticmethod
    def get_chain(llm, template: PromptTemplate, output_key: str, verbose=True):
        """Build an LLMChain wiring the given model to a prompt template.

        Args:
            llm: The language model driving the chain.
            template: Prompt template the chain will render per call.
            output_key: Key under which the chain stores its result.
            verbose: Whether LangChain logs intermediate steps (default True).

        Returns:
            A configured LLMChain instance.
        """
        chain = LLMChain(
            llm=llm,
            prompt=template,
            output_key=output_key,
            verbose=verbose,
        )
        return chain
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,64 @@ | ||
from abc import ABC, abstractmethod | ||
|
||
from langchain.prompts import PromptTemplate | ||
from pydantic import BaseModel | ||
|
||
|
||
class BasePromptTemplate(ABC, BaseModel):
    """Abstract contract for prompt wrappers: each subclass carries its prompt
    text as pydantic fields and materializes a LangChain PromptTemplate."""

    @abstractmethod
    def create_template(self) -> PromptTemplate:
        """Build and return the PromptTemplate for this prompt."""
        ...
|
||
|
||
class QueryExpansionTemplate(BasePromptTemplate):
    """Multi-query expansion prompt: asks the LLM to rewrite one user question
    as five alternative phrasings, to widen vector-database retrieval."""

    # Fixed typo "seperated" -> "separated" (this text is sent to the LLM).
    prompt: str = """You are an AI language model assistant. Your task is to generate Five
    different versions of the given user question to retrieve relevant documents from a vector
    database. By generating multiple perspectives on the user question, your goal is to help
    the user overcome some of the limitations of the distance-based similarity search.
    Provide these alternative questions separated by newlines.
    Original question: {question}"""

    # The question to expand; substituted for {question} by the chain at run time.
    question: str

    def create_template(self) -> PromptTemplate:
        """Return a PromptTemplate over `prompt` with `question` as its sole input.

        The previous version also called `template.format(...)` and discarded
        the result — a side-effect-free no-op, now removed.
        """
        return PromptTemplate(
            template=self.prompt, input_variables=["question"], verbose=True
        )
|
||
|
||
class SelfQueryTemplate(BasePromptTemplate):
    """Self-query prompt: instructs the LLM to extract only the user id from a
    free-form user question."""

    # Fixed grammar "should consists of" -> "should consist of" (LLM-facing text).
    prompt: str = """You are an AI language model assistant. Your task is to extract information from a user question.
    The required information that needs to be extracted is the user id.
    Your response should consist of only the extracted id (e.g. 1345256), nothing else.
    User question: {question}"""

    # The raw user question the id is extracted from.
    question: str

    def create_template(self) -> PromptTemplate:
        """Return a PromptTemplate over `prompt` with `question` as its sole input.

        The previous version also called `template.format(...)` and discarded
        the result — a side-effect-free no-op, now removed.
        """
        return PromptTemplate(
            template=self.prompt, input_variables=["question"], verbose=True
        )
|
||
|
||
class RerankingTemplate(BasePromptTemplate):
    """Reranking prompt: asks the LLM to reorder retrieved passages by relevance
    to a query, keeping at most 5."""

    prompt: str = """You are an AI language model assistant. Your task is to rerank passages related to a query
    based on their relevance. The most relevant passages should be put at the beginning and at the end.
    You should only pick at max 5 passages.
    The following are passages related to this query: {question}.
    Passages: {passages}
    """

    # The retrieval query the passages are ranked against.
    question: str

    # The candidate passages, pre-joined into a single string by the caller.
    passages: str

    def create_template(self) -> PromptTemplate:
        """Return a PromptTemplate over `prompt` with `question` and `passages`
        as its input variables.

        The previous version also called `template.format(...)` and discarded
        the result — a side-effect-free no-op, now removed.
        """
        return PromptTemplate(
            template=self.prompt, input_variables=["question", "passages"], verbose=True
        )
Oops, something went wrong.