Change model_name_or_path and model_name params to model in example code blocks (#134)
bilgeyucel authored Jan 18, 2024
1 parent 0d8bb9a commit 61886f4
Showing 4 changed files with 9 additions and 9 deletions.
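
The rename follows a single pattern across all four files: the model_name_or_path / model_name keyword becomes model, while the model identifier passed as its value is unchanged. A minimal before/after sketch of the pattern (the import path is the usual Haystack 2.x one and is assumed here, since the diff shows call sites rather than import lines):

from haystack.components.embedders import SentenceTransformersDocumentEmbedder

# Before this commit, the example docs used the old keywords, e.g.
#   SentenceTransformersDocumentEmbedder(model_name_or_path="sentence-transformers/multi-qa-mpnet-base-dot-v1")
#   OpenAIGenerator(api_key=YOUR_OPENAI_KEY, model_name="gpt-4")

# After this commit, only the keyword changes; the model identifier stays the same.
doc_embedder = SentenceTransformersDocumentEmbedder(
    model="sentence-transformers/multi-qa-mpnet-base-dot-v1"
)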
integrations/elasticsearch-document-store.md (6 changes: 3 additions & 3 deletions)
@@ -68,7 +68,7 @@ from haystack.components.writers import DocumentWriter
 document_store = ElasticsearchDocumentStore(hosts = "http://localhost:9200")
 converter = TextFileToDocument()
 splitter = DocumentSplitter()
-doc_embedder = SentenceTransformersDocumentEmbedder(model_name_or_path="sentence-transformers/multi-qa-mpnet-base-dot-v1")
+doc_embedder = SentenceTransformersDocumentEmbedder(model="sentence-transformers/multi-qa-mpnet-base-dot-v1")
 writer = DocumentWriter(document_store)

 indexing_pipeline = Pipeline()
@@ -96,13 +96,13 @@ from haystack.pipeline import Pipeline
 from haystack.components.embedders import SentenceTransformersTextEmbedder
 from elasticsearch_haystack.embedding_retriever import ElasticsearchEmbeddingRetriever

-model_name_or_path = "sentence-transformers/multi-qa-mpnet-base-dot-v1"
+model = "sentence-transformers/multi-qa-mpnet-base-dot-v1"

 document_store = ElasticsearchDocumentStore(hosts = "http://localhost:9200")


 retriever = ElasticsearchEmbeddingRetriever(document_store=document_store)
-text_embedder = SentenceTransformersTextEmbedder(model_name_or_path=model_name_or_path)
+text_embedder = SentenceTransformersTextEmbedder(model=model)

 query_pipeline = Pipeline()
 query_pipeline.add_component("text_embedder", text_embedder)
integrations/huggingface.md (6 changes: 3 additions & 3 deletions)
@@ -62,7 +62,7 @@ documents = [Document(content="My name is Wolfgang and I live in Berlin"),
              Document(content="Germany has many big cities")]

 indexing_pipeline = Pipeline()
-indexing_pipeline.add_component("embedder", SentenceTransformersDocumentEmbedder(model_name_or_path="sentence-transformers/all-MiniLM-L6-v2"))
+indexing_pipeline.add_component("embedder", SentenceTransformersDocumentEmbedder(model="sentence-transformers/all-MiniLM-L6-v2"))
 indexing_pipeline.add_component("writer", DocumentWriter(document_store=document_store))
 indexing_pipeline.connect("embedder", "writer")
 indexing_pipeline.run({
@@ -128,7 +128,7 @@ document_store = InMemoryDocumentStore()
 document_store.write_documents(docs)

 retriever = InMemoryBM25Retriever(document_store = document_store)
-ranker = TransformersSimilarityRanker(model_name_or_path="cross-encoder/ms-marco-MiniLM-L-6-v2")
+ranker = TransformersSimilarityRanker(model="cross-encoder/ms-marco-MiniLM-L-6-v2")

 document_ranker_pipeline = Pipeline()
 document_ranker_pipeline.add_component(instance=retriever, name="retriever")
@@ -160,7 +160,7 @@ document_store = InMemoryDocumentStore()
 document_store.write_documents(docs)

 retriever = InMemoryBM25Retriever(document_store = document_store)
-reader = ExtractiveReader(model_name_or_path="deepset/roberta-base-squad2-distilled")
+reader = ExtractiveReader(model="deepset/roberta-base-squad2-distilled")

 extractive_qa_pipeline = Pipeline()
 extractive_qa_pipeline.add_component(instance=retriever, name="retriever")
integrations/openai.md (4 changes: 2 additions & 2 deletions)
@@ -61,7 +61,7 @@ documents = [Document(content="My name is Wolfgang and I live in Berlin"),
              Document(content="Germany has many big cities")]

 indexing_pipeline = Pipeline()
-indexing_pipeline.add_component("embedder", OpenAIDocumentEmbedder(api_key="OPENAI_API_KEY", model_name="text-embedding-ada-002"))
+indexing_pipeline.add_component("embedder", OpenAIDocumentEmbedder(api_key="OPENAI_API_KEY", model="text-embedding-ada-002"))
 indexing_pipeline.add_component("writer", DocumentWriter(document_store=document_store))
 indexing_pipeline.connect("embedder", "writer")

@@ -128,7 +128,7 @@ from haystack.document_stores import InMemoryDocumentStore

 document_store = InMemoryDocumentStore()
 pipeline = Pipeline()
-pipeline.add_component(instance=LocalWhisperTranscriber(model_name_or_path="small"), name="transcriber")
+pipeline.add_component(instance=LocalWhisperTranscriber(model="small"), name="transcriber")
 pipeline.add_component(instance=DocumentCleaner(), name="cleaner")
 pipeline.add_component(instance=DocumentSplitter(), name="splitter")
 pipeline.add_component(instance=DocumentWriter(document_store=document_store), name="writer")
integrations/pinecone-document-store.md (2 changes: 1 addition & 1 deletion)
@@ -122,7 +122,7 @@ query_pipeline = Pipeline()
 query_pipeline.add_component("text_embedder", SentenceTransformersTextEmbedder())
 query_pipeline.add_component("retriever", PineconeDenseRetriever(document_store=document_store))
 query_pipeline.add_component("prompt_builder", PromptBuilder(template=prompt_template))
-query_pipeline.add_component("generator", OpenAIGenerator(api_key=YOUR_OPENAI_KEY, model_name="gpt-4"))
+query_pipeline.add_component("generator", OpenAIGenerator(api_key=YOUR_OPENAI_KEY, model="gpt-4"))
 query_pipeline.connect("text_embedder.embedding", "retriever.query_embedding")
 query_pipeline.connect("retriever.documents", "prompt_builder.documents")
 query_pipeline.connect("prompt_builder", "generator")
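
If you are updating your own pipelines and are unsure which keyword your installed Haystack release expects, checking the constructor signature is a quick way to confirm; a small sketch using only the standard library:

import inspect

from haystack.components.embedders import SentenceTransformersDocumentEmbedder

# Print the accepted constructor parameters: newer releases expose model,
# while earlier 2.0 betas exposed model_name_or_path.
print(inspect.signature(SentenceTransformersDocumentEmbedder.__init__))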
