Skip to content

Commit

Permalink
Sync with upstream
Browse files Browse the repository at this point in the history
  • Loading branch information
Крестников Константин Николаевич authored and Крестников Константин Николаевич committed Dec 4, 2023
2 parents d36e4f8 + 177463a commit 41fdd8f
Show file tree
Hide file tree
Showing 46 changed files with 2,397 additions and 1,370 deletions.
3 changes: 2 additions & 1 deletion .clabot
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
{
"contributors": ["eyurtsev", "hwchase17", "nfcampos", "efriis", "jacoblee93", "dqbd", "kreneskyp", "adarsh-jha-dev", "harris", "baskaryan", "hinthornw", "bracesproul", "jakerachleff", "craigsdennis", "anhi"],
"contributors": ["eyurtsev", "hwchase17", "nfcampos", "efriis", "jacoblee93", "dqbd", "kreneskyp", "adarsh-jha-dev", "harris", "baskaryan", "hinthornw", "bracesproul", "jakerachleff", "craigsdennis", "anhi", "169", "LarchLiu", "PaulLockett", "RCMatthias", "jwynia", "majiayu000", "mpskex", "shivachittamuru", "sinashaloudegi", "sowsan"
],
"message": "Thank you for your pull request and welcome to our community. We require contributors to sign our Contributor License Agreement, and we don't seem to have the username {{usersWithoutCLA}} on file. In order for us to review and merge your code, please complete the Individual Contributor License Agreement here https://forms.gle/Ljhqvt9Gdi1N385W6 .\n\nThis process is done manually on our side, so after signing the form one of the maintainers will add you to the contributors list.\n\nFor more details about why we have a CLA and other contribution guidelines please see: https://github.com/langchain-ai/langserve/blob/main/CONTRIBUTING.md."
}
2 changes: 1 addition & 1 deletion .github/workflows/_pydantic_compatibility.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ env:

jobs:
build:
timeout-minutes: 5
timeout-minutes: 10
defaults:
run:
working-directory: ${{ inputs.working-directory }}
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/langserve_ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ jobs:
working-directory: .
secrets: inherit
test:
timeout-minutes: 5
timeout-minutes: 10
runs-on: ubuntu-latest
defaults:
run:
Expand Down
12 changes: 12 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -257,6 +257,18 @@ pip install "gigaserve[client]"

GigaServe работает как с runnable-интерфейсами (созданными с помощью [LangChain Expression Language](https://python.langchain.com/docs/expression_language/)), так и с классическими цепочками (посредством наследования от `Chain`). Но следует учитывать, что некоторые входные схемы для устаревших цепочек могут быть некорректными или неполными и могут вызывать ошибки. Это можно предотвратить, если обновить атрибут `input_schema` таких цепочек в LangChain.

### Deploy to Azure

You can deploy to Azure using Azure Container Apps (Serverless):

```
az containerapp up --name [container-app-name] --source . --resource-group [resource-group-name] --environment [environment-name] --ingress external --target-port 8001 --env-vars=OPENAI_API_KEY=your_key
```

You can find more info [here](https://learn.microsoft.com/en-us/azure/container-apps/containerapp-up)

### Deploy to GCP

## Добавление аутентификации

О том, как добавить аутентификацию на свой сервер GigaServe, вы можете узнать в разделах документации FastAPI, посвященных [безопасности](https://fastapi.tiangolo.com/tutorial/security/) и [использованию связующего ПО](https://fastapi.tiangolo.com/tutorial/middleware/).
Expand Down
23 changes: 22 additions & 1 deletion examples/configurable_chain/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,28 @@


###############################################################################
# EXAMPLE 2: Configure fields based on Request metadata #
# EXAMPLE 2: Configure prompt based on RunnableConfig #
###############################################################################
# A prompt with two runtime-selectable alternatives: the default "joke"
# prompt and a "fact" prompt. NOTE(review): the "fact" alternative also
# requires a {language} input variable, which the "joke" prompt does not —
# callers switching alternatives must supply it.
configurable_prompt = PromptTemplate.from_template(
    "tell me a joke about {topic}."
).configurable_alternatives(
    ConfigurableField(
        id="prompt",
        name="Prompt",
        description="The prompt to use. Must contain {topic}.",
    ),
    default_key="joke",
    fact=PromptTemplate.from_template(
        "tell me a fact about {topic} in {language} language."
    ),
)
prompt_chain = configurable_prompt | model | StrOutputParser()

# Serve the chain; the alternative is selected at request time via the
# "prompt" key under `configurable` in the request config.
add_routes(app, prompt_chain, path="/configurable_prompt")


###############################################################################
# EXAMPLE 3: Configure fields based on Request metadata #
###############################################################################


Expand Down
168 changes: 168 additions & 0 deletions examples/configurable_retrieval/client.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1,168 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Client\n",
"\n",
"Demo of a client interacting with a configurable retriever (see server code)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"You can interact with this via API directly"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"tags": []
},
"outputs": [
{
"data": {
"text/plain": [
"{'output': [{'page_content': 'cats like fish',\n",
" 'metadata': {},\n",
" 'type': 'Document'},\n",
" {'page_content': 'dogs like sticks', 'metadata': {}, 'type': 'Document'}],\n",
" 'callback_events': [],\n",
" 'metadata': {'run_id': 'f375cdf6-2848-4976-9565-f69e175c24ce'}}"
]
},
"execution_count": 1,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import requests\n",
"\n",
"inputs = {\"input\": \"cat\"}\n",
"response = requests.post(\"http://localhost:8000/invoke\", json=inputs)\n",
"\n",
"response.json()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"You can also interact with this via the RemoteRunnable interface (to use in other chains)"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"from langserve import RemoteRunnable\n",
"\n",
"remote_runnable = RemoteRunnable(\"http://localhost:8000/\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Remote runnable has the same interface as local runnables"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"tags": []
},
"outputs": [
{
"data": {
"text/plain": [
"[Document(page_content='cats like fish'),\n",
" Document(page_content='dogs like sticks')]"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"await remote_runnable.ainvoke(\"cat\")"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"tags": []
},
"outputs": [
{
"data": {
"text/plain": [
"[Document(page_content='cats like fish'),\n",
" Document(page_content='dogs like sticks')]"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"await remote_runnable.ainvoke(\"cat\", {\"configurable\": {\"collection_name\": \"Index 1\"}})"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"tags": []
},
"outputs": [
{
"data": {
"text/plain": [
"[Document(page_content='x_n+1=a * xn * (1-xn)')]"
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"await remote_runnable.ainvoke(\"cat\", {\"configurable\": {\"collection_name\": \"Index 2\"}})"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.6"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
126 changes: 126 additions & 0 deletions examples/configurable_retrieval/server.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,126 @@
#!/usr/bin/env python
"""A more complex example that shows how to configure index name at run time."""
from typing import Any, Iterable, List, Optional, Type

from fastapi import FastAPI
from langchain.embeddings import OpenAIEmbeddings
from langchain.schema import Document
from langchain.schema.embeddings import Embeddings
from langchain.schema.retriever import BaseRetriever
from langchain.schema.runnable import (
ConfigurableFieldSingleOption,
RunnableConfig,
RunnableSerializable,
)
from langchain.schema.vectorstore import VST
from langchain.vectorstores import FAISS, VectorStore

from langserve import add_routes
from langserve.pydantic_v1 import BaseModel, Field

# Two small in-memory FAISS indexes used to demonstrate switching between
# collections at run time.
# NOTE(review): building these at import time calls the OpenAI embeddings
# API — the server needs OPENAI_API_KEY and network access just to start.
vectorstore1 = FAISS.from_texts(
    ["cats like fish", "dogs like sticks"], embedding=OpenAIEmbeddings()
)

vectorstore2 = FAISS.from_texts(["x_n+1=a * xn * (1-xn)"], embedding=OpenAIEmbeddings())


# FastAPI application on which the configurable retriever routes are mounted.
app = FastAPI(
    title="LangChain Server",
    version="1.0",
    description="Spin up a simple api server using Langchain's Runnable interfaces",
)


class UnderlyingVectorStore(VectorStore):
    """This is a fake vectorstore for demo purposes.

    It holds only a collection name and delegates retrieval to one of the
    module-level FAISS indexes; all write/search methods are unimplemented.
    """

    def __init__(self, collection_name: str) -> None:
        """Fake vectorstore that has a collection name."""
        self.collection_name = collection_name

    def as_retriever(self) -> BaseRetriever:
        """Return the retriever backing the configured collection.

        Raises:
            NotImplementedError: if the collection name is unknown.
        """
        # Dispatch table instead of an if/elif chain; unknown names fall
        # through to the same NotImplementedError as before.
        backing_stores = {
            "index1": vectorstore1,
            "index2": vectorstore2,
        }
        store = backing_stores.get(self.collection_name)
        if store is None:
            raise NotImplementedError(
                f"No retriever for collection {self.collection_name}"
            )
        return store.as_retriever()

    def add_texts(
        self,
        texts: Iterable[str],
        metadatas: Optional[List[dict]] = None,
        **kwargs: Any,
    ) -> List[str]:
        # Demo store is read-only.
        raise NotImplementedError()

    @classmethod
    def from_texts(
        cls: Type[VST],
        texts: List[str],
        embedding: Embeddings,
        metadatas: Optional[List[dict]] = None,
        **kwargs: Any,
    ) -> VST:
        # Construction from texts is not supported for the demo store.
        raise NotImplementedError()

    def similarity_search(
        self, embedding: List[float], k: int = 4, **kwargs: Any
    ) -> List[Document]:
        # Never called: retrieval goes through as_retriever() instead.
        # NOTE(review): the base-class signature takes a query string here —
        # harmless for the demo, but worth confirming if reused.
        raise NotImplementedError()


class ConfigurableRetriever(RunnableSerializable[str, List[Document]]):
    """Create a custom retriever that can be configured by the user.

    This is an example of how to create a custom runnable that can be
    configured to use a different collection name at run time.

    Configuration involves instantiating a VectorStore with a collection name
    at run time, so the underlying vectorstore should be *cheap* to
    instantiate. For example, it should not be making any network requests at
    instantiation time.

    Make sure that the vectorstore you use meets this criteria.
    """

    # Pydantic field selecting which demo index backs this retriever;
    # exposed as a configurable option when wrapped below.
    collection_name: str

    def invoke(
        self, input: str, config: Optional[RunnableConfig] = None
    ) -> List[Document]:
        """Invoke the retriever."""
        # A fresh (cheap) vectorstore is built per call so the configured
        # collection name takes effect on every request.
        vectorstore = UnderlyingVectorStore(self.collection_name)
        retriever = vectorstore.as_retriever()
        return retriever.invoke(input, config=config)


# Wrap the retriever so `collection_name` becomes a runtime-configurable
# single-choice option restricted to the two demo indexes; clients pick via
# {"configurable": {"collection_name": "Index 1" | "Index 2"}}.
configurable_collection_name = ConfigurableRetriever(
    collection_name="index1"
).configurable_fields(
    collection_name=ConfigurableFieldSingleOption(
        id="collection_name",
        name="Collection Name",
        description="The name of the collection to use for the retriever.",
        options={
            "Index 1": "index1",
            "Index 2": "index2",
        },
        default="Index 1",
    )
)


class Request(BaseModel):
    # Pydantic v1 custom-root model: the request body is a single bare string
    # (the search query), not a JSON object.
    __root__: str = Field(default="cat", description="Search query")


# Mount the configurable retriever on the default routes (/invoke, /batch,
# /stream) with a string input schema for the generated docs.
add_routes(app, configurable_collection_name.with_types(input_type=Request))

if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="localhost", port=8000)
8 changes: 5 additions & 3 deletions examples/conversational_retrieval_chain/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,12 +87,14 @@ class ChatHistory(BaseModel):

chat_history: List[Tuple[str, str]] = Field(
...,
extra={"widget": {"type": "chat", "input": "question", "output": "answer"}},
extra={"widget": {"type": "chat", "input": "question"}},
)
question: str


conversational_qa_chain = _inputs | _context | ANSWER_PROMPT | ChatOpenAI()
conversational_qa_chain = (
_inputs | _context | ANSWER_PROMPT | ChatOpenAI() | StrOutputParser()
)
chain = conversational_qa_chain.with_types(input_type=ChatHistory)

app = FastAPI(
Expand All @@ -104,7 +106,7 @@ class ChatHistory(BaseModel):
# /invoke
# /batch
# /stream
add_routes(app, chain)
add_routes(app, chain, enable_feedback_endpoint=True)

if __name__ == "__main__":
import uvicorn
Expand Down
Loading

0 comments on commit 41fdd8f

Please sign in to comment.