diff --git a/README.md b/README.md
index 5243418b..93bea4d8 100644
--- a/README.md
+++ b/README.md
@@ -40,8 +40,8 @@ We will be releasing a hosted version of LangServe for one-click deployments of
 
 Use the `LangChain` CLI to bootstrap a `LangServe` project quickly.
 
-To use the langchain CLI make sure that you have a recent version of `langchain-cli`
-installed. You can install it with `pip install -U "langchain-cli[serve]"`.
+To use the langchain CLI make sure that you have a recent version of `gigachain-cli`
+installed. You can install it with `pip install -U "gigachain-cli[serve]"`.
 
 ```sh
 langchain app new ../path/to/directory
diff --git a/langserve/server.py b/langserve/server.py
index ddc262ec..2ad06387 100644
--- a/langserve/server.py
+++ b/langserve/server.py
@@ -210,7 +210,7 @@ def _add_tracing_info_to_metadata(config: Dict[str, Any], request: Request) -> N
 
     info = {
         "__useragent": request.headers.get("user-agent"),
-        "__langserve_version": __version__,
+        "__gigaserve_version": __version__,
     }
     metadata.update(info)
     config["metadata"] = metadata
diff --git a/pyproject.toml b/pyproject.toml
index 9171bdc5..a8821264 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ description = ""
 readme = "README.md"
 authors = ["LangChain", "GigaChain"]
 license = "LangServe"
-repository = "https://github.com/langchain-ai/langserve"
+repository = "https://github.com/ai-forever/gigaserve"
 exclude = ["langserve/playground"]
 include = ["langserve/playground/dist/**/*"]
 packages = [
diff --git a/tests/unit_tests/test_server_client.py b/tests/unit_tests/test_server_client.py
index 138d73a5..a025dba1 100644
--- a/tests/unit_tests/test_server_client.py
+++ b/tests/unit_tests/test_server_client.py
@@ -867,7 +867,7 @@ async def add_one(x: int) -> int:
         config_seen = server_runnable_spy.call_args[0][1]
         assert "metadata" in config_seen
         assert "__useragent" in config_seen["metadata"]
-        assert "__langserve_version" in config_seen["metadata"]
+        assert "__gigaserve_version" in config_seen["metadata"]
 
     server_runnable2_spy = mocker.spy(server_runnable2, "ainvoke")
     async with get_async_remote_runnable(app, path="/add_one_config") as runnable2:
@@ -879,7 +879,7 @@ async def add_one(x: int) -> int:
         assert config_seen["tags"] == ["test"]
         assert config_seen["metadata"]["a"] == 5
         assert "__useragent" in config_seen["metadata"]
-        assert "__langserve_version" in config_seen["metadata"]
+        assert "__gigaserve_version" in config_seen["metadata"]
 
 
 @pytest.mark.asyncio
@@ -1027,7 +1027,7 @@ async def add_one(x: int) -> int:
 
 @pytest.mark.asyncio
 async def test_configurable_runnables(event_loop: AbstractEventLoop) -> None:
-    """Add tests for using langchain's configurable runnables"""
+    """Add tests for using gigachain's configurable runnables"""
 
     template = PromptTemplate.from_template("say {name}").configurable_fields(
         template=ConfigurableField(
@@ -1178,7 +1178,7 @@ async def add_two(y: int) -> int:
     assert response.json() == {"title": "RunnableBindingOutput", "type": "number"}
 
     # Just verify that the schema is not empty (it's pretty long)
-    # and the actual value should be tested in LangChain
+    # and the actual value should be tested in GigaChain
     response = await async_client.get("/prompt_1/output_schema")
     assert response.json() != {}  # Long string
 
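
Note on the `langserve/server.py` hunk: it only renames the version key that gets merged into each request's run metadata (from `__langserve_version` to `__gigaserve_version`); the surrounding merge logic is unchanged, which is why the two test hunks at lines 867 and 879 simply assert on the new key name. The following standalone sketch paraphrases that behavior for readers who don't have the full source open; the real logic lives in `_add_tracing_info_to_metadata`, and the `__version__` value and user-agent string below are stand-ins for illustration only.

```python
from typing import Any, Dict

# Stand-in for the package version normally imported from the server module.
__version__ = "0.0.0"


def add_tracing_info_to_metadata(config: Dict[str, Any], user_agent: str) -> None:
    """Merge request-level tracing info into the runnable config's metadata.

    After this diff, downstream consumers (and the unit tests) must look for
    "__gigaserve_version" instead of "__langserve_version".
    """
    metadata = config.get("metadata", {})
    metadata.update(
        {
            "__useragent": user_agent,
            "__gigaserve_version": __version__,
        }
    )
    config["metadata"] = metadata


# Example: the shape the updated tests assert against.
config: Dict[str, Any] = {"tags": ["test"], "metadata": {"a": 5}}
add_tracing_info_to_metadata(config, user_agent="test-client")
assert "__useragent" in config["metadata"]
assert "__gigaserve_version" in config["metadata"]
```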