diff --git a/docs/book/examples/tutorial_tracing.md b/docs/book/examples/tutorial_tracing.md
index f71c8b4cb1..0db9ba707c 100644
--- a/docs/book/examples/tutorial_tracing.md
+++ b/docs/book/examples/tutorial_tracing.md
@@ -73,12 +73,12 @@ It is recommended to pass the key as an environment variable. [See Open AI docs]
Set up configuration details:
* The `address` is the destination backend to store collected traces. In this case, it is Evidently Cloud.
-* `Team_id` is the identification of the Evidently Team which defines who can access collected traces and helps organize your Projects. Go to the [Team page](https://app.evidently.cloud/teams), enter the selected Team and copy its ID.
+* `Project_id` is the ID of the Evidently Project. Go to the [Projects page](https://app.evidently.cloud/), enter the selected Project and copy its ID.
* `Dataset_name` helps identify the resulting Tracing dataset. All data with the same ID would be grouped into single dataset.
```python
address="https://app.evidently.cloud/"
-team_id="YOUR_TEAM_ID"
+project_id="YOUR_PROJECT_ID"
dataset_name="YOUR_TRACING_DATASET_NAME"
```
@@ -87,7 +87,7 @@ Initialize tracing:
init_tracing(
address=address,
api_key=my_token,
- team_id=team_id,
+ project_id=project_id,
export_name=dataset_name
)
```
@@ -196,12 +196,8 @@ Preview the dataset with `traced_data.head()`.
You can run evaluations on this dataset using the Evidently Python library. You can generate the Reports to view them locally or send them to Evidently Cloud.
For example, let’s evaluate the length and sentiment of the responses, and whether they include the word "Certainly".
-Create an evaluation Project in Evidently Cloud. This will allow you to organize your evaluations and track results over time.
-
```python
-project = ws.create_project("Trace Evals", team_id=team_id)
-project.description = "Evaluation example"
-project.save()
+project = ws.get_project(project_id)
```
Define the evaluations:
diff --git a/docs/book/get-started/cloud_quickstart_tracing.md b/docs/book/get-started/cloud_quickstart_tracing.md
index 88a080dd87..a3c8fbb086 100644
--- a/docs/book/get-started/cloud_quickstart_tracing.md
+++ b/docs/book/get-started/cloud_quickstart_tracing.md
@@ -13,7 +13,7 @@ Need help? Ask on [Discord](https://discord.com/invite/xZjKRaNp8b).
Set up your Evidently Cloud workspace:
* **Sign up**. If you do not have one yet, sign up for a free [Evidently Cloud account](https://app.evidently.cloud/signup).
* **Create an Organization**. When you log in the first time, create and name your Organization.
-* **Create a Team**. Click **Teams** in the left menu. Create a Team, copy and save the Team ID. ([Team page](https://app.evidently.cloud/teams)).
+* **Create a Project**. Click the **+** button under the Project List. Create a Project, copy and save the Project ID. ([Projects page](https://app.evidently.cloud/)).
* **Get your API token**. Click the **Key** icon in the left menu. Generate and save the token. ([Token page](https://app.evidently.cloud/token)).
You can now go to your Python environment.
@@ -62,7 +62,7 @@ Set up tracing parameters. Copy the Team ID from the [Teams page](https://app.ev
init_tracing(
address="https://app.evidently.cloud/",
api_key="EVIDENTLY_API_KEY",
- team_id="YOUR_TEAM_ID",
+    project_id="YOUR_PROJECT_ID",
export_name="LLM tracing example"
)
```
diff --git a/docs/book/tracing/set_up_tracing.md b/docs/book/tracing/set_up_tracing.md
index 899a409000..918459ffa5 100644
--- a/docs/book/tracing/set_up_tracing.md
+++ b/docs/book/tracing/set_up_tracing.md
@@ -27,7 +27,7 @@ Use `init_tracing` to enable tracely tracing. Example:
init_tracing(
address="https://app.evidently.cloud/",
api_key=”YOUR_EVIDENTLY_TOKEN”,
- team_id="YOUR_EVIDENTLY_TEAM_ID",
+ project_id="YOUR_PROJECT_ID",
export_name="YOUR_TRACING_DATASET_NAME",
)
```
@@ -39,7 +39,7 @@ init_tracing(
| `address: Optional[str]` | The URL of the collector service where tracing data will be sent. For Evidently Cloud, set `https://app.evidently.cloud/`.
**Required:** No, **Default:** `None` |
| `exporter_type: Optional[str]` | Specifies the type of exporter to use for tracing. Options are `grpc` for gRPC protocol or `http` for HTTP protocol.
**Required:** No, **Default:** `None` |
| `api_key: Optional[str]` | The authorization API key for Evidently Cloud tracing. This key authenticates your requests and is necessary for sending data to Evidently Cloud.
**Required:** No, **Default:** `None` |
-| `team_id: Optional[str]` | The ID of your team in Evidently Cloud. This is used to organize and group tracing data under your specific team in the Evidently Cloud UI.
**Required:** No, **Default:** `None` |
+| `project_id: str` | The ID of your Project in Evidently Cloud.
**Required:** Yes, **Default:** `None` |
| `export_name: Optional[str]` | A string name assigned to the exported tracing data. All data with the same `export_name` will be grouped into a single dataset.
**Required:** No, **Default:** `None` |
| `as_global: bool = True` | Indicates whether to register the tracing provider globally for OpenTelemetry (`opentelemetry.trace.TracerProvider`) or use it locally within a scope. **Default:** `True` |
diff --git a/examples/sample_notebooks/llm_tracing_tutorial.ipynb b/examples/sample_notebooks/llm_tracing_tutorial.ipynb
index f46fb9678e..662538ac15 100644
--- a/examples/sample_notebooks/llm_tracing_tutorial.ipynb
+++ b/examples/sample_notebooks/llm_tracing_tutorial.ipynb
@@ -37,8 +37,8 @@
"Requirement already satisfied: pydantic>=1.10.13 in /usr/local/lib/python3.10/dist-packages (from evidently) (2.8.2)\n",
"Collecting litestar>=2.8.3 (from evidently)\n",
" Downloading litestar-2.10.0-py3-none-any.whl.metadata (103 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m103.1/103.1 kB\u001b[0m \u001b[31m920.5 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hCollecting typing-inspect>=0.9.0 (from evidently)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m103.1/103.1 kB\u001B[0m \u001B[31m920.5 kB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hCollecting typing-inspect>=0.9.0 (from evidently)\n",
" Downloading typing_inspect-0.9.0-py3-none-any.whl.metadata (1.5 kB)\n",
"Collecting uvicorn>=0.22.0 (from uvicorn[standard]>=0.22.0->evidently)\n",
" Downloading uvicorn-0.30.5-py3-none-any.whl.metadata (6.6 kB)\n",
@@ -135,59 +135,59 @@
"Collecting faker (from polyfactory>=2.6.3->litestar>=2.8.3->evidently)\n",
" Downloading Faker-26.2.0-py3-none-any.whl.metadata (15 kB)\n",
"Downloading evidently-0.4.33-py3-none-any.whl (3.4 MB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.4/3.4 MB\u001b[0m \u001b[31m4.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading dynaconf-3.2.6-py2.py3-none-any.whl (231 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m231.1/231.1 kB\u001b[0m \u001b[31m7.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading iterative_telemetry-0.0.8-py3-none-any.whl (10 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m3.4/3.4 MB\u001B[0m \u001B[31m4.8 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading dynaconf-3.2.6-py2.py3-none-any.whl (231 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m231.1/231.1 kB\u001B[0m \u001B[31m7.9 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading iterative_telemetry-0.0.8-py3-none-any.whl (10 kB)\n",
"Downloading litestar-2.10.0-py3-none-any.whl (531 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m531.5/531.5 kB\u001b[0m \u001b[31m6.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading opentelemetry_api-1.26.0-py3-none-any.whl (61 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m61.5/61.5 kB\u001b[0m \u001b[31m3.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading opentelemetry_exporter_otlp_proto_grpc-1.26.0-py3-none-any.whl (18 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m531.5/531.5 kB\u001B[0m \u001B[31m6.4 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading opentelemetry_api-1.26.0-py3-none-any.whl (61 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m61.5/61.5 kB\u001B[0m \u001B[31m3.8 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading opentelemetry_exporter_otlp_proto_grpc-1.26.0-py3-none-any.whl (18 kB)\n",
"Downloading opentelemetry_proto-1.26.0-py3-none-any.whl (52 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m52.5/52.5 kB\u001b[0m \u001b[31m2.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading opentelemetry_exporter_otlp_proto_common-1.26.0-py3-none-any.whl (17 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m52.5/52.5 kB\u001B[0m \u001B[31m2.8 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading opentelemetry_exporter_otlp_proto_common-1.26.0-py3-none-any.whl (17 kB)\n",
"Downloading opentelemetry_exporter_otlp_proto_http-1.26.0-py3-none-any.whl (16 kB)\n",
"Downloading opentelemetry_sdk-1.26.0-py3-none-any.whl (109 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m109.5/109.5 kB\u001b[0m \u001b[31m5.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading opentelemetry_semantic_conventions-0.47b0-py3-none-any.whl (138 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m138.0/138.0 kB\u001b[0m \u001b[31m6.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading requests-2.32.3-py3-none-any.whl (64 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m64.9/64.9 kB\u001b[0m \u001b[31m4.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading typing_inspect-0.9.0-py3-none-any.whl (8.8 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m109.5/109.5 kB\u001B[0m \u001B[31m5.5 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading opentelemetry_semantic_conventions-0.47b0-py3-none-any.whl (138 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m138.0/138.0 kB\u001B[0m \u001B[31m6.3 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading requests-2.32.3-py3-none-any.whl (64 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m64.9/64.9 kB\u001B[0m \u001B[31m4.5 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading typing_inspect-0.9.0-py3-none-any.whl (8.8 kB)\n",
"Downloading ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (53 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m53.6/53.6 kB\u001b[0m \u001b[31m3.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading uvicorn-0.30.5-py3-none-any.whl (62 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m62.8/62.8 kB\u001b[0m \u001b[31m3.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl (83 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m83.0/83.0 kB\u001b[0m \u001b[31m4.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading Deprecated-1.2.14-py2.py3-none-any.whl (9.6 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m53.6/53.6 kB\u001B[0m \u001B[31m3.6 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading uvicorn-0.30.5-py3-none-any.whl (62 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m62.8/62.8 kB\u001B[0m \u001B[31m3.9 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl (83 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m83.0/83.0 kB\u001B[0m \u001B[31m4.2 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading Deprecated-1.2.14-py2.py3-none-any.whl (9.6 kB)\n",
"Downloading h11-0.14.0-py3-none-any.whl (58 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.3/58.3 kB\u001b[0m \u001b[31m3.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (341 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m341.4/341.4 kB\u001b[0m \u001b[31m6.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading httpx-0.27.0-py3-none-any.whl (75 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m75.6/75.6 kB\u001b[0m \u001b[31m4.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading httpcore-1.0.5-py3-none-any.whl (77 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m77.9/77.9 kB\u001b[0m \u001b[31m4.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading importlib_metadata-8.0.0-py3-none-any.whl (24 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m58.3/58.3 kB\u001B[0m \u001B[31m3.6 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (341 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m341.4/341.4 kB\u001B[0m \u001B[31m6.3 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading httpx-0.27.0-py3-none-any.whl (75 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m75.6/75.6 kB\u001B[0m \u001B[31m4.1 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading httpcore-1.0.5-py3-none-any.whl (77 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m77.9/77.9 kB\u001B[0m \u001B[31m4.3 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading importlib_metadata-8.0.0-py3-none-any.whl (24 kB)\n",
"Downloading msgspec-0.18.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (210 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m210.3/210.3 kB\u001b[0m \u001b[31m6.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading mypy_extensions-1.0.0-py3-none-any.whl (4.7 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m210.3/210.3 kB\u001B[0m \u001B[31m6.9 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading mypy_extensions-1.0.0-py3-none-any.whl (4.7 kB)\n",
"Downloading polyfactory-2.16.2-py3-none-any.whl (58 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.3/58.3 kB\u001b[0m \u001b[31m3.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading python_dotenv-1.0.1-py3-none-any.whl (19 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m58.3/58.3 kB\u001B[0m \u001B[31m3.3 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading python_dotenv-1.0.1-py3-none-any.whl (19 kB)\n",
"Downloading uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.4 MB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.4/3.4 MB\u001b[0m \u001b[31m9.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading watchfiles-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (427 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m427.7/427.7 kB\u001b[0m \u001b[31m9.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (130 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m130.2/130.2 kB\u001b[0m \u001b[31m8.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading appdirs-1.4.4-py2.py3-none-any.whl (9.6 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m3.4/3.4 MB\u001B[0m \u001B[31m9.7 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading watchfiles-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (427 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m427.7/427.7 kB\u001B[0m \u001B[31m9.6 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (130 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m130.2/130.2 kB\u001B[0m \u001B[31m8.7 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading appdirs-1.4.4-py2.py3-none-any.whl (9.6 kB)\n",
"Downloading rich_click-1.8.3-py3-none-any.whl (35 kB)\n",
"Downloading Faker-26.2.0-py3-none-any.whl (1.8 MB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.8/1.8 MB\u001b[0m \u001b[31m11.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hInstalling collected packages: appdirs, websockets, watchdog, uvloop, ujson, requests, python-dotenv, opentelemetry-proto, mypy-extensions, msgspec, importlib-metadata, httptools, h11, dynaconf, deprecated, watchfiles, uvicorn, typing-inspect, opentelemetry-exporter-otlp-proto-common, opentelemetry-api, iterative-telemetry, httpcore, faker, rich-click, polyfactory, opentelemetry-semantic-conventions, httpx, opentelemetry-sdk, litestar, opentelemetry-exporter-otlp-proto-http, opentelemetry-exporter-otlp-proto-grpc, evidently\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m1.8/1.8 MB\u001B[0m \u001B[31m11.9 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hInstalling collected packages: appdirs, websockets, watchdog, uvloop, ujson, requests, python-dotenv, opentelemetry-proto, mypy-extensions, msgspec, importlib-metadata, httptools, h11, dynaconf, deprecated, watchfiles, uvicorn, typing-inspect, opentelemetry-exporter-otlp-proto-common, opentelemetry-api, iterative-telemetry, httpcore, faker, rich-click, polyfactory, opentelemetry-semantic-conventions, httpx, opentelemetry-sdk, litestar, opentelemetry-exporter-otlp-proto-http, opentelemetry-exporter-otlp-proto-grpc, evidently\n",
" Attempting uninstall: requests\n",
" Found existing installation: requests 2.31.0\n",
" Uninstalling requests-2.31.0:\n",
@@ -196,9 +196,9 @@
" Found existing installation: importlib_metadata 8.2.0\n",
" Uninstalling importlib_metadata-8.2.0:\n",
" Successfully uninstalled importlib_metadata-8.2.0\n",
- "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n",
- "google-colab 1.0.0 requires requests==2.31.0, but you have requests 2.32.3 which is incompatible.\u001b[0m\u001b[31m\n",
- "\u001b[0mSuccessfully installed appdirs-1.4.4 deprecated-1.2.14 dynaconf-3.2.6 evidently-0.4.33 faker-26.2.0 h11-0.14.0 httpcore-1.0.5 httptools-0.6.1 httpx-0.27.0 importlib-metadata-8.0.0 iterative-telemetry-0.0.8 litestar-2.10.0 msgspec-0.18.6 mypy-extensions-1.0.0 opentelemetry-api-1.26.0 opentelemetry-exporter-otlp-proto-common-1.26.0 opentelemetry-exporter-otlp-proto-grpc-1.26.0 opentelemetry-exporter-otlp-proto-http-1.26.0 opentelemetry-proto-1.26.0 opentelemetry-sdk-1.26.0 opentelemetry-semantic-conventions-0.47b0 polyfactory-2.16.2 python-dotenv-1.0.1 requests-2.32.3 rich-click-1.8.3 typing-inspect-0.9.0 ujson-5.10.0 uvicorn-0.30.5 uvloop-0.19.0 watchdog-4.0.1 watchfiles-0.23.0 websockets-12.0\n"
+ "\u001B[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n",
+ "google-colab 1.0.0 requires requests==2.31.0, but you have requests 2.32.3 which is incompatible.\u001B[0m\u001B[31m\n",
+ "\u001B[0mSuccessfully installed appdirs-1.4.4 deprecated-1.2.14 dynaconf-3.2.6 evidently-0.4.33 faker-26.2.0 h11-0.14.0 httpcore-1.0.5 httptools-0.6.1 httpx-0.27.0 importlib-metadata-8.0.0 iterative-telemetry-0.0.8 litestar-2.10.0 msgspec-0.18.6 mypy-extensions-1.0.0 opentelemetry-api-1.26.0 opentelemetry-exporter-otlp-proto-common-1.26.0 opentelemetry-exporter-otlp-proto-grpc-1.26.0 opentelemetry-exporter-otlp-proto-http-1.26.0 opentelemetry-proto-1.26.0 opentelemetry-sdk-1.26.0 opentelemetry-semantic-conventions-0.47b0 polyfactory-2.16.2 python-dotenv-1.0.1 requests-2.32.3 rich-click-1.8.3 typing-inspect-0.9.0 ujson-5.10.0 uvicorn-0.30.5 uvloop-0.19.0 watchdog-4.0.1 watchfiles-0.23.0 websockets-12.0\n"
]
}
],
@@ -288,10 +288,10 @@
"Requirement already satisfied: annotated-types>=0.4.0 in /usr/local/lib/python3.10/dist-packages (from pydantic<3,>=1.9.0->openai) (0.7.0)\n",
"Requirement already satisfied: pydantic-core==2.20.1 in /usr/local/lib/python3.10/dist-packages (from pydantic<3,>=1.9.0->openai) (2.20.1)\n",
"Downloading openai-1.40.0-py3-none-any.whl (360 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m360.4/360.4 kB\u001b[0m \u001b[31m2.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hDownloading jiter-0.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (318 kB)\n",
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m318.9/318.9 kB\u001b[0m \u001b[31m18.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
- "\u001b[?25hInstalling collected packages: jiter, openai\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m360.4/360.4 kB\u001B[0m \u001B[31m2.8 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hDownloading jiter-0.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (318 kB)\n",
+ "\u001B[2K \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m318.9/318.9 kB\u001B[0m \u001B[31m18.2 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
+ "\u001B[?25hInstalling collected packages: jiter, openai\n",
"Successfully installed jiter-0.5.0 openai-1.40.0\n"
]
}
@@ -367,10 +367,22 @@
"outputs": [],
"source": [
"address=\"https://app.evidently.cloud/\"\n",
- "team_id=\"YOUR_TEAM_ID\"\n",
"dataset_name=\"YOUR_TRACING_DATASET_NAME\""
]
},
+ {
+ "metadata": {},
+ "cell_type": "code",
+ "outputs": [],
+ "execution_count": null,
+ "source": [
+ "from evidently.ui.workspace.cloud import CloudWorkspace\n",
+ "ws = CloudWorkspace(token=my_token, url=address)\n",
+ "project = ws.create_project(\"My project title\", team_id=\"YOUR_TEAM_ID\")\n",
+ "project.description = \"My project description\"\n",
+ "project.save()"
+ ]
+ },
{
"cell_type": "markdown",
"metadata": {
@@ -385,7 +397,7 @@
" - **address:** address of collector service\n",
" - **exporter_type:** type of exporter to use \"grpc\" or \"http\"\n",
" - **api_key:** authorization API key for Evidently tracing\n",
- " - **team_id:** team ID in Evidently Cloud\n",
+ " - **project_id:** project ID in Evidently Cloud\n",
" * **export_name**: string name of exported data, all data with the same ID would be grouped into single dataset\n",
" - **as_global:** indicated when to register provider globally for opentelemetry or use local one"
]
@@ -423,7 +435,7 @@
"init_tracing(\n",
" address=address,\n",
" api_key=my_token,\n",
- " team_id=team_id,\n",
+ " project_id=project.id,\n",
" export_name=dataset_name\n",
" )"
]
diff --git a/src/evidently/ui/datasets.py b/src/evidently/ui/datasets.py
index 7945a3e570..70ead2b8d0 100644
--- a/src/evidently/ui/datasets.py
+++ b/src/evidently/ui/datasets.py
@@ -1,3 +1,4 @@
+from enum import Enum
from typing import Dict
from evidently import ColumnMapping
@@ -6,6 +7,16 @@
from evidently.utils.data_preprocessing import FeatureDefinition
+class DatasetSourceType(Enum):
+ # These values correspond to the dataset_sources table in the db.
+ # When changing this class, take care of data consistency manually: add/remove entries in the db.
+ file = 1
+ tracing = 2
+ snapshot_builder = 3
+ dataset = 4
+ datagen = 5
+
+
def inject_feature_types_in_column_mapping(
column_mapping: ColumnMapping, features_metadata: Dict[str, FeatureDefinition]
) -> ColumnMapping:
diff --git a/src/evidently/ui/workspace/base.py b/src/evidently/ui/workspace/base.py
index 6a4b35e5fa..90d80658f3 100644
--- a/src/evidently/ui/workspace/base.py
+++ b/src/evidently/ui/workspace/base.py
@@ -11,6 +11,7 @@
from evidently.suite.base_suite import Snapshot
from evidently.test_suite import TestSuite
from evidently.ui.base import Project
+from evidently.ui.datasets import DatasetSourceType
from evidently.ui.datasets import get_dataset_name_output_current
from evidently.ui.datasets import get_dataset_name_output_reference
from evidently.ui.type_aliases import STR_UUID
@@ -49,12 +50,20 @@ def _add_report_base(self, project_id: STR_UUID, report: ReportBase, include_dat
if current is not None:
dataset_name_current = get_dataset_name_output_current(snapshot.is_report, snapshot.id, run_from)
snapshot.links.datasets.output.current = self.add_dataset(
- current, dataset_name_current, project_id, column_mapping=column_mapping
+ current,
+ dataset_name_current,
+ project_id,
+ column_mapping=column_mapping,
+ dataset_source=DatasetSourceType.snapshot_builder,
)
if reference is not None:
dataset_name_reference = get_dataset_name_output_reference(snapshot.is_report, snapshot.id, run_from)
snapshot.links.datasets.output.reference = self.add_dataset(
- reference, dataset_name_reference, project_id, column_mapping=column_mapping
+ reference,
+ dataset_name_reference,
+ project_id,
+ column_mapping=column_mapping,
+ dataset_source=DatasetSourceType.snapshot_builder,
)
self.add_snapshot(project_id, snapshot)
@@ -86,5 +95,6 @@ def add_dataset(
project_id: STR_UUID,
description: Optional[str] = None,
column_mapping: Optional[ColumnMapping] = None,
+ dataset_source: DatasetSourceType = DatasetSourceType.file,
) -> DatasetID:
raise NotImplementedError
diff --git a/src/evidently/ui/workspace/cloud.py b/src/evidently/ui/workspace/cloud.py
index 8147f41464..56915b3b76 100644
--- a/src/evidently/ui/workspace/cloud.py
+++ b/src/evidently/ui/workspace/cloud.py
@@ -26,6 +26,7 @@
from evidently.ui.base import Org
from evidently.ui.base import ProjectManager
from evidently.ui.base import Team
+from evidently.ui.datasets import DatasetSourceType
from evidently.ui.errors import OrgNotFound
from evidently.ui.errors import ProjectNotFound
from evidently.ui.errors import TeamNotFound
@@ -34,6 +35,7 @@
from evidently.ui.type_aliases import ZERO_UUID
from evidently.ui.type_aliases import DatasetID
from evidently.ui.type_aliases import OrgID
+from evidently.ui.type_aliases import ProjectID
from evidently.ui.type_aliases import TeamID
from evidently.ui.workspace.remote import NoopBlobStorage
from evidently.ui.workspace.remote import NoopDataStorage
@@ -186,10 +188,10 @@ def add_dataset(
self,
file: BinaryIO,
name: str,
- org_id: OrgID,
- team_id: TeamID,
+ project_id: ProjectID,
description: Optional[str],
column_mapping: Optional[ColumnMapping],
+ dataset_source: DatasetSourceType = DatasetSourceType.file,
) -> DatasetID:
cm_payload = json.dumps(dataclasses.asdict(column_mapping)) if column_mapping is not None else None
response: Response = self._request(
@@ -200,8 +202,9 @@ def add_dataset(
"description": description,
"file": file,
"column_mapping": cm_payload,
+ "source_type": dataset_source.value,
},
- query_params={"org_id": org_id, "team_id": team_id},
+ query_params={"project_id": project_id},
form_data=True,
)
return DatasetID(response.json()["dataset_id"])
@@ -267,10 +270,10 @@ def add_dataset(
project_id: STR_UUID,
description: Optional[str] = None,
column_mapping: Optional[ColumnMapping] = None,
+ dataset_source: DatasetSourceType = DatasetSourceType.file,
) -> DatasetID:
file: Union[NamedBytesIO, BinaryIO]
assert isinstance(self.project_manager.metadata, CloudMetadataStorage)
- org_id, team_id = self._get_org_id_team_id(project_id)
if isinstance(data_or_path, str):
file = open(data_or_path, "rb")
elif isinstance(data_or_path, pd.DataFrame):
@@ -279,8 +282,12 @@ def add_dataset(
file.seek(0)
else:
raise NotImplementedError(f"Add datasets is not implemented for {get_classpath(data_or_path.__class__)}")
+ if isinstance(project_id, str):
+ project_id = ProjectID(project_id)
try:
- return self.project_manager.metadata.add_dataset(file, name, org_id, team_id, description, column_mapping)
+ return self.project_manager.metadata.add_dataset(
+ file, name, project_id, description, column_mapping, dataset_source
+ )
finally:
file.close()
diff --git a/src/evidently/ui/workspace/view.py b/src/evidently/ui/workspace/view.py
index fcacb4c224..9ce975978b 100644
--- a/src/evidently/ui/workspace/view.py
+++ b/src/evidently/ui/workspace/view.py
@@ -9,6 +9,7 @@
from evidently.suite.base_suite import Snapshot
from evidently.ui.base import Project
from evidently.ui.base import ProjectManager
+from evidently.ui.datasets import DatasetSourceType
from evidently.ui.type_aliases import STR_UUID
from evidently.ui.type_aliases import ZERO_UUID
from evidently.ui.type_aliases import DatasetID
@@ -82,6 +83,7 @@ def add_dataset(
project_id: STR_UUID,
description: Optional[str] = None,
column_mapping: Optional[ColumnMapping] = None,
+ dataset_source: DatasetSourceType = DatasetSourceType.file,
) -> DatasetID:
raise NotImplementedError("Adding datasets is not supported yet")