Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
68 changes: 1 addition & 67 deletions samples/csv-processor/entry-points.json
Original file line number Diff line number Diff line change
@@ -1,71 +1,5 @@
{
"$schema": "https://cloud.uipath.com/draft/2024-12/entry-point",
"$id": "entry-points.json",
"entryPoints": [
{
"filePath": "main",
"uniqueId": "99d18636-8167-4c38-b3de-448a2fad8032",
"type": "agent",
"input": {
"type": "object",
"properties": {
"attachment": {
"$ref": "#/definitions/job-attachment"
}
},
"required": [
"attachment"
],
"title": "Input",
"definitions": {
"job-attachment": {
"type": "object",
"required": [
"ID"
],
"x-uipath-resource-kind": "JobAttachment",
"properties": {
"ID": {
"type": "string"
},
"FullName": {
"type": "string"
},
"MimeType": {
"type": "string"
},
"Metadata": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
}
}
}
},
"output": {
"type": "object",
"properties": {
"ID": {
"format": "uuid",
"type": "string"
},
"FullName": {
"title": "Fullname",
"type": "string"
},
"MimeType": {
"title": "Mimetype",
"type": "string"
}
},
"required": [
"FullName",
"MimeType"
],
"title": "UiPathAttachment"
}
}
]
"entryPoints": []
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
{
"version": "1.0",
"id": "FileInputTestsLocalEval",
"name": "File Input Scenario Tests (Local Mode)",
"evaluatorRefs": [
"CSVShapeEvaluator",
"CSVColumnsEvaluator"
],
"evaluations": [
{
"id": "test-sales-data-csv",
"name": "Test Sales Data CSV Processing",
"description": "Test processing of sales_data.csv with 5 rows and 5 columns",
"inputs": {
"attachment": {
"ID": "11111111-1111-1111-1111-111111111111",
"FullName": "../../samples/csv-processor/test-data/sales_data.csv",
"MimeType": "text/csv"
}
},
"evaluationCriterias": {
"CSVShapeEvaluator": {
"expected_rows": 5,
"expected_columns": 5
},
"CSVColumnsEvaluator": {
"expected_columns": ["date", "product", "quantity", "price", "total"]
}
}
},
{
"id": "test-large-dataset-csv",
"name": "Test Large Dataset CSV Processing",
"description": "Test processing of large_dataset.csv with 5 rows and 13 columns",
"inputs": {
"attachment": {
"ID": "22222222-2222-2222-2222-222222222222",
"FullName": "../../samples/csv-processor/test-data/large_dataset.csv",
"MimeType": "text/csv"
}
},
"evaluationCriterias": {
"CSVShapeEvaluator": {
"expected_rows": 5,
"expected_columns": 13
},
"CSVColumnsEvaluator": {
"expected_columns": ["id", "name", "email", "department", "salary", "status"]
}
}
},
{
"id": "test-minimal-csv",
"name": "Test Minimal CSV Processing",
"description": "Test processing of minimal.csv with 1 row and 2 columns",
"inputs": {
"attachment": {
"ID": "33333333-3333-3333-3333-333333333333",
"FullName": "../../samples/csv-processor/test-data/minimal.csv",
"MimeType": "text/csv"
}
},
"evaluationCriterias": {
"CSVShapeEvaluator": {
"expected_rows": 1,
"expected_columns": 2
},
"CSVColumnsEvaluator": {
"expected_columns": ["id", "value"]
}
}
}
]
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"version": "1.0",
"id": "AttachmentCreatedEvaluator",
"description": "Checks if the agent successfully created an output attachment with processed CSV information",
"evaluatorSchema": "file://attachment_created_evaluator.py:AttachmentCreatedEvaluator",
"evaluatorConfig": {
"name": "AttachmentCreatedEvaluator",
"defaultEvaluationCriteria": {
"attachment_name": "processed_output.txt"
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
from uipath.eval.evaluators import (
BaseEvaluationCriteria,
BaseEvaluator,
BaseEvaluatorConfig,
)
from uipath.eval.models import AgentExecution, EvaluationResult, NumericEvaluationResult


class AttachmentCreatedEvaluationCriteria(BaseEvaluationCriteria):
    """Criteria consumed by ``AttachmentCreatedEvaluator``.

    Mirrors the ``defaultEvaluationCriteria`` section of the evaluator's
    JSON config.
    """

    # File name of the attachment the agent is expected to create; also
    # searched for verbatim in span attributes and the agent output.
    attachment_name: str = "processed_output.txt"


class AttachmentCreatedEvaluatorConfig(
    BaseEvaluatorConfig[AttachmentCreatedEvaluationCriteria]
):
    """Configuration model for ``AttachmentCreatedEvaluator``."""

    # Evaluator display name; matches the "name" field in the JSON config.
    name: str = "AttachmentCreatedEvaluator"
    # Criteria used when an evaluation does not supply its own.
    default_evaluation_criteria: AttachmentCreatedEvaluationCriteria = (
        AttachmentCreatedEvaluationCriteria()
    )


class AttachmentCreatedEvaluator(
    BaseEvaluator[
        AttachmentCreatedEvaluationCriteria, AttachmentCreatedEvaluatorConfig, None
    ]
):
    """Checks whether the agent appears to have created an output attachment.

    The check is heuristic: it scans the execution trace for span names or
    string attributes that mention an attachment (or the expected attachment
    name), and falls back to searching the serialized agent output.
    """

    @classmethod
    def get_evaluator_id(cls) -> str:
        return "AttachmentCreatedEvaluator"

    async def evaluate(
        self,
        agent_execution: AgentExecution,
        evaluation_criteria: AttachmentCreatedEvaluationCriteria,
    ) -> EvaluationResult:
        target = evaluation_criteria.attachment_name

        # First look for evidence of attachment creation in the trace.
        created = self._trace_mentions_attachment(agent_execution, target)

        # Fallback: the serialized agent output may mention the attachment.
        if not created and agent_execution.agent_output:
            rendered = str(agent_execution.agent_output)
            created = "attachment" in rendered.lower() or target in rendered

        # Binary outcome: 1.0 when an attachment was (apparently) created.
        return NumericEvaluationResult(
            score=float(created),
        )

    @staticmethod
    def _trace_mentions_attachment(agent_execution, target):
        """Return True if any span name or string attribute hints at an attachment.

        A span matches when its name contains "attachment" or "create", or
        when any string attribute has "attachment" in its key or contains
        *target* in its value.
        """
        for span in agent_execution.agent_trace:
            lowered_name = span.name.lower()
            if "attachment" in lowered_name or "create" in lowered_name:
                return True
            if not span.attributes:
                continue
            for key, value in span.attributes.items():
                if isinstance(value, str) and (
                    "attachment" in key.lower() or target in value
                ):
                    return True
        return False
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"version": "1.0",
"id": "CSVColumnsEvaluator",
"description": "Checks if the expected CSV column names are correctly identified in the output",
"evaluatorSchema": "file://csv_columns_evaluator.py:CSVColumnsEvaluator",
"evaluatorConfig": {
"name": "CSVColumnsEvaluator",
"defaultEvaluationCriteria": {
"expected_columns": []
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
from typing import List

from uipath.eval.evaluators import (
BaseEvaluationCriteria,
BaseEvaluator,
BaseEvaluatorConfig,
)
from uipath.eval.models import AgentExecution, EvaluationResult, NumericEvaluationResult


class CSVColumnsEvaluationCriteria(BaseEvaluationCriteria):
    """Criteria consumed by ``CSVColumnsEvaluator``.

    Mirrors the ``defaultEvaluationCriteria`` section of the evaluator's
    JSON config.
    """

    # Column names whose presence is searched for in the agent's trace/output.
    expected_columns: List[str]


class CSVColumnsEvaluatorConfig(BaseEvaluatorConfig[CSVColumnsEvaluationCriteria]):
    """Configuration model for ``CSVColumnsEvaluator``."""

    # Evaluator display name; matches the "name" field in the JSON config.
    name: str = "CSVColumnsEvaluator"
    # With no expected columns configured, the evaluator trivially passes.
    default_evaluation_criteria: CSVColumnsEvaluationCriteria = (
        CSVColumnsEvaluationCriteria(expected_columns=[])
    )


class CSVColumnsEvaluator(
    BaseEvaluator[CSVColumnsEvaluationCriteria, CSVColumnsEvaluatorConfig, None]
):
    """Scores how many expected CSV column names appear in the agent's output.

    The score is the fraction of ``expected_columns`` found anywhere in the
    execution trace (span attributes and span events, where captured stdout
    may land) or in the serialized agent output.
    """

    @classmethod
    def get_evaluator_id(cls) -> str:
        return "CSVColumnsEvaluator"

    async def evaluate(
        self,
        agent_execution: AgentExecution,
        evaluation_criteria: CSVColumnsEvaluationCriteria,
    ) -> EvaluationResult:
        # The agent writes: f"CSV shape {df.shape}\n\nCSV columns {df.columns}"
        expected = evaluation_criteria.expected_columns
        if len(expected) == 0:
            # Nothing to look for: treat as a trivial pass.
            return NumericEvaluationResult(score=1.0)

        found = set()

        def _scan(text):
            # Record every expected column name that occurs in *text*.
            for column in expected:
                if column in text:
                    found.add(column)

        # Walk the trace; both span attributes and span-event attributes may
        # carry the captured print output.
        for span in agent_execution.agent_trace:
            if span.attributes:
                for value in span.attributes.values():
                    if isinstance(value, str):
                        _scan(value)

            if hasattr(span, 'events') and span.events:
                for event in span.events:
                    if hasattr(event, 'attributes') and event.attributes:
                        for value in event.attributes.values():
                            if isinstance(value, str):
                                _scan(value)

        # Fall back to the serialized output if any columns are still missing.
        if len(found) < len(expected) and agent_execution.agent_output:
            _scan(str(agent_execution.agent_output))

        # Score is the fraction of expected columns that were located.
        return NumericEvaluationResult(
            score=len(found) / len(expected),
        )
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
{
"version": "1.0",
"id": "CSVShapeEvaluator",
"description": "Checks if the CSV shape information (rows, columns) is correct in the output",
"evaluatorSchema": "file://csv_shape_evaluator.py:CSVShapeEvaluator",
"evaluatorConfig": {
"name": "CSVShapeEvaluator",
"defaultEvaluationCriteria": {
"expected_rows": 1,
"expected_columns": 1
}
}
}
Loading