repo_name
stringlengths
1
62
dataset
stringclasses
1 value
lang
stringclasses
11 values
pr_id
int64
1
20.1k
owner
stringlengths
2
34
reviewer
stringlengths
2
39
diff_hunk
stringlengths
15
262k
code_review_comment
stringlengths
1
99.6k
langfuse-python
github_2023
python
1,011
langfuse
greptile-apps[bot]
@@ -220,7 +245,7 @@ def create( Examples -------- from finto import ChatMessage, CreatePromptRequest_Chat - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse
logic: still using 'from finto import ChatMessage, CreatePromptRequest_Chat' while other imports were updated to langfuse.api.client
langfuse-python
github_2023
python
1,011
langfuse
greptile-apps[bot]
@@ -53,7 +53,7 @@ def create( Examples -------- from finto import CreateScoreRequest - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse
logic: still importing CreateScoreRequest from 'finto' instead of 'langfuse.api'
langfuse-python
github_2023
python
1,011
langfuse
greptile-apps[bot]
@@ -426,7 +426,7 @@ async def create( import asyncio from finto import CreateScoreRequest - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse
logic: still importing CreateScoreRequest from 'finto' instead of 'langfuse.api'
langfuse-python
github_2023
python
1,011
langfuse
greptile-apps[bot]
@@ -45,7 +48,7 @@ def create( Examples -------- from finto import ConfigCategory, CreateScoreConfigRequest, ScoreDataType - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse
logic: The import statement still references 'finto' package but the client is imported from 'langfuse.api.client'
langfuse-python
github_2023
others
1,011
langfuse
greptile-apps[bot]
@@ -37,20 +37,20 @@ pytest-asyncio = ">=0.21.1,<0.24.0" pytest-httpserver = "^1.0.8" boto3 = "^1.28.59" ruff = ">=0.1.8,<0.6.0" -langchain-mistralai = ">=0.0.1,<0.1.8" +langchain-mistralai = ">=0.0.1,<0.3" google-cloud-aiplatform = "^1.38.1" cohere = ">=4.46,<6.0" -langchain-google-vertexai = ">=0.0.5,<1.0.5" -lan...
logic: Major version bump from <1.0.5 to >=2.0.0 for langchain-google-vertexai could introduce breaking changes. Verify compatibility.
langfuse-python
github_2023
others
1,011
langfuse
greptile-apps[bot]
@@ -27,8 +30,8 @@ Create a comment. Comments may be attached to different object types (trace, obs <dd> ```python -from finto import CreateCommentRequest -from finto.client import FernLangfuse +from langfuse.api import CreateCommentRequest +from langfuse.api.client import FernLangfuse
logic: The import statement still uses 'from finto import CreateCommentRequest' while the FernLangfuse client import was updated. This should be updated to use langfuse.api for consistency.
langfuse-python
github_2023
others
1,011
langfuse
greptile-apps[bot]
@@ -296,8 +303,8 @@ Create a dataset item <dd> ```python -from finto import CreateDatasetItemRequest, DatasetStatus -from finto.client import FernLangfuse +from langfuse.api import CreateDatasetItemRequest, DatasetStatus +from langfuse.api.client import FernLangfuse
logic: The import statement still uses 'from finto import CreateDatasetItemRequest, DatasetStatus' while the FernLangfuse client import was updated. This should be updated to use langfuse.api for consistency.
langfuse-python
github_2023
python
1,011
langfuse
greptile-apps[bot]
@@ -11,7 +11,7 @@ class HealthResponse(pydantic_v1.BaseModel): """ Examples -------- - from finto import HealthResponse + from langfuse.api import HealthResponse
logic: import path should be updated from 'finto' to 'langfuse.api.client' to match other changes in the PR
langfuse-python
github_2023
python
1,011
langfuse
greptile-apps[bot]
@@ -13,7 +13,7 @@ class ScoreBody(pydantic_v1.BaseModel): """ Examples -------- - from finto import ScoreBody + from langfuse.api import ScoreBody
logic: import path in example still uses 'finto' instead of 'langfuse.api.client' which is inconsistent with the PR's goal of updating import paths
langfuse-python
github_2023
python
1,011
langfuse
greptile-apps[bot]
@@ -13,7 +13,7 @@ class CreateScoreRequest(pydantic_v1.BaseModel): """ Examples -------- - from finto import CreateScoreRequest + from langfuse.api import CreateScoreRequest
logic: import path in example still references 'finto' instead of 'langfuse.api.client' which is inconsistent with the PR's stated goal of updating import paths
langfuse-python
github_2023
others
1,011
langfuse
greptile-apps[bot]
@@ -1,5 +1,7 @@ # Reference + ## Comments + <details><summary><code>client.comments.<a href="src/finto/resources/comments/client.py">create</a>(...)</code></summary>
logic: The href still points to 'src/finto/resources/comments/client.py' but should be updated to reflect the new path structure
langfuse-python
github_2023
python
1,011
langfuse
greptile-apps[bot]
@@ -69,27 +72,41 @@ def create( ) """ _response = self._client_wrapper.httpx_client.request( - "api/public/dataset-items", method="POST", json=request, request_options=request_options, omit=OMIT + "api/public/dataset-items",
logic: API endpoint path missing leading forward slash which could cause request failures
langfuse-python
github_2023
others
1,011
langfuse
greptile-apps[bot]
@@ -37,20 +40,20 @@ pytest-asyncio = ">=0.21.1,<0.24.0" pytest-httpserver = "^1.0.8" boto3 = "^1.28.59" ruff = ">=0.1.8,<0.6.0" -langchain-mistralai = ">=0.0.1,<0.1.8" +langchain-mistralai = ">=0.0.1,<0.3" google-cloud-aiplatform = "^1.38.1" cohere = ">=4.46,<6.0" -langchain-google-vertexai = ">=0.0.5,<1.0.5" -lan...
logic: langchain-community package is not properly placed under any section. Should be under [tool.poetry.dependencies] or [tool.poetry.group.dev.dependencies].
langfuse-python
github_2023
others
1,011
langfuse
ellipsis-dev[bot]
@@ -19,6 +19,9 @@ packaging = ">=23.2,<25.0" idna = "^3.7" anyio = "^4.4.0" requests = "^2" +langchain-ollama = "^0.2.0"
Consider marking `langchain-ollama`, `langchain-cohere`, and `langchain-huggingface` as optional dependencies to avoid unnecessary installations for users who do not need these features.
langfuse-python
github_2023
python
1,011
langfuse
greptile-apps[bot]
@@ -68,24 +62,15 @@ "claude-3-sonnet-20240229", ChatAnthropic(model="claude-3-sonnet-20240229"), ), - ("anthropic", Anthropic()), - ("anthropic", Anthropic()), + ("claude-2", Anthropic()), + ("claude-2", Anthropic()),
logic: duplicate test case for Anthropic() with default settings
langfuse-python
github_2023
others
1,011
langfuse
greptile-apps[bot]
@@ -38,10 +38,10 @@ jobs: fail-fast: false matrix: python-version: - - "3.8" - "3.9" - "3.10" - "3.11" +
logic: Python 3.12 was removed from the test matrix. This seems inconsistent with the PR's goal of supporting Python versions '>=3.9,<4.0'. Consider keeping Python 3.12 in the test matrix to ensure compatibility.
langfuse-python
github_2023
python
1,011
langfuse
greptile-apps[bot]
@@ -46,4 +48,4 @@ def test_entire_llm_call_using_langchain_openai(expected_model, model): assert len(trace.observations) == 1 generation = list(filter(lambda o: o.type == "GENERATION", trace.observations))[0]
logic: filter operation could return empty list if no GENERATION observations exist, causing IndexError on next line
langfuse-python
github_2023
python
1,008
langfuse
greptile-apps[bot]
@@ -200,6 +200,7 @@ def _process_upload_media_job( headers={ "Content-Type": data["content_type"], "x-amz-checksum-sha256": data["content_sha256_hash"], + "x-ms-blob-type": "BlockBlob",
logic: AWS and Azure headers are mixed together. Should conditionally set headers based on the storage provider to avoid sending AWS headers to Azure and vice versa.
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -1,56 +1,45 @@ -"""@private""" - -import atexit import json import logging -import queue import threading -from queue import Empty, Queue import time -from typing import List, Any, Optional import typing +from queue import Empty, Queue +from typing import Any, List, Optional -from langfuse.Sampler import Samp...
logic: _api_client is declared but never initialized or used
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,245 @@ +import base64 +import hashlib +import logging +from queue import Empty +from typing import Literal + +import requests + +from langfuse.api import GetMediaUploadUrlRequest, PatchMediaBody +from langfuse.api.client import FernLangfuse +from langfuse.utils import _get_timestamp + +from .media_upload_que...
logic: Inconsistent handling of output field - processes single output directly but also processes it again in list handling below
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,245 @@ +import base64 +import hashlib +import logging +from queue import Empty +from typing import Literal + +import requests + +from langfuse.api import GetMediaUploadUrlRequest, PatchMediaBody +from langfuse.api.client import FernLangfuse +from langfuse.utils import _get_timestamp + +from .media_upload_que...
logic: Missing validation that content is a list before iteration
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,245 @@ +import base64 +import hashlib +import logging +from queue import Empty +from typing import Literal + +import requests + +from langfuse.api import GetMediaUploadUrlRequest, PatchMediaBody +from langfuse.api.client import FernLangfuse +from langfuse.utils import _get_timestamp + +from .media_upload_que...
logic: No timeout set for PUT request - could hang indefinitely on slow uploads
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,245 @@ +import base64 +import hashlib +import logging +from queue import Empty +from typing import Literal + +import requests + +from langfuse.api import GetMediaUploadUrlRequest, PatchMediaBody +from langfuse.api.client import FernLangfuse +from langfuse.utils import _get_timestamp + +from .media_upload_que...
logic: Queue put() with block=True could deadlock if queue is full
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,41 @@ +import logging +import threading + +from .media_manager import MediaManager + + +class MediaUploadConsumer(threading.Thread): + _log = logging.getLogger(__name__) + _identifier: int + _max_retries: int + _media_manager: MediaManager
logic: max_retries is defined as a class attribute but never used in the implementation
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,41 @@ +import logging +import threading + +from .media_manager import MediaManager + + +class MediaUploadConsumer(threading.Thread): + _log = logging.getLogger(__name__) + _identifier: int + _max_retries: int + _media_manager: MediaManager + + def __init__( + self, + *, + ...
logic: no error handling around process_next_media_upload() - unhandled exceptions will crash the thread
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,195 @@ +"""@private""" + +import atexit +import logging +import queue +from queue import Queue +from typing import List, Optional + +from langfuse.api.client import FernLangfuse +from langfuse.request import LangfuseClient +from langfuse.types import MaskFunction +from langfuse.utils import _get_timestamp + ...
logic: Queue.put() with block=False can silently drop events. Consider using block=True with a timeout to ensure critical events aren't lost
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,195 @@ +"""@private""" + +import atexit +import logging +import queue +from queue import Queue +from typing import List, Optional + +from langfuse.api.client import FernLangfuse +from langfuse.request import LangfuseClient +from langfuse.types import MaskFunction +from langfuse.utils import _get_timestamp + ...
logic: qsize() followed by join() is not thread-safe - the logged size may be inaccurate by the time join() completes
langfuse-python
github_2023
others
996
langfuse
greptile-apps[bot]
@@ -27,16 +27,13 @@ client = FernLangfuse( password="YOUR_PASSWORD", base_url="https://yourhost.com/path/to/api", ) -client.dataset_items.create( - request=CreateDatasetItemRequest( - dataset_name="string", - input={"key": "value"}, - expected_output={"key": "value"}, - metadat...
logic: object_type should be one of the enum values from CommentObjectType (TRACE, OBSERVATION, SESSION, PROMPT). Add example using actual enum value.
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,534 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.jsonable_encoder import jsonable_encoder +from ....
logic: Documentation indicates objectType is required when objectId is provided, but no validation is implemented
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,69 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ....core.datetime_utils import serialize_datetime +from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class CreateCommentRequest(pydantic_v1.BaseModel): + ...
logic: object_type should use the CommentObjectType enum instead of str to ensure valid values
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,69 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ....core.datetime_utils import serialize_datetime +from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class CreateCommentRequest(pydantic_v1.BaseModel): + ...
logic: content field needs max_length=500 validator to enforce character limit mentioned in docstring
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,29 @@ +# This file was auto-generated by Fern from our API Definition. + +import enum +import typing + +T_Result = typing.TypeVar("T_Result") + + +class CommentObjectType(str, enum.Enum): + TRACE = "TRACE" + OBSERVATION = "OBSERVATION" + SESSION = "SESSION" + PROMPT = "PROMPT" + + def visit( +...
logic: visit method lacks exhaustive pattern matching - could raise undefined behavior if new enum values are added. Consider adding an else clause that raises NotImplementedError
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,509 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.jsonable_encoder import jsonable_encoder +from ....
logic: Consider handling the case where response.json() fails but status code is not in the expected range - currently it will raise an ApiError with potentially invalid json
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,509 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.jsonable_encoder import jsonable_encoder +from ....
logic: API endpoints should start with a leading slash - change to '/api/public/media/'
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,72 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ....core.datetime_utils import serialize_datetime +from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class GetMediaResponse(pydantic_v1.BaseModel): + med...
logic: url_expiry should be dt.datetime type since it represents a timestamp, not a string. This ensures proper date validation and handling.
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,74 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ....core.datetime_utils import serialize_datetime +from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class GetMediaUploadUrlRequest(pydantic_v1.BaseModel): ...
logic: field should be constrained to only allow 'input', 'output', or 'metadata' values using Literal type
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,74 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ....core.datetime_utils import serialize_datetime +from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class GetMediaUploadUrlRequest(pydantic_v1.BaseModel): ...
logic: content_length should have a minimum value of 0 to prevent negative sizes
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,191 @@ +# This file was auto-generated by Fern from our API Definition. + +from __future__ import annotations + +import datetime as dt +import typing + +from ....core.datetime_utils import serialize_datetime +from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from ...commons.type...
logic: Boolean score model has inconsistent value types - using both float and string_value for what should be a boolean value. This could cause data integrity issues.
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -1,8 +1,7 @@ # This file was auto-generated by Fern from our API Definition. - -from langfuse.api.core.http_client import get_request_body -from langfuse.api.core.request_options import RequestOptions +from finto.core.http_client import get_request_body +from finto.core.request_options import RequestOptions
logic: Import paths changed from 'langfuse.api.core' to 'finto.core' - this seems incorrect as it's importing from a different package namespace. Should these still be using the langfuse namespace?
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -215,15 +213,31 @@ def _extract_chat_response(kwargs: any): "role": kwargs.get("role", None), } + audio_content = None + if kwargs.get("function_call") is not None: response.update({"function_call": kwargs["function_call"]}) if kwargs.get("tool_calls") is not None: re...
logic: Audio content extraction assumes audio object has __dict__ attribute - may fail for custom audio response objects
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -215,15 +213,31 @@ def _extract_chat_response(kwargs: any): "role": kwargs.get("role", None), } + audio_content = None + if kwargs.get("function_call") is not None: response.update({"function_call": kwargs["function_call"]}) if kwargs.get("tool_calls") is not None: re...
logic: Using 'or' operator for content fallback could cause issues if content is an empty string or false-y value
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -2211,3 +2210,36 @@ def _generate_random_dict(n: int, key_length: int = 8) -> Dict[str, Any]: print(f"Full execution took {duration_full}ms") assert duration_full > 1000, "Full execution should take longer than 1 second" + + +def test_multimodal(): + import base64 + + from langsmith.wrappers import...
logic: model='gpt-4o-mini' appears to be a typo - should likely be 'gpt-4-vision-preview'
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -2211,3 +2210,36 @@ def _generate_random_dict(n: int, key_length: int = 8) -> Dict[str, Any]: print(f"Full execution took {duration_full}ms") assert duration_full > 1000, "Full execution should take longer than 1 second" + + +def test_multimodal():
logic: test_multimodal() lacks assertions to verify the response and proper error handling for file operations
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -205,16 +205,16 @@ def _truncate_item_in_place( break # if item does not have body or input/output fields, drop the event - if "body" not in item or ( - "input" not in item["body"] and "output" not in item["body"] + if "body" not in event o...
logic: this check drops events that only have metadata, which may be valid events
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,196 @@ +"""@private""" + +import atexit +import logging +import queue +from queue import Queue +from typing import List, Optional + +from langfuse.api.client import FernLangfuse +from langfuse.request import LangfuseClient +from langfuse.types import MaskFunction +from langfuse.utils import _get_timestamp + ...
logic: init_resources() is called before atexit.register(), meaning cleanup could be registered before resources are fully initialized
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,196 @@ +"""@private""" + +import atexit +import logging +import queue +from queue import Queue +from typing import List, Optional + +from langfuse.api.client import FernLangfuse +from langfuse.request import LangfuseClient +from langfuse.types import MaskFunction +from langfuse.utils import _get_timestamp + ...
logic: No error handling if thread.start() fails - could leave system in inconsistent state
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,196 @@ +"""@private""" + +import atexit +import logging +import queue +from queue import Queue +from typing import List, Optional + +from langfuse.api.client import FernLangfuse +from langfuse.request import LangfuseClient +from langfuse.types import MaskFunction +from langfuse.utils import _get_timestamp + ...
logic: Race condition possible between pausing consumers and joining them - consumers could process new items after pause
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,196 @@ +"""@private""" + +import atexit +import logging +import queue +from queue import Queue +from typing import List, Optional + +from langfuse.api.client import FernLangfuse +from langfuse.request import LangfuseClient +from langfuse.types import MaskFunction +from langfuse.utils import _get_timestamp + ...
logic: flush() and join() could raise exceptions, leaving system in partially shutdown state
langfuse-python
github_2023
others
996
langfuse
greptile-apps[bot]
@@ -949,6 +1218,258 @@ client.ingestion.batch( </dl> +</dd> +</dl> +</details> + +## Media +<details><summary><code>client.media.<a href="src/finto/resources/media/client.py">get</a>(...)</code></summary> +<dl> +<dd> + +#### 📝 Description + +<dl> +<dd> + +<dl> +<dd> + +Get a media record +</dd> +</dl> +</dd> +</d...
logic: content_type parameter is missing from GetMediaUploadUrlRequest example, which is required for multimodal uploads
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,534 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.jsonable_encoder import jsonable_encoder +from ....
logic: Request path should start with a forward slash ('/api/public/comments') to ensure consistent URL construction across different client configurations
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,534 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.jsonable_encoder import jsonable_encoder +from ....
logic: Final ApiError is raised unconditionally, which means successful responses in the 200-299 range will still raise an error if they reach this point
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,54 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ....core.datetime_utils import serialize_datetime +from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .comment_object_type import CommentObjectType + + +cla...
logic: Merging two dicts with different exclude settings could lead to unexpected behavior if fields are present in one but not the other
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,66 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ....core.datetime_utils import serialize_datetime +from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class PatchMediaBody(pydantic_v1.BaseModel): + uploa...
logic: HTTP status field should have validation to ensure it's a valid HTTP status code (100-599)
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,66 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ....core.datetime_utils import serialize_datetime +from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class PatchMediaBody(pydantic_v1.BaseModel): + uploa...
logic: upload_time_ms should have validation to ensure it's a positive integer
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,200 @@ +"""This module contains the LangfuseMedia class, which is used to wrap media objects for upload to Langfuse.""" + +import base64 +import hashlib +import logging +import os +from typing import Optional, cast + +from langfuse.api import MediaContentType +from langfuse.types import ParsedMediaReference ...
logic: no validation of key/value format - could raise unhandled exception if = is missing
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,200 @@ +"""This module contains the LangfuseMedia class, which is used to wrap media objects for upload to Langfuse.""" + +import base64 +import hashlib +import logging +import os +from typing import Optional, cast + +from langfuse.api import MediaContentType +from langfuse.types import ParsedMediaReference ...
logic: content_type extraction assumes ;base64 suffix - could break with additional parameters
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -40,6 +43,12 @@ def default(self, obj: Any): # Timezone-awareness check return serialize_datetime(obj) + if isinstance(obj, LangfuseMedia): + return ( + obj._reference_string + or "<Failed to generate reference string...
logic: Consider adding error handling for when _reference_string is None but the object exists. The current fallback message doesn't provide enough context about why the reference string generation failed.
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -1457,3 +1460,28 @@ def main(): if o.parent_observation_id == mock_parent_observation_id ] assert len(child_observations) == 2 + + +def test_pdf_in_metadata(): + mock_trace_id = create_uuid() + + with open("static/bitcoin.pdf", "rb") as pdf_file: + pdf_bytes = pdf_file.read() + + @...
logic: Incorrect constructor usage - first positional argument is unused but passed. Remove pdf_bytes from first position.
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,106 @@ +import base64 +import pytest +from langfuse.media import LangfuseMedia + +# Test data +SAMPLE_JPEG_BYTES = b"\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01\x01\x01\x00H\x00H\x00\x00" +SAMPLE_BASE64_DATA_URI = ( + "data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/4QBARXhpZgAA" +) + + +def test_init_with_ba...
logic: silently failing without raising errors on invalid initialization is dangerous - should test that appropriate warnings are logged
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,106 @@ +import base64 +import pytest +from langfuse.media import LangfuseMedia + +# Test data +SAMPLE_JPEG_BYTES = b"\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01\x01\x01\x00H\x00H\x00\x00" +SAMPLE_BASE64_DATA_URI = ( + "data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/4QBARXhpZgAA" +) + + +def test_init_with_ba...
logic: test only verifies hash is base64 decodable, but not that it's actually a valid SHA-256 hash of the content
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -390,15 +388,11 @@ def _get_input_from_func_args( ) -> Any: # Remove implicitly passed "self" or "cls" argument for instance or class methods logged_args = func_args[1:] if is_method else func_args - raw_input = { + return { "args": logged_args, "kwargs"...
logic: Removing JSON serialization could allow non-serializable objects to pass through. Consider adding validation or documentation about supported input types.
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -446,13 +440,10 @@ def _handle_call_result( ) end_time = observation_params["end_time"] or _get_timestamp() - raw_output = observation_params["output"] or ( + output = observation_params["output"] or ( result if result and capture_output else None ...
logic: Same concern with output handling - need to ensure non-serializable outputs are handled appropriately by downstream components.
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,202 @@ +"""@private""" + +import atexit +import logging +import queue +from queue import Queue +from typing import List, Optional + +from langfuse.api.client import FernLangfuse +from langfuse.request import LangfuseClient +from langfuse.types import MaskFunction +from langfuse.utils import _get_timestamp + ...
logic: Pause media upload consumers after ingestion consumers - could cause deadlock if media upload depends on ingestion
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,200 @@ +"""This module contains the LangfuseMedia class, which is used to wrap media objects for upload to Langfuse.""" + +import base64 +import hashlib +import logging +import os +from typing import Optional, cast + +from langfuse.api import MediaContentType +from langfuse.types import ParsedMediaReference ...
logic: file_path existence check should happen after content_type check to avoid unnecessary filesystem access
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,200 @@ +"""This module contains the LangfuseMedia class, which is used to wrap media objects for upload to Langfuse.""" + +import base64 +import hashlib +import logging +import os +from typing import Optional, cast + +from langfuse.api import MediaContentType +from langfuse.types import ParsedMediaReference ...
logic: should handle case where header contains base64 but not at the end (e.g. base64;charset=utf-8)
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,243 @@ +import logging +from queue import Empty +from typing import Any, Callable, Optional, TypeVar +from typing_extensions import ParamSpec + +import time +import requests +import backoff + +from langfuse.api import GetMediaUploadUrlRequest, PatchMediaBody +from langfuse.api.core import ApiError +from lang...
logic: This exception will never be reached since the previous except block re-raises all exceptions
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,200 @@ +"""This module contains the LangfuseMedia class, which is used to wrap media objects for upload to Langfuse.""" + +import base64 +import hashlib +import logging +import os +from typing import Optional, cast + +from langfuse.api import MediaContentType +from langfuse.types import ParsedMediaReference ...
logic: return type changed from ParsedMediaReference to dict - breaks type safety
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -172,14 +179,14 @@ def _truncate_item_in_place( item_size, ) - if "body" in item: + if "body" in event: drop_candidates = ["input", "output", "metadata"] sorted_field_sizes = sorted( [ ...
logic: potential KeyError if field exists in drop_candidates but event['body'][field] is None
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -191,10 +198,10 @@ def _truncate_item_in_place( for _ in range(len(sorted_field_sizes)): field_to_drop, size_to_drop = sorted_field_sizes.pop() - if field_to_drop not in item["body"]: + if field_to_drop not in event["body"]: ...
logic: item_size calculation may be inaccurate after replacing field with log_message - should recalculate actual size
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,203 @@ +"""This module contains the LangfuseMedia class, which is used to wrap media objects for upload to Langfuse.""" + +import base64 +import hashlib +import logging +import os +from typing import Optional, cast + +from langfuse.api import MediaContentType +from langfuse.types import ParsedMediaReference ...
logic: no error handling for file permissions or disk space issues
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,203 @@ +"""This module contains the LangfuseMedia class, which is used to wrap media objects for upload to Langfuse.""" + +import base64 +import hashlib +import logging +import os +from typing import Optional, cast + +from langfuse.api import MediaContentType +from langfuse.types import ParsedMediaReference ...
logic: no memory limit on hash calculation - could cause OOM with large files
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,203 @@ +"""This module contains the LangfuseMedia class, which is used to wrap media objects for upload to Langfuse.""" + +import base64 +import hashlib +import logging +import os +from typing import Optional, cast + +from langfuse.api import MediaContentType +from langfuse.types import ParsedMediaReference ...
logic: check parsed_data for None before unpacking tuple
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,203 @@ +"""This module contains the LangfuseMedia class, which is used to wrap media objects for upload to Langfuse.""" + +import base64 +import hashlib +import logging +import os +from typing import Optional, cast, Tuple + +from langfuse.api import MediaContentType +from langfuse.types import ParsedMediaRef...
logic: content_type not validated against MediaContentType enum before casting
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,203 @@ +"""This module contains the LangfuseMedia class, which is used to wrap media objects for upload to Langfuse.""" + +import base64 +import hashlib +import logging +import os +from typing import Optional, cast, Tuple + +from langfuse.api import MediaContentType +from langfuse.types import ParsedMediaRef...
logic: base64.b64decode() errors not specifically caught and handled
langfuse-python
github_2023
python
996
langfuse
maxdeichmann
@@ -1,56 +1,45 @@ -"""@private""" - -import atexit import json import logging -import queue import threading -from queue import Empty, Queue import time -from typing import List, Any, Optional -import typing -from langfuse.Sampler import Sampler -from langfuse.parse_error import handle_exception -from langfuse.re...
Why are these optional?
langfuse-python
github_2023
python
996
langfuse
maxdeichmann
@@ -1,56 +1,45 @@ -"""@private""" - -import atexit import json import logging -import queue import threading -from queue import Empty, Queue import time -from typing import List, Any, Optional -import typing -from langfuse.Sampler import Sampler -from langfuse.parse_error import handle_exception -from langfuse.re...
This one can be removed, right?
langfuse-python
github_2023
python
996
langfuse
maxdeichmann
@@ -0,0 +1,245 @@ +import logging +from queue import Empty +from typing import Any, Callable, Optional, TypeVar +from typing_extensions import ParamSpec + +import time +import requests +import backoff + +from langfuse.api import GetMediaUploadUrlRequest, PatchMediaBody +from langfuse.api.core import ApiError +from lang...
LOL. This will be super noisy if debug logs are enabled :D
langfuse-python
github_2023
python
996
langfuse
maxdeichmann
@@ -0,0 +1,245 @@ +import logging +from queue import Empty +from typing import Any, Callable, Optional, TypeVar +from typing_extensions import ParamSpec + +import time +import requests +import backoff + +from langfuse.api import GetMediaUploadUrlRequest, PatchMediaBody +from langfuse.api.core import ApiError +from lang...
For users with media upload and lots of traffic, this could become a performance bottleneck at some point. Just a heads up — no action required now.
langfuse-python
github_2023
python
996
langfuse
maxdeichmann
@@ -0,0 +1,245 @@ +import logging +from queue import Empty +from typing import Any, Callable, Optional, TypeVar +from typing_extensions import ParamSpec + +import time +import requests +import backoff + +from langfuse.api import GetMediaUploadUrlRequest, PatchMediaBody +from langfuse.api.core import ApiError +from lang...
In the openai integration, we specifically add langfusemedia for audio. This is handled here, correct?
langfuse-python
github_2023
python
996
langfuse
maxdeichmann
@@ -0,0 +1,245 @@ +import logging +from queue import Empty +from typing import Any, Callable, Optional, TypeVar +from typing_extensions import ParamSpec + +import time +import requests +import backoff + +from langfuse.api import GetMediaUploadUrlRequest, PatchMediaBody +from langfuse.api.core import ApiError +from lang...
What is this for? All images in openai?
langfuse-python
github_2023
python
996
langfuse
maxdeichmann
@@ -0,0 +1,245 @@ +import logging +from queue import Empty +from typing import Any, Callable, Optional, TypeVar +from typing_extensions import ParamSpec + +import time +import requests +import backoff + +from langfuse.api import GetMediaUploadUrlRequest, PatchMediaBody +from langfuse.api.core import ApiError +from lang...
We only handle strings, dicts, and lists here — nothing else. I think this is expected; just bringing it up in case we missed something.
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -97,73 +87,88 @@ def __init__( self._sdk_integration = sdk_integration self._mask = mask self._sampler = Sampler(sample_rate) + self._media_manager = media_manager def _next(self): """Return the next batch of items to upload.""" - queue = self._queue - ...
logic: potential race condition if flush_interval - elapsed becomes negative
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -68,7 +64,6 @@ def test_default_langfuse_trace_callback(): assert callback.log.level == 30 assert callback.log.level == 30
logic: duplicate assertion - line 65 is identical to line 64
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -79,4 +74,3 @@ def test_debug_langfuse_trace_callback(): assert callback.log.level == 10 assert callback.log.level == 10
logic: duplicate assertion - line 75 is identical to line 74
langfuse-python
github_2023
python
996
langfuse
ellipsis-dev[bot]
@@ -58,7 +58,17 @@ def handler(request: Request): ) tm = TaskManager( - langfuse_client, 10, 0.1, 3, 1, 10_000, "test-sdk", "1.0.0", "default" + client=langfuse_client, + api_client=None, + public_key="pk",
Avoid using hardcoded public keys like 'pk'. Consider using environment variables or configuration files to manage sensitive information. This applies to all instances in this file.
langfuse-python
github_2023
python
996
langfuse
greptile-apps[bot]
@@ -0,0 +1,203 @@ +"""This module contains the LangfuseMedia class, which is used to wrap media objects for upload to Langfuse.""" + +import base64 +import hashlib +import logging +import os +from typing import Optional, cast, Tuple + +from langfuse.api import MediaContentType +from langfuse.types import ParsedMediaRef...
logic: no validation of data length before split - could raise IndexError if comma is missing
langfuse-python
github_2023
python
996
langfuse
ellipsis-dev[bot]
@@ -0,0 +1,203 @@ +"""This module contains the LangfuseMedia class, which is used to wrap media objects for upload to Langfuse.""" + +import base64 +import hashlib +import logging +import os +from typing import Optional, cast, Tuple + +from langfuse.api import MediaContentType +from langfuse.types import ParsedMediaRef...
The use of `rstrip("@@@")` is incorrect here. It will remove all trailing '@' characters, not just the specific suffix '@@@'. Consider using `removesuffix("@@@")` instead. ```suggestion content = reference_string[len("@@@langfuseMedia:") :].removesuffix("@@@") ```
langfuse-python
github_2023
python
987
langfuse
greptile-apps[bot]
@@ -519,7 +528,11 @@ def _get_langfuse_data_from_default_response(resource: OpenAiDefinition, respons usage = response.get("usage", None) - return model, completion, usage.__dict__ if _is_openai_v1() and usage is not None else usage + return ( + model, + completion, + usage.__dict__ ...
logic: consider adding a check for usage being a dict before accessing __dict__ to prevent potential AttributeError
langfuse-python
github_2023
python
987
langfuse
fancyweb
@@ -170,6 +170,17 @@ def get_langfuse_args(self): return {**self.args, **self.kwargs} def get_openai_args(self): + # OpenAI returns streaming usage not by default but only if stream_options has include_usage set + if self.kwargs.get("stream") and "stream_options" not in self.kwargs:
Adding this automatically adds an empty final "choices" chunk, and consumer code might not handle that, so that's a breaking change :grimacing:
langfuse-python
github_2023
python
994
langfuse
greptile-apps[bot]
@@ -2087,7 +2089,9 @@ def generation( "body": new_body.dict(exclude_none=True, exclude_unset=False), } - self.log.debug(f"Creating generation {new_body}...") + self.log.debug( + f"Creating generation {_filter_io_from_event_body(generation_body)}..." +...
logic: duplicate debug log statement that logs the same information twice
langfuse-python
github_2023
python
994
langfuse
greptile-apps[bot]
@@ -107,13 +113,33 @@ def _next(self): try: item = queue.get(block=True, timeout=self._flush_interval - elapsed) + # convert pydantic models to dicts + if "body" in item and isinstance(item["body"], pydantic.BaseModel): + item["body"] = it...
logic: Sampling should happen before Pydantic conversion to avoid unnecessary serialization of sampled-out events
langfuse-python
github_2023
python
994
langfuse
ellipsis-dev[bot]
@@ -107,13 +113,37 @@ def _next(self): try: item = queue.get(block=True, timeout=self._flush_interval - elapsed) + # convert pydantic models to dicts + if "body" in item and isinstance(item["body"], pydantic.BaseModel): + item["body"] = it...
`queue.task_done()` is called twice for the same item in `_next()`. It should be called only once per item processing to avoid logical errors. Consider restructuring the code to ensure it's called once per item.
langfuse-python
github_2023
python
994
langfuse
maxdeichmann
@@ -991,14 +991,8 @@ def _log_debug_event( parent_run_id: Optional[UUID] = None, **kwargs, ): - kwargs_log = ( - ", " + ", ".join([f"{key}: {value}" for key, value in kwargs.items()]) - if len(kwargs) > 0 - else "" - ) self.log.debug( ...
Why did you remove this?
langfuse-python
github_2023
python
994
langfuse
maxdeichmann
@@ -3350,3 +3360,7 @@ def __init__(self, dataset: Dataset, items: typing.List[DatasetItemClient]): self.created_at = dataset.created_at self.updated_at = dataset.updated_at self.items = items + + +def _filter_io_from_event_body(event_body: Dict[str, Any]): + return {k: v for k, v in event_...
I would also remove metadata here.
langfuse-python
github_2023
python
994
langfuse
maxdeichmann
@@ -107,13 +113,37 @@ def _next(self): try: item = queue.get(block=True, timeout=self._flush_interval - elapsed) + # convert pydantic models to dicts + if "body" in item and isinstance(item["body"], pydantic.BaseModel): + item["body"] = it...
I think `exclude_none=True` is missing here. We should only send diffs to the backend.
langfuse-python
github_2023
python
994
langfuse
greptile-apps[bot]
@@ -107,13 +113,37 @@ def _next(self): try: item = queue.get(block=True, timeout=self._flush_interval - elapsed) + # convert pydantic models to dicts + if "body" in item and isinstance(item["body"], pydantic.BaseModel): + item["body"] = it...
logic: The serialization check should happen before truncation to avoid unnecessary truncation of items that will fail serialization anyway
langfuse-python
github_2023
python
991
langfuse
greptile-apps[bot]
@@ -138,7 +138,7 @@ def default(self, obj: Any): except Exception as e: print(obj.__dict__) - logger.warning( + logger.debug( f"Serialization failed for object of type {type(obj).__name__}", exc_info=e, )
logic: reducing log level to DEBUG means serialization failures will not be visible in production by default. Consider keeping WARNING level but adding a configuration option to suppress specific serialization errors if needed.
langfuse-python
github_2023
python
988
langfuse
greptile-apps[bot]
@@ -72,13 +72,17 @@ def default(self, obj: Any): return obj.isoformat() if isinstance(obj, BaseModel): - obj.model_rebuild() # This method forces the OpenAI model to instantiate its serializer to avoid errors when serializing + obj.model_rebuild() if pydant...
logic: raw model rebuild could fail silently if raw exists but is not a BaseModel - consider adding error handling
langfuse-python
github_2023
python
970
langfuse
greptile-apps[bot]
@@ -71,14 +75,17 @@ def default(self, obj: Any): if isinstance(obj, (date)): return obj.isoformat() - if isinstance(obj, BaseModel): - obj.model_rebuild() # This method forces the OpenAI model to instantiate its serializer to avoid errors when serializing + ...
logic: BaseModel is undefined here - needs to be imported from pydantic or referenced as pydantic.BaseModel
langfuse-python
github_2023
others
976
langfuse
greptile-apps[bot]
@@ -35,7 +35,7 @@ lark = "^1.1.7" pytest-asyncio = ">=0.21.1,<0.24.0" pytest-httpserver = "^1.0.8" boto3 = "^1.28.59" -ruff = ">=0.1.8,<0.6.0" +ruff = ">=0.1.8,<0.8.0"
logic: if using lint.allow-unused-imports in ruff config, must migrate to lint.pyflakes.allow-unused-imports