| sample_id (string, 21-196 chars) | text (string, 105-936k chars) | metadata (dict) | category (string, 6 classes) |
|---|---|---|---|
ocrmypdf/OCRmyPDF:tests/test_ocr_engine_interface.py | # SPDX-FileCopyrightText: 2025 James R. Barlow
# SPDX-License-Identifier: MPL-2.0
"""Unit tests for OcrEngine interface extensions.
These tests verify that the OcrEngine ABC has the new generate_ocr() method
and that OcrElement classes are exported from the public API.
"""
from __future__ import annotations
from pa... | {
"repo_id": "ocrmypdf/OCRmyPDF",
"file_path": "tests/test_ocr_engine_interface.py",
"license": "Mozilla Public License 2.0",
"lines": 93,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ocrmypdf/OCRmyPDF:tests/test_ocr_engine_selection.py | # SPDX-FileCopyrightText: 2025 James R. Barlow
# SPDX-License-Identifier: MPL-2.0
"""Unit tests for OCR engine selection mechanism.
Tests verify that the --ocr-engine option works correctly and that
engine-specific options are available.
"""
from __future__ import annotations
import pytest
class TestOcrEngineCliO... | {
"repo_id": "ocrmypdf/OCRmyPDF",
"file_path": "tests/test_ocr_engine_selection.py",
"license": "Mozilla Public License 2.0",
"lines": 92,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ocrmypdf/OCRmyPDF:tests/test_pdf_renderer.py | # SPDX-FileCopyrightText: 2025 James R. Barlow
# SPDX-License-Identifier: MPL-2.0
"""Unit tests for Fpdf2PdfRenderer class."""
from __future__ import annotations
from io import StringIO
from pathlib import Path
import pytest
from pdfminer.converter import TextConverter
from pdfminer.layout import LAParams
from pdfm... | {
"repo_id": "ocrmypdf/OCRmyPDF",
"file_path": "tests/test_pdf_renderer.py",
"license": "Mozilla Public License 2.0",
"lines": 501,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ocrmypdf/OCRmyPDF:tests/test_pipeline_generate_ocr.py | # SPDX-FileCopyrightText: 2025 James R. Barlow
# SPDX-License-Identifier: MPL-2.0
"""Unit tests for pipeline support of generate_ocr().
These tests verify that the pipeline supports the new generate_ocr() API
alongside the existing hOCR path.
"""
from __future__ import annotations
import dataclasses
from pathlib im... | {
"repo_id": "ocrmypdf/OCRmyPDF",
"file_path": "tests/test_pipeline_generate_ocr.py",
"license": "Mozilla Public License 2.0",
"lines": 70,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ocrmypdf/OCRmyPDF:tests/test_rasterizer.py | # SPDX-FileCopyrightText: 2025 James R. Barlow
# SPDX-License-Identifier: MPL-2.0
"""Tests for the --rasterizer CLI option."""
from __future__ import annotations
from io import BytesIO
import img2pdf
import pikepdf
import pytest
from PIL import Image
from ocrmypdf._options import OcrOptions
from ocrmypdf._plugin_m... | {
"repo_id": "ocrmypdf/OCRmyPDF",
"file_path": "tests/test_rasterizer.py",
"license": "Mozilla Public License 2.0",
"lines": 518,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ocrmypdf/OCRmyPDF:tests/test_system_font_provider.py | # SPDX-FileCopyrightText: 2025 James R. Barlow
# SPDX-License-Identifier: MPL-2.0
"""Unit tests for SystemFontProvider and ChainedFontProvider."""
from __future__ import annotations
import sys
from pathlib import Path
from unittest.mock import MagicMock, patch
import pytest
from ocrmypdf.font import (
BuiltinF... | {
"repo_id": "ocrmypdf/OCRmyPDF",
"file_path": "tests/test_system_font_provider.py",
"license": "Mozilla Public License 2.0",
"lines": 267,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ocrmypdf/OCRmyPDF:tests/test_verapdf.py | # SPDX-FileCopyrightText: 2024 James R. Barlow
# SPDX-License-Identifier: CC-BY-SA-4.0
"""Tests for verapdf wrapper and speculative PDF/A conversion."""
from __future__ import annotations
import pikepdf
import pytest
from pikepdf import Name
from ocrmypdf._exec import verapdf
from ocrmypdf.pdfa import (
_pdfa_p... | {
"repo_id": "ocrmypdf/OCRmyPDF",
"file_path": "tests/test_verapdf.py",
"license": "Mozilla Public License 2.0",
"lines": 128,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ocrmypdf/OCRmyPDF:tests/test_page_boxes.py | # SPDX-FileCopyrightText: 2025 James R. Barlow
# SPDX-License-Identifier: MPL-2.0
from __future__ import annotations
import pikepdf
import pytest
from ocrmypdf._exec import verapdf
from .conftest import check_ocrmypdf
page_rect = [0, 0, 612, 792]
inset_rect = [200, 200, 612, 792]
wh_rect = [0, 0, 412, 592]
neg_re... | {
"repo_id": "ocrmypdf/OCRmyPDF",
"file_path": "tests/test_page_boxes.py",
"license": "Mozilla Public License 2.0",
"lines": 111,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ocrmypdf/OCRmyPDF:tests/test_watcher.py | from __future__ import annotations
import datetime as dt
import os
import shutil
import subprocess
import sys
import time
from pathlib import Path
import pytest
watchdog = pytest.importorskip('watchdog')
@pytest.mark.parametrize('year_month', [True, False])
def test_watcher(tmp_path, resources, year_month):
in... | {
"repo_id": "ocrmypdf/OCRmyPDF",
"file_path": "tests/test_watcher.py",
"license": "Mozilla Public License 2.0",
"lines": 44,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ocrmypdf/OCRmyPDF:misc/_webservice.py | # SPDX-FileCopyrightText: 2025 James R. Barlow
# SPDX-License-Identifier: AGPL-3.0-or-later
"""This is a simple web service/HTTP wrapper for OCRmyPDF.
This may be more convenient than the command line tool for some Docker users.
Note that OCRmyPDF uses Ghostscript, which is licensed under AGPLv3+. While
OCRmyPDF is u... | {
"repo_id": "ocrmypdf/OCRmyPDF",
"file_path": "misc/_webservice.py",
"license": "Mozilla Public License 2.0",
"lines": 210,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
ocrmypdf/OCRmyPDF:misc/ocrmypdf_compare.py | # SPDX-FileCopyrightText: 2025 James R. Barlow
# SPDX-License-Identifier: MIT
"""Run OCRmyPDF on the same PDF with different options."""
from __future__ import annotations
import os
import shlex
from io import BytesIO
from pathlib import Path
from subprocess import check_output, run
from tempfile import TemporaryDir... | {
"repo_id": "ocrmypdf/OCRmyPDF",
"file_path": "misc/ocrmypdf_compare.py",
"license": "Mozilla Public License 2.0",
"lines": 109,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
ocrmypdf/OCRmyPDF:misc/pdf_compare.py | # SPDX-FileCopyrightText: 2025 James R. Barlow
# SPDX-License-Identifier: MIT
"""Compare two PDFs."""
from __future__ import annotations
import os
from io import BytesIO
from pathlib import Path
from tempfile import TemporaryDirectory
import pikepdf
import pymupdf
import streamlit as st
from lxml import etree
from ... | {
"repo_id": "ocrmypdf/OCRmyPDF",
"file_path": "misc/pdf_compare.py",
"license": "Mozilla Public License 2.0",
"lines": 66,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
ocrmypdf/OCRmyPDF:misc/pdf_text_diff.py | # SPDX-FileCopyrightText: 2025 James R. Barlow
# SPDX-License-Identifier: MPL-2.0
"""Compare text in PDFs."""
from __future__ import annotations
from pathlib import Path
from subprocess import run
from tempfile import NamedTemporaryFile
from typing import Annotated
import cyclopts
app = cyclopts.App()
@app.defau... | {
"repo_id": "ocrmypdf/OCRmyPDF",
"file_path": "misc/pdf_text_diff.py",
"license": "Mozilla Public License 2.0",
"lines": 46,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
onnx/onnx:onnx/backend/test/case/node/bitcast.py | # Copyright (c) ONNX Project Contributors
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import numpy as np
import onnx
from onnx.backend.test.case.base import Base
from onnx.backend.test.case.node import expect
class BitCast(Base):
@staticmethod
def export_bitcast_float32_to_int... | {
"repo_id": "onnx/onnx",
"file_path": "onnx/backend/test/case/node/bitcast.py",
"license": "Apache License 2.0",
"lines": 134,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
onnx/onnx:onnx/reference/ops/op_bitcast.py | # Copyright (c) ONNX Project Contributors
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import numpy as np
import onnx
from onnx.reference.op_run import OpRun
class BitCast(OpRun):
def _run(self, x, to: int): # type: ignore
if to == onnx.TensorProto.STRING:
raise... | {
"repo_id": "onnx/onnx",
"file_path": "onnx/reference/ops/op_bitcast.py",
"license": "Apache License 2.0",
"lines": 21,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
onnx/onnx:onnx/backend/test/case/node/cumprod.py | # Copyright (c) ONNX Project Contributors
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import numpy as np
import onnx
from onnx.backend.test.case.base import Base
from onnx.backend.test.case.node import expect
class CumProd(Base):
@staticmethod
def export_cumprod_1d() -> None:
... | {
"repo_id": "onnx/onnx",
"file_path": "onnx/backend/test/case/node/cumprod.py",
"license": "Apache License 2.0",
"lines": 117,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
onnx/onnx:onnx/reference/ops/op_cum_prod.py | # Copyright (c) ONNX Project Contributors
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import numpy as np
from onnx.reference.op_run import OpRun
class CumProd(OpRun):
def _run(self, x, axis, exclusive=None, reverse=None):
axis = np.asarray(axis)
if axis.ndim != 0:
... | {
"repo_id": "onnx/onnx",
"file_path": "onnx/reference/ops/op_cum_prod.py",
"license": "Apache License 2.0",
"lines": 26,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
onnx/onnx:tools/spec_to_yaml.py | # Copyright (c) ONNX Project Contributors
#
# SPDX-License-Identifier: Apache-2.0
"""Output ONNX spec in YAML format.
Usage:
python spec_to_yaml.py --output onnx-spec/defs
"""
from __future__ import annotations
import argparse
import enum
import pathlib
from collections.abc import Iterable
from typing import An... | {
"repo_id": "onnx/onnx",
"file_path": "tools/spec_to_yaml.py",
"license": "Apache License 2.0",
"lines": 96,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
onnx/onnx:onnx/test/test_env_python_executable.py | # Copyright (c) ONNX Project Contributors
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import os
import pathlib
import platform
import sys
import sysconfig
import tempfile
import unittest
from unittest.mock import patch
# Extract get_python_execute function from setup.py for testing
def ... | {
"repo_id": "onnx/onnx",
"file_path": "onnx/test/test_env_python_executable.py",
"license": "Apache License 2.0",
"lines": 198,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
onnx/onnx:onnx/test/node_shape_inference_test.py | # SPDX-License-Identifier: Apache-2.0
# Copyright (c) ONNX Project Contributors
from __future__ import annotations
import unittest
import parameterized
import onnx.helper
import onnx.shape_inference
class NodeInferenceTest(unittest.TestCase):
@parameterized.parameterized.expand(
[
("Greate... | {
"repo_id": "onnx/onnx",
"file_path": "onnx/test/node_shape_inference_test.py",
"license": "Apache License 2.0",
"lines": 30,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
onnx/onnx:onnx/reference/ops/op_tensor_scatter.py | # Copyright (c) ONNX Project Contributors
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import numpy as np
from onnx.reference.op_run import OpRun
class TensorScatter(OpRun):
def _run(self, past_cache, update, write_indices=None, mode="linear", axis=-2):
if mode not in {"line... | {
"repo_id": "onnx/onnx",
"file_path": "onnx/reference/ops/op_tensor_scatter.py",
"license": "Apache License 2.0",
"lines": 39,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
onnx/onnx:onnx/backend/test/case/node/swish.py | # Copyright (c) ONNX Project Contributors
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import numpy as np
import onnx
from onnx.backend.test.case.base import Base
from onnx.backend.test.case.node import expect
def swish(x: np.ndarray, alpha: float) -> np.ndarray:
return x * (1 / (1... | {
"repo_id": "onnx/onnx",
"file_path": "onnx/backend/test/case/node/swish.py",
"license": "Apache License 2.0",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
onnx/onnx:onnx/reference/ops/op_swish.py | # Copyright (c) ONNX Project Contributors
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import numpy as np
from onnx.reference.ops._op import OpRunUnaryNum
class Swish(OpRunUnaryNum):
def _run(self, x, alpha=None):
alpha = self.alpha if alpha is None else alpha
return... | {
"repo_id": "onnx/onnx",
"file_path": "onnx/reference/ops/op_swish.py",
"license": "Apache License 2.0",
"lines": 9,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
onnx/onnx:onnx/backend/test/case/node/lpnormalization.py | # Copyright (c) ONNX Project Contributors
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import numpy as np
import onnx
from onnx.backend.test.case.base import Base
from onnx.backend.test.case.node import expect
class LpNormalization(Base):
@staticmethod
def export_l2normalizatio... | {
"repo_id": "onnx/onnx",
"file_path": "onnx/backend/test/case/node/lpnormalization.py",
"license": "Apache License 2.0",
"lines": 71,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
openai/gpt-oss:tests/gpt_oss/tools/simple_browser/test_backend.py | import pytest
from typing import Generator, Any
from unittest import mock
from aiohttp import ClientSession
from gpt_oss.tools.simple_browser.backend import YouComBackend
class MockAiohttpResponse:
"""Mocks responses for get/post requests from async libraries."""
def __init__(self, json: dict, status: int):
... | {
"repo_id": "openai/gpt-oss",
"file_path": "tests/gpt_oss/tools/simple_browser/test_backend.py",
"license": "Apache License 2.0",
"lines": 57,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
openai/gpt-oss:examples/gradio/gradio_chat.py | import json
import requests
import gradio as gr
DEFAULT_FUNCTION_PROPERTIES = """
{
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
}
},
"required": ["location"]
}
""".strip()
def chat... | {
"repo_id": "openai/gpt-oss",
"file_path": "examples/gradio/gradio_chat.py",
"license": "Apache License 2.0",
"lines": 199,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/evals/basic_eval.py | """
Basic eval
"""
from . import report
from .types import Eval, EvalResult, SamplerBase, SingleEvalResult
class BasicEval(Eval):
def __init__(self,):
self.examples = [{
"question": "hi",
"answer": "hi, how can i help?",
}]
def __call__(self, sampler: SamplerBase) -> E... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/evals/basic_eval.py",
"license": "Apache License 2.0",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:tests/test_api_endpoints.py | import pytest
import json
import asyncio
from fastapi import status
from unittest.mock import patch, MagicMock, AsyncMock
class TestResponsesEndpoint:
def test_basic_response_creation(self, api_client, sample_request_data):
response = api_client.post("/v1/responses", json=sample_request_data)
... | {
"repo_id": "openai/gpt-oss",
"file_path": "tests/test_api_endpoints.py",
"license": "Apache License 2.0",
"lines": 188,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
openai/gpt-oss:examples/agents-sdk-python/example.py | import asyncio
from pathlib import Path
import shutil
from openai import AsyncOpenAI
from agents import (
Agent,
ItemHelpers,
Runner,
set_default_openai_api,
set_default_openai_client,
set_tracing_disabled,
function_tool,
)
from agents.mcp import MCPServerStdio
async def prompt_user(quest... | {
"repo_id": "openai/gpt-oss",
"file_path": "examples/agents-sdk-python/example.py",
"license": "Apache License 2.0",
"lines": 84,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt-oss-mcp-server/browser_server.py | import os
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager
from dataclasses import dataclass, field
from typing import Union, Optional
from mcp.server.fastmcp import Context, FastMCP
from gpt_oss.tools.simple_browser import SimpleBrowserTool
from gpt_oss.tools.simple_browser.backend... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt-oss-mcp-server/browser_server.py",
"license": "Apache License 2.0",
"lines": 106,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt-oss-mcp-server/build-system-prompt.py | import datetime
import asyncio
from gpt_oss.tokenizer import get_tokenizer
from openai_harmony import (
Conversation,
DeveloperContent,
HarmonyEncodingName,
Message,
ReasoningEffort,
Role,
SystemContent,
ToolNamespaceConfig,
ToolDescription,
load_harmony_encoding,
)
from mcp i... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt-oss-mcp-server/build-system-prompt.py",
"license": "Apache License 2.0",
"lines": 93,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt-oss-mcp-server/python_server.py | from mcp.server.fastmcp import FastMCP
from gpt_oss.tools.python_docker.docker_tool import PythonTool
from openai_harmony import Message, TextContent, Author, Role
# Pass lifespan to server
mcp = FastMCP(
name="python",
instructions=r"""
Use this tool to execute Python code in your chain of thought. The code w... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt-oss-mcp-server/python_server.py",
"license": "Apache License 2.0",
"lines": 30,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:gpt-oss-mcp-server/reference-system-prompt.py | import datetime
from gpt_oss.tools.simple_browser import SimpleBrowserTool
from gpt_oss.tools.simple_browser.backend import YouComBackend
from gpt_oss.tools.python_docker.docker_tool import PythonTool
from gpt_oss.tokenizer import tokenizer
from openai_harmony import (
Conversation,
DeveloperContent,
Harm... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt-oss-mcp-server/reference-system-prompt.py",
"license": "Apache License 2.0",
"lines": 36,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:gpt_oss/responses_api/inference/transformers.py | """
NOTE: this is not the most efficient way to use transformers. It's a simple implementation that infers
one token at a time to mimic the behavior of the Triton implementation.
"""
import os
from typing import Callable, List
# Transformers imports
from transformers import AutoModelForCausalLM, PreTrainedModel
impor... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/responses_api/inference/transformers.py",
"license": "Apache License 2.0",
"lines": 42,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:examples/streamlit/streamlit_chat.py | import json
import requests
import streamlit as st
DEFAULT_FUNCTION_PROPERTIES = """
{
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
}
},
"required": ["location"]
}
""".strip()
# Se... | {
"repo_id": "openai/gpt-oss",
"file_path": "examples/streamlit/streamlit_chat.py",
"license": "Apache License 2.0",
"lines": 330,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/chat.py | """
Harmony chat with tools
"""
import atexit
import argparse
import asyncio
import datetime
import os
from pathlib import Path
try:
import gnureadline as readline
except ImportError:
import readline
import torch
import termcolor
from gpt_oss.tools import apply_patch
from gpt_oss.tools.simple_browser import... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/chat.py",
"license": "Apache License 2.0",
"lines": 331,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/evals/__main__.py | import argparse
import json
from datetime import datetime
from . import report
from .basic_eval import BasicEval
from .gpqa_eval import GPQAEval
from .aime_eval import AIME25Eval
from .healthbench_eval import HealthBenchEval
from .chat_completions_sampler import (
OPENAI_SYSTEM_MESSAGE_API,
ChatCompletionsSamp... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/evals/__main__.py",
"license": "Apache License 2.0",
"lines": 194,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/evals/abcd_grader.py | import re
import sys
_PATTERNS = [
# 0)"**Answer:** A" or "*Answers* – B", i.e. markdown‐wrapped "Answer(s)" with an unwrapped letter.
re.compile(
r'''(?ix) # case‐insensitive, ignore‐space
(?:\*{1,2}|_{1,2}) # leading *…* or _…_
Answer[s]? ... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/evals/abcd_grader.py",
"license": "Apache License 2.0",
"lines": 99,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/evals/aime_eval.py | """
AIME 2025: https://huggingface.co/datasets/opencompass/AIME2025
"""
import random
import re
import pandas
from . import report
from .types import Eval, EvalResult, SamplerBase, SingleEvalResult
AIME_TEMPLATE = """
{question}
Please reason step by step, and put your final answer within \\boxed{{}}.
"""
def forma... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/evals/aime_eval.py",
"license": "Apache License 2.0",
"lines": 87,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/evals/gpqa_eval.py | """
GPQA: A Graduate-Level Google-Proof Q&A Benchmark
David Rein, Betty Li Hou, Asa Cooper Stickland, Jackson Petty, Richard Yuanzhe Pang, Julien Dirani, Julian Michael, Samuel R. Bowman
https://arxiv.org/abs/2311.12022
"""
import random
import pandas
from . import report
from .types import Eval, EvalResult, Sampler... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/evals/gpqa_eval.py",
"license": "Apache License 2.0",
"lines": 104,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/evals/healthbench_eval.py | """
This script evaluates the performance of a model on the HealthBench dataset.
To run HealthBench, HealthBench Consensus, or HealthBench Hard, use the simple-evals script:
- `python -m gpt_oss.evals --eval=healthbench --model=gpt-oss-120b`
- `python -m gpt_oss.evals --eval=healthbench_consensus --model=gpt-oss-120b`... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/evals/healthbench_eval.py",
"license": "Apache License 2.0",
"lines": 531,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/evals/report.py | import os
from collections import defaultdict
from multiprocessing.pool import ThreadPool
from typing import Any, Callable
import jinja2
import numpy as np
from tqdm import tqdm
from .types import EvalResult, Message, SingleEvalResult
HTML_JINJA = """
<h3>Prompt conversation</h3>
{% for message in prompt_messages %... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/evals/report.py",
"license": "Apache License 2.0",
"lines": 186,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
openai/gpt-oss:gpt_oss/evals/responses_sampler.py | import time
from typing import Any
import openai
from openai import OpenAI
from .types import MessageList, SamplerBase, SamplerResponse
class ResponsesSampler(SamplerBase):
"""
Sample from OpenAI's responses API
"""
def __init__(
self,
model: str,
developer_message: str | No... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/evals/responses_sampler.py",
"license": "Apache License 2.0",
"lines": 76,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:gpt_oss/evals/types.py | from dataclasses import dataclass, field
from typing import Any, Literal, overload
Message = dict[str, Any] # keys role, content
MessageList = list[Message]
@dataclass
class SamplerResponse:
"""
Response from a sampler.
"""
response_text: str
actual_queried_message_list: MessageList
respons... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/evals/types.py",
"license": "Apache License 2.0",
"lines": 50,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:gpt_oss/generate.py | # Model parallel inference
# Note: This script is for demonstration purposes only. It is not designed for production use.
# See gpt_oss.chat for a more complete example with the Harmony parser.
# torchrun --nproc-per-node=4 -m gpt_oss.generate -p "why did the chicken cross the road?" model/
import argparse
from... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/generate.py",
"license": "Apache License 2.0",
"lines": 87,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:gpt_oss/metal/examples/chat.py | #!/usr/bin/env python
import argparse
import sys
from datetime import date
from gpt_oss.metal import Context, Model
DEFAULT_PROMPT = f"""You are ChatGPT, a large language model trained by OpenAI.
Knowledge cutoff: 2024-06
Current date: {date.today().isoformat()}
reasoning effort high
# Valid channels: analysis, f... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/metal/examples/chat.py",
"license": "Apache License 2.0",
"lines": 87,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/metal/examples/generate.py | #!/usr/bin/env python
import argparse
import sys
from gpt_oss.metal import Context, Model
parser = argparse.ArgumentParser(description='Chat with gpt-oss', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('model', metavar='PATH', type=str, help='Path to gpt-oss checkpoint')
parser.add_arg... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/metal/examples/generate.py",
"license": "Apache License 2.0",
"lines": 23,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:gpt_oss/metal/scripts/create-local-model.py | import argparse
import os
import math
import sys
import json
import itertools
import struct
from uuid import UUID
import tiktoken
import torch
from safetensors import safe_open
from tqdm import tqdm
from openai_harmony import load_harmony_encoding, HarmonyEncodingName
parser = argparse.ArgumentParser(prog='create-lo... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/metal/scripts/create-local-model.py",
"license": "Apache License 2.0",
"lines": 299,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/responses_api/api_server.py | import os
import datetime
import uuid
from typing import Callable, Literal, Optional, Union
from fastapi import FastAPI, Request
from fastapi.exception_handlers import request_validation_exception_handler
from fastapi.exceptions import RequestValidationError
from fastapi.responses import StreamingResponse
from openai_... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/responses_api/api_server.py",
"license": "Apache License 2.0",
"lines": 1249,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/responses_api/events.py | # torchrun --nproc-per-node=4 responses_api.py
from typing import Literal, Optional, Union
from pydantic import BaseModel
from .types import (
CodeInterpreterCallItem,
CodeInterpreterOutputImage,
CodeInterpreterOutputLogs,
FunctionCallItem,
Item,
ReasoningItem,
ReasoningTextContentItem,
... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/responses_api/events.py",
"license": "Apache License 2.0",
"lines": 158,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:gpt_oss/responses_api/inference/metal.py | """Metal backend for :mod:`gpt_oss.responses_api`."""
from typing import Callable
from gpt_oss.metal import Context, Model
# Tunables
MAX_OUTPUT_TOKENS = 100
def setup_model(checkpoint: str) -> Callable[[list[int], float], int]:
"""Load the Metal model and return an inference function."""
model = Model(c... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/responses_api/inference/metal.py",
"license": "Apache License 2.0",
"lines": 29,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:gpt_oss/responses_api/inference/ollama.py | """
NOTE: this is a stitched together implementation that uses Ollama for inference. It's primarily used
for testing and development. It does not leverage any prompt caching or other optimizations and
can therefore be slow between turns.
"""
import json
import threading
import time
from typing import Callable, Optiona... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/responses_api/inference/ollama.py",
"license": "Apache License 2.0",
"lines": 154,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/responses_api/inference/triton.py | import datetime
import os
from typing import Callable
os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True"
import torch
import torch.distributed as dist
from gpt_oss.triton.model import Cache, ModelConfig, Transformer
DEFAULT_TEMPERATURE = 0.0
CONTEXT = 16_384
CONCURRENT_SESSIONS = 1
rank = int(
o... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/responses_api/inference/triton.py",
"license": "Apache License 2.0",
"lines": 79,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:gpt_oss/responses_api/inference/vllm.py | """
NOTE: this is not the most efficient way to use vLLM. It's a simple implementation that infers
one token at a time to mimic the behavior of the Triton implementation.
"""
import os
from typing import Callable, List, Optional
# vLLM imports
from vllm import LLM, SamplingParams
from vllm.inputs import TokensPromp... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/responses_api/inference/vllm.py",
"license": "Apache License 2.0",
"lines": 66,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/responses_api/serve.py | # torchrun --nproc-per-node=4 serve.py
import argparse
import uvicorn
from openai_harmony import (
HarmonyEncodingName,
load_harmony_encoding,
)
from .api_server import create_api_server
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Responses API server")
parser.add_argume... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/responses_api/serve.py",
"license": "Apache License 2.0",
"lines": 51,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:gpt_oss/responses_api/types.py | from typing import Any, Dict, Literal, Optional, Union
from openai_harmony import ReasoningEffort
from pydantic import BaseModel, ConfigDict
MODEL_IDENTIFIER = "gpt-oss-120b"
DEFAULT_TEMPERATURE = 0.0
REASONING_EFFORT = ReasoningEffort.LOW
DEFAULT_MAX_OUTPUT_TOKENS = 131072
class UrlCitation(BaseModel):
type: L... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/responses_api/types.py",
"license": "Apache License 2.0",
"lines": 165,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:gpt_oss/tokenizer.py | import tiktoken
def get_tokenizer():
o200k_base = tiktoken.get_encoding("o200k_base")
tokenizer = tiktoken.Encoding(
name="o200k_harmony",
pat_str=o200k_base._pat_str,
mergeable_ranks=o200k_base._mergeable_ranks,
special_tokens={
**o200k_base._special_tokens,
... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/tokenizer.py",
"license": "Apache License 2.0",
"lines": 29,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:gpt_oss/tools/apply_patch.py | #!/usr/bin/env python3
"""
A self-contained **pure-Python 3.9+** utility for applying human-readable
“pseudo-diff” patch files to a collection of text files.
Source: https://cookbook.openai.com/examples/gpt4-1_prompting_guide
"""
from __future__ import annotations
import pathlib
from dataclasses import dataclass, f... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/tools/apply_patch.py",
"license": "Apache License 2.0",
"lines": 446,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/tools/python_docker/docker_tool.py | # Run this before running the tool:
# $ docker image pull python:3.11
import asyncio
import contextlib
import io
import os
import queue
import subprocess
import tarfile
import tempfile
from pathlib import Path
from typing import Any, AsyncIterator
import docker
from openai_harmony import (
Author,
Content,
... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/tools/python_docker/docker_tool.py",
"license": "Apache License 2.0",
"lines": 316,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/tools/simple_browser/backend.py | """
Simple backend for the simple browser tool.
"""
import functools
import asyncio
import logging
import os
from abc import abstractmethod
from importlib.metadata import version
from typing import Callable, ParamSpec, TypeVar
from urllib.parse import quote
import chz
from aiohttp import ClientSession, ClientTimeout
... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/tools/simple_browser/backend.py",
"license": "Apache License 2.0",
"lines": 225,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/tools/simple_browser/page_contents.py | """
Page contents for the simple browser tool.
"""
from __future__ import annotations
import dataclasses
import functools
import logging
import re
from urllib.parse import urljoin, urlparse
import aiohttp
import html2text
import lxml
import lxml.etree
import lxml.html
import pydantic
import tiktoken
logger = loggi... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/tools/simple_browser/page_contents.py",
"license": "Apache License 2.0",
"lines": 245,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/tools/simple_browser/simple_browser_tool.py | import contextvars
import dataclasses
import functools
import itertools
import json
import re
import textwrap
from typing import Any, AsyncIterator, Callable, ParamSpec, Sequence
from urllib.parse import quote, unquote
import pydantic
import structlog
import tiktoken
from aiohttp import ClientSession
from openai_harmo... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/tools/simple_browser/simple_browser_tool.py",
"license": "Apache License 2.0",
"lines": 588,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/tools/tool.py | from abc import ABC, abstractmethod
from uuid import UUID, uuid4
from typing import AsyncIterator
from openai_harmony import (
Author,
Role,
Message,
TextContent,
)
def _maybe_update_inplace_and_validate_channel(
*, input_message: Message, tool_message: Message
) -> None:
# If the channel of ... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/tools/tool.py",
"license": "Apache License 2.0",
"lines": 83,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/torch/model.py | import json
import math
import os
from dataclasses import dataclass
import torch
import torch.distributed as dist
from gpt_oss.torch.weights import Checkpoint
@dataclass
class ModelConfig:
num_hidden_layers: int = 36
num_experts: int = 128
experts_per_token: int = 4
vocab_size: int = 201088
hidd... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/torch/model.py",
"license": "Apache License 2.0",
"lines": 422,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/torch/utils.py | import os
import torch
import torch.distributed as dist
def suppress_output(rank):
"""Suppress printing on the current device. Force printing with `force=True`."""
import builtins as __builtin__
builtin_print = __builtin__.print
def print(*args, **kwargs):
force = kwargs.pop('force', False)
... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/torch/utils.py",
"license": "Apache License 2.0",
"lines": 32,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:gpt_oss/torch/weights.py | import math
import os
import torch
from safetensors import safe_open
# Bytes per MXFP4 block: 32 FP4 numbers packed in 16 bytes
BYTES_PER_BLOCK = 16
FP4_VALUES = [
+0.0, +0.5, +1.0, +1.5, +2.0, +3.0, +4.0, +6.0,
-0.0, -0.5, -1.0, -1.5, -2.0, -3.0, -4.0, -6.0,
]
# Map the names assumed in this implementatio... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/torch/weights.py",
"license": "Apache License 2.0",
"lines": 109,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/triton/model.py | import json
import math
import os
import torch
from torch.profiler import record_function
from gpt_oss.torch.model import ModelConfig, RMSNorm
from gpt_oss.torch.weights import Checkpoint
from gpt_oss.triton.attention import attention, attention_ref
from gpt_oss.triton.moe import quantize_mx4, moe
class RotaryEmbed... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/triton/model.py",
"license": "Apache License 2.0",
"lines": 461,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/gpt-oss:gpt_oss/triton/moe.py | import torch
from torch.profiler import record_function
import triton_kernels
import triton_kernels.swiglu
from triton_kernels.numerics_details.mxfp import downcast_to_mxfp
from triton_kernels.matmul_ogs import PrecisionConfig, FlexCtx, FnSpecs, FusedActivation
from triton_kernels.matmul_ogs import matmul_ogs
from tri... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/triton/moe.py",
"license": "Apache License 2.0",
"lines": 49,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:gpt_oss/vllm/token_generator.py | from vllm import LLMEngine, EngineArgs, SamplingParams, TokensPrompt
class TokenGenerator:
def __init__(self, model_path: str, tensor_parallel_size: int = 1):
args = EngineArgs(
model=model_path,
tensor_parallel_size=tensor_parallel_size,
)
self.engine = LLMEngine.f... | {
"repo_id": "openai/gpt-oss",
"file_path": "gpt_oss/vllm/token_generator.py",
"license": "Apache License 2.0",
"lines": 44,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/gpt-oss:tests/test_responses_api.py | import time
import pytest
from fastapi.testclient import TestClient
from openai_harmony import (
HarmonyEncodingName,
load_harmony_encoding,
)
from gpt_oss.responses_api.api_server import create_api_server
encoding = load_harmony_encoding(HarmonyEncodingName.HARMONY_GPT_OSS)
fake_tokens = encoding.encode(
... | {
"repo_id": "openai/gpt-oss",
"file_path": "tests/test_responses_api.py",
"license": "Apache License 2.0",
"lines": 37,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
openai/openai-python:examples/responses/websocket.py | from __future__ import annotations
import json
import argparse
from typing import TYPE_CHECKING, Dict, Union, Literal, Optional, TypedDict, NamedTuple, cast
from openai import OpenAI
from openai.types.responses import (
FunctionToolParam,
ToolChoiceOptions,
ResponseInputParam,
ResponseFailedEvent,
... | {
"repo_id": "openai/openai-python",
"file_path": "examples/responses/websocket.py",
"license": "Apache License 2.0",
"lines": 362,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/openai-python:src/openai/types/responses/response_conversation_param_param.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Required, TypedDict
__all__ = ["ResponseConversationParamParam"]
class ResponseConversationParamParam(TypedDict, total=False):
"""The conversation that this res... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/response_conversation_param_param.py",
"license": "Apache License 2.0",
"lines": 8,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/response_input.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
from typing_extensions import TypeAlias
from .response_input_item import ResponseInputItem
__all__ = ["ResponseInput"]
ResponseInput: TypeAlias = List[ResponseInputItem]
| {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/response_input.py",
"license": "Apache License 2.0",
"lines": 6,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/responses_client_event.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Union, Optional
from typing_extensions import Literal, TypeAlias
from .tool import Tool
from ..._models import BaseModel
from .response_input import ResponseInput
from .response_prompt import ResponsePrompt
... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/responses_client_event.py",
"license": "Apache License 2.0",
"lines": 258,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
openai/openai-python:src/openai/types/responses/responses_client_event_param.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import List, Union, Iterable, Optional
from typing_extensions import Literal, Required, TypeAlias, TypedDict
from .tool_param import ToolParam
from .response_includable import Response... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/responses_client_event_param.py",
"license": "Apache License 2.0",
"lines": 258,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
openai/openai-python:src/openai/types/responses/responses_server_event.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Union
from typing_extensions import Annotated, TypeAlias
from ..._utils import PropertyInfo
from .response_error_event import ResponseErrorEvent
from .response_failed_event import ResponseFailedEvent
from .respons... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/responses_server_event.py",
"license": "Apache License 2.0",
"lines": 116,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/resources/skills/content.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
import httpx
from ... import _legacy_response
from ..._types import Body, Query, Headers, NotGiven, not_given
from ..._compat import cached_property
from ..._resource import SyncAPIResource, Async... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/resources/skills/content.py",
"license": "Apache License 2.0",
"lines": 131,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/openai-python:src/openai/resources/skills/skills.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Union, Mapping, cast
from typing_extensions import Literal
import httpx
from ... import _legacy_response
from ...types import skill_list_params, skill_create_params, skill_upda... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/resources/skills/skills.py",
"license": "Apache License 2.0",
"lines": 519,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/openai-python:src/openai/resources/skills/versions/content.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
import httpx
from .... import _legacy_response
from ...._types import Body, Query, Headers, NotGiven, not_given
from ...._compat import cached_property
from ...._resource import SyncAPIResource, A... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/resources/skills/versions/content.py",
"license": "Apache License 2.0",
"lines": 139,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/openai-python:src/openai/resources/skills/versions/versions.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Union, Mapping, cast
from typing_extensions import Literal
import httpx
from .... import _legacy_response
from .content import (
Content,
AsyncContent,
ContentWithR... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/resources/skills/versions/versions.py",
"license": "Apache License 2.0",
"lines": 457,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
openai/openai-python:src/openai/types/deleted_skill.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing_extensions import Literal
from .._models import BaseModel
__all__ = ["DeletedSkill"]
class DeletedSkill(BaseModel):
id: str
deleted: bool
object: Literal["skill.deleted"]
| {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/deleted_skill.py",
"license": "Apache License 2.0",
"lines": 8,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/container_auto.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Union, Optional
from typing_extensions import Literal, Annotated, TypeAlias
from ..._utils import PropertyInfo
from ..._models import BaseModel
from .inline_skill import InlineSkill
from .skill_reference imp... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/container_auto.py",
"license": "Apache License 2.0",
"lines": 25,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/container_auto_param.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Union, Iterable, Optional
from typing_extensions import Literal, Required, TypeAlias, TypedDict
from ..._types import SequenceNotStr
from .inline_skill_param import InlineSkillP... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/container_auto_param.py",
"license": "Apache License 2.0",
"lines": 23,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/container_network_policy_allowlist.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Optional
from typing_extensions import Literal
from ..._models import BaseModel
from .container_network_policy_domain_secret import ContainerNetworkPolicyDomainSecret
__all__ = ["ContainerNetworkPolicyAllow... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/container_network_policy_allowlist.py",
"license": "Apache License 2.0",
"lines": 13,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/container_network_policy_allowlist_param.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Iterable
from typing_extensions import Literal, Required, TypedDict
from ..._types import SequenceNotStr
from .container_network_policy_domain_secret_param import ContainerNetwo... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/container_network_policy_allowlist_param.py",
"license": "Apache License 2.0",
"lines": 14,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/container_network_policy_disabled.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing_extensions import Literal
from ..._models import BaseModel
__all__ = ["ContainerNetworkPolicyDisabled"]
class ContainerNetworkPolicyDisabled(BaseModel):
type: Literal["disabled"]
"""Disable outbound network ac... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/container_network_policy_disabled.py",
"license": "Apache License 2.0",
"lines": 7,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/container_network_policy_disabled_param.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Literal, Required, TypedDict
__all__ = ["ContainerNetworkPolicyDisabledParam"]
class ContainerNetworkPolicyDisabledParam(TypedDict, total=False):
type: Required... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/container_network_policy_disabled_param.py",
"license": "Apache License 2.0",
"lines": 7,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/container_network_policy_domain_secret.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from ..._models import BaseModel
__all__ = ["ContainerNetworkPolicyDomainSecret"]
class ContainerNetworkPolicyDomainSecret(BaseModel):
domain: str
"""The domain associated with the secret."""
name: str
"""The name... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/container_network_policy_domain_secret.py",
"license": "Apache License 2.0",
"lines": 10,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/container_network_policy_domain_secret_param.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Required, TypedDict
__all__ = ["ContainerNetworkPolicyDomainSecretParam"]
class ContainerNetworkPolicyDomainSecretParam(TypedDict, total=False):
domain: Require... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/container_network_policy_domain_secret_param.py",
"license": "Apache License 2.0",
"lines": 11,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/container_reference.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing_extensions import Literal
from ..._models import BaseModel
__all__ = ["ContainerReference"]
class ContainerReference(BaseModel):
container_id: str
"""The ID of the referenced container."""
type: Literal["... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/container_reference.py",
"license": "Apache License 2.0",
"lines": 9,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/container_reference_param.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Literal, Required, TypedDict
__all__ = ["ContainerReferenceParam"]
class ContainerReferenceParam(TypedDict, total=False):
container_id: Required[str]
"""The... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/container_reference_param.py",
"license": "Apache License 2.0",
"lines": 9,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/inline_skill.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing_extensions import Literal
from ..._models import BaseModel
from .inline_skill_source import InlineSkillSource
__all__ = ["InlineSkill"]
class InlineSkill(BaseModel):
description: str
"""The description of the ... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/inline_skill.py",
"license": "Apache License 2.0",
"lines": 14,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/inline_skill_param.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Literal, Required, TypedDict
from .inline_skill_source_param import InlineSkillSourceParam
__all__ = ["InlineSkillParam"]
class InlineSkillParam(TypedDict, total=F... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/inline_skill_param.py",
"license": "Apache License 2.0",
"lines": 14,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/inline_skill_source.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing_extensions import Literal
from ..._models import BaseModel
__all__ = ["InlineSkillSource"]
class InlineSkillSource(BaseModel):
"""Inline skill payload"""
data: str
"""Base64-encoded skill zip bundle."""
... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/inline_skill_source.py",
"license": "Apache License 2.0",
"lines": 12,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/inline_skill_source_param.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Literal, Required, TypedDict
__all__ = ["InlineSkillSourceParam"]
class InlineSkillSourceParam(TypedDict, total=False):
"""Inline skill payload"""
data: Re... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/inline_skill_source_param.py",
"license": "Apache License 2.0",
"lines": 12,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/local_environment.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List, Optional
from typing_extensions import Literal
from ..._models import BaseModel
from .local_skill import LocalSkill
__all__ = ["LocalEnvironment"]
class LocalEnvironment(BaseModel):
type: Literal["loc... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/local_environment.py",
"license": "Apache License 2.0",
"lines": 11,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/local_environment_param.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Iterable
from typing_extensions import Literal, Required, TypedDict
from .local_skill_param import LocalSkillParam
__all__ = ["LocalEnvironmentParam"]
class LocalEnvironmentP... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/local_environment_param.py",
"license": "Apache License 2.0",
"lines": 11,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/local_skill.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from ..._models import BaseModel
__all__ = ["LocalSkill"]
class LocalSkill(BaseModel):
description: str
"""The description of the skill."""
name: str
"""The name of the skill."""
path: str
"""The path to ... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/local_skill.py",
"license": "Apache License 2.0",
"lines": 10,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/local_skill_param.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Required, TypedDict
__all__ = ["LocalSkillParam"]
class LocalSkillParam(TypedDict, total=False):
description: Required[str]
"""The description of the skill.... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/local_skill_param.py",
"license": "Apache License 2.0",
"lines": 11,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
openai/openai-python:src/openai/types/responses/response_container_reference.py | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing_extensions import Literal
from ..._models import BaseModel
__all__ = ["ResponseContainerReference"]
class ResponseContainerReference(BaseModel):
"""Represents a container created with /v1/containers."""
conta... | {
"repo_id": "openai/openai-python",
"file_path": "src/openai/types/responses/response_container_reference.py",
"license": "Apache License 2.0",
"lines": 9,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |