Commits (29)
bf6e000  changes made (satvshr, Dec 29, 2025)
8b6af81  Merge branch 'issue1564' of https://github.com/satvshr/openml-python … (satvshr, Dec 30, 2025)
834782c  bug fixing (satvshr, Dec 30, 2025)
38ae9be  test failures fix (satvshr, Dec 30, 2025)
93ab9c2  Update flow_id_tutorial.py (satvshr, Dec 30, 2025)
aa25dd6  _defaults bug fixing (satvshr, Dec 30, 2025)
a98b6b1  removed __setattr__ given it is not supported (satvshr, Dec 31, 2025)
7c82054  Merge branch 'main' into issue1564 (satvshr, Dec 31, 2025)
f8fbe1e  Merge branch 'main' into issue1564 (satvshr, Jan 1, 2026)
4fdcb64  Merge branch 'main' into issue1564 (satvshr, Jan 2, 2026)
b3513f0  Merge branch 'main' into issue1564 (satvshr, Jan 4, 2026)
146dd21  Update all files (satvshr, Jan 5, 2026)
7a67bf0  Update introduction_tutorial.py (satvshr, Jan 5, 2026)
b111905  Merge branch 'main' into pr/1577 (fkiraly, Jan 7, 2026)
83f36c2  Merge branch 'issue1564' of https://github.com/satvshr/openml-python … (fkiraly, Jan 7, 2026)
4241624  bug fixing (satvshr, Jan 12, 2026)
f01c1e9  Update test_utils.py (satvshr, Jan 12, 2026)
07cc1c8  Update test_config.py (satvshr, Jan 12, 2026)
1dbc780  merge main (satvshr, Jan 14, 2026)
021a1e1  made requested changes (satvshr, Jan 15, 2026)
4c4a12c  Merge branch 'main' into issue1564 (satvshr, Jan 15, 2026)
1d91220  made requested changes (satvshr, Jan 15, 2026)
3e26ace  Merge branch 'issue1564' of https://github.com/satvshr/openml-python … (satvshr, Jan 15, 2026)
0060b2e  fixed bugs (satvshr, Jan 15, 2026)
65ba66b  fixed bugs (satvshr, Jan 15, 2026)
317c6e9  fixed bugs (satvshr, Jan 15, 2026)
503ab82  fixed bugs (satvshr, Jan 15, 2026)
fd7ea2b  Merge branch 'main' into issue1564 (satvshr, Jan 16, 2026)
fa3cd40  bug fixing (satvshr, Jan 16, 2026)
openml/__init__.py (9 changes: 8 additions & 1 deletion)
@@ -18,9 +18,11 @@
 # License: BSD 3-Clause
 from __future__ import annotations
 
+from typing import TYPE_CHECKING
+
 from . import (
     _api_calls,
-    config,
+    _config as _config_module,
     datasets,
     evaluations,
     exceptions,
@@ -49,6 +51,11 @@
     OpenMLTask,
 )
 
+if TYPE_CHECKING:
+    from ._config import OpenMLConfigManager
+
+config: OpenMLConfigManager = _config_module._config
+
 
 def populate_cache(
     task_ids: list[int] | None = None,
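Taken together, these __init__.py hunks swap the re-exported config submodule for a config object: the private _config module's singleton is bound to the public name config, and OpenMLConfigManager is imported only under TYPE_CHECKING so the annotation is checked statically without forcing a runtime (and potentially circular) import. Below is a minimal, self-contained sketch of the same pattern; the class body and its default values are illustrative only and not taken from the PR.

from __future__ import annotations


class OpenMLConfigManager:
    """Hypothetical, minimal stand-in for the manager class in openml/_config.py."""

    def __init__(self) -> None:
        # Illustrative defaults only; the real values live in openml._config.
        self.server: str = "https://www.openml.org/api/v1/xml"
        self.apikey: str = ""
        self.show_progress: bool = False
        self.connection_n_retries: int = 5
        self.retry_policy: str = "human"


# Module-level singleton; in the package this object is `openml._config._config`.
_config = OpenMLConfigManager()

# In openml/__init__.py the class name is imported under `if TYPE_CHECKING:` so the
# annotation below is visible to type checkers without a runtime import; in this
# sketch the class is defined locally, so no guard is needed.
config: OpenMLConfigManager = _config

# Downstream modules read settings as attributes of one object:
print(config.server, config.connection_n_retries)

Callers keep the same openml.config.<attribute> syntax; the difference is that it now resolves to attributes of a single typed object rather than to module globals.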
openml/_api_calls.py (21 changes: 12 additions & 9 deletions)
@@ -12,14 +12,16 @@
 import xml
 import zipfile
 from pathlib import Path
+from typing import cast
 
 import minio
 import requests
 import requests.utils
 import xmltodict
 from urllib3 import ProxyManager
 
-from . import config
+import openml
+
 from .__version__ import __version__
 from .exceptions import (
     OpenMLHashException,
@@ -70,7 +72,7 @@ def resolve_env_proxies(url: str) -> str | None:
 
 
 def _create_url_from_endpoint(endpoint: str) -> str:
-    url = config.server
+    url = cast("str", openml.config.server)
     if not url.endswith("/"):
         url += "/"
     url += endpoint
@@ -171,7 +173,7 @@ def _download_minio_file(
         bucket_name=bucket,
         object_name=object_name,
         file_path=str(destination),
-        progress=ProgressBar() if config.show_progress else None,
+        progress=ProgressBar() if openml.config.show_progress else None,
         request_headers=_HEADERS,
     )
     if destination.is_file() and destination.suffix == ".zip":
@@ -300,7 +302,8 @@ def _file_id_to_url(file_id: int, filename: str | None = None) -> str:
     Presents the URL how to download a given file id
     filename is optional
     """
-    openml_url = config.server.split("/api/")
+    openml_server = cast("str", openml.config.server)
+    openml_url = openml_server.split("/api/")
     url = openml_url[0] + f"/data/download/{file_id!s}"
     if filename is not None:
         url += "/" + filename
@@ -316,7 +319,7 @@ def _read_url_files(
     and sending file_elements as files
     """
     data = {} if data is None else data
-    data["api_key"] = config.apikey
+    data["api_key"] = openml.config.apikey
     if file_elements is None:
         file_elements = {}
     # Using requests.post sets header 'Accept-encoding' automatically to
@@ -336,8 +339,8 @@ def __read_url(
     md5_checksum: str | None = None,
 ) -> requests.Response:
     data = {} if data is None else data
-    if config.apikey:
-        data["api_key"] = config.apikey
+    if openml.config.apikey:
+        data["api_key"] = openml.config.apikey
     return _send_request(
         request_method=request_method,
         url=url,
@@ -362,10 +365,10 @@ def _send_request( # noqa: C901, PLR0912
     files: FILE_ELEMENTS_TYPE | None = None,
     md5_checksum: str | None = None,
 ) -> requests.Response:
-    n_retries = max(1, config.connection_n_retries)
+    n_retries = max(1, openml.config.connection_n_retries)
 
     response: requests.Response | None = None
-    delay_method = _human_delay if config.retry_policy == "human" else _robot_delay
+    delay_method = _human_delay if openml.config.retry_policy == "human" else _robot_delay
 
     # Error to raise in case of retrying too often. Will be set to the last observed exception.
     retry_raise_e: Exception | None = None
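In _api_calls.py the relative `from . import config` is replaced by `import openml` plus attribute access through `openml.config`, and `cast("str", openml.config.server)` is used where the type checker cannot otherwise see the value as a plain str. Below is a short, self-contained sketch of what `typing.cast` does at these call sites; `get_server()` is a hypothetical accessor standing in for `openml.config.server`.

from __future__ import annotations

from typing import cast


def get_server() -> str | None:
    """Hypothetical accessor; stands in for `openml.config.server`, which the
    checker may see with a broader type than `str`."""
    return "https://www.openml.org/api/v1/xml"


def create_url_from_endpoint(endpoint: str) -> str:
    # `cast` is a no-op at runtime: it returns its second argument unchanged and
    # only tells the type checker to treat it as `str`, so `.endswith` and the
    # string concatenation below type-check without an explicit None check.
    url = cast("str", get_server())
    if not url.endswith("/"):
        url += "/"
    return url + endpoint


print(create_url_from_endpoint("data/list"))

An explicit `if url is None: raise ...` would narrow the type as well; `cast` keeps the call sites unchanged at the cost of trusting that the value really is a string.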