diff --git a/.github/workflows/mypy.yaml b/.github/workflows/mypy.yaml
new file mode 100644
index 0000000..791243f
--- /dev/null
+++ b/.github/workflows/mypy.yaml
@@ -0,0 +1,30 @@
+name: Mypy
+
+on:
+  push:
+  pull_request:
+  workflow_dispatch:
+
+jobs:
+  mypy:
+    runs-on: ubuntu-latest
+    env:
+      UV_CACHE_DIR: /tmp/.uv-cache
+    steps:
+      - name: Install git-annex
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y git-annex
+      - name: Check out the repository
+        uses: https://github.com/RouxAntoine/checkout@v4.1.8
+        with:
+          show-progress: true
+      - name: Install uv
+        uses: astral-sh/setup-uv@v7
+        with:
+          python-version: "3.12"
+          enable-cache: true
+      - name: Install corrlib
+        run: uv sync --locked --all-extras --dev --python "3.12"
+      - name: Run mypy
+        run: uv run mypy corrlib
diff --git a/corrlib/__main__.py b/corrlib/__main__.py
index ff94be6..24f9c83 100644
--- a/corrlib/__main__.py
+++ b/corrlib/__main__.py
@@ -1,8 +1,9 @@
 from corrlib import cli, __app_name__
 
 
-def main():
+def main() -> None:
     cli.app(prog_name=__app_name__)
+    return
 
 
 if __name__ == "__main__":
diff --git a/corrlib/find.py b/corrlib/find.py
index d65413c..50f9a84 100644
--- a/corrlib/find.py
+++ b/corrlib/find.py
@@ -5,10 +5,11 @@ import pandas as pd
 import numpy as np
 from .input.implementations import codes
 from .tools import k2m, get_file
+from typing import Any, Union, Optional
 
 
 # this will implement the search functionality
-def _project_lookup_by_alias(db, alias):
+def _project_lookup_by_alias(db: str, alias: str) -> str:
     # this will lookup the project name based on the alias
     conn = sqlite3.connect(db)
     c = conn.cursor()
@@ -19,10 +20,10 @@ def _project_lookup_by_alias(db, alias):
         print("Error: multiple projects found with alias " + alias)
     elif len(results) == 0:
         raise Exception("Error: no project found with alias " + alias)
-    return results[0][0]
+    return str(results[0][0])
 
 
-def _project_lookup_by_id(db, uuid):
+def _project_lookup_by_id(db: str, uuid: str) -> list[tuple[str, str]]:
     conn = sqlite3.connect(db)
     c = conn.cursor()
     c.execute(f"SELECT * FROM 'projects' WHERE id = '{uuid}'")
@@ -31,7 +32,8 @@ def _project_lookup_by_id(db, uuid):
     return results
 
 
-def _db_lookup(db, ensemble, correlator_name,code, project=None, parameters=None, created_before=None, created_after=None, updated_before=None, updated_after=None, revision=None):
+def _db_lookup(db: str, ensemble: str, correlator_name: str, code: str, project: Optional[str]=None, parameters: Optional[str]=None,
+               created_before: Optional[str]=None, created_after: Optional[Any]=None, updated_before: Optional[Any]=None, updated_after: Optional[Any]=None) -> pd.DataFrame:
     project_str = project
     search_expr = f"SELECT * FROM 'backlogs' WHERE name = '{correlator_name}' AND ensemble = '{ensemble}'"
@@ -55,7 +57,7 @@ def _db_lookup(db, ensemble, correlator_name,code, project=None, parameters=Non
     return results
 
 
-def sfcf_filter(results, **kwargs):
+def sfcf_filter(results: pd.DataFrame, **kwargs: Any) -> pd.DataFrame:
     drops = []
     for ind in range(len(results)):
         result = results.iloc[ind]
@@ -138,24 +140,25 @@ def sfcf_filter(results, **kwargs):
     return results.drop(drops)
 
 
-def find_record(path, ensemble, correlator_name, code, project=None, parameters=None, created_before=None, created_after=None, updated_before=None, updated_after=None, revision=None, **kwargs):
+def find_record(path: str, ensemble: str, correlator_name: str, code: str, project: Optional[str]=None, parameters: Optional[str]=None,
+                created_before: Optional[str]=None, created_after: Optional[str]=None, updated_before: Optional[str]=None, updated_after: Optional[str]=None, revision: Optional[str]=None, **kwargs: Any) -> pd.DataFrame:
     db = path + '/backlogger.db'
     if code not in codes:
         raise ValueError("Code " + code + "unknown, take one of the following:" + ", ".join(codes))
     get_file(path, "backlogger.db")
-    results = _db_lookup(db, ensemble, correlator_name,code, project, parameters=parameters, created_before=created_before, created_after=created_after, updated_before=updated_before, updated_after=updated_after, revision=revision)
+    results = _db_lookup(db, ensemble, correlator_name,code, project, parameters=parameters, created_before=created_before, created_after=created_after, updated_before=updated_before, updated_after=updated_after)
     if code == "sfcf":
         results = sfcf_filter(results, **kwargs)
     print("Found " + str(len(results)) + " result" + ("s" if len(results)>1 else ""))
     return results.reset_index()
 
 
-def find_project(path, name):
+def find_project(path: str, name: str) -> str:
     get_file(path, "backlogger.db")
     return _project_lookup_by_alias(os.path.join(path, "backlogger.db"), name)
 
 
-def list_projects(path):
+def list_projects(path: str) -> list[tuple[str, str]]:
     db = path + '/backlogger.db'
     get_file(path, "backlogger.db")
     conn = sqlite3.connect(db)
diff --git a/corrlib/git_tools.py b/corrlib/git_tools.py
index 6569be0..bde9871 100644
--- a/corrlib/git_tools.py
+++ b/corrlib/git_tools.py
@@ -5,7 +5,7 @@ import git
 GITMODULES_FILE = '.gitmodules'
 
 
-def move_submodule(repo_path, old_path, new_path):
+def move_submodule(repo_path: str, old_path: str, new_path: str) -> None:
     """
     Move a submodule to a new location.
 
@@ -41,3 +41,4 @@ def move_submodule(repo_path, old_path, new_path):
     repo.git.add('.gitmodules')
     # save new state of the dataset
     dl.save(repo_path, message=f"Move module from {old_path} to {new_path}", dataset=repo_path)
+    return
diff --git a/corrlib/initialization.py b/corrlib/initialization.py
index f6ef5aa..9bc48d0 100644
--- a/corrlib/initialization.py
+++ b/corrlib/initialization.py
@@ -3,7 +3,7 @@ import datalad.api as dl
 import os
 
 
-def _create_db(db):
+def _create_db(db: str) -> None:
     """
     Create the database file and the table.
 
@@ -32,9 +32,10 @@ def _create_db(db):
                   updated_at TEXT)''')
     conn.commit()
     conn.close()
+    return
 
 
-def create(path):
+def create(path: str) -> None:
     """
     Create folder of backlogs.
 
@@ -50,3 +51,4 @@ def create(path):
         fp.write(".cache")
         fp.close()
     dl.save(path, dataset=path, message="Initialize backlogger directory.")
+    return
\ No newline at end of file
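
The `Optional[...]` rewrites in find.py above, and in openQCD.py below, all address the same strict-mode rule: mypy no longer treats a `None` default as implicitly optional, so `names: list[str]=None` must become `names: Optional[list[str]]=None`. A minimal sketch of the pattern (the helper and its names are hypothetical, not taken from corrlib):

    from typing import Optional

    # Hypothetical example: under mypy --strict, a None default
    # requires an explicit Optional in the annotation.
    def read_names(names: Optional[list[str]] = None) -> list[str]:
        if names is None:  # narrows Optional[list[str]] to list[str]
            names = []
        return names
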
diff --git a/corrlib/input/openQCD.py b/corrlib/input/openQCD.py
index 0342a00..e0caba6 100644
--- a/corrlib/input/openQCD.py
+++ b/corrlib/input/openQCD.py
@@ -2,7 +2,7 @@ import pyerrors.input.openQCD as input
 import datalad.api as dl
 import os
 import fnmatch
-from typing import Any
+from typing import Any, Optional
 
 
 def read_ms1_param(path: str, project: str, file_in_project: str) -> dict[str, Any]:
@@ -67,7 +67,7 @@ def read_ms3_param(path: str, project: str, file_in_project: str) -> dict[str, A
     return param
 
 
-def read_rwms(path: str, project: str, dir_in_project: str, param: dict[str, Any], prefix: str, postfix: str="ms1", version: str='2.0', names: list[str]=None, files: list[str]=None) -> dict[str, Any]:
+def read_rwms(path: str, project: str, dir_in_project: str, param: dict[str, Any], prefix: str, postfix: str="ms1", version: str='2.0', names: Optional[list[str]]=None, files: Optional[list[str]]=None) -> dict[str, Any]:
     dataset = os.path.join(path, "projects", project)
     directory = os.path.join(dataset, dir_in_project)
     if files is None:
@@ -94,7 +94,7 @@ def read_rwms(path: str, project: str, dir_in_project: str, param: dict[str, Any
     return rw_dict
 
 
-def extract_t0(path: str, project: str, dir_in_project: str, param: dict[str, Any], prefix: str, dtr_read: int, xmin: int, spatial_extent: int, fit_range: int = 5, postfix: str=None, names: list[str]=None, files: list[str]=None) -> dict[str, Any]:
+def extract_t0(path: str, project: str, dir_in_project: str, param: dict[str, Any], prefix: str, dtr_read: int, xmin: int, spatial_extent: int, fit_range: int = 5, postfix: str="", names: Optional[list[str]]=None, files: Optional[list[str]]=None) -> dict[str, Any]:
     dataset = os.path.join(path, "projects", project)
     directory = os.path.join(dataset, dir_in_project)
     if files is None:
@@ -132,7 +132,7 @@ def extract_t0(path: str, project: str, dir_in_project: str, param: dict[str, An
     return t0_dict
 
 
-def extract_t1(path: str, project: str, dir_in_project: str, param: dict[str, Any], prefix: str, dtr_read: int, xmin: int, spatial_extent: int, fit_range: int = 5, postfix: str = None, names: list[str]=None, files: list[str]=None) -> dict[str, Any]:
+def extract_t1(path: str, project: str, dir_in_project: str, param: dict[str, Any], prefix: str, dtr_read: int, xmin: int, spatial_extent: int, fit_range: int = 5, postfix: str = "", names: Optional[list[str]]=None, files: Optional[list[str]]=None) -> dict[str, Any]:
     directory = os.path.join(path, "projects", project, dir_in_project)
     if files is None:
         files = []
diff --git a/corrlib/input/sfcf.py b/corrlib/input/sfcf.py
index 79532e9..ed5a45a 100644
--- a/corrlib/input/sfcf.py
+++ b/corrlib/input/sfcf.py
@@ -5,7 +5,7 @@ import os
 from typing import Any
 
 
-bi_corrs: list = ["f_P", "fP", "f_p",
+bi_corrs: list[str] = ["f_P", "fP", "f_p",
                   "g_P", "gP", "g_p",
                   "fA0", "f_A", "f_a",
                   "gA0", "g_A", "g_a",
@@ -43,7 +43,7 @@ bi_corrs: list = ["f_P", "fP", "f_p",
                   "l3A2", "l3_A2", "g_av23",
                   ]
 
-bb_corrs: list = [
+bb_corrs: list[str] = [
     'F1',
     'F_1',
     'f_1',
@@ -64,7 +64,7 @@ bb_corrs: list = [
     'F_sPdP_d',
 ]
 
-bib_corrs: list = [
+bib_corrs: list[str] = [
     'F_V0',
     'K_V0',
 ]
@@ -184,7 +184,7 @@ def read_param(path: str, project: str, file_in_project: str) -> dict[str, Any]:
     return params
 
 
-def _map_params(params: dict, spec_list: list) -> dict[str, Any]:
+def _map_params(params: dict[str, Any], spec_list: list[str]) -> dict[str, Any]:
     """
     Map the extracted parameters to the extracted data.
 
@@ -228,7 +228,7 @@ def _map_params(params: dict, spec_list: list) -> dict[str, Any]:
     return new_specs
 
 
-def get_specs(key, parameters, sep='/') -> str:
+def get_specs(key: str, parameters: dict[str, Any], sep: str = '/') -> str:
     key_parts = key.split(sep)
     if corr_types[key_parts[0]] == 'bi':
         param = _map_params(parameters, key_parts[1:-1])
@@ -238,7 +238,7 @@ def get_specs(key, parameters, sep='/') -> str:
     return s
 
 
-def read_data(path, project, dir_in_project, prefix, param, version='1.0c', cfg_seperator='n', sep='/', **kwargs) -> dict:
+def read_data(path: str, project: str, dir_in_project: str, prefix: str, param: dict[str, Any], version: str = '1.0c', cfg_seperator: str = 'n', sep: str = '/', **kwargs: Any) -> dict[str, Any]:
     """
     Extract the data from the sfcf file.
diff --git a/corrlib/main.py b/corrlib/main.py
index 0c2b0ea..e0bfbf2 100644
--- a/corrlib/main.py
+++ b/corrlib/main.py
@@ -6,10 +6,10 @@ from .git_tools import move_submodule
 import shutil
 from .find import _project_lookup_by_id
 from .tools import list2str, str2list, get_file
-from typing import Union
+from typing import Union, Optional
 
 
-def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Union[str, None]=None, aliases: Union[str, None]=None, code: Union[str, None]=None):
+def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Union[list[str], None]=None, aliases: Union[list[str], None]=None, code: Union[str, None]=None) -> None:
     """
     Create a new project entry in the database.
 
@@ -33,10 +33,10 @@ def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Uni
         raise ValueError("Project already imported, use update_project() instead.")
     dl.unlock(db, dataset=path)
 
-    alias_str = None
+    alias_str = ""
     if aliases is not None:
         alias_str = list2str(aliases)
-    tag_str = None
+    tag_str = ""
     if tags is not None:
         tag_str = list2str(tags)
     c.execute("INSERT INTO projects (id, aliases, customTags, owner, code, created_at, updated_at) VALUES (?, ?, ?, ?, ?, datetime('now'), datetime('now'))", (uuid, alias_str, tag_str, owner, code))
@@ -45,7 +45,7 @@ def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Uni
     dl.save(db, message="Added entry for project " + uuid + " to database", dataset=path)
 
 
-def update_project_data(path, uuid, prop, value = None):
+def update_project_data(path: str, uuid: str, prop: str, value: Union[str, None] = None) -> None:
     get_file(path, "backlogger.db")
     conn = sqlite3.connect(os.path.join(path, "backlogger.db"))
     c = conn.cursor()
@@ -55,7 +55,7 @@ def update_project_data(path, uuid, prop, value = None):
     return
 
 
-def update_aliases(path: str, uuid: str, aliases: list[str]):
+def update_aliases(path: str, uuid: str, aliases: list[str]) -> None:
     db = os.path.join(path, "backlogger.db")
     get_file(path, "backlogger.db")
     known_data = _project_lookup_by_id(db, uuid)[0]
@@ -82,7 +82,7 @@ def update_aliases(path: str, uuid: str, aliases: list[str]):
     return
 
 
-def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Union[str, None]=None, aliases: Union[str, None]=None, code: Union[str, None]=None, isDataset: bool=True):
+def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Optional[list[str]]=None, aliases: Optional[list[str]]=None, code: Optional[str]=None, isDataset: bool=True) -> str:
     """
     Parameters
     ----------
@@ -117,7 +117,7 @@ def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Unio
     dl.install(path=tmp_path, source=url, dataset=path)
     tmp_ds = dl.Dataset(tmp_path)
     conf = dlc.ConfigManager(tmp_ds)
-    uuid = conf.get("datalad.dataset.id")
+    uuid = str(conf.get("datalad.dataset.id"))
     if not uuid:
         raise ValueError("The dataset does not have a uuid!")
     if not os.path.exists(path + "/projects/" + uuid):
@@ -142,9 +142,10 @@ def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Unio
     return uuid
 
 
-def drop_project_data(path: str, uuid: str, path_in_project: str = ""):
+def drop_project_data(path: str, uuid: str, path_in_project: str = "") -> None:
     """
     Drop (parts of) a prject to free up diskspace
     """
     dl.drop(path + "/projects/" + uuid + "/" + path_in_project)
+    return
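
The `str(...)` wrappers introduced above, around `results[0][0]` in find.py and `conf.get("datalad.dataset.id")` in main.py, follow one idiom: values returned by untyped or `Any`-typed APIs are narrowed explicitly at the boundary instead of letting `Any` propagate. A hedged sketch with an assumed stand-in for the untyped call:

    from typing import Any

    # fetch_setting is illustrative only; store.get() returns Any,
    # and the str() wrapper narrows it to a concrete type.
    def fetch_setting(store: dict[str, Any], key: str) -> str:
        value = str(store.get(key, ""))
        if not value:
            raise ValueError(key + " is not set!")
        return value
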
diff --git a/corrlib/meas_io.py b/corrlib/meas_io.py
index 435f055..8d8a055 100644
--- a/corrlib/meas_io.py
+++ b/corrlib/meas_io.py
@@ -9,9 +9,10 @@ from pyerrors import Obs, Corr, dump_object, load_object
 from hashlib import sha256
 from .tools import cached, get_file
 import shutil
+from typing import Any
 
 
-def write_measurement(path, ensemble, measurement, uuid, code, parameter_file=None):
+def write_measurement(path: str, ensemble: str, measurement: dict[str, dict[str, dict[str, Any]]], uuid: str, code: str, parameter_file: str) -> None:
     """
     Write a measurement to the backlog.
     If the file for the measurement already exists, update the measurement.
@@ -97,7 +98,7 @@ def write_measurement(path, ensemble, measurement, uuid, code, parameter_file=No
     dl.save(files, message="Add measurements to database", dataset=path)
 
 
-def load_record(path: str, meas_path: str):
+def load_record(path: str, meas_path: str) -> Union[Corr, Obs]:
     """
     Load a list of records by their paths.
 
@@ -116,7 +117,7 @@ def load_record(path: str, meas_path: str):
     return load_records(path, [meas_path])[0]
 
 
-def load_records(path: str, meas_paths: list[str], preloaded = {}) -> list[Union[Corr, Obs]]:
+def load_records(path: str, meas_paths: list[str], preloaded: dict[str, Any] = {}) -> list[Union[Corr, Obs]]:
     """
     Load a list of records by their paths.
 
@@ -138,7 +139,7 @@ def load_records(path: str, meas_paths: list[str], preloaded = {}) -> list[Union
             needed_data[file] = []
         key = mpath.split("::")[1]
         needed_data[file].append(key)
-    returned_data: list = []
+    returned_data: list[Any] = []
     for file in needed_data.keys():
         for key in list(needed_data[file]):
             if os.path.exists(cache_path(path, file, key) + ".p"):
@@ -154,7 +155,7 @@ def load_records(path: str, meas_paths: list[str], preloaded = {}) -> list[Union
     return returned_data
 
 
-def cache_dir(path, file):
+def cache_dir(path: str, file: str) -> str:
     cache_path_list = [path]
     cache_path_list.append(".cache")
     cache_path_list.extend(file.split("/")[1:])
@@ -162,19 +163,19 @@ def cache_dir(path, file):
     return cache_path
 
 
-def cache_path(path, file, key):
+def cache_path(path: str, file: str, key: str) -> str:
     cache_path = os.path.join(cache_dir(path, file), key)
     return cache_path
 
 
-def preload(path: str, file: str):
+def preload(path: str, file: str) -> dict[str, Any]:
     get_file(path, file)
-    filedict = pj.load_json_dict(os.path.join(path, file))
+    filedict: dict[str, Any] = pj.load_json_dict(os.path.join(path, file))
     print("> read file")
     return filedict
 
 
-def drop_record(path: str, meas_path: str):
+def drop_record(path: str, meas_path: str) -> None:
     file_in_archive = meas_path.split("::")[0]
     file = os.path.join(path, file_in_archive)
     db = os.path.join(path, 'backlogger.db')
@@ -199,7 +200,9 @@ def drop_record(path: str, meas_path: str):
     else:
         raise ValueError("This measurement does not exist as a file!")
 
-def drop_cache(path: str):
+
+def drop_cache(path: str) -> None:
     cache_dir = os.path.join(path, ".cache")
     for f in os.listdir(cache_dir):
         shutil.rmtree(os.path.join(cache_dir, f))
+    return
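
The annotation on `returned_data: list[Any] = []` above is the standard fix for a further strict-mode complaint: mypy cannot infer an element type for an empty literal, so the type is attached at the point of creation. A small self-contained sketch (variable names are illustrative, not corrlib's):

    from typing import Any

    # Without the annotations, mypy --strict reports
    # "Need type annotation" for both empty literals.
    returned_data: list[Any] = []
    needed_data: dict[str, list[str]] = {}

    needed_data.setdefault("archive/f_P.json", []).append("f_P")
    returned_data.extend(needed_data["archive/f_P.json"])
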
diff --git a/corrlib/toml.py b/corrlib/toml.py
index abb5545..11065fe 100644
--- a/corrlib/toml.py
+++ b/corrlib/toml.py
@@ -16,15 +16,16 @@ from .meas_io import write_measurement
 import datalad.api as dl
 import os
 from .input.implementations import codes as known_codes
+from typing import Any
 
-def replace_string(string: str, name: str, val: str):
+def replace_string(string: str, name: str, val: str) -> str:
     if '{' + name + '}' in string:
         n = string.replace('{' + name + '}', val)
         return n
     else:
         return string
 
-def replace_in_meas(measurements: dict, vars: dict[str, str]):
+def replace_in_meas(measurements: dict[str, dict[str, Any]], vars: dict[str, str]) -> dict[str, dict[str, Any]]:
     # replace global variables
     for name, value in vars.items():
         for m in measurements.keys():
@@ -36,7 +37,7 @@ def replace_in_meas(measurements: dict, vars: dict[str, str]):
                     measurements[m][key][i] = replace_string(measurements[m][key][i], name, value)
     return measurements
 
-def fill_cons(measurements, constants):
+def fill_cons(measurements: dict[str, dict[str, Any]], constants: dict[str, str]) -> dict[str, dict[str, Any]]:
     for m in measurements.keys():
         for name, val in constants.items():
             if name not in measurements[m].keys():
@@ -44,7 +45,7 @@ def fill_cons(measurements, constants):
     return measurements
 
 
-def check_project_data(d: dict) -> None:
+def check_project_data(d: dict[str, dict[str, str]]) -> None:
     if 'project' not in d.keys() or 'measurements' not in d.keys() or len(list(d.keys())) > 4:
         raise ValueError('There should only be maximally be four keys on the top level, "project" and "measurements" are mandatory, "contants" is optional!')
     project_data = d['project']
@@ -57,7 +58,7 @@ def check_project_data(d: dict) -> None:
     return
 
 
-def check_measurement_data(measurements: dict, code: str) -> None:
+def check_measurement_data(measurements: dict[str, dict[str, str]], code: str) -> None:
     var_names: list[str] = []
     if code == "sfcf":
         var_names = ["path", "ensemble", "param_file", "version", "prefix", "cfg_seperator", "names"]
@@ -91,14 +92,14 @@ def import_toml(path: str, file: str, copy_file: bool=True) -> None:
     with open(file, 'rb') as fp:
         toml_dict = toml.load(fp)
     check_project_data(toml_dict)
-    project: dict = toml_dict['project']
+    project: dict[str, Any] = toml_dict['project']
     if project['code'] not in known_codes:
         raise ValueError('Code' + project['code'] + 'has no import implementation!')
-    measurements: dict = toml_dict['measurements']
+    measurements: dict[str, dict[str, Any]] = toml_dict['measurements']
     measurements = fill_cons(measurements, toml_dict['constants'] if 'constants' in toml_dict else {})
     measurements = replace_in_meas(measurements, toml_dict['replace'] if 'replace' in toml_dict else {})
     check_measurement_data(measurements, project['code'])
-    aliases = project.get('aliases', None)
+    aliases = project.get('aliases', [])
     uuid = project.get('uuid', None)
     if uuid is not None:
         if not os.path.exists(path + "/projects/" + uuid):
@@ -133,16 +134,16 @@ def import_toml(path: str, file: str, copy_file: bool=True) -> None:
                 for rwp in ["integrator", "eps", "ntot", "dnms"]:
                     param[rwp] = "Unknown"
             param['type'] = 't0'
            measurement = openQCD.extract_t0(path, uuid, md['path'], param, md["prefix"], md["dtr_read"], md["xmin"], md["spatial_extent"],
                                              fit_range=md.get('fit_range', 5), postfix=md.get('postfix', None), names=md.get('names', None), files=md.get('files', None))
+            measurement = openQCD.extract_t0(path, uuid, md['path'], param, str(md["prefix"]), int(md["dtr_read"]), int(md["xmin"]), int(md["spatial_extent"]),
+                                             fit_range=int(md.get('fit_range', 5)), postfix=str(md.get('postfix', '')), names=md.get('names', []), files=md.get('files', []))
         elif md['measurement'] == 't1':
             if 'param_file' in md:
                 param = openQCD.read_ms3_param(path, uuid, md['param_file'])
             param['type'] = 't1'
-            measurement = openQCD.extract_t1(path, uuid, md['path'], param, md["prefix"], md["dtr_read"], md["xmin"], md["spatial_extent"],
-                                             fit_range=md.get('fit_range', 5), postfix=md.get('postfix', None), names=md.get('names', None), files=md.get('files', None))
+            measurement = openQCD.extract_t1(path, uuid, md['path'], param, str(md["prefix"]), int(md["dtr_read"]), int(md["xmin"]), int(md["spatial_extent"]),
+                                             fit_range=int(md.get('fit_range', 5)), postfix=str(md.get('postfix', '')), names=md.get('names', []), files=md.get('files', []))
 
-        write_measurement(path, ensemble, measurement, uuid, project['code'], (md['param_file'] if 'param_file' in md else None))
+        write_measurement(path, ensemble, measurement, uuid, project['code'], (md['param_file'] if 'param_file' in md else ''))
 
     if not os.path.exists(os.path.join(path, "toml_imports", uuid)):
         os.makedirs(os.path.join(path, "toml_imports", uuid))
@@ -155,7 +156,7 @@ def import_toml(path: str, file: str, copy_file: bool=True) -> None:
     return
 
 
-def reimport_project(path, uuid):
+def reimport_project(path: str, uuid: str) -> None:
     """
     Reimport an existing project using the files that are already available for this project.
 
@@ -173,6 +174,7 @@ def reimport_project(path, uuid):
     return
 
 
-def update_project(path, uuid):
+def update_project(path: str, uuid: str) -> None:
     dl.update(how='merge', follow='sibling', dataset=os.path.join(path, "projects", uuid))
     # reimport_project(path, uuid)
+    return
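
In `import_toml` above, values pulled out of the parsed TOML dict are `Any`, so they are coerced with `str(...)`/`int(...)` before reaching the strictly typed `extract_t0`/`extract_t1` signatures, and the `None` sentinels become typed defaults (`''`, `[]`). A minimal sketch of that coercion step, using assumed keys rather than corrlib's real schema:

    from typing import Any

    # md stands in for one measurement table parsed from TOML.
    md: dict[str, Any] = {"prefix": "sfcf", "dtr_read": "4"}

    prefix = str(md["prefix"])              # Any -> str
    dtr_read = int(md["dtr_read"])          # Any -> int
    names: list[str] = md.get("names", [])  # typed default instead of None
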
diff --git a/corrlib/tools.py b/corrlib/tools.py
index 0ed2229..b4ae89e 100644
--- a/corrlib/tools.py
+++ b/corrlib/tools.py
@@ -2,10 +2,10 @@ import os
 import datalad.api as dl
 
 
-def str2list(string: str):
+def str2list(string: str) -> list[str]:
     return string.split(",")
 
 
-def list2str(mylist):
+def list2str(mylist: list[str]) -> str:
     s = ",".join(mylist)
     return s
@@ -19,10 +19,11 @@ def k2m(k: float) -> float:
     return (1/(2*k))-4
 
 
-def get_file(path: str, file: str):
+def get_file(path: str, file: str) -> None:
     if file == "backlogger.db":
         print("Downloading database...")
     else:
         print("Downloading data...")
     dl.get(os.path.join(path, file), dataset=path)
     print("> downloaded file")
+    return
diff --git a/pyproject.toml b/pyproject.toml
index ebcc834..faf7e6c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -9,8 +9,8 @@ dynamic = ["version"]
 dependencies = [
     "gitpython>=3.1.45",
     'pyerrors>=2.11.1',
-    'datalad>=1.1.0',
-    'typer>=0.12.5'
+    "datalad>=1.1.0",
+    'typer>=0.12.5',
 ]
 description = "Python correlation library"
 authors = [
@@ -35,9 +35,16 @@ extend-select = [
     "F",
 ]
 
+[tool.mypy]
+strict = true
+implicit_reexport = false
+follow_untyped_imports = false
+ignore_missing_imports = true
+
 [dependency-groups]
 dev = [
     "mypy>=1.19.0",
+    "pandas-stubs>=2.3.3.251201",
     "pytest>=9.0.1",
     "pytest-cov>=7.0.0",
     "pytest-pretty>=1.3.0",
diff --git a/uv.lock b/uv.lock
index e294cdb..f76ee81 100644
--- a/uv.lock
+++ b/uv.lock
@@ -416,6 +416,7 @@ dependencies = [
 [package.dev-dependencies]
 dev = [
     { name = "mypy" },
+    { name = "pandas-stubs" },
     { name = "pytest" },
     { name = "pytest-cov" },
     { name = "pytest-pretty" },
@@ -433,6 +434,7 @@ requires-dist = [
 [package.metadata.requires-dev]
 dev = [
     { name = "mypy", specifier = ">=1.19.0" },
+    { name = "pandas-stubs", specifier = ">=2.3.3.251201" },
     { name = "pytest", specifier = ">=9.0.1" },
     { name = "pytest-cov", specifier = ">=7.0.0" },
     { name = "pytest-pretty", specifier = ">=1.3.0" },
@@ -1772,6 +1774,20 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/70/44/5191d2e4026f86a2a109053e194d3ba7a31a2d10a9c2348368c63ed4e85a/pandas-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3869faf4bd07b3b66a9f462417d0ca3a9df29a9f6abd5d0d0dbab15dac7abe87", size = 13202175, upload-time = "2025-09-29T23:31:59.173Z" },
 ]
 
+[[package]]
+name = "pandas-stubs"
+version = "2.3.3.251201"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
+    { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" },
+    { name = "types-pytz" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ee/a6/491b2af2cb3ee232765a73fb273a44cc1ac33b154f7745b2df2ee1dc4d01/pandas_stubs-2.3.3.251201.tar.gz", hash = "sha256:7a980f4f08cff2a6d7e4c6d6d26f4c5fcdb82a6f6531489b2f75c81567fe4536", size = 107787, upload-time = "2025-12-01T18:29:22.403Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e2/68/78a3c253f146254b8e2c19f4a4768f272e12ef11001d9b45ec7b165db054/pandas_stubs-2.3.3.251201-py3-none-any.whl", hash = "sha256:eb5c9b6138bd8492fd74a47b09c9497341a278fcfbc8633ea4b35b230ebf4be5", size = 164638, upload-time = "2025-12-01T18:29:21.006Z" },
+]
+
 [[package]]
 name = "pathspec"
 version = "0.12.1"
@@ -2456,6 +2472,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/78/64/7713ffe4b5983314e9d436a90d5bd4f63b6054e2aca783a3cfc44cb95bbf/typer-0.20.0-py3-none-any.whl", hash = "sha256:5b463df6793ec1dca6213a3cf4c0f03bc6e322ac5e16e13ddd622a889489784a", size = 47028, upload-time = "2025-10-20T17:03:47.617Z" },
 ]
 
+[[package]]
+name = "types-pytz"
+version = "2025.2.0.20251108"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/40/ff/c047ddc68c803b46470a357454ef76f4acd8c1088f5cc4891cdd909bfcf6/types_pytz-2025.2.0.20251108.tar.gz", hash = "sha256:fca87917836ae843f07129567b74c1929f1870610681b4c92cb86a3df5817bdb", size = 10961, upload-time = "2025-11-08T02:55:57.001Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e7/c1/56ef16bf5dcd255155cc736d276efa6ae0a5c26fd685e28f0412a4013c01/types_pytz-2025.2.0.20251108-py3-none-any.whl", hash = "sha256:0f1c9792cab4eb0e46c52f8845c8f77cf1e313cb3d68bf826aa867fe4717d91c", size = 10116, upload-time = "2025-11-08T02:55:56.194Z" },
+]
+
 [[package]]
 name = "typing-extensions"
 version = "4.15.0"
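
The new `[tool.mypy]` table in pyproject.toml enables strict mode while still tolerating dependencies that ship no stubs (`ignore_missing_imports = true`, `follow_untyped_imports = false`); `pandas-stubs` and its transitive `types-pytz` supply the pandas types used in find.py. A hedged example (sample code, not from corrlib) of what the strict flags reject and accept:

    # Rejected by mypy --strict: "Function is missing a type annotation".
    def scale(values, factor):
        return [v * factor for v in values]

    # Accepted: the fully annotated equivalent.
    def scale_typed(values: list[float], factor: float) -> list[float]:
        return [v * factor for v in values]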