Merge branch 'develop' into refactor/data_backend
Some checks failed
Mypy / mypy (push) Failing after 34s
Pytest / pytest (3.12) (push) Failing after 47s
Pytest / pytest (3.13) (push) Failing after 44s
Pytest / pytest (3.14) (push) Failing after 46s
Ruff / ruff (push) Failing after 33s

This commit is contained in:
Justus Kuhlmann 2025-12-04 11:16:23 +01:00
commit 641c612a59
Signed by: jkuhl
GPG key ID: 00ED992DD79B85A6
26 changed files with 3012 additions and 110 deletions

View file

@ -7,10 +7,10 @@ import shutil
from .find import _project_lookup_by_id
from .tools import list2str, str2list
from .tracker import get_file
from typing import Union
from typing import Union, Optional
def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Union[str, None]=None, aliases: Union[str, None]=None, code: Union[str, None]=None):
def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Union[list[str], None]=None, aliases: Union[list[str], None]=None, code: Union[str, None]=None) -> None:
"""
Create a new project entry in the database.
@ -34,10 +34,10 @@ def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Uni
raise ValueError("Project already imported, use update_project() instead.")
dl.unlock(db, dataset=path)
alias_str = None
alias_str = ""
if aliases is not None:
alias_str = list2str(aliases)
tag_str = None
tag_str = ""
if tags is not None:
tag_str = list2str(tags)
c.execute("INSERT INTO projects (id, aliases, customTags, owner, code, created_at, updated_at) VALUES (?, ?, ?, ?, ?, datetime('now'), datetime('now'))", (uuid, alias_str, tag_str, owner, code))
@ -46,7 +46,7 @@ def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Uni
dl.save(db, message="Added entry for project " + uuid + " to database", dataset=path)
def update_project_data(path, uuid, prop, value = None):
def update_project_data(path: str, uuid: str, prop: str, value: Union[str, None] = None) -> None:
get_file(path, "backlogger.db")
conn = sqlite3.connect(os.path.join(path, "backlogger.db"))
c = conn.cursor()
@ -56,7 +56,7 @@ def update_project_data(path, uuid, prop, value = None):
return
def update_aliases(path: str, uuid: str, aliases: list[str]):
def update_aliases(path: str, uuid: str, aliases: list[str]) -> None:
db = os.path.join(path, "backlogger.db")
get_file(path, "backlogger.db")
known_data = _project_lookup_by_id(db, uuid)[0]
@ -83,7 +83,7 @@ def update_aliases(path: str, uuid: str, aliases: list[str]):
return
def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Union[str, None]=None, aliases: Union[str, None]=None, code: Union[str, None]=None, isDataset: bool=True):
def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Optional[list[str]]=None, aliases: Optional[list[str]]=None, code: Optional[str]=None, isDataset: bool=True) -> str:
"""
Parameters
----------
@ -118,7 +118,7 @@ def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Unio
dl.install(path=tmp_path, source=url, dataset=path)
tmp_ds = dl.Dataset(tmp_path)
conf = dlc.ConfigManager(tmp_ds)
uuid = conf.get("datalad.dataset.id")
uuid = str(conf.get("datalad.dataset.id"))
if not uuid:
raise ValueError("The dataset does not have a uuid!")
if not os.path.exists(path + "/projects/" + uuid):
@ -131,21 +131,22 @@ def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Unio
dl.save([db, path + '/projects/' + uuid], message="Import project from " + url, dataset=path)
else:
dl.drop(tmp_path, reckless='kill')
shutil.rmtree(tmp_path)
shutil.rmtree(tmp_path)
if aliases is not None:
if isinstance(aliases, str):
alias_list = [aliases]
else:
alias_list = aliases
update_aliases(path, uuid, alias_list)
# make this more concrete
return uuid
def drop_project_data(path: str, uuid: str, path_in_project: str = "") -> None:
    """
    Drop (parts of) a project to free up disk space.

    Uses datalad's ``drop`` to remove file content (annexed data) while
    keeping the dataset's metadata intact, so it can be re-fetched later.

    Parameters
    ----------
    path : str
        Path to the backlog dataset root.
    uuid : str
        UUID of the project whose data should be dropped.
    path_in_project : str, optional
        Subpath inside the project to drop; the default ``""`` drops the
        whole project directory.
    """
    # NOTE(review): paths are built by string concatenation elsewhere in this
    # module too — kept consistent here rather than switching to os.path.join.
    dl.drop(path + "/projects/" + uuid + "/" + path_in_project)