Compare commits

..

2 commits

Author SHA1 Message Date
00ec9f7f8a
roll out tracker unlock implementation
Some checks failed
Mypy / mypy (push) Successful in 49s
Pytest / pytest (3.12) (push) Successful in 49s
Pytest / pytest (3.13) (push) Successful in 48s
Pytest / pytest (3.14) (push) Successful in 48s
Ruff / ruff (push) Failing after 33s
Mypy / mypy (pull_request) Successful in 44s
Pytest / pytest (3.12) (pull_request) Successful in 50s
Pytest / pytest (3.13) (pull_request) Successful in 50s
Pytest / pytest (3.14) (pull_request) Successful in 47s
Ruff / ruff (pull_request) Failing after 33s
2025-12-04 15:15:24 +01:00
bc57087a5a
remove temporary non-datalad implementation 2025-12-04 15:14:28 +01:00
4 changed files with 58 additions and 45 deletions

View file

@ -6,7 +6,7 @@ from .git_tools import move_submodule
import shutil import shutil
from .find import _project_lookup_by_id from .find import _project_lookup_by_id
from .tools import list2str, str2list, get_db_file from .tools import list2str, str2list, get_db_file
from .tracker import get, save from .tracker import get, save, unlock, init, clone, drop
from typing import Union, Optional from typing import Union, Optional
@ -34,7 +34,7 @@ def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Uni
if known_projects.fetchone(): if known_projects.fetchone():
raise ValueError("Project already imported, use update_project() instead.") raise ValueError("Project already imported, use update_project() instead.")
dl.unlock(db, dataset=path) unlock(path, db_file)
alias_str = "" alias_str = ""
if aliases is not None: if aliases is not None:
alias_str = list2str(aliases) alias_str = list2str(aliases)
@ -80,7 +80,7 @@ def update_aliases(path: str, uuid: str, aliases: list[str]) -> None:
if not len(new_alias_list) == len(known_alias_list): if not len(new_alias_list) == len(known_alias_list):
alias_str = list2str(new_alias_list) alias_str = list2str(new_alias_list)
dl.unlock(db, dataset=path) unlock(path, db_file)
update_project_data(path, uuid, "aliases", alias_str) update_project_data(path, uuid, "aliases", alias_str)
save(path, message="Updated aliases for project " + uuid, files=[db_file]) save(path, message="Updated aliases for project " + uuid, files=[db_file])
return return
@ -113,12 +113,7 @@ def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Opti
""" """
tmp_path = os.path.join(path, 'projects/tmp') tmp_path = os.path.join(path, 'projects/tmp')
if not isDataset: clone(path, source=url, target=tmp_path)
dl.create(tmp_path, dataset=path)
shutil.copytree(url + "/*", path + '/projects/tmp/')
save(path, message="Created temporary project dataset", files=['projects/tmp'])
else:
dl.install(path=tmp_path, source=url, dataset=path)
tmp_ds = dl.Dataset(tmp_path) tmp_ds = dl.Dataset(tmp_path)
conf = dlc.ConfigManager(tmp_ds) conf = dlc.ConfigManager(tmp_ds)
uuid = str(conf.get("datalad.dataset.id")) uuid = str(conf.get("datalad.dataset.id"))
@ -126,9 +121,8 @@ def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Opti
raise ValueError("The dataset does not have a uuid!") raise ValueError("The dataset does not have a uuid!")
if not os.path.exists(path + "/projects/" + uuid): if not os.path.exists(path + "/projects/" + uuid):
db_file = get_db_file(path) db_file = get_db_file(path)
db = os.path.join(path, db_file)
get(path, db_file) get(path, db_file)
dl.unlock(db, dataset=path) unlock(path, db_file)
create_project(path, uuid, owner, tags, aliases, code) create_project(path, uuid, owner, tags, aliases, code)
move_submodule(path, 'projects/tmp', 'projects/' + uuid) move_submodule(path, 'projects/tmp', 'projects/' + uuid)
os.mkdir(path + '/import_scripts/' + uuid) os.mkdir(path + '/import_scripts/' + uuid)
@ -151,6 +145,6 @@ def drop_project_data(path: str, uuid: str, path_in_project: str = "") -> None:
""" """
Drop (parts of) a project to free up diskspace Drop (parts of) a project to free up diskspace
""" """
dl.drop(path + "/projects/" + uuid + "/" + path_in_project) drop(path + "/projects/" + uuid + "/" + path_in_project)
return return

View file

@ -8,7 +8,7 @@ from typing import Union
from pyerrors import Obs, Corr, dump_object, load_object from pyerrors import Obs, Corr, dump_object, load_object
from hashlib import sha256 from hashlib import sha256
from .tools import get_db_file, cache_enabled from .tools import get_db_file, cache_enabled
from .tracker import get, save from .tracker import get, save, unlock
import shutil import shutil
from typing import Any from typing import Any
@ -32,7 +32,7 @@ def write_measurement(path: str, ensemble: str, measurement: dict[str, dict[str,
db_file = get_db_file(path) db_file = get_db_file(path)
db = os.path.join(path, db_file) db = os.path.join(path, db_file)
get(path, db_file) get(path, db_file)
dl.unlock(db, dataset=path) unlock(path, db_file)
conn = sqlite3.connect(db) conn = sqlite3.connect(db)
c = conn.cursor() c = conn.cursor()
files = [] files = []
@ -45,7 +45,7 @@ def write_measurement(path: str, ensemble: str, measurement: dict[str, dict[str,
os.makedirs(os.path.join(path, '.', 'archive', ensemble, corr)) os.makedirs(os.path.join(path, '.', 'archive', ensemble, corr))
else: else:
if os.path.exists(file): if os.path.exists(file):
dl.unlock(file, dataset=path) unlock(path, file_in_archive)
known_meas = pj.load_json_dict(file) known_meas = pj.load_json_dict(file)
if code == "sfcf": if code == "sfcf":
parameters = sfcf.read_param(path, uuid, parameter_file) parameters = sfcf.read_param(path, uuid, parameter_file)
@ -184,7 +184,7 @@ def drop_record(path: str, meas_path: str) -> None:
db = os.path.join(path, db_file) db = os.path.join(path, db_file)
get(path, db_file) get(path, db_file)
sub_key = meas_path.split("::")[1] sub_key = meas_path.split("::")[1]
dl.unlock(db, dataset=path) unlock(path, db_file)
conn = sqlite3.connect(db) conn = sqlite3.connect(db)
c = conn.cursor() c = conn.cursor()
if c.execute("SELECT * FROM backlogs WHERE path = ?", (meas_path, )).fetchone() is not None: if c.execute("SELECT * FROM backlogs WHERE path = ?", (meas_path, )).fetchone() is not None:
@ -196,7 +196,7 @@ def drop_record(path: str, meas_path: str) -> None:
known_meas = pj.load_json_dict(file) known_meas = pj.load_json_dict(file)
if sub_key in known_meas: if sub_key in known_meas:
del known_meas[sub_key] del known_meas[sub_key]
dl.unlock(file, dataset=path) unlock(path, file_in_archive)
pj.dump_dict_to_json(known_meas, file) pj.dump_dict_to_json(known_meas, file)
save(path, message="Drop measurements to database", files=[db, file]) save(path, message="Drop measurements to database", files=[db, file])
return return

View file

@ -1,7 +1,9 @@
import os import os
from configparser import ConfigParser from configparser import ConfigParser
from .trackers import datalad as dl import datalad.api as dl
from typing import Optional from typing import Optional
import shutil
from .tools import get_db_file
def get_tracker(path: str) -> str: def get_tracker(path: str) -> str:
@ -18,7 +20,12 @@ def get_tracker(path: str) -> str:
def get(path: str, file: str) -> None: def get(path: str, file: str) -> None:
tracker = get_tracker(path) tracker = get_tracker(path)
if tracker == 'datalad': if tracker == 'datalad':
dl.get(path, file) if file == get_db_file(path):
print("Downloading database...")
else:
print("Downloading data...")
dl.get(os.path.join(path, file), dataset=path)
print("> downloaded file")
elif tracker == 'None': elif tracker == 'None':
pass pass
else: else:
@ -29,7 +36,9 @@ def get(path: str, file: str) -> None:
def save(path: str, message: str, files: Optional[list[str]]=None) -> None: def save(path: str, message: str, files: Optional[list[str]]=None) -> None:
tracker = get_tracker(path) tracker = get_tracker(path)
if tracker == 'datalad': if tracker == 'datalad':
dl.save(path, message, files) if files is not None:
files = [os.path.join(path, f) for f in files]
dl.save(files, message=message, dataset=path)
elif tracker == 'None': elif tracker == 'None':
pass pass
else: else:
@ -44,3 +53,38 @@ def init(path: str, tracker: str='datalad') -> None:
else: else:
raise ValueError(f"Tracker {tracker} is not supported.") raise ValueError(f"Tracker {tracker} is not supported.")
return return
def unlock(path: str, file: str) -> None:
    """
    Unlock a tracked file so that it can be modified.

    Parameters
    ----------
    path
        Root path of the dataset/repository.
    file
        Path of the file to unlock, relative to ``path``
        (same convention as ``get()`` and ``save()``).
    """
    tracker = get_tracker(path)
    if tracker == 'datalad':
        # Resolve the file against the dataset root, mirroring get()/save().
        # A bare relative path handed to datalad as a string is resolved
        # against the CWD, not the dataset, which breaks callers that do
        # not run from inside `path`.
        dl.unlock(os.path.join(path, file), dataset=path)
    elif tracker == 'None':
        # No tracker: files are plain writable files, nothing to unlock.
        pass
    else:
        raise ValueError(f"Tracker {tracker} is not supported.")
    return
def clone(path: str, source: str, target: str) -> None:
    """
    Clone a dataset from ``source`` into ``target``.

    Parameters
    ----------
    path
        Root path of the superdataset/repository the clone belongs to.
    source
        URL or path of the dataset to clone.
    target
        Destination path for the clone.
    """
    tracker = get_tracker(path)
    if tracker == 'datalad':
        # datalad.api.clone takes the destination as `path`; there is no
        # `target` keyword — passing one raises TypeError.
        dl.clone(source=source, path=target, dataset=path)
    elif tracker == 'None':
        os.makedirs(path, exist_ok=True)
        # Implement a simple clone by copying files; fail loudly if the
        # target already exists rather than silently merging trees.
        shutil.copytree(source, target, dirs_exist_ok=False)
    else:
        raise ValueError(f"Tracker {tracker} is not supported.")
    return
def drop(path: str, reckless: Optional[str]=None) -> None:
    """
    Drop file content to free up disk space.

    Parameters
    ----------
    path
        File or directory whose content should be dropped.
    reckless
        Passed through to ``datalad.api.drop`` (e.g. ``'kill'``);
        ignored for the ``'None'`` tracker.
    """
    tracker = get_tracker(path)
    if tracker == 'datalad':
        dl.drop(path, reckless=reckless)
    elif tracker == 'None':
        # Without a tracker the content cannot be re-obtained; deleting is
        # the closest equivalent. Callers may pass a single file
        # (drop_project_data with path_in_project), on which rmtree would
        # raise NotADirectoryError — handle both cases.
        if os.path.isdir(path):
            shutil.rmtree(path)
        else:
            os.remove(path)
    else:
        raise ValueError(f"Tracker {tracker} is not supported.")
    return

View file

@ -1,25 +0,0 @@
import datalad.api as dl
import os
from typing import Optional
def get(path: str, file: str) -> None:
    """
    Fetch the content of *file* (relative to *path*) through datalad.

    Prints a short progress message; the wording depends on whether the
    requested file is the backlog database.
    """
    message = "Downloading database..." if file == "backlogger.db" else "Downloading data..."
    print(message)
    target = os.path.join(path, file)
    dl.get(target, dataset=path)
    print("> downloaded file")
    return
def save(path: str, message: str, files: Optional[list[str]]=None) -> None:
    """
    Record changes in the dataset at *path* with the given commit *message*.

    *files* are paths relative to *path*; when ``None``, datalad saves
    all outstanding modifications.
    """
    resolved: Optional[list[str]] = None
    if files is not None:
        resolved = []
        for name in files:
            resolved.append(os.path.join(path, name))
    dl.save(resolved, message=message, dataset=path)
    return
def create(path: str) -> None:
    """Initialise a new datalad dataset at *path*."""
    dl.create(path)
    return