From b21d35858d5b24c2877f0a7250d950b20dc03087 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Wed, 10 May 2023 19:14:46 +0530 Subject: [PATCH 01/84] Using pyzenodo3 as a client to connect with Zenodo. --- tvb_library/tvb/.utils.py.swp | Bin 0 -> 12288 bytes tvb_library/tvb/datasets/__init__.py | 2 + tvb_library/tvb/datasets/utils.py | 171 +++++++++++++++++++++++++++ tvb_library/tvb/datasets/zenodo.py | 168 ++++++++++++++++++++++++++ 4 files changed, 341 insertions(+) create mode 100644 tvb_library/tvb/.utils.py.swp create mode 100644 tvb_library/tvb/datasets/__init__.py create mode 100644 tvb_library/tvb/datasets/utils.py create mode 100644 tvb_library/tvb/datasets/zenodo.py diff --git a/tvb_library/tvb/.utils.py.swp b/tvb_library/tvb/.utils.py.swp new file mode 100644 index 0000000000000000000000000000000000000000..77ebcc6dbcb19cb0a274d7194960a843e2e1abe6 GIT binary patch literal 12288 zcmeI2ON8`v0nXUYp%@;XvG^I{8h_R9AiV z)xW;4sy5Kr{m7A{d|zod!*dUPq|e1k#+-&H&q&>?TCvR9jeDIHo0u0*XAR4dQsuUa z+_GM1jK@)==}X<0jo691^u3&D>8nzwyShR9)+%5XxCRBb%+QKSe6|f3e1*`&A z0jq#jz$#!BunJfOtO8bnb*O;TWb6|D*#GQiIFEn-@BRIM^=`(#1|Nb;;BD|0xB#97 zv*0ke7inY~@EmA^7B~v>AP25)hOgi&@D9-61UL>3fEsuJYy!V+LVNHjcoD?lEI0v6+2RG)@!iCC zw5Ti3nW#<{TGAJFKXS)*)?81x^C}6(rV+2A#m(=!47F&-Qg@g3D5edl!}x(l6#3}J z2BSMt2|lBBEE|ay2je)33$2_C-6)QQtGVYWkGGQ0)iMhCg5xKm$b&>_-Vi(rL5pio z@Ss^OayVNllE)K!O;MdRb$h1vnzCA%DDo#^R>%d>G^XpB>8EmI#wP(4;2;S@PbS!i z9I!HwgV$>P`7!DN^~mL(-H;mO;1YV}rl|#Cp5<)A)9bK}Ua-wFyURK~vW9DR8ok!w zG}OKhr_pN-wky|Rxw5Jl2$X@%T0feNdP^6N)(-8--m;-SY-{)UEb3Bk>1`UKl%KlW5kvJmYn)nAMp6*uaye7FQMe#tZStc(1lN&i z%$tsO$WE^dPp6pnsq3gtL*jqm4aS+~m>CU{=dCE_NhnbS$ayaLVwCtz?#p>W11v`I zJYSUBLyG!cGaf8)vZ#=XXdv*~_j%7n2j3};$<4GGnaMmrs$(F*6D7koH491{E0U*S z$%r_oMcBj(sptc37Co{Wz&Io zN;XBz+aeUPbos-FXUxLx$4tX{KJIDVQPbse8&gdhB{vGn=S3Jbqh2AEN`xv`_Dxmu z<~y1MBGlve@0*;OnwY9oaIs-|vj$l(sFEknHDy~UT^Ji=HP)2}rKE#p69v~x!ud2D zsGe5CRB;({sYP7youJWl_%v^oV&OE?4s|8Zi?JeaXmNQQs+BBRY1Jqj;G!9^R@ptV7X#);V*YnPz{B 
zUTK=^YnHTmFEh*jp1$HN^T>rtkum~p(~^!`{DwkB?jRxY9mk#twDl6(mF2|vkGNiW z!S|cFQ=-wMZg}>ZQAd-PE&}Ps(K{t(2G=WWx&5n-*)r)aRh=}+PZur6?9YtMqIY&+ z4DSK`YsQLUlimHYcnF5~x>2=m{9Z@X3Wn68(|sTnqt~lxh2~3yDw#f{^XO*TzcwsY ziTAcjmOpglp@a3=nPZ3R$4<^3gjaJRF2DtIwV-aa7tf;PI3kz~IuXu!x_F|Vwc;q~ cpPkt|YMZ{mnNNpeR&UJq48-CrPAje0zi%ujX#fBK literal 0 HcmV?d00001 diff --git a/tvb_library/tvb/datasets/__init__.py b/tvb_library/tvb/datasets/__init__.py new file mode 100644 index 0000000000..cd42a007bc --- /dev/null +++ b/tvb_library/tvb/datasets/__init__.py @@ -0,0 +1,2 @@ +from .utils import * +from .zenodo import Record, Zenodo diff --git a/tvb_library/tvb/datasets/utils.py b/tvb_library/tvb/datasets/utils.py new file mode 100644 index 0000000000..988f2c2daa --- /dev/null +++ b/tvb_library/tvb/datasets/utils.py @@ -0,0 +1,171 @@ +import requests +from pathlib import Path +import hashlib +import urllib +from tqdm import tqdm + +""" +functions related to hashes functions + +""" + +USER_AGENT = "TVB_ROOT/TVB_LIBRARY" + +def calculate_md5(file_path:Path, chunk_size:int =1024) -> str : + """ + A function to calculate the md5 hash of a file. 
+ + """ + m = hashlib.md5() + with open(file_path, "rb") as f: + for chunk in iter(lambda : f.read(chunk_size), b""): + m.update(chunk) + return m.hexdigest(); + + + + +def calculate_sha256(file_path:Path, chunk_size:int =1024) -> str: + """ + / A function to calculate the sha256 hash of a file + """ + s = hashlib.sha256() + with open(file_path, "rb") as f: + for chunk in iter(lambda : f.read(chunk_size), b""): + s.update(chunk) + return s.hexdigest(); + + +def calculate_sha1(file_path:Path, chunk_size:int=1024)->str: + s = hashlib.sha1() + + with open(file_path, "rb") as f: + for chunk in iter(lambda : f.read(chunk_size), b""): + m.update(chunk) + + return s.hexdigest() + + + +def calculate_sha224(file_path:Path, chunk_size:int=1024)->str: + s = hashlib.sha224() + + with open(file_path, "rb") as f: + for chunk in iter(lambda : f.read(chunk_size), b""): + m.update(chunk) + + return s.hexdigest() + + +def calculate_sha384(file_path:Path, chunk_size:int=1024)->str: + s = hashlib.sha384() + + with open(file_path, "rb") as f: + for chunk in iter(lambda : f.read(chunk_size), b""): + m.update(chunk) + + return s.hexdigest() + +# +def calculate_sha512(file_path:Path, chunk_size:int=1024): + s = hashlib.sha512() + + with open(file_path, "rb") as f: + for chunk in iter(lambda : f.read(chunk_size), b""): + m.update(chunk) + + return s.hexdigest() +#. + +# okay there are some stuff which would consider generic SHA hash; link -https://github.com/zenodo/zenodo/issues/1985#issuecomment-796882811 + + + +AVAILABLE_HASH_FUNCTIONS = {"md5": calculate_md5, "sha1": calculate_sha1,"sha224":calculate_sha224, "sha256":calculate_sha256, "sha384":calculate_sha384, "sha512": calculate_sha512} # can extend this further + + +def convert_to_pathlib(file_path: str) ->Path: + """ + convert the file_path to Path datatype + """ + + if (type(file_path)!= Path): + return Path(file_path) + return file_path + + + +#should we keep a way to download a file without having to check the checksum? 
+ +def check_integrity(file_loc, checksum:str, hash_function="md5")->bool: + """ + This function checks if the file at `file_loc` has same checksum. + """ + + if hash_function not in AVAILABLE_HASH_FUNCTIONS.keys(): + raise AttributeError(f"incorrect hash function value, must be one of the md5, sha1,sha224,sha256, sha384, sha512, received {hash_functio}") + + if hash_function== "md5": + return calculate_md5(file_loc)==checksum + + if hash_function == "sha1": + return calculate_sha1(file_loc) == checksum + + if hash_function == "sha224": + return calculate_sha224(file_loc) == checksum + + if hash_function == "sha256": + return calculate_sha256(file_loc) == checksum + + if hash_function == "sha384": + return calculate_sha384(file_loc) == checksum + + if hash_function == "sha512": + return calculate_sha512(file_loc) == checksum + + + + +def download_file(url, checksum, hash_function, root): + if hash_function not in AVAILABLE_HASH_FUNCTIONS.keys(): + raise AttributeError(f"incorrect hash function value, must be one of the md5, sha1,sha224,sha256, sha384, sha512, received {hash_functio}") + + root = Path(root) + + if (not root.is_dir()): + root.mkdir(parents=True) + + file_name = url.split("/")[-1] + file_loc = root/file_name + + if (file_loc.is_file() and check_integrity(file_loc, checksum, hash_function)): + print(f"File {file_name} already downloaded at location {file_loc}") + return + + _urlretrieve(url, file_loc) + + #ToDO : what to do when the hash of the downloaded file doesnt match with the online value? discard the file ? warning the user? both? + + print(f"file {file_loc} downloaded successfully") + + + +# following functions are inspired from the torchvision. 
+def _save_response_content( + content, + destination, + length= None, +) : + with open(destination, "wb") as fh, tqdm(total=length) as pbar: + for chunk in content: + # filter out keep-alive new chunks + if not chunk: + continue + + fh.write(chunk) + pbar.update(len(chunk)) + + +def _urlretrieve(url, file_loc, chunk_size = 1024 * 32): + with urllib.request.urlopen(urllib.request.Request(url, headers={"User-Agent": USER_AGENT})) as response: + _save_response_content(iter(lambda: response.read(chunk_size), b""), file_loc, length=response.length) diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py new file mode 100644 index 0000000000..2f8dabc0cb --- /dev/null +++ b/tvb_library/tvb/datasets/zenodo.py @@ -0,0 +1,168 @@ +# code from https://github.com/space-physics/pyzenodo3 and https://github.com/space-physics/pyzenodo3/pull/9 +# code is copied here because the repo is inactive and author is not responding; hence no maintainance guarantee. + +import requests +import re +from bs4 import BeautifulSoup +from bs4.element import Tag +from urllib.parse import urlencode +from pathlib import Path +from .utils import download_file + +BASE_URL = "https://zenodo.org/api/" + + +class Record: + def __init__(self, data, zenodo, base_url: str = BASE_URL) -> None: + self.base_url = base_url + self.data = data + self._zenodo = zenodo + + def _row_to_version(self, row: Tag) -> dict[str, str]: + link = row.select("a")[0] + linkrec = row.select("a")[0].attrs["href"] + if not linkrec: + raise KeyError("record not found in parsed HTML") + + texts = row.select("small") + recmatch = re.match(r"/record/(\d*)", linkrec) + if not recmatch: + raise LookupError("record match not found in parsed HTML") + + recid = recmatch.group(1) + + return { + "recid": recid, + "name": link.text, + "doi": texts[0].text, + "date": texts[1].text, + "original_version": self._zenodo.get_record(recid).original_version(), + } + + def get_versions(self) -> list: + url = 
f"{self.base_url}srecords?all_versions=1&size=100&q=conceptrecid:{self.data['conceptrecid']}" + + data = requests.get(url).json() + + return [Record(hit, self._zenodo) for hit in data["hits"]["hits"]] + + def get_versions_from_webpage(self) -> list[dict]: + """Get version details from Zenodo webpage (it is not available in the REST api)""" + res = requests.get("https://zenodo.org/record/" + self.data["conceptrecid"]) + soup = BeautifulSoup(res.text, "html.parser") + version_rows = soup.select(".well.metadata > table.table tr") + if len(version_rows) == 0: # when only 1 version + return [ + { + "recid": self.data["id"], + "name": "1", + "doi": self.data["doi"], + "date": self.data["created"], + "original_version": self.original_version(), + } + ] + return [self._row_to_version(row) for row in version_rows if len(row.select("td")) > 1] + + def original_version(self): #TODO: check the implementation once again. + for identifier in self.data["metadata"]["related_identifiers"]: + if identifier["relation"] == "isSupplementTo": + return re.match(r".*/tree/(.*$)", identifier["identifier"]).group(1) + return None + + def __str__(self): + return str(self.data) # TODO: pretty print? Format the json to more readable version. + + def download(self, root="./"): + _root = Path(root) + print(self.data) + if 'files' not in self.data: + raise AttributeError("No files to download! Please check if the id entered is correct!") + + + for file in self.data['files']: + url = file['links']['self'] + hash_function, checksum = file['checksum'].split(":") + # now we will download the files to the root. + file_name_zenodo = file['key'] + + if file_name_zenodo in list(_root.iterdir()) and check_integrity(_root+file_name_zenodo,checksum, hash_function): + print(f"{file_name_zenodo} already exists at {root} having same checksum. Hence skipping the download!") + continue # the file already exists at the given location and checksum also matches! 
+ + + download_file(root= root,url= url, checksum = checksum, hash_function=hash_function) + + + +class Zenodo: + def __init__(self, api_key: str = "", base_url: str = BASE_URL) -> None: + self.base_url = base_url + self._api_key = api_key + self.re_github_repo = re.compile(r".*github.com/(.*?/.*?)[/$]") + + def search(self, search: str) -> list[Record]: + """search Zenodo record for string `search` + + :param search: string to search + :return: Record[] results + """ + search = search.replace("/", " ") # zenodo can't handle '/' in search query + params = {"q": search} + + recs = self._get_records(params) + + if not recs: + raise LookupError(f"No records found for search {search}") + + return recs + + def _extract_github_repo(self, identifier): + matches = self.re_github_repo.match(identifier) + + if matches: + return matches.group(1) + + raise LookupError(f"No records found with {identifier}") + + def find_record_by_github_repo(self, search: str): + records = self.search(search) + for record in records: + if ( + "metadata" not in record.data + or "related_identifiers" not in record.data["metadata"] + ): + continue + + for identifier in [ + identifier["identifier"] + for identifier in record.data["metadata"]["related_identifiers"] + ]: + repo = self._extract_github_repo(identifier) + + if repo and repo.upper() == search.upper(): + return record + + raise LookupError(f"No records found in {search}") + + def find_record_by_doi(self, doi: str): + params = {"q": f"conceptdoi:{doi.replace('/', '*')}"} + records = self._get_records(params) + + if len(records) > 0: + return records[0] + else: + params = {"q": "doi:%s" % doi.replace("/", "*")} + return self._get_records(params)[0] + + def get_record(self, recid: str) -> Record: + + url = self.base_url + "records/" + recid + + return Record(requests.get(url).json(), self) + + #TODO: can also add get record by user? Will that be useful by any means? 
+ + def _get_records(self, params: dict[str, str]) -> list[Record]: + url = self.base_url + "records?" + urlencode(params) + + return [Record(hit, self) for hit in requests.get(url).json()["hits"]["hits"]] From 1f9ba1e4eeafe56e14c8d1718a0747a0bf88c14a Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Fri, 12 May 2023 19:47:34 +0530 Subject: [PATCH 02/84] remove the swp file from git, add .swp extension in gitignore --- .gitignore | 3 +++ tvb_library/tvb/.utils.py.swp | Bin 12288 -> 0 bytes 2 files changed, 3 insertions(+) delete mode 100644 tvb_library/tvb/.utils.py.swp diff --git a/.gitignore b/.gitignore index 330ded9df8..8a03e8377c 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,9 @@ *.py[cod] __pycache__ +#vim binaries +*.swp + # packaging *.egg-info/ dist/ diff --git a/tvb_library/tvb/.utils.py.swp b/tvb_library/tvb/.utils.py.swp deleted file mode 100644 index 77ebcc6dbcb19cb0a274d7194960a843e2e1abe6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 12288 zcmeI2ON8`v0nXUYp%@;XvG^I{8h_R9AiV z)xW;4sy5Kr{m7A{d|zod!*dUPq|e1k#+-&H&q&>?TCvR9jeDIHo0u0*XAR4dQsuUa z+_GM1jK@)==}X<0jo691^u3&D>8nzwyShR9)+%5XxCRBb%+QKSe6|f3e1*`&A z0jq#jz$#!BunJfOtO8bnb*O;TWb6|D*#GQiIFEn-@BRIM^=`(#1|Nb;;BD|0xB#97 zv*0ke7inY~@EmA^7B~v>AP25)hOgi&@D9-61UL>3fEsuJYy!V+LVNHjcoD?lEI0v6+2RG)@!iCC zw5Ti3nW#<{TGAJFKXS)*)?81x^C}6(rV+2A#m(=!47F&-Qg@g3D5edl!}x(l6#3}J z2BSMt2|lBBEE|ay2je)33$2_C-6)QQtGVYWkGGQ0)iMhCg5xKm$b&>_-Vi(rL5pio z@Ss^OayVNllE)K!O;MdRb$h1vnzCA%DDo#^R>%d>G^XpB>8EmI#wP(4;2;S@PbS!i z9I!HwgV$>P`7!DN^~mL(-H;mO;1YV}rl|#Cp5<)A)9bK}Ua-wFyURK~vW9DR8ok!w zG}OKhr_pN-wky|Rxw5Jl2$X@%T0feNdP^6N)(-8--m;-SY-{)UEb3Bk>1`UKl%KlW5kvJmYn)nAMp6*uaye7FQMe#tZStc(1lN&i z%$tsO$WE^dPp6pnsq3gtL*jqm4aS+~m>CU{=dCE_NhnbS$ayaLVwCtz?#p>W11v`I zJYSUBLyG!cGaf8)vZ#=XXdv*~_j%7n2j3};$<4GGnaMmrs$(F*6D7koH491{E0U*S z$%r_oMcBj(sptc37Co{Wz&Io zN;XBz+aeUPbos-FXUxLx$4tX{KJIDVQPbse8&gdhB{vGn=S3Jbqh2AEN`xv`_Dxmu z<~y1MBGlve@0*;OnwY9oaIs-|vj$l(sFEknHDy~UT^Ji=HP)2}rKE#p69v~x!ud2D 
zsGe5CRB;({sYP7youJWl_%v^oV&OE?4s|8Zi?JeaXmNQQs+BBRY1Jqj;G!9^R@ptV7X#);V*YnPz{B zUTK=^YnHTmFEh*jp1$HN^T>rtkum~p(~^!`{DwkB?jRxY9mk#twDl6(mF2|vkGNiW z!S|cFQ=-wMZg}>ZQAd-PE&}Ps(K{t(2G=WWx&5n-*)r)aRh=}+PZur6?9YtMqIY&+ z4DSK`YsQLUlimHYcnF5~x>2=m{9Z@X3Wn68(|sTnqt~lxh2~3yDw#f{^XO*TzcwsY ziTAcjmOpglp@a3=nPZ3R$4<^3gjaJRF2DtIwV-aa7tf;PI3kz~IuXu!x_F|Vwc;q~ cpPkt|YMZ{mnNNpeR&UJq48-CrPAje0zi%ujX#fBK From d3a869b0a2ecaf24ae5ab39ad3025b93436064db Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Tue, 6 Jun 2023 22:52:30 +0530 Subject: [PATCH 03/84] remove the search related methods from zenodo class, currently we will just focus on the downloading functionality. If required searching functionalities can be added --- tvb_library/tvb/datasets/__init__.py | 1 - tvb_library/tvb/datasets/utils.py | 171 --------------------------- tvb_library/tvb/datasets/zenodo.py | 149 +++++------------------ 3 files changed, 28 insertions(+), 293 deletions(-) delete mode 100644 tvb_library/tvb/datasets/utils.py diff --git a/tvb_library/tvb/datasets/__init__.py b/tvb_library/tvb/datasets/__init__.py index cd42a007bc..ec00e5f91b 100644 --- a/tvb_library/tvb/datasets/__init__.py +++ b/tvb_library/tvb/datasets/__init__.py @@ -1,2 +1 @@ -from .utils import * from .zenodo import Record, Zenodo diff --git a/tvb_library/tvb/datasets/utils.py b/tvb_library/tvb/datasets/utils.py deleted file mode 100644 index 988f2c2daa..0000000000 --- a/tvb_library/tvb/datasets/utils.py +++ /dev/null @@ -1,171 +0,0 @@ -import requests -from pathlib import Path -import hashlib -import urllib -from tqdm import tqdm - -""" -functions related to hashes functions - -""" - -USER_AGENT = "TVB_ROOT/TVB_LIBRARY" - -def calculate_md5(file_path:Path, chunk_size:int =1024) -> str : - """ - A function to calculate the md5 hash of a file. 
- - """ - m = hashlib.md5() - with open(file_path, "rb") as f: - for chunk in iter(lambda : f.read(chunk_size), b""): - m.update(chunk) - return m.hexdigest(); - - - - -def calculate_sha256(file_path:Path, chunk_size:int =1024) -> str: - """ - / A function to calculate the sha256 hash of a file - """ - s = hashlib.sha256() - with open(file_path, "rb") as f: - for chunk in iter(lambda : f.read(chunk_size), b""): - s.update(chunk) - return s.hexdigest(); - - -def calculate_sha1(file_path:Path, chunk_size:int=1024)->str: - s = hashlib.sha1() - - with open(file_path, "rb") as f: - for chunk in iter(lambda : f.read(chunk_size), b""): - m.update(chunk) - - return s.hexdigest() - - - -def calculate_sha224(file_path:Path, chunk_size:int=1024)->str: - s = hashlib.sha224() - - with open(file_path, "rb") as f: - for chunk in iter(lambda : f.read(chunk_size), b""): - m.update(chunk) - - return s.hexdigest() - - -def calculate_sha384(file_path:Path, chunk_size:int=1024)->str: - s = hashlib.sha384() - - with open(file_path, "rb") as f: - for chunk in iter(lambda : f.read(chunk_size), b""): - m.update(chunk) - - return s.hexdigest() - -# -def calculate_sha512(file_path:Path, chunk_size:int=1024): - s = hashlib.sha512() - - with open(file_path, "rb") as f: - for chunk in iter(lambda : f.read(chunk_size), b""): - m.update(chunk) - - return s.hexdigest() -#. - -# okay there are some stuff which would consider generic SHA hash; link -https://github.com/zenodo/zenodo/issues/1985#issuecomment-796882811 - - - -AVAILABLE_HASH_FUNCTIONS = {"md5": calculate_md5, "sha1": calculate_sha1,"sha224":calculate_sha224, "sha256":calculate_sha256, "sha384":calculate_sha384, "sha512": calculate_sha512} # can extend this further - - -def convert_to_pathlib(file_path: str) ->Path: - """ - convert the file_path to Path datatype - """ - - if (type(file_path)!= Path): - return Path(file_path) - return file_path - - - -#should we keep a way to download a file without having to check the checksum? 
- -def check_integrity(file_loc, checksum:str, hash_function="md5")->bool: - """ - This function checks if the file at `file_loc` has same checksum. - """ - - if hash_function not in AVAILABLE_HASH_FUNCTIONS.keys(): - raise AttributeError(f"incorrect hash function value, must be one of the md5, sha1,sha224,sha256, sha384, sha512, received {hash_functio}") - - if hash_function== "md5": - return calculate_md5(file_loc)==checksum - - if hash_function == "sha1": - return calculate_sha1(file_loc) == checksum - - if hash_function == "sha224": - return calculate_sha224(file_loc) == checksum - - if hash_function == "sha256": - return calculate_sha256(file_loc) == checksum - - if hash_function == "sha384": - return calculate_sha384(file_loc) == checksum - - if hash_function == "sha512": - return calculate_sha512(file_loc) == checksum - - - - -def download_file(url, checksum, hash_function, root): - if hash_function not in AVAILABLE_HASH_FUNCTIONS.keys(): - raise AttributeError(f"incorrect hash function value, must be one of the md5, sha1,sha224,sha256, sha384, sha512, received {hash_functio}") - - root = Path(root) - - if (not root.is_dir()): - root.mkdir(parents=True) - - file_name = url.split("/")[-1] - file_loc = root/file_name - - if (file_loc.is_file() and check_integrity(file_loc, checksum, hash_function)): - print(f"File {file_name} already downloaded at location {file_loc}") - return - - _urlretrieve(url, file_loc) - - #ToDO : what to do when the hash of the downloaded file doesnt match with the online value? discard the file ? warning the user? both? - - print(f"file {file_loc} downloaded successfully") - - - -# following functions are inspired from the torchvision. 
-def _save_response_content( - content, - destination, - length= None, -) : - with open(destination, "wb") as fh, tqdm(total=length) as pbar: - for chunk in content: - # filter out keep-alive new chunks - if not chunk: - continue - - fh.write(chunk) - pbar.update(len(chunk)) - - -def _urlretrieve(url, file_loc, chunk_size = 1024 * 32): - with urllib.request.urlopen(urllib.request.Request(url, headers={"User-Agent": USER_AGENT})) as response: - _save_response_content(iter(lambda: response.read(chunk_size), b""), file_loc, length=response.length) diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 2f8dabc0cb..37787e18d0 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -1,14 +1,9 @@ # code from https://github.com/space-physics/pyzenodo3 and https://github.com/space-physics/pyzenodo3/pull/9 # code is copied here because the repo is inactive and author is not responding; hence no maintainance guarantee. - import requests import re -from bs4 import BeautifulSoup -from bs4.element import Tag -from urllib.parse import urlencode +import pooch from pathlib import Path -from .utils import download_file - BASE_URL = "https://zenodo.org/api/" @@ -18,151 +13,63 @@ def __init__(self, data, zenodo, base_url: str = BASE_URL) -> None: self.data = data self._zenodo = zenodo - def _row_to_version(self, row: Tag) -> dict[str, str]: - link = row.select("a")[0] - linkrec = row.select("a")[0].attrs["href"] - if not linkrec: - raise KeyError("record not found in parsed HTML") - - texts = row.select("small") - recmatch = re.match(r"/record/(\d*)", linkrec) - if not recmatch: - raise LookupError("record match not found in parsed HTML") - - recid = recmatch.group(1) - - return { - "recid": recid, - "name": link.text, - "doi": texts[0].text, - "date": texts[1].text, - "original_version": self._zenodo.get_record(recid).original_version(), - } - - def get_versions(self) -> list: - url = 
f"{self.base_url}srecords?all_versions=1&size=100&q=conceptrecid:{self.data['conceptrecid']}" - - data = requests.get(url).json() - - return [Record(hit, self._zenodo) for hit in data["hits"]["hits"]] - - def get_versions_from_webpage(self) -> list[dict]: - """Get version details from Zenodo webpage (it is not available in the REST api)""" - res = requests.get("https://zenodo.org/record/" + self.data["conceptrecid"]) - soup = BeautifulSoup(res.text, "html.parser") - version_rows = soup.select(".well.metadata > table.table tr") - if len(version_rows) == 0: # when only 1 version - return [ - { - "recid": self.data["id"], - "name": "1", - "doi": self.data["doi"], - "date": self.data["created"], - "original_version": self.original_version(), - } - ] - return [self._row_to_version(row) for row in version_rows if len(row.select("td")) > 1] - - def original_version(self): #TODO: check the implementation once again. - for identifier in self.data["metadata"]["related_identifiers"]: - if identifier["relation"] == "isSupplementTo": - return re.match(r".*/tree/(.*$)", identifier["identifier"]).group(1) - return None + + def describe(self): + + return self.data['metadata']['description'] + def __str__(self): return str(self.data) # TODO: pretty print? Format the json to more readable version. def download(self, root="./"): _root = Path(root) - print(self.data) + #print(self.data) if 'files' not in self.data: raise AttributeError("No files to download! Please check if the id entered is correct!") - for file in self.data['files']: + + for file in self.data["files"]: url = file['links']['self'] - hash_function, checksum = file['checksum'].split(":") - # now we will download the files to the root. - file_name_zenodo = file['key'] + known_hash = file['checksum'] + file_name = file['key'] - if file_name_zenodo in list(_root.iterdir()) and check_integrity(_root+file_name_zenodo,checksum, hash_function): - print(f"{file_name_zenodo} already exists at {root} having same checksum. 
Hence skipping the download!") - continue # the file already exists at the given location and checksum also matches! + pooch.retrieve(url= url, known_hash= known_hash, progressbar=True) - download_file(root= root,url= url, checksum = checksum, hash_function=hash_function) - - - -class Zenodo: - def __init__(self, api_key: str = "", base_url: str = BASE_URL) -> None: - self.base_url = base_url - self._api_key = api_key - self.re_github_repo = re.compile(r".*github.com/(.*?/.*?)[/$]") - - def search(self, search: str) -> list[Record]: - """search Zenodo record for string `search` - - :param search: string to search - :return: Record[] results - """ - search = search.replace("/", " ") # zenodo can't handle '/' in search query - params = {"q": search} - - recs = self._get_records(params) - - if not recs: - raise LookupError(f"No records found for search {search}") - - return recs - def _extract_github_repo(self, identifier): - matches = self.re_github_repo.match(identifier) - if matches: - return matches.group(1) - raise LookupError(f"No records found with {identifier}") - def find_record_by_github_repo(self, search: str): - records = self.search(search) - for record in records: - if ( - "metadata" not in record.data - or "related_identifiers" not in record.data["metadata"] - ): - continue - for identifier in [ - identifier["identifier"] - for identifier in record.data["metadata"]["related_identifiers"] - ]: - repo = self._extract_github_repo(identifier) - if repo and repo.upper() == search.upper(): - return record - raise LookupError(f"No records found in {search}") - def find_record_by_doi(self, doi: str): - params = {"q": f"conceptdoi:{doi.replace('/', '*')}"} - records = self._get_records(params) +class Zenodo: + def __init__(self, api_key: str = "", base_url: str = BASE_URL) -> None: + """ + This class handles all the interactions of the user to the zenodo platform. 
- if len(records) > 0: - return records[0] - else: - params = {"q": "doi:%s" % doi.replace("/", "*")} - return self._get_records(params)[0] + + """ + self.base_url = base_url + self._api_key = api_key + self.re_github_repo = re.compile(r".*github.com/(.*?/.*?)[/$]") + def get_record(self, recid: str) -> Record: - + """ + recid: unique id of the data repository + """ url = self.base_url + "records/" + recid - return Record(requests.get(url).json(), self) - #TODO: can also add get record by user? Will that be useful by any means? def _get_records(self, params: dict[str, str]) -> list[Record]: url = self.base_url + "records?" + urlencode(params) return [Record(hit, self) for hit in requests.get(url).json()["hits"]["hits"]] + + + From 67331b893599e1184949ebac575e080393240619 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Wed, 7 Jun 2023 21:34:17 +0530 Subject: [PATCH 04/84] add method `get_latest_version` to fetch the latest version of the Record --- tvb_library/tvb/datasets/zenodo.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 37787e18d0..783a427fec 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -1,9 +1,12 @@ # code from https://github.com/space-physics/pyzenodo3 and https://github.com/space-physics/pyzenodo3/pull/9 -# code is copied here because the repo is inactive and author is not responding; hence no maintainance guarantee. + + import requests import re import pooch from pathlib import Path +import json + BASE_URL = "https://zenodo.org/api/" @@ -15,32 +18,28 @@ def __init__(self, data, zenodo, base_url: str = BASE_URL) -> None: def describe(self): - return self.data['metadata']['description'] def __str__(self): - return str(self.data) # TODO: pretty print? Format the json to more readable version. + return json.dumps(self.data) # TODO: pretty print? Format the json to more readable version. 
- def download(self, root="./"): - _root = Path(root) - #print(self.data) + def download(self): if 'files' not in self.data: raise AttributeError("No files to download! Please check if the id entered is correct!") - - for file in self.data["files"]: url = file['links']['self'] known_hash = file['checksum'] file_name = file['key'] - pooch.retrieve(url= url, known_hash= known_hash, progressbar=True) - - - + file_path = pooch.retrieve(url= url, known_hash= known_hash, progressbar=True) + print(f"file {file_name} is downloaded at {file_path}") + def get_latest_version(self): + + return Zenodo().get_record(self.data['links']['latest'].split("/")[-1]) @@ -63,6 +62,7 @@ def get_record(self, recid: str) -> Record: recid: unique id of the data repository """ url = self.base_url + "records/" + recid + return Record(requests.get(url).json(), self) From 783aaea8d2594784669a9101ea7f6ae650da795d Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Sun, 11 Jun 2023 21:26:57 +0530 Subject: [PATCH 05/84] add pooch as a dependancy --- tvb_library/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tvb_library/setup.py b/tvb_library/setup.py index 08f98bf48b..a1ce380f38 100644 --- a/tvb_library/setup.py +++ b/tvb_library/setup.py @@ -40,7 +40,7 @@ LIBRARY_TEAM = "Marmaduke Woodman, Jan Fousek, Stuart Knock, Paula Sanz Leon, Viktor Jirsa" LIBRARY_REQUIRED_PACKAGES = ["autopep8", "Deprecated", "docutils", "ipywidgets", "lxml", "mako>=1.1.4", "matplotlib", - "networkx", "numba", "numexpr", "numpy", "pylems", "scipy", "six"] + "networkx", "numba", "numexpr", "numpy", "pooch","pylems", "scipy", "six"] LIBRARY_REQUIRED_EXTRA = ["h5py", "pytest", "pytest-benchmark", "pytest-xdist", "tvb-gdist", "tvb-data"] From 198f0efcc4aa1fa8634bfb2a5d4ccd1676fb7eb7 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Mon, 12 Jun 2023 17:44:45 +0530 Subject: [PATCH 06/84] add tvb class and tests --- tvb_library/tvb/datasets/tvb_data.py | 23 +++++++++ tvb_library/tvb/datasets/zenodo.py | 
49 ++++++++++++++++--- .../tvb/tests/library/datasets/__init__.py | 0 .../tvb/tests/library/datasets/zenodo_test.py | 44 +++++++++++++++++ 4 files changed, 108 insertions(+), 8 deletions(-) create mode 100644 tvb_library/tvb/datasets/tvb_data.py create mode 100644 tvb_library/tvb/tests/library/datasets/__init__.py create mode 100644 tvb_library/tvb/tests/library/datasets/zenodo_test.py diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py new file mode 100644 index 0000000000..7dd6398a98 --- /dev/null +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -0,0 +1,23 @@ +from .zenodo import Zenodo, Record + + +class TVB_Data: + + conceptid = "" + + def __init__(self, version= "2.7", ): + + recid = Zenodo().get_version_info(self.conceptid)[version] + self.rec = Zenodo.get_record(recid) + + + def download(self): + + self.rec.download() + + def fetch_data(self): + pass + + + + diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 783a427fec..c7cf418027 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -11,37 +11,45 @@ class Record: - def __init__(self, data, zenodo, base_url: str = BASE_URL) -> None: + def __init__(self, data, base_url: str = BASE_URL) -> None: self.base_url = base_url self.data = data - self._zenodo = zenodo - + self.file_loc = {} def describe(self): return self.data['metadata']['description'] def __str__(self): - return json.dumps(self.data) # TODO: pretty print? Format the json to more readable version. + return json.dumps(self.data, indent=2) def download(self): + if 'files' not in self.data: - raise AttributeError("No files to download! Please check if the id entered is correct!") + raise AttributeError("No files to download! Please check if the record id entered is correct! 
or the data is publically accessible") + + for file in self.data["files"]: url = file['links']['self'] known_hash = file['checksum'] file_name = file['key'] - + file_path = pooch.retrieve(url= url, known_hash= known_hash, progressbar=True) + + self.file_loc['file_name'] = file_path + print(f"file {file_name} is downloaded at {file_path}") + def get_latest_version(self): return Zenodo().get_record(self.data['links']['latest'].split("/")[-1]) + def __eq__(self, record_b): + return (self.data == record_b.data) @@ -63,13 +71,38 @@ def get_record(self, recid: str) -> Record: """ url = self.base_url + "records/" + recid - return Record(requests.get(url).json(), self) + return Record(requests.get(url).json()) def _get_records(self, params: dict[str, str]) -> list[Record]: url = self.base_url + "records?" + urlencode(params) - return [Record(hit, self) for hit in requests.get(url).json()["hits"]["hits"]] + return [Record(hit) for hit in requests.get(url).json()["hits"]["hits"]] + + + + + def get_versions_info(self, recid): + + recid = self.get_record(recid).data['metadata']['relations']['version'][0]['parent']['pid_value'] + + print(recid) + + versions = {} + + url = f"{self.base_url}records?q=conceptrecid:{recid}&all_versions=true" + + + for hit in requests.get(url).json()['hits']['hits']: + + version = hit['metadata']['version'] + recid = hit['doi'].split(".")[-1] + + versions[version] = recid + + + return versions + diff --git a/tvb_library/tvb/tests/library/datasets/__init__.py b/tvb_library/tvb/tests/library/datasets/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tvb_library/tvb/tests/library/datasets/zenodo_test.py b/tvb_library/tvb/tests/library/datasets/zenodo_test.py new file mode 100644 index 0000000000..ce5baccab8 --- /dev/null +++ b/tvb_library/tvb/tests/library/datasets/zenodo_test.py @@ -0,0 +1,44 @@ +from tvb.datasets import Zenodo, Record +from pathlib import Path + + +class TestZenodo(BaseTestCase): + + def 
test_get_record(self): + + zenodo = Zenodo() + rec = zenodo.get_record("7574266") + + assert type(rec) == Record + assert rec.data["doi"] == "10.5281/zenodo.7574266" + + del rec + del zenodo + + + def test_get_versions(self): + + zenodo = Zenodo() + versions = zenodo.get_versions_info() + + assert type(versions) == dict + assert versions == {'2.0.1': '3497545', '1.5.9.b': '3474071', '2.0.0': '3491055', '2.0.3': '4263723', '2.0.2': '3688773', '1.5.9': '3417207', '2.7': '7574266'} + + del zenodo + del versions + +class TestRecord(BaseTestCase): + + + def test_download(self): + + zen = Zenodo() + + rec = zenodo.get_record("7574266") + + rec.download() + + for file_name, file_path in rec.file_loc: + assert Path(file_path).is_file() + + From 80f3e3ff19f78c7e733566dde1c9914ffec6a9f4 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Mon, 12 Jun 2023 17:54:04 +0530 Subject: [PATCH 07/84] minor nits and fixes --- tvb_library/tvb/datasets/__init__.py | 1 + tvb_library/tvb/datasets/tvb_data.py | 6 +++--- tvb_library/tvb/datasets/zenodo.py | 6 +++--- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/tvb_library/tvb/datasets/__init__.py b/tvb_library/tvb/datasets/__init__.py index ec00e5f91b..e2d2cf8117 100644 --- a/tvb_library/tvb/datasets/__init__.py +++ b/tvb_library/tvb/datasets/__init__.py @@ -1 +1,2 @@ from .zenodo import Record, Zenodo +from .tvb_data import TVB_Data diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 7dd6398a98..e4357dbb61 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -3,12 +3,12 @@ class TVB_Data: - conceptid = "" + conceptid = "3417206" def __init__(self, version= "2.7", ): - recid = Zenodo().get_version_info(self.conceptid)[version] - self.rec = Zenodo.get_record(recid) + recid = Zenodo().get_versions_info(self.conceptid)[version] + self.rec = Zenodo().get_record(recid) def download(self): diff --git a/tvb_library/tvb/datasets/zenodo.py 
b/tvb_library/tvb/datasets/zenodo.py index c7cf418027..fa9023aae8 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -83,11 +83,11 @@ def _get_records(self, params: dict[str, str]) -> list[Record]: def get_versions_info(self, recid): - + """ + recid: unique id of the data repository + """ recid = self.get_record(recid).data['metadata']['relations']['version'][0]['parent']['pid_value'] - print(recid) - versions = {} url = f"{self.base_url}records?q=conceptrecid:{recid}&all_versions=true" From c906f195e0b6596385d1ec7a20fdcb224633a41c Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Fri, 16 Jun 2023 08:41:28 +0530 Subject: [PATCH 08/84] added todo in fetch_data method --- tvb_library/tvb/datasets/base.py | 19 +++++ tvb_library/tvb/datasets/tvb_data.py | 117 ++++++++++++++++++++++++--- tvb_library/tvb/datasets/zenodo.py | 9 ++- 3 files changed, 133 insertions(+), 12 deletions(-) create mode 100644 tvb_library/tvb/datasets/base.py diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py new file mode 100644 index 0000000000..0ea746bc11 --- /dev/null +++ b/tvb_library/tvb/datasets/base.py @@ -0,0 +1,19 @@ + + +class ZenodoDataset: + + def __init__(self, version): + self.version = version + + def download(self): + pass + + def fetch_data(self, file_name, extract_dir): + pass + + def get_version(self): + return self.version + + def get_recid(self): + return self.recid + diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index e4357dbb61..305c088e63 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -1,23 +1,122 @@ -from .zenodo import Zenodo, Record +import requests +import json +import pooch +from pathlib import Path +import logging +from zipfile import ZipFile +from .base import ZenodoDataset +from .zenodo import Zenodo, Record, BASE_URL -class TVB_Data: +class TVB_Data(ZenodoDataset): - conceptid = "3417206" + CONCEPTID = 
"3417206" - def __init__(self, version= "2.7", ): + def __init__(self, version= "2.7"): + """ + Constructor for TVB_Data class - recid = Zenodo().get_versions_info(self.conceptid)[version] - self.rec = Zenodo().get_record(recid) + parameters + ----------- + version: str + - Version number of the dataset, Default value is 2. - def download(self): + """ + super().__init__(version) + try: + self.recid = self.read_cached_response()[version]['conceptrecid'] + except: + logging.warning("Data not found in cached response, updating the cached responses") + self.recid = Zenodo().get_versions_info(self.CONCEPTID)[version] + self.update_cached_response() + + self.rec = Record(self.read_cached_response()[self.version]) + logging.info(f"instantiated TVB_Data class with version {version}") + + def download(self): + """ + Downloads the dataset to the cached location, skips download is file already present at the path. + """ self.rec.download() - def fetch_data(self): - pass + def fetch_data(self, file_name=None, extract_dir=None): + """ + Fetches the data + + parameters: + ----------- + file_name: str + - Name of the file from the downloaded zip file to fetch. If `None`, extracts whole archive. Default is `None` + extract_dir: str + - Path where you want to extract the archive, if `None` extracts the archive to current working directory. Default is `None` + + + returns: Pathlib.Path + path of the file which was extracted + """ + + #TODO: errrors when absolute path given. 
+ try: + file_path = self.rec.file_loc['tvb_data.zip'] + except: + self.download() + file_path = self.rec.file_loc['tvb_data.zip'] + + + if file_name == None: + ZipFile(file_path).extractall(path=extract_dir) + if extract_dir==None: + return Path.cwd() + return Path.cwd()/ Path(extract_dir) + + with ZipFile(file_path) as zf: + file_names_in_zip = zf.namelist() + zf.close() + + file_names_in_zip = {str(Path(i).name): i for i in file_names_in_zip} + ZipFile(file_path).extract(file_names_in_zip[file_name]) + return Path.cwd() / file_names_in_zip[file_name] + + + def update_cached_response(self): + """ + gets responses from zenodo server and saves them to cache file. + """ + + file_dir = pooch.os_cache("pooch")/ "tvb_cached_responses.txt" + + responses = {} + + url = f"{BASE_URL}records?q=conceptrecid:{self.CONCEPTID}&all_versions=true" + + for hit in requests.get(url).json()['hits']['hits']: + version = hit['metadata']['version'] + response = hit + + responses[version] = response + + with open(file_dir, "w") as fp: + json.dump(responses, fp) + fp.close() + + return + + def read_cached_response(self): + """ + reads responses from the cache file. 
+ + """ + + file_dir = pooch.os_cache("pooch") / "tvb_cached_responses.txt" + + with open(file_dir) as fp: + responses = json.load(fp) + fp.close() + responses = dict(responses) + return responses diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index fa9023aae8..859dd6ea7b 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -36,8 +36,8 @@ def download(self): file_name = file['key'] file_path = pooch.retrieve(url= url, known_hash= known_hash, progressbar=True) - - self.file_loc['file_name'] = file_path + + self.file_loc[f'{file_name}'] = file_path print(f"file {file_name} is downloaded at {file_path}") @@ -62,7 +62,6 @@ def __init__(self, api_key: str = "", base_url: str = BASE_URL) -> None: """ self.base_url = base_url self._api_key = api_key - self.re_github_repo = re.compile(r".*github.com/(.*?/.*?)[/$]") def get_record(self, recid: str) -> Record: @@ -85,7 +84,11 @@ def _get_records(self, params: dict[str, str]) -> list[Record]: def get_versions_info(self, recid): """ recid: unique id of the data repository + """ + # needs ineternet + + recid = self.get_record(recid).data['metadata']['relations']['version'][0]['parent']['pid_value'] versions = {} From faaec3b9210a596d184215e6c900024b753a1a07 Mon Sep 17 00:00:00 2001 From: Abhijit Deo <72816663+abhi-glitchhg@users.noreply.github.com> Date: Fri, 16 Jun 2023 10:09:12 +0530 Subject: [PATCH 09/84] Update tvb_library/tvb/datasets/tvb_data.py --- tvb_library/tvb/datasets/tvb_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 305c088e63..7011bc766a 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -20,7 +20,7 @@ def __init__(self, version= "2.7"): ----------- version: str - - Version number of the dataset, Default value is 2. 
+ - Version number of the dataset, Default value is 2.7 """ super().__init__(version) From a3e060a6c8e30ddd3714bbb84f38d13e278219dd Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Fri, 16 Jun 2023 10:27:25 +0530 Subject: [PATCH 10/84] fixed the absolute path error --- tvb_library/tvb/datasets/tvb_data.py | 27 ++++++++++++++++++++++----- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 305c088e63..3475e97e78 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -57,27 +57,44 @@ def fetch_data(self, file_name=None, extract_dir=None): path of the file which was extracted """ - #TODO: errrors when absolute path given. + + try: file_path = self.rec.file_loc['tvb_data.zip'] except: self.download() file_path = self.rec.file_loc['tvb_data.zip'] - + if (extract_dir!=None): + extract_dir = Path(extract_dir).expanduser() + if file_name == None: ZipFile(file_path).extractall(path=extract_dir) if extract_dir==None: return Path.cwd() - return Path.cwd()/ Path(extract_dir) + if extract_dir.is_absolute(): + return extract_dir + + return Path.cwd()/ extract_dir with ZipFile(file_path) as zf: file_names_in_zip = zf.namelist() zf.close() file_names_in_zip = {str(Path(i).name): i for i in file_names_in_zip} - ZipFile(file_path).extract(file_names_in_zip[file_name]) - return Path.cwd() / file_names_in_zip[file_name] + if extract_dir==None: + ZipFile(file_path).extract(file_names_in_zip[file_name]) + + ZipFile(file_path).extract(file_names_in_zip[file_name], path = extract_dir) + + + if extract_dir == None: + return Path.cwd() / file_names_in_zip[file_name] + if extract_dir.is_absolute(): + return extract_dir / file_names_in_zip[file_name] + + + return Path.cwd()/ extract_dir / file_names_in_zip[file_name] def update_cached_response(self): From ae10817e480289978e6464a4595ad2ff1f6383e7 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Sat, 17 Jun 
2023 11:47:11 +0530 Subject: [PATCH 11/84] added header --- tvb_library/tvb/datasets/base.py | 30 ++++++++++++++++++++++++++++ tvb_library/tvb/datasets/tvb_data.py | 30 ++++++++++++++++++++++++++++ tvb_library/tvb/datasets/zenodo.py | 30 ++++++++++++++++++++++++++++ 3 files changed, 90 insertions(+) diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py index 0ea746bc11..8499d12ead 100644 --- a/tvb_library/tvb/datasets/base.py +++ b/tvb_library/tvb/datasets/base.py @@ -1,3 +1,33 @@ +# -*- coding: utf-8 -*- +# +# +# TheVirtualBrain-Scientific Package. This package holds all simulators, and +# analysers necessary to run brain-simulations. You can use it stand alone or +# in conjunction with TheVirtualBrain-Framework Package. See content of the +# documentation-folder for more details. See also http://www.thevirtualbrain.org +# +# (c) 2012-2023, Baycrest Centre for Geriatric Care ("Baycrest") and others +# +# This program is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software Foundation, +# either version 3 of the License, or (at your option) any later version. +# This program is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. See the GNU General Public License for more details. +# You should have received a copy of the GNU General Public License along with this +# program. If not, see . +# +# +# CITATION: +# When using The Virtual Brain for scientific publications, please cite it as explained here: +# https://www.thevirtualbrain.org/tvb/zwei/neuroscience-publications +# +# + +""" +.. 
moduleauthor:: Abhijit Deo +""" + class ZenodoDataset: diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 3475e97e78..308550a8c3 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -1,3 +1,33 @@ +# -*- coding: utf-8 -*- +# +# +# TheVirtualBrain-Scientific Package. This package holds all simulators, and +# analysers necessary to run brain-simulations. You can use it stand alone or +# in conjunction with TheVirtualBrain-Framework Package. See content of the +# documentation-folder for more details. See also http://www.thevirtualbrain.org +# +# (c) 2012-2023, Baycrest Centre for Geriatric Care ("Baycrest") and others +# +# This program is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software Foundation, +# either version 3 of the License, or (at your option) any later version. +# This program is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. See the GNU General Public License for more details. +# You should have received a copy of the GNU General Public License along with this +# program. If not, see . +# +# +# CITATION: +# When using The Virtual Brain for scientific publications, please cite it as explained here: +# https://www.thevirtualbrain.org/tvb/zwei/neuroscience-publications +# +# + +""" +.. moduleauthor:: Abhijit Deo +""" + import requests import json import pooch diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 859dd6ea7b..f3c103c97e 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -1,3 +1,33 @@ +## -*- coding: utf-8 -*- +# +# +# TheVirtualBrain-Scientific Package. This package holds all simulators, and +# analysers necessary to run brain-simulations. 
You can use it stand alone or +# in conjunction with TheVirtualBrain-Framework Package. See content of the +# documentation-folder for more details. See also http://www.thevirtualbrain.org +# +# (c) 2012-2023, Baycrest Centre for Geriatric Care ("Baycrest") and others +# +# This program is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software Foundation, +# either version 3 of the License, or (at your option) any later version. +# This program is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. See the GNU General Public License for more details. +# You should have received a copy of the GNU General Public License along with this +# program. If not, see . +# +# +# CITATION: +# When using The Virtual Brain for scientific publications, please cite it as explained here: +# https://www.thevirtualbrain.org/tvb/zwei/neuroscience-publications +# +# + +""" +.. moduleauthor:: Abhijit Deo +""" + # code from https://github.com/space-physics/pyzenodo3 and https://github.com/space-physics/pyzenodo3/pull/9 From c57ed91e3b7bc540dbfbd148821b39403af7883e Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Sat, 17 Jun 2023 11:48:47 +0530 Subject: [PATCH 12/84] added header --- .../tvb/tests/library/datasets/zenodo_test.py | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/tvb_library/tvb/tests/library/datasets/zenodo_test.py b/tvb_library/tvb/tests/library/datasets/zenodo_test.py index ce5baccab8..6d3ff779bd 100644 --- a/tvb_library/tvb/tests/library/datasets/zenodo_test.py +++ b/tvb_library/tvb/tests/library/datasets/zenodo_test.py @@ -1,3 +1,37 @@ + +# -*- coding: utf-8 -*- +# +# +# TheVirtualBrain-Scientific Package. This package holds all simulators, and +# analysers necessary to run brain-simulations. 
You can use it stand alone or +# in conjunction with TheVirtualBrain-Framework Package. See content of the +# documentation-folder for more details. See also http://www.thevirtualbrain.org +# +# (c) 2012-2023, Baycrest Centre for Geriatric Care ("Baycrest") and others +# +# This program is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software Foundation, +# either version 3 of the License, or (at your option) any later version. +# This program is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. See the GNU General Public License for more details. +# You should have received a copy of the GNU General Public License along with this +# program. If not, see . +# +# +# CITATION: +# When using The Virtual Brain for scientific publications, please cite it as explained here: +# https://www.thevirtualbrain.org/tvb/zwei/neuroscience-publications +# +# + +""" +.. moduleauthor:: Abhijit Deo +""" + + + + from tvb.datasets import Zenodo, Record from pathlib import Path From 2d7ebdc8cce67ad4ae9830766a7db5d949649726 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Sat, 17 Jun 2023 12:00:02 +0530 Subject: [PATCH 13/84] added DATASETS_FOLDER attribute to config settings. 
--- tvb_library/tvb/basic/config/profile_settings.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tvb_library/tvb/basic/config/profile_settings.py b/tvb_library/tvb/basic/config/profile_settings.py index 052ef6a7ab..eaf8018775 100644 --- a/tvb_library/tvb/basic/config/profile_settings.py +++ b/tvb_library/tvb/basic/config/profile_settings.py @@ -95,6 +95,10 @@ def __init__(self): # The number of logs in a message batch that are sent to the server self.ELASTICSEARCH_BUFFER_THRESHOLD = self.manager.get_attribute(stored.KEY_ELASTICSEARCH_BUFFER_THRESHOLD, 1000000, int) + # Directory where all the datasets will be extracted/unzipped. + self.DATASETS_FOLDER = os.path(self.TVB_STORAGE, "DATASETS") + + @property def BIN_FOLDER(self): """ From cd536afeb087b4b2c6823941bdbab4d6cbb79cc3 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Sat, 17 Jun 2023 20:12:01 +0530 Subject: [PATCH 14/84] some improvements in tests, now from_cls method of connectivity loads data using new method --- .../tvb/basic/config/profile_settings.py | 2 +- tvb_library/tvb/datasets/__init__.py | 2 +- tvb_library/tvb/datasets/base.py | 18 +++--- tvb_library/tvb/datasets/tvb_data.py | 58 +++++++++++++------ tvb_library/tvb/datatypes/connectivity.py | 6 +- .../tvb/tests/library/datasets/zenodo_test.py | 16 ++--- 6 files changed, 64 insertions(+), 38 deletions(-) diff --git a/tvb_library/tvb/basic/config/profile_settings.py b/tvb_library/tvb/basic/config/profile_settings.py index eaf8018775..880cb6303b 100644 --- a/tvb_library/tvb/basic/config/profile_settings.py +++ b/tvb_library/tvb/basic/config/profile_settings.py @@ -96,7 +96,7 @@ def __init__(self): self.ELASTICSEARCH_BUFFER_THRESHOLD = self.manager.get_attribute(stored.KEY_ELASTICSEARCH_BUFFER_THRESHOLD, 1000000, int) # Directory where all the datasets will be extracted/unzipped. 
- self.DATASETS_FOLDER = os.path(self.TVB_STORAGE, "DATASETS") + self.DATASETS_FOLDER = os.path.join(self.TVB_STORAGE, "DATASETS") @property diff --git a/tvb_library/tvb/datasets/__init__.py b/tvb_library/tvb/datasets/__init__.py index e2d2cf8117..995b08e09f 100644 --- a/tvb_library/tvb/datasets/__init__.py +++ b/tvb_library/tvb/datasets/__init__.py @@ -1,2 +1,2 @@ from .zenodo import Record, Zenodo -from .tvb_data import TVB_Data +from .tvb_data import TVBZenodoDataset diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py index 8499d12ead..6ca123c4fe 100644 --- a/tvb_library/tvb/datasets/base.py +++ b/tvb_library/tvb/datasets/base.py @@ -30,20 +30,22 @@ -class ZenodoDataset: +from tvb.basic.logger.builder import get_logger +from tvb.basic.profile import TvbProfile - def __init__(self, version): +class BaseDataset: + + def __init__(self, version, target_download=None): + + self.log = get_logger(self.__class__.__module__) + self.cached_files = None self.version = version def download(self): pass - def fetch_data(self, file_name, extract_dir): + def fetch_data(self): pass - + def get_version(self): return self.version - - def get_recid(self): - return self.recid - diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 308550a8c3..4ae3c8bedd 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -27,18 +27,17 @@ """ .. 
moduleauthor:: Abhijit Deo """ - +import os import requests import json import pooch from pathlib import Path -import logging from zipfile import ZipFile -from .base import ZenodoDataset +from .base import BaseDataset from .zenodo import Zenodo, Record, BASE_URL -class TVB_Data(ZenodoDataset): +class TVBZenodoDataset(BaseDataset): CONCEPTID = "3417206" @@ -54,17 +53,23 @@ def __init__(self, version= "2.7"): """ super().__init__(version) + self.cached_file = pooch.os_cache("pooch")/ "tvb_cached_responses.txt" + try: self.recid = self.read_cached_response()[version]['conceptrecid'] - except: - logging.warning("Data not found in cached response, updating the cached responses") + + except KeyError: + self.log.warning(f"Failed to read data from cached response.") self.recid = Zenodo().get_versions_info(self.CONCEPTID)[version] - self.update_cached_response() + self.update_cached_response() + except: + self.log.warning(f"Failed to get the desired version {version} of TVB_Data, please check if version {version} is available as a public record on zenodo.org or Please check your internet connection") + + # add logging errors method by catching the exact exceptions. self.rec = Record(self.read_cached_response()[self.version]) - - logging.info(f"instantiated TVB_Data class with version {version}") - + + print(type(self)) def download(self): """ Downloads the dataset to the cached location, skips download is file already present at the path. 
@@ -95,13 +100,14 @@ def fetch_data(self, file_name=None, extract_dir=None): self.download() file_path = self.rec.file_loc['tvb_data.zip'] - if (extract_dir!=None): - extract_dir = Path(extract_dir).expanduser() + if (extract_dir==None): + extract_dir = TvbProfile.current.DATASETS_FOLDER + + extract_dir = Path(extract_dir).expanduser() if file_name == None: ZipFile(file_path).extractall(path=extract_dir) - if extract_dir==None: - return Path.cwd() + if extract_dir.is_absolute(): return extract_dir @@ -118,8 +124,6 @@ def fetch_data(self, file_name=None, extract_dir=None): ZipFile(file_path).extract(file_names_in_zip[file_name], path = extract_dir) - if extract_dir == None: - return Path.cwd() / file_names_in_zip[file_name] if extract_dir.is_absolute(): return extract_dir / file_names_in_zip[file_name] @@ -132,7 +136,7 @@ def update_cached_response(self): gets responses from zenodo server and saves them to cache file. """ - file_dir = pooch.os_cache("pooch")/ "tvb_cached_responses.txt" + file_dir = self.cached_file responses = {} @@ -147,7 +151,7 @@ def update_cached_response(self): with open(file_dir, "w") as fp: json.dump(responses, fp) fp.close() - + self.log.warning("Updated the cache response file") return def read_cached_response(self): @@ -156,7 +160,7 @@ def read_cached_response(self): """ - file_dir = pooch.os_cache("pooch") / "tvb_cached_responses.txt" + file_dir = self.cached_file with open(file_dir) as fp: @@ -167,3 +171,19 @@ def read_cached_response(self): responses = dict(responses) return responses + + + def describe(self): + return self.rec.describe() + + def get_record(self): + return self.recid + + def __str__(self): + return f"TVB Data version : {self.version}" + + def __eq__(self, other): + if isinstace(other, TVBZenodoDataset): + return self.rec == tvb_data.rec + return False + diff --git a/tvb_library/tvb/datatypes/connectivity.py b/tvb_library/tvb/datatypes/connectivity.py index fec76be6ad..ce6d84a582 100644 --- 
a/tvb_library/tvb/datatypes/connectivity.py +++ b/tvb_library/tvb/datatypes/connectivity.py @@ -40,7 +40,7 @@ from tvb.basic.neotraits.api import Attr, NArray, List, HasTraits, Int, narray_summary_info from tvb.basic.neotraits.ex import TraitAttributeError from tvb.basic.readers import ZipReader, H5Reader, try_get_absolute_path - +from tvb.datasets import TVBZenodoDataset class Connectivity(HasTraits): region_labels = NArray( @@ -719,8 +719,10 @@ def _read(cls, reader): def from_file(cls, source_file="connectivity_76.zip"): result = Connectivity() - source_full_path = try_get_absolute_path("tvb_data.connectivity", source_file) + #source_full_path = try_get_absolute_path("tvb_data.connectivity", source_file) + source_full_path = TVBZenodoDataset().fetch_data(source_file) + if source_file.endswith(".h5"): reader = H5Reader(source_full_path) diff --git a/tvb_library/tvb/tests/library/datasets/zenodo_test.py b/tvb_library/tvb/tests/library/datasets/zenodo_test.py index 6d3ff779bd..50084e7e75 100644 --- a/tvb_library/tvb/tests/library/datasets/zenodo_test.py +++ b/tvb_library/tvb/tests/library/datasets/zenodo_test.py @@ -30,10 +30,11 @@ """ - +#TODO : change the record id to the latest when done testing on local machine. 
:) :wq from tvb.datasets import Zenodo, Record from pathlib import Path +from tvb.tests.library.base_testcase import BaseTestCase class TestZenodo(BaseTestCase): @@ -41,10 +42,10 @@ class TestZenodo(BaseTestCase): def test_get_record(self): zenodo = Zenodo() - rec = zenodo.get_record("7574266") + rec = zenodo.get_record("4263723") assert type(rec) == Record - assert rec.data["doi"] == "10.5281/zenodo.7574266" + assert rec.data["doi"] == "10.5281/zenodo.4263723" del rec del zenodo @@ -53,7 +54,7 @@ def test_get_record(self): def test_get_versions(self): zenodo = Zenodo() - versions = zenodo.get_versions_info() + versions = zenodo.get_versions_info("3491055") assert type(versions) == dict assert versions == {'2.0.1': '3497545', '1.5.9.b': '3474071', '2.0.0': '3491055', '2.0.3': '4263723', '2.0.2': '3688773', '1.5.9': '3417207', '2.7': '7574266'} @@ -68,11 +69,12 @@ def test_download(self): zen = Zenodo() - rec = zenodo.get_record("7574266") + rec = zen.get_record("4263723") - rec.download() - for file_name, file_path in rec.file_loc: + rec.download() + print(rec.file_loc) + for file_name, file_path in rec.file_loc.items(): assert Path(file_path).is_file() From 96e87cb0d3da7cf352d6ae19cc565c6b809d31e7 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Sat, 17 Jun 2023 20:19:52 +0530 Subject: [PATCH 15/84] fix typos --- tvb_library/tvb/datasets/tvb_data.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 46b117998e..3086e96c62 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -66,10 +66,9 @@ def __init__(self, version= "2.7"): except: self.log.warning(f"Failed to get the desired version {version} of TVB_Data, please check if version {version} is available as a public record on zenodo.org or Please check your internet connection") - # add logging errors method by catching the exact exceptions. 
+ #TODO add logging errors method by catching the exact exceptions. self.rec = Record(self.read_cached_response()[self.version]) - print(type(self)) def download(self): """ Downloads the dataset to the cached location, skips download is file already present at the path. From 1af4cb779d7120a6556fd6c6884b49a957cb576a Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Tue, 20 Jun 2023 14:03:23 +0530 Subject: [PATCH 16/84] extract dir parameter when instantiating the class. --- tvb_library/tvb/datasets/base.py | 6 ++++++ tvb_library/tvb/datasets/tvb_data.py | 6 ++---- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py index 6ca123c4fe..93e678bdfd 100644 --- a/tvb_library/tvb/datasets/base.py +++ b/tvb_library/tvb/datasets/base.py @@ -41,6 +41,12 @@ def __init__(self, version, target_download=None): self.cached_files = None self.version = version + if (extract_dir==None): + extract_dir = TvbProfile.current.DATASETS_FOLDER + + self.extract_dir = Path(extract_dir).expanduser() + + def download(self): pass diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 3086e96c62..c7a0ecbc77 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -75,7 +75,7 @@ def download(self): """ self.rec.download() - def fetch_data(self, file_name=None, extract_dir=None): + def fetch_data(self, file_name=None): """ Fetches the data @@ -99,10 +99,8 @@ def fetch_data(self, file_name=None, extract_dir=None): self.download() file_path = self.rec.file_loc['tvb_data.zip'] - if (extract_dir==None): - extract_dir = TvbProfile.current.DATASETS_FOLDER + extract_dir = self.extract_dir - extract_dir = Path(extract_dir).expanduser() if file_name == None: ZipFile(file_path).extractall(path=extract_dir) From 2d2188c2c576849b9169845e72b051b86e458fef Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Fri, 23 Jun 2023 10:33:27 +0530 Subject: [PATCH 
17/84] change download_dir to .cache/tvb from .cache/pooch --- tvb_library/tvb/datasets/base.py | 3 ++- tvb_library/tvb/datasets/tvb_data.py | 10 ++++------ tvb_library/tvb/datasets/zenodo.py | 2 +- 3 files changed, 7 insertions(+), 8 deletions(-) diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py index 93e678bdfd..599c47c466 100644 --- a/tvb_library/tvb/datasets/base.py +++ b/tvb_library/tvb/datasets/base.py @@ -32,10 +32,11 @@ from tvb.basic.logger.builder import get_logger from tvb.basic.profile import TvbProfile +from pathlib import Path class BaseDataset: - def __init__(self, version, target_download=None): + def __init__(self, version, extract_dir=None): self.log = get_logger(self.__class__.__module__) self.cached_files = None diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index c7a0ecbc77..78b07a0cef 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -41,7 +41,7 @@ class TVBZenodoDataset(BaseDataset): CONCEPTID = "3417206" - def __init__(self, version= "2.7"): + def __init__(self, version= "2.7", extract_dir = None): """ Constructor for TVB_Data class @@ -52,19 +52,17 @@ def __init__(self, version= "2.7"): - Version number of the dataset, Default value is 2.7 """ - super().__init__(version) - self.cached_file = pooch.os_cache("pooch")/ "tvb_cached_responses.txt" + super().__init__(version, extract_dir) + self.cached_file = pooch.os_cache("tvb")/ "tvb_cached_responses.txt" try: self.recid = self.read_cached_response()[version]['conceptrecid'] - except KeyError: + except : self.log.warning(f"Failed to read data from cached response.") self.recid = Zenodo().get_versions_info(self.CONCEPTID)[version] self.update_cached_response() - except: - self.log.warning(f"Failed to get the desired version {version} of TVB_Data, please check if version {version} is available as a public record on zenodo.org or Please check your internet connection") #TODO add 
logging errors method by catching the exact exceptions. self.rec = Record(self.read_cached_response()[self.version]) diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index f3c103c97e..1685a2e03f 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -65,7 +65,7 @@ def download(self): known_hash = file['checksum'] file_name = file['key'] - file_path = pooch.retrieve(url= url, known_hash= known_hash, progressbar=True) + file_path = pooch.retrieve(url= url, known_hash= known_hash, path = pooch.os_cache("tvb") ,progressbar = True) self.file_loc[f'{file_name}'] = file_path From d9a14f3a73566036a95b0cadc98d771dd5c8a565 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Thu, 29 Jun 2023 11:23:00 +0530 Subject: [PATCH 18/84] . --- tvb_library/tvb/datasets/tvb_data.py | 54 ++++++------ tvb_library/tvb/datasets/zenodo.py | 37 +++++--- .../library/datasets/TVBZenodoDataset_test.py | 84 +++++++++++++++++++ .../tvb/tests/library/datasets/zenodo_test.py | 5 +- 4 files changed, 142 insertions(+), 38 deletions(-) create mode 100644 tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 78b07a0cef..50f97b02e3 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -27,12 +27,14 @@ """ .. 
moduleauthor:: Abhijit Deo """ + import os import requests import json import pooch from pathlib import Path from zipfile import ZipFile +import shutil from .base import BaseDataset from .zenodo import Zenodo, Record, BASE_URL @@ -53,8 +55,12 @@ def __init__(self, version= "2.7", extract_dir = None): """ super().__init__(version, extract_dir) - self.cached_file = pooch.os_cache("tvb")/ "tvb_cached_responses.txt" - + self.cached_dir = self.extract_dir / ".cache" + self.cached_file = self.cached_dir / "tvb_cached_responses.txt" + + if not self.cached_dir.is_dir(): + self.cached_dir.mkdir(parents=True) + try: self.recid = self.read_cached_response()[version]['conceptrecid'] @@ -67,51 +73,46 @@ def __init__(self, version= "2.7", extract_dir = None): #TODO add logging errors method by catching the exact exceptions. self.rec = Record(self.read_cached_response()[self.version]) - def download(self): + def download(self, path=None): """ - Downloads the dataset to the cached location, skips download is file already present at the path. + Downloads the dataset to `path` """ - self.rec.download() + self.rec.download(path) - def fetch_data(self, file_name=None): + def fetch_data(self, file_name): """ Fetches the data parameters: ----------- file_name: str - - Name of the file from the downloaded zip file to fetch. If `None`, extracts whole archive. Default is `None` + - Name of the file from the downloaded zip file to fetch. extract_dir: str - - Path where you want to extract the archive, if `None` extracts the archive to current working directory. Default is `None` + - Path where you want to extract the archive. If Path is None, dataset is extracted according to the tvb profile configuration returns: Pathlib.Path path of the file which was extracted """ + # TODO: extract dir needs better description. 
- + + extract_dir = self.extract_dir + download_dir = self.cached_dir / "TVB_Data" try: file_path = self.rec.file_loc['tvb_data.zip'] except: - self.download() + self.download(path = download_dir) file_path = self.rec.file_loc['tvb_data.zip'] - extract_dir = self.extract_dir - - - if file_name == None: - ZipFile(file_path).extractall(path=extract_dir) - - if extract_dir.is_absolute(): - return extract_dir - - return Path.cwd()/ extract_dir - with ZipFile(file_path) as zf: file_names_in_zip = zf.namelist() zf.close() + file_name = file_name.strip() + + file_names_in_zip = {str(Path(i).name): i for i in file_names_in_zip} if extract_dir==None: ZipFile(file_path).extract(file_names_in_zip[file_name]) @@ -125,6 +126,10 @@ def fetch_data(self, file_name=None): return Path.cwd()/ extract_dir / file_names_in_zip[file_name] + def delete_data(self): + _dir = self.extract_dir / "tvb_data" + shutil.rmtree(_dir) + def update_cached_response(self): """ @@ -141,7 +146,9 @@ def update_cached_response(self): version = hit['metadata']['version'] response = hit - responses[version] = response + responses[version] = response + + Path(file_dir).touch(exist_ok=True) with open(file_dir, "w") as fp: json.dump(responses, fp) @@ -174,9 +181,6 @@ def describe(self): def get_record(self): return self.recid - def __str__(self): - return f"TVB Data version : {self.version}" - def __eq__(self, other): if isinstace(other, TVBZenodoDataset): return self.rec == tvb_data.rec diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 1685a2e03f..36a6f3db69 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -42,30 +42,31 @@ class Record: def __init__(self, data, base_url: str = BASE_URL) -> None: + """ + Record represents the repsonse from the Zenodo. 
+ """ + self.base_url = base_url self.data = data self.file_loc = {} - def describe(self): - return self.data['metadata']['description'] + - - def __str__(self): - return json.dumps(self.data, indent=2) - - def download(self): + def download(self, path=None): if 'files' not in self.data: raise AttributeError("No files to download! Please check if the record id entered is correct! or the data is publically accessible") + if path == None: + path = pooch.os_cache("tvb") for file in self.data["files"]: url = file['links']['self'] known_hash = file['checksum'] file_name = file['key'] - file_path = pooch.retrieve(url= url, known_hash= known_hash, path = pooch.os_cache("tvb") ,progressbar = True) + file_path = pooch.retrieve(url= url, known_hash= known_hash, path = path,progressbar = True) self.file_loc[f'{file_name}'] = file_path @@ -73,14 +74,24 @@ def download(self): print(f"file {file_name} is downloaded at {file_path}") - def get_latest_version(self): - + def get_latest_version(self): return Zenodo().get_record(self.data['links']['latest'].split("/")[-1]) + + def describe(self): + return self.data['metadata']['description'] + def get_record_id(self): + return self.data['conceptrecid'] + def is_open_access(self): + return self.data['metadata']['access_right'] != "closed" + def __eq__(self, record_b): return (self.data == record_b.data) + def __str__(self): + return json.dumps(self.data, indent=2) + class Zenodo: @@ -115,6 +126,9 @@ def get_versions_info(self, recid): """ recid: unique id of the data repository + + + """ # needs ineternet @@ -130,7 +144,8 @@ def get_versions_info(self, recid): version = hit['metadata']['version'] recid = hit['doi'].split(".")[-1] - + if hit['metadata']['access_right'] == "closed": + continue versions[version] = recid diff --git a/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py b/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py new file mode 100644 index 0000000000..0e6e6542f3 --- /dev/null +++ 
b/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py @@ -0,0 +1,84 @@ + +# -*- coding: utf-8 -*- +# +# +# TheVirtualBrain-Scientific Package. This package holds all simulators, and +# analysers necessary to run brain-simulations. You can use it stand alone or +# in conjunction with TheVirtualBrain-Framework Package. See content of the +# documentation-folder for more details. See also http://www.thevirtualbrain.org +# +# (c) 2012-2023, Baycrest Centre for Geriatric Care ("Baycrest") and others +# +# This program is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software Foundation, +# either version 3 of the License, or (at your option) any later version. +# This program is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. See the GNU General Public License for more details. +# You should have received a copy of the GNU General Public License along with this +# program. If not, see . +# +# +# CITATION: +# When using The Virtual Brain for scientific publications, please cite it as explained here: +# https://www.thevirtualbrain.org/tvb/zwei/neuroscience-publications +# +# + +""" +.. 
moduleauthor:: Abhijit Deo +""" +import socket + +from tvb.datasets import TVBZenodoDataset +from pathlib import Path +from tvb.tests.library.base_testcase import BaseTestCase + + + +def no_internet_decorator(func): + class block_network(socket.socket): + def __init__(self, *args, **kwargs): + raise Exception("Network call blocked") + + socket.socket = block_network + + def iner_func(): + return func + return iner_func + + + + + + +class Test_TVBZenodoDataset(BaseTestCase): + + + def test_extract(self): + + tvb_data = TVBZenodoDataset() + connectivity66_dir = tvb_data.fetch_data("connectivity_66.zip") + assert connectivity66_dir.is_file() + tvb_data.delete_data() + assert not connectivity66_dir.is_file() + + tvb_data = TVBZenodoDataset(version="2.0.3", extract_dir="tvb_data") + connectivity66_dir = tvb_data.fetch_data("connectivity_66.zip") + assert connectivity66_dir.is_file() + tvb_data.delete_data() + assert not connectivity66_dir.is_file() + + tvb_data = TVBZenodoDataset(version="2.0.3", extract_dir="~/tvb_data") + matfile_dir = tvb_data.fetch_data("local_connectivity_80k.mat") + assert matfile_dir.is_file() + tvb_data.delete_data() + assert not matfile_dir.is_file() + + + all_extract = TVBZenodoDataset(version = "2.0.3", extract_dir="~/tvb_data").fetch_data(" ConnectivityTable_regions.xls") + assert all_extract.is_file() + tvb_data.delete_data() + assert not all_extract.is_file() + + #TODO add no interenet tests diff --git a/tvb_library/tvb/tests/library/datasets/zenodo_test.py b/tvb_library/tvb/tests/library/datasets/zenodo_test.py index 50084e7e75..59e82b974e 100644 --- a/tvb_library/tvb/tests/library/datasets/zenodo_test.py +++ b/tvb_library/tvb/tests/library/datasets/zenodo_test.py @@ -57,7 +57,7 @@ def test_get_versions(self): versions = zenodo.get_versions_info("3491055") assert type(versions) == dict - assert versions == {'2.0.1': '3497545', '1.5.9.b': '3474071', '2.0.0': '3491055', '2.0.3': '4263723', '2.0.2': '3688773', '1.5.9': '3417207', '2.7': 
'7574266'} + assert versions == {'1.5.9': '3457454', '2.0.3': '4263723', '2.0.1': '3497545', '2.0.2': '3688773', '2.0.0': '3491055', '1.5.9.b': '3474071', '2.7': '7574266'} del zenodo del versions @@ -69,12 +69,13 @@ def test_download(self): zen = Zenodo() - rec = zen.get_record("4263723") + rec = zen.get_record("7929679") rec.download() print(rec.file_loc) for file_name, file_path in rec.file_loc.items(): assert Path(file_path).is_file() + Path(file_path).unlink() From 51a2f9ca9bd9d1a1335a73da8e08b642b8e712f8 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Fri, 30 Jun 2023 10:26:56 +0530 Subject: [PATCH 19/84] fixed the file not found errors with remaining datatypes' --- tvb_library/tvb/datasets/tvb_data.py | 4 ++-- tvb_library/tvb/datasets/zenodo.py | 20 +++++++++---------- .../tvb/datatypes/local_connectivity.py | 4 ++-- tvb_library/tvb/datatypes/projections.py | 4 ++-- tvb_library/tvb/datatypes/region_mapping.py | 4 ++-- tvb_library/tvb/datatypes/sensors.py | 6 +++--- tvb_library/tvb/datatypes/surfaces.py | 3 ++- .../library/datasets/TVBZenodoDataset_test.py | 20 ++++--------------- 8 files changed, 26 insertions(+), 39 deletions(-) diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 50f97b02e3..d81b09ec05 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -121,10 +121,10 @@ def fetch_data(self, file_name): if extract_dir.is_absolute(): - return extract_dir / file_names_in_zip[file_name] + return str(extract_dir / file_names_in_zip[file_name]) - return Path.cwd()/ extract_dir / file_names_in_zip[file_name] + return str(Path.cwd()/ extract_dir / file_names_in_zip[file_name]) def delete_data(self): _dir = self.extract_dir / "tvb_data" diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 36a6f3db69..945b836273 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -34,6 +34,7 @@ import requests import 
re import pooch +from typing import List from pathlib import Path import json @@ -52,7 +53,7 @@ def __init__(self, data, base_url: str = BASE_URL) -> None: - def download(self, path=None): + def download(self, path: str = None) -> None: if 'files' not in self.data: raise AttributeError("No files to download! Please check if the record id entered is correct! or the data is publically accessible") @@ -77,19 +78,19 @@ def download(self, path=None): def get_latest_version(self): return Zenodo().get_record(self.data['links']['latest'].split("/")[-1]) - def describe(self): + def describe(self) -> str: return self.data['metadata']['description'] - def get_record_id(self): + def get_record_id(self) -> str: return self.data['conceptrecid'] - def is_open_access(self): + def is_open_access(self) -> str: return self.data['metadata']['access_right'] != "closed" - def __eq__(self, record_b): + def __eq__(self, record_b) -> bool: return (self.data == record_b.data) - def __str__(self): + def __str__(self) -> str: return json.dumps(self.data, indent=2) @@ -114,7 +115,7 @@ def get_record(self, recid: str) -> Record: return Record(requests.get(url).json()) - def _get_records(self, params: dict[str, str]) -> list[Record]: + def _get_records(self, params: dict[str, str]) -> List[Record]: url = self.base_url + "records?" 
+ urlencode(params) return [Record(hit) for hit in requests.get(url).json()["hits"]["hits"]] @@ -122,13 +123,10 @@ def _get_records(self, params: dict[str, str]) -> list[Record]: - def get_versions_info(self, recid): + def get_versions_info(self, recid) -> dict: """ recid: unique id of the data repository - - - """ # needs ineternet diff --git a/tvb_library/tvb/datatypes/local_connectivity.py b/tvb_library/tvb/datatypes/local_connectivity.py index 6c60ceb3f2..e50bd6dcc1 100644 --- a/tvb_library/tvb/datatypes/local_connectivity.py +++ b/tvb_library/tvb/datatypes/local_connectivity.py @@ -31,7 +31,7 @@ from tvb.basic.neotraits.api import HasTraits, Attr, Float, narray_summary_info from tvb.basic.readers import try_get_absolute_path, FileReader from tvb.datatypes import equations, surfaces - +from tvb.datasets import TVBZenodoDataset class LocalConnectivity(HasTraits): """ @@ -109,7 +109,7 @@ def from_file(source_file="local_connectivity_16384.mat"): result = LocalConnectivity() - source_full_path = try_get_absolute_path("tvb_data.local_connectivity", source_file) + source_full_path = TVBZenodoDataset().fetch_data(source_file) reader = FileReader(source_full_path) result.matrix = reader.read_array(matlab_data_name="LocalCoupling") diff --git a/tvb_library/tvb/datatypes/projections.py b/tvb_library/tvb/datatypes/projections.py index 713472b3af..fac6dbe8aa 100644 --- a/tvb_library/tvb/datatypes/projections.py +++ b/tvb_library/tvb/datatypes/projections.py @@ -32,7 +32,7 @@ from tvb.basic.readers import try_get_absolute_path, FileReader from tvb.datatypes import surfaces, sensors from tvb.basic.neotraits.api import HasTraits, TVBEnum, Attr, NArray, Final - +from tvb.datasets import TVBZenodoDataset class ProjectionsTypeEnum(TVBEnum): EEG = "projEEG" @@ -88,7 +88,7 @@ def from_file(cls, source_file, matlab_data_name=None, is_brainstorm=False): proj = cls() - source_full_path = try_get_absolute_path("tvb_data.projectionMatrix", source_file) + source_full_path = 
TVBZenodoDataset().fetch_data(source_file) reader = FileReader(source_full_path) if is_brainstorm: proj.projection_data = reader.read_gain_from_brainstorm() diff --git a/tvb_library/tvb/datatypes/region_mapping.py b/tvb_library/tvb/datatypes/region_mapping.py index 12eca3cd53..0c59c37fca 100644 --- a/tvb_library/tvb/datatypes/region_mapping.py +++ b/tvb_library/tvb/datatypes/region_mapping.py @@ -39,7 +39,7 @@ from tvb.datatypes.surfaces import Surface from tvb.datatypes.volumes import Volume from tvb.basic.neotraits.api import HasTraits, Attr, NArray - +from tvb.datasets import TVBZenodoDataset class RegionMapping(HasTraits): """ @@ -55,7 +55,7 @@ class RegionMapping(HasTraits): @staticmethod def from_file(source_file="regionMapping_16k_76.txt"): - source_full_path = try_get_absolute_path("tvb_data.regionMapping", source_file) + source_full_path = TVBZenodoDataset().fetch_data(source_file) reader = FileReader(source_full_path) result = RegionMapping() diff --git a/tvb_library/tvb/datatypes/sensors.py b/tvb_library/tvb/datatypes/sensors.py index 250a190e46..c88d7a831c 100644 --- a/tvb_library/tvb/datatypes/sensors.py +++ b/tvb_library/tvb/datatypes/sensors.py @@ -40,7 +40,7 @@ from tvb.basic.readers import FileReader, try_get_absolute_path from tvb.basic.neotraits.api import HasTraits, Attr, NArray, Int, TVBEnum, Final - +from tvb.datasets import TVBZenodoDataset class SensorTypesEnum(TVBEnum): TYPE_EEG = "EEG" @@ -77,7 +77,7 @@ class Sensors(HasTraits): def from_file(cls, source_file="eeg_brainstorm_65.txt"): result = cls() - source_full_path = try_get_absolute_path("tvb_data.sensors", source_file) + source_full_path = TVBZenodoDataset().fetch_data(source_file) reader = FileReader(source_full_path) result.labels = reader.read_array(dtype=numpy.str_, use_cols=(0,)) @@ -236,7 +236,7 @@ class SensorsMEG(Sensors): def from_file(cls, source_file="meg_151.txt.bz2"): result = super(SensorsMEG, cls).from_file(source_file) - source_full_path = 
try_get_absolute_path("tvb_data.sensors", source_file) + source_full_path = TVBZenodoDataset().fetch_data(source_file) reader = FileReader(source_full_path) result.orientations = reader.read_array(use_cols=(4, 5, 6)) diff --git a/tvb_library/tvb/datatypes/surfaces.py b/tvb_library/tvb/datatypes/surfaces.py index e7d011dbce..aae6cbada6 100644 --- a/tvb_library/tvb/datatypes/surfaces.py +++ b/tvb_library/tvb/datatypes/surfaces.py @@ -41,6 +41,7 @@ from tvb.basic.neotraits.api import TVBEnum from tvb.basic.neotraits.api import HasTraits, Attr, NArray, Final, Int, Float, narray_describe from tvb.basic.readers import ZipReader, try_get_absolute_path +from tvb.datasets import TVBZenodoDataset try: import gdist @@ -162,7 +163,7 @@ def _read(cls, reader): @classmethod def from_file(cls, source_file="cortex_16384.zip"): """Construct a Surface from source_file.""" - source_full_path = try_get_absolute_path("tvb_data.surfaceData", source_file) + source_full_path = TVBZenodoDataset().fetch_data(source_file) reader = ZipReader(source_full_path) return cls._read(reader) diff --git a/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py b/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py index 0e6e6542f3..c03f9256c8 100644 --- a/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py +++ b/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py @@ -28,7 +28,6 @@ """ .. 
moduleauthor:: Abhijit Deo """ -import socket from tvb.datasets import TVBZenodoDataset from pathlib import Path @@ -36,17 +35,6 @@ -def no_internet_decorator(func): - class block_network(socket.socket): - def __init__(self, *args, **kwargs): - raise Exception("Network call blocked") - - socket.socket = block_network - - def iner_func(): - return func - return iner_func - @@ -58,25 +46,25 @@ class Test_TVBZenodoDataset(BaseTestCase): def test_extract(self): tvb_data = TVBZenodoDataset() - connectivity66_dir = tvb_data.fetch_data("connectivity_66.zip") + connectivity66_dir = Path(tvb_data.fetch_data("connectivity_66.zip")) assert connectivity66_dir.is_file() tvb_data.delete_data() assert not connectivity66_dir.is_file() tvb_data = TVBZenodoDataset(version="2.0.3", extract_dir="tvb_data") - connectivity66_dir = tvb_data.fetch_data("connectivity_66.zip") + connectivity66_dir = Path(tvb_data.fetch_data("connectivity_66.zip")) assert connectivity66_dir.is_file() tvb_data.delete_data() assert not connectivity66_dir.is_file() tvb_data = TVBZenodoDataset(version="2.0.3", extract_dir="~/tvb_data") - matfile_dir = tvb_data.fetch_data("local_connectivity_80k.mat") + matfile_dir = Path(tvb_data.fetch_data("local_connectivity_80k.mat")) assert matfile_dir.is_file() tvb_data.delete_data() assert not matfile_dir.is_file() - all_extract = TVBZenodoDataset(version = "2.0.3", extract_dir="~/tvb_data").fetch_data(" ConnectivityTable_regions.xls") + all_extract = Path(TVBZenodoDataset(version = "2.0.3", extract_dir="~/tvb_data").fetch_data(" ConnectivityTable_regions.xls")) assert all_extract.is_file() tvb_data.delete_data() assert not all_extract.is_file() From d651659f51be88d8b87680184ee75962fce7a68c Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Fri, 30 Jun 2023 16:02:55 +0530 Subject: [PATCH 20/84] add types --- tvb_library/tvb/datasets/zenodo.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tvb_library/tvb/datasets/zenodo.py 
b/tvb_library/tvb/datasets/zenodo.py index 945b836273..6e8450fa5a 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -34,7 +34,7 @@ import requests import re import pooch -from typing import List +from typing import List, Dict from pathlib import Path import json @@ -123,7 +123,7 @@ def _get_records(self, params: dict[str, str]) -> List[Record]: - def get_versions_info(self, recid) -> dict: + def get_versions_info(self, recid) -> Dict: """ recid: unique id of the data repository From a04b6d9f83323dab4fd4e4a5586652ff473eb8b3 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Thu, 6 Jul 2023 21:55:06 +0530 Subject: [PATCH 21/84] removed the data setup from build.yml, fix the errors found in tests --- .github/workflows/build.yml | 34 ++++++++++++++-------------- tvb_library/tvb/datasets/base.py | 10 ++++++-- tvb_library/tvb/datasets/tvb_data.py | 2 +- tvb_library/tvb/datasets/zenodo.py | 2 +- 4 files changed, 27 insertions(+), 21 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 08dc688957..6f2a1c5c19 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -47,25 +47,25 @@ jobs: cd tvb_build bash install_full_tvb.sh - - name: cache data - id: cache-data - uses: actions/cache@v3 - with: - path: tvb_data - key: tvb-data + #- name: cache data + # id: cache-data + # uses: actions/cache@v3 + # with: + # path: tvb_data + # key: tvb-data - - name: download data - if: steps.cache-data.outputs.cache-hit != 'true' - run: | - wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip - mkdir tvb_data - unzip tvb_data.zip -d tvb_data - rm tvb_data.zip + #- name: download data + # if: steps.cache-data.outputs.cache-hit != 'true' + # run: | + # wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip + # mkdir tvb_data + # unzip tvb_data.zip -d tvb_data + # rm tvb_data.zip - - name: setup data - run: | - cd tvb_data - python3 
setup.py develop + #- name: setup data + # run: | + # cd tvb_data + # python3 setup.py develop - name: run library tests run: pytest -v tvb_library --cov --cov-report=xml && mv coverage.xml coverage-library.xml diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py index 599c47c466..df01d6c153 100644 --- a/tvb_library/tvb/datasets/base.py +++ b/tvb_library/tvb/datasets/base.py @@ -51,8 +51,14 @@ def __init__(self, version, extract_dir=None): def download(self): pass - def fetch_data(self): - pass + def fetch_data(self, file_name): + if Path(file_name).is_absolute(): + return file_name + + return self._fetch_data(file_name) + def _fetch_data(self, file_name): + pass + def get_version(self): return self.version diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index d81b09ec05..451e5f2070 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -79,7 +79,7 @@ def download(self, path=None): """ self.rec.download(path) - def fetch_data(self, file_name): + def _fetch_data(self, file_name): """ Fetches the data diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 6e8450fa5a..04232e26e7 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -115,7 +115,7 @@ def get_record(self, recid: str) -> Record: return Record(requests.get(url).json()) - def _get_records(self, params: dict[str, str]) -> List[Record]: + def _get_records(self, params: Dict[str, str]) -> List[Record]: url = self.base_url + "records?" + urlencode(params) return [Record(hit) for hit in requests.get(url).json()["hits"]["hits"]] From 7eadc0bb0d44420ee24677c749603c1d63654818 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sat, 8 Jul 2023 22:26:29 +0530 Subject: [PATCH 22/84] replacing tvb_data package from code. 
--- .github/workflows/lib-tests.yml | 20 ++--- .github/workflows/notebooks.yml | 30 +++---- .github/workflows/pg-tests.yml | 30 +++---- .github/workflows/win-tests.yml | 30 +++---- tvb_build/build_step1.py | 6 +- tvb_build/docker/Dockerfile-build | 10 +-- .../scripts/datatypes/lookup_tables.py | 4 +- .../code_update_scripts/4455_update_code.py | 9 +- .../code_update_scripts/4750_update_code.py | 6 +- .../code_update_scripts/6093_update_code.py | 6 +- .../code_update_scripts/6600_update_code.py | 7 +- .../tvb/core/services/user_service.py | 6 +- .../tvb/interfaces/command/benchmark.py | 15 ++-- .../brain_tumor_connectivity_importer.py | 8 +- .../interfaces/rest/client/examples/utils.py | 5 +- .../framework/adapters/analyzers/bct_test.py | 6 +- .../timeseries_metrics_adapter_test.py | 3 +- .../creators/stimulus_creator_test.py | 14 ++- .../simulator/simulator_adapter_test.py | 11 ++- .../connectivity_zip_importer_test.py | 6 +- .../adapters/uploaders/csv_importer_test.py | 22 ++--- .../uploaders/encrypt_decrypt_test.py | 18 ++-- .../adapters/uploaders/gifti_importer_test.py | 11 ++- .../uploaders/mat_timeseries_importer_test.py | 12 ++- .../adapters/uploaders/nifti_importer_test.py | 25 ++++-- .../adapters/uploaders/obj_importer_test.py | 12 ++- .../projection_matrix_importer_test.py | 13 +-- .../uploaders/region_mapping_importer_test.py | 16 ++-- .../uploaders/sensors_importer_test.py | 10 ++- .../uploaders/zip_surface_importer_test.py | 6 +- .../adapters/visualizers/brainviewer_test.py | 15 ++-- .../visualizers/connectivityviewer_test.py | 6 +- .../visualizers/sensorsviewer_test.py | 17 ++-- .../visualizers/surfaceviewer_test.py | 14 +-- .../tvb/tests/framework/core/factory.py | 10 ++- .../framework/core/neotraits/forms_test.py | 5 +- .../core/services/import_service_test.py | 12 ++- .../framework/core/services/links_test.py | 10 ++- .../core/services/project_service_test.py | 10 ++- .../services/serialization_manager_test.py | 6 +- 
.../interfaces/rest/datatype_resource_test.py | 9 +- .../interfaces/rest/project_resource_test.py | 7 +- .../controllers/simulator_controller_test.py | 87 ++++++++++++------- tvb_library/tvb/datasets/tvb_data.py | 2 + 44 files changed, 368 insertions(+), 219 deletions(-) diff --git a/.github/workflows/lib-tests.yml b/.github/workflows/lib-tests.yml index 07dff59116..b9d2d80531 100644 --- a/.github/workflows/lib-tests.yml +++ b/.github/workflows/lib-tests.yml @@ -27,16 +27,16 @@ jobs: pip3 install pipenv cd tvb_library && pipenv install -d --python $(which python3) - - name: download data - run: | - wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip - mkdir tvb_data - unzip tvb_data.zip -d tvb_data - rm tvb_data.zip - - - name: setup data - run: | - cd tvb_library && pipenv run bash -c 'cd ../tvb_data && python3 setup.py develop' + #- name: download data + # run: | + # wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip + # mkdir tvb_data + # unzip tvb_data.zip -d tvb_data + # rm tvb_data.zip + + #- name: setup data + # run: | + # cd tvb_library && pipenv run bash -c 'cd ../tvb_data && python3 setup.py develop' - name: importlib_metadata? 
run: cd tvb_library && pipenv install importlib_metadata diff --git a/.github/workflows/notebooks.yml b/.github/workflows/notebooks.yml index da9eab305a..7728a8a70c 100644 --- a/.github/workflows/notebooks.yml +++ b/.github/workflows/notebooks.yml @@ -40,22 +40,22 @@ jobs: cd tvb_build cmd /k "install_full_tvb.bat" - - name: cache data - id: cache-data - uses: actions/cache@v3 - with: - path: tvb_data - key: tvbdata + #- name: cache data + # id: cache-data + # uses: actions/cache@v3 + # with: + # path: tvb_data + # key: tvbdata - - name: download data - if: steps.cache-data.outputs.cache-hit != 'true' - shell: pwsh - run: | - Invoke-WebRequest -OutFile C:\\TEMP\\tvb_data.zip -Uri "https://zenodo.org/record/7574266/files/tvb_data.zip?download=1" - Expand-Archive 'C:\\TEMP\\tvb_data.zip' C:\\tvb_data - del C:\\TEMP\\tvb_data.zip - cd C:\\tvb_data - python setup.py develop + #- name: download data + # if: steps.cache-data.outputs.cache-hit != 'true' + # shell: pwsh + # run: | + # Invoke-WebRequest -OutFile C:\\TEMP\\tvb_data.zip -Uri "https://zenodo.org/record/7574266/files/tvb_data.zip?download=1" + # Expand-Archive 'C:\\TEMP\\tvb_data.zip' C:\\tvb_data + # del C:\\TEMP\\tvb_data.zip + # cd C:\\tvb_data + # python setup.py develop - name: run notebooks env: diff --git a/.github/workflows/pg-tests.yml b/.github/workflows/pg-tests.yml index b360fd2605..0abac2e24c 100644 --- a/.github/workflows/pg-tests.yml +++ b/.github/workflows/pg-tests.yml @@ -52,23 +52,23 @@ jobs: - name: setup tvb run: cd tvb_build && bash install_full_tvb.sh - - name: cache data - id: cache-data - uses: actions/cache@v3 - with: - path: tvb_data - key: tvb-data + #- name: cache data + # id: cache-data + # uses: actions/cache@v3 + # with: + # path: tvb_data + # key: tvb-data - - name: download data - if: steps.cache-data.outputs.cache-hit != 'true' - run: | - wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip - mkdir tvb_data - unzip tvb_data.zip -d tvb_data - 
rm tvb_data.zip + #- name: download data + # if: steps.cache-data.outputs.cache-hit != 'true' + # run: | + # wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip + # mkdir tvb_data + # unzip tvb_data.zip -d tvb_data + # rm tvb_data.zip - - name: setup data - run: cd tvb_data && python3 setup.py develop + #- name: setup data + # run: cd tvb_data && python3 setup.py develop - name: run framework tests run: | diff --git a/.github/workflows/win-tests.yml b/.github/workflows/win-tests.yml index 7680c5fc50..84411eb114 100644 --- a/.github/workflows/win-tests.yml +++ b/.github/workflows/win-tests.yml @@ -35,22 +35,22 @@ jobs: pip install --user -r tvb_framework/requirements.txt pip install --user --no-build-isolation tvb-gdist - - name: cache data - id: cache-data - uses: actions/cache@v3 - with: - path: tvb_data - key: tvbdata + #- name: cache data + # id: cache-data + # uses: actions/cache@v3 + # with: + # path: tvb_data + # key: tvbdata - - name: download data - if: steps.cache-data.outputs.cache-hit != 'true' - shell: pwsh - run: | - Invoke-WebRequest -OutFile C:\\TEMP\\tvb_data.zip -Uri "https://zenodo.org/record/7574266/files/tvb_data.zip?download=1" - Expand-Archive 'C:\\TEMP\\tvb_data.zip' C:\\tvb_data - del C:\\TEMP\\tvb_data.zip - cd C:\\tvb_data - python setup.py develop + #- name: download data + # if: steps.cache-data.outputs.cache-hit != 'true' + # shell: pwsh + # run: | + # Invoke-WebRequest -OutFile C:\\TEMP\\tvb_data.zip -Uri "https://zenodo.org/record/7574266/files/tvb_data.zip?download=1" + # Expand-Archive 'C:\\TEMP\\tvb_data.zip' C:\\tvb_data + # del C:\\TEMP\\tvb_data.zip + # cd C:\\tvb_data + # python setup.py develop - name: run framework tests shell: pwsh diff --git a/tvb_build/build_step1.py b/tvb_build/build_step1.py index 885b1ab54f..f7abecb888 100644 --- a/tvb_build/build_step1.py +++ b/tvb_build/build_step1.py @@ -44,7 +44,8 @@ import requests import tvb_bin -import tvb_data +#import tvb_data +from 
tvb.datasets import TVBZenodoDataset from subprocess import Popen, PIPE # source paths @@ -53,7 +54,8 @@ FW_FOLDER = os.path.join(TVB_ROOT, 'tvb_framework') LICENSE_PATH = os.path.join(FW_FOLDER, 'LICENSE') RELEASE_NOTES_PATH = os.path.join(TVB_ROOT, 'tvb_documentation', 'RELEASE_NOTES') -DATA_SRC_FOLDER = os.path.dirname(tvb_data.__file__) +#DATA_SRC_FOLDER = os.path.dirname(tvb_data.__file__) +DATA_SRC_FOLDER = TVBZenodoDataset().extract_dir DEMOS_MATLAB_FOLDER = os.path.join(TVB_ROOT, 'tvb_documentation', 'matlab') # dest paths diff --git a/tvb_build/docker/Dockerfile-build b/tvb_build/docker/Dockerfile-build index 8fa3c0cbc2..5299909282 100644 --- a/tvb_build/docker/Dockerfile-build +++ b/tvb_build/docker/Dockerfile-build @@ -38,11 +38,11 @@ RUN /bin/bash -c "source activate tvb-run"; \ /opt/conda/envs/tvb-run/bin/jupyter notebook --generate-config; \ echo "c.NotebookApp.password='sha1:12bff019c253:9daecd92c2e9bdb10b3b8a06767a74a0fe078d7c'">>$JUPYTER_CONFIG/jupyter_notebook_config.py -RUN wget https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip; \ - mkdir tvb_data; unzip tvb_data.zip -d tvb_data; rm tvb_data.zip; \ - cd tvb_data; \ - /opt/conda/envs/tvb-run/bin/python setup.py develop;\ - /opt/conda/envs/tvb-docs/bin/python setup.py develop +#RUN wget https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip; \ +# mkdir tvb_data; unzip tvb_data.zip -d tvb_data; rm tvb_data.zip; \ +# cd tvb_data; \ +# /opt/conda/envs/tvb-run/bin/python setup.py develop;\ +# /opt/conda/envs/tvb-docs/bin/python setup.py develop WORKDIR $USER_HOME COPY requirements_group requirements.txt diff --git a/tvb_contrib/tvb/contrib/scripts/datatypes/lookup_tables.py b/tvb_contrib/tvb/contrib/scripts/datatypes/lookup_tables.py index 478259c622..cb4d573ccb 100644 --- a/tvb_contrib/tvb/contrib/scripts/datatypes/lookup_tables.py +++ b/tvb_contrib/tvb/contrib/scripts/datatypes/lookup_tables.py @@ -31,6 +31,7 @@ """ import numpy +from 
tvb.datasets import TVBZenodoDataset from tvb.basic.readers import try_get_absolute_path from tvb.basic.neotraits.api import HasTraits, Attr, NArray, Int, Float @@ -74,7 +75,8 @@ class LookUpTable(HasTraits): @staticmethod def populate_table(result, source_file): - source_full_path = try_get_absolute_path("tvb_data.tables", source_file) + source_full_path = TVBZenodoDataset().fetch_data(source_file) + #source_full_path = try_get_absolute_path("tvb_data.tables", source_file) zip_data = numpy.load(source_full_path) result.df = zip_data['df'] diff --git a/tvb_framework/tvb/core/code_versions/code_update_scripts/4455_update_code.py b/tvb_framework/tvb/core/code_versions/code_update_scripts/4455_update_code.py index cd9381b8df..7b8636295c 100644 --- a/tvb_framework/tvb/core/code_versions/code_update_scripts/4455_update_code.py +++ b/tvb_framework/tvb/core/code_versions/code_update_scripts/4455_update_code.py @@ -29,15 +29,18 @@ """ import os -import tvb_data.obj +#import tvb_data.obj +from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.obj_importer import ObjSurfaceImporter from tvb.basic.logger.builder import get_logger from tvb.core.services.operation_service import OperationService from tvb.core.entities.storage import dao from tvb.datatypes.surfaces import SurfaceTypesEnum -DATA_FILE_EEG_CAP = os.path.join(os.path.dirname(tvb_data.obj.__file__), "eeg_cap.obj") -DATA_FILE_FACE = os.path.join(os.path.dirname(tvb_data.obj.__file__), "face_surface.obj") +#DATA_FILE_EEG_CAP = os.path.join(os.path.dirname(tvb_data.obj.__file__), "eeg_cap.obj") +#DATA_FILE_FACE = os.path.join(os.path.dirname(tvb_data.obj.__file__), "face_surface.obj") +DATA_FILE_EEG_CAP = TVBZenodoDataset().fetch_data("eeg_cap.obj") +DATA_FILE_FACE = TVBZenodoDataset().fetch_data('face_surface.obj') LOGGER = get_logger(__name__) PAGE_SIZE = 20 diff --git a/tvb_framework/tvb/core/code_versions/code_update_scripts/4750_update_code.py 
b/tvb_framework/tvb/core/code_versions/code_update_scripts/4750_update_code.py index 7f107c564f..d2c999c579 100644 --- a/tvb_framework/tvb/core/code_versions/code_update_scripts/4750_update_code.py +++ b/tvb_framework/tvb/core/code_versions/code_update_scripts/4750_update_code.py @@ -28,13 +28,15 @@ .. moduleauthor:: Bogdan Neacsa """ import os -import tvb_data.sensors +#import tvb_data.sensors +from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.sensors_importer import SensorsImporter from tvb.basic.logger.builder import get_logger from tvb.core.entities.storage import dao from tvb.core.services.operation_service import OperationService -DATA_FILE = os.path.join(os.path.dirname(tvb_data.sensors.__file__), "seeg_39.txt.bz2") +#DATA_FILE = os.path.join(os.path.dirname(tvb_data.sensors.__file__), "seeg_39.txt.bz2") +DATA_FILE = TVBZenodoDataset().fetch_data('seeg_39.txt.bz2') LOGGER = get_logger(__name__) PAGE_SIZE = 20 diff --git a/tvb_framework/tvb/core/code_versions/code_update_scripts/6093_update_code.py b/tvb_framework/tvb/core/code_versions/code_update_scripts/6093_update_code.py index ffffdfe028..59d30e5dbc 100644 --- a/tvb_framework/tvb/core/code_versions/code_update_scripts/6093_update_code.py +++ b/tvb_framework/tvb/core/code_versions/code_update_scripts/6093_update_code.py @@ -30,14 +30,16 @@ .. 
moduleauthor:: Mihai Andrei """ import os -import tvb_data.obj +#import tvb_data.obj +from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.obj_importer import ObjSurfaceImporter from tvb.basic.logger.builder import get_logger from tvb.core.entities.storage import dao from tvb.core.services.operation_service import OperationService from tvb.datatypes.surfaces import SurfaceTypesEnum -DATA_FILE_FACE = os.path.join(os.path.dirname(tvb_data.obj.__file__), "face_surface.obj") +#DATA_FILE_FACE = os.path.join(os.path.dirname(tvb_data.obj.__file__), "face_surface.obj") +DATA_FILE_FACE = TVBZenodoDataset().fetch_data('face_surface.obj') LOGGER = get_logger(__name__) PAGE_SIZE = 20 diff --git a/tvb_framework/tvb/core/code_versions/code_update_scripts/6600_update_code.py b/tvb_framework/tvb/core/code_versions/code_update_scripts/6600_update_code.py index a8a89a476f..ea6f5e1d4a 100644 --- a/tvb_framework/tvb/core/code_versions/code_update_scripts/6600_update_code.py +++ b/tvb_framework/tvb/core/code_versions/code_update_scripts/6600_update_code.py @@ -33,10 +33,11 @@ from tvb.basic.logger.builder import get_logger from tvb.core.entities.storage import dao from tvb.core.services.import_service import ImportService -import tvb_data - -DATA_FILE = os.path.join(os.path.dirname(tvb_data.__file__), "Default_Project.zip") +#import tvb_data +from tvb.datasets import TVBZenodoDataset +#DATA_FILE = os.path.join(os.path.dirname(tvb_data.__file__), "Default_Project.zip") +DATA_FILE = TVBZenodoDataset().fetch_data('Default_Project.zip') LOGGER = get_logger(__name__) diff --git a/tvb_framework/tvb/core/services/user_service.py b/tvb_framework/tvb/core/services/user_service.py index 9a2de56dc6..7a0cbd62a6 100644 --- a/tvb_framework/tvb/core/services/user_service.py +++ b/tvb_framework/tvb/core/services/user_service.py @@ -33,7 +33,8 @@ import os import random import six -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from 
tvb.basic.logger.builder import get_logger from tvb.basic.profile import TvbProfile from tvb.config import DEFAULT_PROJECT_GID @@ -120,7 +121,8 @@ def create_user(self, username=None, display_name=None, password=None, password2 user = dao.store_entity(user) if role == ROLE_ADMINISTRATOR and not skip_import: - to_upload = os.path.join(os.path.dirname(tvb_data.__file__), "Default_Project.zip") + #to_upload = os.path.join(os.path.dirname(tvb_data.__file__), "Default_Project.zip") + to_upload = TVBZenodoDataset().fetch_data('Default_Project.zip') if not os.path.exists(to_upload): self.logger.warning("Could not find DEFAULT PROJECT at path %s. You might want to import it " "yourself. See TVB documentation about where to find it!" % to_upload) diff --git a/tvb_framework/tvb/interfaces/command/benchmark.py b/tvb_framework/tvb/interfaces/command/benchmark.py index ae736f87b9..9acfd1ca85 100644 --- a/tvb_framework/tvb/interfaces/command/benchmark.py +++ b/tvb_framework/tvb/interfaces/command/benchmark.py @@ -31,7 +31,8 @@ from os import path -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.core.entities.file.simulator.view_model import HeunDeterministicViewModel from tvb.interfaces.command.lab import * @@ -56,12 +57,16 @@ def _fire_simulation(project_id, simulator_vm): def _create_bench_project(): prj = new_project("benchmark_project_ %s" % datetime.now()) - data_dir = path.abspath(path.dirname(tvb_data.__file__)) - zip_path = path.join(data_dir, 'connectivity', 'connectivity_68.zip') + #data_dir = path.abspath(path.dirname(tvb_data.__file__)) + #zip_path = path.join(data_dir, 'connectivity', 'connectivity_68.zip') + tvb_data = TVBZenodoDataset() + zip_path = tvb_data.fetch_data('connectivity_68.zip') import_conn_zip(prj.id, zip_path) - zip_path = path.join(data_dir, 'connectivity', 'connectivity_96.zip') + #zip_path = path.join(data_dir, 'connectivity', 
'connectivity_96.zip') + zip_path = tvb_data.fetch_data('connectivity_96.zip') import_conn_zip(prj.id, zip_path) - zip_path = path.join(data_dir, 'connectivity', 'connectivity_192.zip') + #zip_path = path.join(data_dir, 'connectivity', 'connectivity_192.zip') + zip_path = tvb_data.fetch_data('connectivity_192.zip') import_conn_zip(prj.id, zip_path) conn68 = dao.get_generic_entity(ConnectivityIndex, 68, "number_of_regions")[0] diff --git a/tvb_framework/tvb/interfaces/command/demos/importers/brain_tumor_connectivity_importer.py b/tvb_framework/tvb/interfaces/command/demos/importers/brain_tumor_connectivity_importer.py index a69006cf0b..195ccba551 100644 --- a/tvb_framework/tvb/interfaces/command/demos/importers/brain_tumor_connectivity_importer.py +++ b/tvb_framework/tvb/interfaces/command/demos/importers/brain_tumor_connectivity_importer.py @@ -66,8 +66,12 @@ def import_tumor_connectivities(project_id, folder_path): def import_surface_rm(project_id, conn_gid): # Import surface and region mapping from tvb_data berlin subjects (68 regions) - rm_file = try_get_absolute_path("tvb_data", "berlinSubjects/DH_20120806/DH_20120806_RegionMapping.txt") - surface_zip_file = try_get_absolute_path("tvb_data", "berlinSubjects/DH_20120806/DH_20120806_Surface_Cortex.zip") + #rm_file = try_get_absolute_path("tvb_data", "berlinSubjects/DH_20120806/DH_20120806_RegionMapping.txt") + #surface_zip_file = try_get_absolute_path("tvb_data", "berlinSubjects/DH_20120806/DH_20120806_Surface_Cortex.zip") + from tvb.datasets import TVBZenodoDataset + tvb_data = TVBZenodoDataset() + rm_file = tvb_data.fetch_data('DH_20120806_RegionMapping.txt') + surface_zip_file = tvb_data.fetch_data('DH_20120806_Surface_Cortex.zip') surface_importer = ABCAdapter.build_adapter_from_class(ZIPSurfaceImporter) surface_imp_model = ZIPSurfaceImporterModel() diff --git a/tvb_framework/tvb/interfaces/rest/client/examples/utils.py b/tvb_framework/tvb/interfaces/rest/client/examples/utils.py index bb1711a435..b88b675f05 
100644 --- a/tvb_framework/tvb/interfaces/rest/client/examples/utils.py +++ b/tvb_framework/tvb/interfaces/rest/client/examples/utils.py @@ -28,7 +28,8 @@ import sys import time -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.basic.logger.builder import get_logger from tvb.core.entities.model.model_operation import STATUS_ERROR, STATUS_CANCELED, STATUS_FINISHED @@ -44,7 +45,7 @@ def compute_rest_url(): def compute_tvb_data_path(folder, filename): - return os.path.join(os.path.dirname(tvb_data.__file__), folder, filename) + return os.path.join(TVBZenodoDataset().extract_dir, folder, filename) logger = get_logger(__name__) diff --git a/tvb_framework/tvb/tests/framework/adapters/analyzers/bct_test.py b/tvb_framework/tvb/tests/framework/adapters/analyzers/bct_test.py index d26ca55b0e..202fbf8f66 100644 --- a/tvb_framework/tvb/tests/framework/adapters/analyzers/bct_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/analyzers/bct_test.py @@ -29,7 +29,8 @@ """ import os -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.analyzers.bct_adapters import BaseBCTModel from tvb.core.entities.model.model_operation import Algorithm from tvb.tests.framework.core.base_testcase import TransactionalTestCase @@ -56,7 +57,8 @@ def transactional_setup_method(self): self.test_user = TestFactory.create_user("BCT_User") self.test_project = TestFactory.create_project(self.test_user, "BCT-Project") # Make sure Connectivity is in DB - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') self.connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) algorithms = dao.get_generic_entity(Algorithm, 'Brain Connectivity Toolbox', 'group_description') diff --git 
a/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py b/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py index 9b68008bb2..77d2ceba0c 100644 --- a/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py @@ -29,7 +29,8 @@ """ import os -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset import json from tvb.adapters.datatypes.db.mapped_value import DatatypeMeasureIndex diff --git a/tvb_framework/tvb/tests/framework/adapters/creators/stimulus_creator_test.py b/tvb_framework/tvb/tests/framework/adapters/creators/stimulus_creator_test.py index 77d623909c..46169239df 100644 --- a/tvb_framework/tvb/tests/framework/adapters/creators/stimulus_creator_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/creators/stimulus_creator_test.py @@ -27,8 +27,9 @@ import json import os import numpy -import tvb_data -import tvb_data.surfaceData +#import tvb_data +#import tvb_data.surfaceData +from tvb.datasets import TVBZenodoDataset from tvb.adapters.creators.stimulus_creator import RegionStimulusCreator, SurfaceStimulusCreator from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex @@ -52,11 +53,16 @@ def transactional_setup_method(self): self.test_project = TestFactory.create_project(self.test_user, "Stim_Project") self.storage_interface = StorageInterface() - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + tvb_data = TVBZenodoDataset() + + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + zip_path = tvb_data.fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) self.connectivity = TestFactory.get_entity(self.test_project, ConnectivityIndex) - cortex = 
os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #cortex = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + cortex = tvb_data.fetch_data('cortex_16384.zip') + self.surface = TestFactory.import_surface_zip(self.test_user, self.test_project, cortex, SurfaceTypesEnum.CORTICAL_SURFACE) diff --git a/tvb_framework/tvb/tests/framework/adapters/simulator/simulator_adapter_test.py b/tvb_framework/tvb/tests/framework/adapters/simulator/simulator_adapter_test.py index 0db157f22c..ac22a01d8e 100644 --- a/tvb_framework/tvb/tests/framework/adapters/simulator/simulator_adapter_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/simulator/simulator_adapter_test.py @@ -28,8 +28,9 @@ .. moduleauthor:: Lia Domide """ -import tvb_data.surfaceData -import tvb_data.regionMapping +#import tvb_data.surfaceData +#import tvb_data.regionMapping +from tvb.datasets import TVBZenodoDataset from os import path from tvb.adapters.datatypes.db.time_series import TimeSeriesRegionIndex @@ -112,10 +113,12 @@ def test_estimate_execution_time(self, connectivity_index_factory): estimation1 = self.simulator_adapter.get_execution_time_approximation(model) # import surfaceData and region mapping - cortex_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #cortex_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + cortex_file = TVBZenodoDataset().fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, cortex_file, SurfaceTypesEnum.CORTICAL_SURFACE) - rm_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + #rm_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + rm_file = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt') region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, rm_file, surface.gid, 
model.connectivity.hex) local_conn = TestFactory.create_local_connectivity(self.test_user, self.test_project, surface.gid) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_zip_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_zip_importer_test.py index abe8b64008..e2b6ff98e0 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_zip_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_zip_importer_test.py @@ -30,7 +30,8 @@ """ from os import path -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.tests.framework.core.base_testcase import BaseTestCase from tvb.tests.framework.core.factory import TestFactory @@ -58,7 +59,8 @@ def test_happy_flow_import(self): """ Test that importing a CFF generates at least one DataType in DB. """ - zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') dt_count_before = TestFactory.get_entity_count(self.test_project, ConnectivityIndex) TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John", False) dt_count_after = TestFactory.get_entity_count(self.test_project, ConnectivityIndex) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/csv_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/csv_importer_test.py index ba9af4fbd6..2a77cdb890 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/csv_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/csv_importer_test.py @@ -31,7 +31,7 @@ from os import path import pytest -import tvb_data +from tvb.datasets import TVBZenodoDataset from 
tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.adapters.uploaders.csv_connectivity_importer import CSVConnectivityImporter from tvb.adapters.uploaders.csv_connectivity_importer import CSVConnectivityParser, CSVConnectivityImporterModel @@ -47,10 +47,10 @@ class TestCSVConnectivityParser(BaseTestCase): - BASE_PTH = path.join(path.dirname(tvb_data.__file__), 'dti_pipeline_toronto') - + #BASE_PTH = path.join(path.dirname(tvb_data.__file__), 'dti_pipeline_toronto') + def test_parse_happy(self): - cap_pth = path.join(self.BASE_PTH, 'output_ConnectionDistanceMatrix.csv') + cap_pth = TVBZenodoDataset().fetch_data('output_ConnectionDistanceMatrix.csv') with open(cap_pth) as f: result_conn = CSVConnectivityParser(f).result_conn @@ -77,11 +77,11 @@ def teardown_method(self): def _import_csv_test_connectivity(self, reference_connectivity_gid, subject): ### First prepare input data: - data_dir = path.abspath(path.dirname(tvb_data.__file__)) + #data_dir = path.abspath(path.dirname(tvb_data.__file__)) - toronto_dir = path.join(data_dir, 'dti_pipeline_toronto') - weights = path.join(toronto_dir, 'output_ConnectionCapacityMatrix.csv') - tracts = path.join(toronto_dir, 'output_ConnectionDistanceMatrix.csv') + #toronto_dir = path.join(data_dir, 'dti_pipeline_toronto') + weights = TVBZenodoDataset().fetch_data('output_ConnectionCapacityMatrix.csv') + tracts = TVBZenodoDataset().fetch_data('output_ConnectionDistanceMatrix.csv') weights_tmp = weights + '.tmp' tracts_tmp = tracts + '.tmp' self.storage_interface.copy_file(weights, weights_tmp) @@ -99,7 +99,8 @@ def test_happy_flow_import(self): Test that importing a CFF generates at least one DataType in DB. 
""" - zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, subject=TEST_SUBJECT_A) field = FilterChain.datatype + '.subject' @@ -131,7 +132,8 @@ def test_happy_flow_import(self): assert (reference_connectivity.region_labels == imported_connectivity.region_labels).all() def test_bad_reference(self): - zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + #zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) field = FilterChain.datatype + '.subject' filters = FilterChain('', [field], [TEST_SUBJECT_A], ['!=']) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py index 34ce6e634c..2fb86149bd 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py @@ -31,7 +31,8 @@ import os import pyAesCrypt import pytest -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from cryptography.hazmat.primitives.asymmetric import rsa @@ -46,12 +47,13 @@ class TestEncryptionDecryption(TransactionalTestCase): + tvb_data = TVBZenodoDataset() # noinspection PyTypeChecker - @pytest.mark.parametrize("dir_name, file_name", [('connectivity', 'connectivity_76.zip'), - ('surfaceData', 'cortex_2x120k.zip'), - ('projectionMatrix', 
'projection_meg_276_surface_16k.npy'), - ('h5', 'TimeSeriesRegion.h5')]) - def test_encrypt_decrypt(self, dir_name, file_name): + @pytest.mark.parametrize(" file_name", [('connectivity_76.zip'), + ( 'cortex_2x120k.zip'), + ( 'projection_meg_276_surface_16k.npy'), + ( 'TimeSeriesRegion.h5')]) + def test_encrypt_decrypt(self, file_name): import_export_encryption_handler = StorageInterface.get_import_export_encryption_handler() # Generate a private key and public key @@ -75,7 +77,9 @@ def test_encrypt_decrypt(self, dir_name, file_name): with open(private_key_path, 'wb') as f: f.write(pem) - path_to_file = os.path.join(os.path.dirname(tvb_data.__file__), dir_name, file_name) + #path_to_file = os.path.join(os.path.dirname(tvb_data.__file__), dir_name, file_name) + path_to_file = self.tvb_data.fetch_data(file_name) + # Create model for ABCUploader connectivity_model = ZIPConnectivityImporterModel() diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/gifti_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/gifti_importer_test.py index 00f1e21662..ffa9976a1c 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/gifti_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/gifti_importer_test.py @@ -29,7 +29,8 @@ """ import os -import tvb_data.gifti as demo_data +#import tvb_data.gifti as demo_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.gifti.parser import GIFTIParser from tvb.core.services.exceptions import OperationException from tvb.storage.storage_interface import StorageInterface @@ -42,9 +43,13 @@ class TestGIFTISurfaceImporter(BaseTestCase): Unit-tests for GIFTI Surface importer. 
""" - GIFTI_SURFACE_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'sample.cortex.gii') - GIFTI_TIME_SERIES_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'sample.time_series.gii') + #GIFTI_SURFACE_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'sample.cortex.gii') + #GIFTI_TIME_SERIES_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'sample.time_series.gii') WRONG_GII_FILE = os.path.abspath(__file__) + + tvb_data = TVBZenodoDataset() + GIFTI_SURFACE_FILE = tvb_data.fetch_data('sample.cortex.gii') + GIFTI_TIME_SERIES_FILE = tvb_data.fetch_data( 'sample.time_series.gii') def setup_method(self): self.test_user = TestFactory.create_user('Gifti_User') diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/mat_timeseries_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/mat_timeseries_importer_test.py index 5c8df773f2..492dc2e640 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/mat_timeseries_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/mat_timeseries_importer_test.py @@ -32,7 +32,8 @@ import os -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.time_series import TimeSeriesRegionIndex from tvb.adapters.uploaders.mat_timeseries_importer import RegionMatTimeSeriesImporterModel, RegionTimeSeriesImporter from tvb.tests.framework.core.base_testcase import BaseTestCase @@ -40,9 +41,12 @@ class TestMatTimeSeriesImporter(BaseTestCase): - base_pth = os.path.join(os.path.dirname(tvb_data.__file__), 'berlinSubjects', 'QL_20120814') - bold_path = os.path.join(base_pth, 'QL_BOLD_regiontimecourse.mat') - connectivity_path = os.path.join(base_pth, 'QL_20120814_Connectivity.zip') + #base_pth = os.path.join(os.path.dirname(tvb_data.__file__), 'berlinSubjects', 'QL_20120814') + tvb_data = TVBZenodoDataset() + bold_path = tvb_data.fetch_data('QL_BOLD_regiontimecourse.mat') + #bold_path = os.path.join(base_pth, 
'QL_BOLD_regiontimecourse.mat') + connectivity_path = tvb_data.fetch_data('QL_20120814_Connectivity.zip') + #connectivity_path = os.path.join(base_pth, 'QL_20120814_Connectivity.zip') def setup_method(self): self.test_user = TestFactory.create_user('Mat_Timeseries_User') diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py index d1c0572e6b..97413f1124 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py @@ -31,8 +31,9 @@ import os import numpy -import tvb_data -import tvb_data.nifti as demo_data +#import tvb_data +#import tvb_data.nifti as demo_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.adapters.datatypes.db.region_mapping import RegionVolumeMappingIndex from tvb.adapters.datatypes.db.structural import StructuralMRIIndex @@ -51,11 +52,18 @@ class TestNIFTIImporter(BaseTestCase): Unit-tests for NIFTI importer. 
""" - NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii') - GZ_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii.gz') - TIMESERIES_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'time_series_152.nii.gz') - WRONG_NII_FILE = os.path.abspath(__file__) - TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'volume_mapping/mapping_FS_76.txt') + #NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii') + #GZ_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii.gz') + #TIMESERIES_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'time_series_152.nii.gz') + #WRONG_NII_FILE = os.path.abspath(__file__) + #TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'volume_mapping/mapping_FS_76.txt') + + tvb_data = TVBZenodoDataset() + NII_FILE = tvb_data.fetch_data('minimal.nii') + GZ_NII_FILE = tvb_data.fetch_data('minimal.nii.gz') + TIMESERIES_NII_FILE = tvb_data.fetch_data('time_series_152.nii.gz') + WRONG_NII_FILE = os.path.abspath(__file__) #? + TXT_FILE = tvb_data.fetch_data('mapping_FS_76.txt') DEFAULT_ORIGIN = [[0.0, 0.0, 0.0]] UNKNOWN_STR = "unknown" @@ -144,7 +152,8 @@ def test_import_region_mapping(self): """ This method tests import of a NIFTI file compressed in GZ format. 
""" - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") to_link_conn = TestFactory.get_entity(self.test_project, ConnectivityIndex) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/obj_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/obj_importer_test.py index 27b0776b10..bea8679714 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/obj_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/obj_importer_test.py @@ -29,8 +29,8 @@ """ import os -import tvb_data.obj - +#import tvb_data.obj +from tvb.datasets import TVBZenodoDataset from tvb.core.neocom import h5 from tvb.datatypes.surfaces import SurfaceTypesEnum from tvb.tests.framework.core.base_testcase import BaseTestCase @@ -41,9 +41,13 @@ class TestObjSurfaceImporter(BaseTestCase): """ Unit-tests for Obj Surface importer. 
""" + + #torus = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'test_torus.obj') + #face = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj') + tvb_data = TVBZenodoDataset() + torus = tvb_data.fetch_data('test_torus.obj') + face = tvb_data.fetch_data('face_surface.obj') - torus = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'test_torus.obj') - face = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj') def setup_method(self): self.test_user = TestFactory.create_user('Obj_Importer_User') diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py index 5723ec6913..9ed34fff46 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py @@ -31,9 +31,10 @@ import os -import tvb_data.projectionMatrix as dataset -import tvb_data.sensors -import tvb_data.surfaceData +#import tvb_data.projectionMatrix as dataset +#import tvb_data.sensors +#import tvb_data.surfaceData +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.projections import ProjectionMatrixIndex from tvb.core.services.exceptions import OperationException from tvb.datatypes.sensors import SensorTypesEnum @@ -54,11 +55,13 @@ def setup_method(self): self.test_user = TestFactory.create_user("UserPM") self.test_project = TestFactory.create_project(self.test_user) - zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'eeg_brainstorm_65.txt') + #zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'eeg_brainstorm_65.txt') + zip_path = TVBZenodoDataset().fetch_data('eeg_brainstorm_65.txt') self.sensors = TestFactory.import_sensors(self.test_user, self.test_project, zip_path, SensorTypesEnum.TYPE_EEG) - zip_path = 
os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') self.surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/region_mapping_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/region_mapping_importer_test.py index 42e1bb6370..50d54feaa9 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/region_mapping_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/region_mapping_importer_test.py @@ -30,8 +30,9 @@ import os import tvb.tests.framework.adapters.uploaders.test_data as test_data -import tvb_data.regionMapping as demo_data -import tvb_data.surfaceData +#import tvb_data.regionMapping as demo_data +#import tvb_data.surfaceData +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.surface import SurfaceIndex from tvb.basic.neotraits.ex import TraitValueError from tvb.core.adapters.exceptions import LaunchException @@ -48,10 +49,15 @@ class TestRegionMappingImporter(BaseTestCase): """ Unit-tests for RegionMapping importer. 
""" + tvb_data = TVBZenodoDataset() - TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.txt') - ZIP_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.zip') - BZ2_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.bz2') + #TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.txt') + #ZIP_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.zip') + #BZ2_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.bz2') + + TXT_FILE = tvb_data.fetch_data('regionMapping_16k_76.txt') + ZIP_FILE = tvb_data.fetch_data('regionMapping_16k_76.zip') + BZ2_FILE = tvb_data.fetch_data('regionMapping_16k_76.bz2') # Wrong data WRONG_FILE_1 = os.path.join(os.path.dirname(test_data.__file__), 'region_mapping_wrong_1.txt') diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/sensors_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/sensors_importer_test.py index d05e14edfb..e0a8ede2b8 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/sensors_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/sensors_importer_test.py @@ -30,7 +30,8 @@ import os -import tvb_data.sensors as demo_data +#import tvb_data.sensors as demo_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.sensors_importer import SensorsImporter, SensorsImporterModel from tvb.core.neocom import h5 from tvb.core.services.exceptions import OperationException @@ -44,8 +45,11 @@ class TestSensorsImporter(BaseTestCase): """ Unit-tests for Sensors importer. 
""" - EEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'eeg_unitvector_62.txt.bz2') - MEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'meg_151.txt.bz2') + #EEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'eeg_unitvector_62.txt.bz2') + #MEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'meg_151.txt.bz2') + tvb_data = TVBZenodoDataset() + EEG_FILE = tvb_data.fetch_data('eeg_unitvector_62.txt.bz2') + MEG_FILE = tvb_data.fetch_data('meg_151.txt.bz2') def setup_method(self): """ diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/zip_surface_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/zip_surface_importer_test.py index d047a63b56..387398b1bc 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/zip_surface_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/zip_surface_importer_test.py @@ -30,7 +30,8 @@ import os -import tvb_data.surfaceData +#import tvb_data.surfaceData +from tvb.datasets import TVBZenodoDataset from tvb.datatypes.surfaces import SurfaceTypesEnum from tvb.tests.framework.core.base_testcase import BaseTestCase from tvb.tests.framework.core.factory import TestFactory @@ -41,7 +42,8 @@ class TestZIPSurfaceImporter(BaseTestCase): Unit-tests for Zip Surface importer. 
""" - surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'outer_skull_4096.zip') + #surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'outer_skull_4096.zip') + surf_skull = TVBZenodoDataset().fetch_data('outer_skull_4096.zip') def setup_method(self): self.test_user = TestFactory.create_user('Zip_Surface_User') diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py b/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py index 92db5dd4c6..cefc81a116 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py @@ -29,8 +29,9 @@ """ import os -import tvb_data.surfaceData -import tvb_data.regionMapping +#import tvb_data.surfaceData +#import tvb_data.regionMapping +from tvb.datasets import TVBZenodoDataset from tvb.core.neocom import h5 from tvb.tests.framework.core.base_testcase import TransactionalTestCase @@ -50,8 +51,11 @@ class TestBrainViewer(TransactionalTestCase): EXPECTED_EXTRA_KEYS = ['urlMeasurePointsLabels', 'urlMeasurePoints', 'pageSize', 'shellObject', 'extended_view', 'legendLabels', 'labelsStateVar', 'labelsModes', 'title'] - cortex = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - region_mapping_path = os.path.join(os.path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + #cortex = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #region_mapping_path = os.path.join(os.path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + tvb_data = TVBZenodoDataset() + cortex = tvb_data.fetch_data('cortex_16384.zip') + region_mapping = tvb_data.fetch_data('regionMapping_16k_76.txt') def transactional_setup_method(self): """ @@ -62,7 +66,8 @@ def transactional_setup_method(self): self.test_user = TestFactory.create_user('Brain_Viewer_User') 
self.test_project = TestFactory.create_project(self.test_user, 'Brain_Viewer_Project') - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + zip_path = self.tvb_data.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") connectivity_idx = TestFactory.get_entity(self.test_project, ConnectivityIndex) assert connectivity_idx is not None diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/connectivityviewer_test.py b/tvb_framework/tvb/tests/framework/adapters/visualizers/connectivityviewer_test.py index 1db6322f11..f297bc9762 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/connectivityviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/connectivityviewer_test.py @@ -28,7 +28,8 @@ """ import os -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.tests.framework.core.base_testcase import TransactionalTestCase @@ -51,7 +52,8 @@ def transactional_setup_method(self): self.test_user = TestFactory.create_user("UserCVV") self.test_project = TestFactory.create_project(self.test_user) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) self.connectivity_index = TestFactory.get_entity(self.test_project, ConnectivityIndex) assert self.connectivity_index is not None diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/sensorsviewer_test.py 
b/tvb_framework/tvb/tests/framework/adapters/visualizers/sensorsviewer_test.py index f6f53d93bc..29307b091b 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/sensorsviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/sensorsviewer_test.py @@ -29,8 +29,9 @@ """ import os -import tvb_data.obj -import tvb_data.sensors +#import tvb_data.obj +#import tvb_data.sensors +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.sensors import SensorsIndex from tvb.adapters.datatypes.db.surface import SurfaceIndex from tvb.adapters.uploaders.sensors_importer import SensorsImporterModel @@ -71,7 +72,8 @@ def test_launch_eeg(self): Check that all required keys are present in output from EegSensorViewer launch. """ # Import Sensors - zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'eeg_unitvector_62.txt.bz2') + #zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'eeg_unitvector_62.txt.bz2') + zip_path = TVBZenodoDataset().fetch_data('eeg_unitvector_62.txt.bz2') TestFactory.import_sensors(self.test_user, self.test_project, zip_path, SensorTypesEnum.TYPE_EEG) field = FilterChain.datatype + '.sensors_type' @@ -79,7 +81,8 @@ def test_launch_eeg(self): sensors_index = TestFactory.get_entity(self.test_project, SensorsIndex, filters) # Import EEGCap - cap_path = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'eeg_cap.obj') + #cap_path = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'eeg_cap.obj') + cap_path = TVBZenodoDataset().fetch_data('eeg_cap.obj') TestFactory.import_surface_obj(self.test_user, self.test_project, cap_path, SurfaceTypesEnum.EEG_CAP_SURFACE) field = FilterChain.datatype + '.surface_type' filters = FilterChain('', [field], [SurfaceTypesEnum.EEG_CAP_SURFACE.value], ['==']) @@ -106,7 +109,8 @@ def test_launch_meg(self): Check that all required keys are present in output from MEGSensorViewer launch. 
""" - zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'meg_151.txt.bz2') + #zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'meg_151.txt.bz2') + zip_path = TVBZenodoDataset().fetch_data('meg_151.txt.bz2') TestFactory.import_sensors(self.test_user, self.test_project, zip_path, SensorTypesEnum.TYPE_MEG) @@ -126,7 +130,8 @@ def test_launch_internal(self): """ Check that all required keys are present in output from InternalSensorViewer launch. """ - zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'seeg_39.txt.bz2') + #zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'seeg_39.txt.bz2') + zip_path = TVBZenodoDataset().fetch_data('seeg_39.txt.bz2') sensors_index = TestFactory.import_sensors(self.test_user, self.test_project, zip_path, SensorTypesEnum.TYPE_INTERNAL) viewer = SensorsViewer() diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/surfaceviewer_test.py b/tvb_framework/tvb/tests/framework/adapters/visualizers/surfaceviewer_test.py index c6715dbbc7..422ff64896 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/surfaceviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/surfaceviewer_test.py @@ -29,8 +29,9 @@ """ import os -import tvb_data.surfaceData -import tvb_data.regionMapping as demo_data +#import tvb_data.surfaceData +#import tvb_data.regionMapping as demo_data +from tvb.datasets import TVBZenodoDataset from uuid import UUID from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex @@ -59,17 +60,20 @@ def transactional_setup_method(self): test_user = TestFactory.create_user('Surface_Viewer_User') self.test_project = TestFactory.create_project(test_user, 'Surface_Viewer_Project') - surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + surf_skull = 
TVBZenodoDataset().fetch_data('cortex_16384.zip') self.surface = TestFactory.import_surface_zip(test_user, self.test_project, surf_skull, SurfaceTypesEnum.CORTICAL_SURFACE) assert self.surface is not None - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') TestFactory.import_zip_connectivity(test_user, self.test_project, zip_path, "John") connectivity_index = TestFactory.get_entity(self.test_project, ConnectivityIndex) assert connectivity_index is not None - TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.txt') + #TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.txt') + TXT_FILE = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt') self.region_mapping = TestFactory.import_region_mapping(test_user, self.test_project, TXT_FILE, self.surface.gid, connectivity_index.gid) assert self.region_mapping is not None diff --git a/tvb_framework/tvb/tests/framework/core/factory.py b/tvb_framework/tvb/tests/framework/core/factory.py index f41d970496..acd85b78a8 100644 --- a/tvb_framework/tvb/tests/framework/core/factory.py +++ b/tvb_framework/tvb/tests/framework/core/factory.py @@ -37,8 +37,8 @@ import os import random import uuid -import tvb_data - +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.adapters.datatypes.db.local_connectivity import LocalConnectivityIndex from tvb.adapters.datatypes.db.projections import ProjectionMatrixIndex @@ -220,7 +220,8 @@ def import_default_project(admin_user=None): if not admin_user: admin_user = TestFactory.create_user() - project_path = os.path.join(os.path.dirname(tvb_data.__file__), 'Default_Project.zip') + #project_path = 
os.path.join(os.path.dirname(tvb_data.__file__), 'Default_Project.zip') + project_path = TVBZenodoDataset().fetch_data('Default_Project.zip') import_service = ImportService() import_service.import_project_structure(project_path, admin_user.id) return import_service.created_projects[0] @@ -312,7 +313,8 @@ def import_zip_connectivity(user, project, zip_path=None, subject=DataTypeMetaDa same_process=True): if zip_path is None: - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') count = dao.count_datatypes(project.id, ConnectivityIndex) view_model = ZIPConnectivityImporterModel() diff --git a/tvb_framework/tvb/tests/framework/core/neotraits/forms_test.py b/tvb_framework/tvb/tests/framework/core/neotraits/forms_test.py index fb49b1ae1c..7add7679e5 100644 --- a/tvb_framework/tvb/tests/framework/core/neotraits/forms_test.py +++ b/tvb_framework/tvb/tests/framework/core/neotraits/forms_test.py @@ -27,7 +27,8 @@ import uuid import numpy import pytest -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.basic.neotraits.api import Attr, Float, Int, NArray, List from tvb.core.entities.file.simulator.view_model import SimulatorAdapterModel @@ -54,7 +55,7 @@ def teardown_method(self): self.clean_database() def test_upload_field(self): - connectivity_file = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + connectivity_file = TVBZenodoDataset().fetch_data('connectivity_96.zip') data_file = Str('Test Upload Field') required_type = '.zip' upload_field = TraitUploadField(data_file, required_type, self.name) diff --git a/tvb_framework/tvb/tests/framework/core/services/import_service_test.py b/tvb_framework/tvb/tests/framework/core/services/import_service_test.py index 54a904c6df..37795c621d 100644
--- a/tvb_framework/tvb/tests/framework/core/services/import_service_test.py +++ b/tvb_framework/tvb/tests/framework/core/services/import_service_test.py @@ -31,7 +31,8 @@ import os import pytest -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from PIL import Image from time import sleep from tvb.adapters.datatypes.db.mapped_value import ValueWrapperIndex @@ -88,7 +89,8 @@ def test_import_export(self, user_factory, project_factory, value_wrapper_factor """ test_user = user_factory() test_project = project_factory(test_user, "TestImportExport", "test_desc") - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(test_user, test_project, zip_path) value_wrapper = value_wrapper_factory(test_user, test_project) ProjectService.set_datatype_visibility(value_wrapper.gid, False) @@ -138,7 +140,8 @@ def test_import_export_existing(self, user_factory, project_factory): """ test_user = user_factory() test_project = project_factory(test_user, "TestImportExport2") - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(test_user, test_project, zip_path) count_operations = dao.get_filtered_operations(test_project.id, None, is_count=True) @@ -182,7 +185,8 @@ def test_export_import_figures(self, user_factory, project_factory): # Prepare data user = user_factory() project = project_factory(user, "TestImportExportFigures") - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'paupau.zip') + #zip_path = 
os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'paupau.zip') + zip_path = TVBZenodoDataset().fetch_data('paupau.zip') TestFactory.import_zip_connectivity(user, project, zip_path) figure_service = FigureService() diff --git a/tvb_framework/tvb/tests/framework/core/services/links_test.py b/tvb_framework/tvb/tests/framework/core/services/links_test.py index 5a08dffdb5..36dbe4b312 100644 --- a/tvb_framework/tvb/tests/framework/core/services/links_test.py +++ b/tvb_framework/tvb/tests/framework/core/services/links_test.py @@ -32,7 +32,8 @@ """ import pytest import os -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.adapters.datatypes.db.sensors import SensorsIndex from tvb.adapters.exporters.export_manager import ExportManager @@ -66,9 +67,12 @@ def initialize_two_projects(self, dummy_datatype_index_factory, project_factory, src_user = user_factory(username="Links Test") self.src_usr_id = src_user.id self.src_project = project_factory(src_user, "Src_Project") - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'paupau.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'paupau.zip') + tvb_data = TVBZenodoDataset() + zip_path = tvb_data.fetch_data("paupau.zip") self.red_datatype = TestFactory.import_zip_connectivity(src_user, self.src_project, zip_path, "John") - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'sensors', 'eeg_unitvector_62.txt.bz2') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'sensors', 'eeg_unitvector_62.txt.bz2') + zip_path = tvb_data.fetch_data('eeg_unitvector_62.txt.bz2') self.blue_datatype = TestFactory.import_sensors(src_user, self.src_project, zip_path, SensorTypesEnum.TYPE_EEG) assert 1 == self.red_datatypes_in(self.src_project.id) diff --git a/tvb_framework/tvb/tests/framework/core/services/project_service_test.py 
b/tvb_framework/tvb/tests/framework/core/services/project_service_test.py index 7b876beb64..aae5609378 100644 --- a/tvb_framework/tvb/tests/framework/core/services/project_service_test.py +++ b/tvb_framework/tvb/tests/framework/core/services/project_service_test.py @@ -33,7 +33,8 @@ import pytest import sqlalchemy -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.basic.profile import TvbProfile from tvb.core.entities.model import model_datatype, model_project, model_operation from tvb.core.entities.storage import dao @@ -331,11 +332,14 @@ def test_empty_project_has_zero_disk_size(self): def test_project_disk_size(self): project1 = TestFactory.create_project(self.test_user, 'test_proj1') - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + tvb_data = TVBZenodoDataset() + zip_path = tvb_data.fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, project1, zip_path, 'testSubject') project2 = TestFactory.create_project(self.test_user, 'test_proj2') - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + zip_path = tvb_data.fetch_data('connectivity_76.zip') TestFactory.import_zip_connectivity(self.test_user, project2, zip_path, 'testSubject') projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0] diff --git a/tvb_framework/tvb/tests/framework/core/services/serialization_manager_test.py b/tvb_framework/tvb/tests/framework/core/services/serialization_manager_test.py index a7c48010b5..19b4191113 100644 --- a/tvb_framework/tvb/tests/framework/core/services/serialization_manager_test.py +++ b/tvb_framework/tvb/tests/framework/core/services/serialization_manager_test.py @@ -29,7 +29,8 @@ 
""" from os import path -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.adapters.forms.model_forms import ModelsEnum @@ -45,7 +46,8 @@ class TestSerializationManager(TransactionalTestCase): def transactional_setup_method(self): self.test_user = TestFactory.create_user(username="test_user") self.test_project = TestFactory.create_project(self.test_user, "Test") - zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + #zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") self.connectivity = TestFactory.get_entity(self.test_project, ConnectivityIndex) diff --git a/tvb_framework/tvb/tests/framework/interfaces/rest/datatype_resource_test.py b/tvb_framework/tvb/tests/framework/interfaces/rest/datatype_resource_test.py index e811994a36..bb60be982f 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/rest/datatype_resource_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/rest/datatype_resource_test.py @@ -27,7 +27,8 @@ import os import flask import pytest -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.interfaces.rest.commons.exceptions import InvalidIdentifierException from tvb.interfaces.rest.server.resources.datatype.datatype_resource import RetrieveDatatypeResource @@ -53,7 +54,8 @@ def test_server_retrieve_datatype_inexistent_gid(self, mocker): def test_server_retrieve_datatype(self, mocker): self._mock_user(mocker) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 
'connectivity_96.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) datatypes_in_project = self.get_data_in_project_resource.get(project_gid=self.test_project.gid) @@ -79,7 +81,8 @@ def send_file_dummy(path, as_attachment, attachment_filename): def test_server_get_operations_for_datatype(self, mocker): self._mock_user(mocker) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) datatypes_in_project = self.get_data_in_project_resource.get(project_gid=self.test_project.gid) diff --git a/tvb_framework/tvb/tests/framework/interfaces/rest/project_resource_test.py b/tvb_framework/tvb/tests/framework/interfaces/rest/project_resource_test.py index a1320bc645..0fc8a18144 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/rest/project_resource_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/rest/project_resource_test.py @@ -27,7 +27,8 @@ import os import flask import pytest -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.interfaces.rest.commons.exceptions import InvalidIdentifierException from tvb.interfaces.rest.commons.strings import Strings @@ -45,7 +46,9 @@ def transactional_setup_method(self): self.test_user = TestFactory.create_user('Rest_User') self.test_project_without_data = TestFactory.create_project(self.test_user, 'Rest_Project', users=[self.test_user.id]) self.test_project_with_data = TestFactory.create_project(self.test_user, 'Rest_Project2', users=[self.test_user.id]) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = 
os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + tvb_data = TVBZenodoDataset() + zip_path = tvb_data.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project_with_data, zip_path) def test_server_get_data_in_project_inexistent_gid(self, mocker): diff --git a/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py b/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py index 1443c9db3d..df22050065 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py @@ -25,11 +25,13 @@ # import numpy -import tvb_data.connectivity -import tvb_data.surfaceData -import tvb_data.sensors -import tvb_data.regionMapping -import tvb_data.projectionMatrix +#import tvb_data.connectivity +#import tvb_data.surfaceData +#import tvb_data.sensors +#import tvb_data.regionMapping +#import tvb_data.projectionMatrix +from tvb.datasets import TVBZenodoDataset + from os import path from uuid import UUID from unittest.mock import patch @@ -90,7 +92,8 @@ def test_index(self): assert not result_dict['errors'], 'Some errors were encountered!' def test_set_connectivity(self): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") self.sess_mock['connectivity'] = connectivity.gid @@ -126,7 +129,8 @@ def test_set_coupling_params(self): assert self.session_stored_simulator.coupling.b[0] == [0.0], "b value was not set correctly." 
def test_set_surface(self): - zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) surface = TestFactory.get_entity(self.test_project, SurfaceIndex) @@ -147,14 +151,17 @@ def test_set_surface_none(self): assert self.session_stored_simulator.surface is None, "Surface should not be set." def test_set_cortex_without_local_connectivity(self): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") - zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) - text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + #text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + text_file = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt') region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, text_file, surface.gid, connectivity.gid) @@ -176,14 +183,17 @@ def test_set_cortex_without_local_connectivity(self): "coupling_strength was not set correctly." 
def test_set_cortex_with_local_connectivity(self, local_connectivity_index_factory): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") - zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) - text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + #text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + text_file = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt' ) region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, text_file, surface.gid, connectivity.gid) @@ -214,7 +224,8 @@ def test_set_stimulus_none(self): assert self.session_stored_simulator.stimulus is None, "Stimulus should not be set." def test_set_stimulus(self): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') connectivity_index = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) weight_array = numpy.zeros(connectivity_index.number_of_regions) @@ -443,29 +454,35 @@ def test_set_monitor_params(self): assert not rendering_rules['renderer'].include_next_button, 'Next button should not be displayed!' 
def set_region_mapping(self): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") - zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) - text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + #text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + text_file = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt') region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, text_file, surface.gid, connectivity.gid) return region_mapping def set_eeg(self): - eeg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'eeg_unitvector_62.txt') + #eeg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'eeg_unitvector_62.txt') + eeg_sensors_file = TVBZenodoDataset().fetch_data('eeg_unitvector_62.txt') eeg_sensors = TestFactory.import_sensors(self.test_user, self.test_project, eeg_sensors_file, SensorTypesEnum.TYPE_EEG) - surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + surface_file = TVBZenodoDataset().fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - eeg_projection_file = 
path.join(path.dirname(tvb_data.projectionMatrix.__file__), + #eeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), 'projection_eeg_62_surface_16k.mat') + eeg_projection_file = TVBZenodoDataset().fetch_data('projection_eeg_62_surface_16k.mat') eeg_projection = TestFactory.import_projection_matrix(self.test_user, self.test_project, eeg_projection_file, eeg_sensors.gid, surface.gid) return eeg_sensors, eeg_projection @@ -502,16 +519,19 @@ def test_set_eeg_monitor_params(self): "Projection wasn't stored correctly." def set_meg(self): - meg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'meg_brainstorm_276.txt') + #meg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'meg_brainstorm_276.txt') + meg_sensors_file = TVBZenodoDataset().fetch_data('meg_brainstorm_276.txt') meg_sensors = TestFactory.import_sensors(self.test_user, self.test_project, meg_sensors_file, SensorTypesEnum.TYPE_MEG) - surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + surface_file = TVBZenodoDataset().fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - meg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), + #meg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), 'projection_meg_276_surface_16k.npy') + meg_projection_file = TVBZenodoDataset().fetch_data('projection_meg_276_surface_16k.npy') meg_projection = TestFactory.import_projection_matrix(self.test_user, self.test_project, meg_projection_file, meg_sensors.gid, surface.gid) return meg_sensors, meg_projection @@ -549,16 +569,19 @@ def test_set_meg_monitor_params(self): "Projection wasn't stored correctly." 
def set_seeg(self): - seeg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'seeg_588.txt') + #seeg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'seeg_588.txt') + seeg_sensors_file = TVBZenodoDataset().fetch_data('seeg_588.txt') seeg_sensors = TestFactory.import_sensors(self.test_user, self.test_project, seeg_sensors_file, SensorTypesEnum.TYPE_INTERNAL) - surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + surface_file = TVBZenodoDataset().fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - seeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), + #seeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), 'projection_seeg_588_surface_16k.npy') + seeg_projection_file = TVBZenodoDataset().fetch_data('projection_seeg_588_surface_16k.npy') seeg_projection = TestFactory.import_projection_matrix(self.test_user, self.test_project, seeg_projection_file, seeg_sensors.gid, surface.gid) return seeg_sensors, seeg_projection @@ -697,7 +720,8 @@ def test_load_burst_history(self): assert len(burst_parameters['burst_list']) == 3, "The burst configurations where not stored." 
def test_reset_simulator_configuration(self): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") self.sess_mock['connectivity'] = connectivity.gid @@ -753,7 +777,8 @@ def test_rename_burst(self): assert dao.get_bursts_for_project(self.test_project.id)[0].name == new_name, "Name wasn't actually changed." def test_copy_simulator_configuration(self): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project) @@ -787,7 +812,8 @@ def test_copy_simulator_configuration(self): assert rendering_rules['renderer'].disable_fields, 'Fragments should be read-only!' def test_load_burst(self): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project) @@ -837,7 +863,8 @@ def test_launch_simulation(self): assert burst_config.status == 'running', 'Simulation launching has failed!' 
def test_launch_branch_simulation(self): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") self.sess_mock['input_simulation_name_id'] = 'HappySimulation' diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 451e5f2070..5bf9c86ba3 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -186,3 +186,5 @@ def __eq__(self, other): return self.rec == tvb_data.rec return False + + From bf2148e2239302fd7e63ef2cf98bef4e451562f9 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sat, 8 Jul 2023 23:12:43 +0530 Subject: [PATCH 23/84] fix tvb_data not found error and one identation error --- .../uploaders/connectivity_measure_importer_test.py | 6 ++++-- .../interfaces/rest/operation_resource_test.py | 13 ++++++++----- .../web/controllers/simulator_controller_test.py | 3 +-- 3 files changed, 13 insertions(+), 9 deletions(-) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_measure_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_measure_importer_test.py index e7003a17d7..c314e5739b 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_measure_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_measure_importer_test.py @@ -32,7 +32,8 @@ import os.path import pytest -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.graph import ConnectivityMeasureIndex from tvb.adapters.uploaders.connectivity_measure_importer import ConnectivityMeasureImporter from tvb.adapters.uploaders.connectivity_measure_importer import 
ConnectivityMeasureImporterModel @@ -48,7 +49,8 @@ class TestConnectivityMeasureImporter(BaseTestCase): """ def setup_method(self): - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') self.test_user = TestFactory.create_user('Test_User_CM') self.test_project = TestFactory.create_project(self.test_user, "Test_Project_CM") self.connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") diff --git a/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py b/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py index 4600fc6686..ffb92d8413 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py @@ -29,8 +29,8 @@ from uuid import UUID import flask import pytest -import tvb_data - +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.analyzers.fourier_adapter import FFTAdapterModel from tvb.basic.exceptions import TVBException from tvb.core.neocom import h5 @@ -65,7 +65,8 @@ def test_server_get_operation_status_inexistent_gid(self, mocker): def test_server_get_operation_status(self, mocker): self._mock_user(mocker) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) request_mock = mocker.patch.object(flask, 'request', spec={}) @@ -84,7 +85,8 @@ def test_server_get_operation_results_inexistent_gid(self, mocker): def 
test_server_get_operation_results(self, mocker): self._mock_user(mocker) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) request_mock = mocker.patch.object(flask, 'request', spec={}) @@ -98,7 +100,8 @@ def test_server_get_operation_results(self, mocker): def test_server_get_operation_results_failed_operation(self, mocker): self._mock_user(mocker) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_90.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_90.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_90.zip') with pytest.raises(TVBException): TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) diff --git a/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py b/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py index df22050065..7459fa9bc0 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py @@ -480,8 +480,7 @@ def set_eeg(self): surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #eeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), - 'projection_eeg_62_surface_16k.mat') + #eeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), 'projection_eeg_62_surface_16k.mat') eeg_projection_file = TVBZenodoDataset().fetch_data('projection_eeg_62_surface_16k.mat') eeg_projection = 
TestFactory.import_projection_matrix(self.test_user, self.test_project, eeg_projection_file, eeg_sensors.gid, surface.gid) From 38b8fab3d60f1a5a10e486005ac0da577c85834f Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sat, 8 Jul 2023 23:35:23 +0530 Subject: [PATCH 24/84] fix the identation --- .../interfaces/web/controllers/simulator_controller_test.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py b/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py index 7459fa9bc0..82df6fd227 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py @@ -528,8 +528,7 @@ def set_meg(self): surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #meg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), - 'projection_meg_276_surface_16k.npy') + #meg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__),'projection_meg_276_surface_16k.npy') meg_projection_file = TVBZenodoDataset().fetch_data('projection_meg_276_surface_16k.npy') meg_projection = TestFactory.import_projection_matrix(self.test_user, self.test_project, meg_projection_file, meg_sensors.gid, surface.gid) @@ -578,8 +577,7 @@ def set_seeg(self): surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #seeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), - 'projection_seeg_588_surface_16k.npy') + #seeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), 'projection_seeg_588_surface_16k.npy') seeg_projection_file = TVBZenodoDataset().fetch_data('projection_seeg_588_surface_16k.npy') 
seeg_projection = TestFactory.import_projection_matrix(self.test_user, self.test_project, seeg_projection_file, seeg_sensors.gid, surface.gid) From 4c9a0bba0251e77524ebef476dd2e7f985792be7 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sun, 9 Jul 2023 00:07:11 +0530 Subject: [PATCH 25/84] fix the data loading issues that i missed earlier. --- .../adapters/uploaders/projection_matrix_importer_test.py | 4 ++-- tvb_framework/tvb/tests/framework/core/services/links_test.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py index 9ed34fff46..73504c9eb6 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py @@ -75,8 +75,8 @@ def test_wrong_shape(self): """ Verifies that importing a different shape throws exception """ - file_path = os.path.join(os.path.abspath(os.path.dirname(dataset.__file__)), - 'projection_eeg_62_surface_16k.mat') + #file_path = os.path.join(os.path.abspath(os.path.dirname(dataset.__file__)), 'projection_eeg_62_surface_16k.mat') + file_path = TVBZenodoDataset().fetch_data('projection_eeg_62_surface_16k.mat') try: TestFactory.import_projection_matrix(self.test_user, self.test_project, file_path, self.sensors.gid, diff --git a/tvb_framework/tvb/tests/framework/core/services/links_test.py b/tvb_framework/tvb/tests/framework/core/services/links_test.py index 36dbe4b312..3e6ff86f3f 100644 --- a/tvb_framework/tvb/tests/framework/core/services/links_test.py +++ b/tvb_framework/tvb/tests/framework/core/services/links_test.py @@ -211,7 +211,8 @@ def build(): Project dest will have the derived VW and links """ # add a connectivity to src project and link it to dest project - zip_path = 
os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') conn = TestFactory.import_zip_connectivity(self.dst_user, self.src_project, zip_path, "John") self.algorithm_service.create_link(conn.id, self.dest_project.id) From 82046c36fcb4e1011719f3c2e5267b3d46a371f6 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sun, 9 Jul 2023 00:33:16 +0530 Subject: [PATCH 26/84] some more fixes.. --- .../adapters/analyzers/timeseries_metrics_adapter_test.py | 3 ++- .../tests/framework/adapters/visualizers/brainviewer_test.py | 2 +- tvb_framework/tvb/tests/framework/core/factory.py | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py b/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py index 77d2ceba0c..d3994aa662 100644 --- a/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py @@ -52,7 +52,8 @@ def transactional_setup_method(self): """ self.test_user = TestFactory.create_user() self.test_project = TestFactory.create_project(self.test_user) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) def test_adapter_launch(self, connectivity_factory, region_mapping_factory, diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py b/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py 
index cefc81a116..032c4117af 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py @@ -55,7 +55,7 @@ class TestBrainViewer(TransactionalTestCase): #region_mapping_path = os.path.join(os.path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') tvb_data = TVBZenodoDataset() cortex = tvb_data.fetch_data('cortex_16384.zip') - region_mapping = tvb_data.fetch_data('regionMapping_16k_76.txt') + region_mapping_path = tvb_data.fetch_data('regionMapping_16k_76.txt') def transactional_setup_method(self): """ diff --git a/tvb_framework/tvb/tests/framework/core/factory.py b/tvb_framework/tvb/tests/framework/core/factory.py index acd85b78a8..c0a9327604 100644 --- a/tvb_framework/tvb/tests/framework/core/factory.py +++ b/tvb_framework/tvb/tests/framework/core/factory.py @@ -221,7 +221,7 @@ def import_default_project(admin_user=None): admin_user = TestFactory.create_user() #project_path = os.path.join(os.path.dirname(tvb_data.__file__), 'Default_Project.zip') - project_path = TVBBZenodoDataset().fetch_data('Default_Project.zip') + project_path = TVBZenodoDataset().fetch_data('Default_Project.zip') import_service = ImportService() import_service.import_project_structure(project_path, admin_user.id) return import_service.created_projects[0] From 33bab6a50026da8ab78f5e5b690612e32bed43db Mon Sep 17 00:00:00 2001 From: abhi_win Date: Tue, 11 Jul 2023 23:08:17 +0530 Subject: [PATCH 27/84] fix typo --- .../tests/framework/adapters/uploaders/encrypt_decrypt_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py index 02a6252db5..5b2da5609b 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py +++ 
b/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py @@ -57,7 +57,7 @@ class TestEncryptionDecryption(TransactionalTestCase): ( 'projection_meg_276_surface_16k.npy'), ( 'TimeSeriesRegion.h5')]) def test_encrypt_decrypt(self, file_name): - import_export_encryption_handler = StorageInterface.get_import_export_encryption_handler() + handler = StorageInterface.get_import_export_encryption_handler() # Generate a private key and public key From 19e1d8d1e9d5834813c9d5bedbee656cebd0c592 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Wed, 12 Jul 2023 23:32:37 +0530 Subject: [PATCH 28/84] removed the commented lines, removed the tvb_data setup from ci --- .github/workflows/build.yml | 18 ---- .github/workflows/lib-tests.yml | 11 +-- .github/workflows/notebooks.yml | 15 ---- .github/workflows/pg-tests.yml | 16 ---- .github/workflows/win-tests.yml | 15 ---- tvb_build/build_step1.py | 2 - tvb_build/docker/Dockerfile-build | 6 +- tvb_build/docker/Dockerfile-run | 5 +- tvb_build/docker/Dockerfile-win | 6 -- .../scripts/datatypes/lookup_tables.py | 1 - .../code_update_scripts/4455_update_code.py | 8 +- .../code_update_scripts/4750_update_code.py | 2 - .../code_update_scripts/6093_update_code.py | 2 - .../code_update_scripts/6600_update_code.py | 2 - .../tvb/core/services/user_service.py | 2 - .../tvb/interfaces/command/benchmark.py | 13 +-- .../brain_tumor_connectivity_importer.py | 10 +-- .../interfaces/rest/client/examples/utils.py | 1 - .../framework/adapters/analyzers/bct_test.py | 2 - .../timeseries_metrics_adapter_test.py | 2 - .../creators/stimulus_creator_test.py | 10 +-- .../simulator/simulator_adapter_test.py | 9 +- .../connectivity_measure_importer_test.py | 2 - .../adapters/uploaders/csv_importer_test.py | 15 ++-- .../uploaders/encrypt_decrypt_test.py | 6 +- .../adapters/uploaders/gifti_importer_test.py | 9 +- .../uploaders/mat_timeseries_importer_test.py | 10 +-- .../adapters/uploaders/nifti_importer_test.py | 18 ++-- 
.../adapters/uploaders/obj_importer_test.py | 9 +- .../projection_matrix_importer_test.py | 16 ++-- .../uploaders/region_mapping_importer_test.py | 17 ++-- .../uploaders/sensors_importer_test.py | 9 +- .../uploaders/zip_surface_importer_test.py | 2 - .../adapters/visualizers/brainviewer_test.py | 13 +-- .../visualizers/connectivityviewer_test.py | 2 - .../visualizers/sensorsviewer_test.py | 15 ++-- .../visualizers/surfaceviewer_test.py | 12 +-- .../tvb/tests/framework/core/factory.py | 5 +- .../framework/core/neotraits/forms_test.py | 1 - .../core/services/import_service_test.py | 12 +-- .../framework/core/services/links_test.py | 13 ++- .../core/services/project_service_test.py | 9 +- .../services/serialization_manager_test.py | 2 - .../interfaces/rest/datatype_resource_test.py | 9 +- .../rest/operation_resource_test.py | 20 +++-- .../interfaces/rest/project_resource_test.py | 6 +- .../controllers/simulator_controller_test.py | 82 ++++++------------- tvb_library/setup.py | 2 +- ...set_test.py => tvb_zenodo_dataset_test.py} | 7 -- 49 files changed, 125 insertions(+), 356 deletions(-) rename tvb_library/tvb/tests/library/datasets/{TVBZenodoDataset_test.py => tvb_zenodo_dataset_test.py} (99%) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6f2a1c5c19..5c8a4ec9ea 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -47,25 +47,7 @@ jobs: cd tvb_build bash install_full_tvb.sh - #- name: cache data - # id: cache-data - # uses: actions/cache@v3 - # with: - # path: tvb_data - # key: tvb-data - #- name: download data - # if: steps.cache-data.outputs.cache-hit != 'true' - # run: | - # wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip - # mkdir tvb_data - # unzip tvb_data.zip -d tvb_data - # rm tvb_data.zip - - #- name: setup data - # run: | - # cd tvb_data - # python3 setup.py develop - name: run library tests run: pytest -v tvb_library --cov --cov-report=xml && mv coverage.xml 
coverage-library.xml diff --git a/.github/workflows/lib-tests.yml b/.github/workflows/lib-tests.yml index b9d2d80531..921bf2c63e 100644 --- a/.github/workflows/lib-tests.yml +++ b/.github/workflows/lib-tests.yml @@ -27,16 +27,7 @@ jobs: pip3 install pipenv cd tvb_library && pipenv install -d --python $(which python3) - #- name: download data - # run: | - # wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip - # mkdir tvb_data - # unzip tvb_data.zip -d tvb_data - # rm tvb_data.zip - - #- name: setup data - # run: | - # cd tvb_library && pipenv run bash -c 'cd ../tvb_data && python3 setup.py develop' + - name: importlib_metadata? run: cd tvb_library && pipenv install importlib_metadata diff --git a/.github/workflows/notebooks.yml b/.github/workflows/notebooks.yml index 7728a8a70c..6773c09787 100644 --- a/.github/workflows/notebooks.yml +++ b/.github/workflows/notebooks.yml @@ -40,22 +40,7 @@ jobs: cd tvb_build cmd /k "install_full_tvb.bat" - #- name: cache data - # id: cache-data - # uses: actions/cache@v3 - # with: - # path: tvb_data - # key: tvbdata - #- name: download data - # if: steps.cache-data.outputs.cache-hit != 'true' - # shell: pwsh - # run: | - # Invoke-WebRequest -OutFile C:\\TEMP\\tvb_data.zip -Uri "https://zenodo.org/record/7574266/files/tvb_data.zip?download=1" - # Expand-Archive 'C:\\TEMP\\tvb_data.zip' C:\\tvb_data - # del C:\\TEMP\\tvb_data.zip - # cd C:\\tvb_data - # python setup.py develop - name: run notebooks env: diff --git a/.github/workflows/pg-tests.yml b/.github/workflows/pg-tests.yml index 0abac2e24c..e49b3cf3c5 100644 --- a/.github/workflows/pg-tests.yml +++ b/.github/workflows/pg-tests.yml @@ -52,23 +52,7 @@ jobs: - name: setup tvb run: cd tvb_build && bash install_full_tvb.sh - #- name: cache data - # id: cache-data - # uses: actions/cache@v3 - # with: - # path: tvb_data - # key: tvb-data - #- name: download data - # if: steps.cache-data.outputs.cache-hit != 'true' - # run: | - # wget -q 
https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip - # mkdir tvb_data - # unzip tvb_data.zip -d tvb_data - # rm tvb_data.zip - - #- name: setup data - # run: cd tvb_data && python3 setup.py develop - name: run framework tests run: | diff --git a/.github/workflows/win-tests.yml b/.github/workflows/win-tests.yml index 84411eb114..59007561f9 100644 --- a/.github/workflows/win-tests.yml +++ b/.github/workflows/win-tests.yml @@ -35,22 +35,7 @@ jobs: pip install --user -r tvb_framework/requirements.txt pip install --user --no-build-isolation tvb-gdist - #- name: cache data - # id: cache-data - # uses: actions/cache@v3 - # with: - # path: tvb_data - # key: tvbdata - #- name: download data - # if: steps.cache-data.outputs.cache-hit != 'true' - # shell: pwsh - # run: | - # Invoke-WebRequest -OutFile C:\\TEMP\\tvb_data.zip -Uri "https://zenodo.org/record/7574266/files/tvb_data.zip?download=1" - # Expand-Archive 'C:\\TEMP\\tvb_data.zip' C:\\tvb_data - # del C:\\TEMP\\tvb_data.zip - # cd C:\\tvb_data - # python setup.py develop - name: run framework tests shell: pwsh diff --git a/tvb_build/build_step1.py b/tvb_build/build_step1.py index f7abecb888..3541f95bd7 100644 --- a/tvb_build/build_step1.py +++ b/tvb_build/build_step1.py @@ -44,7 +44,6 @@ import requests import tvb_bin -#import tvb_data from tvb.datasets import TVBZenodoDataset from subprocess import Popen, PIPE @@ -54,7 +53,6 @@ FW_FOLDER = os.path.join(TVB_ROOT, 'tvb_framework') LICENSE_PATH = os.path.join(FW_FOLDER, 'LICENSE') RELEASE_NOTES_PATH = os.path.join(TVB_ROOT, 'tvb_documentation', 'RELEASE_NOTES') -#DATA_SRC_FOLDER = os.path.dirname(tvb_data.__file__) DATA_SRC_FOLDER = TVBZenodoDataset().extract_dir DEMOS_MATLAB_FOLDER = os.path.join(TVB_ROOT, 'tvb_documentation', 'matlab') diff --git a/tvb_build/docker/Dockerfile-build b/tvb_build/docker/Dockerfile-build index 5299909282..4dc15045d6 100644 --- a/tvb_build/docker/Dockerfile-build +++ b/tvb_build/docker/Dockerfile-build @@ -38,11 
+38,7 @@ RUN /bin/bash -c "source activate tvb-run"; \ /opt/conda/envs/tvb-run/bin/jupyter notebook --generate-config; \ echo "c.NotebookApp.password='sha1:12bff019c253:9daecd92c2e9bdb10b3b8a06767a74a0fe078d7c'">>$JUPYTER_CONFIG/jupyter_notebook_config.py -#RUN wget https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip; \ -# mkdir tvb_data; unzip tvb_data.zip -d tvb_data; rm tvb_data.zip; \ -# cd tvb_data; \ -# /opt/conda/envs/tvb-run/bin/python setup.py develop;\ -# /opt/conda/envs/tvb-docs/bin/python setup.py develop + WORKDIR $USER_HOME COPY requirements_group requirements.txt diff --git a/tvb_build/docker/Dockerfile-run b/tvb_build/docker/Dockerfile-run index d634c1d1d2..4ac08786a3 100644 --- a/tvb_build/docker/Dockerfile-run +++ b/tvb_build/docker/Dockerfile-run @@ -31,10 +31,7 @@ RUN /bin/bash -c "source activate tvb-run"; \ $ENV_BIN/jupyter notebook --generate-config; \ echo "c.NotebookApp.password='sha1:12bff019c253:9daecd92c2e9bdb10b3b8a06767a74a0fe078d7c'">>$JUPYTER_CONFIG/jupyter_notebook_config.py -RUN wget https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip; \ - mkdir tvb_data; unzip tvb_data.zip -d tvb_data; rm tvb_data.zip; \ - cd tvb_data; \ - $ENV_BIN/python setup.py develop + WORKDIR $USER_HOME COPY requirements_group requirements.txt diff --git a/tvb_build/docker/Dockerfile-win b/tvb_build/docker/Dockerfile-win index 40d21eba5f..4152cea4fd 100644 --- a/tvb_build/docker/Dockerfile-win +++ b/tvb_build/docker/Dockerfile-win @@ -17,12 +17,6 @@ RUN activate tvb-run && pip install lockfile scikit-build RUN activate tvb-run && pip install syncrypto -# Download and install tvb data -RUN mkdir C:\\TVB_CODE -WORKDIR /TVB_CODE -ADD https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 tvb_data.zip -RUN tar -xf tvb_data.zip && dir && del tvb_data.zip -RUN activate tvb-run && python setup.py develop COPY requirements_group requirements.txt RUN activate tvb-run && pip install -r requirements.txt 
diff --git a/tvb_contrib/tvb/contrib/scripts/datatypes/lookup_tables.py b/tvb_contrib/tvb/contrib/scripts/datatypes/lookup_tables.py index cb4d573ccb..8396a739c1 100644 --- a/tvb_contrib/tvb/contrib/scripts/datatypes/lookup_tables.py +++ b/tvb_contrib/tvb/contrib/scripts/datatypes/lookup_tables.py @@ -76,7 +76,6 @@ class LookUpTable(HasTraits): @staticmethod def populate_table(result, source_file): source_full_path = TVBZenodoDataset().fetch_data(source_file) - #source_full_path = try_get_absolute_path("tvb_data.tables", source_file) zip_data = numpy.load(source_full_path) result.df = zip_data['df'] diff --git a/tvb_framework/tvb/core/code_versions/code_update_scripts/4455_update_code.py b/tvb_framework/tvb/core/code_versions/code_update_scripts/4455_update_code.py index 7b8636295c..00be86d4a3 100644 --- a/tvb_framework/tvb/core/code_versions/code_update_scripts/4455_update_code.py +++ b/tvb_framework/tvb/core/code_versions/code_update_scripts/4455_update_code.py @@ -29,7 +29,6 @@ """ import os -#import tvb_data.obj from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.obj_importer import ObjSurfaceImporter from tvb.basic.logger.builder import get_logger @@ -37,10 +36,9 @@ from tvb.core.entities.storage import dao from tvb.datatypes.surfaces import SurfaceTypesEnum -#DATA_FILE_EEG_CAP = os.path.join(os.path.dirname(tvb_data.obj.__file__), "eeg_cap.obj") -#DATA_FILE_FACE = os.path.join(os.path.dirname(tvb_data.obj.__file__), "face_surface.obj") -DATA_FILE_EEG_CAP = TVBZenodoDataset().fetch_data("eeg_cap.obj") -DATA_FILE_FACE = TVBZenodoDataset().fetch_data('face_surface.obj') +dataset = TVBZenodoDataset() +DATA_FILE_EEG_CAP = dataset.fetch_data("eeg_cap.obj") +DATA_FILE_FACE = dataset.fetch_data('face_surface.obj') LOGGER = get_logger(__name__) PAGE_SIZE = 20 diff --git a/tvb_framework/tvb/core/code_versions/code_update_scripts/4750_update_code.py b/tvb_framework/tvb/core/code_versions/code_update_scripts/4750_update_code.py index 
d2c999c579..1cb0ac2dbc 100644 --- a/tvb_framework/tvb/core/code_versions/code_update_scripts/4750_update_code.py +++ b/tvb_framework/tvb/core/code_versions/code_update_scripts/4750_update_code.py @@ -28,14 +28,12 @@ .. moduleauthor:: Bogdan Neacsa """ import os -#import tvb_data.sensors from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.sensors_importer import SensorsImporter from tvb.basic.logger.builder import get_logger from tvb.core.entities.storage import dao from tvb.core.services.operation_service import OperationService -#DATA_FILE = os.path.join(os.path.dirname(tvb_data.sensors.__file__), "seeg_39.txt.bz2") DATA_FILE = TVBZenodoDataset().fetch_data('seeg_39.txt.bz2') LOGGER = get_logger(__name__) diff --git a/tvb_framework/tvb/core/code_versions/code_update_scripts/6093_update_code.py b/tvb_framework/tvb/core/code_versions/code_update_scripts/6093_update_code.py index 59d30e5dbc..76e0a7dd79 100644 --- a/tvb_framework/tvb/core/code_versions/code_update_scripts/6093_update_code.py +++ b/tvb_framework/tvb/core/code_versions/code_update_scripts/6093_update_code.py @@ -30,7 +30,6 @@ .. 
moduleauthor:: Mihai Andrei """ import os -#import tvb_data.obj from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.obj_importer import ObjSurfaceImporter from tvb.basic.logger.builder import get_logger @@ -38,7 +37,6 @@ from tvb.core.services.operation_service import OperationService from tvb.datatypes.surfaces import SurfaceTypesEnum -#DATA_FILE_FACE = os.path.join(os.path.dirname(tvb_data.obj.__file__), "face_surface.obj") DATA_FILE_FACE = TVBZenodoDataset().fetch_data('face_surface.obj') LOGGER = get_logger(__name__) diff --git a/tvb_framework/tvb/core/code_versions/code_update_scripts/6600_update_code.py b/tvb_framework/tvb/core/code_versions/code_update_scripts/6600_update_code.py index ea6f5e1d4a..03ef9089e6 100644 --- a/tvb_framework/tvb/core/code_versions/code_update_scripts/6600_update_code.py +++ b/tvb_framework/tvb/core/code_versions/code_update_scripts/6600_update_code.py @@ -33,10 +33,8 @@ from tvb.basic.logger.builder import get_logger from tvb.core.entities.storage import dao from tvb.core.services.import_service import ImportService -#import tvb_data from tvb.datasets import TVBZenodoDataset -#DATA_FILE = os.path.join(os.path.dirname(tvb_data.__file__), "Default_Project.zip") DATA_FILE = TVBZenodoDataset().fetch_data('Default_Project.zip') LOGGER = get_logger(__name__) diff --git a/tvb_framework/tvb/core/services/user_service.py b/tvb_framework/tvb/core/services/user_service.py index 7a0cbd62a6..0fe08e7415 100644 --- a/tvb_framework/tvb/core/services/user_service.py +++ b/tvb_framework/tvb/core/services/user_service.py @@ -33,7 +33,6 @@ import os import random import six -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.basic.logger.builder import get_logger from tvb.basic.profile import TvbProfile @@ -121,7 +120,6 @@ def create_user(self, username=None, display_name=None, password=None, password2 user = dao.store_entity(user) if role == ROLE_ADMINISTRATOR and not skip_import: - #to_upload = 
os.path.join(os.path.dirname(tvb_data.__file__), "Default_Project.zip") to_upload = TVBZenodoDataset().fetch_data('Default_Project.zip') if not os.path.exists(to_upload): self.logger.warning("Could not find DEFAULT PROJECT at path %s. You might want to import it " diff --git a/tvb_framework/tvb/interfaces/command/benchmark.py b/tvb_framework/tvb/interfaces/command/benchmark.py index 9acfd1ca85..02a536d459 100644 --- a/tvb_framework/tvb/interfaces/command/benchmark.py +++ b/tvb_framework/tvb/interfaces/command/benchmark.py @@ -31,7 +31,6 @@ from os import path -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.core.entities.file.simulator.view_model import HeunDeterministicViewModel @@ -57,16 +56,12 @@ def _fire_simulation(project_id, simulator_vm): def _create_bench_project(): prj = new_project("benchmark_project_ %s" % datetime.now()) - #data_dir = path.abspath(path.dirname(tvb_data.__file__)) - #zip_path = path.join(data_dir, 'connectivity', 'connectivity_68.zip') - tvb_data = TVBZenodoDataset() - zip_path = tvb_data.fetch_data('connectivity_68.zip') + dataset = TVBZenodoDataset() + zip_path = dataset.fetch_data('connectivity_68.zip') import_conn_zip(prj.id, zip_path) - #zip_path = path.join(data_dir, 'connectivity', 'connectivity_96.zip') - zip_path = tvb_data.fetch_data('connectivity_96.zip') + zip_path = dataset.fetch_data('connectivity_96.zip') import_conn_zip(prj.id, zip_path) - #zip_path = path.join(data_dir, 'connectivity', 'connectivity_192.zip') - zip_path = tvb_data.fetch_data('connectivity_192.zip') + zip_path = dataset.fetch_data('connectivity_192.zip') import_conn_zip(prj.id, zip_path) conn68 = dao.get_generic_entity(ConnectivityIndex, 68, "number_of_regions")[0] diff --git a/tvb_framework/tvb/interfaces/command/demos/importers/brain_tumor_connectivity_importer.py b/tvb_framework/tvb/interfaces/command/demos/importers/brain_tumor_connectivity_importer.py index 
195ccba551..7bbb0a369a 100644 --- a/tvb_framework/tvb/interfaces/command/demos/importers/brain_tumor_connectivity_importer.py +++ b/tvb_framework/tvb/interfaces/command/demos/importers/brain_tumor_connectivity_importer.py @@ -31,6 +31,7 @@ .. moduleauthor:: Bogdan Valean """ import sys +from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.region_mapping_importer import RegionMappingImporter, RegionMappingImporterModel from tvb.adapters.uploaders.zip_surface_importer import ZIPSurfaceImporter, ZIPSurfaceImporterModel from tvb.basic.logger.builder import get_logger @@ -66,12 +67,9 @@ def import_tumor_connectivities(project_id, folder_path): def import_surface_rm(project_id, conn_gid): # Import surface and region mapping from tvb_data berlin subjects (68 regions) - #rm_file = try_get_absolute_path("tvb_data", "berlinSubjects/DH_20120806/DH_20120806_RegionMapping.txt") - #surface_zip_file = try_get_absolute_path("tvb_data", "berlinSubjects/DH_20120806/DH_20120806_Surface_Cortex.zip") - from tvb.datasets import TVBZenodoDataset - tvb_data = TVBZenodoDataset() - rm_file = tvb_data.fetch_data('DH_20120806_RegionMapping.txt') - surface_zip_file = tvb_data.fetch_data('DH_20120806_Surface_Cortex.zip') + dataset = TVBZenodoDataset() + rm_file = dataset.fetch_data('DH_20120806_RegionMapping.txt') + surface_zip_file = dataset.fetch_data('DH_20120806_Surface_Cortex.zip') surface_importer = ABCAdapter.build_adapter_from_class(ZIPSurfaceImporter) surface_imp_model = ZIPSurfaceImporterModel() diff --git a/tvb_framework/tvb/interfaces/rest/client/examples/utils.py b/tvb_framework/tvb/interfaces/rest/client/examples/utils.py index b88b675f05..0111c74980 100644 --- a/tvb_framework/tvb/interfaces/rest/client/examples/utils.py +++ b/tvb_framework/tvb/interfaces/rest/client/examples/utils.py @@ -28,7 +28,6 @@ import sys import time -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.basic.logger.builder import get_logger from 
tvb.core.entities.model.model_operation import STATUS_ERROR, STATUS_CANCELED, STATUS_FINISHED diff --git a/tvb_framework/tvb/tests/framework/adapters/analyzers/bct_test.py b/tvb_framework/tvb/tests/framework/adapters/analyzers/bct_test.py index 202fbf8f66..b8aca921ff 100644 --- a/tvb_framework/tvb/tests/framework/adapters/analyzers/bct_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/analyzers/bct_test.py @@ -29,7 +29,6 @@ """ import os -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.analyzers.bct_adapters import BaseBCTModel from tvb.core.entities.model.model_operation import Algorithm @@ -57,7 +56,6 @@ def transactional_setup_method(self): self.test_user = TestFactory.create_user("BCT_User") self.test_project = TestFactory.create_project(self.test_user, "BCT-Project") # Make sure Connectivity is in DB - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') self.connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) diff --git a/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py b/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py index d3994aa662..20b089f39c 100644 --- a/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py @@ -29,7 +29,6 @@ """ import os -#import tvb_data from tvb.datasets import TVBZenodoDataset import json @@ -52,7 +51,6 @@ def transactional_setup_method(self): """ self.test_user = TestFactory.create_user() self.test_project = TestFactory.create_project(self.test_user) - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') 
TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) diff --git a/tvb_framework/tvb/tests/framework/adapters/creators/stimulus_creator_test.py b/tvb_framework/tvb/tests/framework/adapters/creators/stimulus_creator_test.py index 46169239df..64122a9dde 100644 --- a/tvb_framework/tvb/tests/framework/adapters/creators/stimulus_creator_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/creators/stimulus_creator_test.py @@ -27,8 +27,6 @@ import json import os import numpy -#import tvb_data -#import tvb_data.surfaceData from tvb.datasets import TVBZenodoDataset from tvb.adapters.creators.stimulus_creator import RegionStimulusCreator, SurfaceStimulusCreator @@ -53,15 +51,13 @@ def transactional_setup_method(self): self.test_project = TestFactory.create_project(self.test_user, "Stim_Project") self.storage_interface = StorageInterface() - tvb_data = TVBZenodoDataset() + dataset = TVBZenodoDataset() - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') - zip_path = tvb_data.fetch_data('connectivity_66.zip') + zip_path = dataset.fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) self.connectivity = TestFactory.get_entity(self.test_project, ConnectivityIndex) - #cortex = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - cortex = tvb_data.fetch_data('cortex_16384.zip') + cortex = dataset.fetch_data('cortex_16384.zip') self.surface = TestFactory.import_surface_zip(self.test_user, self.test_project, cortex, SurfaceTypesEnum.CORTICAL_SURFACE) diff --git a/tvb_framework/tvb/tests/framework/adapters/simulator/simulator_adapter_test.py b/tvb_framework/tvb/tests/framework/adapters/simulator/simulator_adapter_test.py index ac22a01d8e..5f9210c3c0 100644 --- a/tvb_framework/tvb/tests/framework/adapters/simulator/simulator_adapter_test.py +++ 
b/tvb_framework/tvb/tests/framework/adapters/simulator/simulator_adapter_test.py @@ -28,8 +28,6 @@ .. moduleauthor:: Lia Domide """ -#import tvb_data.surfaceData -#import tvb_data.regionMapping from tvb.datasets import TVBZenodoDataset from os import path @@ -112,13 +110,12 @@ def test_estimate_execution_time(self, connectivity_index_factory): self.simulator_adapter.configure(model) estimation1 = self.simulator_adapter.get_execution_time_approximation(model) + dataset = TVBZenodoDataset() # import surfaceData and region mapping - #cortex_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - cortex_file = TVBZenodoDataset().fetch_data('cortex_16384.zip') + cortex_file = dataset.fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, cortex_file, SurfaceTypesEnum.CORTICAL_SURFACE) - #rm_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') - rm_file = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt') + rm_file = dataset.fetch_data('regionMapping_16k_76.txt') region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, rm_file, surface.gid, model.connectivity.hex) local_conn = TestFactory.create_local_connectivity(self.test_user, self.test_project, surface.gid) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_measure_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_measure_importer_test.py index c314e5739b..bc0d58d340 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_measure_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_measure_importer_test.py @@ -32,7 +32,6 @@ import os.path import pytest -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.graph import ConnectivityMeasureIndex from tvb.adapters.uploaders.connectivity_measure_importer import 
ConnectivityMeasureImporter @@ -49,7 +48,6 @@ class TestConnectivityMeasureImporter(BaseTestCase): """ def setup_method(self): - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') self.test_user = TestFactory.create_user('Test_User_CM') self.test_project = TestFactory.create_project(self.test_user, "Test_Project_CM") diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/csv_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/csv_importer_test.py index 9b5891eae2..63a4f4ecf8 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/csv_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/csv_importer_test.py @@ -47,7 +47,6 @@ class TestCSVConnectivityParser(BaseTestCase): - #BASE_PTH = path.join(path.dirname(tvb_data.__file__), 'dti_pipeline_toronto') def test_parse_happy(self): cap_pth = TVBZenodoDataset().fetch_data('output_ConnectionDistanceMatrix.csv') @@ -63,6 +62,7 @@ class TestCSVConnectivityImporter(BaseTestCase): """ Unit-tests for csv connectivity importer. 
""" + dataset = TVBZenodoDataset() def setup_method(self): self.test_user = TestFactory.create_user() @@ -78,11 +78,8 @@ def teardown_method(self): def _import_csv_test_connectivity(self, reference_connectivity_gid, subject): ### First prepare input data: - #data_dir = path.abspath(path.dirname(tvb_data.__file__)) - - #toronto_dir = path.join(data_dir, 'dti_pipeline_toronto') - weights = TVBZenodoDataset().fetch_data('output_ConnectionCapacityMatrix.csv') - tracts = TVBZenodoDataset().fetch_data('output_ConnectionDistanceMatrix.csv') + weights = self.dataset.fetch_data('output_ConnectionCapacityMatrix.csv') + tracts = self.dataset.fetch_data('output_ConnectionDistanceMatrix.csv') weights_tmp = weights + '.tmp' tracts_tmp = tracts + '.tmp' @@ -101,8 +98,7 @@ def test_happy_flow_import(self): Test that importing a CFF generates at least one DataType in DB. """ - #zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') + zip_path = self.dataset.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, subject=TEST_SUBJECT_A) field = FilterChain.datatype + '.subject' @@ -134,8 +130,7 @@ def test_happy_flow_import(self): assert (reference_connectivity.region_labels == imported_connectivity.region_labels).all() def test_bad_reference(self): - #zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) field = FilterChain.datatype + '.subject' filters = FilterChain('', [field], [TEST_SUBJECT_A], ['!=']) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py index 
5b2da5609b..d5d226e123 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py @@ -32,7 +32,6 @@ import pyAesCrypt import pytest -#import tvb_data from tvb.datasets import TVBZenodoDataset import tempfile @@ -49,7 +48,7 @@ class TestEncryptionDecryption(TransactionalTestCase): - tvb_data = TVBZenodoDataset() + dataset = TVBZenodoDataset() # noinspection PyTypeChecker @pytest.mark.parametrize(" file_name", [('connectivity_76.zip'), @@ -80,8 +79,7 @@ def test_encrypt_decrypt(self, file_name): with open(private_key_path, 'wb') as f: f.write(pem) - #path_to_file = os.path.join(os.path.dirname(tvb_data.__file__), dir_name, file_name) - path_to_file = self.tvb_data.fetch_data(file_name) + path_to_file = self.dataset.fetch_data(file_name) # Create model for ABCUploader diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/gifti_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/gifti_importer_test.py index ffa9976a1c..c0735b4c45 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/gifti_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/gifti_importer_test.py @@ -29,7 +29,6 @@ """ import os -#import tvb_data.gifti as demo_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.gifti.parser import GIFTIParser from tvb.core.services.exceptions import OperationException @@ -43,13 +42,11 @@ class TestGIFTISurfaceImporter(BaseTestCase): Unit-tests for GIFTI Surface importer. 
""" - #GIFTI_SURFACE_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'sample.cortex.gii') - #GIFTI_TIME_SERIES_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'sample.time_series.gii') WRONG_GII_FILE = os.path.abspath(__file__) - tvb_data = TVBZenodoDataset() - GIFTI_SURFACE_FILE = tvb_data.fetch_data('sample.cortex.gii') - GIFTI_TIME_SERIES_FILE = tvb_data.fetch_data( 'sample.time_series.gii') + dataset = TVBZenodoDataset() + GIFTI_SURFACE_FILE = dataset.fetch_data('sample.cortex.gii') + GIFTI_TIME_SERIES_FILE = dataset.fetch_data( 'sample.time_series.gii') def setup_method(self): self.test_user = TestFactory.create_user('Gifti_User') diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/mat_timeseries_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/mat_timeseries_importer_test.py index 492dc2e640..855b08d504 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/mat_timeseries_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/mat_timeseries_importer_test.py @@ -32,7 +32,6 @@ import os -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.time_series import TimeSeriesRegionIndex from tvb.adapters.uploaders.mat_timeseries_importer import RegionMatTimeSeriesImporterModel, RegionTimeSeriesImporter @@ -41,12 +40,9 @@ class TestMatTimeSeriesImporter(BaseTestCase): - #base_pth = os.path.join(os.path.dirname(tvb_data.__file__), 'berlinSubjects', 'QL_20120814') - tvb_data = TVBZenodoDataset() - bold_path = tvb_data.fetch_data('QL_BOLD_regiontimecourse.mat') - #bold_path = os.path.join(base_pth, 'QL_BOLD_regiontimecourse.mat') - connectivity_path = tvb_data.fetch_data('QL_20120814_Connectivity.zip') - #connectivity_path = os.path.join(base_pth, 'QL_20120814_Connectivity.zip') + dataset = TVBZenodoDataset() + bold_path = dataset.fetch_data('QL_BOLD_regiontimecourse.mat') + connectivity_path = dataset.fetch_data('QL_20120814_Connectivity.zip') def 
setup_method(self): self.test_user = TestFactory.create_user('Mat_Timeseries_User') diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py index 97413f1124..ef8da6a888 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py @@ -31,8 +31,6 @@ import os import numpy -#import tvb_data -#import tvb_data.nifti as demo_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.adapters.datatypes.db.region_mapping import RegionVolumeMappingIndex @@ -52,18 +50,13 @@ class TestNIFTIImporter(BaseTestCase): Unit-tests for NIFTI importer. """ - #NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii') - #GZ_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii.gz') - #TIMESERIES_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'time_series_152.nii.gz') - #WRONG_NII_FILE = os.path.abspath(__file__) - #TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'volume_mapping/mapping_FS_76.txt') - tvb_data = TVBZenodoDataset() - NII_FILE = tvb_data.fetch_data('minimal.nii') - GZ_NII_FILE = tvb_data.fetch_data('minimal.nii.gz') - TIMESERIES_NII_FILE = tvb_data.fetch_data('time_series_152.nii.gz') + dataset = TVBZenodoDataset() + NII_FILE = dataset.fetch_data('minimal.nii') + GZ_NII_FILE = dataset.fetch_data('minimal.nii.gz') + TIMESERIES_NII_FILE = dataset.fetch_data('time_series_152.nii.gz') WRONG_NII_FILE = os.path.abspath(__file__) #? - TXT_FILE = tvb_data.fetch_data('mapping_FS_76.txt') + TXT_FILE = dataset.fetch_data('mapping_FS_76.txt') DEFAULT_ORIGIN = [[0.0, 0.0, 0.0]] UNKNOWN_STR = "unknown" @@ -152,7 +145,6 @@ def test_import_region_mapping(self): """ This method tests import of a NIFTI file compressed in GZ format. 
""" - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") to_link_conn = TestFactory.get_entity(self.test_project, ConnectivityIndex) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/obj_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/obj_importer_test.py index bea8679714..45a3792b87 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/obj_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/obj_importer_test.py @@ -29,7 +29,6 @@ """ import os -#import tvb_data.obj from tvb.datasets import TVBZenodoDataset from tvb.core.neocom import h5 from tvb.datatypes.surfaces import SurfaceTypesEnum @@ -42,11 +41,9 @@ class TestObjSurfaceImporter(BaseTestCase): Unit-tests for Obj Surface importer. """ - #torus = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'test_torus.obj') - #face = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj') - tvb_data = TVBZenodoDataset() - torus = tvb_data.fetch_data('test_torus.obj') - face = tvb_data.fetch_data('face_surface.obj') + dataset = TVBZenodoDataset() + torus = dataset.fetch_data('test_torus.obj') + face = dataset.fetch_data('face_surface.obj') def setup_method(self): diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py index 73504c9eb6..1f413def77 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py @@ -31,9 +31,6 @@ import os -#import tvb_data.projectionMatrix as dataset -#import tvb_data.sensors -#import tvb_data.surfaceData from tvb.datasets import 
TVBZenodoDataset from tvb.adapters.datatypes.db.projections import ProjectionMatrixIndex from tvb.core.services.exceptions import OperationException @@ -47,6 +44,7 @@ class TestProjectionMatrix(BaseTestCase): """ Unit-tests for CFF-importer. """ + dataset = TVBZenodoDataset() def setup_method(self): """ @@ -55,13 +53,11 @@ def setup_method(self): self.test_user = TestFactory.create_user("UserPM") self.test_project = TestFactory.create_project(self.test_user) - #zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'eeg_brainstorm_65.txt') - zip_path = TVBZenodoDataset().fetch_data('eeg_brainstorm_65.txt') + zip_path = self.dataset.fetch_data('eeg_brainstorm_65.txt') self.sensors = TestFactory.import_sensors(self.test_user, self.test_project, zip_path, SensorTypesEnum.TYPE_EEG) - #zip_path = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') + zip_path = self.dataset.fetch_data('cortex_16384.zip') self.surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) @@ -75,8 +71,7 @@ def test_wrong_shape(self): """ Verifies that importing a different shape throws exception """ - #file_path = os.path.join(os.path.abspath(os.path.dirname(dataset.__file__)), 'projection_eeg_62_surface_16k.mat') - file_path = TVBZenodoDataset().fetch_data('projection_eeg_62_surface_16k.mat') + file_path = self.dataset.fetch_data('projection_eeg_62_surface_16k.mat') try: TestFactory.import_projection_matrix(self.test_user, self.test_project, file_path, self.sensors.gid, @@ -90,8 +85,7 @@ def test_happy_flow_surface_import(self): Verifies the happy flow for importing a surface. 
""" dt_count_before = TestFactory.get_entity_count(self.test_project, ProjectionMatrixIndex) - file_path = os.path.join(os.path.abspath(os.path.dirname(dataset.__file__)), - 'projection_eeg_65_surface_16k.npy') + file_path = self.dataset.fetch_data('projection_eeg_65_surface_16k.npy') TestFactory.import_projection_matrix(self.test_user, self.test_project, file_path, self.sensors.gid, self.surface.gid, False) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/region_mapping_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/region_mapping_importer_test.py index 50d54feaa9..c27d7152d3 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/region_mapping_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/region_mapping_importer_test.py @@ -30,8 +30,6 @@ import os import tvb.tests.framework.adapters.uploaders.test_data as test_data -#import tvb_data.regionMapping as demo_data -#import tvb_data.surfaceData from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.surface import SurfaceIndex from tvb.basic.neotraits.ex import TraitValueError @@ -49,15 +47,12 @@ class TestRegionMappingImporter(BaseTestCase): """ Unit-tests for RegionMapping importer. 
""" - tvb_data = TVBZenodoDataset() + dataset = TVBZenodoDataset() - #TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.txt') - #ZIP_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.zip') - #BZ2_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.bz2') - TXT_FILE = tvb_data.fetch_data('regionMapping_16k_76.txt') - ZIP_FILE = tvb_data.fetch_data('regionMapping_16k_76.zip') - BZ2_FILE = tvb_data.fetch_data('regionMapping_16k_76.bz2') + TXT_FILE = dataset.fetch_data('regionMapping_16k_76.txt') + ZIP_FILE = dataset.fetch_data('regionMapping_16k_76.zip') + BZ2_FILE = dataset.fetch_data('regionMapping_16k_76.bz2') # Wrong data WRONG_FILE_1 = os.path.join(os.path.dirname(test_data.__file__), 'region_mapping_wrong_1.txt') @@ -73,12 +68,12 @@ def setup_method(self): self.test_user = TestFactory.create_user("UserRM") self.test_project = TestFactory.create_project(self.test_user) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + zip_path = self.dataset.fetch_data("connectivity_76.zip") self.connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") field = FilterChain.datatype + '.surface_type' filters = FilterChain('', [field], [SurfaceTypesEnum.CORTICAL_SURFACE.value], ['==']) - cortex = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + cortex = self.dataset.fetch_data('cortex_16384.zip') TestFactory.import_surface_zip(self.test_user, self.test_project, cortex, SurfaceTypesEnum.CORTICAL_SURFACE) self.surface = TestFactory.get_entity(self.test_project, SurfaceIndex, filters) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/sensors_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/sensors_importer_test.py index e0a8ede2b8..407484ed49 100644 --- 
a/tvb_framework/tvb/tests/framework/adapters/uploaders/sensors_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/sensors_importer_test.py @@ -30,7 +30,6 @@ import os -#import tvb_data.sensors as demo_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.sensors_importer import SensorsImporter, SensorsImporterModel from tvb.core.neocom import h5 @@ -45,11 +44,9 @@ class TestSensorsImporter(BaseTestCase): """ Unit-tests for Sensors importer. """ - #EEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'eeg_unitvector_62.txt.bz2') - #MEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'meg_151.txt.bz2') - tvb_data = TVBZenodoDataset() - EEG_FILE = tvb_data.fetch_data('eeg_unitvector_62.txt.bz2') - MEG_FILE = tvb_data.fetch_data('meg_151.txt.bz2') + dataset = TVBZenodoDataset() + EEG_FILE = dataset.fetch_data('eeg_unitvector_62.txt.bz2') + MEG_FILE = dataset.fetch_data('meg_151.txt.bz2') def setup_method(self): """ diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/zip_surface_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/zip_surface_importer_test.py index 387398b1bc..65e1152efa 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/zip_surface_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/zip_surface_importer_test.py @@ -30,7 +30,6 @@ import os -#import tvb_data.surfaceData from tvb.datasets import TVBZenodoDataset from tvb.datatypes.surfaces import SurfaceTypesEnum from tvb.tests.framework.core.base_testcase import BaseTestCase @@ -42,7 +41,6 @@ class TestZIPSurfaceImporter(BaseTestCase): Unit-tests for Zip Surface importer. 
""" - #surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'outer_skull_4096.zip') surf_skull = TVBZenodoDataset().fetch_data('outer_skull_4096.zip') def setup_method(self): diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py b/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py index 032c4117af..c697ac2011 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py @@ -29,8 +29,6 @@ """ import os -#import tvb_data.surfaceData -#import tvb_data.regionMapping from tvb.datasets import TVBZenodoDataset from tvb.core.neocom import h5 @@ -51,11 +49,9 @@ class TestBrainViewer(TransactionalTestCase): EXPECTED_EXTRA_KEYS = ['urlMeasurePointsLabels', 'urlMeasurePoints', 'pageSize', 'shellObject', 'extended_view', 'legendLabels', 'labelsStateVar', 'labelsModes', 'title'] - #cortex = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - #region_mapping_path = os.path.join(os.path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') - tvb_data = TVBZenodoDataset() - cortex = tvb_data.fetch_data('cortex_16384.zip') - region_mapping_path = tvb_data.fetch_data('regionMapping_16k_76.txt') + dataset = TVBZenodoDataset() + cortex = dataset.fetch_data('cortex_16384.zip') + region_mapping_path = dataset.fetch_data('regionMapping_16k_76.txt') def transactional_setup_method(self): """ @@ -66,8 +62,7 @@ def transactional_setup_method(self): self.test_user = TestFactory.create_user('Brain_Viewer_User') self.test_project = TestFactory.create_project(self.test_user, 'Brain_Viewer_Project') - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') - zip_path = self.tvb_data.fetch_data('connectivity_96.zip') + zip_path = self.dataset.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, 
self.test_project, zip_path, "John") connectivity_idx = TestFactory.get_entity(self.test_project, ConnectivityIndex) assert connectivity_idx is not None diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/connectivityviewer_test.py b/tvb_framework/tvb/tests/framework/adapters/visualizers/connectivityviewer_test.py index f297bc9762..502289adf5 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/connectivityviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/connectivityviewer_test.py @@ -28,7 +28,6 @@ """ import os -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex @@ -52,7 +51,6 @@ def transactional_setup_method(self): self.test_user = TestFactory.create_user("UserCVV") self.test_project = TestFactory.create_project(self.test_user) - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) self.connectivity_index = TestFactory.get_entity(self.test_project, ConnectivityIndex) diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/sensorsviewer_test.py b/tvb_framework/tvb/tests/framework/adapters/visualizers/sensorsviewer_test.py index 29307b091b..39bff3dac1 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/sensorsviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/sensorsviewer_test.py @@ -29,8 +29,6 @@ """ import os -#import tvb_data.obj -#import tvb_data.sensors from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.sensors import SensorsIndex from tvb.adapters.datatypes.db.surface import SurfaceIndex @@ -47,6 +45,7 @@ class TestSensorViewers(TransactionalTestCase): """ Unit-tests for Sensors viewers. 
""" + dataset = TVBZenodoDataset() EXPECTED_KEYS_INTERNAL = {'urlMeasurePoints': None, 'urlMeasurePointsLabels': None, 'noOfMeasurePoints': 103, 'minMeasure': 0, 'maxMeasure': 103, 'urlMeasure': None, 'shellObject': None} @@ -72,8 +71,7 @@ def test_launch_eeg(self): Check that all required keys are present in output from EegSensorViewer launch. """ # Import Sensors - #zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'eeg_unitvector_62.txt.bz2') - zip_path = TVBZenodoDataset().fetch_data('eeg_unitvector_62.txt.bz2') + zip_path = self.dataset.fetch_data('eeg_unitvector_62.txt.bz2') TestFactory.import_sensors(self.test_user, self.test_project, zip_path, SensorTypesEnum.TYPE_EEG) field = FilterChain.datatype + '.sensors_type' @@ -81,8 +79,7 @@ def test_launch_eeg(self): sensors_index = TestFactory.get_entity(self.test_project, SensorsIndex, filters) # Import EEGCap - #cap_path = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'eeg_cap.obj') - cap_path = TVBZenodoDataset().fetch_data('eeg_cap.obj') + cap_path = self.dataset.fetch_data('eeg_cap.obj') TestFactory.import_surface_obj(self.test_user, self.test_project, cap_path, SurfaceTypesEnum.EEG_CAP_SURFACE) field = FilterChain.datatype + '.surface_type' filters = FilterChain('', [field], [SurfaceTypesEnum.EEG_CAP_SURFACE.value], ['==']) @@ -109,8 +106,7 @@ def test_launch_meg(self): Check that all required keys are present in output from MEGSensorViewer launch. """ - #zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'meg_151.txt.bz2') - zip_path = TVBZenodoDataset().fetch_data('meg_151.txt.bz2') + zip_path = self.dataset.fetch_data('meg_151.txt.bz2') TestFactory.import_sensors(self.test_user, self.test_project, zip_path, SensorTypesEnum.TYPE_MEG) @@ -130,8 +126,7 @@ def test_launch_internal(self): """ Check that all required keys are present in output from InternalSensorViewer launch. 
""" - #zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'seeg_39.txt.bz2') - zip_path = TVBZenodoDataset().fetch_data('seeg_39.txt.bz2') + zip_path = self.dataset.fetch_data('seeg_39.txt.bz2') sensors_index = TestFactory.import_sensors(self.test_user, self.test_project, zip_path, SensorTypesEnum.TYPE_INTERNAL) viewer = SensorsViewer() diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/surfaceviewer_test.py b/tvb_framework/tvb/tests/framework/adapters/visualizers/surfaceviewer_test.py index 422ff64896..c1ff9220bf 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/surfaceviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/surfaceviewer_test.py @@ -29,8 +29,6 @@ """ import os -#import tvb_data.surfaceData -#import tvb_data.regionMapping as demo_data from tvb.datasets import TVBZenodoDataset from uuid import UUID @@ -57,23 +55,21 @@ def transactional_setup_method(self): creates a test user, a test project, a connectivity and a surface; imports a CFF data-set """ + dataset = TVBZenodoDataset() test_user = TestFactory.create_user('Surface_Viewer_User') self.test_project = TestFactory.create_project(test_user, 'Surface_Viewer_Project') - #surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - surf_skull = TVBZenodoDataset().fetch_data('cortex_16384.zip') + surf_skull = dataset.fetch_data('cortex_16384.zip') self.surface = TestFactory.import_surface_zip(test_user, self.test_project, surf_skull, SurfaceTypesEnum.CORTICAL_SURFACE) assert self.surface is not None - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') + zip_path = dataset.fetch_data('connectivity_76.zip') TestFactory.import_zip_connectivity(test_user, self.test_project, zip_path, "John") connectivity_index = TestFactory.get_entity(self.test_project, ConnectivityIndex) assert 
connectivity_index is not None - #TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.txt') - TXT_FILE = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt') + TXT_FILE = dataset.fetch_data('regionMapping_16k_76.txt') self.region_mapping = TestFactory.import_region_mapping(test_user, self.test_project, TXT_FILE, self.surface.gid, connectivity_index.gid) assert self.region_mapping is not None diff --git a/tvb_framework/tvb/tests/framework/core/factory.py b/tvb_framework/tvb/tests/framework/core/factory.py index c0a9327604..0fa6e7a171 100644 --- a/tvb_framework/tvb/tests/framework/core/factory.py +++ b/tvb_framework/tvb/tests/framework/core/factory.py @@ -37,7 +37,6 @@ import os import random import uuid -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.adapters.datatypes.db.local_connectivity import LocalConnectivityIndex @@ -77,7 +76,7 @@ class TestFactory(object): """ Expose mostly static methods for creating different entities used in tests. 
""" - + @staticmethod def get_entity(project, expected_data, filters=None): """ @@ -220,7 +219,6 @@ def import_default_project(admin_user=None): if not admin_user: admin_user = TestFactory.create_user() - #project_path = os.path.join(os.path.dirname(tvb_data.__file__), 'Default_Project.zip') project_path = TVBZenodoDataset().fetch_data('Default_Project.zip') import_service = ImportService() import_service.import_project_structure(project_path, admin_user.id) @@ -314,7 +312,6 @@ def import_zip_connectivity(user, project, zip_path=None, subject=DataTypeMetaDa if zip_path is None: zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') count = dao.count_datatypes(project.id, ConnectivityIndex) view_model = ZIPConnectivityImporterModel() diff --git a/tvb_framework/tvb/tests/framework/core/neotraits/forms_test.py b/tvb_framework/tvb/tests/framework/core/neotraits/forms_test.py index 7add7679e5..a9bef543e3 100644 --- a/tvb_framework/tvb/tests/framework/core/neotraits/forms_test.py +++ b/tvb_framework/tvb/tests/framework/core/neotraits/forms_test.py @@ -27,7 +27,6 @@ import uuid import numpy import pytest -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.basic.neotraits.api import Attr, Float, Int, NArray, List diff --git a/tvb_framework/tvb/tests/framework/core/services/import_service_test.py b/tvb_framework/tvb/tests/framework/core/services/import_service_test.py index 37795c621d..4315b787ee 100644 --- a/tvb_framework/tvb/tests/framework/core/services/import_service_test.py +++ b/tvb_framework/tvb/tests/framework/core/services/import_service_test.py @@ -31,7 +31,6 @@ import os import pytest -#import tvb_data from tvb.datasets import TVBZenodoDataset from PIL import Image from time import sleep @@ -59,7 +58,7 @@ class TestImportService(BaseTestCase): """ This class contains tests for the tvb.core.services.import_service module. 
""" - + dataset = TVBZenodoDataset() def setup_method(self): """ Reset the database before each test. @@ -89,8 +88,7 @@ def test_import_export(self, user_factory, project_factory, value_wrapper_factor """ test_user = user_factory() test_project = project_factory(test_user, "TestImportExport", "test_desc") - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(test_user, test_project, zip_path) value_wrapper = value_wrapper_factory(test_user, test_project) ProjectService.set_datatype_visibility(value_wrapper.gid, False) @@ -140,8 +138,7 @@ def test_import_export_existing(self, user_factory, project_factory): """ test_user = user_factory() test_project = project_factory(test_user, "TestImportExport2") - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(test_user, test_project, zip_path) count_operations = dao.get_filtered_operations(test_project.id, None, is_count=True) @@ -185,8 +182,7 @@ def test_export_import_figures(self, user_factory, project_factory): # Prepare data user = user_factory() project = project_factory(user, "TestImportExportFigures") - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'paupau.zip') - zip_path = TVBZenodoDataset().fetch_data('paupau.zip') + zip_path = self.dataset.fetch_data('paupau.zip') TestFactory.import_zip_connectivity(user, project, zip_path) figure_service = FigureService() diff --git a/tvb_framework/tvb/tests/framework/core/services/links_test.py b/tvb_framework/tvb/tests/framework/core/services/links_test.py index 3e6ff86f3f..5c317f2520 100644 --- 
a/tvb_framework/tvb/tests/framework/core/services/links_test.py +++ b/tvb_framework/tvb/tests/framework/core/services/links_test.py @@ -32,7 +32,6 @@ """ import pytest import os -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.adapters.datatypes.db.sensors import SensorsIndex @@ -50,6 +49,8 @@ class _BaseLinksTest(TransactionalTestCase): + dataset = TVBZenodoDataset() + @pytest.fixture() def initialize_two_projects(self, dummy_datatype_index_factory, project_factory, user_factory): """ @@ -67,12 +68,9 @@ def initialize_two_projects(self, dummy_datatype_index_factory, project_factory, src_user = user_factory(username="Links Test") self.src_usr_id = src_user.id self.src_project = project_factory(src_user, "Src_Project") - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'paupau.zip') - tvb_data = TVBZenodoDataset() - zip_path = tvb_data.fetch_data("paupau.zip") + zip_path = self.dataset.fetch_data("paupau.zip") self.red_datatype = TestFactory.import_zip_connectivity(src_user, self.src_project, zip_path, "John") - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'sensors', 'eeg_unitvector_62.txt.bz2') - zip_path = tvb_data.fetch_data('eeg_unitvector_62.txt.bz2') + zip_path = self.dataset.fetch_data('eeg_unitvector_62.txt.bz2') self.blue_datatype = TestFactory.import_sensors(src_user, self.src_project, zip_path, SensorTypesEnum.TYPE_EEG) assert 1 == self.red_datatypes_in(self.src_project.id) @@ -211,8 +209,7 @@ def build(): Project dest will have the derived VW and links """ # add a connectivity to src project and link it to dest project - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') + zip_path = self.dataset.fetch_data('connectivity_96.zip') conn = TestFactory.import_zip_connectivity(self.dst_user, self.src_project, 
zip_path, "John") self.algorithm_service.create_link(conn.id, self.dest_project.id) diff --git a/tvb_framework/tvb/tests/framework/core/services/project_service_test.py b/tvb_framework/tvb/tests/framework/core/services/project_service_test.py index aae5609378..324c91beb0 100644 --- a/tvb_framework/tvb/tests/framework/core/services/project_service_test.py +++ b/tvb_framework/tvb/tests/framework/core/services/project_service_test.py @@ -33,7 +33,6 @@ import pytest import sqlalchemy -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.basic.profile import TvbProfile from tvb.core.entities.model import model_datatype, model_project, model_operation @@ -332,14 +331,12 @@ def test_empty_project_has_zero_disk_size(self): def test_project_disk_size(self): project1 = TestFactory.create_project(self.test_user, 'test_proj1') - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') - tvb_data = TVBZenodoDataset() - zip_path = tvb_data.fetch_data('connectivity_66.zip') + dataset = TVBZenodoDataset() + zip_path = dataset.fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, project1, zip_path, 'testSubject') project2 = TestFactory.create_project(self.test_user, 'test_proj2') - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') - zip_path = tvb_data.fetch_data('connectivity_76.zip') + zip_path = dataset.fetch_data('connectivity_76.zip') TestFactory.import_zip_connectivity(self.test_user, project2, zip_path, 'testSubject') projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0] diff --git a/tvb_framework/tvb/tests/framework/core/services/serialization_manager_test.py b/tvb_framework/tvb/tests/framework/core/services/serialization_manager_test.py index 19b4191113..84611b3637 100644 --- a/tvb_framework/tvb/tests/framework/core/services/serialization_manager_test.py +++ 
b/tvb_framework/tvb/tests/framework/core/services/serialization_manager_test.py @@ -29,7 +29,6 @@ """ from os import path -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex @@ -46,7 +45,6 @@ class TestSerializationManager(TransactionalTestCase): def transactional_setup_method(self): self.test_user = TestFactory.create_user(username="test_user") self.test_project = TestFactory.create_project(self.test_user, "Test") - #zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") self.connectivity = TestFactory.get_entity(self.test_project, ConnectivityIndex) diff --git a/tvb_framework/tvb/tests/framework/interfaces/rest/datatype_resource_test.py b/tvb_framework/tvb/tests/framework/interfaces/rest/datatype_resource_test.py index bb60be982f..5b1563c494 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/rest/datatype_resource_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/rest/datatype_resource_test.py @@ -27,7 +27,6 @@ import os import flask import pytest -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.interfaces.rest.commons.exceptions import InvalidIdentifierException @@ -40,6 +39,8 @@ class TestDatatypeResource(RestResourceTest): + dataset = TVBZenodoDataset() + def transactional_setup_method(self): self.test_user = TestFactory.create_user('Rest_User') self.test_project = TestFactory.create_project(self.test_user, 'Rest_Project', users=[self.test_user.id]) @@ -54,8 +55,7 @@ def test_server_retrieve_datatype_inexistent_gid(self, mocker): def test_server_retrieve_datatype(self, mocker): self._mock_user(mocker) - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 
'connectivity_96.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') + zip_path = self.dataset.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) datatypes_in_project = self.get_data_in_project_resource.get(project_gid=self.test_project.gid) @@ -81,8 +81,7 @@ def send_file_dummy(path, as_attachment, attachment_filename): def test_server_get_operations_for_datatype(self, mocker): self._mock_user(mocker) - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') + zip_path = self.dataset.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) datatypes_in_project = self.get_data_in_project_resource.get(project_gid=self.test_project.gid) diff --git a/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py b/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py index ffb92d8413..4ea3a982a0 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py @@ -29,7 +29,6 @@ from uuid import UUID import flask import pytest -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.analyzers.fourier_adapter import FFTAdapterModel from tvb.basic.exceptions import TVBException @@ -49,6 +48,8 @@ class TestOperationResource(RestResourceTest): + dataset = TVBZenodoDataset() + def transactional_setup_method(self): self.test_user = TestFactory.create_user('Rest_User') self.test_project = TestFactory.create_project(self.test_user, 'Rest_Project', users=[self.test_user.id]) @@ -65,8 +66,7 @@ def test_server_get_operation_status_inexistent_gid(self, mocker): def test_server_get_operation_status(self, mocker): self._mock_user(mocker) - #zip_path = 
os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') + zip_path = self.dataset.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) request_mock = mocker.patch.object(flask, 'request', spec={}) @@ -85,8 +85,7 @@ def test_server_get_operation_results_inexistent_gid(self, mocker): def test_server_get_operation_results(self, mocker): self._mock_user(mocker) - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') + zip_path = self.dataset.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) request_mock = mocker.patch.object(flask, 'request', spec={}) @@ -100,10 +99,13 @@ def test_server_get_operation_results(self, mocker): def test_server_get_operation_results_failed_operation(self, mocker): self._mock_user(mocker) - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_90.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_90.zip') - with pytest.raises(TVBException): - TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) + with pytest.raises(KeyError): + zip_path = self.dataset.fetch_data('connectivity_90.zip') + zip_path = self.dataset.fetch_data('connectivity_96.zip') + zip_path.replace("connectivity_96", "connectivity_90") + print(zip_path) + + TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) request_mock = mocker.patch.object(flask, 'request', spec={}) request_mock.args = {Strings.PAGE_NUMBER: '1'} diff --git a/tvb_framework/tvb/tests/framework/interfaces/rest/project_resource_test.py b/tvb_framework/tvb/tests/framework/interfaces/rest/project_resource_test.py index 0fc8a18144..b6da293795 100644 --- 
a/tvb_framework/tvb/tests/framework/interfaces/rest/project_resource_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/rest/project_resource_test.py @@ -27,7 +27,6 @@ import os import flask import pytest -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.interfaces.rest.commons.exceptions import InvalidIdentifierException @@ -46,9 +45,8 @@ def transactional_setup_method(self): self.test_user = TestFactory.create_user('Rest_User') self.test_project_without_data = TestFactory.create_project(self.test_user, 'Rest_Project', users=[self.test_user.id]) self.test_project_with_data = TestFactory.create_project(self.test_user, 'Rest_Project2', users=[self.test_user.id]) - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') - tvb_data = TVBZenodoDataset() - zip_path = tvb_data.fetch_data('connectivity_96.zip') + dataset = TVBZenodoDataset() + zip_path = dataset.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project_with_data, zip_path) def test_server_get_data_in_project_inexistent_gid(self, mocker): diff --git a/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py b/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py index 82df6fd227..1158476fd6 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py @@ -25,11 +25,6 @@ # import numpy -#import tvb_data.connectivity -#import tvb_data.surfaceData -#import tvb_data.sensors -#import tvb_data.regionMapping -#import tvb_data.projectionMatrix from tvb.datasets import TVBZenodoDataset from os import path @@ -65,7 +60,7 @@ class TestSimulationController(BaseTransactionalControllerTest): - + dataset = TVBZenodoDataset() def transactional_setup_method(self): self.simulator_controller = 
SimulatorController() self.test_user = TestFactory.create_user('SimulationController_User') @@ -92,8 +87,7 @@ def test_index(self): assert not result_dict['errors'], 'Some errors were encountered!' def test_set_connectivity(self): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") self.sess_mock['connectivity'] = connectivity.gid @@ -129,8 +123,7 @@ def test_set_coupling_params(self): assert self.session_stored_simulator.coupling.b[0] == [0.0], "b value was not set correctly." def test_set_surface(self): - #zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') + zip_path = self.dataset.fetch_data('cortex_16384.zip') TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) surface = TestFactory.get_entity(self.test_project, SurfaceIndex) @@ -151,17 +144,14 @@ def test_set_surface_none(self): assert self.session_stored_simulator.surface is None, "Surface should not be set." 
def test_set_cortex_without_local_connectivity(self): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') + zip_path = self.dataset.fetch_data('connectivity_76.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") - #zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') + zip_path = self.dataset.fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') - text_file = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt') + text_file = self.dataset.fetch_data('regionMapping_16k_76.txt') region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, text_file, surface.gid, connectivity.gid) @@ -183,17 +173,14 @@ def test_set_cortex_without_local_connectivity(self): "coupling_strength was not set correctly." 
def test_set_cortex_with_local_connectivity(self, local_connectivity_index_factory): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') + zip_path = self.dataset.fetch_data('connectivity_76.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") - #zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') + zip_path = self.dataset.fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') - text_file = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt' ) + text_file = self.dataset.fetch_data('regionMapping_16k_76.txt' ) region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, text_file, surface.gid, connectivity.gid) @@ -224,8 +211,7 @@ def test_set_stimulus_none(self): assert self.session_stored_simulator.stimulus is None, "Stimulus should not be set." def test_set_stimulus(self): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') connectivity_index = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) weight_array = numpy.zeros(connectivity_index.number_of_regions) @@ -454,34 +440,28 @@ def test_set_monitor_params(self): assert not rendering_rules['renderer'].include_next_button, 'Next button should not be displayed!' 
def set_region_mapping(self): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') + zip_path = self.dataset.fetch_data('connectivity_76.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") - #zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') + zip_path = self.dataset.fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') - text_file = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt') + text_file = self.dataset.fetch_data('regionMapping_16k_76.txt') region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, text_file, surface.gid, connectivity.gid) return region_mapping def set_eeg(self): - #eeg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'eeg_unitvector_62.txt') - eeg_sensors_file = TVBZenodoDataset().fetch_data('eeg_unitvector_62.txt') + eeg_sensors_file = self.dataset.fetch_data('eeg_unitvector_62.txt.bz2') eeg_sensors = TestFactory.import_sensors(self.test_user, self.test_project, eeg_sensors_file, SensorTypesEnum.TYPE_EEG) - #surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - surface_file = TVBZenodoDataset().fetch_data('cortex_16384.zip') + surface_file = self.dataset.fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #eeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), 'projection_eeg_62_surface_16k.mat') - eeg_projection_file = 
TVBZenodoDataset().fetch_data('projection_eeg_62_surface_16k.mat') + eeg_projection_file = self.dataset.fetch_data('projection_eeg_62_surface_16k.mat') eeg_projection = TestFactory.import_projection_matrix(self.test_user, self.test_project, eeg_projection_file, eeg_sensors.gid, surface.gid) return eeg_sensors, eeg_projection @@ -518,18 +498,15 @@ def test_set_eeg_monitor_params(self): "Projection wasn't stored correctly." def set_meg(self): - #meg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'meg_brainstorm_276.txt') - meg_sensors_file = TVBZenodoDataset().fetch_data('meg_brainstorm_276.txt') + meg_sensors_file = self.dataset.fetch_data('meg_brainstorm_276.txt') meg_sensors = TestFactory.import_sensors(self.test_user, self.test_project, meg_sensors_file, SensorTypesEnum.TYPE_MEG) - #surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - surface_file = TVBZenodoDataset().fetch_data('cortex_16384.zip') + surface_file = self.dataset.fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #meg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__),'projection_meg_276_surface_16k.npy') - meg_projection_file = TVBZenodoDataset().fetch_data('projection_meg_276_surface_16k.npy') + meg_projection_file = self.dataset.fetch_data('projection_meg_276_surface_16k.npy') meg_projection = TestFactory.import_projection_matrix(self.test_user, self.test_project, meg_projection_file, meg_sensors.gid, surface.gid) return meg_sensors, meg_projection @@ -567,18 +544,15 @@ def test_set_meg_monitor_params(self): "Projection wasn't stored correctly." 
def set_seeg(self): - #seeg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'seeg_588.txt') - seeg_sensors_file = TVBZenodoDataset().fetch_data('seeg_588.txt') + seeg_sensors_file = self.dataset.fetch_data('seeg_588.txt') seeg_sensors = TestFactory.import_sensors(self.test_user, self.test_project, seeg_sensors_file, SensorTypesEnum.TYPE_INTERNAL) - #surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - surface_file = TVBZenodoDataset().fetch_data('cortex_16384.zip') + surface_file = self.dataset.fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #seeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), 'projection_seeg_588_surface_16k.npy') - seeg_projection_file = TVBZenodoDataset().fetch_data('projection_seeg_588_surface_16k.npy') + seeg_projection_file = self.dataset.fetch_data('projection_seeg_588_surface_16k.npy') seeg_projection = TestFactory.import_projection_matrix(self.test_user, self.test_project, seeg_projection_file, seeg_sensors.gid, surface.gid) return seeg_sensors, seeg_projection @@ -717,8 +691,7 @@ def test_load_burst_history(self): assert len(burst_parameters['burst_list']) == 3, "The burst configurations where not stored." def test_reset_simulator_configuration(self): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") self.sess_mock['connectivity'] = connectivity.gid @@ -774,8 +747,7 @@ def test_rename_burst(self): assert dao.get_bursts_for_project(self.test_project.id)[0].name == new_name, "Name wasn't actually changed." 
def test_copy_simulator_configuration(self): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project) @@ -809,8 +781,7 @@ def test_copy_simulator_configuration(self): assert rendering_rules['renderer'].disable_fields, 'Fragments should be read-only!' def test_load_burst(self): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project) @@ -860,8 +831,7 @@ def test_launch_simulation(self): assert burst_config.status == 'running', 'Simulation launching has failed!' 
def test_launch_branch_simulation(self): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") self.sess_mock['input_simulation_name_id'] = 'HappySimulation' diff --git a/tvb_library/setup.py b/tvb_library/setup.py index 1e971ab73e..c7745984a3 100644 --- a/tvb_library/setup.py +++ b/tvb_library/setup.py @@ -40,7 +40,7 @@ LIBRARY_TEAM = "Marmaduke Woodman, Jan Fousek, Stuart Knock, Paula Sanz Leon, Viktor Jirsa" LIBRARY_REQUIRED_PACKAGES = ["autopep8", "Deprecated", "docutils", "ipywidgets", "lxml", "mako>=1.1.4", "matplotlib", - "networkx", "numba", "numexpr", "numpy", "pooch","pylems", "scipy", "six"] + "networkx", "numba", "numexpr", "numpy", "pooch", "pylems", "scipy", "six"] LIBRARY_REQUIRED_EXTRA = ["h5py", "pytest", "pytest-benchmark", "pytest-xdist", "tvb-gdist", "tvb-data"] diff --git a/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py b/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py similarity index 99% rename from tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py rename to tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py index c03f9256c8..e275410f81 100644 --- a/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py +++ b/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py @@ -33,13 +33,6 @@ from pathlib import Path from tvb.tests.library.base_testcase import BaseTestCase - - - - - - - class Test_TVBZenodoDataset(BaseTestCase): From 808f991b838c5f064dbd230181645f26ef8a9715 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Thu, 13 Jul 2023 00:40:29 +0530 Subject: [PATCH 29/84] fix test; reuse tvbzenododataset instance --- .../framework/adapters/uploaders/nifti_importer_test.py | 2 +- 
.../framework/interfaces/rest/operation_resource_test.py | 7 +++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py index ef8da6a888..e494ce8bbd 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py @@ -145,7 +145,7 @@ def test_import_region_mapping(self): """ This method tests import of a NIFTI file compressed in GZ format. """ - zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') + zip_path = self.dataset.fetch_data('connectivity_76.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") to_link_conn = TestFactory.get_entity(self.test_project, ConnectivityIndex) diff --git a/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py b/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py index 4ea3a982a0..5950e51a0b 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py @@ -102,10 +102,9 @@ def test_server_get_operation_results_failed_operation(self, mocker): with pytest.raises(KeyError): zip_path = self.dataset.fetch_data('connectivity_90.zip') zip_path = self.dataset.fetch_data('connectivity_96.zip') - zip_path.replace("connectivity_96", "connectivity_90") - print(zip_path) - - TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) + zip_path = zip_path.replace("connectivity_96", "connectivity_90") + with pytest.raises(TVBException): + TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) request_mock = mocker.patch.object(flask, 'request', spec={}) request_mock.args = {Strings.PAGE_NUMBER: '1'} From 
6888ca1d59d469c9f009a7c1ebe8690997e3ea15 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sun, 16 Jul 2023 12:30:37 +0530 Subject: [PATCH 30/84] fix the notebooks --- tvb_documentation/demos/encrypt_data.ipynb | 14 ++++--- ...ting_with_rest_api_launch_operations.ipynb | 19 ++++++---- .../interacting_with_the_framework.ipynb | 4 +- .../demos/simulate_for_mouse.ipynb | 38 +++++++++++++++---- 4 files changed, 51 insertions(+), 24 deletions(-) diff --git a/tvb_documentation/demos/encrypt_data.ipynb b/tvb_documentation/demos/encrypt_data.ipynb index f40bdab160..051421a3fc 100644 --- a/tvb_documentation/demos/encrypt_data.ipynb +++ b/tvb_documentation/demos/encrypt_data.ipynb @@ -28,7 +28,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": { "colab": {}, "colab_type": "code", @@ -113,10 +113,12 @@ "outputs": [], "source": [ "# EDIT paths to data files here inside the list\n", - "import tvb_data, os, tvb\n", - "from tvb.basic.readers import try_get_absolute_path\n", + "import os, tvb\n", + "from tvb.datasets import TVBZenodoDataset\n", + "dataset = TVBZenodoDataset()\n", + "\n", "\n", - "paths_to_files = [try_get_absolute_path(\"tvb_data.connectivity\", \"connectivity_76.zip\")]\n", + "paths_to_files = [dataset.fetch_data(\"connectivity_76.zip\")]\n", "import_export_encryption_handler = StorageInterface.get_import_export_encryption_handler()\n", "\n", "buffer_size = TvbProfile.current.hpc.CRYPT_BUFFER_SIZE\n", @@ -151,7 +153,7 @@ "encrypted_password = import_export_encryption_handler.encrypt_password(public_key, password_bytes)\n", "\n", "# EDIT path for saving the encrypted password\n", - "encrypted_password_path = os.path.join(tvb_data.__path__[0], 'connectivity')\n", + "encrypted_password_path = os.path.join(dataset.extract_dir, 'tvb_data','connectivity')\n", "\n", "import_export_encryption_handler.save_encrypted_password(encrypted_password, encrypted_password_path)\n", "\n", @@ -198,7 +200,7 @@ "name": "python", 
"nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.9" + "version": "3.9.0" } }, "nbformat": 4, diff --git a/tvb_documentation/demos/interacting_with_rest_api_launch_operations.ipynb b/tvb_documentation/demos/interacting_with_rest_api_launch_operations.ipynb index 21c88fb5f2..735b493f65 100644 --- a/tvb_documentation/demos/interacting_with_rest_api_launch_operations.ipynb +++ b/tvb_documentation/demos/interacting_with_rest_api_launch_operations.ipynb @@ -27,10 +27,10 @@ "import os\n", "import time\n", "\n", - "import tvb_data\n", "from keycloak import KeycloakOpenID\n", "from tvb.core.entities.model.model_operation import STATUS_ERROR, STATUS_CANCELED, STATUS_FINISHED\n", - "from tvb.interfaces.rest.client.tvb_client import TVBClient" + "from tvb.interfaces.rest.client.tvb_client import TVBClient\n", + "from tvb.datasets import TVBZenodoDataset" ] }, { @@ -39,8 +39,7 @@ "metadata": {}, "outputs": [], "source": [ - "def compute_tvb_data_path(folder, filename):\n", - " return os.path.join(os.path.dirname(tvb_data.__file__), folder, filename)\n", + "\n", "\n", "def monitor_operation(tvb_client, operation_gid):\n", " while True:\n", @@ -95,9 +94,13 @@ "source": [ "from tvb.adapters.uploaders.zip_connectivity_importer import ZIPConnectivityImporterModel, ZIPConnectivityImporter\n", "\n", + "#Loading TVB Zenodo data\n", + "\n", + "dataset = TVBZenodoDataset()\n", + "\n", "# Importing a connectivity from ZIP\n", "zip_connectivity_importer_model = ZIPConnectivityImporterModel()\n", - "zip_connectivity_importer_model.uploaded = compute_tvb_data_path('connectivity', 'connectivity_96.zip')\n", + "zip_connectivity_importer_model.uploaded = dataset.fetch_data(\"connectivity_96.zip\")\n", "zip_connectivity_importer_model.normalization = 'region'\n", "operation_gid = tvb_client.launch_operation(default_project_gid, ZIPConnectivityImporter,\n", " zip_connectivity_importer_model)\n", @@ -122,7 +125,7 @@ "\n", "# Importing a surface from ZIP\n", 
"zip_surface_importer_model = ZIPSurfaceImporterModel()\n", - "zip_surface_importer_model.uploaded = compute_tvb_data_path('surfaceData', 'cortex_16384.zip')\n", + "zip_surface_importer_model.uploaded = dataset.fetch_data('cortex_16384.zip')\n", "zip_surface_importer_model.surface_type = SurfaceTypesEnum.CORTICAL_SURFACE\n", "zip_surface_importer_model.should_center = False\n", "\n", @@ -146,7 +149,7 @@ "\n", "# Importing a region mapping\n", "rm_importer_model = RegionMappingImporterModel()\n", - "rm_importer_model.mapping_file = compute_tvb_data_path('regionMapping', 'regionMapping_16k_76.txt')\n", + "rm_importer_model.mapping_file = dataset.fetch_data('regionMapping_16k_76.txt')\n", "rm_importer_model.connectivity = connectivity_dto.gid\n", "rm_importer_model.surface = surface_gid\n", "\n", @@ -260,4 +263,4 @@ }, "nbformat": 4, "nbformat_minor": 4 -} \ No newline at end of file +} diff --git a/tvb_documentation/demos/interacting_with_the_framework.ipynb b/tvb_documentation/demos/interacting_with_the_framework.ipynb index d13afe11be..80a66a5c11 100644 --- a/tvb_documentation/demos/interacting_with_the_framework.ipynb +++ b/tvb_documentation/demos/interacting_with_the_framework.ipynb @@ -100,8 +100,8 @@ "outputs": [], "source": [ "import os\n", - "import tvb_data\n", - "p = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity/connectivity_66.zip')\n", + "from tvb.dataset import TVBZenodoDataset\n", + "p = TVBZenodoDataset.fetch_data('connectivity_66.zip')\n", "import_op = import_conn_zip(proj.id, p)\n", "\n", "import_op = wait_to_finish(import_op)\n", diff --git a/tvb_documentation/demos/simulate_for_mouse.ipynb b/tvb_documentation/demos/simulate_for_mouse.ipynb index cfc5fda7f6..0f8fea4ebd 100644 --- a/tvb_documentation/demos/simulate_for_mouse.ipynb +++ b/tvb_documentation/demos/simulate_for_mouse.ipynb @@ -63,14 +63,35 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": { "tags": [] }, - "outputs": [], + 
"outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "file tvb_data.zip is downloaded at C:\\Users\\Abhijit_asus\\TVB\\DATASETS\\.cache\\TVB_Data\\c042692ba786b0ecebdffd58e9efac21-tvb_data.zip\n" + ] + }, + { + "ename": "NameError", + "evalue": "name 'import_conn_h5' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mNameError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[1], line 5\u001b[0m\n\u001b[0;32m 2\u001b[0m dataset \u001b[39m=\u001b[39m TVBZenodoDataset()\n\u001b[0;32m 4\u001b[0m connectivity_path \u001b[39m=\u001b[39m dataset\u001b[39m.\u001b[39mfetch_data(\u001b[39m\"\u001b[39m\u001b[39mConnectivity.h5\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[1;32m----> 5\u001b[0m import_op \u001b[39m=\u001b[39m import_conn_h5(\u001b[39m1\u001b[39m, connectivity_path)\n\u001b[0;32m 6\u001b[0m import_op \u001b[39m=\u001b[39m wait_to_finish(import_op)\n\u001b[0;32m 7\u001b[0m import_op\n", + "\u001b[1;31mNameError\u001b[0m: name 'import_conn_h5' is not defined" + ] + } + ], "source": [ - "from tvb.basic.readers import try_get_absolute_path\n", - "connectivity_path = try_get_absolute_path(\"tvb_data\",\"mouse/allen_2mm/Connectivity.h5\")\n", + "from tvb.datasets import TVBZenodoDataset\n", + "dataset = TVBZenodoDataset()\n", + "\n", + "connectivity_path = dataset.fetch_data(\"Connectivity.h5\")\n", "import_op = import_conn_h5(1, connectivity_path)\n", "import_op = wait_to_finish(import_op)\n", "import_op" @@ -401,6 +422,7 @@ "outputs": [], "source": [ "# copy all the ids of the ConnectivityMeasureIndexes obtained before\n", + "\n", "connectivity_measure_ids = [i.id for i in get_operation_results(launched_operation.id)[1:]]" ] }, @@ -419,16 +441,16 @@ "\n", "import h5py\n", "from mpl_toolkits.axes_grid1 import make_axes_locatable\n", - "from tvb.basic.readers import try_get_absolute_path\n", + "\n", "\n", 
"fig, axes = plt.subplots(1,3)\n", "slice_idy=73\n", "j=0\n", "for conn_measure_id in connectivity_measure_ids:\n", - " f_path = try_get_absolute_path(\"tvb_data\", \"mouse/allen_2mm/RegionVolumeMapping.h5\")\n", + " f_path = dataset.fetch_data(\"RegionVolumeMapping.h5\")\n", " f = h5py.File(f_path, 'r', libver='latest')\n", " Vol=f['array_data'][:,:,:]\n", - " f_path = try_get_absolute_path(\"tvb_data\", \"mouse/allen_2mm/StructuralMRI.h5\")\n", + " f_path = dataset.fetch_data('StructuralMRI.h5')\n", " f = h5py.File(f_path, 'r', libver='latest')\n", " template=f['array_data'][:,:,:]\n", " conn_measure = load_dt(conn_measure_id)\n", @@ -499,7 +521,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.9" + "version": "3.9.0" } }, "nbformat": 4, From 08532fc2182af1d397b7454dc79860d57891ab96 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sun, 16 Jul 2023 12:48:13 +0530 Subject: [PATCH 31/84] fixed the notebooks --- .../demos/interacting_with_the_framework.ipynb | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/tvb_documentation/demos/interacting_with_the_framework.ipynb b/tvb_documentation/demos/interacting_with_the_framework.ipynb index 80a66a5c11..1ce1444d33 100644 --- a/tvb_documentation/demos/interacting_with_the_framework.ipynb +++ b/tvb_documentation/demos/interacting_with_the_framework.ipynb @@ -200,20 +200,6 @@ "source": [ "You can re-evaluate this cell multiple times while it's running to see how the results gradually show up as the simulation finishes." 
] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { From b71ea47ff52f9d217155f68f154d41bd615fb530 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sun, 16 Jul 2023 12:56:22 +0530 Subject: [PATCH 32/84] trigger for prs as well --- .github/workflows/build.yml | 3 ++- .github/workflows/lib-tests.yml | 3 ++- .github/workflows/notebooks.yml | 3 ++- .github/workflows/pg-tests.yml | 3 ++- .github/workflows/win-tests.yml | 3 ++- 5 files changed, 10 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5c8a4ec9ea..a00a16b811 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,5 +1,6 @@ name: Test Py -on: [push] +on: pull_request + push jobs: build: diff --git a/.github/workflows/lib-tests.yml b/.github/workflows/lib-tests.yml index 921bf2c63e..e24708cc15 100644 --- a/.github/workflows/lib-tests.yml +++ b/.github/workflows/lib-tests.yml @@ -1,5 +1,6 @@ name: Test lib -on: [push] +on: pull_request + push jobs: lib-tests: diff --git a/.github/workflows/notebooks.yml b/.github/workflows/notebooks.yml index 6773c09787..84ddc06b0f 100644 --- a/.github/workflows/notebooks.yml +++ b/.github/workflows/notebooks.yml @@ -1,5 +1,6 @@ name: Test Notebooks -on: [push] +on: pull_request + push jobs: build: diff --git a/.github/workflows/pg-tests.yml b/.github/workflows/pg-tests.yml index e49b3cf3c5..5c4ecafa34 100644 --- a/.github/workflows/pg-tests.yml +++ b/.github/workflows/pg-tests.yml @@ -1,5 +1,6 @@ name: Test PG -on: [push] +on: pull_request + push jobs: build: diff --git a/.github/workflows/win-tests.yml b/.github/workflows/win-tests.yml index 59007561f9..d372f1b2ae 100644 --- a/.github/workflows/win-tests.yml +++ b/.github/workflows/win-tests.yml @@ -1,5 +1,6 @@ name: Test Win -on: [push] +on: pull_request + push jobs: 
build: From 8e38ba5ab6fa893bba7982a620211c95353eba08 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sat, 22 Jul 2023 17:35:08 +0530 Subject: [PATCH 33/84] fix build_step1.py, added ability to unzip whole data. Also fetch_data accepts relative paths. --- tvb_build/build_step1.py | 8 +- .../demos/simulate_for_mouse.ipynb | 30 +---- .../connectivity_zip_importer_test.py | 1 - tvb_library/tvb/datasets/base.py | 18 +++ tvb_library/tvb/datasets/tvb_data.py | 113 ++++++++++------ tvb_library/tvb/datasets/zenodo.py | 15 ++- .../datasets/tvb_zenodo_dataset_test.py | 125 ++++++++++++++++-- 7 files changed, 224 insertions(+), 86 deletions(-) diff --git a/tvb_build/build_step1.py b/tvb_build/build_step1.py index 3541f95bd7..3ecadacb92 100644 --- a/tvb_build/build_step1.py +++ b/tvb_build/build_step1.py @@ -53,7 +53,8 @@ FW_FOLDER = os.path.join(TVB_ROOT, 'tvb_framework') LICENSE_PATH = os.path.join(FW_FOLDER, 'LICENSE') RELEASE_NOTES_PATH = os.path.join(TVB_ROOT, 'tvb_documentation', 'RELEASE_NOTES') -DATA_SRC_FOLDER = TVBZenodoDataset().extract_dir +dataset = TVBZenodoDataset() +DATA_SRC_FOLDER = dataset.extract_dir / 'tvb_data' DEMOS_MATLAB_FOLDER = os.path.join(TVB_ROOT, 'tvb_documentation', 'matlab') # dest paths @@ -114,6 +115,10 @@ "mouse/allen_2mm/RegionVolumeMapping.h5", ] +def fetch_data_to_include(filenames_list, dataset): + for i in filenames_list: + dataset.fetch_data("tvb_data/"+i) + def _copy_dataset(dataset_files, dataset_destination): for pth in dataset_files: @@ -230,6 +235,7 @@ def build_step1(): shutil.copytree(DEMOS_MATLAB_FOLDER, os.path.join(DIST_FOLDER, 'matlab'), ignore=shutil.ignore_patterns('.svn', '*.rst')) + fetch_data_to_include(INCLUDED_INSIDE_DATA, dataset) copy_distribution_dataset() _copy_demos_collapsed({os.path.join("..", "tvb_documentation", "demos"): os.path.join(DIST_FOLDER, "demo_scripts"), diff --git a/tvb_documentation/demos/simulate_for_mouse.ipynb b/tvb_documentation/demos/simulate_for_mouse.ipynb index 0f8fea4ebd..f2bc4141fb 100644 
--- a/tvb_documentation/demos/simulate_for_mouse.ipynb +++ b/tvb_documentation/demos/simulate_for_mouse.ipynb @@ -63,30 +63,11 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "file tvb_data.zip is downloaded at C:\\Users\\Abhijit_asus\\TVB\\DATASETS\\.cache\\TVB_Data\\c042692ba786b0ecebdffd58e9efac21-tvb_data.zip\n" - ] - }, - { - "ename": "NameError", - "evalue": "name 'import_conn_h5' is not defined", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mNameError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[1], line 5\u001b[0m\n\u001b[0;32m 2\u001b[0m dataset \u001b[39m=\u001b[39m TVBZenodoDataset()\n\u001b[0;32m 4\u001b[0m connectivity_path \u001b[39m=\u001b[39m dataset\u001b[39m.\u001b[39mfetch_data(\u001b[39m\"\u001b[39m\u001b[39mConnectivity.h5\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[1;32m----> 5\u001b[0m import_op \u001b[39m=\u001b[39m import_conn_h5(\u001b[39m1\u001b[39m, connectivity_path)\n\u001b[0;32m 6\u001b[0m import_op \u001b[39m=\u001b[39m wait_to_finish(import_op)\n\u001b[0;32m 7\u001b[0m import_op\n", - "\u001b[1;31mNameError\u001b[0m: name 'import_conn_h5' is not defined" - ] - } - ], + "outputs": [], "source": [ "from tvb.datasets import TVBZenodoDataset\n", "dataset = TVBZenodoDataset()\n", @@ -496,13 +477,6 @@ "\n", "[7] Newman, Mark EJ. \"The mathematics of networks.\" The new palgrave encyclopedia of economics 2, no. 2008 (2008): 1-12." 
] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_zip_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_zip_importer_test.py index e2b6ff98e0..4d30e57789 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_zip_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_zip_importer_test.py @@ -59,7 +59,6 @@ def test_happy_flow_import(self): """ Test that importing a CFF generates at least one DataType in DB. """ - #zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') dt_count_before = TestFactory.get_entity_count(self.test_project, ConnectivityIndex) TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John", False) diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py index df01d6c153..8de0c37155 100644 --- a/tvb_library/tvb/datasets/base.py +++ b/tvb_library/tvb/datasets/base.py @@ -33,6 +33,7 @@ from tvb.basic.logger.builder import get_logger from tvb.basic.profile import TvbProfile from pathlib import Path +from zipfile import ZipFile class BaseDataset: @@ -53,6 +54,7 @@ def download(self): def fetch_data(self, file_name): if Path(file_name).is_absolute(): + self.log.warning("Given file name is an absolute path. No operations are done. The path is returned as it is") return file_name return self._fetch_data(file_name) @@ -60,5 +62,21 @@ def fetch_data(self, file_name): def _fetch_data(self, file_name): pass + def read_zipfile_structure(self, file_path): + """ + Reads the zipfile structure and returns the dictionary containing file_names as keys and list of relative paths having same file name. 
+ """ + with ZipFile(file_path) as zf: + file_names_in_zip = zf.namelist() + zf.close() + + file_names_dict = {} + for i in file_names_in_zip: + if str(Path(i).name) not in file_names_dict.keys(): + file_names_dict[str(Path(i).name)] = [i] + else: + file_names_dict[str(Path(i).name)].append(i) + return file_names_dict + def get_version(self): return self.version diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 5bf9c86ba3..11d9120d06 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -52,11 +52,16 @@ def __init__(self, version= "2.7", extract_dir = None): version: str - Version number of the dataset, Default value is 2.7 + + extract_dir: str + - path where you want to extract the archive. + - If `extract_dir` is None, Dataset is downloaded at location according to your profile settings. """ super().__init__(version, extract_dir) self.cached_dir = self.extract_dir / ".cache" self.cached_file = self.cached_dir / "tvb_cached_responses.txt" + self.files_in_zip_dict = None if not self.cached_dir.is_dir(): self.cached_dir.mkdir(parents=True) @@ -68,72 +73,103 @@ def __init__(self, version= "2.7", extract_dir = None): self.log.warning(f"Failed to read data from cached response.") self.recid = Zenodo().get_versions_info(self.CONCEPTID)[version] self.update_cached_response() - - - #TODO add logging errors method by catching the exact exceptions. + self.rec = Record(self.read_cached_response()[self.version]) - def download(self, path=None): + def download(self, path=None, fname=None): """ Downloads the dataset to `path` + parameters + ----------- + path: + - path where you want to download the Dataset. + - If `path` is None, Dataset is downloaded at location according to your profile settings. + fname: + - The name that will be used to save the file. Should NOT include the full the path, just the file name (it will be appended to path). 
+ - If fname is None, file will be saved with a unique name that contains hash of the file and the last part of the url from where the file would be fetched. """ - self.rec.download(path) + if path == None: + path = self.cached_dir + self.rec.download(path, fname) def _fetch_data(self, file_name): """ - Fetches the data + Function to fetch the file having `file_name` as name of the file. The function checks if the dataset is downloaded or not. If not, function downloads the dataset and then extracts/unzip the file. parameters: ----------- file_name: str - - Name of the file from the downloaded zip file to fetch. - extract_dir: str - - Path where you want to extract the archive. If Path is None, dataset is extracted according to the tvb profile configuration - - - returns: Pathlib.Path - path of the file which was extracted + - Name of the file from the downloaded zip file to fetch. Also accepts relative path of the file with respect to tvb_data.zip. This is useful when having multiple files with same name. + + returns: str + path of the extracted/Unzipped file. """ - # TODO: extract dir needs better description. 
- extract_dir = self.extract_dir - download_dir = self.cached_dir / "TVB_Data" try: file_path = self.rec.file_loc['tvb_data.zip'] except: - self.download(path = download_dir) + self.download(path = self.cached_dir, fname=f"tvb_data_{self.version}.zip") file_path = self.rec.file_loc['tvb_data.zip'] - with ZipFile(file_path) as zf: - file_names_in_zip = zf.namelist() - zf.close() + if self.files_in_zip_dict == None: + self.files_in_zip_dict = self.read_zipfile_structure(file_path=file_path) file_name = file_name.strip() - file_names_in_zip = {str(Path(i).name): i for i in file_names_in_zip} - if extract_dir==None: - ZipFile(file_path).extract(file_names_in_zip[file_name]) + if file_name.startswith("tvb_data"): + if file_name in self.files_in_zip_dict[str(Path(file_name).name)] : + ZipFile(file_path).extract(file_name, path=extract_dir) + + if extract_dir.is_absolute(): + return str(extract_dir / file_name) + return str(Path.cwd()/ extract_dir / file_name) + else: + self.log.error("file_name not found, please mention correct relative file path") + + elif len(self.files_in_zip_dict[file_name]) == 1: + ZipFile(file_path).extract(self.files_in_zip_dict[file_name][0], path=extract_dir) + + if extract_dir.is_absolute(): + return str(extract_dir / self.files_in_zip_dict[file_name][0]) + return str(Path.cwd()/ extract_dir / self.files_in_zip_dict[file_name][0]) + + + elif len(self.files_in_zip_dict[file_name]) > 1: - ZipFile(file_path).extract(file_names_in_zip[file_name], path = extract_dir) + self.log.error(f"""There are more than 1 files with same names in the zip file. Please mention relative path of the file with respect to the tvb_data.zip. 
+ file_name should be one of the following paths: {self.files_in_zip_dict[file_name]}""") + raise NameError(f"file name should be one of the {self.files_in_zip_dict[file_name]}, but got {file_name}") - if extract_dir.is_absolute(): - return str(extract_dir / file_names_in_zip[file_name]) + def fetch_all_data(self): + if self.files_in_zip_dict == None: + self.download(path = self.cached_dir, fname=f"tvb_data_{self.version}.zip") + self.files_in_zip_dict = self.read_zipfile_structure(self.rec.file_loc['tvb_data.zip']) + + + for file_paths in self.files_in_zip_dict.values(): + for file_path in file_paths: + self.fetch_data(file_path) + + return str(self.extract_dir / 'tvb_data') - return str(Path.cwd()/ extract_dir / file_names_in_zip[file_name]) def delete_data(self): + """ + Deletes the `tvb_data` folder in the `self.extract_dir` directory. + """ _dir = self.extract_dir / "tvb_data" shutil.rmtree(_dir) + self.log.info(f"deleting {self.extract_dir/'tvb_data'} directory.") def update_cached_response(self): """ - gets responses from zenodo server and saves them to cache file. + Gets responses from zenodo server and saves them to a cache file. """ file_dir = self.cached_file @@ -158,33 +194,32 @@ def update_cached_response(self): def read_cached_response(self): """ - reads responses from the cache file. - + Reads responses from the cache file. """ file_dir = self.cached_file - with open(file_dir) as fp: responses = json.load(fp) - fp.close() - responses = dict(responses) return responses def describe(self): + """ + Returns the project description mentioned on the zenodo website. 
+ """ return self.rec.describe() - def get_record(self): + def get_recordid(self): + """ + returns record id of the dataset + """ return self.recid def __eq__(self, other): - if isinstace(other, TVBZenodoDataset): - return self.rec == tvb_data.rec + if isinstance(other, TVBZenodoDataset): + return self.rec == other.rec return False - - - diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 04232e26e7..055381b206 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -53,7 +53,13 @@ def __init__(self, data, base_url: str = BASE_URL) -> None: - def download(self, path: str = None) -> None: + def download(self, path: str = None, fname=None) -> None: + """ + Download the files entity from the json response at `path`. If the `path` is None, the data is downloaded at os caches. + + For more info about os cache, have a look at https://www.fatiando.org/pooch/latest/api/generated/pooch.os_cache.html. + In our use case, the is `tvb`. + """ if 'files' not in self.data: raise AttributeError("No files to download! Please check if the record id entered is correct! or the data is publically accessible") @@ -61,13 +67,16 @@ def download(self, path: str = None) -> None: if path == None: path = pooch.os_cache("tvb") + + #convert pathlib.Path objects to strings. 
+ path = str(path) for file in self.data["files"]: url = file['links']['self'] known_hash = file['checksum'] file_name = file['key'] - file_path = pooch.retrieve(url= url, known_hash= known_hash, path = path,progressbar = True) + file_path = pooch.retrieve(url= url, known_hash= known_hash, path = path, fname=fname ,progressbar = True) self.file_loc[f'{file_name}'] = file_path @@ -85,7 +94,7 @@ def get_record_id(self) -> str: return self.data['conceptrecid'] def is_open_access(self) -> str: - return self.data['metadata']['access_right'] != "closed" + return self.data['metadata']['access_right'] == "open" def __eq__(self, record_b) -> bool: return (self.data == record_b.data) diff --git a/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py b/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py index e275410f81..b9c233130c 100644 --- a/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py +++ b/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py @@ -32,34 +32,131 @@ from tvb.datasets import TVBZenodoDataset from pathlib import Path from tvb.tests.library.base_testcase import BaseTestCase +import zipfile +import pytest class Test_TVBZenodoDataset(BaseTestCase): def test_extract(self): - tvb_data = TVBZenodoDataset() - connectivity66_dir = Path(tvb_data.fetch_data("connectivity_66.zip")) + dataset = TVBZenodoDataset() + connectivity66_dir = Path(dataset.fetch_data("connectivity_66.zip")) + + assert str(connectivity66_dir).endswith(".zip") assert connectivity66_dir.is_file() - tvb_data.delete_data() + dataset.delete_data() assert not connectivity66_dir.is_file() - tvb_data = TVBZenodoDataset(version="2.0.3", extract_dir="tvb_data") - connectivity66_dir = Path(tvb_data.fetch_data("connectivity_66.zip")) - assert connectivity66_dir.is_file() - tvb_data.delete_data() + dataset = TVBZenodoDataset(version="2.0.3", extract_dir="dataset") + connectivity66_dir = Path(dataset.fetch_data("connectivity_66.zip")) + + assert 
str(connectivity66_dir).endswith(".zip") + assert "dataset" in str(connectivity66_dir) + assert (Path.cwd()/"dataset").is_dir() + assert (Path.cwd()/"dataset"/"tvb_data").is_dir() + dataset.delete_data() assert not connectivity66_dir.is_file() - tvb_data = TVBZenodoDataset(version="2.0.3", extract_dir="~/tvb_data") - matfile_dir = Path(tvb_data.fetch_data("local_connectivity_80k.mat")) + dataset = TVBZenodoDataset(version="2.0.3", extract_dir="~/dataset") + matfile_dir = Path(dataset.fetch_data("local_connectivity_80k.mat")) + + assert str(matfile_dir).endswith(".mat") assert matfile_dir.is_file() - tvb_data.delete_data() + dataset.delete_data() assert not matfile_dir.is_file() - all_extract = Path(TVBZenodoDataset(version = "2.0.3", extract_dir="~/tvb_data").fetch_data(" ConnectivityTable_regions.xls")) - assert all_extract.is_file() - tvb_data.delete_data() - assert not all_extract.is_file() + + excel_extract = Path(dataset.fetch_data(" ConnectivityTable_regions.xls")) + assert excel_extract.is_file() + dataset.delete_data() + assert not excel_extract.is_file() + + + + all_extract =Path(dataset.fetch_all_data()) + assert all_extract.is_dir() + assert all_extract + + dataset.delete_data() + + def test_check_content(self): + + #check if connectivity_66 contains expected files. 
+ dataset = TVBZenodoDataset() + connectivity66_dir = Path(dataset.fetch_data("connectivity_66.zip")) + + assert "centres.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + assert "info.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + assert "tract_lengths.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + assert "weights.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + + + dataset = TVBZenodoDataset(version= "2.0.3", extract_dir="~/dataset") + connectivity66_dir = Path(dataset.fetch_data("connectivity_66.zip")) + assert "centres.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + assert "info.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + assert "tract_lengths.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + assert "weights.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + + dataset = TVBZenodoDataset(version="2.0.3", extract_dir="~/dataset") + extract_dir = dataset.fetch_all_data() + assert (extract_dir/ "tvb_data" /"mouse"/"allen_2mm"/"Connectivity.h5").is_file() + assert (extract_dir/ "tvb_data" /"surfaceData"/"inner_skull_4096.zip").is_file() + + + + + def test_file_name_variants(self): + dataset = TVBZenodoDataset(version= "2.0.3", extract_dir="~/dataset") + connectivity66_dir_1 = Path(dataset.fetch_data("connectivity_66.zip")) + connectivity66_dir_2 = Path(dataset.fetch_data('tvb_data/connectivity/connectivity_66.zip')) + assert connectivity66_dir_1 == connectivity66_dir_2 + + dataset.delete_data() + + dataset = TVBZenodoDataset() + connectivity66_dir_1 = Path(dataset.fetch_data("connectivity_66.zip")) + connectivity66_dir_2 = Path(dataset.fetch_data('tvb_data/connectivity/connectivity_66.zip')) + assert connectivity66_dir_1 == connectivity66_dir_2 + + dataset.delete_data() + + + dataset = TVBZenodoDataset(version= "2.0.3", extract_dir="dataset") + connectivity66_dir_1 = Path(dataset.fetch_data("connectivity_66.zip")) + connectivity66_dir_2 = 
Path(dataset.fetch_data('tvb_data/connectivity/connectivity_66.zip')) + assert connectivity66_dir_1 == connectivity66_dir_2 + + dataset.delete_data() + + + # should raise error cause there are two files with name mapping_FS_84.txt + with pytest.raises(NameError): + dataset = TVBZenodoDataset() + data = dataset.fetch_data("mapping_FS_84.txt") + + # no error when relative path given + dataset = TVBZenodoDataset() + data = Path(dataset.fetch_data(" tvb_data/macaque/mapping_FS_84.txt")) + assert data.is_file() + + data = Path(dataset.fetch_data('tvb_data/nifti/volume_mapping/mapping_FS_84.txt')) + assert data.is_file() + + dataset.delete_data() + + + + + + + + + + + + #TODO add no interenet tests From 4fc469395d654b0454021d0c92713d3192267d90 Mon Sep 17 00:00:00 2001 From: Abhijit Deo <72816663+abhi-glitchhg@users.noreply.github.com> Date: Sat, 22 Jul 2023 17:49:33 +0530 Subject: [PATCH 34/84] revert back to original notebooks metadata --- tvb_documentation/demos/encrypt_data.ipynb | 4 ++-- tvb_documentation/demos/simulate_for_mouse.ipynb | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tvb_documentation/demos/encrypt_data.ipynb b/tvb_documentation/demos/encrypt_data.ipynb index 051421a3fc..2197f07bb8 100644 --- a/tvb_documentation/demos/encrypt_data.ipynb +++ b/tvb_documentation/demos/encrypt_data.ipynb @@ -28,7 +28,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": { "colab": {}, "colab_type": "code", @@ -200,7 +200,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.0" + "version": "3.10.9" } }, "nbformat": 4, diff --git a/tvb_documentation/demos/simulate_for_mouse.ipynb b/tvb_documentation/demos/simulate_for_mouse.ipynb index f2bc4141fb..6ca7857928 100644 --- a/tvb_documentation/demos/simulate_for_mouse.ipynb +++ b/tvb_documentation/demos/simulate_for_mouse.ipynb @@ -495,7 +495,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": 
"ipython3", - "version": "3.9.0" + "version": "3.10.9" } }, "nbformat": 4, From 2ca6c02d795c6a4cf43c53bd9783a077cbaec5bd Mon Sep 17 00:00:00 2001 From: Abhijit Deo <72816663+abhi-glitchhg@users.noreply.github.com> Date: Sat, 22 Jul 2023 18:11:06 +0530 Subject: [PATCH 35/84] add missing `:` to the github workflow files. :) --- .github/workflows/build.yml | 4 ++-- .github/workflows/lib-tests.yml | 4 ++-- .github/workflows/notebooks.yml | 4 ++-- .github/workflows/pg-tests.yml | 4 ++-- .github/workflows/win-tests.yml | 4 ++-- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index a00a16b811..586cbf31c8 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,6 +1,6 @@ name: Test Py -on: pull_request - push +on: pull_request: + push: jobs: build: diff --git a/.github/workflows/lib-tests.yml b/.github/workflows/lib-tests.yml index e24708cc15..5fac55cee2 100644 --- a/.github/workflows/lib-tests.yml +++ b/.github/workflows/lib-tests.yml @@ -1,6 +1,6 @@ name: Test lib -on: pull_request - push +on: pull_request: + push: jobs: lib-tests: diff --git a/.github/workflows/notebooks.yml b/.github/workflows/notebooks.yml index 84ddc06b0f..041e5150d7 100644 --- a/.github/workflows/notebooks.yml +++ b/.github/workflows/notebooks.yml @@ -1,6 +1,6 @@ name: Test Notebooks -on: pull_request - push +on: pull_request: + push: jobs: build: diff --git a/.github/workflows/pg-tests.yml b/.github/workflows/pg-tests.yml index 5c4ecafa34..2d269ac322 100644 --- a/.github/workflows/pg-tests.yml +++ b/.github/workflows/pg-tests.yml @@ -1,6 +1,6 @@ name: Test PG -on: pull_request - push +on: pull_request: + push: jobs: build: diff --git a/.github/workflows/win-tests.yml b/.github/workflows/win-tests.yml index d372f1b2ae..c0a707e9e3 100644 --- a/.github/workflows/win-tests.yml +++ b/.github/workflows/win-tests.yml @@ -1,6 +1,6 @@ name: Test Win -on: pull_request - push +on: pull_request: + push: jobs: 
build: From d2aa85aeacb6da51d1012077a62e1b5ab6921664 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sat, 22 Jul 2023 18:17:36 +0530 Subject: [PATCH 36/84] fixed the silly indentation mistakes in the github actions files. --- .github/workflows/build.yml | 5 +++-- .github/workflows/lib-tests.yml | 5 +++-- .github/workflows/notebooks.yml | 5 +++-- .github/workflows/pg-tests.yml | 5 +++-- .github/workflows/win-tests.yml | 5 +++-- 5 files changed, 15 insertions(+), 10 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 586cbf31c8..9ec10866ff 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,6 +1,7 @@ name: Test Py -on: pull_request: - push: +on: + pull_request: + push: jobs: build: diff --git a/.github/workflows/lib-tests.yml b/.github/workflows/lib-tests.yml index 5fac55cee2..78d1e44e4e 100644 --- a/.github/workflows/lib-tests.yml +++ b/.github/workflows/lib-tests.yml @@ -1,6 +1,7 @@ name: Test lib -on: pull_request: - push: +on: + pull_request: + push: jobs: lib-tests: diff --git a/.github/workflows/notebooks.yml b/.github/workflows/notebooks.yml index 041e5150d7..d5590e62bf 100644 --- a/.github/workflows/notebooks.yml +++ b/.github/workflows/notebooks.yml @@ -1,6 +1,7 @@ name: Test Notebooks -on: pull_request: - push: +on: + pull_request: + push: jobs: build: diff --git a/.github/workflows/pg-tests.yml b/.github/workflows/pg-tests.yml index 2d269ac322..b9fc618c7d 100644 --- a/.github/workflows/pg-tests.yml +++ b/.github/workflows/pg-tests.yml @@ -1,6 +1,7 @@ name: Test PG -on: pull_request: - push: +on: + pull_request: + push: jobs: build: diff --git a/.github/workflows/win-tests.yml b/.github/workflows/win-tests.yml index c0a707e9e3..69244e217b 100644 --- a/.github/workflows/win-tests.yml +++ b/.github/workflows/win-tests.yml @@ -1,6 +1,7 @@ name: Test Win -on: pull_request: - push: +on: + pull_request: + push: jobs: build: From 58ae3d08bc69cf1c80b63304bff49b6ebc406413 Mon Sep 17 00:00:00 2001 From: 
abhi_win Date: Sun, 23 Jul 2023 12:50:51 +0530 Subject: [PATCH 37/84] fix the directory path in the tests --- tvb_library/tvb/datasets/tvb_data.py | 6 +++-- .../datasets/tvb_zenodo_dataset_test.py | 22 ++++++++++++++----- 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 11d9120d06..2786641dae 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -154,8 +154,10 @@ def fetch_all_data(self): for file_paths in self.files_in_zip_dict.values(): for file_path in file_paths: self.fetch_data(file_path) - - return str(self.extract_dir / 'tvb_data') + + if self.extract_dir.is_absolute(): + return str(self.extract_dir) + return str(Path.cwd()/self.extract_dir) def delete_data(self): diff --git a/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py b/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py index b9c233130c..fdc511bdb2 100644 --- a/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py +++ b/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py @@ -91,6 +91,7 @@ def test_check_content(self): assert "info.txt" in zipfile.ZipFile(connectivity66_dir).namelist() assert "tract_lengths.txt" in zipfile.ZipFile(connectivity66_dir).namelist() assert "weights.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + dataset.delete_data() dataset = TVBZenodoDataset(version= "2.0.3", extract_dir="~/dataset") @@ -99,13 +100,24 @@ def test_check_content(self): assert "info.txt" in zipfile.ZipFile(connectivity66_dir).namelist() assert "tract_lengths.txt" in zipfile.ZipFile(connectivity66_dir).namelist() assert "weights.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + dataset.delete_data() - dataset = TVBZenodoDataset(version="2.0.3", extract_dir="~/dataset") - extract_dir = dataset.fetch_all_data() - assert (extract_dir/ "tvb_data" /"mouse"/"allen_2mm"/"Connectivity.h5").is_file() - assert 
(extract_dir/ "tvb_data" /"surfaceData"/"inner_skull_4096.zip").is_file() - + dataset = TVBZenodoDataset(version="2.0.3", extract_dir="dataset") + extract_dir = Path(dataset.fetch_all_data()) + assert (extract_dir/"tvb_data").is_dir() + assert (extract_dir/"tvb_data/mouse/allen_2mm/Connectivity.h5").is_file() + assert (extract_dir/"tvb_data/surfaceData/inner_skull_4096.zip").is_file() + + connectivity66 = extract_dir/"tvb_data/connectivity/connectivity_96.zip" + assert connectivity66.is_file() + + assert "centres.txt" in zipfile.ZipFile(connectivity66).namelist() + assert "info.txt" in zipfile.ZipFile(connectivity66).namelist() + assert "tract_lengths.txt" in zipfile.ZipFile(connectivity66).namelist() + assert "weights.txt" in zipfile.ZipFile(connectivity66).namelist() + + dataset.delete_data() def test_file_name_variants(self): From f420d75d6ff04c61558b6a39049f97b52f468ccd Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Wed, 10 May 2023 19:14:46 +0530 Subject: [PATCH 38/84] Using pyzenodo3 as a client to connect with Zenodo. 
--- tvb_library/tvb/.utils.py.swp | Bin 0 -> 12288 bytes tvb_library/tvb/datasets/__init__.py | 2 + tvb_library/tvb/datasets/utils.py | 171 +++++++++++++++++++++++++++ tvb_library/tvb/datasets/zenodo.py | 168 ++++++++++++++++++++++++++ 4 files changed, 341 insertions(+) create mode 100644 tvb_library/tvb/.utils.py.swp create mode 100644 tvb_library/tvb/datasets/__init__.py create mode 100644 tvb_library/tvb/datasets/utils.py create mode 100644 tvb_library/tvb/datasets/zenodo.py diff --git a/tvb_library/tvb/.utils.py.swp b/tvb_library/tvb/.utils.py.swp new file mode 100644 index 0000000000000000000000000000000000000000..77ebcc6dbcb19cb0a274d7194960a843e2e1abe6 GIT binary patch literal 12288 zcmeI2ON8`v0nXUYp%@;XvG^I{8h_R9AiV z)xW;4sy5Kr{m7A{d|zod!*dUPq|e1k#+-&H&q&>?TCvR9jeDIHo0u0*XAR4dQsuUa z+_GM1jK@)==}X<0jo691^u3&D>8nzwyShR9)+%5XxCRBb%+QKSe6|f3e1*`&A z0jq#jz$#!BunJfOtO8bnb*O;TWb6|D*#GQiIFEn-@BRIM^=`(#1|Nb;;BD|0xB#97 zv*0ke7inY~@EmA^7B~v>AP25)hOgi&@D9-61UL>3fEsuJYy!V+LVNHjcoD?lEI0v6+2RG)@!iCC zw5Ti3nW#<{TGAJFKXS)*)?81x^C}6(rV+2A#m(=!47F&-Qg@g3D5edl!}x(l6#3}J z2BSMt2|lBBEE|ay2je)33$2_C-6)QQtGVYWkGGQ0)iMhCg5xKm$b&>_-Vi(rL5pio z@Ss^OayVNllE)K!O;MdRb$h1vnzCA%DDo#^R>%d>G^XpB>8EmI#wP(4;2;S@PbS!i z9I!HwgV$>P`7!DN^~mL(-H;mO;1YV}rl|#Cp5<)A)9bK}Ua-wFyURK~vW9DR8ok!w zG}OKhr_pN-wky|Rxw5Jl2$X@%T0feNdP^6N)(-8--m;-SY-{)UEb3Bk>1`UKl%KlW5kvJmYn)nAMp6*uaye7FQMe#tZStc(1lN&i z%$tsO$WE^dPp6pnsq3gtL*jqm4aS+~m>CU{=dCE_NhnbS$ayaLVwCtz?#p>W11v`I zJYSUBLyG!cGaf8)vZ#=XXdv*~_j%7n2j3};$<4GGnaMmrs$(F*6D7koH491{E0U*S z$%r_oMcBj(sptc37Co{Wz&Io zN;XBz+aeUPbos-FXUxLx$4tX{KJIDVQPbse8&gdhB{vGn=S3Jbqh2AEN`xv`_Dxmu z<~y1MBGlve@0*;OnwY9oaIs-|vj$l(sFEknHDy~UT^Ji=HP)2}rKE#p69v~x!ud2D zsGe5CRB;({sYP7youJWl_%v^oV&OE?4s|8Zi?JeaXmNQQs+BBRY1Jqj;G!9^R@ptV7X#);V*YnPz{B zUTK=^YnHTmFEh*jp1$HN^T>rtkum~p(~^!`{DwkB?jRxY9mk#twDl6(mF2|vkGNiW z!S|cFQ=-wMZg}>ZQAd-PE&}Ps(K{t(2G=WWx&5n-*)r)aRh=}+PZur6?9YtMqIY&+ z4DSK`YsQLUlimHYcnF5~x>2=m{9Z@X3Wn68(|sTnqt~lxh2~3yDw#f{^XO*TzcwsY 
ziTAcjmOpglp@a3=nPZ3R$4<^3gjaJRF2DtIwV-aa7tf;PI3kz~IuXu!x_F|Vwc;q~ cpPkt|YMZ{mnNNpeR&UJq48-CrPAje0zi%ujX#fBK literal 0 HcmV?d00001 diff --git a/tvb_library/tvb/datasets/__init__.py b/tvb_library/tvb/datasets/__init__.py new file mode 100644 index 0000000000..cd42a007bc --- /dev/null +++ b/tvb_library/tvb/datasets/__init__.py @@ -0,0 +1,2 @@ +from .utils import * +from .zenodo import Record, Zenodo diff --git a/tvb_library/tvb/datasets/utils.py b/tvb_library/tvb/datasets/utils.py new file mode 100644 index 0000000000..988f2c2daa --- /dev/null +++ b/tvb_library/tvb/datasets/utils.py @@ -0,0 +1,171 @@ +import requests +from pathlib import Path +import hashlib +import urllib +from tqdm import tqdm + +""" +functions related to hashes functions + +""" + +USER_AGENT = "TVB_ROOT/TVB_LIBRARY" + +def calculate_md5(file_path:Path, chunk_size:int =1024) -> str : + """ + A function to calculate the md5 hash of a file. + + """ + m = hashlib.md5() + with open(file_path, "rb") as f: + for chunk in iter(lambda : f.read(chunk_size), b""): + m.update(chunk) + return m.hexdigest(); + + + + +def calculate_sha256(file_path:Path, chunk_size:int =1024) -> str: + """ + / A function to calculate the sha256 hash of a file + """ + s = hashlib.sha256() + with open(file_path, "rb") as f: + for chunk in iter(lambda : f.read(chunk_size), b""): + s.update(chunk) + return s.hexdigest(); + + +def calculate_sha1(file_path:Path, chunk_size:int=1024)->str: + s = hashlib.sha1() + + with open(file_path, "rb") as f: + for chunk in iter(lambda : f.read(chunk_size), b""): + m.update(chunk) + + return s.hexdigest() + + + +def calculate_sha224(file_path:Path, chunk_size:int=1024)->str: + s = hashlib.sha224() + + with open(file_path, "rb") as f: + for chunk in iter(lambda : f.read(chunk_size), b""): + m.update(chunk) + + return s.hexdigest() + + +def calculate_sha384(file_path:Path, chunk_size:int=1024)->str: + s = hashlib.sha384() + + with open(file_path, "rb") as f: + for chunk in iter(lambda : 
f.read(chunk_size), b""): + m.update(chunk) + + return s.hexdigest() + +# +def calculate_sha512(file_path:Path, chunk_size:int=1024): + s = hashlib.sha512() + + with open(file_path, "rb") as f: + for chunk in iter(lambda : f.read(chunk_size), b""): + m.update(chunk) + + return s.hexdigest() +#. + +# okay there are some stuff which would consider generic SHA hash; link -https://github.com/zenodo/zenodo/issues/1985#issuecomment-796882811 + + + +AVAILABLE_HASH_FUNCTIONS = {"md5": calculate_md5, "sha1": calculate_sha1,"sha224":calculate_sha224, "sha256":calculate_sha256, "sha384":calculate_sha384, "sha512": calculate_sha512} # can extend this further + + +def convert_to_pathlib(file_path: str) ->Path: + """ + convert the file_path to Path datatype + """ + + if (type(file_path)!= Path): + return Path(file_path) + return file_path + + + +#should we keep a way to download a file without having to check the checksum? + +def check_integrity(file_loc, checksum:str, hash_function="md5")->bool: + """ + This function checks if the file at `file_loc` has same checksum. 
+ """ + + if hash_function not in AVAILABLE_HASH_FUNCTIONS.keys(): + raise AttributeError(f"incorrect hash function value, must be one of the md5, sha1,sha224,sha256, sha384, sha512, received {hash_functio}") + + if hash_function== "md5": + return calculate_md5(file_loc)==checksum + + if hash_function == "sha1": + return calculate_sha1(file_loc) == checksum + + if hash_function == "sha224": + return calculate_sha224(file_loc) == checksum + + if hash_function == "sha256": + return calculate_sha256(file_loc) == checksum + + if hash_function == "sha384": + return calculate_sha384(file_loc) == checksum + + if hash_function == "sha512": + return calculate_sha512(file_loc) == checksum + + + + +def download_file(url, checksum, hash_function, root): + if hash_function not in AVAILABLE_HASH_FUNCTIONS.keys(): + raise AttributeError(f"incorrect hash function value, must be one of the md5, sha1,sha224,sha256, sha384, sha512, received {hash_functio}") + + root = Path(root) + + if (not root.is_dir()): + root.mkdir(parents=True) + + file_name = url.split("/")[-1] + file_loc = root/file_name + + if (file_loc.is_file() and check_integrity(file_loc, checksum, hash_function)): + print(f"File {file_name} already downloaded at location {file_loc}") + return + + _urlretrieve(url, file_loc) + + #ToDO : what to do when the hash of the downloaded file doesnt match with the online value? discard the file ? warning the user? both? + + print(f"file {file_loc} downloaded successfully") + + + +# following functions are inspired from the torchvision. 
+def _save_response_content( + content, + destination, + length= None, +) : + with open(destination, "wb") as fh, tqdm(total=length) as pbar: + for chunk in content: + # filter out keep-alive new chunks + if not chunk: + continue + + fh.write(chunk) + pbar.update(len(chunk)) + + +def _urlretrieve(url, file_loc, chunk_size = 1024 * 32): + with urllib.request.urlopen(urllib.request.Request(url, headers={"User-Agent": USER_AGENT})) as response: + _save_response_content(iter(lambda: response.read(chunk_size), b""), file_loc, length=response.length) diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py new file mode 100644 index 0000000000..2f8dabc0cb --- /dev/null +++ b/tvb_library/tvb/datasets/zenodo.py @@ -0,0 +1,168 @@ +# code from https://github.com/space-physics/pyzenodo3 and https://github.com/space-physics/pyzenodo3/pull/9 +# code is copied here because the repo is inactive and author is not responding; hence no maintainance guarantee. + +import requests +import re +from bs4 import BeautifulSoup +from bs4.element import Tag +from urllib.parse import urlencode +from pathlib import Path +from .utils import download_file + +BASE_URL = "https://zenodo.org/api/" + + +class Record: + def __init__(self, data, zenodo, base_url: str = BASE_URL) -> None: + self.base_url = base_url + self.data = data + self._zenodo = zenodo + + def _row_to_version(self, row: Tag) -> dict[str, str]: + link = row.select("a")[0] + linkrec = row.select("a")[0].attrs["href"] + if not linkrec: + raise KeyError("record not found in parsed HTML") + + texts = row.select("small") + recmatch = re.match(r"/record/(\d*)", linkrec) + if not recmatch: + raise LookupError("record match not found in parsed HTML") + + recid = recmatch.group(1) + + return { + "recid": recid, + "name": link.text, + "doi": texts[0].text, + "date": texts[1].text, + "original_version": self._zenodo.get_record(recid).original_version(), + } + + def get_versions(self) -> list: + url = 
f"{self.base_url}srecords?all_versions=1&size=100&q=conceptrecid:{self.data['conceptrecid']}" + + data = requests.get(url).json() + + return [Record(hit, self._zenodo) for hit in data["hits"]["hits"]] + + def get_versions_from_webpage(self) -> list[dict]: + """Get version details from Zenodo webpage (it is not available in the REST api)""" + res = requests.get("https://zenodo.org/record/" + self.data["conceptrecid"]) + soup = BeautifulSoup(res.text, "html.parser") + version_rows = soup.select(".well.metadata > table.table tr") + if len(version_rows) == 0: # when only 1 version + return [ + { + "recid": self.data["id"], + "name": "1", + "doi": self.data["doi"], + "date": self.data["created"], + "original_version": self.original_version(), + } + ] + return [self._row_to_version(row) for row in version_rows if len(row.select("td")) > 1] + + def original_version(self): #TODO: check the implementation once again. + for identifier in self.data["metadata"]["related_identifiers"]: + if identifier["relation"] == "isSupplementTo": + return re.match(r".*/tree/(.*$)", identifier["identifier"]).group(1) + return None + + def __str__(self): + return str(self.data) # TODO: pretty print? Format the json to more readable version. + + def download(self, root="./"): + _root = Path(root) + print(self.data) + if 'files' not in self.data: + raise AttributeError("No files to download! Please check if the id entered is correct!") + + + for file in self.data['files']: + url = file['links']['self'] + hash_function, checksum = file['checksum'].split(":") + # now we will download the files to the root. + file_name_zenodo = file['key'] + + if file_name_zenodo in list(_root.iterdir()) and check_integrity(_root+file_name_zenodo,checksum, hash_function): + print(f"{file_name_zenodo} already exists at {root} having same checksum. Hence skipping the download!") + continue # the file already exists at the given location and checksum also matches! 
+ + + download_file(root= root,url= url, checksum = checksum, hash_function=hash_function) + + + +class Zenodo: + def __init__(self, api_key: str = "", base_url: str = BASE_URL) -> None: + self.base_url = base_url + self._api_key = api_key + self.re_github_repo = re.compile(r".*github.com/(.*?/.*?)[/$]") + + def search(self, search: str) -> list[Record]: + """search Zenodo record for string `search` + + :param search: string to search + :return: Record[] results + """ + search = search.replace("/", " ") # zenodo can't handle '/' in search query + params = {"q": search} + + recs = self._get_records(params) + + if not recs: + raise LookupError(f"No records found for search {search}") + + return recs + + def _extract_github_repo(self, identifier): + matches = self.re_github_repo.match(identifier) + + if matches: + return matches.group(1) + + raise LookupError(f"No records found with {identifier}") + + def find_record_by_github_repo(self, search: str): + records = self.search(search) + for record in records: + if ( + "metadata" not in record.data + or "related_identifiers" not in record.data["metadata"] + ): + continue + + for identifier in [ + identifier["identifier"] + for identifier in record.data["metadata"]["related_identifiers"] + ]: + repo = self._extract_github_repo(identifier) + + if repo and repo.upper() == search.upper(): + return record + + raise LookupError(f"No records found in {search}") + + def find_record_by_doi(self, doi: str): + params = {"q": f"conceptdoi:{doi.replace('/', '*')}"} + records = self._get_records(params) + + if len(records) > 0: + return records[0] + else: + params = {"q": "doi:%s" % doi.replace("/", "*")} + return self._get_records(params)[0] + + def get_record(self, recid: str) -> Record: + + url = self.base_url + "records/" + recid + + return Record(requests.get(url).json(), self) + + #TODO: can also add get record by user? Will that be useful by any means? 
+ + def _get_records(self, params: dict[str, str]) -> list[Record]: + url = self.base_url + "records?" + urlencode(params) + + return [Record(hit, self) for hit in requests.get(url).json()["hits"]["hits"]] From e7c0eb1ad8a55bdcceb38f2668790708c1f24566 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Fri, 12 May 2023 19:47:34 +0530 Subject: [PATCH 39/84] remove the swp file from git, add .swp extension in gitignore --- .gitignore | 3 +++ tvb_library/tvb/.utils.py.swp | Bin 12288 -> 0 bytes 2 files changed, 3 insertions(+) delete mode 100644 tvb_library/tvb/.utils.py.swp diff --git a/.gitignore b/.gitignore index 330ded9df8..8a03e8377c 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,9 @@ *.py[cod] __pycache__ +#vim binaries +*.swp + # packaging *.egg-info/ dist/ diff --git a/tvb_library/tvb/.utils.py.swp b/tvb_library/tvb/.utils.py.swp deleted file mode 100644 index 77ebcc6dbcb19cb0a274d7194960a843e2e1abe6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 12288 zcmeI2ON8`v0nXUYp%@;XvG^I{8h_R9AiV z)xW;4sy5Kr{m7A{d|zod!*dUPq|e1k#+-&H&q&>?TCvR9jeDIHo0u0*XAR4dQsuUa z+_GM1jK@)==}X<0jo691^u3&D>8nzwyShR9)+%5XxCRBb%+QKSe6|f3e1*`&A z0jq#jz$#!BunJfOtO8bnb*O;TWb6|D*#GQiIFEn-@BRIM^=`(#1|Nb;;BD|0xB#97 zv*0ke7inY~@EmA^7B~v>AP25)hOgi&@D9-61UL>3fEsuJYy!V+LVNHjcoD?lEI0v6+2RG)@!iCC zw5Ti3nW#<{TGAJFKXS)*)?81x^C}6(rV+2A#m(=!47F&-Qg@g3D5edl!}x(l6#3}J z2BSMt2|lBBEE|ay2je)33$2_C-6)QQtGVYWkGGQ0)iMhCg5xKm$b&>_-Vi(rL5pio z@Ss^OayVNllE)K!O;MdRb$h1vnzCA%DDo#^R>%d>G^XpB>8EmI#wP(4;2;S@PbS!i z9I!HwgV$>P`7!DN^~mL(-H;mO;1YV}rl|#Cp5<)A)9bK}Ua-wFyURK~vW9DR8ok!w zG}OKhr_pN-wky|Rxw5Jl2$X@%T0feNdP^6N)(-8--m;-SY-{)UEb3Bk>1`UKl%KlW5kvJmYn)nAMp6*uaye7FQMe#tZStc(1lN&i z%$tsO$WE^dPp6pnsq3gtL*jqm4aS+~m>CU{=dCE_NhnbS$ayaLVwCtz?#p>W11v`I zJYSUBLyG!cGaf8)vZ#=XXdv*~_j%7n2j3};$<4GGnaMmrs$(F*6D7koH491{E0U*S z$%r_oMcBj(sptc37Co{Wz&Io zN;XBz+aeUPbos-FXUxLx$4tX{KJIDVQPbse8&gdhB{vGn=S3Jbqh2AEN`xv`_Dxmu z<~y1MBGlve@0*;OnwY9oaIs-|vj$l(sFEknHDy~UT^Ji=HP)2}rKE#p69v~x!ud2D 
zsGe5CRB;({sYP7youJWl_%v^oV&OE?4s|8Zi?JeaXmNQQs+BBRY1Jqj;G!9^R@ptV7X#);V*YnPz{B zUTK=^YnHTmFEh*jp1$HN^T>rtkum~p(~^!`{DwkB?jRxY9mk#twDl6(mF2|vkGNiW z!S|cFQ=-wMZg}>ZQAd-PE&}Ps(K{t(2G=WWx&5n-*)r)aRh=}+PZur6?9YtMqIY&+ z4DSK`YsQLUlimHYcnF5~x>2=m{9Z@X3Wn68(|sTnqt~lxh2~3yDw#f{^XO*TzcwsY ziTAcjmOpglp@a3=nPZ3R$4<^3gjaJRF2DtIwV-aa7tf;PI3kz~IuXu!x_F|Vwc;q~ cpPkt|YMZ{mnNNpeR&UJq48-CrPAje0zi%ujX#fBK From 594be2afc90a69269476a8eb395b4eb85a2a278c Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Tue, 6 Jun 2023 22:52:30 +0530 Subject: [PATCH 40/84] remove the search related methods from zenodo class, currently we will just focus on the downloading functionality. If required searching functionalities can be added --- tvb_library/tvb/datasets/__init__.py | 1 - tvb_library/tvb/datasets/utils.py | 171 --------------------------- tvb_library/tvb/datasets/zenodo.py | 149 +++++------------------ 3 files changed, 28 insertions(+), 293 deletions(-) delete mode 100644 tvb_library/tvb/datasets/utils.py diff --git a/tvb_library/tvb/datasets/__init__.py b/tvb_library/tvb/datasets/__init__.py index cd42a007bc..ec00e5f91b 100644 --- a/tvb_library/tvb/datasets/__init__.py +++ b/tvb_library/tvb/datasets/__init__.py @@ -1,2 +1 @@ -from .utils import * from .zenodo import Record, Zenodo diff --git a/tvb_library/tvb/datasets/utils.py b/tvb_library/tvb/datasets/utils.py deleted file mode 100644 index 988f2c2daa..0000000000 --- a/tvb_library/tvb/datasets/utils.py +++ /dev/null @@ -1,171 +0,0 @@ -import requests -from pathlib import Path -import hashlib -import urllib -from tqdm import tqdm - -""" -functions related to hashes functions - -""" - -USER_AGENT = "TVB_ROOT/TVB_LIBRARY" - -def calculate_md5(file_path:Path, chunk_size:int =1024) -> str : - """ - A function to calculate the md5 hash of a file. 
- - """ - m = hashlib.md5() - with open(file_path, "rb") as f: - for chunk in iter(lambda : f.read(chunk_size), b""): - m.update(chunk) - return m.hexdigest(); - - - - -def calculate_sha256(file_path:Path, chunk_size:int =1024) -> str: - """ - / A function to calculate the sha256 hash of a file - """ - s = hashlib.sha256() - with open(file_path, "rb") as f: - for chunk in iter(lambda : f.read(chunk_size), b""): - s.update(chunk) - return s.hexdigest(); - - -def calculate_sha1(file_path:Path, chunk_size:int=1024)->str: - s = hashlib.sha1() - - with open(file_path, "rb") as f: - for chunk in iter(lambda : f.read(chunk_size), b""): - m.update(chunk) - - return s.hexdigest() - - - -def calculate_sha224(file_path:Path, chunk_size:int=1024)->str: - s = hashlib.sha224() - - with open(file_path, "rb") as f: - for chunk in iter(lambda : f.read(chunk_size), b""): - m.update(chunk) - - return s.hexdigest() - - -def calculate_sha384(file_path:Path, chunk_size:int=1024)->str: - s = hashlib.sha384() - - with open(file_path, "rb") as f: - for chunk in iter(lambda : f.read(chunk_size), b""): - m.update(chunk) - - return s.hexdigest() - -# -def calculate_sha512(file_path:Path, chunk_size:int=1024): - s = hashlib.sha512() - - with open(file_path, "rb") as f: - for chunk in iter(lambda : f.read(chunk_size), b""): - m.update(chunk) - - return s.hexdigest() -#. - -# okay there are some stuff which would consider generic SHA hash; link -https://github.com/zenodo/zenodo/issues/1985#issuecomment-796882811 - - - -AVAILABLE_HASH_FUNCTIONS = {"md5": calculate_md5, "sha1": calculate_sha1,"sha224":calculate_sha224, "sha256":calculate_sha256, "sha384":calculate_sha384, "sha512": calculate_sha512} # can extend this further - - -def convert_to_pathlib(file_path: str) ->Path: - """ - convert the file_path to Path datatype - """ - - if (type(file_path)!= Path): - return Path(file_path) - return file_path - - - -#should we keep a way to download a file without having to check the checksum? 
- -def check_integrity(file_loc, checksum:str, hash_function="md5")->bool: - """ - This function checks if the file at `file_loc` has same checksum. - """ - - if hash_function not in AVAILABLE_HASH_FUNCTIONS.keys(): - raise AttributeError(f"incorrect hash function value, must be one of the md5, sha1,sha224,sha256, sha384, sha512, received {hash_functio}") - - if hash_function== "md5": - return calculate_md5(file_loc)==checksum - - if hash_function == "sha1": - return calculate_sha1(file_loc) == checksum - - if hash_function == "sha224": - return calculate_sha224(file_loc) == checksum - - if hash_function == "sha256": - return calculate_sha256(file_loc) == checksum - - if hash_function == "sha384": - return calculate_sha384(file_loc) == checksum - - if hash_function == "sha512": - return calculate_sha512(file_loc) == checksum - - - - -def download_file(url, checksum, hash_function, root): - if hash_function not in AVAILABLE_HASH_FUNCTIONS.keys(): - raise AttributeError(f"incorrect hash function value, must be one of the md5, sha1,sha224,sha256, sha384, sha512, received {hash_functio}") - - root = Path(root) - - if (not root.is_dir()): - root.mkdir(parents=True) - - file_name = url.split("/")[-1] - file_loc = root/file_name - - if (file_loc.is_file() and check_integrity(file_loc, checksum, hash_function)): - print(f"File {file_name} already downloaded at location {file_loc}") - return - - _urlretrieve(url, file_loc) - - #ToDO : what to do when the hash of the downloaded file doesnt match with the online value? discard the file ? warning the user? both? - - print(f"file {file_loc} downloaded successfully") - - - -# following functions are inspired from the torchvision. 
-def _save_response_content( - content, - destination, - length= None, -) : - with open(destination, "wb") as fh, tqdm(total=length) as pbar: - for chunk in content: - # filter out keep-alive new chunks - if not chunk: - continue - - fh.write(chunk) - pbar.update(len(chunk)) - - -def _urlretrieve(url, file_loc, chunk_size = 1024 * 32): - with urllib.request.urlopen(urllib.request.Request(url, headers={"User-Agent": USER_AGENT})) as response: - _save_response_content(iter(lambda: response.read(chunk_size), b""), file_loc, length=response.length) diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 2f8dabc0cb..37787e18d0 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -1,14 +1,9 @@ # code from https://github.com/space-physics/pyzenodo3 and https://github.com/space-physics/pyzenodo3/pull/9 # code is copied here because the repo is inactive and author is not responding; hence no maintainance guarantee. - import requests import re -from bs4 import BeautifulSoup -from bs4.element import Tag -from urllib.parse import urlencode +import pooch from pathlib import Path -from .utils import download_file - BASE_URL = "https://zenodo.org/api/" @@ -18,151 +13,63 @@ def __init__(self, data, zenodo, base_url: str = BASE_URL) -> None: self.data = data self._zenodo = zenodo - def _row_to_version(self, row: Tag) -> dict[str, str]: - link = row.select("a")[0] - linkrec = row.select("a")[0].attrs["href"] - if not linkrec: - raise KeyError("record not found in parsed HTML") - - texts = row.select("small") - recmatch = re.match(r"/record/(\d*)", linkrec) - if not recmatch: - raise LookupError("record match not found in parsed HTML") - - recid = recmatch.group(1) - - return { - "recid": recid, - "name": link.text, - "doi": texts[0].text, - "date": texts[1].text, - "original_version": self._zenodo.get_record(recid).original_version(), - } - - def get_versions(self) -> list: - url = 
f"{self.base_url}srecords?all_versions=1&size=100&q=conceptrecid:{self.data['conceptrecid']}" - - data = requests.get(url).json() - - return [Record(hit, self._zenodo) for hit in data["hits"]["hits"]] - - def get_versions_from_webpage(self) -> list[dict]: - """Get version details from Zenodo webpage (it is not available in the REST api)""" - res = requests.get("https://zenodo.org/record/" + self.data["conceptrecid"]) - soup = BeautifulSoup(res.text, "html.parser") - version_rows = soup.select(".well.metadata > table.table tr") - if len(version_rows) == 0: # when only 1 version - return [ - { - "recid": self.data["id"], - "name": "1", - "doi": self.data["doi"], - "date": self.data["created"], - "original_version": self.original_version(), - } - ] - return [self._row_to_version(row) for row in version_rows if len(row.select("td")) > 1] - - def original_version(self): #TODO: check the implementation once again. - for identifier in self.data["metadata"]["related_identifiers"]: - if identifier["relation"] == "isSupplementTo": - return re.match(r".*/tree/(.*$)", identifier["identifier"]).group(1) - return None + + def describe(self): + + return self.data['metadata']['description'] + def __str__(self): return str(self.data) # TODO: pretty print? Format the json to more readable version. def download(self, root="./"): _root = Path(root) - print(self.data) + #print(self.data) if 'files' not in self.data: raise AttributeError("No files to download! Please check if the id entered is correct!") - for file in self.data['files']: + + for file in self.data["files"]: url = file['links']['self'] - hash_function, checksum = file['checksum'].split(":") - # now we will download the files to the root. - file_name_zenodo = file['key'] + known_hash = file['checksum'] + file_name = file['key'] - if file_name_zenodo in list(_root.iterdir()) and check_integrity(_root+file_name_zenodo,checksum, hash_function): - print(f"{file_name_zenodo} already exists at {root} having same checksum. 
Hence skipping the download!") - continue # the file already exists at the given location and checksum also matches! + pooch.retrieve(url= url, known_hash= known_hash, progressbar=True) - download_file(root= root,url= url, checksum = checksum, hash_function=hash_function) - - - -class Zenodo: - def __init__(self, api_key: str = "", base_url: str = BASE_URL) -> None: - self.base_url = base_url - self._api_key = api_key - self.re_github_repo = re.compile(r".*github.com/(.*?/.*?)[/$]") - - def search(self, search: str) -> list[Record]: - """search Zenodo record for string `search` - - :param search: string to search - :return: Record[] results - """ - search = search.replace("/", " ") # zenodo can't handle '/' in search query - params = {"q": search} - - recs = self._get_records(params) - - if not recs: - raise LookupError(f"No records found for search {search}") - - return recs - def _extract_github_repo(self, identifier): - matches = self.re_github_repo.match(identifier) - if matches: - return matches.group(1) - raise LookupError(f"No records found with {identifier}") - def find_record_by_github_repo(self, search: str): - records = self.search(search) - for record in records: - if ( - "metadata" not in record.data - or "related_identifiers" not in record.data["metadata"] - ): - continue - for identifier in [ - identifier["identifier"] - for identifier in record.data["metadata"]["related_identifiers"] - ]: - repo = self._extract_github_repo(identifier) - if repo and repo.upper() == search.upper(): - return record - raise LookupError(f"No records found in {search}") - def find_record_by_doi(self, doi: str): - params = {"q": f"conceptdoi:{doi.replace('/', '*')}"} - records = self._get_records(params) +class Zenodo: + def __init__(self, api_key: str = "", base_url: str = BASE_URL) -> None: + """ + This class handles all the interactions of the user to the zenodo platform. 
- if len(records) > 0: - return records[0] - else: - params = {"q": "doi:%s" % doi.replace("/", "*")} - return self._get_records(params)[0] + + """ + self.base_url = base_url + self._api_key = api_key + self.re_github_repo = re.compile(r".*github.com/(.*?/.*?)[/$]") + def get_record(self, recid: str) -> Record: - + """ + recid: unique id of the data repository + """ url = self.base_url + "records/" + recid - return Record(requests.get(url).json(), self) - #TODO: can also add get record by user? Will that be useful by any means? def _get_records(self, params: dict[str, str]) -> list[Record]: url = self.base_url + "records?" + urlencode(params) return [Record(hit, self) for hit in requests.get(url).json()["hits"]["hits"]] + + + From 906bf0383be0153cdb865b0d09ab17b2f5d2dc41 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Wed, 7 Jun 2023 21:34:17 +0530 Subject: [PATCH 41/84] add method `get_latest_version` to fetch the latest version of the Record --- tvb_library/tvb/datasets/zenodo.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 37787e18d0..783a427fec 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -1,9 +1,12 @@ # code from https://github.com/space-physics/pyzenodo3 and https://github.com/space-physics/pyzenodo3/pull/9 -# code is copied here because the repo is inactive and author is not responding; hence no maintainance guarantee. + + import requests import re import pooch from pathlib import Path +import json + BASE_URL = "https://zenodo.org/api/" @@ -15,32 +18,28 @@ def __init__(self, data, zenodo, base_url: str = BASE_URL) -> None: def describe(self): - return self.data['metadata']['description'] def __str__(self): - return str(self.data) # TODO: pretty print? Format the json to more readable version. + return json.dumps(self.data) # TODO: pretty print? Format the json to more readable version. 
- def download(self, root="./"): - _root = Path(root) - #print(self.data) + def download(self): if 'files' not in self.data: raise AttributeError("No files to download! Please check if the id entered is correct!") - - for file in self.data["files"]: url = file['links']['self'] known_hash = file['checksum'] file_name = file['key'] - pooch.retrieve(url= url, known_hash= known_hash, progressbar=True) - - - + file_path = pooch.retrieve(url= url, known_hash= known_hash, progressbar=True) + print(f"file {file_name} is downloaded at {file_path}") + def get_latest_version(self): + + return Zenodo().get_record(self.data['links']['latest'].split("/")[-1]) @@ -63,6 +62,7 @@ def get_record(self, recid: str) -> Record: recid: unique id of the data repository """ url = self.base_url + "records/" + recid + return Record(requests.get(url).json(), self) From 34440b7f23a5b49852f64d25bf9f1733c8835468 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Sun, 11 Jun 2023 21:26:57 +0530 Subject: [PATCH 42/84] add pooch as a dependancy --- tvb_library/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tvb_library/setup.py b/tvb_library/setup.py index eac16d7970..1e971ab73e 100644 --- a/tvb_library/setup.py +++ b/tvb_library/setup.py @@ -40,7 +40,7 @@ LIBRARY_TEAM = "Marmaduke Woodman, Jan Fousek, Stuart Knock, Paula Sanz Leon, Viktor Jirsa" LIBRARY_REQUIRED_PACKAGES = ["autopep8", "Deprecated", "docutils", "ipywidgets", "lxml", "mako>=1.1.4", "matplotlib", - "networkx", "numba", "numexpr", "numpy", "pylems", "scipy", "six"] + "networkx", "numba", "numexpr", "numpy", "pooch","pylems", "scipy", "six"] LIBRARY_REQUIRED_EXTRA = ["h5py", "pytest", "pytest-benchmark", "pytest-xdist", "tvb-gdist", "tvb-data"] From 7a2fcee61ae24bdd6ca23395242298cc096a88fa Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Mon, 12 Jun 2023 17:44:45 +0530 Subject: [PATCH 43/84] add tvb class and tests --- tvb_library/tvb/datasets/tvb_data.py | 23 +++++++++ tvb_library/tvb/datasets/zenodo.py | 
49 ++++++++++++++++--- .../tvb/tests/library/datasets/__init__.py | 0 .../tvb/tests/library/datasets/zenodo_test.py | 44 +++++++++++++++++ 4 files changed, 108 insertions(+), 8 deletions(-) create mode 100644 tvb_library/tvb/datasets/tvb_data.py create mode 100644 tvb_library/tvb/tests/library/datasets/__init__.py create mode 100644 tvb_library/tvb/tests/library/datasets/zenodo_test.py diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py new file mode 100644 index 0000000000..7dd6398a98 --- /dev/null +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -0,0 +1,23 @@ +from .zenodo import Zenodo, Record + + +class TVB_Data: + + conceptid = "" + + def __init__(self, version= "2.7", ): + + recid = Zenodo().get_version_info(self.conceptid)[version] + self.rec = Zenodo.get_record(recid) + + + def download(self): + + self.rec.download() + + def fetch_data(self): + pass + + + + diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 783a427fec..c7cf418027 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -11,37 +11,45 @@ class Record: - def __init__(self, data, zenodo, base_url: str = BASE_URL) -> None: + def __init__(self, data, base_url: str = BASE_URL) -> None: self.base_url = base_url self.data = data - self._zenodo = zenodo - + self.file_loc = {} def describe(self): return self.data['metadata']['description'] def __str__(self): - return json.dumps(self.data) # TODO: pretty print? Format the json to more readable version. + return json.dumps(self.data, indent=2) def download(self): + if 'files' not in self.data: - raise AttributeError("No files to download! Please check if the id entered is correct!") + raise AttributeError("No files to download! Please check if the record id entered is correct! 
or the data is publically accessible") + + for file in self.data["files"]: url = file['links']['self'] known_hash = file['checksum'] file_name = file['key'] - + file_path = pooch.retrieve(url= url, known_hash= known_hash, progressbar=True) + + self.file_loc['file_name'] = file_path + print(f"file {file_name} is downloaded at {file_path}") + def get_latest_version(self): return Zenodo().get_record(self.data['links']['latest'].split("/")[-1]) + def __eq__(self, record_b): + return (self.data == record_b.data) @@ -63,13 +71,38 @@ def get_record(self, recid: str) -> Record: """ url = self.base_url + "records/" + recid - return Record(requests.get(url).json(), self) + return Record(requests.get(url).json()) def _get_records(self, params: dict[str, str]) -> list[Record]: url = self.base_url + "records?" + urlencode(params) - return [Record(hit, self) for hit in requests.get(url).json()["hits"]["hits"]] + return [Record(hit) for hit in requests.get(url).json()["hits"]["hits"]] + + + + + def get_versions_info(self, recid): + + recid = self.get_record(recid).data['metadata']['relations']['version'][0]['parent']['pid_value'] + + print(recid) + + versions = {} + + url = f"{self.base_url}records?q=conceptrecid:{recid}&all_versions=true" + + + for hit in requests.get(url).json()['hits']['hits']: + + version = hit['metadata']['version'] + recid = hit['doi'].split(".")[-1] + + versions[version] = recid + + + return versions + diff --git a/tvb_library/tvb/tests/library/datasets/__init__.py b/tvb_library/tvb/tests/library/datasets/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tvb_library/tvb/tests/library/datasets/zenodo_test.py b/tvb_library/tvb/tests/library/datasets/zenodo_test.py new file mode 100644 index 0000000000..ce5baccab8 --- /dev/null +++ b/tvb_library/tvb/tests/library/datasets/zenodo_test.py @@ -0,0 +1,44 @@ +from tvb.datasets import Zenodo, Record +from pathlib import Path + + +class TestZenodo(BaseTestCase): + + def 
test_get_record(self): + + zenodo = Zenodo() + rec = zenodo.get_record("7574266") + + assert type(rec) == Record + assert rec.data["doi"] == "10.5281/zenodo.7574266" + + del rec + del zenodo + + + def test_get_versions(self): + + zenodo = Zenodo() + versions = zenodo.get_versions_info() + + assert type(versions) == dict + assert versions == {'2.0.1': '3497545', '1.5.9.b': '3474071', '2.0.0': '3491055', '2.0.3': '4263723', '2.0.2': '3688773', '1.5.9': '3417207', '2.7': '7574266'} + + del zenodo + del versions + +class TestRecord(BaseTestCase): + + + def test_download(self): + + zen = Zenodo() + + rec = zenodo.get_record("7574266") + + rec.download() + + for file_name, file_path in rec.file_loc: + assert Path(file_path).is_file() + + From cbeb92b8e2e9b789aefe7d5f798b069a11d2b08e Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Mon, 12 Jun 2023 17:54:04 +0530 Subject: [PATCH 44/84] minor nits and fixes --- tvb_library/tvb/datasets/__init__.py | 1 + tvb_library/tvb/datasets/tvb_data.py | 6 +++--- tvb_library/tvb/datasets/zenodo.py | 6 +++--- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/tvb_library/tvb/datasets/__init__.py b/tvb_library/tvb/datasets/__init__.py index ec00e5f91b..e2d2cf8117 100644 --- a/tvb_library/tvb/datasets/__init__.py +++ b/tvb_library/tvb/datasets/__init__.py @@ -1 +1,2 @@ from .zenodo import Record, Zenodo +from .tvb_data import TVB_Data diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 7dd6398a98..e4357dbb61 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -3,12 +3,12 @@ class TVB_Data: - conceptid = "" + conceptid = "3417206" def __init__(self, version= "2.7", ): - recid = Zenodo().get_version_info(self.conceptid)[version] - self.rec = Zenodo.get_record(recid) + recid = Zenodo().get_versions_info(self.conceptid)[version] + self.rec = Zenodo().get_record(recid) def download(self): diff --git a/tvb_library/tvb/datasets/zenodo.py 
b/tvb_library/tvb/datasets/zenodo.py index c7cf418027..fa9023aae8 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -83,11 +83,11 @@ def _get_records(self, params: dict[str, str]) -> list[Record]: def get_versions_info(self, recid): - + """ + recid: unique id of the data repository + """ recid = self.get_record(recid).data['metadata']['relations']['version'][0]['parent']['pid_value'] - print(recid) - versions = {} url = f"{self.base_url}records?q=conceptrecid:{recid}&all_versions=true" From b675f92379c2fb9ecd6d1a81d6298fde3a74002f Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Fri, 16 Jun 2023 08:41:28 +0530 Subject: [PATCH 45/84] added todo in fetch_data method --- tvb_library/tvb/datasets/base.py | 19 +++++ tvb_library/tvb/datasets/tvb_data.py | 117 ++++++++++++++++++++++++--- tvb_library/tvb/datasets/zenodo.py | 9 ++- 3 files changed, 133 insertions(+), 12 deletions(-) create mode 100644 tvb_library/tvb/datasets/base.py diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py new file mode 100644 index 0000000000..0ea746bc11 --- /dev/null +++ b/tvb_library/tvb/datasets/base.py @@ -0,0 +1,19 @@ + + +class ZenodoDataset: + + def __init__(self, version): + self.version = version + + def download(self): + pass + + def fetch_data(self, file_name, extract_dir): + pass + + def get_version(self): + return self.version + + def get_recid(self): + return self.recid + diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index e4357dbb61..305c088e63 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -1,23 +1,122 @@ -from .zenodo import Zenodo, Record +import requests +import json +import pooch +from pathlib import Path +import logging +from zipfile import ZipFile +from .base import ZenodoDataset +from .zenodo import Zenodo, Record, BASE_URL -class TVB_Data: +class TVB_Data(ZenodoDataset): - conceptid = "3417206" + CONCEPTID = 
"3417206" - def __init__(self, version= "2.7", ): + def __init__(self, version= "2.7"): + """ + Constructor for TVB_Data class - recid = Zenodo().get_versions_info(self.conceptid)[version] - self.rec = Zenodo().get_record(recid) + parameters + ----------- + version: str + - Version number of the dataset, Default value is 2. - def download(self): + """ + super().__init__(version) + try: + self.recid = self.read_cached_response()[version]['conceptrecid'] + except: + logging.warning("Data not found in cached response, updating the cached responses") + self.recid = Zenodo().get_versions_info(self.CONCEPTID)[version] + self.update_cached_response() + + self.rec = Record(self.read_cached_response()[self.version]) + logging.info(f"instantiated TVB_Data class with version {version}") + + def download(self): + """ + Downloads the dataset to the cached location, skips download is file already present at the path. + """ self.rec.download() - def fetch_data(self): - pass + def fetch_data(self, file_name=None, extract_dir=None): + """ + Fetches the data + + parameters: + ----------- + file_name: str + - Name of the file from the downloaded zip file to fetch. If `None`, extracts whole archive. Default is `None` + extract_dir: str + - Path where you want to extract the archive, if `None` extracts the archive to current working directory. Default is `None` + + + returns: Pathlib.Path + path of the file which was extracted + """ + + #TODO: errrors when absolute path given. 
+ try: + file_path = self.rec.file_loc['tvb_data.zip'] + except: + self.download() + file_path = self.rec.file_loc['tvb_data.zip'] + + + if file_name == None: + ZipFile(file_path).extractall(path=extract_dir) + if extract_dir==None: + return Path.cwd() + return Path.cwd()/ Path(extract_dir) + + with ZipFile(file_path) as zf: + file_names_in_zip = zf.namelist() + zf.close() + + file_names_in_zip = {str(Path(i).name): i for i in file_names_in_zip} + ZipFile(file_path).extract(file_names_in_zip[file_name]) + return Path.cwd() / file_names_in_zip[file_name] + + + def update_cached_response(self): + """ + gets responses from zenodo server and saves them to cache file. + """ + + file_dir = pooch.os_cache("pooch")/ "tvb_cached_responses.txt" + + responses = {} + + url = f"{BASE_URL}records?q=conceptrecid:{self.CONCEPTID}&all_versions=true" + + for hit in requests.get(url).json()['hits']['hits']: + version = hit['metadata']['version'] + response = hit + + responses[version] = response + + with open(file_dir, "w") as fp: + json.dump(responses, fp) + fp.close() + + return + + def read_cached_response(self): + """ + reads responses from the cache file. 
+ + """ + + file_dir = pooch.os_cache("pooch") / "tvb_cached_responses.txt" + + with open(file_dir) as fp: + responses = json.load(fp) + fp.close() + responses = dict(responses) + return responses diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index fa9023aae8..859dd6ea7b 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -36,8 +36,8 @@ def download(self): file_name = file['key'] file_path = pooch.retrieve(url= url, known_hash= known_hash, progressbar=True) - - self.file_loc['file_name'] = file_path + + self.file_loc[f'{file_name}'] = file_path print(f"file {file_name} is downloaded at {file_path}") @@ -62,7 +62,6 @@ def __init__(self, api_key: str = "", base_url: str = BASE_URL) -> None: """ self.base_url = base_url self._api_key = api_key - self.re_github_repo = re.compile(r".*github.com/(.*?/.*?)[/$]") def get_record(self, recid: str) -> Record: @@ -85,7 +84,11 @@ def _get_records(self, params: dict[str, str]) -> list[Record]: def get_versions_info(self, recid): """ recid: unique id of the data repository + """ + # needs ineternet + + recid = self.get_record(recid).data['metadata']['relations']['version'][0]['parent']['pid_value'] versions = {} From 7c8248762fdf49f659b81053dfb0cc20ec5bd111 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Fri, 16 Jun 2023 10:27:25 +0530 Subject: [PATCH 46/84] fixed the absolute path error --- tvb_library/tvb/datasets/tvb_data.py | 27 ++++++++++++++++++++++----- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 305c088e63..3475e97e78 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -57,27 +57,44 @@ def fetch_data(self, file_name=None, extract_dir=None): path of the file which was extracted """ - #TODO: errrors when absolute path given. 
+ + try: file_path = self.rec.file_loc['tvb_data.zip'] except: self.download() file_path = self.rec.file_loc['tvb_data.zip'] - + if (extract_dir!=None): + extract_dir = Path(extract_dir).expanduser() + if file_name == None: ZipFile(file_path).extractall(path=extract_dir) if extract_dir==None: return Path.cwd() - return Path.cwd()/ Path(extract_dir) + if extract_dir.is_absolute(): + return extract_dir + + return Path.cwd()/ extract_dir with ZipFile(file_path) as zf: file_names_in_zip = zf.namelist() zf.close() file_names_in_zip = {str(Path(i).name): i for i in file_names_in_zip} - ZipFile(file_path).extract(file_names_in_zip[file_name]) - return Path.cwd() / file_names_in_zip[file_name] + if extract_dir==None: + ZipFile(file_path).extract(file_names_in_zip[file_name]) + + ZipFile(file_path).extract(file_names_in_zip[file_name], path = extract_dir) + + + if extract_dir == None: + return Path.cwd() / file_names_in_zip[file_name] + if extract_dir.is_absolute(): + return extract_dir / file_names_in_zip[file_name] + + + return Path.cwd()/ extract_dir / file_names_in_zip[file_name] def update_cached_response(self): From 7e78f97e97e6151a12a7e5fbb63ef8a3d95b1784 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Sat, 17 Jun 2023 11:47:11 +0530 Subject: [PATCH 47/84] added header --- tvb_library/tvb/datasets/base.py | 30 ++++++++++++++++++++++++++++ tvb_library/tvb/datasets/tvb_data.py | 30 ++++++++++++++++++++++++++++ tvb_library/tvb/datasets/zenodo.py | 30 ++++++++++++++++++++++++++++ 3 files changed, 90 insertions(+) diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py index 0ea746bc11..8499d12ead 100644 --- a/tvb_library/tvb/datasets/base.py +++ b/tvb_library/tvb/datasets/base.py @@ -1,3 +1,33 @@ +# -*- coding: utf-8 -*- +# +# +# TheVirtualBrain-Scientific Package. This package holds all simulators, and +# analysers necessary to run brain-simulations. You can use it stand alone or +# in conjunction with TheVirtualBrain-Framework Package. 
See content of the +# documentation-folder for more details. See also http://www.thevirtualbrain.org +# +# (c) 2012-2023, Baycrest Centre for Geriatric Care ("Baycrest") and others +# +# This program is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software Foundation, +# either version 3 of the License, or (at your option) any later version. +# This program is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. See the GNU General Public License for more details. +# You should have received a copy of the GNU General Public License along with this +# program. If not, see . +# +# +# CITATION: +# When using The Virtual Brain for scientific publications, please cite it as explained here: +# https://www.thevirtualbrain.org/tvb/zwei/neuroscience-publications +# +# + +""" +.. moduleauthor:: Abhijit Deo +""" + class ZenodoDataset: diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 3475e97e78..308550a8c3 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -1,3 +1,33 @@ +# -*- coding: utf-8 -*- +# +# +# TheVirtualBrain-Scientific Package. This package holds all simulators, and +# analysers necessary to run brain-simulations. You can use it stand alone or +# in conjunction with TheVirtualBrain-Framework Package. See content of the +# documentation-folder for more details. See also http://www.thevirtualbrain.org +# +# (c) 2012-2023, Baycrest Centre for Geriatric Care ("Baycrest") and others +# +# This program is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software Foundation, +# either version 3 of the License, or (at your option) any later version. 
+# This program is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. See the GNU General Public License for more details. +# You should have received a copy of the GNU General Public License along with this +# program. If not, see . +# +# +# CITATION: +# When using The Virtual Brain for scientific publications, please cite it as explained here: +# https://www.thevirtualbrain.org/tvb/zwei/neuroscience-publications +# +# + +""" +.. moduleauthor:: Abhijit Deo +""" + import requests import json import pooch diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 859dd6ea7b..f3c103c97e 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -1,3 +1,33 @@ +## -*- coding: utf-8 -*- +# +# +# TheVirtualBrain-Scientific Package. This package holds all simulators, and +# analysers necessary to run brain-simulations. You can use it stand alone or +# in conjunction with TheVirtualBrain-Framework Package. See content of the +# documentation-folder for more details. See also http://www.thevirtualbrain.org +# +# (c) 2012-2023, Baycrest Centre for Geriatric Care ("Baycrest") and others +# +# This program is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software Foundation, +# either version 3 of the License, or (at your option) any later version. +# This program is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. See the GNU General Public License for more details. +# You should have received a copy of the GNU General Public License along with this +# program. If not, see . 
+# +# +# CITATION: +# When using The Virtual Brain for scientific publications, please cite it as explained here: +# https://www.thevirtualbrain.org/tvb/zwei/neuroscience-publications +# +# + +""" +.. moduleauthor:: Abhijit Deo +""" + # code from https://github.com/space-physics/pyzenodo3 and https://github.com/space-physics/pyzenodo3/pull/9 From 027587f3712f42588d1f8c6baacef9ceca16cb34 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Sat, 17 Jun 2023 11:48:47 +0530 Subject: [PATCH 48/84] added header --- .../tvb/tests/library/datasets/zenodo_test.py | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/tvb_library/tvb/tests/library/datasets/zenodo_test.py b/tvb_library/tvb/tests/library/datasets/zenodo_test.py index ce5baccab8..6d3ff779bd 100644 --- a/tvb_library/tvb/tests/library/datasets/zenodo_test.py +++ b/tvb_library/tvb/tests/library/datasets/zenodo_test.py @@ -1,3 +1,37 @@ + +# -*- coding: utf-8 -*- +# +# +# TheVirtualBrain-Scientific Package. This package holds all simulators, and +# analysers necessary to run brain-simulations. You can use it stand alone or +# in conjunction with TheVirtualBrain-Framework Package. See content of the +# documentation-folder for more details. See also http://www.thevirtualbrain.org +# +# (c) 2012-2023, Baycrest Centre for Geriatric Care ("Baycrest") and others +# +# This program is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software Foundation, +# either version 3 of the License, or (at your option) any later version. +# This program is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. See the GNU General Public License for more details. +# You should have received a copy of the GNU General Public License along with this +# program. If not, see . 
+# +# +# CITATION: +# When using The Virtual Brain for scientific publications, please cite it as explained here: +# https://www.thevirtualbrain.org/tvb/zwei/neuroscience-publications +# +# + +""" +.. moduleauthor:: Abhijit Deo +""" + + + + from tvb.datasets import Zenodo, Record from pathlib import Path From 16eef16d36a51f15cb6dc9bce3bad4cbca744254 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Sat, 17 Jun 2023 12:00:02 +0530 Subject: [PATCH 49/84] added DATASETS_FOLDER attribute to config settings. --- tvb_library/tvb/basic/config/profile_settings.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tvb_library/tvb/basic/config/profile_settings.py b/tvb_library/tvb/basic/config/profile_settings.py index 052ef6a7ab..eaf8018775 100644 --- a/tvb_library/tvb/basic/config/profile_settings.py +++ b/tvb_library/tvb/basic/config/profile_settings.py @@ -95,6 +95,10 @@ def __init__(self): # The number of logs in a message batch that are sent to the server self.ELASTICSEARCH_BUFFER_THRESHOLD = self.manager.get_attribute(stored.KEY_ELASTICSEARCH_BUFFER_THRESHOLD, 1000000, int) + # Directory where all the datasets will be extracted/unzipped. 
+ self.DATASETS_FOLDER = os.path(self.TVB_STORAGE, "DATASETS") + + @property def BIN_FOLDER(self): """ From 696b599285ae267d78ff8bb11f05c3f9373873f2 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Sat, 17 Jun 2023 20:12:01 +0530 Subject: [PATCH 50/84] some improvements in tests, now from_cls method of connectivity loads data using new method --- .../tvb/basic/config/profile_settings.py | 2 +- tvb_library/tvb/datasets/__init__.py | 2 +- tvb_library/tvb/datasets/base.py | 18 +++--- tvb_library/tvb/datasets/tvb_data.py | 58 +++++++++++++------ tvb_library/tvb/datatypes/connectivity.py | 6 +- .../tvb/tests/library/datasets/zenodo_test.py | 16 ++--- 6 files changed, 64 insertions(+), 38 deletions(-) diff --git a/tvb_library/tvb/basic/config/profile_settings.py b/tvb_library/tvb/basic/config/profile_settings.py index eaf8018775..880cb6303b 100644 --- a/tvb_library/tvb/basic/config/profile_settings.py +++ b/tvb_library/tvb/basic/config/profile_settings.py @@ -96,7 +96,7 @@ def __init__(self): self.ELASTICSEARCH_BUFFER_THRESHOLD = self.manager.get_attribute(stored.KEY_ELASTICSEARCH_BUFFER_THRESHOLD, 1000000, int) # Directory where all the datasets will be extracted/unzipped. 
- self.DATASETS_FOLDER = os.path(self.TVB_STORAGE, "DATASETS") + self.DATASETS_FOLDER = os.path.join(self.TVB_STORAGE, "DATASETS") @property diff --git a/tvb_library/tvb/datasets/__init__.py b/tvb_library/tvb/datasets/__init__.py index e2d2cf8117..995b08e09f 100644 --- a/tvb_library/tvb/datasets/__init__.py +++ b/tvb_library/tvb/datasets/__init__.py @@ -1,2 +1,2 @@ from .zenodo import Record, Zenodo -from .tvb_data import TVB_Data +from .tvb_data import TVBZenodoDataset diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py index 8499d12ead..6ca123c4fe 100644 --- a/tvb_library/tvb/datasets/base.py +++ b/tvb_library/tvb/datasets/base.py @@ -30,20 +30,22 @@ -class ZenodoDataset: +from tvb.basic.logger.builder import get_logger +from tvb.basic.profile import TvbProfile - def __init__(self, version): +class BaseDataset: + + def __init__(self, version, target_download=None): + + self.log = get_logger(self.__class__.__module__) + self.cached_files = None self.version = version def download(self): pass - def fetch_data(self, file_name, extract_dir): + def fetch_data(self): pass - + def get_version(self): return self.version - - def get_recid(self): - return self.recid - diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 308550a8c3..4ae3c8bedd 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -27,18 +27,17 @@ """ .. 
moduleauthor:: Abhijit Deo """ - +import os import requests import json import pooch from pathlib import Path -import logging from zipfile import ZipFile -from .base import ZenodoDataset +from .base import BaseDataset from .zenodo import Zenodo, Record, BASE_URL -class TVB_Data(ZenodoDataset): +class TVBZenodoDataset(BaseDataset): CONCEPTID = "3417206" @@ -54,17 +53,23 @@ def __init__(self, version= "2.7"): """ super().__init__(version) + self.cached_file = pooch.os_cache("pooch")/ "tvb_cached_responses.txt" + try: self.recid = self.read_cached_response()[version]['conceptrecid'] - except: - logging.warning("Data not found in cached response, updating the cached responses") + + except KeyError: + self.log.warning(f"Failed to read data from cached response.") self.recid = Zenodo().get_versions_info(self.CONCEPTID)[version] - self.update_cached_response() + self.update_cached_response() + except: + self.log.warning(f"Failed to get the desired version {version} of TVB_Data, please check if version {version} is available as a public record on zenodo.org or Please check your internet connection") + + # add logging errors method by catching the exact exceptions. self.rec = Record(self.read_cached_response()[self.version]) - - logging.info(f"instantiated TVB_Data class with version {version}") - + + print(type(self)) def download(self): """ Downloads the dataset to the cached location, skips download is file already present at the path. 
@@ -95,13 +100,14 @@ def fetch_data(self, file_name=None, extract_dir=None): self.download() file_path = self.rec.file_loc['tvb_data.zip'] - if (extract_dir!=None): - extract_dir = Path(extract_dir).expanduser() + if (extract_dir==None): + extract_dir = TvbProfile.current.DATASETS_FOLDER + + extract_dir = Path(extract_dir).expanduser() if file_name == None: ZipFile(file_path).extractall(path=extract_dir) - if extract_dir==None: - return Path.cwd() + if extract_dir.is_absolute(): return extract_dir @@ -118,8 +124,6 @@ def fetch_data(self, file_name=None, extract_dir=None): ZipFile(file_path).extract(file_names_in_zip[file_name], path = extract_dir) - if extract_dir == None: - return Path.cwd() / file_names_in_zip[file_name] if extract_dir.is_absolute(): return extract_dir / file_names_in_zip[file_name] @@ -132,7 +136,7 @@ def update_cached_response(self): gets responses from zenodo server and saves them to cache file. """ - file_dir = pooch.os_cache("pooch")/ "tvb_cached_responses.txt" + file_dir = self.cached_file responses = {} @@ -147,7 +151,7 @@ def update_cached_response(self): with open(file_dir, "w") as fp: json.dump(responses, fp) fp.close() - + self.log.warning("Updated the cache response file") return def read_cached_response(self): @@ -156,7 +160,7 @@ def read_cached_response(self): """ - file_dir = pooch.os_cache("pooch") / "tvb_cached_responses.txt" + file_dir = self.cached_file with open(file_dir) as fp: @@ -167,3 +171,19 @@ def read_cached_response(self): responses = dict(responses) return responses + + + def describe(self): + return self.rec.describe() + + def get_record(self): + return self.recid + + def __str__(self): + return f"TVB Data version : {self.version}" + + def __eq__(self, other): + if isinstace(other, TVBZenodoDataset): + return self.rec == tvb_data.rec + return False + diff --git a/tvb_library/tvb/datatypes/connectivity.py b/tvb_library/tvb/datatypes/connectivity.py index fec76be6ad..ce6d84a582 100644 --- 
a/tvb_library/tvb/datatypes/connectivity.py +++ b/tvb_library/tvb/datatypes/connectivity.py @@ -40,7 +40,7 @@ from tvb.basic.neotraits.api import Attr, NArray, List, HasTraits, Int, narray_summary_info from tvb.basic.neotraits.ex import TraitAttributeError from tvb.basic.readers import ZipReader, H5Reader, try_get_absolute_path - +from tvb.datasets import TVBZenodoDataset class Connectivity(HasTraits): region_labels = NArray( @@ -719,8 +719,10 @@ def _read(cls, reader): def from_file(cls, source_file="connectivity_76.zip"): result = Connectivity() - source_full_path = try_get_absolute_path("tvb_data.connectivity", source_file) + #source_full_path = try_get_absolute_path("tvb_data.connectivity", source_file) + source_full_path = TVBZenodoDataset().fetch_data(source_file) + if source_file.endswith(".h5"): reader = H5Reader(source_full_path) diff --git a/tvb_library/tvb/tests/library/datasets/zenodo_test.py b/tvb_library/tvb/tests/library/datasets/zenodo_test.py index 6d3ff779bd..50084e7e75 100644 --- a/tvb_library/tvb/tests/library/datasets/zenodo_test.py +++ b/tvb_library/tvb/tests/library/datasets/zenodo_test.py @@ -30,10 +30,11 @@ """ - +#TODO : change the record id to the latest when done testing on local machine. 
:) :wq from tvb.datasets import Zenodo, Record from pathlib import Path +from tvb.tests.library.base_testcase import BaseTestCase class TestZenodo(BaseTestCase): @@ -41,10 +42,10 @@ class TestZenodo(BaseTestCase): def test_get_record(self): zenodo = Zenodo() - rec = zenodo.get_record("7574266") + rec = zenodo.get_record("4263723") assert type(rec) == Record - assert rec.data["doi"] == "10.5281/zenodo.7574266" + assert rec.data["doi"] == "10.5281/zenodo.4263723" del rec del zenodo @@ -53,7 +54,7 @@ def test_get_record(self): def test_get_versions(self): zenodo = Zenodo() - versions = zenodo.get_versions_info() + versions = zenodo.get_versions_info("3491055") assert type(versions) == dict assert versions == {'2.0.1': '3497545', '1.5.9.b': '3474071', '2.0.0': '3491055', '2.0.3': '4263723', '2.0.2': '3688773', '1.5.9': '3417207', '2.7': '7574266'} @@ -68,11 +69,12 @@ def test_download(self): zen = Zenodo() - rec = zenodo.get_record("7574266") + rec = zen.get_record("4263723") - rec.download() - for file_name, file_path in rec.file_loc: + rec.download() + print(rec.file_loc) + for file_name, file_path in rec.file_loc.items(): assert Path(file_path).is_file() From 7cbc3f609786224a9a0b6de4b3b2d917b445956e Mon Sep 17 00:00:00 2001 From: Abhijit Deo <72816663+abhi-glitchhg@users.noreply.github.com> Date: Fri, 16 Jun 2023 10:09:12 +0530 Subject: [PATCH 51/84] Update tvb_library/tvb/datasets/tvb_data.py --- tvb_library/tvb/datasets/tvb_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 4ae3c8bedd..46b117998e 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -49,7 +49,7 @@ def __init__(self, version= "2.7"): ----------- version: str - - Version number of the dataset, Default value is 2. 
+ - Version number of the dataset, Default value is 2.7 """ super().__init__(version) From c454341fc265a3149ad8a22c2e0e8da984246b23 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Sat, 17 Jun 2023 20:19:52 +0530 Subject: [PATCH 52/84] fix typos --- tvb_library/tvb/datasets/tvb_data.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 46b117998e..3086e96c62 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -66,10 +66,9 @@ def __init__(self, version= "2.7"): except: self.log.warning(f"Failed to get the desired version {version} of TVB_Data, please check if version {version} is available as a public record on zenodo.org or Please check your internet connection") - # add logging errors method by catching the exact exceptions. + #TODO add logging errors method by catching the exact exceptions. self.rec = Record(self.read_cached_response()[self.version]) - print(type(self)) def download(self): """ Downloads the dataset to the cached location, skips download is file already present at the path. From ce408d412e0bf746b10c8437b1c876d6c37241f2 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Tue, 20 Jun 2023 14:03:23 +0530 Subject: [PATCH 53/84] extract dir parameter when instantiating the class. 
--- tvb_library/tvb/datasets/base.py | 6 ++++++ tvb_library/tvb/datasets/tvb_data.py | 6 ++---- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py index 6ca123c4fe..93e678bdfd 100644 --- a/tvb_library/tvb/datasets/base.py +++ b/tvb_library/tvb/datasets/base.py @@ -41,6 +41,12 @@ def __init__(self, version, target_download=None): self.cached_files = None self.version = version + if (extract_dir==None): + extract_dir = TvbProfile.current.DATASETS_FOLDER + + self.extract_dir = Path(extract_dir).expanduser() + + def download(self): pass diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 3086e96c62..c7a0ecbc77 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -75,7 +75,7 @@ def download(self): """ self.rec.download() - def fetch_data(self, file_name=None, extract_dir=None): + def fetch_data(self, file_name=None): """ Fetches the data @@ -99,10 +99,8 @@ def fetch_data(self, file_name=None, extract_dir=None): self.download() file_path = self.rec.file_loc['tvb_data.zip'] - if (extract_dir==None): - extract_dir = TvbProfile.current.DATASETS_FOLDER + extract_dir = self.extract_dir - extract_dir = Path(extract_dir).expanduser() if file_name == None: ZipFile(file_path).extractall(path=extract_dir) From 3bdf9db14fbbbf5c543fb8d23df295cf226f7490 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Fri, 23 Jun 2023 10:33:27 +0530 Subject: [PATCH 54/84] change download_dir to .cache/tvb from .cache/pooch --- tvb_library/tvb/datasets/base.py | 3 ++- tvb_library/tvb/datasets/tvb_data.py | 10 ++++------ tvb_library/tvb/datasets/zenodo.py | 2 +- 3 files changed, 7 insertions(+), 8 deletions(-) diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py index 93e678bdfd..599c47c466 100644 --- a/tvb_library/tvb/datasets/base.py +++ b/tvb_library/tvb/datasets/base.py @@ -32,10 +32,11 @@ from 
tvb.basic.logger.builder import get_logger from tvb.basic.profile import TvbProfile +from pathlib import Path class BaseDataset: - def __init__(self, version, target_download=None): + def __init__(self, version, extract_dir=None): self.log = get_logger(self.__class__.__module__) self.cached_files = None diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index c7a0ecbc77..78b07a0cef 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -41,7 +41,7 @@ class TVBZenodoDataset(BaseDataset): CONCEPTID = "3417206" - def __init__(self, version= "2.7"): + def __init__(self, version= "2.7", extract_dir = None): """ Constructor for TVB_Data class @@ -52,19 +52,17 @@ def __init__(self, version= "2.7"): - Version number of the dataset, Default value is 2.7 """ - super().__init__(version) - self.cached_file = pooch.os_cache("pooch")/ "tvb_cached_responses.txt" + super().__init__(version, extract_dir) + self.cached_file = pooch.os_cache("tvb")/ "tvb_cached_responses.txt" try: self.recid = self.read_cached_response()[version]['conceptrecid'] - except KeyError: + except : self.log.warning(f"Failed to read data from cached response.") self.recid = Zenodo().get_versions_info(self.CONCEPTID)[version] self.update_cached_response() - except: - self.log.warning(f"Failed to get the desired version {version} of TVB_Data, please check if version {version} is available as a public record on zenodo.org or Please check your internet connection") #TODO add logging errors method by catching the exact exceptions. 
self.rec = Record(self.read_cached_response()[self.version]) diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index f3c103c97e..1685a2e03f 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -65,7 +65,7 @@ def download(self): known_hash = file['checksum'] file_name = file['key'] - file_path = pooch.retrieve(url= url, known_hash= known_hash, progressbar=True) + file_path = pooch.retrieve(url= url, known_hash= known_hash, path = pooch.os_cache("tvb") ,progressbar = True) self.file_loc[f'{file_name}'] = file_path From 64c2d08969477c32d7fd8d9721477594a72e2fd2 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Thu, 29 Jun 2023 11:23:00 +0530 Subject: [PATCH 55/84] . --- tvb_library/tvb/datasets/tvb_data.py | 54 ++++++------ tvb_library/tvb/datasets/zenodo.py | 37 +++++--- .../library/datasets/TVBZenodoDataset_test.py | 84 +++++++++++++++++++ .../tvb/tests/library/datasets/zenodo_test.py | 5 +- 4 files changed, 142 insertions(+), 38 deletions(-) create mode 100644 tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 78b07a0cef..50f97b02e3 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -27,12 +27,14 @@ """ .. 
moduleauthor:: Abhijit Deo """ + import os import requests import json import pooch from pathlib import Path from zipfile import ZipFile +import shutil from .base import BaseDataset from .zenodo import Zenodo, Record, BASE_URL @@ -53,8 +55,12 @@ def __init__(self, version= "2.7", extract_dir = None): """ super().__init__(version, extract_dir) - self.cached_file = pooch.os_cache("tvb")/ "tvb_cached_responses.txt" - + self.cached_dir = self.extract_dir / ".cache" + self.cached_file = self.cached_dir / "tvb_cached_responses.txt" + + if not self.cached_dir.is_dir(): + self.cached_dir.mkdir(parents=True) + try: self.recid = self.read_cached_response()[version]['conceptrecid'] @@ -67,51 +73,46 @@ def __init__(self, version= "2.7", extract_dir = None): #TODO add logging errors method by catching the exact exceptions. self.rec = Record(self.read_cached_response()[self.version]) - def download(self): + def download(self, path=None): """ - Downloads the dataset to the cached location, skips download is file already present at the path. + Downloads the dataset to `path` """ - self.rec.download() + self.rec.download(path) - def fetch_data(self, file_name=None): + def fetch_data(self, file_name): """ Fetches the data parameters: ----------- file_name: str - - Name of the file from the downloaded zip file to fetch. If `None`, extracts whole archive. Default is `None` + - Name of the file from the downloaded zip file to fetch. extract_dir: str - - Path where you want to extract the archive, if `None` extracts the archive to current working directory. Default is `None` + - Path where you want to extract the archive. If Path is None, dataset is extracted according to the tvb profile configuration returns: Pathlib.Path path of the file which was extracted """ + # TODO: extract dir needs better description. 
- + + extract_dir = self.extract_dir + download_dir = self.cached_dir / "TVB_Data" try: file_path = self.rec.file_loc['tvb_data.zip'] except: - self.download() + self.download(path = download_dir) file_path = self.rec.file_loc['tvb_data.zip'] - extract_dir = self.extract_dir - - - if file_name == None: - ZipFile(file_path).extractall(path=extract_dir) - - if extract_dir.is_absolute(): - return extract_dir - - return Path.cwd()/ extract_dir - with ZipFile(file_path) as zf: file_names_in_zip = zf.namelist() zf.close() + file_name = file_name.strip() + + file_names_in_zip = {str(Path(i).name): i for i in file_names_in_zip} if extract_dir==None: ZipFile(file_path).extract(file_names_in_zip[file_name]) @@ -125,6 +126,10 @@ def fetch_data(self, file_name=None): return Path.cwd()/ extract_dir / file_names_in_zip[file_name] + def delete_data(self): + _dir = self.extract_dir / "tvb_data" + shutil.rmtree(_dir) + def update_cached_response(self): """ @@ -141,7 +146,9 @@ def update_cached_response(self): version = hit['metadata']['version'] response = hit - responses[version] = response + responses[version] = response + + Path(file_dir).touch(exist_ok=True) with open(file_dir, "w") as fp: json.dump(responses, fp) @@ -174,9 +181,6 @@ def describe(self): def get_record(self): return self.recid - def __str__(self): - return f"TVB Data version : {self.version}" - def __eq__(self, other): if isinstace(other, TVBZenodoDataset): return self.rec == tvb_data.rec diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 1685a2e03f..36a6f3db69 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -42,30 +42,31 @@ class Record: def __init__(self, data, base_url: str = BASE_URL) -> None: + """ + Record represents the repsonse from the Zenodo. 
+ """ + self.base_url = base_url self.data = data self.file_loc = {} - def describe(self): - return self.data['metadata']['description'] + - - def __str__(self): - return json.dumps(self.data, indent=2) - - def download(self): + def download(self, path=None): if 'files' not in self.data: raise AttributeError("No files to download! Please check if the record id entered is correct! or the data is publically accessible") + if path == None: + path = pooch.os_cache("tvb") for file in self.data["files"]: url = file['links']['self'] known_hash = file['checksum'] file_name = file['key'] - file_path = pooch.retrieve(url= url, known_hash= known_hash, path = pooch.os_cache("tvb") ,progressbar = True) + file_path = pooch.retrieve(url= url, known_hash= known_hash, path = path,progressbar = True) self.file_loc[f'{file_name}'] = file_path @@ -73,14 +74,24 @@ def download(self): print(f"file {file_name} is downloaded at {file_path}") - def get_latest_version(self): - + def get_latest_version(self): return Zenodo().get_record(self.data['links']['latest'].split("/")[-1]) + + def describe(self): + return self.data['metadata']['description'] + def get_record_id(self): + return self.data['conceptrecid'] + def is_open_access(self): + return self.data['metadata']['access_right'] != "closed" + def __eq__(self, record_b): return (self.data == record_b.data) + def __str__(self): + return json.dumps(self.data, indent=2) + class Zenodo: @@ -115,6 +126,9 @@ def get_versions_info(self, recid): """ recid: unique id of the data repository + + + """ # needs ineternet @@ -130,7 +144,8 @@ def get_versions_info(self, recid): version = hit['metadata']['version'] recid = hit['doi'].split(".")[-1] - + if hit['metadata']['access_right'] == "closed": + continue versions[version] = recid diff --git a/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py b/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py new file mode 100644 index 0000000000..0e6e6542f3 --- /dev/null +++ 
b/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py @@ -0,0 +1,84 @@ + +# -*- coding: utf-8 -*- +# +# +# TheVirtualBrain-Scientific Package. This package holds all simulators, and +# analysers necessary to run brain-simulations. You can use it stand alone or +# in conjunction with TheVirtualBrain-Framework Package. See content of the +# documentation-folder for more details. See also http://www.thevirtualbrain.org +# +# (c) 2012-2023, Baycrest Centre for Geriatric Care ("Baycrest") and others +# +# This program is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software Foundation, +# either version 3 of the License, or (at your option) any later version. +# This program is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. See the GNU General Public License for more details. +# You should have received a copy of the GNU General Public License along with this +# program. If not, see . +# +# +# CITATION: +# When using The Virtual Brain for scientific publications, please cite it as explained here: +# https://www.thevirtualbrain.org/tvb/zwei/neuroscience-publications +# +# + +""" +.. 
moduleauthor:: Abhijit Deo +""" +import socket + +from tvb.datasets import TVBZenodoDataset +from pathlib import Path +from tvb.tests.library.base_testcase import BaseTestCase + + + +def no_internet_decorator(func): + class block_network(socket.socket): + def __init__(self, *args, **kwargs): + raise Exception("Network call blocked") + + socket.socket = block_network + + def iner_func(): + return func + return iner_func + + + + + + +class Test_TVBZenodoDataset(BaseTestCase): + + + def test_extract(self): + + tvb_data = TVBZenodoDataset() + connectivity66_dir = tvb_data.fetch_data("connectivity_66.zip") + assert connectivity66_dir.is_file() + tvb_data.delete_data() + assert not connectivity66_dir.is_file() + + tvb_data = TVBZenodoDataset(version="2.0.3", extract_dir="tvb_data") + connectivity66_dir = tvb_data.fetch_data("connectivity_66.zip") + assert connectivity66_dir.is_file() + tvb_data.delete_data() + assert not connectivity66_dir.is_file() + + tvb_data = TVBZenodoDataset(version="2.0.3", extract_dir="~/tvb_data") + matfile_dir = tvb_data.fetch_data("local_connectivity_80k.mat") + assert matfile_dir.is_file() + tvb_data.delete_data() + assert not matfile_dir.is_file() + + + all_extract = TVBZenodoDataset(version = "2.0.3", extract_dir="~/tvb_data").fetch_data(" ConnectivityTable_regions.xls") + assert all_extract.is_file() + tvb_data.delete_data() + assert not all_extract.is_file() + + #TODO add no interenet tests diff --git a/tvb_library/tvb/tests/library/datasets/zenodo_test.py b/tvb_library/tvb/tests/library/datasets/zenodo_test.py index 50084e7e75..59e82b974e 100644 --- a/tvb_library/tvb/tests/library/datasets/zenodo_test.py +++ b/tvb_library/tvb/tests/library/datasets/zenodo_test.py @@ -57,7 +57,7 @@ def test_get_versions(self): versions = zenodo.get_versions_info("3491055") assert type(versions) == dict - assert versions == {'2.0.1': '3497545', '1.5.9.b': '3474071', '2.0.0': '3491055', '2.0.3': '4263723', '2.0.2': '3688773', '1.5.9': '3417207', '2.7': 
'7574266'} + assert versions == {'1.5.9': '3457454', '2.0.3': '4263723', '2.0.1': '3497545', '2.0.2': '3688773', '2.0.0': '3491055', '1.5.9.b': '3474071', '2.7': '7574266'} del zenodo del versions @@ -69,12 +69,13 @@ def test_download(self): zen = Zenodo() - rec = zen.get_record("4263723") + rec = zen.get_record("7929679") rec.download() print(rec.file_loc) for file_name, file_path in rec.file_loc.items(): assert Path(file_path).is_file() + Path(file_path).unlink() From 15550d2f760663dd156c4c8c7b89d40921956619 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Fri, 30 Jun 2023 10:26:56 +0530 Subject: [PATCH 56/84] fixed the file not found errors with remaining datatypes' --- tvb_library/tvb/datasets/tvb_data.py | 4 ++-- tvb_library/tvb/datasets/zenodo.py | 20 +++++++++---------- .../tvb/datatypes/local_connectivity.py | 4 ++-- tvb_library/tvb/datatypes/projections.py | 4 ++-- tvb_library/tvb/datatypes/region_mapping.py | 4 ++-- tvb_library/tvb/datatypes/sensors.py | 6 +++--- tvb_library/tvb/datatypes/surfaces.py | 3 ++- .../library/datasets/TVBZenodoDataset_test.py | 20 ++++--------------- 8 files changed, 26 insertions(+), 39 deletions(-) diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 50f97b02e3..d81b09ec05 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -121,10 +121,10 @@ def fetch_data(self, file_name): if extract_dir.is_absolute(): - return extract_dir / file_names_in_zip[file_name] + return str(extract_dir / file_names_in_zip[file_name]) - return Path.cwd()/ extract_dir / file_names_in_zip[file_name] + return str(Path.cwd()/ extract_dir / file_names_in_zip[file_name]) def delete_data(self): _dir = self.extract_dir / "tvb_data" diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 36a6f3db69..945b836273 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -34,6 +34,7 @@ import requests import 
re import pooch +from typing import List from pathlib import Path import json @@ -52,7 +53,7 @@ def __init__(self, data, base_url: str = BASE_URL) -> None: - def download(self, path=None): + def download(self, path: str = None) -> None: if 'files' not in self.data: raise AttributeError("No files to download! Please check if the record id entered is correct! or the data is publically accessible") @@ -77,19 +78,19 @@ def download(self, path=None): def get_latest_version(self): return Zenodo().get_record(self.data['links']['latest'].split("/")[-1]) - def describe(self): + def describe(self) -> str: return self.data['metadata']['description'] - def get_record_id(self): + def get_record_id(self) -> str: return self.data['conceptrecid'] - def is_open_access(self): + def is_open_access(self) -> str: return self.data['metadata']['access_right'] != "closed" - def __eq__(self, record_b): + def __eq__(self, record_b) -> bool: return (self.data == record_b.data) - def __str__(self): + def __str__(self) -> str: return json.dumps(self.data, indent=2) @@ -114,7 +115,7 @@ def get_record(self, recid: str) -> Record: return Record(requests.get(url).json()) - def _get_records(self, params: dict[str, str]) -> list[Record]: + def _get_records(self, params: dict[str, str]) -> List[Record]: url = self.base_url + "records?" 
+ urlencode(params) return [Record(hit) for hit in requests.get(url).json()["hits"]["hits"]] @@ -122,13 +123,10 @@ def _get_records(self, params: dict[str, str]) -> list[Record]: - def get_versions_info(self, recid): + def get_versions_info(self, recid) -> dict: """ recid: unique id of the data repository - - - """ # needs ineternet diff --git a/tvb_library/tvb/datatypes/local_connectivity.py b/tvb_library/tvb/datatypes/local_connectivity.py index 6c60ceb3f2..e50bd6dcc1 100644 --- a/tvb_library/tvb/datatypes/local_connectivity.py +++ b/tvb_library/tvb/datatypes/local_connectivity.py @@ -31,7 +31,7 @@ from tvb.basic.neotraits.api import HasTraits, Attr, Float, narray_summary_info from tvb.basic.readers import try_get_absolute_path, FileReader from tvb.datatypes import equations, surfaces - +from tvb.datasets import TVBZenodoDataset class LocalConnectivity(HasTraits): """ @@ -109,7 +109,7 @@ def from_file(source_file="local_connectivity_16384.mat"): result = LocalConnectivity() - source_full_path = try_get_absolute_path("tvb_data.local_connectivity", source_file) + source_full_path = TVBZenodoDataset().fetch_data(source_file) reader = FileReader(source_full_path) result.matrix = reader.read_array(matlab_data_name="LocalCoupling") diff --git a/tvb_library/tvb/datatypes/projections.py b/tvb_library/tvb/datatypes/projections.py index 713472b3af..fac6dbe8aa 100644 --- a/tvb_library/tvb/datatypes/projections.py +++ b/tvb_library/tvb/datatypes/projections.py @@ -32,7 +32,7 @@ from tvb.basic.readers import try_get_absolute_path, FileReader from tvb.datatypes import surfaces, sensors from tvb.basic.neotraits.api import HasTraits, TVBEnum, Attr, NArray, Final - +from tvb.datasets import TVBZenodoDataset class ProjectionsTypeEnum(TVBEnum): EEG = "projEEG" @@ -88,7 +88,7 @@ def from_file(cls, source_file, matlab_data_name=None, is_brainstorm=False): proj = cls() - source_full_path = try_get_absolute_path("tvb_data.projectionMatrix", source_file) + source_full_path = 
TVBZenodoDataset().fetch_data(source_file) reader = FileReader(source_full_path) if is_brainstorm: proj.projection_data = reader.read_gain_from_brainstorm() diff --git a/tvb_library/tvb/datatypes/region_mapping.py b/tvb_library/tvb/datatypes/region_mapping.py index 12eca3cd53..0c59c37fca 100644 --- a/tvb_library/tvb/datatypes/region_mapping.py +++ b/tvb_library/tvb/datatypes/region_mapping.py @@ -39,7 +39,7 @@ from tvb.datatypes.surfaces import Surface from tvb.datatypes.volumes import Volume from tvb.basic.neotraits.api import HasTraits, Attr, NArray - +from tvb.datasets import TVBZenodoDataset class RegionMapping(HasTraits): """ @@ -55,7 +55,7 @@ class RegionMapping(HasTraits): @staticmethod def from_file(source_file="regionMapping_16k_76.txt"): - source_full_path = try_get_absolute_path("tvb_data.regionMapping", source_file) + source_full_path = TVBZenodoDataset().fetch_data(source_file) reader = FileReader(source_full_path) result = RegionMapping() diff --git a/tvb_library/tvb/datatypes/sensors.py b/tvb_library/tvb/datatypes/sensors.py index 250a190e46..c88d7a831c 100644 --- a/tvb_library/tvb/datatypes/sensors.py +++ b/tvb_library/tvb/datatypes/sensors.py @@ -40,7 +40,7 @@ from tvb.basic.readers import FileReader, try_get_absolute_path from tvb.basic.neotraits.api import HasTraits, Attr, NArray, Int, TVBEnum, Final - +from tvb.datasets import TVBZenodoDataset class SensorTypesEnum(TVBEnum): TYPE_EEG = "EEG" @@ -77,7 +77,7 @@ class Sensors(HasTraits): def from_file(cls, source_file="eeg_brainstorm_65.txt"): result = cls() - source_full_path = try_get_absolute_path("tvb_data.sensors", source_file) + source_full_path = TVBZenodoDataset().fetch_data(source_file) reader = FileReader(source_full_path) result.labels = reader.read_array(dtype=numpy.str_, use_cols=(0,)) @@ -236,7 +236,7 @@ class SensorsMEG(Sensors): def from_file(cls, source_file="meg_151.txt.bz2"): result = super(SensorsMEG, cls).from_file(source_file) - source_full_path = 
try_get_absolute_path("tvb_data.sensors", source_file) + source_full_path = TVBZenodoDataset().fetch_data(source_file) reader = FileReader(source_full_path) result.orientations = reader.read_array(use_cols=(4, 5, 6)) diff --git a/tvb_library/tvb/datatypes/surfaces.py b/tvb_library/tvb/datatypes/surfaces.py index e7d011dbce..aae6cbada6 100644 --- a/tvb_library/tvb/datatypes/surfaces.py +++ b/tvb_library/tvb/datatypes/surfaces.py @@ -41,6 +41,7 @@ from tvb.basic.neotraits.api import TVBEnum from tvb.basic.neotraits.api import HasTraits, Attr, NArray, Final, Int, Float, narray_describe from tvb.basic.readers import ZipReader, try_get_absolute_path +from tvb.datasets import TVBZenodoDataset try: import gdist @@ -162,7 +163,7 @@ def _read(cls, reader): @classmethod def from_file(cls, source_file="cortex_16384.zip"): """Construct a Surface from source_file.""" - source_full_path = try_get_absolute_path("tvb_data.surfaceData", source_file) + source_full_path = TVBZenodoDataset().fetch_data(source_file) reader = ZipReader(source_full_path) return cls._read(reader) diff --git a/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py b/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py index 0e6e6542f3..c03f9256c8 100644 --- a/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py +++ b/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py @@ -28,7 +28,6 @@ """ .. 
moduleauthor:: Abhijit Deo """ -import socket from tvb.datasets import TVBZenodoDataset from pathlib import Path @@ -36,17 +35,6 @@ -def no_internet_decorator(func): - class block_network(socket.socket): - def __init__(self, *args, **kwargs): - raise Exception("Network call blocked") - - socket.socket = block_network - - def iner_func(): - return func - return iner_func - @@ -58,25 +46,25 @@ class Test_TVBZenodoDataset(BaseTestCase): def test_extract(self): tvb_data = TVBZenodoDataset() - connectivity66_dir = tvb_data.fetch_data("connectivity_66.zip") + connectivity66_dir = Path(tvb_data.fetch_data("connectivity_66.zip")) assert connectivity66_dir.is_file() tvb_data.delete_data() assert not connectivity66_dir.is_file() tvb_data = TVBZenodoDataset(version="2.0.3", extract_dir="tvb_data") - connectivity66_dir = tvb_data.fetch_data("connectivity_66.zip") + connectivity66_dir = Path(tvb_data.fetch_data("connectivity_66.zip")) assert connectivity66_dir.is_file() tvb_data.delete_data() assert not connectivity66_dir.is_file() tvb_data = TVBZenodoDataset(version="2.0.3", extract_dir="~/tvb_data") - matfile_dir = tvb_data.fetch_data("local_connectivity_80k.mat") + matfile_dir = Path(tvb_data.fetch_data("local_connectivity_80k.mat")) assert matfile_dir.is_file() tvb_data.delete_data() assert not matfile_dir.is_file() - all_extract = TVBZenodoDataset(version = "2.0.3", extract_dir="~/tvb_data").fetch_data(" ConnectivityTable_regions.xls") + all_extract = Path(TVBZenodoDataset(version = "2.0.3", extract_dir="~/tvb_data").fetch_data(" ConnectivityTable_regions.xls")) assert all_extract.is_file() tvb_data.delete_data() assert not all_extract.is_file() From d6c2c76ff8c6f946bf294ebd1ff6e205084a4ee4 Mon Sep 17 00:00:00 2001 From: abhijit_linux Date: Fri, 30 Jun 2023 16:02:55 +0530 Subject: [PATCH 57/84] add types --- tvb_library/tvb/datasets/zenodo.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tvb_library/tvb/datasets/zenodo.py 
b/tvb_library/tvb/datasets/zenodo.py index 945b836273..6e8450fa5a 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -34,7 +34,7 @@ import requests import re import pooch -from typing import List +from typing import List, Dict from pathlib import Path import json @@ -123,7 +123,7 @@ def _get_records(self, params: dict[str, str]) -> List[Record]: - def get_versions_info(self, recid) -> dict: + def get_versions_info(self, recid) -> Dict: """ recid: unique id of the data repository From b3d3d6369a03de593d6e9b94b3a7cdce84a46b14 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Thu, 6 Jul 2023 21:55:06 +0530 Subject: [PATCH 58/84] removed the data setup from build.yml, fix the errors found in tests --- .github/workflows/build.yml | 34 ++++++++++++++-------------- tvb_library/tvb/datasets/base.py | 10 ++++++-- tvb_library/tvb/datasets/tvb_data.py | 2 +- tvb_library/tvb/datasets/zenodo.py | 2 +- 4 files changed, 27 insertions(+), 21 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 08dc688957..6f2a1c5c19 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -47,25 +47,25 @@ jobs: cd tvb_build bash install_full_tvb.sh - - name: cache data - id: cache-data - uses: actions/cache@v3 - with: - path: tvb_data - key: tvb-data + #- name: cache data + # id: cache-data + # uses: actions/cache@v3 + # with: + # path: tvb_data + # key: tvb-data - - name: download data - if: steps.cache-data.outputs.cache-hit != 'true' - run: | - wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip - mkdir tvb_data - unzip tvb_data.zip -d tvb_data - rm tvb_data.zip + #- name: download data + # if: steps.cache-data.outputs.cache-hit != 'true' + # run: | + # wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip + # mkdir tvb_data + # unzip tvb_data.zip -d tvb_data + # rm tvb_data.zip - - name: setup data - run: | - cd tvb_data - python3 
setup.py develop + #- name: setup data + # run: | + # cd tvb_data + # python3 setup.py develop - name: run library tests run: pytest -v tvb_library --cov --cov-report=xml && mv coverage.xml coverage-library.xml diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py index 599c47c466..df01d6c153 100644 --- a/tvb_library/tvb/datasets/base.py +++ b/tvb_library/tvb/datasets/base.py @@ -51,8 +51,14 @@ def __init__(self, version, extract_dir=None): def download(self): pass - def fetch_data(self): - pass + def fetch_data(self, file_name): + if Path(file_name).is_absolute(): + return file_name + + return self._fetch_data(file_name) + def _fetch_data(self, file_name): + pass + def get_version(self): return self.version diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index d81b09ec05..451e5f2070 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -79,7 +79,7 @@ def download(self, path=None): """ self.rec.download(path) - def fetch_data(self, file_name): + def _fetch_data(self, file_name): """ Fetches the data diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 6e8450fa5a..04232e26e7 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -115,7 +115,7 @@ def get_record(self, recid: str) -> Record: return Record(requests.get(url).json()) - def _get_records(self, params: dict[str, str]) -> List[Record]: + def _get_records(self, params: Dict[str, str]) -> List[Record]: url = self.base_url + "records?" + urlencode(params) return [Record(hit) for hit in requests.get(url).json()["hits"]["hits"]] From b51a7a1ffc6eb49ef4aa2b1d1d7bda14b6ae8556 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sat, 8 Jul 2023 22:26:29 +0530 Subject: [PATCH 59/84] replacing tvb_data package from code. 
--- .github/workflows/lib-tests.yml_disabled | 20 ++--- .github/workflows/notebooks.yml | 30 +++---- .github/workflows/pg-tests.yml | 30 +++---- .github/workflows/win-tests.yml | 32 +++---- tvb_build/build_step1.py | 6 +- tvb_build/docker/Dockerfile-build | 10 +-- .../scripts/datatypes/lookup_tables.py | 4 +- .../code_update_scripts/4455_update_code.py | 9 +- .../code_update_scripts/4750_update_code.py | 6 +- .../code_update_scripts/6093_update_code.py | 6 +- .../code_update_scripts/6600_update_code.py | 7 +- .../tvb/core/services/user_service.py | 6 +- .../tvb/interfaces/command/benchmark.py | 15 ++-- .../brain_tumor_connectivity_importer.py | 8 +- .../interfaces/rest/client/examples/utils.py | 5 +- .../framework/adapters/analyzers/bct_test.py | 6 +- .../timeseries_metrics_adapter_test.py | 3 +- .../creators/stimulus_creator_test.py | 14 ++- .../simulator/simulator_adapter_test.py | 11 ++- .../connectivity_zip_importer_test.py | 6 +- .../adapters/uploaders/csv_importer_test.py | 32 +++---- .../uploaders/encrypt_decrypt_test.py | 21 +++-- .../adapters/uploaders/gifti_importer_test.py | 11 ++- .../uploaders/mat_timeseries_importer_test.py | 12 ++- .../adapters/uploaders/nifti_importer_test.py | 25 ++++-- .../adapters/uploaders/obj_importer_test.py | 12 ++- .../projection_matrix_importer_test.py | 13 +-- .../uploaders/region_mapping_importer_test.py | 16 ++-- .../uploaders/sensors_importer_test.py | 10 ++- .../uploaders/zip_surface_importer_test.py | 6 +- .../adapters/visualizers/brainviewer_test.py | 15 ++-- .../visualizers/connectivityviewer_test.py | 6 +- .../visualizers/sensorsviewer_test.py | 17 ++-- .../visualizers/surfaceviewer_test.py | 14 +-- .../tvb/tests/framework/core/factory.py | 10 ++- .../framework/core/neotraits/forms_test.py | 5 +- .../core/services/import_service_test.py | 12 ++- .../framework/core/services/links_test.py | 10 ++- .../core/services/project_service_test.py | 10 ++- .../services/serialization_manager_test.py | 6 +- 
.../interfaces/rest/datatype_resource_test.py | 9 +- .../interfaces/rest/project_resource_test.py | 7 +- .../controllers/simulator_controller_test.py | 87 ++++++++++++------- tvb_library/tvb/datasets/tvb_data.py | 2 + 44 files changed, 374 insertions(+), 228 deletions(-) diff --git a/.github/workflows/lib-tests.yml_disabled b/.github/workflows/lib-tests.yml_disabled index 07dff59116..b9d2d80531 100644 --- a/.github/workflows/lib-tests.yml_disabled +++ b/.github/workflows/lib-tests.yml_disabled @@ -27,16 +27,16 @@ jobs: pip3 install pipenv cd tvb_library && pipenv install -d --python $(which python3) - - name: download data - run: | - wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip - mkdir tvb_data - unzip tvb_data.zip -d tvb_data - rm tvb_data.zip - - - name: setup data - run: | - cd tvb_library && pipenv run bash -c 'cd ../tvb_data && python3 setup.py develop' + #- name: download data + # run: | + # wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip + # mkdir tvb_data + # unzip tvb_data.zip -d tvb_data + # rm tvb_data.zip + + #- name: setup data + # run: | + # cd tvb_library && pipenv run bash -c 'cd ../tvb_data && python3 setup.py develop' - name: importlib_metadata? 
run: cd tvb_library && pipenv install importlib_metadata diff --git a/.github/workflows/notebooks.yml b/.github/workflows/notebooks.yml index da9eab305a..7728a8a70c 100644 --- a/.github/workflows/notebooks.yml +++ b/.github/workflows/notebooks.yml @@ -40,22 +40,22 @@ jobs: cd tvb_build cmd /k "install_full_tvb.bat" - - name: cache data - id: cache-data - uses: actions/cache@v3 - with: - path: tvb_data - key: tvbdata + #- name: cache data + # id: cache-data + # uses: actions/cache@v3 + # with: + # path: tvb_data + # key: tvbdata - - name: download data - if: steps.cache-data.outputs.cache-hit != 'true' - shell: pwsh - run: | - Invoke-WebRequest -OutFile C:\\TEMP\\tvb_data.zip -Uri "https://zenodo.org/record/7574266/files/tvb_data.zip?download=1" - Expand-Archive 'C:\\TEMP\\tvb_data.zip' C:\\tvb_data - del C:\\TEMP\\tvb_data.zip - cd C:\\tvb_data - python setup.py develop + #- name: download data + # if: steps.cache-data.outputs.cache-hit != 'true' + # shell: pwsh + # run: | + # Invoke-WebRequest -OutFile C:\\TEMP\\tvb_data.zip -Uri "https://zenodo.org/record/7574266/files/tvb_data.zip?download=1" + # Expand-Archive 'C:\\TEMP\\tvb_data.zip' C:\\tvb_data + # del C:\\TEMP\\tvb_data.zip + # cd C:\\tvb_data + # python setup.py develop - name: run notebooks env: diff --git a/.github/workflows/pg-tests.yml b/.github/workflows/pg-tests.yml index b360fd2605..0abac2e24c 100644 --- a/.github/workflows/pg-tests.yml +++ b/.github/workflows/pg-tests.yml @@ -52,23 +52,23 @@ jobs: - name: setup tvb run: cd tvb_build && bash install_full_tvb.sh - - name: cache data - id: cache-data - uses: actions/cache@v3 - with: - path: tvb_data - key: tvb-data + #- name: cache data + # id: cache-data + # uses: actions/cache@v3 + # with: + # path: tvb_data + # key: tvb-data - - name: download data - if: steps.cache-data.outputs.cache-hit != 'true' - run: | - wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip - mkdir tvb_data - unzip tvb_data.zip -d tvb_data - 
rm tvb_data.zip + #- name: download data + # if: steps.cache-data.outputs.cache-hit != 'true' + # run: | + # wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip + # mkdir tvb_data + # unzip tvb_data.zip -d tvb_data + # rm tvb_data.zip - - name: setup data - run: cd tvb_data && python3 setup.py develop + #- name: setup data + # run: cd tvb_data && python3 setup.py develop - name: run framework tests run: | diff --git a/.github/workflows/win-tests.yml b/.github/workflows/win-tests.yml index 122853097f..2d86279e54 100644 --- a/.github/workflows/win-tests.yml +++ b/.github/workflows/win-tests.yml @@ -34,22 +34,22 @@ jobs: pip install -r tvb_framework/requirements.txt pip install --no-build-isolation tvb-gdist - - name: cache data - id: cache-data - uses: actions/cache@v3 - with: - path: tvb_data - key: tvbdata - - - name: download data - if: steps.cache-data.outputs.cache-hit != 'true' - shell: pwsh - run: | - Invoke-WebRequest -OutFile C:\\TEMP\\tvb_data.zip -Uri "https://zenodo.org/record/7574266/files/tvb_data.zip?download=1" - Expand-Archive 'C:\\TEMP\\tvb_data.zip' C:\\tvb_data - del C:\\TEMP\\tvb_data.zip - cd C:\\tvb_data - pip install -e . 
+ #- name: cache data + # id: cache-data + # uses: actions/cache@v3 + # with: + # path: tvb_data + # key: tvbdata + + #- name: download data + # if: steps.cache-data.outputs.cache-hit != 'true' + # shell: pwsh + # run: | + # Invoke-WebRequest -OutFile C:\\TEMP\\tvb_data.zip -Uri "https://zenodo.org/record/7574266/files/tvb_data.zip?download=1" + # Expand-Archive 'C:\\TEMP\\tvb_data.zip' C:\\tvb_data + # del C:\\TEMP\\tvb_data.zip + # cd C:\\tvb_data + # python setup.py develop - name: run framework tests shell: pwsh diff --git a/tvb_build/build_step1.py b/tvb_build/build_step1.py index 885b1ab54f..f7abecb888 100644 --- a/tvb_build/build_step1.py +++ b/tvb_build/build_step1.py @@ -44,7 +44,8 @@ import requests import tvb_bin -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from subprocess import Popen, PIPE # source paths @@ -53,7 +54,8 @@ FW_FOLDER = os.path.join(TVB_ROOT, 'tvb_framework') LICENSE_PATH = os.path.join(FW_FOLDER, 'LICENSE') RELEASE_NOTES_PATH = os.path.join(TVB_ROOT, 'tvb_documentation', 'RELEASE_NOTES') -DATA_SRC_FOLDER = os.path.dirname(tvb_data.__file__) +#DATA_SRC_FOLDER = os.path.dirname(tvb_data.__file__) +DATA_SRC_FOLDER = TVBZenodoDataset().extract_dir DEMOS_MATLAB_FOLDER = os.path.join(TVB_ROOT, 'tvb_documentation', 'matlab') # dest paths diff --git a/tvb_build/docker/Dockerfile-build b/tvb_build/docker/Dockerfile-build index 8fa3c0cbc2..5299909282 100644 --- a/tvb_build/docker/Dockerfile-build +++ b/tvb_build/docker/Dockerfile-build @@ -38,11 +38,11 @@ RUN /bin/bash -c "source activate tvb-run"; \ /opt/conda/envs/tvb-run/bin/jupyter notebook --generate-config; \ echo "c.NotebookApp.password='sha1:12bff019c253:9daecd92c2e9bdb10b3b8a06767a74a0fe078d7c'">>$JUPYTER_CONFIG/jupyter_notebook_config.py -RUN wget https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip; \ - mkdir tvb_data; unzip tvb_data.zip -d tvb_data; rm tvb_data.zip; \ - cd tvb_data; \ - /opt/conda/envs/tvb-run/bin/python 
setup.py develop;\ - /opt/conda/envs/tvb-docs/bin/python setup.py develop +#RUN wget https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip; \ +# mkdir tvb_data; unzip tvb_data.zip -d tvb_data; rm tvb_data.zip; \ +# cd tvb_data; \ +# /opt/conda/envs/tvb-run/bin/python setup.py develop;\ +# /opt/conda/envs/tvb-docs/bin/python setup.py develop WORKDIR $USER_HOME COPY requirements_group requirements.txt diff --git a/tvb_contrib/tvb/contrib/scripts/datatypes/lookup_tables.py b/tvb_contrib/tvb/contrib/scripts/datatypes/lookup_tables.py index 478259c622..cb4d573ccb 100644 --- a/tvb_contrib/tvb/contrib/scripts/datatypes/lookup_tables.py +++ b/tvb_contrib/tvb/contrib/scripts/datatypes/lookup_tables.py @@ -31,6 +31,7 @@ """ import numpy +from tvb.datasets import TVBZenodoDataset from tvb.basic.readers import try_get_absolute_path from tvb.basic.neotraits.api import HasTraits, Attr, NArray, Int, Float @@ -74,7 +75,8 @@ class LookUpTable(HasTraits): @staticmethod def populate_table(result, source_file): - source_full_path = try_get_absolute_path("tvb_data.tables", source_file) + source_full_path = TVBZenodoDataset().fetch_data(source_file) + #source_full_path = try_get_absolute_path("tvb_data.tables", source_file) zip_data = numpy.load(source_full_path) result.df = zip_data['df'] diff --git a/tvb_framework/tvb/core/code_versions/code_update_scripts/4455_update_code.py b/tvb_framework/tvb/core/code_versions/code_update_scripts/4455_update_code.py index cd9381b8df..7b8636295c 100644 --- a/tvb_framework/tvb/core/code_versions/code_update_scripts/4455_update_code.py +++ b/tvb_framework/tvb/core/code_versions/code_update_scripts/4455_update_code.py @@ -29,15 +29,18 @@ """ import os -import tvb_data.obj +#import tvb_data.obj +from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.obj_importer import ObjSurfaceImporter from tvb.basic.logger.builder import get_logger from tvb.core.services.operation_service import OperationService from 
tvb.core.entities.storage import dao from tvb.datatypes.surfaces import SurfaceTypesEnum -DATA_FILE_EEG_CAP = os.path.join(os.path.dirname(tvb_data.obj.__file__), "eeg_cap.obj") -DATA_FILE_FACE = os.path.join(os.path.dirname(tvb_data.obj.__file__), "face_surface.obj") +#DATA_FILE_EEG_CAP = os.path.join(os.path.dirname(tvb_data.obj.__file__), "eeg_cap.obj") +#DATA_FILE_FACE = os.path.join(os.path.dirname(tvb_data.obj.__file__), "face_surface.obj") +DATA_FILE_EEG_CAP = TVBZenodoDataset().fetch_data("eeg_cap.obj") +DATA_FILE_FACE = TVBZenodoDataset().fetch_data('face_surface.obj') LOGGER = get_logger(__name__) PAGE_SIZE = 20 diff --git a/tvb_framework/tvb/core/code_versions/code_update_scripts/4750_update_code.py b/tvb_framework/tvb/core/code_versions/code_update_scripts/4750_update_code.py index 7f107c564f..d2c999c579 100644 --- a/tvb_framework/tvb/core/code_versions/code_update_scripts/4750_update_code.py +++ b/tvb_framework/tvb/core/code_versions/code_update_scripts/4750_update_code.py @@ -28,13 +28,15 @@ .. 
moduleauthor:: Bogdan Neacsa """ import os -import tvb_data.sensors +#import tvb_data.sensors +from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.sensors_importer import SensorsImporter from tvb.basic.logger.builder import get_logger from tvb.core.entities.storage import dao from tvb.core.services.operation_service import OperationService -DATA_FILE = os.path.join(os.path.dirname(tvb_data.sensors.__file__), "seeg_39.txt.bz2") +#DATA_FILE = os.path.join(os.path.dirname(tvb_data.sensors.__file__), "seeg_39.txt.bz2") +DATA_FILE = TVBZenodoDataset().fetch_data('seeg_39.txt.bz2') LOGGER = get_logger(__name__) PAGE_SIZE = 20 diff --git a/tvb_framework/tvb/core/code_versions/code_update_scripts/6093_update_code.py b/tvb_framework/tvb/core/code_versions/code_update_scripts/6093_update_code.py index ffffdfe028..59d30e5dbc 100644 --- a/tvb_framework/tvb/core/code_versions/code_update_scripts/6093_update_code.py +++ b/tvb_framework/tvb/core/code_versions/code_update_scripts/6093_update_code.py @@ -30,14 +30,16 @@ .. 
moduleauthor:: Mihai Andrei """ import os -import tvb_data.obj +#import tvb_data.obj +from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.obj_importer import ObjSurfaceImporter from tvb.basic.logger.builder import get_logger from tvb.core.entities.storage import dao from tvb.core.services.operation_service import OperationService from tvb.datatypes.surfaces import SurfaceTypesEnum -DATA_FILE_FACE = os.path.join(os.path.dirname(tvb_data.obj.__file__), "face_surface.obj") +#DATA_FILE_FACE = os.path.join(os.path.dirname(tvb_data.obj.__file__), "face_surface.obj") +DATA_FILE_FACE = TVBZenodoDataset().fetch_data('face_surface.obj') LOGGER = get_logger(__name__) PAGE_SIZE = 20 diff --git a/tvb_framework/tvb/core/code_versions/code_update_scripts/6600_update_code.py b/tvb_framework/tvb/core/code_versions/code_update_scripts/6600_update_code.py index a8a89a476f..ea6f5e1d4a 100644 --- a/tvb_framework/tvb/core/code_versions/code_update_scripts/6600_update_code.py +++ b/tvb_framework/tvb/core/code_versions/code_update_scripts/6600_update_code.py @@ -33,10 +33,11 @@ from tvb.basic.logger.builder import get_logger from tvb.core.entities.storage import dao from tvb.core.services.import_service import ImportService -import tvb_data - -DATA_FILE = os.path.join(os.path.dirname(tvb_data.__file__), "Default_Project.zip") +#import tvb_data +from tvb.datasets import TVBZenodoDataset +#DATA_FILE = os.path.join(os.path.dirname(tvb_data.__file__), "Default_Project.zip") +DATA_FILE = TVBZenodoDataset().fetch_data('Default_Project.zip') LOGGER = get_logger(__name__) diff --git a/tvb_framework/tvb/core/services/user_service.py b/tvb_framework/tvb/core/services/user_service.py index 9a2de56dc6..7a0cbd62a6 100644 --- a/tvb_framework/tvb/core/services/user_service.py +++ b/tvb_framework/tvb/core/services/user_service.py @@ -33,7 +33,8 @@ import os import random import six -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from 
tvb.basic.logger.builder import get_logger from tvb.basic.profile import TvbProfile from tvb.config import DEFAULT_PROJECT_GID @@ -120,7 +121,8 @@ def create_user(self, username=None, display_name=None, password=None, password2 user = dao.store_entity(user) if role == ROLE_ADMINISTRATOR and not skip_import: - to_upload = os.path.join(os.path.dirname(tvb_data.__file__), "Default_Project.zip") + #to_upload = os.path.join(os.path.dirname(tvb_data.__file__), "Default_Project.zip") + to_upload = TVBZenodoDataset().fetch_data('Default_Project.zip') if not os.path.exists(to_upload): self.logger.warning("Could not find DEFAULT PROJECT at path %s. You might want to import it " "yourself. See TVB documentation about where to find it!" % to_upload) diff --git a/tvb_framework/tvb/interfaces/command/benchmark.py b/tvb_framework/tvb/interfaces/command/benchmark.py index ae736f87b9..9acfd1ca85 100644 --- a/tvb_framework/tvb/interfaces/command/benchmark.py +++ b/tvb_framework/tvb/interfaces/command/benchmark.py @@ -31,7 +31,8 @@ from os import path -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.core.entities.file.simulator.view_model import HeunDeterministicViewModel from tvb.interfaces.command.lab import * @@ -56,12 +57,16 @@ def _fire_simulation(project_id, simulator_vm): def _create_bench_project(): prj = new_project("benchmark_project_ %s" % datetime.now()) - data_dir = path.abspath(path.dirname(tvb_data.__file__)) - zip_path = path.join(data_dir, 'connectivity', 'connectivity_68.zip') + #data_dir = path.abspath(path.dirname(tvb_data.__file__)) + #zip_path = path.join(data_dir, 'connectivity', 'connectivity_68.zip') + tvb_data = TVBZenodoDataset() + zip_path = tvb_data.fetch_data('connectivity_68.zip') import_conn_zip(prj.id, zip_path) - zip_path = path.join(data_dir, 'connectivity', 'connectivity_96.zip') + #zip_path = path.join(data_dir, 'connectivity', 
'connectivity_96.zip') + zip_path = tvb_data.fetch_data('connectivity_96.zip') import_conn_zip(prj.id, zip_path) - zip_path = path.join(data_dir, 'connectivity', 'connectivity_192.zip') + #zip_path = path.join(data_dir, 'connectivity', 'connectivity_192.zip') + zip_path = tvb_data.fetch_data('connectivity_192.zip') import_conn_zip(prj.id, zip_path) conn68 = dao.get_generic_entity(ConnectivityIndex, 68, "number_of_regions")[0] diff --git a/tvb_framework/tvb/interfaces/command/demos/importers/brain_tumor_connectivity_importer.py b/tvb_framework/tvb/interfaces/command/demos/importers/brain_tumor_connectivity_importer.py index a69006cf0b..195ccba551 100644 --- a/tvb_framework/tvb/interfaces/command/demos/importers/brain_tumor_connectivity_importer.py +++ b/tvb_framework/tvb/interfaces/command/demos/importers/brain_tumor_connectivity_importer.py @@ -66,8 +66,12 @@ def import_tumor_connectivities(project_id, folder_path): def import_surface_rm(project_id, conn_gid): # Import surface and region mapping from tvb_data berlin subjects (68 regions) - rm_file = try_get_absolute_path("tvb_data", "berlinSubjects/DH_20120806/DH_20120806_RegionMapping.txt") - surface_zip_file = try_get_absolute_path("tvb_data", "berlinSubjects/DH_20120806/DH_20120806_Surface_Cortex.zip") + #rm_file = try_get_absolute_path("tvb_data", "berlinSubjects/DH_20120806/DH_20120806_RegionMapping.txt") + #surface_zip_file = try_get_absolute_path("tvb_data", "berlinSubjects/DH_20120806/DH_20120806_Surface_Cortex.zip") + from tvb.datasets import TVBZenodoDataset + tvb_data = TVBZenodoDataset() + rm_file = tvb_data.fetch_data('DH_20120806_RegionMapping.txt') + surface_zip_file = tvb_data.fetch_data('DH_20120806_Surface_Cortex.zip') surface_importer = ABCAdapter.build_adapter_from_class(ZIPSurfaceImporter) surface_imp_model = ZIPSurfaceImporterModel() diff --git a/tvb_framework/tvb/interfaces/rest/client/examples/utils.py b/tvb_framework/tvb/interfaces/rest/client/examples/utils.py index bb1711a435..b88b675f05 
100644 --- a/tvb_framework/tvb/interfaces/rest/client/examples/utils.py +++ b/tvb_framework/tvb/interfaces/rest/client/examples/utils.py @@ -28,7 +28,8 @@ import sys import time -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.basic.logger.builder import get_logger from tvb.core.entities.model.model_operation import STATUS_ERROR, STATUS_CANCELED, STATUS_FINISHED @@ -44,7 +45,7 @@ def compute_rest_url(): def compute_tvb_data_path(folder, filename): - return os.path.join(os.path.dirname(tvb_data.__file__), folder, filename) + return os.path.join(TVBZenodoDataset().extract_dir, folder, filename) logger = get_logger(__name__) diff --git a/tvb_framework/tvb/tests/framework/adapters/analyzers/bct_test.py b/tvb_framework/tvb/tests/framework/adapters/analyzers/bct_test.py index d26ca55b0e..202fbf8f66 100644 --- a/tvb_framework/tvb/tests/framework/adapters/analyzers/bct_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/analyzers/bct_test.py @@ -29,7 +29,8 @@ """ import os -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.analyzers.bct_adapters import BaseBCTModel from tvb.core.entities.model.model_operation import Algorithm from tvb.tests.framework.core.base_testcase import TransactionalTestCase @@ -56,7 +57,8 @@ def transactional_setup_method(self): self.test_user = TestFactory.create_user("BCT_User") self.test_project = TestFactory.create_project(self.test_user, "BCT-Project") # Make sure Connectivity is in DB - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') self.connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) algorithms = dao.get_generic_entity(Algorithm, 'Brain Connectivity Toolbox', 'group_description') diff --git 
a/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py b/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py index 9b68008bb2..77d2ceba0c 100644 --- a/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py @@ -29,7 +29,8 @@ """ import os -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset import json from tvb.adapters.datatypes.db.mapped_value import DatatypeMeasureIndex diff --git a/tvb_framework/tvb/tests/framework/adapters/creators/stimulus_creator_test.py b/tvb_framework/tvb/tests/framework/adapters/creators/stimulus_creator_test.py index 77d623909c..46169239df 100644 --- a/tvb_framework/tvb/tests/framework/adapters/creators/stimulus_creator_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/creators/stimulus_creator_test.py @@ -27,8 +27,9 @@ import json import os import numpy -import tvb_data -import tvb_data.surfaceData +#import tvb_data +#import tvb_data.surfaceData +from tvb.datasets import TVBZenodoDataset from tvb.adapters.creators.stimulus_creator import RegionStimulusCreator, SurfaceStimulusCreator from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex @@ -52,11 +53,16 @@ def transactional_setup_method(self): self.test_project = TestFactory.create_project(self.test_user, "Stim_Project") self.storage_interface = StorageInterface() - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + tvb_data = TVBZenodoDataset() + + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + zip_path = tvb_data.fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) self.connectivity = TestFactory.get_entity(self.test_project, ConnectivityIndex) - cortex = 
os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #cortex = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + cortex = tvb_data.fetch_data('cortex_16384.zip') + self.surface = TestFactory.import_surface_zip(self.test_user, self.test_project, cortex, SurfaceTypesEnum.CORTICAL_SURFACE) diff --git a/tvb_framework/tvb/tests/framework/adapters/simulator/simulator_adapter_test.py b/tvb_framework/tvb/tests/framework/adapters/simulator/simulator_adapter_test.py index 0db157f22c..ac22a01d8e 100644 --- a/tvb_framework/tvb/tests/framework/adapters/simulator/simulator_adapter_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/simulator/simulator_adapter_test.py @@ -28,8 +28,9 @@ .. moduleauthor:: Lia Domide """ -import tvb_data.surfaceData -import tvb_data.regionMapping +#import tvb_data.surfaceData +#import tvb_data.regionMapping +from tvb.datasets import TVBZenodoDataset from os import path from tvb.adapters.datatypes.db.time_series import TimeSeriesRegionIndex @@ -112,10 +113,12 @@ def test_estimate_execution_time(self, connectivity_index_factory): estimation1 = self.simulator_adapter.get_execution_time_approximation(model) # import surfaceData and region mapping - cortex_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #cortex_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + cortex_file = TVBZenodoDataset().fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, cortex_file, SurfaceTypesEnum.CORTICAL_SURFACE) - rm_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + #rm_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + rm_file = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt') region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, rm_file, surface.gid, 
model.connectivity.hex) local_conn = TestFactory.create_local_connectivity(self.test_user, self.test_project, surface.gid) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_zip_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_zip_importer_test.py index abe8b64008..e2b6ff98e0 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_zip_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_zip_importer_test.py @@ -30,7 +30,8 @@ """ from os import path -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.tests.framework.core.base_testcase import BaseTestCase from tvb.tests.framework.core.factory import TestFactory @@ -58,7 +59,8 @@ def test_happy_flow_import(self): """ Test that importing a CFF generates at least one DataType in DB. """ - zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') dt_count_before = TestFactory.get_entity_count(self.test_project, ConnectivityIndex) TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John", False) dt_count_after = TestFactory.get_entity_count(self.test_project, ConnectivityIndex) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/csv_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/csv_importer_test.py index 4db2d6475d..fb18ce3543 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/csv_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/csv_importer_test.py @@ -29,8 +29,7 @@ """ import pytest -import tvb_data -from os import path +from tvb.datasets import TVBZenodoDataset from 
tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.adapters.uploaders.csv_connectivity_importer import CSVConnectivityImporter from tvb.adapters.uploaders.csv_connectivity_importer import CSVConnectivityParser, CSVConnectivityImporterModel @@ -46,10 +45,10 @@ class TestCSVConnectivityParser(BaseTestCase): - BASE_PTH = path.join(path.dirname(tvb_data.__file__), 'dti_pipeline_toronto') - + #BASE_PTH = path.join(path.dirname(tvb_data.__file__), 'dti_pipeline_toronto') + def test_parse_happy(self): - cap_pth = path.join(self.BASE_PTH, 'output_ConnectionDistanceMatrix.csv') + cap_pth = TVBZenodoDataset().fetch_data('output_ConnectionDistanceMatrix.csv') with open(cap_pth) as f: result_conn = CSVConnectivityParser(f).result_conn @@ -75,15 +74,14 @@ def teardown_method(self): self.clean_database() def _import_csv_test_connectivity(self, reference_connectivity_gid, subject): - # First prepare the input data: - data_dir = path.abspath(path.dirname(tvb_data.__file__)) - - toronto_dir = path.join(data_dir, 'dti_pipeline_toronto') - weights = path.join(toronto_dir, 'output_ConnectionCapacityMatrix.csv') - tracts = path.join(toronto_dir, 'output_ConnectionDistanceMatrix.csv') - tmp_folder = self.storage_interface.get_temp_folder(self.test_project.name) - weights_tmp = path.join(tmp_folder, 'output_ConnectionCapacityMatrix.csv.tmp') - tracts_tmp = path.join(tmp_folder, 'output_ConnectionDistanceMatrix.csv.tmp') + ### First prepare input data: + #data_dir = path.abspath(path.dirname(tvb_data.__file__)) + + #toronto_dir = path.join(data_dir, 'dti_pipeline_toronto') + weights = TVBZenodoDataset().fetch_data('output_ConnectionCapacityMatrix.csv') + tracts = TVBZenodoDataset().fetch_data('output_ConnectionDistanceMatrix.csv') + weights_tmp = weights + '.tmp' + tracts_tmp = tracts + '.tmp' self.storage_interface.copy_file(weights, weights_tmp) self.storage_interface.copy_file(tracts, tracts_tmp) @@ -99,7 +97,8 @@ def test_happy_flow_import(self): Test that 
importing a CFF generates at least one DataType in DB. """ - zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, subject=TEST_SUBJECT_A) field = FilterChain.datatype + '.subject' @@ -131,7 +130,8 @@ def test_happy_flow_import(self): assert (reference_connectivity.region_labels == imported_connectivity.region_labels).all() def test_bad_reference(self): - zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + #zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) field = FilterChain.datatype + '.subject' filters = FilterChain('', [field], [TEST_SUBJECT_A], ['!=']) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py index 990ffb7f85..ba89a14b32 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py @@ -31,8 +31,8 @@ import os import pyAesCrypt import pytest -import tvb_data -import tempfile +#import tvb_data +from tvb.datasets import TVBZenodoDataset from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from cryptography.hazmat.primitives.asymmetric import rsa @@ -46,13 +46,14 @@ class TestEncryptionDecryption(TransactionalTestCase): + tvb_data = TVBZenodoDataset() # noinspection PyTypeChecker - @pytest.mark.parametrize("dir_name, file_name", [('connectivity', 
'connectivity_76.zip'), - ('surfaceData', 'cortex_2x120k.zip'), - ('projectionMatrix', 'projection_meg_276_surface_16k.npy'), - ('h5', 'TimeSeriesRegion.h5')]) - def test_encrypt_decrypt(self, dir_name, file_name): - handler = StorageInterface.get_import_export_encryption_handler() + @pytest.mark.parametrize(" file_name", [('connectivity_76.zip'), + ( 'cortex_2x120k.zip'), + ( 'projection_meg_276_surface_16k.npy'), + ( 'TimeSeriesRegion.h5')]) + def test_encrypt_decrypt(self, file_name): + import_export_encryption_handler = StorageInterface.get_import_export_encryption_handler() # Generate a private key and public key private_key = rsa.generate_private_key( @@ -74,7 +75,9 @@ def test_encrypt_decrypt(self, dir_name, file_name): with open(private_key_path, 'wb') as f: f.write(pem) - path_to_file = os.path.join(os.path.dirname(tvb_data.__file__), dir_name, file_name) + #path_to_file = os.path.join(os.path.dirname(tvb_data.__file__), dir_name, file_name) + path_to_file = self.tvb_data.fetch_data(file_name) + # Create model for ABCUploader connectivity_model = ZIPConnectivityImporterModel() diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/gifti_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/gifti_importer_test.py index 00f1e21662..ffa9976a1c 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/gifti_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/gifti_importer_test.py @@ -29,7 +29,8 @@ """ import os -import tvb_data.gifti as demo_data +#import tvb_data.gifti as demo_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.gifti.parser import GIFTIParser from tvb.core.services.exceptions import OperationException from tvb.storage.storage_interface import StorageInterface @@ -42,9 +43,13 @@ class TestGIFTISurfaceImporter(BaseTestCase): Unit-tests for GIFTI Surface importer. 
""" - GIFTI_SURFACE_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'sample.cortex.gii') - GIFTI_TIME_SERIES_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'sample.time_series.gii') + #GIFTI_SURFACE_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'sample.cortex.gii') + #GIFTI_TIME_SERIES_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'sample.time_series.gii') WRONG_GII_FILE = os.path.abspath(__file__) + + tvb_data = TVBZenodoDataset() + GIFTI_SURFACE_FILE = tvb_data.fetch_data('sample.cortex.gii') + GIFTI_TIME_SERIES_FILE = tvb_data.fetch_data( 'sample.time_series.gii') def setup_method(self): self.test_user = TestFactory.create_user('Gifti_User') diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/mat_timeseries_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/mat_timeseries_importer_test.py index 5c8df773f2..492dc2e640 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/mat_timeseries_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/mat_timeseries_importer_test.py @@ -32,7 +32,8 @@ import os -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.time_series import TimeSeriesRegionIndex from tvb.adapters.uploaders.mat_timeseries_importer import RegionMatTimeSeriesImporterModel, RegionTimeSeriesImporter from tvb.tests.framework.core.base_testcase import BaseTestCase @@ -40,9 +41,12 @@ class TestMatTimeSeriesImporter(BaseTestCase): - base_pth = os.path.join(os.path.dirname(tvb_data.__file__), 'berlinSubjects', 'QL_20120814') - bold_path = os.path.join(base_pth, 'QL_BOLD_regiontimecourse.mat') - connectivity_path = os.path.join(base_pth, 'QL_20120814_Connectivity.zip') + #base_pth = os.path.join(os.path.dirname(tvb_data.__file__), 'berlinSubjects', 'QL_20120814') + tvb_data = TVBZenodoDataset() + bold_path = tvb_data.fetch_data('QL_BOLD_regiontimecourse.mat') + #bold_path = os.path.join(base_pth, 
'QL_BOLD_regiontimecourse.mat') + connectivity_path = tvb_data.fetch_data('QL_20120814_Connectivity.zip') + #connectivity_path = os.path.join(base_pth, 'QL_20120814_Connectivity.zip') def setup_method(self): self.test_user = TestFactory.create_user('Mat_Timeseries_User') diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py index d1c0572e6b..97413f1124 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py @@ -31,8 +31,9 @@ import os import numpy -import tvb_data -import tvb_data.nifti as demo_data +#import tvb_data +#import tvb_data.nifti as demo_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.adapters.datatypes.db.region_mapping import RegionVolumeMappingIndex from tvb.adapters.datatypes.db.structural import StructuralMRIIndex @@ -51,11 +52,18 @@ class TestNIFTIImporter(BaseTestCase): Unit-tests for NIFTI importer. 
""" - NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii') - GZ_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii.gz') - TIMESERIES_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'time_series_152.nii.gz') - WRONG_NII_FILE = os.path.abspath(__file__) - TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'volume_mapping/mapping_FS_76.txt') + #NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii') + #GZ_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii.gz') + #TIMESERIES_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'time_series_152.nii.gz') + #WRONG_NII_FILE = os.path.abspath(__file__) + #TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'volume_mapping/mapping_FS_76.txt') + + tvb_data = TVBZenodoDataset() + NII_FILE = tvb_data.fetch_data('minimal.nii') + GZ_NII_FILE = tvb_data.fetch_data('minimal.nii.gz') + TIMESERIES_NII_FILE = tvb_data.fetch_data('time_series_152.nii.gz') + WRONG_NII_FILE = os.path.abspath(__file__) #? + TXT_FILE = tvb_data.fetch_data('mapping_FS_76.txt') DEFAULT_ORIGIN = [[0.0, 0.0, 0.0]] UNKNOWN_STR = "unknown" @@ -144,7 +152,8 @@ def test_import_region_mapping(self): """ This method tests import of a NIFTI file compressed in GZ format. 
""" - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") to_link_conn = TestFactory.get_entity(self.test_project, ConnectivityIndex) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/obj_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/obj_importer_test.py index 27b0776b10..bea8679714 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/obj_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/obj_importer_test.py @@ -29,8 +29,8 @@ """ import os -import tvb_data.obj - +#import tvb_data.obj +from tvb.datasets import TVBZenodoDataset from tvb.core.neocom import h5 from tvb.datatypes.surfaces import SurfaceTypesEnum from tvb.tests.framework.core.base_testcase import BaseTestCase @@ -41,9 +41,13 @@ class TestObjSurfaceImporter(BaseTestCase): """ Unit-tests for Obj Surface importer. 
""" + + #torus = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'test_torus.obj') + #face = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj') + tvb_data = TVBZenodoDataset() + torus = tvb_data.fetch_data('test_torus.obj') + face = tvb_data.fetch_data('face_surface.obj') - torus = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'test_torus.obj') - face = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj') def setup_method(self): self.test_user = TestFactory.create_user('Obj_Importer_User') diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py index 5723ec6913..9ed34fff46 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py @@ -31,9 +31,10 @@ import os -import tvb_data.projectionMatrix as dataset -import tvb_data.sensors -import tvb_data.surfaceData +#import tvb_data.projectionMatrix as dataset +#import tvb_data.sensors +#import tvb_data.surfaceData +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.projections import ProjectionMatrixIndex from tvb.core.services.exceptions import OperationException from tvb.datatypes.sensors import SensorTypesEnum @@ -54,11 +55,13 @@ def setup_method(self): self.test_user = TestFactory.create_user("UserPM") self.test_project = TestFactory.create_project(self.test_user) - zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'eeg_brainstorm_65.txt') + #zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'eeg_brainstorm_65.txt') + zip_path = TVBZenodoDataset().fetch_data('eeg_brainstorm_65.txt') self.sensors = TestFactory.import_sensors(self.test_user, self.test_project, zip_path, SensorTypesEnum.TYPE_EEG) - zip_path = 
os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') self.surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/region_mapping_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/region_mapping_importer_test.py index 42e1bb6370..50d54feaa9 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/region_mapping_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/region_mapping_importer_test.py @@ -30,8 +30,9 @@ import os import tvb.tests.framework.adapters.uploaders.test_data as test_data -import tvb_data.regionMapping as demo_data -import tvb_data.surfaceData +#import tvb_data.regionMapping as demo_data +#import tvb_data.surfaceData +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.surface import SurfaceIndex from tvb.basic.neotraits.ex import TraitValueError from tvb.core.adapters.exceptions import LaunchException @@ -48,10 +49,15 @@ class TestRegionMappingImporter(BaseTestCase): """ Unit-tests for RegionMapping importer. 
""" + tvb_data = TVBZenodoDataset() - TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.txt') - ZIP_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.zip') - BZ2_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.bz2') + #TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.txt') + #ZIP_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.zip') + #BZ2_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.bz2') + + TXT_FILE = tvb_data.fetch_data('regionMapping_16k_76.txt') + ZIP_FILE = tvb_data.fetch_data('regionMapping_16k_76.zip') + BZ2_FILE = tvb_data.fetch_data('regionMapping_16k_76.bz2') # Wrong data WRONG_FILE_1 = os.path.join(os.path.dirname(test_data.__file__), 'region_mapping_wrong_1.txt') diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/sensors_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/sensors_importer_test.py index d05e14edfb..e0a8ede2b8 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/sensors_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/sensors_importer_test.py @@ -30,7 +30,8 @@ import os -import tvb_data.sensors as demo_data +#import tvb_data.sensors as demo_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.sensors_importer import SensorsImporter, SensorsImporterModel from tvb.core.neocom import h5 from tvb.core.services.exceptions import OperationException @@ -44,8 +45,11 @@ class TestSensorsImporter(BaseTestCase): """ Unit-tests for Sensors importer. 
""" - EEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'eeg_unitvector_62.txt.bz2') - MEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'meg_151.txt.bz2') + #EEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'eeg_unitvector_62.txt.bz2') + #MEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'meg_151.txt.bz2') + tvb_data = TVBZenodoDataset() + EEG_FILE = tvb_data.fetch_data('eeg_unitvector_62.txt.bz2') + MEG_FILE = tvb_data.fetch_data('meg_151.txt.bz2') def setup_method(self): """ diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/zip_surface_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/zip_surface_importer_test.py index d047a63b56..387398b1bc 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/zip_surface_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/zip_surface_importer_test.py @@ -30,7 +30,8 @@ import os -import tvb_data.surfaceData +#import tvb_data.surfaceData +from tvb.datasets import TVBZenodoDataset from tvb.datatypes.surfaces import SurfaceTypesEnum from tvb.tests.framework.core.base_testcase import BaseTestCase from tvb.tests.framework.core.factory import TestFactory @@ -41,7 +42,8 @@ class TestZIPSurfaceImporter(BaseTestCase): Unit-tests for Zip Surface importer. 
""" - surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'outer_skull_4096.zip') + #surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'outer_skull_4096.zip') + surf_skull = TVBZenodoDataset().fetch_data('outer_skull_4096.zip') def setup_method(self): self.test_user = TestFactory.create_user('Zip_Surface_User') diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py b/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py index 92db5dd4c6..cefc81a116 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py @@ -29,8 +29,9 @@ """ import os -import tvb_data.surfaceData -import tvb_data.regionMapping +#import tvb_data.surfaceData +#import tvb_data.regionMapping +from tvb.datasets import TVBZenodoDataset from tvb.core.neocom import h5 from tvb.tests.framework.core.base_testcase import TransactionalTestCase @@ -50,8 +51,11 @@ class TestBrainViewer(TransactionalTestCase): EXPECTED_EXTRA_KEYS = ['urlMeasurePointsLabels', 'urlMeasurePoints', 'pageSize', 'shellObject', 'extended_view', 'legendLabels', 'labelsStateVar', 'labelsModes', 'title'] - cortex = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - region_mapping_path = os.path.join(os.path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + #cortex = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #region_mapping_path = os.path.join(os.path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + tvb_data = TVBZenodoDataset() + cortex = tvb_data.fetch_data('cortex_16384.zip') + region_mapping = tvb_data.fetch_data('regionMapping_16k_76.txt') def transactional_setup_method(self): """ @@ -62,7 +66,8 @@ def transactional_setup_method(self): self.test_user = TestFactory.create_user('Brain_Viewer_User') 
self.test_project = TestFactory.create_project(self.test_user, 'Brain_Viewer_Project') - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + zip_path = self.tvb_data.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") connectivity_idx = TestFactory.get_entity(self.test_project, ConnectivityIndex) assert connectivity_idx is not None diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/connectivityviewer_test.py b/tvb_framework/tvb/tests/framework/adapters/visualizers/connectivityviewer_test.py index 1db6322f11..f297bc9762 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/connectivityviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/connectivityviewer_test.py @@ -28,7 +28,8 @@ """ import os -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.tests.framework.core.base_testcase import TransactionalTestCase @@ -51,7 +52,8 @@ def transactional_setup_method(self): self.test_user = TestFactory.create_user("UserCVV") self.test_project = TestFactory.create_project(self.test_user) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) self.connectivity_index = TestFactory.get_entity(self.test_project, ConnectivityIndex) assert self.connectivity_index is not None diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/sensorsviewer_test.py 
b/tvb_framework/tvb/tests/framework/adapters/visualizers/sensorsviewer_test.py index f6f53d93bc..29307b091b 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/sensorsviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/sensorsviewer_test.py @@ -29,8 +29,9 @@ """ import os -import tvb_data.obj -import tvb_data.sensors +#import tvb_data.obj +#import tvb_data.sensors +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.sensors import SensorsIndex from tvb.adapters.datatypes.db.surface import SurfaceIndex from tvb.adapters.uploaders.sensors_importer import SensorsImporterModel @@ -71,7 +72,8 @@ def test_launch_eeg(self): Check that all required keys are present in output from EegSensorViewer launch. """ # Import Sensors - zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'eeg_unitvector_62.txt.bz2') + #zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'eeg_unitvector_62.txt.bz2') + zip_path = TVBZenodoDataset().fetch_data('eeg_unitvector_62.txt.bz2') TestFactory.import_sensors(self.test_user, self.test_project, zip_path, SensorTypesEnum.TYPE_EEG) field = FilterChain.datatype + '.sensors_type' @@ -79,7 +81,8 @@ def test_launch_eeg(self): sensors_index = TestFactory.get_entity(self.test_project, SensorsIndex, filters) # Import EEGCap - cap_path = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'eeg_cap.obj') + #cap_path = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'eeg_cap.obj') + cap_path = TVBZenodoDataset().fetch_data('eeg_cap.obj') TestFactory.import_surface_obj(self.test_user, self.test_project, cap_path, SurfaceTypesEnum.EEG_CAP_SURFACE) field = FilterChain.datatype + '.surface_type' filters = FilterChain('', [field], [SurfaceTypesEnum.EEG_CAP_SURFACE.value], ['==']) @@ -106,7 +109,8 @@ def test_launch_meg(self): Check that all required keys are present in output from MEGSensorViewer launch. 
""" - zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'meg_151.txt.bz2') + #zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'meg_151.txt.bz2') + zip_path = TVBZenodoDataset().fetch_data('meg_151.txt.bz2') TestFactory.import_sensors(self.test_user, self.test_project, zip_path, SensorTypesEnum.TYPE_MEG) @@ -126,7 +130,8 @@ def test_launch_internal(self): """ Check that all required keys are present in output from InternalSensorViewer launch. """ - zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'seeg_39.txt.bz2') + #zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'seeg_39.txt.bz2') + zip_path = TVBZenodoDataset().fetch_data('seeg_39.txt.bz2') sensors_index = TestFactory.import_sensors(self.test_user, self.test_project, zip_path, SensorTypesEnum.TYPE_INTERNAL) viewer = SensorsViewer() diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/surfaceviewer_test.py b/tvb_framework/tvb/tests/framework/adapters/visualizers/surfaceviewer_test.py index c6715dbbc7..422ff64896 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/surfaceviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/surfaceviewer_test.py @@ -29,8 +29,9 @@ """ import os -import tvb_data.surfaceData -import tvb_data.regionMapping as demo_data +#import tvb_data.surfaceData +#import tvb_data.regionMapping as demo_data +from tvb.datasets import TVBZenodoDataset from uuid import UUID from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex @@ -59,17 +60,20 @@ def transactional_setup_method(self): test_user = TestFactory.create_user('Surface_Viewer_User') self.test_project = TestFactory.create_project(test_user, 'Surface_Viewer_Project') - surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + surf_skull = 
TVBZenodoDataset().fetch_data('cortex_16384.zip') self.surface = TestFactory.import_surface_zip(test_user, self.test_project, surf_skull, SurfaceTypesEnum.CORTICAL_SURFACE) assert self.surface is not None - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') TestFactory.import_zip_connectivity(test_user, self.test_project, zip_path, "John") connectivity_index = TestFactory.get_entity(self.test_project, ConnectivityIndex) assert connectivity_index is not None - TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.txt') + #TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.txt') + TXT_FILE = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt') self.region_mapping = TestFactory.import_region_mapping(test_user, self.test_project, TXT_FILE, self.surface.gid, connectivity_index.gid) assert self.region_mapping is not None diff --git a/tvb_framework/tvb/tests/framework/core/factory.py b/tvb_framework/tvb/tests/framework/core/factory.py index f41d970496..acd85b78a8 100644 --- a/tvb_framework/tvb/tests/framework/core/factory.py +++ b/tvb_framework/tvb/tests/framework/core/factory.py @@ -37,8 +37,8 @@ import os import random import uuid -import tvb_data - +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.adapters.datatypes.db.local_connectivity import LocalConnectivityIndex from tvb.adapters.datatypes.db.projections import ProjectionMatrixIndex @@ -220,7 +220,8 @@ def import_default_project(admin_user=None): if not admin_user: admin_user = TestFactory.create_user() - project_path = os.path.join(os.path.dirname(tvb_data.__file__), 'Default_Project.zip') + #project_path = 
os.path.join(os.path.dirname(tvb_data.__file__), 'Default_Project.zip') + project_path = TVBZenodoDataset().fetch_data('Default_Project.zip') import_service = ImportService() import_service.import_project_structure(project_path, admin_user.id) return import_service.created_projects[0] @@ -312,7 +313,8 @@ def import_zip_connectivity(user, project, zip_path=None, subject=DataTypeMetaDa same_process=True): if zip_path is None: - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') count = dao.count_datatypes(project.id, ConnectivityIndex) view_model = ZIPConnectivityImporterModel() diff --git a/tvb_framework/tvb/tests/framework/core/neotraits/forms_test.py b/tvb_framework/tvb/tests/framework/core/neotraits/forms_test.py index fb49b1ae1c..7add7679e5 100644 --- a/tvb_framework/tvb/tests/framework/core/neotraits/forms_test.py +++ b/tvb_framework/tvb/tests/framework/core/neotraits/forms_test.py @@ -27,7 +27,8 @@ import uuid import numpy import pytest -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.basic.neotraits.api import Attr, Float, Int, NArray, List from tvb.core.entities.file.simulator.view_model import SimulatorAdapterModel @@ -54,7 +55,7 @@ def teardown_method(self): self.clean_database() def test_upload_field(self): - connectivity_file = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + connectivity_file = TVBZenodoDataset().fetch_data('connectivity_96.zip') data_file = Str('Test Upload Field') required_type = '.zip' upload_field = TraitUploadField(data_file, required_type, self.name) diff --git a/tvb_framework/tvb/tests/framework/core/services/import_service_test.py b/tvb_framework/tvb/tests/framework/core/services/import_service_test.py index 54a904c6df..37795c621d 100644 
--- a/tvb_framework/tvb/tests/framework/core/services/import_service_test.py +++ b/tvb_framework/tvb/tests/framework/core/services/import_service_test.py @@ -31,7 +31,8 @@ import os import pytest -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from PIL import Image from time import sleep from tvb.adapters.datatypes.db.mapped_value import ValueWrapperIndex @@ -88,7 +89,8 @@ def test_import_export(self, user_factory, project_factory, value_wrapper_factor """ test_user = user_factory() test_project = project_factory(test_user, "TestImportExport", "test_desc") - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(test_user, test_project, zip_path) value_wrapper = value_wrapper_factory(test_user, test_project) ProjectService.set_datatype_visibility(value_wrapper.gid, False) @@ -138,7 +140,8 @@ def test_import_export_existing(self, user_factory, project_factory): """ test_user = user_factory() test_project = project_factory(test_user, "TestImportExport2") - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(test_user, test_project, zip_path) count_operations = dao.get_filtered_operations(test_project.id, None, is_count=True) @@ -182,7 +185,8 @@ def test_export_import_figures(self, user_factory, project_factory): # Prepare data user = user_factory() project = project_factory(user, "TestImportExportFigures") - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'paupau.zip') + #zip_path = 
os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'paupau.zip') + zip_path = TVBZenodoDataset().fetch_data('paupau.zip') TestFactory.import_zip_connectivity(user, project, zip_path) figure_service = FigureService() diff --git a/tvb_framework/tvb/tests/framework/core/services/links_test.py b/tvb_framework/tvb/tests/framework/core/services/links_test.py index 5a08dffdb5..36dbe4b312 100644 --- a/tvb_framework/tvb/tests/framework/core/services/links_test.py +++ b/tvb_framework/tvb/tests/framework/core/services/links_test.py @@ -32,7 +32,8 @@ """ import pytest import os -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.adapters.datatypes.db.sensors import SensorsIndex from tvb.adapters.exporters.export_manager import ExportManager @@ -66,9 +67,12 @@ def initialize_two_projects(self, dummy_datatype_index_factory, project_factory, src_user = user_factory(username="Links Test") self.src_usr_id = src_user.id self.src_project = project_factory(src_user, "Src_Project") - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'paupau.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'paupau.zip') + tvb_data = TVBZenodoDataset() + zip_path = tvb_data.fetch_data("paupau.zip") self.red_datatype = TestFactory.import_zip_connectivity(src_user, self.src_project, zip_path, "John") - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'sensors', 'eeg_unitvector_62.txt.bz2') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'sensors', 'eeg_unitvector_62.txt.bz2') + zip_path = tvb_data.fetch_data('eeg_unitvector_62.txt.bz2') self.blue_datatype = TestFactory.import_sensors(src_user, self.src_project, zip_path, SensorTypesEnum.TYPE_EEG) assert 1 == self.red_datatypes_in(self.src_project.id) diff --git a/tvb_framework/tvb/tests/framework/core/services/project_service_test.py 
b/tvb_framework/tvb/tests/framework/core/services/project_service_test.py index 7b876beb64..aae5609378 100644 --- a/tvb_framework/tvb/tests/framework/core/services/project_service_test.py +++ b/tvb_framework/tvb/tests/framework/core/services/project_service_test.py @@ -33,7 +33,8 @@ import pytest import sqlalchemy -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.basic.profile import TvbProfile from tvb.core.entities.model import model_datatype, model_project, model_operation from tvb.core.entities.storage import dao @@ -331,11 +332,14 @@ def test_empty_project_has_zero_disk_size(self): def test_project_disk_size(self): project1 = TestFactory.create_project(self.test_user, 'test_proj1') - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + tvb_data = TVBZenodoDataset() + zip_path = tvb_data.fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, project1, zip_path, 'testSubject') project2 = TestFactory.create_project(self.test_user, 'test_proj2') - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + zip_path = tvb_data.fetch_data('connectivity_76.zip') TestFactory.import_zip_connectivity(self.test_user, project2, zip_path, 'testSubject') projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0] diff --git a/tvb_framework/tvb/tests/framework/core/services/serialization_manager_test.py b/tvb_framework/tvb/tests/framework/core/services/serialization_manager_test.py index a7c48010b5..19b4191113 100644 --- a/tvb_framework/tvb/tests/framework/core/services/serialization_manager_test.py +++ b/tvb_framework/tvb/tests/framework/core/services/serialization_manager_test.py @@ -29,7 +29,8 @@ 
""" from os import path -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.adapters.forms.model_forms import ModelsEnum @@ -45,7 +46,8 @@ class TestSerializationManager(TransactionalTestCase): def transactional_setup_method(self): self.test_user = TestFactory.create_user(username="test_user") self.test_project = TestFactory.create_project(self.test_user, "Test") - zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + #zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") self.connectivity = TestFactory.get_entity(self.test_project, ConnectivityIndex) diff --git a/tvb_framework/tvb/tests/framework/interfaces/rest/datatype_resource_test.py b/tvb_framework/tvb/tests/framework/interfaces/rest/datatype_resource_test.py index e811994a36..bb60be982f 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/rest/datatype_resource_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/rest/datatype_resource_test.py @@ -27,7 +27,8 @@ import os import flask import pytest -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.interfaces.rest.commons.exceptions import InvalidIdentifierException from tvb.interfaces.rest.server.resources.datatype.datatype_resource import RetrieveDatatypeResource @@ -53,7 +54,8 @@ def test_server_retrieve_datatype_inexistent_gid(self, mocker): def test_server_retrieve_datatype(self, mocker): self._mock_user(mocker) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 
'connectivity_96.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) datatypes_in_project = self.get_data_in_project_resource.get(project_gid=self.test_project.gid) @@ -79,7 +81,8 @@ def send_file_dummy(path, as_attachment, attachment_filename): def test_server_get_operations_for_datatype(self, mocker): self._mock_user(mocker) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) datatypes_in_project = self.get_data_in_project_resource.get(project_gid=self.test_project.gid) diff --git a/tvb_framework/tvb/tests/framework/interfaces/rest/project_resource_test.py b/tvb_framework/tvb/tests/framework/interfaces/rest/project_resource_test.py index a1320bc645..0fc8a18144 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/rest/project_resource_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/rest/project_resource_test.py @@ -27,7 +27,8 @@ import os import flask import pytest -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.interfaces.rest.commons.exceptions import InvalidIdentifierException from tvb.interfaces.rest.commons.strings import Strings @@ -45,7 +46,9 @@ def transactional_setup_method(self): self.test_user = TestFactory.create_user('Rest_User') self.test_project_without_data = TestFactory.create_project(self.test_user, 'Rest_Project', users=[self.test_user.id]) self.test_project_with_data = TestFactory.create_project(self.test_user, 'Rest_Project2', users=[self.test_user.id]) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = 
os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + tvb_data = TVBZenodoDataset() + zip_path = tvb_data.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project_with_data, zip_path) def test_server_get_data_in_project_inexistent_gid(self, mocker): diff --git a/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py b/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py index 1443c9db3d..df22050065 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py @@ -25,11 +25,13 @@ # import numpy -import tvb_data.connectivity -import tvb_data.surfaceData -import tvb_data.sensors -import tvb_data.regionMapping -import tvb_data.projectionMatrix +#import tvb_data.connectivity +#import tvb_data.surfaceData +#import tvb_data.sensors +#import tvb_data.regionMapping +#import tvb_data.projectionMatrix +from tvb.datasets import TVBZenodoDataset + from os import path from uuid import UUID from unittest.mock import patch @@ -90,7 +92,8 @@ def test_index(self): assert not result_dict['errors'], 'Some errors were encountered!' def test_set_connectivity(self): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") self.sess_mock['connectivity'] = connectivity.gid @@ -126,7 +129,8 @@ def test_set_coupling_params(self): assert self.session_stored_simulator.coupling.b[0] == [0.0], "b value was not set correctly." 
def test_set_surface(self): - zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) surface = TestFactory.get_entity(self.test_project, SurfaceIndex) @@ -147,14 +151,17 @@ def test_set_surface_none(self): assert self.session_stored_simulator.surface is None, "Surface should not be set." def test_set_cortex_without_local_connectivity(self): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") - zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) - text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + #text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + text_file = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt') region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, text_file, surface.gid, connectivity.gid) @@ -176,14 +183,17 @@ def test_set_cortex_without_local_connectivity(self): "coupling_strength was not set correctly." 
def test_set_cortex_with_local_connectivity(self, local_connectivity_index_factory): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") - zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) - text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + #text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + text_file = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt' ) region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, text_file, surface.gid, connectivity.gid) @@ -214,7 +224,8 @@ def test_set_stimulus_none(self): assert self.session_stored_simulator.stimulus is None, "Stimulus should not be set." def test_set_stimulus(self): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') connectivity_index = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) weight_array = numpy.zeros(connectivity_index.number_of_regions) @@ -443,29 +454,35 @@ def test_set_monitor_params(self): assert not rendering_rules['renderer'].include_next_button, 'Next button should not be displayed!' 
def set_region_mapping(self): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") - zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) - text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + #text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') + text_file = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt') region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, text_file, surface.gid, connectivity.gid) return region_mapping def set_eeg(self): - eeg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'eeg_unitvector_62.txt') + #eeg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'eeg_unitvector_62.txt') + eeg_sensors_file = TVBZenodoDataset().fetch_data('eeg_unitvector_62.txt') eeg_sensors = TestFactory.import_sensors(self.test_user, self.test_project, eeg_sensors_file, SensorTypesEnum.TYPE_EEG) - surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + surface_file = TVBZenodoDataset().fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - eeg_projection_file = 
path.join(path.dirname(tvb_data.projectionMatrix.__file__), + #eeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), 'projection_eeg_62_surface_16k.mat') + eeg_projection_file = TVBZenodoDataset().fetch_data('projection_eeg_62_surface_16k.mat') eeg_projection = TestFactory.import_projection_matrix(self.test_user, self.test_project, eeg_projection_file, eeg_sensors.gid, surface.gid) return eeg_sensors, eeg_projection @@ -502,16 +519,19 @@ def test_set_eeg_monitor_params(self): "Projection wasn't stored correctly." def set_meg(self): - meg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'meg_brainstorm_276.txt') + #meg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'meg_brainstorm_276.txt') + meg_sensors_file = TVBZenodoDataset().fetch_data('meg_brainstorm_276.txt') meg_sensors = TestFactory.import_sensors(self.test_user, self.test_project, meg_sensors_file, SensorTypesEnum.TYPE_MEG) - surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + surface_file = TVBZenodoDataset().fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - meg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), + #meg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), 'projection_meg_276_surface_16k.npy') + meg_projection_file = TVBZenodoDataset().fetch_data('projection_meg_276_surface_16k.npy') meg_projection = TestFactory.import_projection_matrix(self.test_user, self.test_project, meg_projection_file, meg_sensors.gid, surface.gid) return meg_sensors, meg_projection @@ -549,16 +569,19 @@ def test_set_meg_monitor_params(self): "Projection wasn't stored correctly." 
def set_seeg(self): - seeg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'seeg_588.txt') + #seeg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'seeg_588.txt') + seeg_sensors_file = TVBZenodoDataset().fetch_data('seeg_588.txt') seeg_sensors = TestFactory.import_sensors(self.test_user, self.test_project, seeg_sensors_file, SensorTypesEnum.TYPE_INTERNAL) - surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + #surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + surface_file = TVBZenodoDataset().fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - seeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), + #seeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), 'projection_seeg_588_surface_16k.npy') + seeg_projection_file = TVBZenodoDataset().fetch_data('projection_seeg_588_surface_16k.npy') seeg_projection = TestFactory.import_projection_matrix(self.test_user, self.test_project, seeg_projection_file, seeg_sensors.gid, surface.gid) return seeg_sensors, seeg_projection @@ -697,7 +720,8 @@ def test_load_burst_history(self): assert len(burst_parameters['burst_list']) == 3, "The burst configurations where not stored." 
def test_reset_simulator_configuration(self): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") self.sess_mock['connectivity'] = connectivity.gid @@ -753,7 +777,8 @@ def test_rename_burst(self): assert dao.get_bursts_for_project(self.test_project.id)[0].name == new_name, "Name wasn't actually changed." def test_copy_simulator_configuration(self): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project) @@ -787,7 +812,8 @@ def test_copy_simulator_configuration(self): assert rendering_rules['renderer'].disable_fields, 'Fragments should be read-only!' def test_load_burst(self): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project) @@ -837,7 +863,8 @@ def test_launch_simulation(self): assert burst_config.status == 'running', 'Simulation launching has failed!' 
def test_launch_branch_simulation(self): - zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") self.sess_mock['input_simulation_name_id'] = 'HappySimulation' diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 451e5f2070..5bf9c86ba3 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -186,3 +186,5 @@ def __eq__(self, other): return self.rec == tvb_data.rec return False + + From c3f5947767490b8d7f3427db2771682dcf5236ca Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sat, 8 Jul 2023 23:12:43 +0530 Subject: [PATCH 60/84] fix tvb_data not found error and one identation error --- .../uploaders/connectivity_measure_importer_test.py | 6 ++++-- .../interfaces/rest/operation_resource_test.py | 13 ++++++++----- .../web/controllers/simulator_controller_test.py | 3 +-- 3 files changed, 13 insertions(+), 9 deletions(-) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_measure_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_measure_importer_test.py index e7003a17d7..c314e5739b 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_measure_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_measure_importer_test.py @@ -32,7 +32,8 @@ import os.path import pytest -import tvb_data +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.graph import ConnectivityMeasureIndex from tvb.adapters.uploaders.connectivity_measure_importer import ConnectivityMeasureImporter from tvb.adapters.uploaders.connectivity_measure_importer import 
ConnectivityMeasureImporterModel @@ -48,7 +49,8 @@ class TestConnectivityMeasureImporter(BaseTestCase): """ def setup_method(self): - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') self.test_user = TestFactory.create_user('Test_User_CM') self.test_project = TestFactory.create_project(self.test_user, "Test_Project_CM") self.connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") diff --git a/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py b/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py index 4600fc6686..ffb92d8413 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py @@ -29,8 +29,8 @@ from uuid import UUID import flask import pytest -import tvb_data - +#import tvb_data +from tvb.datasets import TVBZenodoDataset from tvb.adapters.analyzers.fourier_adapter import FFTAdapterModel from tvb.basic.exceptions import TVBException from tvb.core.neocom import h5 @@ -65,7 +65,8 @@ def test_server_get_operation_status_inexistent_gid(self, mocker): def test_server_get_operation_status(self, mocker): self._mock_user(mocker) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) request_mock = mocker.patch.object(flask, 'request', spec={}) @@ -84,7 +85,8 @@ def test_server_get_operation_results_inexistent_gid(self, mocker): def 
test_server_get_operation_results(self, mocker): self._mock_user(mocker) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) request_mock = mocker.patch.object(flask, 'request', spec={}) @@ -98,7 +100,8 @@ def test_server_get_operation_results(self, mocker): def test_server_get_operation_results_failed_operation(self, mocker): self._mock_user(mocker) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_90.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_90.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_90.zip') with pytest.raises(TVBException): TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) diff --git a/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py b/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py index df22050065..7459fa9bc0 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py @@ -480,8 +480,7 @@ def set_eeg(self): surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #eeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), - 'projection_eeg_62_surface_16k.mat') + #eeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), 'projection_eeg_62_surface_16k.mat') eeg_projection_file = TVBZenodoDataset().fetch_data('projection_eeg_62_surface_16k.mat') eeg_projection = 
TestFactory.import_projection_matrix(self.test_user, self.test_project, eeg_projection_file, eeg_sensors.gid, surface.gid) From f45c69b695a333727751118af430753065674e3e Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sat, 8 Jul 2023 23:35:23 +0530 Subject: [PATCH 61/84] fix the identation --- .../interfaces/web/controllers/simulator_controller_test.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py b/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py index 7459fa9bc0..82df6fd227 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py @@ -528,8 +528,7 @@ def set_meg(self): surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #meg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), - 'projection_meg_276_surface_16k.npy') + #meg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__),'projection_meg_276_surface_16k.npy') meg_projection_file = TVBZenodoDataset().fetch_data('projection_meg_276_surface_16k.npy') meg_projection = TestFactory.import_projection_matrix(self.test_user, self.test_project, meg_projection_file, meg_sensors.gid, surface.gid) @@ -578,8 +577,7 @@ def set_seeg(self): surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #seeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), - 'projection_seeg_588_surface_16k.npy') + #seeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), 'projection_seeg_588_surface_16k.npy') seeg_projection_file = TVBZenodoDataset().fetch_data('projection_seeg_588_surface_16k.npy') 
seeg_projection = TestFactory.import_projection_matrix(self.test_user, self.test_project, seeg_projection_file, seeg_sensors.gid, surface.gid) From 24a21a64b07291e8d2dad770d02d7ea792a6bce3 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sun, 9 Jul 2023 00:07:11 +0530 Subject: [PATCH 62/84] fix the data loading issues that i missed earlier. --- .../adapters/uploaders/projection_matrix_importer_test.py | 4 ++-- tvb_framework/tvb/tests/framework/core/services/links_test.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py index 9ed34fff46..73504c9eb6 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py @@ -75,8 +75,8 @@ def test_wrong_shape(self): """ Verifies that importing a different shape throws exception """ - file_path = os.path.join(os.path.abspath(os.path.dirname(dataset.__file__)), - 'projection_eeg_62_surface_16k.mat') + #file_path = os.path.join(os.path.abspath(os.path.dirname(dataset.__file__)), 'projection_eeg_62_surface_16k.mat') + file_path = TVBZenodoDataset().fetch_data('projection_eeg_62_surface_16k.mat') try: TestFactory.import_projection_matrix(self.test_user, self.test_project, file_path, self.sensors.gid, diff --git a/tvb_framework/tvb/tests/framework/core/services/links_test.py b/tvb_framework/tvb/tests/framework/core/services/links_test.py index 36dbe4b312..3e6ff86f3f 100644 --- a/tvb_framework/tvb/tests/framework/core/services/links_test.py +++ b/tvb_framework/tvb/tests/framework/core/services/links_test.py @@ -211,7 +211,8 @@ def build(): Project dest will have the derived VW and links """ # add a connectivity to src project and link it to dest project - zip_path = 
os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') conn = TestFactory.import_zip_connectivity(self.dst_user, self.src_project, zip_path, "John") self.algorithm_service.create_link(conn.id, self.dest_project.id) From 2105be47979342a9589bee7595c630cff1ac6e88 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sun, 9 Jul 2023 00:33:16 +0530 Subject: [PATCH 63/84] some more fixes.. --- .../adapters/analyzers/timeseries_metrics_adapter_test.py | 3 ++- .../tests/framework/adapters/visualizers/brainviewer_test.py | 2 +- tvb_framework/tvb/tests/framework/core/factory.py | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py b/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py index 77d2ceba0c..d3994aa662 100644 --- a/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py @@ -52,7 +52,8 @@ def transactional_setup_method(self): """ self.test_user = TestFactory.create_user() self.test_project = TestFactory.create_project(self.test_user) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') + zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) def test_adapter_launch(self, connectivity_factory, region_mapping_factory, diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py b/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py 
index cefc81a116..032c4117af 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py @@ -55,7 +55,7 @@ class TestBrainViewer(TransactionalTestCase): #region_mapping_path = os.path.join(os.path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') tvb_data = TVBZenodoDataset() cortex = tvb_data.fetch_data('cortex_16384.zip') - region_mapping = tvb_data.fetch_data('regionMapping_16k_76.txt') + region_mapping_path = tvb_data.fetch_data('regionMapping_16k_76.txt') def transactional_setup_method(self): """ diff --git a/tvb_framework/tvb/tests/framework/core/factory.py b/tvb_framework/tvb/tests/framework/core/factory.py index acd85b78a8..c0a9327604 100644 --- a/tvb_framework/tvb/tests/framework/core/factory.py +++ b/tvb_framework/tvb/tests/framework/core/factory.py @@ -221,7 +221,7 @@ def import_default_project(admin_user=None): admin_user = TestFactory.create_user() #project_path = os.path.join(os.path.dirname(tvb_data.__file__), 'Default_Project.zip') - project_path = TVBBZenodoDataset().fetch_data('Default_Project.zip') + project_path = TVBZenodoDataset().fetch_data('Default_Project.zip') import_service = ImportService() import_service.import_project_structure(project_path, admin_user.id) return import_service.created_projects[0] From c4869fcec304a37cba57fe9ddc656eabc9317da5 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Tue, 11 Jul 2023 23:08:17 +0530 Subject: [PATCH 64/84] fix typo --- .../tests/framework/adapters/uploaders/encrypt_decrypt_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py index ba89a14b32..152bc3be9a 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py +++ 
b/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py @@ -53,7 +53,7 @@ class TestEncryptionDecryption(TransactionalTestCase): ( 'projection_meg_276_surface_16k.npy'), ( 'TimeSeriesRegion.h5')]) def test_encrypt_decrypt(self, file_name): - import_export_encryption_handler = StorageInterface.get_import_export_encryption_handler() + handler = StorageInterface.get_import_export_encryption_handler() # Generate a private key and public key private_key = rsa.generate_private_key( From 230f88fc9cf20c91aab8adc85be123f8d4bbd03b Mon Sep 17 00:00:00 2001 From: abhi_win Date: Wed, 12 Jul 2023 23:32:37 +0530 Subject: [PATCH 65/84] removed the commented lines, removed the tvb_data setup from ci --- .github/workflows/build.yml | 18 ---- .github/workflows/lib-tests.yml_disabled | 11 +-- .github/workflows/notebooks.yml | 15 ---- .github/workflows/pg-tests.yml | 16 ---- .github/workflows/win-tests.yml | 17 +--- tvb_build/build_step1.py | 2 - tvb_build/docker/Dockerfile-build | 6 +- tvb_build/docker/Dockerfile-run | 5 +- tvb_build/docker/Dockerfile-win | 6 -- .../scripts/datatypes/lookup_tables.py | 1 - .../code_update_scripts/4455_update_code.py | 8 +- .../code_update_scripts/4750_update_code.py | 2 - .../code_update_scripts/6093_update_code.py | 2 - .../code_update_scripts/6600_update_code.py | 2 - .../tvb/core/services/user_service.py | 2 - .../tvb/interfaces/command/benchmark.py | 13 +-- .../brain_tumor_connectivity_importer.py | 10 +-- .../interfaces/rest/client/examples/utils.py | 1 - .../framework/adapters/analyzers/bct_test.py | 2 - .../timeseries_metrics_adapter_test.py | 2 - .../creators/stimulus_creator_test.py | 10 +-- .../simulator/simulator_adapter_test.py | 9 +- .../connectivity_measure_importer_test.py | 2 - .../adapters/uploaders/csv_importer_test.py | 15 ++-- .../uploaders/encrypt_decrypt_test.py | 6 +- .../adapters/uploaders/gifti_importer_test.py | 9 +- .../uploaders/mat_timeseries_importer_test.py | 10 +-- 
.../adapters/uploaders/nifti_importer_test.py | 18 ++-- .../adapters/uploaders/obj_importer_test.py | 9 +- .../projection_matrix_importer_test.py | 16 ++-- .../uploaders/region_mapping_importer_test.py | 17 ++-- .../uploaders/sensors_importer_test.py | 9 +- .../uploaders/zip_surface_importer_test.py | 2 - .../adapters/visualizers/brainviewer_test.py | 13 +-- .../visualizers/connectivityviewer_test.py | 2 - .../visualizers/sensorsviewer_test.py | 15 ++-- .../visualizers/surfaceviewer_test.py | 12 +-- .../tvb/tests/framework/core/factory.py | 5 +- .../framework/core/neotraits/forms_test.py | 1 - .../core/services/import_service_test.py | 12 +-- .../framework/core/services/links_test.py | 13 ++- .../core/services/project_service_test.py | 9 +- .../services/serialization_manager_test.py | 2 - .../interfaces/rest/datatype_resource_test.py | 9 +- .../rest/operation_resource_test.py | 20 +++-- .../interfaces/rest/project_resource_test.py | 6 +- .../controllers/simulator_controller_test.py | 82 ++++++------------- tvb_library/setup.py | 2 +- ...set_test.py => tvb_zenodo_dataset_test.py} | 7 -- 49 files changed, 126 insertions(+), 357 deletions(-) rename tvb_library/tvb/tests/library/datasets/{TVBZenodoDataset_test.py => tvb_zenodo_dataset_test.py} (99%) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6f2a1c5c19..5c8a4ec9ea 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -47,25 +47,7 @@ jobs: cd tvb_build bash install_full_tvb.sh - #- name: cache data - # id: cache-data - # uses: actions/cache@v3 - # with: - # path: tvb_data - # key: tvb-data - #- name: download data - # if: steps.cache-data.outputs.cache-hit != 'true' - # run: | - # wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip - # mkdir tvb_data - # unzip tvb_data.zip -d tvb_data - # rm tvb_data.zip - - #- name: setup data - # run: | - # cd tvb_data - # python3 setup.py develop - name: run library tests run: pytest -v 
tvb_library --cov --cov-report=xml && mv coverage.xml coverage-library.xml diff --git a/.github/workflows/lib-tests.yml_disabled b/.github/workflows/lib-tests.yml_disabled index b9d2d80531..921bf2c63e 100644 --- a/.github/workflows/lib-tests.yml_disabled +++ b/.github/workflows/lib-tests.yml_disabled @@ -27,16 +27,7 @@ jobs: pip3 install pipenv cd tvb_library && pipenv install -d --python $(which python3) - #- name: download data - # run: | - # wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip - # mkdir tvb_data - # unzip tvb_data.zip -d tvb_data - # rm tvb_data.zip - - #- name: setup data - # run: | - # cd tvb_library && pipenv run bash -c 'cd ../tvb_data && python3 setup.py develop' + - name: importlib_metadata? run: cd tvb_library && pipenv install importlib_metadata diff --git a/.github/workflows/notebooks.yml b/.github/workflows/notebooks.yml index 7728a8a70c..6773c09787 100644 --- a/.github/workflows/notebooks.yml +++ b/.github/workflows/notebooks.yml @@ -40,22 +40,7 @@ jobs: cd tvb_build cmd /k "install_full_tvb.bat" - #- name: cache data - # id: cache-data - # uses: actions/cache@v3 - # with: - # path: tvb_data - # key: tvbdata - #- name: download data - # if: steps.cache-data.outputs.cache-hit != 'true' - # shell: pwsh - # run: | - # Invoke-WebRequest -OutFile C:\\TEMP\\tvb_data.zip -Uri "https://zenodo.org/record/7574266/files/tvb_data.zip?download=1" - # Expand-Archive 'C:\\TEMP\\tvb_data.zip' C:\\tvb_data - # del C:\\TEMP\\tvb_data.zip - # cd C:\\tvb_data - # python setup.py develop - name: run notebooks env: diff --git a/.github/workflows/pg-tests.yml b/.github/workflows/pg-tests.yml index 0abac2e24c..e49b3cf3c5 100644 --- a/.github/workflows/pg-tests.yml +++ b/.github/workflows/pg-tests.yml @@ -52,23 +52,7 @@ jobs: - name: setup tvb run: cd tvb_build && bash install_full_tvb.sh - #- name: cache data - # id: cache-data - # uses: actions/cache@v3 - # with: - # path: tvb_data - # key: tvb-data - #- name: download 
data - # if: steps.cache-data.outputs.cache-hit != 'true' - # run: | - # wget -q https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip - # mkdir tvb_data - # unzip tvb_data.zip -d tvb_data - # rm tvb_data.zip - - #- name: setup data - # run: cd tvb_data && python3 setup.py develop - name: run framework tests run: | diff --git a/.github/workflows/win-tests.yml b/.github/workflows/win-tests.yml index 2d86279e54..f4ba58bb25 100644 --- a/.github/workflows/win-tests.yml +++ b/.github/workflows/win-tests.yml @@ -34,22 +34,7 @@ jobs: pip install -r tvb_framework/requirements.txt pip install --no-build-isolation tvb-gdist - #- name: cache data - # id: cache-data - # uses: actions/cache@v3 - # with: - # path: tvb_data - # key: tvbdata - - #- name: download data - # if: steps.cache-data.outputs.cache-hit != 'true' - # shell: pwsh - # run: | - # Invoke-WebRequest -OutFile C:\\TEMP\\tvb_data.zip -Uri "https://zenodo.org/record/7574266/files/tvb_data.zip?download=1" - # Expand-Archive 'C:\\TEMP\\tvb_data.zip' C:\\tvb_data - # del C:\\TEMP\\tvb_data.zip - # cd C:\\tvb_data - # python setup.py develop + - name: run framework tests shell: pwsh diff --git a/tvb_build/build_step1.py b/tvb_build/build_step1.py index f7abecb888..3541f95bd7 100644 --- a/tvb_build/build_step1.py +++ b/tvb_build/build_step1.py @@ -44,7 +44,6 @@ import requests import tvb_bin -#import tvb_data from tvb.datasets import TVBZenodoDataset from subprocess import Popen, PIPE @@ -54,7 +53,6 @@ FW_FOLDER = os.path.join(TVB_ROOT, 'tvb_framework') LICENSE_PATH = os.path.join(FW_FOLDER, 'LICENSE') RELEASE_NOTES_PATH = os.path.join(TVB_ROOT, 'tvb_documentation', 'RELEASE_NOTES') -#DATA_SRC_FOLDER = os.path.dirname(tvb_data.__file__) DATA_SRC_FOLDER = TVBZenodoDataset().extract_dir DEMOS_MATLAB_FOLDER = os.path.join(TVB_ROOT, 'tvb_documentation', 'matlab') diff --git a/tvb_build/docker/Dockerfile-build b/tvb_build/docker/Dockerfile-build index 5299909282..4dc15045d6 100644 --- 
a/tvb_build/docker/Dockerfile-build +++ b/tvb_build/docker/Dockerfile-build @@ -38,11 +38,7 @@ RUN /bin/bash -c "source activate tvb-run"; \ /opt/conda/envs/tvb-run/bin/jupyter notebook --generate-config; \ echo "c.NotebookApp.password='sha1:12bff019c253:9daecd92c2e9bdb10b3b8a06767a74a0fe078d7c'">>$JUPYTER_CONFIG/jupyter_notebook_config.py -#RUN wget https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip; \ -# mkdir tvb_data; unzip tvb_data.zip -d tvb_data; rm tvb_data.zip; \ -# cd tvb_data; \ -# /opt/conda/envs/tvb-run/bin/python setup.py develop;\ -# /opt/conda/envs/tvb-docs/bin/python setup.py develop + WORKDIR $USER_HOME COPY requirements_group requirements.txt diff --git a/tvb_build/docker/Dockerfile-run b/tvb_build/docker/Dockerfile-run index d634c1d1d2..4ac08786a3 100644 --- a/tvb_build/docker/Dockerfile-run +++ b/tvb_build/docker/Dockerfile-run @@ -31,10 +31,7 @@ RUN /bin/bash -c "source activate tvb-run"; \ $ENV_BIN/jupyter notebook --generate-config; \ echo "c.NotebookApp.password='sha1:12bff019c253:9daecd92c2e9bdb10b3b8a06767a74a0fe078d7c'">>$JUPYTER_CONFIG/jupyter_notebook_config.py -RUN wget https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 -O tvb_data.zip; \ - mkdir tvb_data; unzip tvb_data.zip -d tvb_data; rm tvb_data.zip; \ - cd tvb_data; \ - $ENV_BIN/python setup.py develop + WORKDIR $USER_HOME COPY requirements_group requirements.txt diff --git a/tvb_build/docker/Dockerfile-win b/tvb_build/docker/Dockerfile-win index 40d21eba5f..4152cea4fd 100644 --- a/tvb_build/docker/Dockerfile-win +++ b/tvb_build/docker/Dockerfile-win @@ -17,12 +17,6 @@ RUN activate tvb-run && pip install lockfile scikit-build RUN activate tvb-run && pip install syncrypto -# Download and install tvb data -RUN mkdir C:\\TVB_CODE -WORKDIR /TVB_CODE -ADD https://zenodo.org/record/7574266/files/tvb_data.zip?download=1 tvb_data.zip -RUN tar -xf tvb_data.zip && dir && del tvb_data.zip -RUN activate tvb-run && python setup.py develop COPY 
requirements_group requirements.txt RUN activate tvb-run && pip install -r requirements.txt diff --git a/tvb_contrib/tvb/contrib/scripts/datatypes/lookup_tables.py b/tvb_contrib/tvb/contrib/scripts/datatypes/lookup_tables.py index cb4d573ccb..8396a739c1 100644 --- a/tvb_contrib/tvb/contrib/scripts/datatypes/lookup_tables.py +++ b/tvb_contrib/tvb/contrib/scripts/datatypes/lookup_tables.py @@ -76,7 +76,6 @@ class LookUpTable(HasTraits): @staticmethod def populate_table(result, source_file): source_full_path = TVBZenodoDataset().fetch_data(source_file) - #source_full_path = try_get_absolute_path("tvb_data.tables", source_file) zip_data = numpy.load(source_full_path) result.df = zip_data['df'] diff --git a/tvb_framework/tvb/core/code_versions/code_update_scripts/4455_update_code.py b/tvb_framework/tvb/core/code_versions/code_update_scripts/4455_update_code.py index 7b8636295c..00be86d4a3 100644 --- a/tvb_framework/tvb/core/code_versions/code_update_scripts/4455_update_code.py +++ b/tvb_framework/tvb/core/code_versions/code_update_scripts/4455_update_code.py @@ -29,7 +29,6 @@ """ import os -#import tvb_data.obj from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.obj_importer import ObjSurfaceImporter from tvb.basic.logger.builder import get_logger @@ -37,10 +36,9 @@ from tvb.core.entities.storage import dao from tvb.datatypes.surfaces import SurfaceTypesEnum -#DATA_FILE_EEG_CAP = os.path.join(os.path.dirname(tvb_data.obj.__file__), "eeg_cap.obj") -#DATA_FILE_FACE = os.path.join(os.path.dirname(tvb_data.obj.__file__), "face_surface.obj") -DATA_FILE_EEG_CAP = TVBZenodoDataset().fetch_data("eeg_cap.obj") -DATA_FILE_FACE = TVBZenodoDataset().fetch_data('face_surface.obj') +dataset = TVBZenodoDataset() +DATA_FILE_EEG_CAP = dataset.fetch_data("eeg_cap.obj") +DATA_FILE_FACE = dataset.fetch_data('face_surface.obj') LOGGER = get_logger(__name__) PAGE_SIZE = 20 diff --git a/tvb_framework/tvb/core/code_versions/code_update_scripts/4750_update_code.py 
b/tvb_framework/tvb/core/code_versions/code_update_scripts/4750_update_code.py index d2c999c579..1cb0ac2dbc 100644 --- a/tvb_framework/tvb/core/code_versions/code_update_scripts/4750_update_code.py +++ b/tvb_framework/tvb/core/code_versions/code_update_scripts/4750_update_code.py @@ -28,14 +28,12 @@ .. moduleauthor:: Bogdan Neacsa """ import os -#import tvb_data.sensors from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.sensors_importer import SensorsImporter from tvb.basic.logger.builder import get_logger from tvb.core.entities.storage import dao from tvb.core.services.operation_service import OperationService -#DATA_FILE = os.path.join(os.path.dirname(tvb_data.sensors.__file__), "seeg_39.txt.bz2") DATA_FILE = TVBZenodoDataset().fetch_data('seeg_39.txt.bz2') LOGGER = get_logger(__name__) diff --git a/tvb_framework/tvb/core/code_versions/code_update_scripts/6093_update_code.py b/tvb_framework/tvb/core/code_versions/code_update_scripts/6093_update_code.py index 59d30e5dbc..76e0a7dd79 100644 --- a/tvb_framework/tvb/core/code_versions/code_update_scripts/6093_update_code.py +++ b/tvb_framework/tvb/core/code_versions/code_update_scripts/6093_update_code.py @@ -30,7 +30,6 @@ .. 
moduleauthor:: Mihai Andrei """ import os -#import tvb_data.obj from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.obj_importer import ObjSurfaceImporter from tvb.basic.logger.builder import get_logger @@ -38,7 +37,6 @@ from tvb.core.services.operation_service import OperationService from tvb.datatypes.surfaces import SurfaceTypesEnum -#DATA_FILE_FACE = os.path.join(os.path.dirname(tvb_data.obj.__file__), "face_surface.obj") DATA_FILE_FACE = TVBZenodoDataset().fetch_data('face_surface.obj') LOGGER = get_logger(__name__) diff --git a/tvb_framework/tvb/core/code_versions/code_update_scripts/6600_update_code.py b/tvb_framework/tvb/core/code_versions/code_update_scripts/6600_update_code.py index ea6f5e1d4a..03ef9089e6 100644 --- a/tvb_framework/tvb/core/code_versions/code_update_scripts/6600_update_code.py +++ b/tvb_framework/tvb/core/code_versions/code_update_scripts/6600_update_code.py @@ -33,10 +33,8 @@ from tvb.basic.logger.builder import get_logger from tvb.core.entities.storage import dao from tvb.core.services.import_service import ImportService -#import tvb_data from tvb.datasets import TVBZenodoDataset -#DATA_FILE = os.path.join(os.path.dirname(tvb_data.__file__), "Default_Project.zip") DATA_FILE = TVBZenodoDataset().fetch_data('Default_Project.zip') LOGGER = get_logger(__name__) diff --git a/tvb_framework/tvb/core/services/user_service.py b/tvb_framework/tvb/core/services/user_service.py index 7a0cbd62a6..0fe08e7415 100644 --- a/tvb_framework/tvb/core/services/user_service.py +++ b/tvb_framework/tvb/core/services/user_service.py @@ -33,7 +33,6 @@ import os import random import six -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.basic.logger.builder import get_logger from tvb.basic.profile import TvbProfile @@ -121,7 +120,6 @@ def create_user(self, username=None, display_name=None, password=None, password2 user = dao.store_entity(user) if role == ROLE_ADMINISTRATOR and not skip_import: - #to_upload = 
os.path.join(os.path.dirname(tvb_data.__file__), "Default_Project.zip") to_upload = TVBZenodoDataset().fetch_data('Default_Project.zip') if not os.path.exists(to_upload): self.logger.warning("Could not find DEFAULT PROJECT at path %s. You might want to import it " diff --git a/tvb_framework/tvb/interfaces/command/benchmark.py b/tvb_framework/tvb/interfaces/command/benchmark.py index 9acfd1ca85..02a536d459 100644 --- a/tvb_framework/tvb/interfaces/command/benchmark.py +++ b/tvb_framework/tvb/interfaces/command/benchmark.py @@ -31,7 +31,6 @@ from os import path -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.core.entities.file.simulator.view_model import HeunDeterministicViewModel @@ -57,16 +56,12 @@ def _fire_simulation(project_id, simulator_vm): def _create_bench_project(): prj = new_project("benchmark_project_ %s" % datetime.now()) - #data_dir = path.abspath(path.dirname(tvb_data.__file__)) - #zip_path = path.join(data_dir, 'connectivity', 'connectivity_68.zip') - tvb_data = TVBZenodoDataset() - zip_path = tvb_data.fetch_data('connectivity_68.zip') + dataset = TVBZenodoDataset() + zip_path = dataset.fetch_data('connectivity_68.zip') import_conn_zip(prj.id, zip_path) - #zip_path = path.join(data_dir, 'connectivity', 'connectivity_96.zip') - zip_path = tvb_data.fetch_data('connectivity_96.zip') + zip_path = dataset.fetch_data('connectivity_96.zip') import_conn_zip(prj.id, zip_path) - #zip_path = path.join(data_dir, 'connectivity', 'connectivity_192.zip') - zip_path = tvb_data.fetch_data('connectivity_192.zip') + zip_path = dataset.fetch_data('connectivity_192.zip') import_conn_zip(prj.id, zip_path) conn68 = dao.get_generic_entity(ConnectivityIndex, 68, "number_of_regions")[0] diff --git a/tvb_framework/tvb/interfaces/command/demos/importers/brain_tumor_connectivity_importer.py b/tvb_framework/tvb/interfaces/command/demos/importers/brain_tumor_connectivity_importer.py index 
195ccba551..7bbb0a369a 100644 --- a/tvb_framework/tvb/interfaces/command/demos/importers/brain_tumor_connectivity_importer.py +++ b/tvb_framework/tvb/interfaces/command/demos/importers/brain_tumor_connectivity_importer.py @@ -31,6 +31,7 @@ .. moduleauthor:: Bogdan Valean """ import sys +from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.region_mapping_importer import RegionMappingImporter, RegionMappingImporterModel from tvb.adapters.uploaders.zip_surface_importer import ZIPSurfaceImporter, ZIPSurfaceImporterModel from tvb.basic.logger.builder import get_logger @@ -66,12 +67,9 @@ def import_tumor_connectivities(project_id, folder_path): def import_surface_rm(project_id, conn_gid): # Import surface and region mapping from tvb_data berlin subjects (68 regions) - #rm_file = try_get_absolute_path("tvb_data", "berlinSubjects/DH_20120806/DH_20120806_RegionMapping.txt") - #surface_zip_file = try_get_absolute_path("tvb_data", "berlinSubjects/DH_20120806/DH_20120806_Surface_Cortex.zip") - from tvb.datasets import TVBZenodoDataset - tvb_data = TVBZenodoDataset() - rm_file = tvb_data.fetch_data('DH_20120806_RegionMapping.txt') - surface_zip_file = tvb_data.fetch_data('DH_20120806_Surface_Cortex.zip') + dataset = TVBZenodoDataset() + rm_file = dataset.fetch_data('DH_20120806_RegionMapping.txt') + surface_zip_file = dataset.fetch_data('DH_20120806_Surface_Cortex.zip') surface_importer = ABCAdapter.build_adapter_from_class(ZIPSurfaceImporter) surface_imp_model = ZIPSurfaceImporterModel() diff --git a/tvb_framework/tvb/interfaces/rest/client/examples/utils.py b/tvb_framework/tvb/interfaces/rest/client/examples/utils.py index b88b675f05..0111c74980 100644 --- a/tvb_framework/tvb/interfaces/rest/client/examples/utils.py +++ b/tvb_framework/tvb/interfaces/rest/client/examples/utils.py @@ -28,7 +28,6 @@ import sys import time -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.basic.logger.builder import get_logger from 
tvb.core.entities.model.model_operation import STATUS_ERROR, STATUS_CANCELED, STATUS_FINISHED diff --git a/tvb_framework/tvb/tests/framework/adapters/analyzers/bct_test.py b/tvb_framework/tvb/tests/framework/adapters/analyzers/bct_test.py index 202fbf8f66..b8aca921ff 100644 --- a/tvb_framework/tvb/tests/framework/adapters/analyzers/bct_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/analyzers/bct_test.py @@ -29,7 +29,6 @@ """ import os -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.analyzers.bct_adapters import BaseBCTModel from tvb.core.entities.model.model_operation import Algorithm @@ -57,7 +56,6 @@ def transactional_setup_method(self): self.test_user = TestFactory.create_user("BCT_User") self.test_project = TestFactory.create_project(self.test_user, "BCT-Project") # Make sure Connectivity is in DB - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') self.connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) diff --git a/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py b/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py index d3994aa662..20b089f39c 100644 --- a/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/analyzers/timeseries_metrics_adapter_test.py @@ -29,7 +29,6 @@ """ import os -#import tvb_data from tvb.datasets import TVBZenodoDataset import json @@ -52,7 +51,6 @@ def transactional_setup_method(self): """ self.test_user = TestFactory.create_user() self.test_project = TestFactory.create_project(self.test_user) - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') 
TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) diff --git a/tvb_framework/tvb/tests/framework/adapters/creators/stimulus_creator_test.py b/tvb_framework/tvb/tests/framework/adapters/creators/stimulus_creator_test.py index 46169239df..64122a9dde 100644 --- a/tvb_framework/tvb/tests/framework/adapters/creators/stimulus_creator_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/creators/stimulus_creator_test.py @@ -27,8 +27,6 @@ import json import os import numpy -#import tvb_data -#import tvb_data.surfaceData from tvb.datasets import TVBZenodoDataset from tvb.adapters.creators.stimulus_creator import RegionStimulusCreator, SurfaceStimulusCreator @@ -53,15 +51,13 @@ def transactional_setup_method(self): self.test_project = TestFactory.create_project(self.test_user, "Stim_Project") self.storage_interface = StorageInterface() - tvb_data = TVBZenodoDataset() + dataset = TVBZenodoDataset() - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') - zip_path = tvb_data.fetch_data('connectivity_66.zip') + zip_path = dataset.fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) self.connectivity = TestFactory.get_entity(self.test_project, ConnectivityIndex) - #cortex = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - cortex = tvb_data.fetch_data('cortex_16384.zip') + cortex = dataset.fetch_data('cortex_16384.zip') self.surface = TestFactory.import_surface_zip(self.test_user, self.test_project, cortex, SurfaceTypesEnum.CORTICAL_SURFACE) diff --git a/tvb_framework/tvb/tests/framework/adapters/simulator/simulator_adapter_test.py b/tvb_framework/tvb/tests/framework/adapters/simulator/simulator_adapter_test.py index ac22a01d8e..5f9210c3c0 100644 --- a/tvb_framework/tvb/tests/framework/adapters/simulator/simulator_adapter_test.py +++ 
b/tvb_framework/tvb/tests/framework/adapters/simulator/simulator_adapter_test.py @@ -28,8 +28,6 @@ .. moduleauthor:: Lia Domide """ -#import tvb_data.surfaceData -#import tvb_data.regionMapping from tvb.datasets import TVBZenodoDataset from os import path @@ -112,13 +110,12 @@ def test_estimate_execution_time(self, connectivity_index_factory): self.simulator_adapter.configure(model) estimation1 = self.simulator_adapter.get_execution_time_approximation(model) + dataset = TVBZenodoDataset() # import surfaceData and region mapping - #cortex_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - cortex_file = TVBZenodoDataset().fetch_data('cortex_16384.zip') + cortex_file = dataset.fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, cortex_file, SurfaceTypesEnum.CORTICAL_SURFACE) - #rm_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') - rm_file = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt') + rm_file = dataset.fetch_data('regionMapping_16k_76.txt') region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, rm_file, surface.gid, model.connectivity.hex) local_conn = TestFactory.create_local_connectivity(self.test_user, self.test_project, surface.gid) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_measure_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_measure_importer_test.py index c314e5739b..bc0d58d340 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_measure_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_measure_importer_test.py @@ -32,7 +32,6 @@ import os.path import pytest -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.graph import ConnectivityMeasureIndex from tvb.adapters.uploaders.connectivity_measure_importer import 
ConnectivityMeasureImporter @@ -49,7 +48,6 @@ class TestConnectivityMeasureImporter(BaseTestCase): """ def setup_method(self): - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') self.test_user = TestFactory.create_user('Test_User_CM') self.test_project = TestFactory.create_project(self.test_user, "Test_Project_CM") diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/csv_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/csv_importer_test.py index fb18ce3543..04080dd6f9 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/csv_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/csv_importer_test.py @@ -45,7 +45,6 @@ class TestCSVConnectivityParser(BaseTestCase): - #BASE_PTH = path.join(path.dirname(tvb_data.__file__), 'dti_pipeline_toronto') def test_parse_happy(self): cap_pth = TVBZenodoDataset().fetch_data('output_ConnectionDistanceMatrix.csv') @@ -61,6 +60,7 @@ class TestCSVConnectivityImporter(BaseTestCase): """ Unit-tests for csv connectivity importer. 
""" + dataset = TVBZenodoDataset() def setup_method(self): self.test_user = TestFactory.create_user() @@ -75,11 +75,8 @@ def teardown_method(self): def _import_csv_test_connectivity(self, reference_connectivity_gid, subject): ### First prepare input data: - #data_dir = path.abspath(path.dirname(tvb_data.__file__)) - - #toronto_dir = path.join(data_dir, 'dti_pipeline_toronto') - weights = TVBZenodoDataset().fetch_data('output_ConnectionCapacityMatrix.csv') - tracts = TVBZenodoDataset().fetch_data('output_ConnectionDistanceMatrix.csv') + weights = self.dataset.fetch_data('output_ConnectionCapacityMatrix.csv') + tracts = self.dataset.fetch_data('output_ConnectionDistanceMatrix.csv') weights_tmp = weights + '.tmp' tracts_tmp = tracts + '.tmp' self.storage_interface.copy_file(weights, weights_tmp) @@ -97,8 +94,7 @@ def test_happy_flow_import(self): Test that importing a CFF generates at least one DataType in DB. """ - #zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') + zip_path = self.dataset.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, subject=TEST_SUBJECT_A) field = FilterChain.datatype + '.subject' @@ -130,8 +126,7 @@ def test_happy_flow_import(self): assert (reference_connectivity.region_labels == imported_connectivity.region_labels).all() def test_bad_reference(self): - #zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) field = FilterChain.datatype + '.subject' filters = FilterChain('', [field], [TEST_SUBJECT_A], ['!=']) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py 
b/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py index 152bc3be9a..226abde59d 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py @@ -31,7 +31,6 @@ import os import pyAesCrypt import pytest -#import tvb_data from tvb.datasets import TVBZenodoDataset from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization @@ -46,7 +45,7 @@ class TestEncryptionDecryption(TransactionalTestCase): - tvb_data = TVBZenodoDataset() + dataset = TVBZenodoDataset() # noinspection PyTypeChecker @pytest.mark.parametrize(" file_name", [('connectivity_76.zip'), ( 'cortex_2x120k.zip'), @@ -75,8 +74,7 @@ def test_encrypt_decrypt(self, file_name): with open(private_key_path, 'wb') as f: f.write(pem) - #path_to_file = os.path.join(os.path.dirname(tvb_data.__file__), dir_name, file_name) - path_to_file = self.tvb_data.fetch_data(file_name) + path_to_file = self.dataset.fetch_data(file_name) # Create model for ABCUploader diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/gifti_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/gifti_importer_test.py index ffa9976a1c..c0735b4c45 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/gifti_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/gifti_importer_test.py @@ -29,7 +29,6 @@ """ import os -#import tvb_data.gifti as demo_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.gifti.parser import GIFTIParser from tvb.core.services.exceptions import OperationException @@ -43,13 +42,11 @@ class TestGIFTISurfaceImporter(BaseTestCase): Unit-tests for GIFTI Surface importer. 
""" - #GIFTI_SURFACE_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'sample.cortex.gii') - #GIFTI_TIME_SERIES_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'sample.time_series.gii') WRONG_GII_FILE = os.path.abspath(__file__) - tvb_data = TVBZenodoDataset() - GIFTI_SURFACE_FILE = tvb_data.fetch_data('sample.cortex.gii') - GIFTI_TIME_SERIES_FILE = tvb_data.fetch_data( 'sample.time_series.gii') + dataset = TVBZenodoDataset() + GIFTI_SURFACE_FILE = dataset.fetch_data('sample.cortex.gii') + GIFTI_TIME_SERIES_FILE = dataset.fetch_data( 'sample.time_series.gii') def setup_method(self): self.test_user = TestFactory.create_user('Gifti_User') diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/mat_timeseries_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/mat_timeseries_importer_test.py index 492dc2e640..855b08d504 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/mat_timeseries_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/mat_timeseries_importer_test.py @@ -32,7 +32,6 @@ import os -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.time_series import TimeSeriesRegionIndex from tvb.adapters.uploaders.mat_timeseries_importer import RegionMatTimeSeriesImporterModel, RegionTimeSeriesImporter @@ -41,12 +40,9 @@ class TestMatTimeSeriesImporter(BaseTestCase): - #base_pth = os.path.join(os.path.dirname(tvb_data.__file__), 'berlinSubjects', 'QL_20120814') - tvb_data = TVBZenodoDataset() - bold_path = tvb_data.fetch_data('QL_BOLD_regiontimecourse.mat') - #bold_path = os.path.join(base_pth, 'QL_BOLD_regiontimecourse.mat') - connectivity_path = tvb_data.fetch_data('QL_20120814_Connectivity.zip') - #connectivity_path = os.path.join(base_pth, 'QL_20120814_Connectivity.zip') + dataset = TVBZenodoDataset() + bold_path = dataset.fetch_data('QL_BOLD_regiontimecourse.mat') + connectivity_path = dataset.fetch_data('QL_20120814_Connectivity.zip') def 
setup_method(self): self.test_user = TestFactory.create_user('Mat_Timeseries_User') diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py index 97413f1124..ef8da6a888 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py @@ -31,8 +31,6 @@ import os import numpy -#import tvb_data -#import tvb_data.nifti as demo_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.adapters.datatypes.db.region_mapping import RegionVolumeMappingIndex @@ -52,18 +50,13 @@ class TestNIFTIImporter(BaseTestCase): Unit-tests for NIFTI importer. """ - #NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii') - #GZ_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'minimal.nii.gz') - #TIMESERIES_NII_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'time_series_152.nii.gz') - #WRONG_NII_FILE = os.path.abspath(__file__) - #TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'volume_mapping/mapping_FS_76.txt') - tvb_data = TVBZenodoDataset() - NII_FILE = tvb_data.fetch_data('minimal.nii') - GZ_NII_FILE = tvb_data.fetch_data('minimal.nii.gz') - TIMESERIES_NII_FILE = tvb_data.fetch_data('time_series_152.nii.gz') + dataset = TVBZenodoDataset() + NII_FILE = dataset.fetch_data('minimal.nii') + GZ_NII_FILE = dataset.fetch_data('minimal.nii.gz') + TIMESERIES_NII_FILE = dataset.fetch_data('time_series_152.nii.gz') WRONG_NII_FILE = os.path.abspath(__file__) #? - TXT_FILE = tvb_data.fetch_data('mapping_FS_76.txt') + TXT_FILE = dataset.fetch_data('mapping_FS_76.txt') DEFAULT_ORIGIN = [[0.0, 0.0, 0.0]] UNKNOWN_STR = "unknown" @@ -152,7 +145,6 @@ def test_import_region_mapping(self): """ This method tests import of a NIFTI file compressed in GZ format. 
""" - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") to_link_conn = TestFactory.get_entity(self.test_project, ConnectivityIndex) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/obj_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/obj_importer_test.py index bea8679714..45a3792b87 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/obj_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/obj_importer_test.py @@ -29,7 +29,6 @@ """ import os -#import tvb_data.obj from tvb.datasets import TVBZenodoDataset from tvb.core.neocom import h5 from tvb.datatypes.surfaces import SurfaceTypesEnum @@ -42,11 +41,9 @@ class TestObjSurfaceImporter(BaseTestCase): Unit-tests for Obj Surface importer. """ - #torus = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'test_torus.obj') - #face = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj') - tvb_data = TVBZenodoDataset() - torus = tvb_data.fetch_data('test_torus.obj') - face = tvb_data.fetch_data('face_surface.obj') + dataset = TVBZenodoDataset() + torus = dataset.fetch_data('test_torus.obj') + face = dataset.fetch_data('face_surface.obj') def setup_method(self): diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py index 73504c9eb6..1f413def77 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/projection_matrix_importer_test.py @@ -31,9 +31,6 @@ import os -#import tvb_data.projectionMatrix as dataset -#import tvb_data.sensors -#import tvb_data.surfaceData from tvb.datasets import 
TVBZenodoDataset from tvb.adapters.datatypes.db.projections import ProjectionMatrixIndex from tvb.core.services.exceptions import OperationException @@ -47,6 +44,7 @@ class TestProjectionMatrix(BaseTestCase): """ Unit-tests for CFF-importer. """ + dataset = TVBZenodoDataset() def setup_method(self): """ @@ -55,13 +53,11 @@ def setup_method(self): self.test_user = TestFactory.create_user("UserPM") self.test_project = TestFactory.create_project(self.test_user) - #zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'eeg_brainstorm_65.txt') - zip_path = TVBZenodoDataset().fetch_data('eeg_brainstorm_65.txt') + zip_path = self.dataset.fetch_data('eeg_brainstorm_65.txt') self.sensors = TestFactory.import_sensors(self.test_user, self.test_project, zip_path, SensorTypesEnum.TYPE_EEG) - #zip_path = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') + zip_path = self.dataset.fetch_data('cortex_16384.zip') self.surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) @@ -75,8 +71,7 @@ def test_wrong_shape(self): """ Verifies that importing a different shape throws exception """ - #file_path = os.path.join(os.path.abspath(os.path.dirname(dataset.__file__)), 'projection_eeg_62_surface_16k.mat') - file_path = TVBZenodoDataset().fetch_data('projection_eeg_62_surface_16k.mat') + file_path = self.dataset.fetch_data('projection_eeg_62_surface_16k.mat') try: TestFactory.import_projection_matrix(self.test_user, self.test_project, file_path, self.sensors.gid, @@ -90,8 +85,7 @@ def test_happy_flow_surface_import(self): Verifies the happy flow for importing a surface. 
""" dt_count_before = TestFactory.get_entity_count(self.test_project, ProjectionMatrixIndex) - file_path = os.path.join(os.path.abspath(os.path.dirname(dataset.__file__)), - 'projection_eeg_65_surface_16k.npy') + file_path = self.dataset.fetch_data('projection_eeg_65_surface_16k.npy') TestFactory.import_projection_matrix(self.test_user, self.test_project, file_path, self.sensors.gid, self.surface.gid, False) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/region_mapping_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/region_mapping_importer_test.py index 50d54feaa9..c27d7152d3 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/region_mapping_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/region_mapping_importer_test.py @@ -30,8 +30,6 @@ import os import tvb.tests.framework.adapters.uploaders.test_data as test_data -#import tvb_data.regionMapping as demo_data -#import tvb_data.surfaceData from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.surface import SurfaceIndex from tvb.basic.neotraits.ex import TraitValueError @@ -49,15 +47,12 @@ class TestRegionMappingImporter(BaseTestCase): """ Unit-tests for RegionMapping importer. 
""" - tvb_data = TVBZenodoDataset() + dataset = TVBZenodoDataset() - #TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.txt') - #ZIP_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.zip') - #BZ2_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.bz2') - TXT_FILE = tvb_data.fetch_data('regionMapping_16k_76.txt') - ZIP_FILE = tvb_data.fetch_data('regionMapping_16k_76.zip') - BZ2_FILE = tvb_data.fetch_data('regionMapping_16k_76.bz2') + TXT_FILE = dataset.fetch_data('regionMapping_16k_76.txt') + ZIP_FILE = dataset.fetch_data('regionMapping_16k_76.zip') + BZ2_FILE = dataset.fetch_data('regionMapping_16k_76.bz2') # Wrong data WRONG_FILE_1 = os.path.join(os.path.dirname(test_data.__file__), 'region_mapping_wrong_1.txt') @@ -73,12 +68,12 @@ def setup_method(self): self.test_user = TestFactory.create_user("UserRM") self.test_project = TestFactory.create_project(self.test_user) - zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') + zip_path = self.dataset.fetch_data("connectivity_76.zip") self.connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") field = FilterChain.datatype + '.surface_type' filters = FilterChain('', [field], [SurfaceTypesEnum.CORTICAL_SURFACE.value], ['==']) - cortex = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') + cortex = self.dataset.fetch_data('cortex_16384.zip') TestFactory.import_surface_zip(self.test_user, self.test_project, cortex, SurfaceTypesEnum.CORTICAL_SURFACE) self.surface = TestFactory.get_entity(self.test_project, SurfaceIndex, filters) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/sensors_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/sensors_importer_test.py index e0a8ede2b8..407484ed49 100644 --- 
a/tvb_framework/tvb/tests/framework/adapters/uploaders/sensors_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/sensors_importer_test.py @@ -30,7 +30,6 @@ import os -#import tvb_data.sensors as demo_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.uploaders.sensors_importer import SensorsImporter, SensorsImporterModel from tvb.core.neocom import h5 @@ -45,11 +44,9 @@ class TestSensorsImporter(BaseTestCase): """ Unit-tests for Sensors importer. """ - #EEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'eeg_unitvector_62.txt.bz2') - #MEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'meg_151.txt.bz2') - tvb_data = TVBZenodoDataset() - EEG_FILE = tvb_data.fetch_data('eeg_unitvector_62.txt.bz2') - MEG_FILE = tvb_data.fetch_data('meg_151.txt.bz2') + dataset = TVBZenodoDataset() + EEG_FILE = dataset.fetch_data('eeg_unitvector_62.txt.bz2') + MEG_FILE = dataset.fetch_data('meg_151.txt.bz2') def setup_method(self): """ diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/zip_surface_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/zip_surface_importer_test.py index 387398b1bc..65e1152efa 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/zip_surface_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/zip_surface_importer_test.py @@ -30,7 +30,6 @@ import os -#import tvb_data.surfaceData from tvb.datasets import TVBZenodoDataset from tvb.datatypes.surfaces import SurfaceTypesEnum from tvb.tests.framework.core.base_testcase import BaseTestCase @@ -42,7 +41,6 @@ class TestZIPSurfaceImporter(BaseTestCase): Unit-tests for Zip Surface importer. 
""" - #surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'outer_skull_4096.zip') surf_skull = TVBZenodoDataset().fetch_data('outer_skull_4096.zip') def setup_method(self): diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py b/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py index 032c4117af..c697ac2011 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/brainviewer_test.py @@ -29,8 +29,6 @@ """ import os -#import tvb_data.surfaceData -#import tvb_data.regionMapping from tvb.datasets import TVBZenodoDataset from tvb.core.neocom import h5 @@ -51,11 +49,9 @@ class TestBrainViewer(TransactionalTestCase): EXPECTED_EXTRA_KEYS = ['urlMeasurePointsLabels', 'urlMeasurePoints', 'pageSize', 'shellObject', 'extended_view', 'legendLabels', 'labelsStateVar', 'labelsModes', 'title'] - #cortex = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - #region_mapping_path = os.path.join(os.path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') - tvb_data = TVBZenodoDataset() - cortex = tvb_data.fetch_data('cortex_16384.zip') - region_mapping_path = tvb_data.fetch_data('regionMapping_16k_76.txt') + dataset = TVBZenodoDataset() + cortex = dataset.fetch_data('cortex_16384.zip') + region_mapping_path = dataset.fetch_data('regionMapping_16k_76.txt') def transactional_setup_method(self): """ @@ -66,8 +62,7 @@ def transactional_setup_method(self): self.test_user = TestFactory.create_user('Brain_Viewer_User') self.test_project = TestFactory.create_project(self.test_user, 'Brain_Viewer_Project') - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') - zip_path = self.tvb_data.fetch_data('connectivity_96.zip') + zip_path = self.dataset.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, 
self.test_project, zip_path, "John") connectivity_idx = TestFactory.get_entity(self.test_project, ConnectivityIndex) assert connectivity_idx is not None diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/connectivityviewer_test.py b/tvb_framework/tvb/tests/framework/adapters/visualizers/connectivityviewer_test.py index f297bc9762..502289adf5 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/connectivityviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/connectivityviewer_test.py @@ -28,7 +28,6 @@ """ import os -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex @@ -52,7 +51,6 @@ def transactional_setup_method(self): self.test_user = TestFactory.create_user("UserCVV") self.test_project = TestFactory.create_project(self.test_user) - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) self.connectivity_index = TestFactory.get_entity(self.test_project, ConnectivityIndex) diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/sensorsviewer_test.py b/tvb_framework/tvb/tests/framework/adapters/visualizers/sensorsviewer_test.py index 29307b091b..39bff3dac1 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/sensorsviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/sensorsviewer_test.py @@ -29,8 +29,6 @@ """ import os -#import tvb_data.obj -#import tvb_data.sensors from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.sensors import SensorsIndex from tvb.adapters.datatypes.db.surface import SurfaceIndex @@ -47,6 +45,7 @@ class TestSensorViewers(TransactionalTestCase): """ Unit-tests for Sensors viewers. 
""" + dataset = TVBZenodoDataset() EXPECTED_KEYS_INTERNAL = {'urlMeasurePoints': None, 'urlMeasurePointsLabels': None, 'noOfMeasurePoints': 103, 'minMeasure': 0, 'maxMeasure': 103, 'urlMeasure': None, 'shellObject': None} @@ -72,8 +71,7 @@ def test_launch_eeg(self): Check that all required keys are present in output from EegSensorViewer launch. """ # Import Sensors - #zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'eeg_unitvector_62.txt.bz2') - zip_path = TVBZenodoDataset().fetch_data('eeg_unitvector_62.txt.bz2') + zip_path = self.dataset.fetch_data('eeg_unitvector_62.txt.bz2') TestFactory.import_sensors(self.test_user, self.test_project, zip_path, SensorTypesEnum.TYPE_EEG) field = FilterChain.datatype + '.sensors_type' @@ -81,8 +79,7 @@ def test_launch_eeg(self): sensors_index = TestFactory.get_entity(self.test_project, SensorsIndex, filters) # Import EEGCap - #cap_path = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'eeg_cap.obj') - cap_path = TVBZenodoDataset().fetch_data('eeg_cap.obj') + cap_path = self.dataset.fetch_data('eeg_cap.obj') TestFactory.import_surface_obj(self.test_user, self.test_project, cap_path, SurfaceTypesEnum.EEG_CAP_SURFACE) field = FilterChain.datatype + '.surface_type' filters = FilterChain('', [field], [SurfaceTypesEnum.EEG_CAP_SURFACE.value], ['==']) @@ -109,8 +106,7 @@ def test_launch_meg(self): Check that all required keys are present in output from MEGSensorViewer launch. """ - #zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'meg_151.txt.bz2') - zip_path = TVBZenodoDataset().fetch_data('meg_151.txt.bz2') + zip_path = self.dataset.fetch_data('meg_151.txt.bz2') TestFactory.import_sensors(self.test_user, self.test_project, zip_path, SensorTypesEnum.TYPE_MEG) @@ -130,8 +126,7 @@ def test_launch_internal(self): """ Check that all required keys are present in output from InternalSensorViewer launch. 
""" - #zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'seeg_39.txt.bz2') - zip_path = TVBZenodoDataset().fetch_data('seeg_39.txt.bz2') + zip_path = self.dataset.fetch_data('seeg_39.txt.bz2') sensors_index = TestFactory.import_sensors(self.test_user, self.test_project, zip_path, SensorTypesEnum.TYPE_INTERNAL) viewer = SensorsViewer() diff --git a/tvb_framework/tvb/tests/framework/adapters/visualizers/surfaceviewer_test.py b/tvb_framework/tvb/tests/framework/adapters/visualizers/surfaceviewer_test.py index 422ff64896..c1ff9220bf 100644 --- a/tvb_framework/tvb/tests/framework/adapters/visualizers/surfaceviewer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/visualizers/surfaceviewer_test.py @@ -29,8 +29,6 @@ """ import os -#import tvb_data.surfaceData -#import tvb_data.regionMapping as demo_data from tvb.datasets import TVBZenodoDataset from uuid import UUID @@ -57,23 +55,21 @@ def transactional_setup_method(self): creates a test user, a test project, a connectivity and a surface; imports a CFF data-set """ + dataset = TVBZenodoDataset() test_user = TestFactory.create_user('Surface_Viewer_User') self.test_project = TestFactory.create_project(test_user, 'Surface_Viewer_Project') - #surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - surf_skull = TVBZenodoDataset().fetch_data('cortex_16384.zip') + surf_skull = dataset.fetch_data('cortex_16384.zip') self.surface = TestFactory.import_surface_zip(test_user, self.test_project, surf_skull, SurfaceTypesEnum.CORTICAL_SURFACE) assert self.surface is not None - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') + zip_path = dataset.fetch_data('connectivity_76.zip') TestFactory.import_zip_connectivity(test_user, self.test_project, zip_path, "John") connectivity_index = TestFactory.get_entity(self.test_project, ConnectivityIndex) assert 
connectivity_index is not None - #TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.txt') - TXT_FILE = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt') + TXT_FILE = dataset.fetch_data('regionMapping_16k_76.txt') self.region_mapping = TestFactory.import_region_mapping(test_user, self.test_project, TXT_FILE, self.surface.gid, connectivity_index.gid) assert self.region_mapping is not None diff --git a/tvb_framework/tvb/tests/framework/core/factory.py b/tvb_framework/tvb/tests/framework/core/factory.py index c0a9327604..0fa6e7a171 100644 --- a/tvb_framework/tvb/tests/framework/core/factory.py +++ b/tvb_framework/tvb/tests/framework/core/factory.py @@ -37,7 +37,6 @@ import os import random import uuid -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.adapters.datatypes.db.local_connectivity import LocalConnectivityIndex @@ -77,7 +76,7 @@ class TestFactory(object): """ Expose mostly static methods for creating different entities used in tests. 
""" - + @staticmethod def get_entity(project, expected_data, filters=None): """ @@ -220,7 +219,6 @@ def import_default_project(admin_user=None): if not admin_user: admin_user = TestFactory.create_user() - #project_path = os.path.join(os.path.dirname(tvb_data.__file__), 'Default_Project.zip') project_path = TVBZenodoDataset().fetch_data('Default_Project.zip') import_service = ImportService() import_service.import_project_structure(project_path, admin_user.id) @@ -314,7 +312,6 @@ def import_zip_connectivity(user, project, zip_path=None, subject=DataTypeMetaDa if zip_path is None: zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') count = dao.count_datatypes(project.id, ConnectivityIndex) view_model = ZIPConnectivityImporterModel() diff --git a/tvb_framework/tvb/tests/framework/core/neotraits/forms_test.py b/tvb_framework/tvb/tests/framework/core/neotraits/forms_test.py index 7add7679e5..a9bef543e3 100644 --- a/tvb_framework/tvb/tests/framework/core/neotraits/forms_test.py +++ b/tvb_framework/tvb/tests/framework/core/neotraits/forms_test.py @@ -27,7 +27,6 @@ import uuid import numpy import pytest -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.basic.neotraits.api import Attr, Float, Int, NArray, List diff --git a/tvb_framework/tvb/tests/framework/core/services/import_service_test.py b/tvb_framework/tvb/tests/framework/core/services/import_service_test.py index 37795c621d..4315b787ee 100644 --- a/tvb_framework/tvb/tests/framework/core/services/import_service_test.py +++ b/tvb_framework/tvb/tests/framework/core/services/import_service_test.py @@ -31,7 +31,6 @@ import os import pytest -#import tvb_data from tvb.datasets import TVBZenodoDataset from PIL import Image from time import sleep @@ -59,7 +58,7 @@ class TestImportService(BaseTestCase): """ This class contains tests for the tvb.core.services.import_service module. 
""" - + dataset = TVBZenodoDataset() def setup_method(self): """ Reset the database before each test. @@ -89,8 +88,7 @@ def test_import_export(self, user_factory, project_factory, value_wrapper_factor """ test_user = user_factory() test_project = project_factory(test_user, "TestImportExport", "test_desc") - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(test_user, test_project, zip_path) value_wrapper = value_wrapper_factory(test_user, test_project) ProjectService.set_datatype_visibility(value_wrapper.gid, False) @@ -140,8 +138,7 @@ def test_import_export_existing(self, user_factory, project_factory): """ test_user = user_factory() test_project = project_factory(test_user, "TestImportExport2") - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(test_user, test_project, zip_path) count_operations = dao.get_filtered_operations(test_project.id, None, is_count=True) @@ -185,8 +182,7 @@ def test_export_import_figures(self, user_factory, project_factory): # Prepare data user = user_factory() project = project_factory(user, "TestImportExportFigures") - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'paupau.zip') - zip_path = TVBZenodoDataset().fetch_data('paupau.zip') + zip_path = self.dataset.fetch_data('paupau.zip') TestFactory.import_zip_connectivity(user, project, zip_path) figure_service = FigureService() diff --git a/tvb_framework/tvb/tests/framework/core/services/links_test.py b/tvb_framework/tvb/tests/framework/core/services/links_test.py index 3e6ff86f3f..5c317f2520 100644 --- 
a/tvb_framework/tvb/tests/framework/core/services/links_test.py +++ b/tvb_framework/tvb/tests/framework/core/services/links_test.py @@ -32,7 +32,6 @@ """ import pytest import os -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.adapters.datatypes.db.sensors import SensorsIndex @@ -50,6 +49,8 @@ class _BaseLinksTest(TransactionalTestCase): + dataset = TVBZenodoDataset() + @pytest.fixture() def initialize_two_projects(self, dummy_datatype_index_factory, project_factory, user_factory): """ @@ -67,12 +68,9 @@ def initialize_two_projects(self, dummy_datatype_index_factory, project_factory, src_user = user_factory(username="Links Test") self.src_usr_id = src_user.id self.src_project = project_factory(src_user, "Src_Project") - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'paupau.zip') - tvb_data = TVBZenodoDataset() - zip_path = tvb_data.fetch_data("paupau.zip") + zip_path = self.dataset.fetch_data("paupau.zip") self.red_datatype = TestFactory.import_zip_connectivity(src_user, self.src_project, zip_path, "John") - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'sensors', 'eeg_unitvector_62.txt.bz2') - zip_path = tvb_data.fetch_data('eeg_unitvector_62.txt.bz2') + zip_path = self.dataset.fetch_data('eeg_unitvector_62.txt.bz2') self.blue_datatype = TestFactory.import_sensors(src_user, self.src_project, zip_path, SensorTypesEnum.TYPE_EEG) assert 1 == self.red_datatypes_in(self.src_project.id) @@ -211,8 +209,7 @@ def build(): Project dest will have the derived VW and links """ # add a connectivity to src project and link it to dest project - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') + zip_path = self.dataset.fetch_data('connectivity_96.zip') conn = TestFactory.import_zip_connectivity(self.dst_user, self.src_project, 
zip_path, "John") self.algorithm_service.create_link(conn.id, self.dest_project.id) diff --git a/tvb_framework/tvb/tests/framework/core/services/project_service_test.py b/tvb_framework/tvb/tests/framework/core/services/project_service_test.py index aae5609378..324c91beb0 100644 --- a/tvb_framework/tvb/tests/framework/core/services/project_service_test.py +++ b/tvb_framework/tvb/tests/framework/core/services/project_service_test.py @@ -33,7 +33,6 @@ import pytest import sqlalchemy -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.basic.profile import TvbProfile from tvb.core.entities.model import model_datatype, model_project, model_operation @@ -332,14 +331,12 @@ def test_empty_project_has_zero_disk_size(self): def test_project_disk_size(self): project1 = TestFactory.create_project(self.test_user, 'test_proj1') - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') - tvb_data = TVBZenodoDataset() - zip_path = tvb_data.fetch_data('connectivity_66.zip') + dataset = TVBZenodoDataset() + zip_path = dataset.fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, project1, zip_path, 'testSubject') project2 = TestFactory.create_project(self.test_user, 'test_proj2') - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip') - zip_path = tvb_data.fetch_data('connectivity_76.zip') + zip_path = dataset.fetch_data('connectivity_76.zip') TestFactory.import_zip_connectivity(self.test_user, project2, zip_path, 'testSubject') projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0] diff --git a/tvb_framework/tvb/tests/framework/core/services/serialization_manager_test.py b/tvb_framework/tvb/tests/framework/core/services/serialization_manager_test.py index 19b4191113..84611b3637 100644 --- a/tvb_framework/tvb/tests/framework/core/services/serialization_manager_test.py +++ 
b/tvb_framework/tvb/tests/framework/core/services/serialization_manager_test.py @@ -29,7 +29,6 @@ """ from os import path -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex @@ -46,7 +45,6 @@ class TestSerializationManager(TransactionalTestCase): def transactional_setup_method(self): self.test_user = TestFactory.create_user(username="test_user") self.test_project = TestFactory.create_project(self.test_user, "Test") - #zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip') zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") self.connectivity = TestFactory.get_entity(self.test_project, ConnectivityIndex) diff --git a/tvb_framework/tvb/tests/framework/interfaces/rest/datatype_resource_test.py b/tvb_framework/tvb/tests/framework/interfaces/rest/datatype_resource_test.py index bb60be982f..5b1563c494 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/rest/datatype_resource_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/rest/datatype_resource_test.py @@ -27,7 +27,6 @@ import os import flask import pytest -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.datatypes.db.connectivity import ConnectivityIndex from tvb.interfaces.rest.commons.exceptions import InvalidIdentifierException @@ -40,6 +39,8 @@ class TestDatatypeResource(RestResourceTest): + dataset = TVBZenodoDataset() + def transactional_setup_method(self): self.test_user = TestFactory.create_user('Rest_User') self.test_project = TestFactory.create_project(self.test_user, 'Rest_Project', users=[self.test_user.id]) @@ -54,8 +55,7 @@ def test_server_retrieve_datatype_inexistent_gid(self, mocker): def test_server_retrieve_datatype(self, mocker): self._mock_user(mocker) - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 
'connectivity_96.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') + zip_path = self.dataset.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) datatypes_in_project = self.get_data_in_project_resource.get(project_gid=self.test_project.gid) @@ -81,8 +81,7 @@ def send_file_dummy(path, as_attachment, attachment_filename): def test_server_get_operations_for_datatype(self, mocker): self._mock_user(mocker) - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') + zip_path = self.dataset.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) datatypes_in_project = self.get_data_in_project_resource.get(project_gid=self.test_project.gid) diff --git a/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py b/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py index ffb92d8413..4ea3a982a0 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py @@ -29,7 +29,6 @@ from uuid import UUID import flask import pytest -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.adapters.analyzers.fourier_adapter import FFTAdapterModel from tvb.basic.exceptions import TVBException @@ -49,6 +48,8 @@ class TestOperationResource(RestResourceTest): + dataset = TVBZenodoDataset() + def transactional_setup_method(self): self.test_user = TestFactory.create_user('Rest_User') self.test_project = TestFactory.create_project(self.test_user, 'Rest_Project', users=[self.test_user.id]) @@ -65,8 +66,7 @@ def test_server_get_operation_status_inexistent_gid(self, mocker): def test_server_get_operation_status(self, mocker): self._mock_user(mocker) - #zip_path = 
os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') + zip_path = self.dataset.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) request_mock = mocker.patch.object(flask, 'request', spec={}) @@ -85,8 +85,7 @@ def test_server_get_operation_results_inexistent_gid(self, mocker): def test_server_get_operation_results(self, mocker): self._mock_user(mocker) - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') + zip_path = self.dataset.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) request_mock = mocker.patch.object(flask, 'request', spec={}) @@ -100,10 +99,11 @@ def test_server_get_operation_results(self, mocker): def test_server_get_operation_results_failed_operation(self, mocker): self._mock_user(mocker) - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_90.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_90.zip') - with pytest.raises(TVBException): - TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) + with pytest.raises(KeyError): + zip_path = self.dataset.fetch_data('connectivity_90.zip') + zip_path = self.dataset.fetch_data('connectivity_96.zip') + + TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) request_mock = mocker.patch.object(flask, 'request', spec={}) request_mock.args = {Strings.PAGE_NUMBER: '1'} diff --git a/tvb_framework/tvb/tests/framework/interfaces/rest/project_resource_test.py b/tvb_framework/tvb/tests/framework/interfaces/rest/project_resource_test.py index 0fc8a18144..b6da293795 100644 ---
a/tvb_framework/tvb/tests/framework/interfaces/rest/project_resource_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/rest/project_resource_test.py @@ -27,7 +27,6 @@ import os import flask import pytest -#import tvb_data from tvb.datasets import TVBZenodoDataset from tvb.interfaces.rest.commons.exceptions import InvalidIdentifierException @@ -46,9 +45,8 @@ def transactional_setup_method(self): self.test_user = TestFactory.create_user('Rest_User') self.test_project_without_data = TestFactory.create_project(self.test_user, 'Rest_Project', users=[self.test_user.id]) self.test_project_with_data = TestFactory.create_project(self.test_user, 'Rest_Project2', users=[self.test_user.id]) - #zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') - tvb_data = TVBZenodoDataset() - zip_path = tvb_data.fetch_data('connectivity_96.zip') + dataset = TVBZenodoDataset() + zip_path = dataset.fetch_data('connectivity_96.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project_with_data, zip_path) def test_server_get_data_in_project_inexistent_gid(self, mocker): diff --git a/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py b/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py index 82df6fd227..1158476fd6 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/web/controllers/simulator_controller_test.py @@ -25,11 +25,6 @@ # import numpy -#import tvb_data.connectivity -#import tvb_data.surfaceData -#import tvb_data.sensors -#import tvb_data.regionMapping -#import tvb_data.projectionMatrix from tvb.datasets import TVBZenodoDataset from os import path @@ -65,7 +60,7 @@ class TestSimulationController(BaseTransactionalControllerTest): - + dataset = TVBZenodoDataset() def transactional_setup_method(self): self.simulator_controller = 
SimulatorController() self.test_user = TestFactory.create_user('SimulationController_User') @@ -92,8 +87,7 @@ def test_index(self): assert not result_dict['errors'], 'Some errors were encountered!' def test_set_connectivity(self): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") self.sess_mock['connectivity'] = connectivity.gid @@ -129,8 +123,7 @@ def test_set_coupling_params(self): assert self.session_stored_simulator.coupling.b[0] == [0.0], "b value was not set correctly." def test_set_surface(self): - #zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') + zip_path = self.dataset.fetch_data('cortex_16384.zip') TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) surface = TestFactory.get_entity(self.test_project, SurfaceIndex) @@ -151,17 +144,14 @@ def test_set_surface_none(self): assert self.session_stored_simulator.surface is None, "Surface should not be set." 
def test_set_cortex_without_local_connectivity(self): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') + zip_path = self.dataset.fetch_data('connectivity_76.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") - #zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') + zip_path = self.dataset.fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') - text_file = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt') + text_file = self.dataset.fetch_data('regionMapping_16k_76.txt') region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, text_file, surface.gid, connectivity.gid) @@ -183,17 +173,14 @@ def test_set_cortex_without_local_connectivity(self): "coupling_strength was not set correctly." 
def test_set_cortex_with_local_connectivity(self, local_connectivity_index_factory): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') + zip_path = self.dataset.fetch_data('connectivity_76.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") - #zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') + zip_path = self.dataset.fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') - text_file = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt' ) + text_file = self.dataset.fetch_data('regionMapping_16k_76.txt' ) region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, text_file, surface.gid, connectivity.gid) @@ -224,8 +211,7 @@ def test_set_stimulus_none(self): assert self.session_stored_simulator.stimulus is None, "Stimulus should not be set." def test_set_stimulus(self): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') connectivity_index = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) weight_array = numpy.zeros(connectivity_index.number_of_regions) @@ -454,34 +440,28 @@ def test_set_monitor_params(self): assert not rendering_rules['renderer'].include_next_button, 'Next button should not be displayed!' 
def set_region_mapping(self): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_76.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') + zip_path = self.dataset.fetch_data('connectivity_76.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") - #zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - zip_path = TVBZenodoDataset().fetch_data('cortex_16384.zip') + zip_path = self.dataset.fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt') - text_file = TVBZenodoDataset().fetch_data('regionMapping_16k_76.txt') + text_file = self.dataset.fetch_data('regionMapping_16k_76.txt') region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, text_file, surface.gid, connectivity.gid) return region_mapping def set_eeg(self): - #eeg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'eeg_unitvector_62.txt') - eeg_sensors_file = TVBZenodoDataset().fetch_data('eeg_unitvector_62.txt') + eeg_sensors_file = self.dataset.fetch_data('eeg_unitvector_62.txt.bz2') eeg_sensors = TestFactory.import_sensors(self.test_user, self.test_project, eeg_sensors_file, SensorTypesEnum.TYPE_EEG) - #surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - surface_file = TVBZenodoDataset().fetch_data('cortex_16384.zip') + surface_file = self.dataset.fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #eeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), 'projection_eeg_62_surface_16k.mat') - eeg_projection_file = 
TVBZenodoDataset().fetch_data('projection_eeg_62_surface_16k.mat') + eeg_projection_file = self.dataset.fetch_data('projection_eeg_62_surface_16k.mat') eeg_projection = TestFactory.import_projection_matrix(self.test_user, self.test_project, eeg_projection_file, eeg_sensors.gid, surface.gid) return eeg_sensors, eeg_projection @@ -518,18 +498,15 @@ def test_set_eeg_monitor_params(self): "Projection wasn't stored correctly." def set_meg(self): - #meg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'meg_brainstorm_276.txt') - meg_sensors_file = TVBZenodoDataset().fetch_data('meg_brainstorm_276.txt') + meg_sensors_file = self.dataset.fetch_data('meg_brainstorm_276.txt') meg_sensors = TestFactory.import_sensors(self.test_user, self.test_project, meg_sensors_file, SensorTypesEnum.TYPE_MEG) - #surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - surface_file = TVBZenodoDataset().fetch_data('cortex_16384.zip') + surface_file = self.dataset.fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #meg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__),'projection_meg_276_surface_16k.npy') - meg_projection_file = TVBZenodoDataset().fetch_data('projection_meg_276_surface_16k.npy') + meg_projection_file = self.dataset.fetch_data('projection_meg_276_surface_16k.npy') meg_projection = TestFactory.import_projection_matrix(self.test_user, self.test_project, meg_projection_file, meg_sensors.gid, surface.gid) return meg_sensors, meg_projection @@ -567,18 +544,15 @@ def test_set_meg_monitor_params(self): "Projection wasn't stored correctly." 
def set_seeg(self): - #seeg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'seeg_588.txt') - seeg_sensors_file = TVBZenodoDataset().fetch_data('seeg_588.txt') + seeg_sensors_file = self.dataset.fetch_data('seeg_588.txt') seeg_sensors = TestFactory.import_sensors(self.test_user, self.test_project, seeg_sensors_file, SensorTypesEnum.TYPE_INTERNAL) - #surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') - surface_file = TVBZenodoDataset().fetch_data('cortex_16384.zip') + surface_file = self.dataset.fetch_data('cortex_16384.zip') surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, SurfaceTypesEnum.CORTICAL_SURFACE, True) - #seeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__), 'projection_seeg_588_surface_16k.npy') - seeg_projection_file = TVBZenodoDataset().fetch_data('projection_seeg_588_surface_16k.npy') + seeg_projection_file = self.dataset.fetch_data('projection_seeg_588_surface_16k.npy') seeg_projection = TestFactory.import_projection_matrix(self.test_user, self.test_project, seeg_projection_file, seeg_sensors.gid, surface.gid) return seeg_sensors, seeg_projection @@ -717,8 +691,7 @@ def test_load_burst_history(self): assert len(burst_parameters['burst_list']) == 3, "The burst configurations where not stored." def test_reset_simulator_configuration(self): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") self.sess_mock['connectivity'] = connectivity.gid @@ -774,8 +747,7 @@ def test_rename_burst(self): assert dao.get_bursts_for_project(self.test_project.id)[0].name == new_name, "Name wasn't actually changed." 
def test_copy_simulator_configuration(self): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project) @@ -809,8 +781,7 @@ def test_copy_simulator_configuration(self): assert rendering_rules['renderer'].disable_fields, 'Fragments should be read-only!' def test_load_burst(self): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project) @@ -860,8 +831,7 @@ def test_launch_simulation(self): assert burst_config.status == 'running', 'Simulation launching has failed!' 
def test_launch_branch_simulation(self): - #zip_path = path.join(path.dirname(tvb_data.connectivity.__file__), 'connectivity_66.zip') - zip_path = TVBZenodoDataset().fetch_data('connectivity_66.zip') + zip_path = self.dataset.fetch_data('connectivity_66.zip') connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") self.sess_mock['input_simulation_name_id'] = 'HappySimulation' diff --git a/tvb_library/setup.py b/tvb_library/setup.py index 1e971ab73e..c7745984a3 100644 --- a/tvb_library/setup.py +++ b/tvb_library/setup.py @@ -40,7 +40,7 @@ LIBRARY_TEAM = "Marmaduke Woodman, Jan Fousek, Stuart Knock, Paula Sanz Leon, Viktor Jirsa" LIBRARY_REQUIRED_PACKAGES = ["autopep8", "Deprecated", "docutils", "ipywidgets", "lxml", "mako>=1.1.4", "matplotlib", - "networkx", "numba", "numexpr", "numpy", "pooch","pylems", "scipy", "six"] + "networkx", "numba", "numexpr", "numpy", "pooch", "pylems", "scipy", "six"] LIBRARY_REQUIRED_EXTRA = ["h5py", "pytest", "pytest-benchmark", "pytest-xdist", "tvb-gdist", "tvb-data"] diff --git a/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py b/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py similarity index 99% rename from tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py rename to tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py index c03f9256c8..e275410f81 100644 --- a/tvb_library/tvb/tests/library/datasets/TVBZenodoDataset_test.py +++ b/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py @@ -33,13 +33,6 @@ from pathlib import Path from tvb.tests.library.base_testcase import BaseTestCase - - - - - - - class Test_TVBZenodoDataset(BaseTestCase): From c410999756624d8562bde3f7f9ef27a842b3aa54 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Thu, 13 Jul 2023 00:40:29 +0530 Subject: [PATCH 66/84] fix test; reuse tvbzenododataset instance --- .../framework/adapters/uploaders/nifti_importer_test.py | 2 +- 
.../framework/interfaces/rest/operation_resource_test.py | 7 +++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py index ef8da6a888..e494ce8bbd 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/nifti_importer_test.py @@ -145,7 +145,7 @@ def test_import_region_mapping(self): """ This method tests import of a NIFTI file compressed in GZ format. """ - zip_path = TVBZenodoDataset().fetch_data('connectivity_76.zip') + zip_path = self.dataset.fetch_data('connectivity_76.zip') TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John") to_link_conn = TestFactory.get_entity(self.test_project, ConnectivityIndex) diff --git a/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py b/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py index 4ea3a982a0..5950e51a0b 100644 --- a/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py +++ b/tvb_framework/tvb/tests/framework/interfaces/rest/operation_resource_test.py @@ -102,10 +102,9 @@ def test_server_get_operation_results_failed_operation(self, mocker): with pytest.raises(KeyError): zip_path = self.dataset.fetch_data('connectivity_90.zip') zip_path = self.dataset.fetch_data('connectivity_96.zip') - zip_path.replace("connectivity_96", "connectivity_90") - print(zip_path) - - TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) + zip_path = zip_path.replace("connectivity_96", "connectivity_90") + with pytest.raises(TVBException): + TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path) request_mock = mocker.patch.object(flask, 'request', spec={}) request_mock.args = {Strings.PAGE_NUMBER: '1'} From 
50e8125d19fcd9a8335a0b77892f9fe392a5768c Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sun, 16 Jul 2023 12:30:37 +0530 Subject: [PATCH 67/84] fix the notebooks --- tvb_documentation/demos/encrypt_data.ipynb | 14 ++++--- ...ting_with_rest_api_launch_operations.ipynb | 19 ++++++---- .../interacting_with_the_framework.ipynb | 4 +- .../demos/simulate_for_mouse.ipynb | 38 +++++++++++++++---- 4 files changed, 51 insertions(+), 24 deletions(-) diff --git a/tvb_documentation/demos/encrypt_data.ipynb b/tvb_documentation/demos/encrypt_data.ipynb index f40bdab160..051421a3fc 100644 --- a/tvb_documentation/demos/encrypt_data.ipynb +++ b/tvb_documentation/demos/encrypt_data.ipynb @@ -28,7 +28,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": { "colab": {}, "colab_type": "code", @@ -113,10 +113,12 @@ "outputs": [], "source": [ "# EDIT paths to data files here inside the list\n", - "import tvb_data, os, tvb\n", - "from tvb.basic.readers import try_get_absolute_path\n", + "import os, tvb\n", + "from tvb.datasets import TVBZenodoDataset\n", + "dataset = TVBZenodoDataset()\n", + "\n", "\n", - "paths_to_files = [try_get_absolute_path(\"tvb_data.connectivity\", \"connectivity_76.zip\")]\n", + "paths_to_files = [dataset.fetch_data(\"connectivity_76.zip\")]\n", "import_export_encryption_handler = StorageInterface.get_import_export_encryption_handler()\n", "\n", "buffer_size = TvbProfile.current.hpc.CRYPT_BUFFER_SIZE\n", @@ -151,7 +153,7 @@ "encrypted_password = import_export_encryption_handler.encrypt_password(public_key, password_bytes)\n", "\n", "# EDIT path for saving the encrypted password\n", - "encrypted_password_path = os.path.join(tvb_data.__path__[0], 'connectivity')\n", + "encrypted_password_path = os.path.join(dataset.extract_dir, 'tvb_data','connectivity')\n", "\n", "import_export_encryption_handler.save_encrypted_password(encrypted_password, encrypted_password_path)\n", "\n", @@ -198,7 +200,7 @@ "name": "python", 
"nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.9" + "version": "3.9.0" } }, "nbformat": 4, diff --git a/tvb_documentation/demos/interacting_with_rest_api_launch_operations.ipynb b/tvb_documentation/demos/interacting_with_rest_api_launch_operations.ipynb index 21c88fb5f2..735b493f65 100644 --- a/tvb_documentation/demos/interacting_with_rest_api_launch_operations.ipynb +++ b/tvb_documentation/demos/interacting_with_rest_api_launch_operations.ipynb @@ -27,10 +27,10 @@ "import os\n", "import time\n", "\n", - "import tvb_data\n", "from keycloak import KeycloakOpenID\n", "from tvb.core.entities.model.model_operation import STATUS_ERROR, STATUS_CANCELED, STATUS_FINISHED\n", - "from tvb.interfaces.rest.client.tvb_client import TVBClient" + "from tvb.interfaces.rest.client.tvb_client import TVBClient\n", + "from tvb.datasets import TVBZenodoDataset" ] }, { @@ -39,8 +39,7 @@ "metadata": {}, "outputs": [], "source": [ - "def compute_tvb_data_path(folder, filename):\n", - " return os.path.join(os.path.dirname(tvb_data.__file__), folder, filename)\n", + "\n", "\n", "def monitor_operation(tvb_client, operation_gid):\n", " while True:\n", @@ -95,9 +94,13 @@ "source": [ "from tvb.adapters.uploaders.zip_connectivity_importer import ZIPConnectivityImporterModel, ZIPConnectivityImporter\n", "\n", + "#Loading TVB Zenodo data\n", + "\n", + "dataset = TVBZenodoDataset()\n", + "\n", "# Importing a connectivity from ZIP\n", "zip_connectivity_importer_model = ZIPConnectivityImporterModel()\n", - "zip_connectivity_importer_model.uploaded = compute_tvb_data_path('connectivity', 'connectivity_96.zip')\n", + "zip_connectivity_importer_model.uploaded = dataset.fetch_data(\"connectivity_96.zip\")\n", "zip_connectivity_importer_model.normalization = 'region'\n", "operation_gid = tvb_client.launch_operation(default_project_gid, ZIPConnectivityImporter,\n", " zip_connectivity_importer_model)\n", @@ -122,7 +125,7 @@ "\n", "# Importing a surface from ZIP\n", 
"zip_surface_importer_model = ZIPSurfaceImporterModel()\n", - "zip_surface_importer_model.uploaded = compute_tvb_data_path('surfaceData', 'cortex_16384.zip')\n", + "zip_surface_importer_model.uploaded = dataset.fetch_data('cortex_16384.zip')\n", "zip_surface_importer_model.surface_type = SurfaceTypesEnum.CORTICAL_SURFACE\n", "zip_surface_importer_model.should_center = False\n", "\n", @@ -146,7 +149,7 @@ "\n", "# Importing a region mapping\n", "rm_importer_model = RegionMappingImporterModel()\n", - "rm_importer_model.mapping_file = compute_tvb_data_path('regionMapping', 'regionMapping_16k_76.txt')\n", + "rm_importer_model.mapping_file = dataset.fetch_data('regionMapping_16k_76.txt')\n", "rm_importer_model.connectivity = connectivity_dto.gid\n", "rm_importer_model.surface = surface_gid\n", "\n", @@ -260,4 +263,4 @@ }, "nbformat": 4, "nbformat_minor": 4 -} \ No newline at end of file +} diff --git a/tvb_documentation/demos/interacting_with_the_framework.ipynb b/tvb_documentation/demos/interacting_with_the_framework.ipynb index d13afe11be..80a66a5c11 100644 --- a/tvb_documentation/demos/interacting_with_the_framework.ipynb +++ b/tvb_documentation/demos/interacting_with_the_framework.ipynb @@ -100,8 +100,8 @@ "outputs": [], "source": [ "import os\n", - "import tvb_data\n", - "p = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity/connectivity_66.zip')\n", + "from tvb.dataset import TVBZenodoDataset\n", + "p = TVBZenodoDataset.fetch_data('connectivity_66.zip')\n", "import_op = import_conn_zip(proj.id, p)\n", "\n", "import_op = wait_to_finish(import_op)\n", diff --git a/tvb_documentation/demos/simulate_for_mouse.ipynb b/tvb_documentation/demos/simulate_for_mouse.ipynb index cfc5fda7f6..0f8fea4ebd 100644 --- a/tvb_documentation/demos/simulate_for_mouse.ipynb +++ b/tvb_documentation/demos/simulate_for_mouse.ipynb @@ -63,14 +63,35 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": { "tags": [] }, - "outputs": [], + 
"outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "file tvb_data.zip is downloaded at C:\\Users\\Abhijit_asus\\TVB\\DATASETS\\.cache\\TVB_Data\\c042692ba786b0ecebdffd58e9efac21-tvb_data.zip\n" + ] + }, + { + "ename": "NameError", + "evalue": "name 'import_conn_h5' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mNameError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[1], line 5\u001b[0m\n\u001b[0;32m 2\u001b[0m dataset \u001b[39m=\u001b[39m TVBZenodoDataset()\n\u001b[0;32m 4\u001b[0m connectivity_path \u001b[39m=\u001b[39m dataset\u001b[39m.\u001b[39mfetch_data(\u001b[39m\"\u001b[39m\u001b[39mConnectivity.h5\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[1;32m----> 5\u001b[0m import_op \u001b[39m=\u001b[39m import_conn_h5(\u001b[39m1\u001b[39m, connectivity_path)\n\u001b[0;32m 6\u001b[0m import_op \u001b[39m=\u001b[39m wait_to_finish(import_op)\n\u001b[0;32m 7\u001b[0m import_op\n", + "\u001b[1;31mNameError\u001b[0m: name 'import_conn_h5' is not defined" + ] + } + ], "source": [ - "from tvb.basic.readers import try_get_absolute_path\n", - "connectivity_path = try_get_absolute_path(\"tvb_data\",\"mouse/allen_2mm/Connectivity.h5\")\n", + "from tvb.datasets import TVBZenodoDataset\n", + "dataset = TVBZenodoDataset()\n", + "\n", + "connectivity_path = dataset.fetch_data(\"Connectivity.h5\")\n", "import_op = import_conn_h5(1, connectivity_path)\n", "import_op = wait_to_finish(import_op)\n", "import_op" @@ -401,6 +422,7 @@ "outputs": [], "source": [ "# copy all the ids of the ConnectivityMeasureIndexes obtained before\n", + "\n", "connectivity_measure_ids = [i.id for i in get_operation_results(launched_operation.id)[1:]]" ] }, @@ -419,16 +441,16 @@ "\n", "import h5py\n", "from mpl_toolkits.axes_grid1 import make_axes_locatable\n", - "from tvb.basic.readers import try_get_absolute_path\n", + "\n", "\n", 
"fig, axes = plt.subplots(1,3)\n", "slice_idy=73\n", "j=0\n", "for conn_measure_id in connectivity_measure_ids:\n", - " f_path = try_get_absolute_path(\"tvb_data\", \"mouse/allen_2mm/RegionVolumeMapping.h5\")\n", + " f_path = dataset.fetch_data(\"RegionVolumeMapping.h5\")\n", " f = h5py.File(f_path, 'r', libver='latest')\n", " Vol=f['array_data'][:,:,:]\n", - " f_path = try_get_absolute_path(\"tvb_data\", \"mouse/allen_2mm/StructuralMRI.h5\")\n", + " f_path = dataset.fetch_data('StructuralMRI.h5')\n", " f = h5py.File(f_path, 'r', libver='latest')\n", " template=f['array_data'][:,:,:]\n", " conn_measure = load_dt(conn_measure_id)\n", @@ -499,7 +521,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.9" + "version": "3.9.0" } }, "nbformat": 4, From f19b1ad4fb952206f5118f9f2dd5b06e2fcd24e7 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sun, 16 Jul 2023 12:48:13 +0530 Subject: [PATCH 68/84] fixed the notebooks --- .../demos/interacting_with_the_framework.ipynb | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/tvb_documentation/demos/interacting_with_the_framework.ipynb b/tvb_documentation/demos/interacting_with_the_framework.ipynb index 80a66a5c11..1ce1444d33 100644 --- a/tvb_documentation/demos/interacting_with_the_framework.ipynb +++ b/tvb_documentation/demos/interacting_with_the_framework.ipynb @@ -200,20 +200,6 @@ "source": [ "You can re-evaluate this cell multiple times while it's running to see how the results gradually show up as the simulation finishes." 
] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { From cc5062b3c8b8f6904f9fef84cd0c9a20987c48e7 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sun, 16 Jul 2023 12:56:22 +0530 Subject: [PATCH 69/84] trigger for prs as well --- .github/workflows/build.yml | 3 ++- .github/workflows/lib-tests.yml_disabled | 3 ++- .github/workflows/notebooks.yml | 3 ++- .github/workflows/pg-tests.yml | 3 ++- .github/workflows/win-tests.yml | 3 ++- 5 files changed, 10 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5c8a4ec9ea..a00a16b811 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,5 +1,6 @@ name: Test Py -on: [push] +on: pull_request + push jobs: build: diff --git a/.github/workflows/lib-tests.yml_disabled b/.github/workflows/lib-tests.yml_disabled index 921bf2c63e..e24708cc15 100644 --- a/.github/workflows/lib-tests.yml_disabled +++ b/.github/workflows/lib-tests.yml_disabled @@ -1,5 +1,6 @@ name: Test lib -on: [push] +on: pull_request + push jobs: lib-tests: diff --git a/.github/workflows/notebooks.yml b/.github/workflows/notebooks.yml index 6773c09787..84ddc06b0f 100644 --- a/.github/workflows/notebooks.yml +++ b/.github/workflows/notebooks.yml @@ -1,5 +1,6 @@ name: Test Notebooks -on: [push] +on: pull_request + push jobs: build: diff --git a/.github/workflows/pg-tests.yml b/.github/workflows/pg-tests.yml index e49b3cf3c5..5c4ecafa34 100644 --- a/.github/workflows/pg-tests.yml +++ b/.github/workflows/pg-tests.yml @@ -1,5 +1,6 @@ name: Test PG -on: [push] +on: pull_request + push jobs: build: diff --git a/.github/workflows/win-tests.yml b/.github/workflows/win-tests.yml index f4ba58bb25..3350c639d9 100644 --- a/.github/workflows/win-tests.yml +++ b/.github/workflows/win-tests.yml @@ -1,5 +1,6 @@ name: Test 
Win -on: [push] +on: pull_request + push jobs: build: From be5b36550df1e04c0535e7de8a2f37f780933983 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sat, 22 Jul 2023 17:35:08 +0530 Subject: [PATCH 70/84] fix build_step1.py, added ability to unzip whole data. Also fetch_data accepts relative paths. --- tvb_build/build_step1.py | 8 +- .../demos/simulate_for_mouse.ipynb | 30 +---- .../connectivity_zip_importer_test.py | 1 - tvb_library/tvb/datasets/base.py | 18 +++ tvb_library/tvb/datasets/tvb_data.py | 113 ++++++++++------ tvb_library/tvb/datasets/zenodo.py | 15 ++- .../datasets/tvb_zenodo_dataset_test.py | 125 ++++++++++++++++-- 7 files changed, 224 insertions(+), 86 deletions(-) diff --git a/tvb_build/build_step1.py b/tvb_build/build_step1.py index 3541f95bd7..3ecadacb92 100644 --- a/tvb_build/build_step1.py +++ b/tvb_build/build_step1.py @@ -53,7 +53,8 @@ FW_FOLDER = os.path.join(TVB_ROOT, 'tvb_framework') LICENSE_PATH = os.path.join(FW_FOLDER, 'LICENSE') RELEASE_NOTES_PATH = os.path.join(TVB_ROOT, 'tvb_documentation', 'RELEASE_NOTES') -DATA_SRC_FOLDER = TVBZenodoDataset().extract_dir +dataset = TVBZenodoDataset() +DATA_SRC_FOLDER = dataset.extract_dir / 'tvb_data' DEMOS_MATLAB_FOLDER = os.path.join(TVB_ROOT, 'tvb_documentation', 'matlab') # dest paths @@ -114,6 +115,10 @@ "mouse/allen_2mm/RegionVolumeMapping.h5", ] +def fetch_data_to_include(filenames_list, dataset): + for i in filenames_list: + dataset.fetch_data("tvb_data/"+i) + def _copy_dataset(dataset_files, dataset_destination): for pth in dataset_files: @@ -230,6 +235,7 @@ def build_step1(): shutil.copytree(DEMOS_MATLAB_FOLDER, os.path.join(DIST_FOLDER, 'matlab'), ignore=shutil.ignore_patterns('.svn', '*.rst')) + fetch_data_to_include(INCLUDED_INSIDE_DATA, dataset) copy_distribution_dataset() _copy_demos_collapsed({os.path.join("..", "tvb_documentation", "demos"): os.path.join(DIST_FOLDER, "demo_scripts"), diff --git a/tvb_documentation/demos/simulate_for_mouse.ipynb 
b/tvb_documentation/demos/simulate_for_mouse.ipynb index 0f8fea4ebd..f2bc4141fb 100644 --- a/tvb_documentation/demos/simulate_for_mouse.ipynb +++ b/tvb_documentation/demos/simulate_for_mouse.ipynb @@ -63,30 +63,11 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "file tvb_data.zip is downloaded at C:\\Users\\Abhijit_asus\\TVB\\DATASETS\\.cache\\TVB_Data\\c042692ba786b0ecebdffd58e9efac21-tvb_data.zip\n" - ] - }, - { - "ename": "NameError", - "evalue": "name 'import_conn_h5' is not defined", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mNameError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[1;32mIn[1], line 5\u001b[0m\n\u001b[0;32m 2\u001b[0m dataset \u001b[39m=\u001b[39m TVBZenodoDataset()\n\u001b[0;32m 4\u001b[0m connectivity_path \u001b[39m=\u001b[39m dataset\u001b[39m.\u001b[39mfetch_data(\u001b[39m\"\u001b[39m\u001b[39mConnectivity.h5\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[1;32m----> 5\u001b[0m import_op \u001b[39m=\u001b[39m import_conn_h5(\u001b[39m1\u001b[39m, connectivity_path)\n\u001b[0;32m 6\u001b[0m import_op \u001b[39m=\u001b[39m wait_to_finish(import_op)\n\u001b[0;32m 7\u001b[0m import_op\n", - "\u001b[1;31mNameError\u001b[0m: name 'import_conn_h5' is not defined" - ] - } - ], + "outputs": [], "source": [ "from tvb.datasets import TVBZenodoDataset\n", "dataset = TVBZenodoDataset()\n", @@ -496,13 +477,6 @@ "\n", "[7] Newman, Mark EJ. \"The mathematics of networks.\" The new palgrave encyclopedia of economics 2, no. 2008 (2008): 1-12." 
] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_zip_importer_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_zip_importer_test.py index e2b6ff98e0..4d30e57789 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_zip_importer_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/connectivity_zip_importer_test.py @@ -59,7 +59,6 @@ def test_happy_flow_import(self): """ Test that importing a CFF generates at least one DataType in DB. """ - #zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') zip_path = TVBZenodoDataset().fetch_data('connectivity_96.zip') dt_count_before = TestFactory.get_entity_count(self.test_project, ConnectivityIndex) TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John", False) diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py index df01d6c153..8de0c37155 100644 --- a/tvb_library/tvb/datasets/base.py +++ b/tvb_library/tvb/datasets/base.py @@ -33,6 +33,7 @@ from tvb.basic.logger.builder import get_logger from tvb.basic.profile import TvbProfile from pathlib import Path +from zipfile import ZipFile class BaseDataset: @@ -53,6 +54,7 @@ def download(self): def fetch_data(self, file_name): if Path(file_name).is_absolute(): + self.log.warning("Given file name is an absolute path. No operations are done. The path is returned as it is") return file_name return self._fetch_data(file_name) @@ -60,5 +62,21 @@ def fetch_data(self, file_name): def _fetch_data(self, file_name): pass + def read_zipfile_structure(self, file_path): + """ + Reads the zipfile structure and returns the dictionary containing file_names as keys and list of relative paths having same file name. 
+ """ + with ZipFile(file_path) as zf: + file_names_in_zip = zf.namelist() + zf.close() + + file_names_dict = {} + for i in file_names_in_zip: + if str(Path(i).name) not in file_names_dict.keys(): + file_names_dict[str(Path(i).name)] = [i] + else: + file_names_dict[str(Path(i).name)].append(i) + return file_names_dict + def get_version(self): return self.version diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 5bf9c86ba3..11d9120d06 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -52,11 +52,16 @@ def __init__(self, version= "2.7", extract_dir = None): version: str - Version number of the dataset, Default value is 2.7 + + extract_dir: str + - path where you want to extract the archive. + - If `extract_dir` is None, Dataset is downloaded at location according to your profile settings. """ super().__init__(version, extract_dir) self.cached_dir = self.extract_dir / ".cache" self.cached_file = self.cached_dir / "tvb_cached_responses.txt" + self.files_in_zip_dict = None if not self.cached_dir.is_dir(): self.cached_dir.mkdir(parents=True) @@ -68,72 +73,103 @@ def __init__(self, version= "2.7", extract_dir = None): self.log.warning(f"Failed to read data from cached response.") self.recid = Zenodo().get_versions_info(self.CONCEPTID)[version] self.update_cached_response() - - - #TODO add logging errors method by catching the exact exceptions. + self.rec = Record(self.read_cached_response()[self.version]) - def download(self, path=None): + def download(self, path=None, fname=None): """ Downloads the dataset to `path` + parameters + ----------- + path: + - path where you want to download the Dataset. + - If `path` is None, Dataset is downloaded at location according to your profile settings. + fname: + - The name that will be used to save the file. Should NOT include the full the path, just the file name (it will be appended to path). 
+ - If fname is None, file will be saved with a unique name that contains hash of the file and the last part of the url from where the file would be fetched. """ - self.rec.download(path) + if path == None: + path = self.cached_dir + self.rec.download(path, fname) def _fetch_data(self, file_name): """ - Fetches the data + Function to fetch the file having `file_name` as name of the file. The function checks if the dataset is downloaded or not. If not, function downloads the dataset and then extracts/unzip the file. parameters: ----------- file_name: str - - Name of the file from the downloaded zip file to fetch. - extract_dir: str - - Path where you want to extract the archive. If Path is None, dataset is extracted according to the tvb profile configuration - - - returns: Pathlib.Path - path of the file which was extracted + - Name of the file from the downloaded zip file to fetch. Also accepts relative path of the file with respect to tvb_data.zip. This is useful when having multiple files with same name. + + returns: str + path of the extracted/Unzipped file. """ - # TODO: extract dir needs better description. 
- extract_dir = self.extract_dir - download_dir = self.cached_dir / "TVB_Data" try: file_path = self.rec.file_loc['tvb_data.zip'] except: - self.download(path = download_dir) + self.download(path = self.cached_dir, fname=f"tvb_data_{self.version}.zip") file_path = self.rec.file_loc['tvb_data.zip'] - with ZipFile(file_path) as zf: - file_names_in_zip = zf.namelist() - zf.close() + if self.files_in_zip_dict == None: + self.files_in_zip_dict = self.read_zipfile_structure(file_path=file_path) file_name = file_name.strip() - file_names_in_zip = {str(Path(i).name): i for i in file_names_in_zip} - if extract_dir==None: - ZipFile(file_path).extract(file_names_in_zip[file_name]) + if file_name.startswith("tvb_data"): + if file_name in self.files_in_zip_dict[str(Path(file_name).name)] : + ZipFile(file_path).extract(file_name, path=extract_dir) + + if extract_dir.is_absolute(): + return str(extract_dir / file_name) + return str(Path.cwd()/ extract_dir / file_name) + else: + self.log.error("file_name not found, please mention correct relative file path") + + elif len(self.files_in_zip_dict[file_name]) == 1: + ZipFile(file_path).extract(self.files_in_zip_dict[file_name][0], path=extract_dir) + + if extract_dir.is_absolute(): + return str(extract_dir / self.files_in_zip_dict[file_name][0]) + return str(Path.cwd()/ extract_dir / self.files_in_zip_dict[file_name][0]) + + + elif len(self.files_in_zip_dict[file_name]) > 1: - ZipFile(file_path).extract(file_names_in_zip[file_name], path = extract_dir) + self.log.error(f"""There are more than 1 files with same names in the zip file. Please mention relative path of the file with respect to the tvb_data.zip. 
+ file_name should be one of the following paths: {self.files_in_zip_dict[file_name]}""") + raise NameError(f"file name should be one of the {self.files_in_zip_dict[file_name]}, but got {file_name}") - if extract_dir.is_absolute(): - return str(extract_dir / file_names_in_zip[file_name]) + def fetch_all_data(self): + if self.files_in_zip_dict == None: + self.download(path = self.cached_dir, fname=f"tvb_data_{self.version}.zip") + self.files_in_zip_dict = self.read_zipfile_structure(self.rec.file_loc['tvb_data.zip']) + + + for file_paths in self.files_in_zip_dict.values(): + for file_path in file_paths: + self.fetch_data(file_path) + + return str(self.extract_dir / 'tvb_data') - return str(Path.cwd()/ extract_dir / file_names_in_zip[file_name]) def delete_data(self): + """ + Deletes the `tvb_data` folder in the `self.extract_dir` directory. + """ _dir = self.extract_dir / "tvb_data" shutil.rmtree(_dir) + self.log.info(f"deleting {self.extract_dir/'tvb_data'} directory.") def update_cached_response(self): """ - gets responses from zenodo server and saves them to cache file. + Gets responses from zenodo server and saves them to a cache file. """ file_dir = self.cached_file @@ -158,33 +194,32 @@ def update_cached_response(self): def read_cached_response(self): """ - reads responses from the cache file. - + Reads responses from the cache file. """ file_dir = self.cached_file - with open(file_dir) as fp: responses = json.load(fp) - fp.close() - responses = dict(responses) return responses def describe(self): + """ + Returns the project description mentioned on the zenodo website. 
+ """ return self.rec.describe() - def get_record(self): + def get_recordid(self): + """ + returns record id of the dataset + """ return self.recid def __eq__(self, other): - if isinstace(other, TVBZenodoDataset): - return self.rec == tvb_data.rec + if isinstance(other, TVBZenodoDataset): + return self.rec == other.rec return False - - - diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 04232e26e7..055381b206 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -53,7 +53,13 @@ def __init__(self, data, base_url: str = BASE_URL) -> None: - def download(self, path: str = None) -> None: + def download(self, path: str = None, fname=None) -> None: + """ + Download the files entity from the json response at `path`. If the `path` is None, the data is downloaded at os caches. + + For more info about os cache, have a look at https://www.fatiando.org/pooch/latest/api/generated/pooch.os_cache.html. + In our use case, the is `tvb`. + """ if 'files' not in self.data: raise AttributeError("No files to download! Please check if the record id entered is correct! or the data is publically accessible") @@ -61,13 +67,16 @@ def download(self, path: str = None) -> None: if path == None: path = pooch.os_cache("tvb") + + #convert pathlib.Path objects to strings. 
+ path = str(path) for file in self.data["files"]: url = file['links']['self'] known_hash = file['checksum'] file_name = file['key'] - file_path = pooch.retrieve(url= url, known_hash= known_hash, path = path,progressbar = True) + file_path = pooch.retrieve(url= url, known_hash= known_hash, path = path, fname=fname ,progressbar = True) self.file_loc[f'{file_name}'] = file_path @@ -85,7 +94,7 @@ def get_record_id(self) -> str: return self.data['conceptrecid'] def is_open_access(self) -> str: - return self.data['metadata']['access_right'] != "closed" + return self.data['metadata']['access_right'] == "open" def __eq__(self, record_b) -> bool: return (self.data == record_b.data) diff --git a/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py b/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py index e275410f81..b9c233130c 100644 --- a/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py +++ b/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py @@ -32,34 +32,131 @@ from tvb.datasets import TVBZenodoDataset from pathlib import Path from tvb.tests.library.base_testcase import BaseTestCase +import zipfile +import pytest class Test_TVBZenodoDataset(BaseTestCase): def test_extract(self): - tvb_data = TVBZenodoDataset() - connectivity66_dir = Path(tvb_data.fetch_data("connectivity_66.zip")) + dataset = TVBZenodoDataset() + connectivity66_dir = Path(dataset.fetch_data("connectivity_66.zip")) + + assert str(connectivity66_dir).endswith(".zip") assert connectivity66_dir.is_file() - tvb_data.delete_data() + dataset.delete_data() assert not connectivity66_dir.is_file() - tvb_data = TVBZenodoDataset(version="2.0.3", extract_dir="tvb_data") - connectivity66_dir = Path(tvb_data.fetch_data("connectivity_66.zip")) - assert connectivity66_dir.is_file() - tvb_data.delete_data() + dataset = TVBZenodoDataset(version="2.0.3", extract_dir="dataset") + connectivity66_dir = Path(dataset.fetch_data("connectivity_66.zip")) + + assert 
str(connectivity66_dir).endswith(".zip") + assert "dataset" in str(connectivity66_dir) + assert (Path.cwd()/"dataset").is_dir() + assert (Path.cwd()/"dataset"/"tvb_data").is_dir() + dataset.delete_data() assert not connectivity66_dir.is_file() - tvb_data = TVBZenodoDataset(version="2.0.3", extract_dir="~/tvb_data") - matfile_dir = Path(tvb_data.fetch_data("local_connectivity_80k.mat")) + dataset = TVBZenodoDataset(version="2.0.3", extract_dir="~/dataset") + matfile_dir = Path(dataset.fetch_data("local_connectivity_80k.mat")) + + assert str(matfile_dir).endswith(".mat") assert matfile_dir.is_file() - tvb_data.delete_data() + dataset.delete_data() assert not matfile_dir.is_file() - all_extract = Path(TVBZenodoDataset(version = "2.0.3", extract_dir="~/tvb_data").fetch_data(" ConnectivityTable_regions.xls")) - assert all_extract.is_file() - tvb_data.delete_data() - assert not all_extract.is_file() + + excel_extract = Path(dataset.fetch_data(" ConnectivityTable_regions.xls")) + assert excel_extract.is_file() + dataset.delete_data() + assert not excel_extract.is_file() + + + + all_extract =Path(dataset.fetch_all_data()) + assert all_extract.is_dir() + assert all_extract + + dataset.delete_data() + + def test_check_content(self): + + #check if connectivity_66 contains expected files. 
+ dataset = TVBZenodoDataset() + connectivity66_dir = Path(dataset.fetch_data("connectivity_66.zip")) + + assert "centres.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + assert "info.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + assert "tract_lengths.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + assert "weights.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + + + dataset = TVBZenodoDataset(version= "2.0.3", extract_dir="~/dataset") + connectivity66_dir = Path(dataset.fetch_data("connectivity_66.zip")) + assert "centres.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + assert "info.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + assert "tract_lengths.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + assert "weights.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + + dataset = TVBZenodoDataset(version="2.0.3", extract_dir="~/dataset") + extract_dir = dataset.fetch_all_data() + assert (extract_dir/ "tvb_data" /"mouse"/"allen_2mm"/"Connectivity.h5").is_file() + assert (extract_dir/ "tvb_data" /"surfaceData"/"inner_skull_4096.zip").is_file() + + + + + def test_file_name_variants(self): + dataset = TVBZenodoDataset(version= "2.0.3", extract_dir="~/dataset") + connectivity66_dir_1 = Path(dataset.fetch_data("connectivity_66.zip")) + connectivity66_dir_2 = Path(dataset.fetch_data('tvb_data/connectivity/connectivity_66.zip')) + assert connectivity66_dir_1 == connectivity66_dir_2 + + dataset.delete_data() + + dataset = TVBZenodoDataset() + connectivity66_dir_1 = Path(dataset.fetch_data("connectivity_66.zip")) + connectivity66_dir_2 = Path(dataset.fetch_data('tvb_data/connectivity/connectivity_66.zip')) + assert connectivity66_dir_1 == connectivity66_dir_2 + + dataset.delete_data() + + + dataset = TVBZenodoDataset(version= "2.0.3", extract_dir="dataset") + connectivity66_dir_1 = Path(dataset.fetch_data("connectivity_66.zip")) + connectivity66_dir_2 = 
Path(dataset.fetch_data('tvb_data/connectivity/connectivity_66.zip')) + assert connectivity66_dir_1 == connectivity66_dir_2 + + dataset.delete_data() + + + # should raise error cause there are two files with name mapping_FS_84.txt + with pytest.raises(NameError): + dataset = TVBZenodoDataset() + data = dataset.fetch_data("mapping_FS_84.txt") + + # no error when relative path given + dataset = TVBZenodoDataset() + data = Path(dataset.fetch_data(" tvb_data/macaque/mapping_FS_84.txt")) + assert data.is_file() + + data = Path(dataset.fetch_data('tvb_data/nifti/volume_mapping/mapping_FS_84.txt')) + assert data.is_file() + + dataset.delete_data() + + + + + + + + + + + + #TODO add no interenet tests From ba100ccd6d09b2a167bbdd6d3368bb284b502c89 Mon Sep 17 00:00:00 2001 From: Abhijit Deo <72816663+abhi-glitchhg@users.noreply.github.com> Date: Sat, 22 Jul 2023 17:49:33 +0530 Subject: [PATCH 71/84] revert back to original notebooks metadata --- tvb_documentation/demos/encrypt_data.ipynb | 4 ++-- tvb_documentation/demos/simulate_for_mouse.ipynb | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tvb_documentation/demos/encrypt_data.ipynb b/tvb_documentation/demos/encrypt_data.ipynb index 051421a3fc..2197f07bb8 100644 --- a/tvb_documentation/demos/encrypt_data.ipynb +++ b/tvb_documentation/demos/encrypt_data.ipynb @@ -28,7 +28,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": { "colab": {}, "colab_type": "code", @@ -200,7 +200,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.0" + "version": "3.10.9" } }, "nbformat": 4, diff --git a/tvb_documentation/demos/simulate_for_mouse.ipynb b/tvb_documentation/demos/simulate_for_mouse.ipynb index f2bc4141fb..6ca7857928 100644 --- a/tvb_documentation/demos/simulate_for_mouse.ipynb +++ b/tvb_documentation/demos/simulate_for_mouse.ipynb @@ -495,7 +495,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": 
"ipython3", - "version": "3.9.0" + "version": "3.10.9" } }, "nbformat": 4, From f639109ca88df37daa08ee851a5ec95691fbdf98 Mon Sep 17 00:00:00 2001 From: Abhijit Deo <72816663+abhi-glitchhg@users.noreply.github.com> Date: Sat, 22 Jul 2023 18:11:06 +0530 Subject: [PATCH 72/84] add missing `:` to the github workflow files. :) --- .github/workflows/build.yml | 4 ++-- .github/workflows/lib-tests.yml_disabled | 4 ++-- .github/workflows/notebooks.yml | 4 ++-- .github/workflows/pg-tests.yml | 4 ++-- .github/workflows/win-tests.yml | 4 ++-- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index a00a16b811..586cbf31c8 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,6 +1,6 @@ name: Test Py -on: pull_request - push +on: pull_request: + push: jobs: build: diff --git a/.github/workflows/lib-tests.yml_disabled b/.github/workflows/lib-tests.yml_disabled index e24708cc15..5fac55cee2 100644 --- a/.github/workflows/lib-tests.yml_disabled +++ b/.github/workflows/lib-tests.yml_disabled @@ -1,6 +1,6 @@ name: Test lib -on: pull_request - push +on: pull_request: + push: jobs: lib-tests: diff --git a/.github/workflows/notebooks.yml b/.github/workflows/notebooks.yml index 84ddc06b0f..041e5150d7 100644 --- a/.github/workflows/notebooks.yml +++ b/.github/workflows/notebooks.yml @@ -1,6 +1,6 @@ name: Test Notebooks -on: pull_request - push +on: pull_request: + push: jobs: build: diff --git a/.github/workflows/pg-tests.yml b/.github/workflows/pg-tests.yml index 5c4ecafa34..2d269ac322 100644 --- a/.github/workflows/pg-tests.yml +++ b/.github/workflows/pg-tests.yml @@ -1,6 +1,6 @@ name: Test PG -on: pull_request - push +on: pull_request: + push: jobs: build: diff --git a/.github/workflows/win-tests.yml b/.github/workflows/win-tests.yml index 3350c639d9..7fd1e3536b 100644 --- a/.github/workflows/win-tests.yml +++ b/.github/workflows/win-tests.yml @@ -1,6 +1,6 @@ name: Test Win -on: pull_request 
- push +on: pull_request: + push: jobs: build: From 7c328f992c44deccfbb24c1299f466f654f70576 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sat, 22 Jul 2023 18:17:36 +0530 Subject: [PATCH 73/84] fixed the silly indentation mistakes in the github actions files. --- .github/workflows/build.yml | 5 +++-- .github/workflows/lib-tests.yml_disabled | 5 +++-- .github/workflows/notebooks.yml | 5 +++-- .github/workflows/pg-tests.yml | 5 +++-- .github/workflows/win-tests.yml | 5 +++-- 5 files changed, 15 insertions(+), 10 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 586cbf31c8..9ec10866ff 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,6 +1,7 @@ name: Test Py -on: pull_request: - push: +on: + pull_request: + push: jobs: build: diff --git a/.github/workflows/lib-tests.yml_disabled b/.github/workflows/lib-tests.yml_disabled index 5fac55cee2..78d1e44e4e 100644 --- a/.github/workflows/lib-tests.yml_disabled +++ b/.github/workflows/lib-tests.yml_disabled @@ -1,6 +1,7 @@ name: Test lib -on: pull_request: - push: +on: + pull_request: + push: jobs: lib-tests: diff --git a/.github/workflows/notebooks.yml b/.github/workflows/notebooks.yml index 041e5150d7..d5590e62bf 100644 --- a/.github/workflows/notebooks.yml +++ b/.github/workflows/notebooks.yml @@ -1,6 +1,7 @@ name: Test Notebooks -on: pull_request: - push: +on: + pull_request: + push: jobs: build: diff --git a/.github/workflows/pg-tests.yml b/.github/workflows/pg-tests.yml index 2d269ac322..b9fc618c7d 100644 --- a/.github/workflows/pg-tests.yml +++ b/.github/workflows/pg-tests.yml @@ -1,6 +1,7 @@ name: Test PG -on: pull_request: - push: +on: + pull_request: + push: jobs: build: diff --git a/.github/workflows/win-tests.yml b/.github/workflows/win-tests.yml index 7fd1e3536b..1a0bf31d0e 100644 --- a/.github/workflows/win-tests.yml +++ b/.github/workflows/win-tests.yml @@ -1,6 +1,7 @@ name: Test Win -on: pull_request: - push: +on: + pull_request: + push: 
jobs: build: From a8b5f70e7cf22e21b892de218a107c713f15f96e Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sun, 23 Jul 2023 12:50:51 +0530 Subject: [PATCH 74/84] fix the directory path in the tests --- tvb_library/tvb/datasets/tvb_data.py | 6 +++-- .../datasets/tvb_zenodo_dataset_test.py | 22 ++++++++++++++----- 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 11d9120d06..2786641dae 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -154,8 +154,10 @@ def fetch_all_data(self): for file_paths in self.files_in_zip_dict.values(): for file_path in file_paths: self.fetch_data(file_path) - - return str(self.extract_dir / 'tvb_data') + + if self.extract_dir.is_absolute(): + return str(self.extract_dir) + return str(Path.cwd()/self.extract_dir) def delete_data(self): diff --git a/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py b/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py index b9c233130c..fdc511bdb2 100644 --- a/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py +++ b/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py @@ -91,6 +91,7 @@ def test_check_content(self): assert "info.txt" in zipfile.ZipFile(connectivity66_dir).namelist() assert "tract_lengths.txt" in zipfile.ZipFile(connectivity66_dir).namelist() assert "weights.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + dataset.delete_data() dataset = TVBZenodoDataset(version= "2.0.3", extract_dir="~/dataset") @@ -99,13 +100,24 @@ def test_check_content(self): assert "info.txt" in zipfile.ZipFile(connectivity66_dir).namelist() assert "tract_lengths.txt" in zipfile.ZipFile(connectivity66_dir).namelist() assert "weights.txt" in zipfile.ZipFile(connectivity66_dir).namelist() + dataset.delete_data() - dataset = TVBZenodoDataset(version="2.0.3", extract_dir="~/dataset") - extract_dir = dataset.fetch_all_data() - 
assert (extract_dir/ "tvb_data" /"mouse"/"allen_2mm"/"Connectivity.h5").is_file() - assert (extract_dir/ "tvb_data" /"surfaceData"/"inner_skull_4096.zip").is_file() - + dataset = TVBZenodoDataset(version="2.0.3", extract_dir="dataset") + extract_dir = Path(dataset.fetch_all_data()) + assert (extract_dir/"tvb_data").is_dir() + assert (extract_dir/"tvb_data/mouse/allen_2mm/Connectivity.h5").is_file() + assert (extract_dir/"tvb_data/surfaceData/inner_skull_4096.zip").is_file() + + connectivity66 = extract_dir/"tvb_data/connectivity/connectivity_96.zip" + assert connectivity66.is_file() + + assert "centres.txt" in zipfile.ZipFile(connectivity66).namelist() + assert "info.txt" in zipfile.ZipFile(connectivity66).namelist() + assert "tract_lengths.txt" in zipfile.ZipFile(connectivity66).namelist() + assert "weights.txt" in zipfile.ZipFile(connectivity66).namelist() + + dataset.delete_data() def test_file_name_variants(self): From c63a507b914e8bc425f055b9bbb9cb766555102a Mon Sep 17 00:00:00 2001 From: abhi_win Date: Fri, 28 Jul 2023 20:09:07 +0530 Subject: [PATCH 75/84] add line which was removed accidentally while resolving the conflict --- .../tests/framework/adapters/uploaders/encrypt_decrypt_test.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py b/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py index 226abde59d..9485f70d66 100644 --- a/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py +++ b/tvb_framework/tvb/tests/framework/adapters/uploaders/encrypt_decrypt_test.py @@ -32,6 +32,8 @@ import pyAesCrypt import pytest from tvb.datasets import TVBZenodoDataset + +import tempfile from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from cryptography.hazmat.primitives.asymmetric import rsa From f6e2e99a17390175298b066a7f388f10f6c7a3df Mon Sep 17 00:00:00 2001 From: abhi_win Date: 
Fri, 28 Jul 2023 23:38:42 +0530 Subject: [PATCH 76/84] typo fix --- tvb_documentation/demos/interacting_with_the_framework.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tvb_documentation/demos/interacting_with_the_framework.ipynb b/tvb_documentation/demos/interacting_with_the_framework.ipynb index 1ce1444d33..e9a629b9fe 100644 --- a/tvb_documentation/demos/interacting_with_the_framework.ipynb +++ b/tvb_documentation/demos/interacting_with_the_framework.ipynb @@ -100,7 +100,7 @@ "outputs": [], "source": [ "import os\n", - "from tvb.dataset import TVBZenodoDataset\n", + "from tvb.datasets import TVBZenodoDataset\n", "p = TVBZenodoDataset.fetch_data('connectivity_66.zip')\n", "import_op = import_conn_zip(proj.id, p)\n", "\n", From 02a31b442447771422ff797e6965b259faec13ac Mon Sep 17 00:00:00 2001 From: abhi_win Date: Mon, 31 Jul 2023 08:53:09 +0530 Subject: [PATCH 77/84] api change and fix typo --- .../interacting_with_the_framework.ipynb | 2 +- tvb_library/tvb/datasets/base.py | 23 ++--- tvb_library/tvb/datasets/tvb_data.py | 85 +++++++++---------- 3 files changed, 53 insertions(+), 57 deletions(-) diff --git a/tvb_documentation/demos/interacting_with_the_framework.ipynb b/tvb_documentation/demos/interacting_with_the_framework.ipynb index e9a629b9fe..8d7d62590a 100644 --- a/tvb_documentation/demos/interacting_with_the_framework.ipynb +++ b/tvb_documentation/demos/interacting_with_the_framework.ipynb @@ -101,7 +101,7 @@ "source": [ "import os\n", "from tvb.datasets import TVBZenodoDataset\n", - "p = TVBZenodoDataset.fetch_data('connectivity_66.zip')\n", + "p = TVBZenodoDataset().fetch_data('connectivity_66.zip')\n", "import_op = import_conn_zip(proj.id, p)\n", "\n", "import_op = wait_to_finish(import_op)\n", diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py index 8de0c37155..abbb173537 100644 --- a/tvb_library/tvb/datasets/base.py +++ b/tvb_library/tvb/datasets/base.py @@ -37,7 +37,7 @@ class BaseDataset: - def 
__init__(self, version, extract_dir=None): + def __init__(self, version : str , extract_dir : str =None) -> None: self.log = get_logger(self.__class__.__module__) self.cached_files = None @@ -49,20 +49,20 @@ def __init__(self, version, extract_dir=None): self.extract_dir = Path(extract_dir).expanduser() - def download(self): - pass - - def fetch_data(self, file_name): + def fetch_data(self, file_name:str) -> str: if Path(file_name).is_absolute(): self.log.warning("Given file name is an absolute path. No operations are done. The path is returned as it is") return file_name return self._fetch_data(file_name) + + def get_version(self) -> str: + return self.version - def _fetch_data(self, file_name): - pass + def delete_data(self): + raise NotImplemented - def read_zipfile_structure(self, file_path): + def _read_zipfile_structure(self, file_path): """ Reads the zipfile structure and returns the dictionary containing file_names as keys and list of relative paths having same file name. """ @@ -78,5 +78,8 @@ def read_zipfile_structure(self, file_path): file_names_dict[str(Path(i).name)].append(i) return file_names_dict - def get_version(self): - return self.version + def _fetch_data(self,file_name): + raise NotImplemented + + def _download(self): + raise NotImplemented diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 2786641dae..0a2b710f35 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -31,19 +31,20 @@ import os import requests import json -import pooch + from pathlib import Path from zipfile import ZipFile import shutil + from .base import BaseDataset -from .zenodo import Zenodo, Record, BASE_URL +from .zenodo import Record, BASE_URL, Zenodo class TVBZenodoDataset(BaseDataset): CONCEPTID = "3417206" - def __init__(self, version= "2.7", extract_dir = None): + def __init__(self, version : str= "2.7", extract_dir : str = None): """ Constructor for TVB_Data class @@ -67,30 +68,14 
@@ def __init__(self, version= "2.7", extract_dir = None): self.cached_dir.mkdir(parents=True) try: - self.recid = self.read_cached_response()[version]['conceptrecid'] + self.recid = self._read_cached_response()[version]['conceptrecid'] except : self.log.warning(f"Failed to read data from cached response.") self.recid = Zenodo().get_versions_info(self.CONCEPTID)[version] - self.update_cached_response() + self._update_cached_response() - self.rec = Record(self.read_cached_response()[self.version]) - - def download(self, path=None, fname=None): - """ - Downloads the dataset to `path` - parameters - ----------- - path: - - path where you want to download the Dataset. - - If `path` is None, Dataset is downloaded at location according to your profile settings. - fname: - - The name that will be used to save the file. Should NOT include the full the path, just the file name (it will be appended to path). - - If fname is None, file will be saved with a unique name that contains hash of the file and the last part of the url from where the file would be fetched. 
- """ - if path == None: - path = self.cached_dir - self.rec.download(path, fname) + self.rec = Record(self._read_cached_response()[self.version]) def _fetch_data(self, file_name): """ @@ -110,11 +95,11 @@ def _fetch_data(self, file_name): try: file_path = self.rec.file_loc['tvb_data.zip'] except: - self.download(path = self.cached_dir, fname=f"tvb_data_{self.version}.zip") + self._download(path = self.cached_dir, fname=f"tvb_data_{self.version}.zip") file_path = self.rec.file_loc['tvb_data.zip'] if self.files_in_zip_dict == None: - self.files_in_zip_dict = self.read_zipfile_structure(file_path=file_path) + self.files_in_zip_dict = self._read_zipfile_structure(file_path=file_path) file_name = file_name.strip() @@ -143,12 +128,23 @@ def _fetch_data(self, file_name): file_name should be one of the following paths: {self.files_in_zip_dict[file_name]}""") raise NameError(f"file name should be one of the {self.files_in_zip_dict[file_name]}, but got {file_name}") + def describe(self): + """ + Returns the project description mentioned on the zenodo website. + """ + return self.rec.describe() + + def get_recordid(self): + """ + returns record id of the dataset + """ + return self.recid def fetch_all_data(self): if self.files_in_zip_dict == None: - self.download(path = self.cached_dir, fname=f"tvb_data_{self.version}.zip") - self.files_in_zip_dict = self.read_zipfile_structure(self.rec.file_loc['tvb_data.zip']) + self._download(path = self.cached_dir, fname=f"tvb_data_{self.version}.zip") + self.files_in_zip_dict = self._read_zipfile_structure(self.rec.file_loc['tvb_data.zip']) for file_paths in self.files_in_zip_dict.values(): @@ -168,8 +164,23 @@ def delete_data(self): shutil.rmtree(_dir) self.log.info(f"deleting {self.extract_dir/'tvb_data'} directory.") + def _download(self, path=None, fname=None): + """ + Downloads the dataset to `path` + parameters + ----------- + path: + - path where you want to download the Dataset. 
+ - If `path` is None, Dataset is downloaded at location according to your profile settings. + fname: + - The name that will be used to save the file. Should NOT include the full the path, just the file name (it will be appended to path). + - If fname is None, file will be saved with a unique name that contains hash of the file and the last part of the url from where the file would be fetched. + """ + if path == None: + path = self.cached_dir + self.rec.download(path, fname) - def update_cached_response(self): + def _update_cached_response(self): """ Gets responses from zenodo server and saves them to a cache file. """ @@ -194,7 +205,7 @@ def update_cached_response(self): self.log.warning("Updated the cache response file") return - def read_cached_response(self): + def _read_cached_response(self): """ Reads responses from the cache file. """ @@ -207,21 +218,3 @@ def read_cached_response(self): responses = dict(responses) return responses - - - def describe(self): - """ - Returns the project description mentioned on the zenodo website. 
- """ - return self.rec.describe() - - def get_recordid(self): - """ - returns record id of the dataset - """ - return self.recid - - def __eq__(self, other): - if isinstance(other, TVBZenodoDataset): - return self.rec == other.rec - return False From ffbe9853561fc2dce2d38eadfdbd3396233b502a Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sun, 6 Aug 2023 23:53:27 +0530 Subject: [PATCH 78/84] getting missing whitespace around operator error --- .../tvb/tests/library/simulator/backend/nbbackend_mpr_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tvb_library/tvb/tests/library/simulator/backend/nbbackend_mpr_test.py b/tvb_library/tvb/tests/library/simulator/backend/nbbackend_mpr_test.py index 0d993d045b..90dc425f36 100644 --- a/tvb_library/tvb/tests/library/simulator/backend/nbbackend_mpr_test.py +++ b/tvb_library/tvb/tests/library/simulator/backend/nbbackend_mpr_test.py @@ -78,8 +78,8 @@ def test_local_deterministic(self): ) ).configure() - (pdq_t, pdq_d), = NbMPRBackend().run_sim(sim, nstep=1) - (raw_t, raw_d), = sim.run(simulation_length=1) + (pdq_t, pdq_d) , = NbMPRBackend().run_sim(sim, nstep=1) + (raw_t, raw_d) , = sim.run(simulation_length=1) np.testing.assert_allclose(raw_d[0,:], pdq_d[0,:], rtol=1e-5) From 0918d3594a29522c9dd181f184830efe4bb9c4d0 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Mon, 7 Aug 2023 00:06:05 +0530 Subject: [PATCH 79/84] revert the change --- .../tvb/tests/library/simulator/backend/nbbackend_mpr_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tvb_library/tvb/tests/library/simulator/backend/nbbackend_mpr_test.py b/tvb_library/tvb/tests/library/simulator/backend/nbbackend_mpr_test.py index 90dc425f36..0d993d045b 100644 --- a/tvb_library/tvb/tests/library/simulator/backend/nbbackend_mpr_test.py +++ b/tvb_library/tvb/tests/library/simulator/backend/nbbackend_mpr_test.py @@ -78,8 +78,8 @@ def test_local_deterministic(self): ) ).configure() - (pdq_t, pdq_d) , = 
NbMPRBackend().run_sim(sim, nstep=1) - (raw_t, raw_d) , = sim.run(simulation_length=1) + (pdq_t, pdq_d), = NbMPRBackend().run_sim(sim, nstep=1) + (raw_t, raw_d), = sim.run(simulation_length=1) np.testing.assert_allclose(raw_d[0,:], pdq_d[0,:], rtol=1e-5) From 842e76495acae0f22b667b9501fb4e30e9a8ef75 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Mon, 7 Aug 2023 00:20:12 +0530 Subject: [PATCH 80/84] fix the pycodestyle issue --- tvb_build/docker/requirements_group | 1 + 1 file changed, 1 insertion(+) diff --git a/tvb_build/docker/requirements_group b/tvb_build/docker/requirements_group index 8949aebbdd..e72c1030d2 100644 --- a/tvb_build/docker/requirements_group +++ b/tvb_build/docker/requirements_group @@ -57,3 +57,4 @@ watchdog requests-toolbelt>=0.10 elasticsearch urllib3<2.0 +pycodestyle==2.10.0 \ No newline at end of file From 294a4d82a68327643675ed5621389413e0dc56f7 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Mon, 21 Aug 2023 23:23:22 +0530 Subject: [PATCH 81/84] minor changes --- tvb_build/build_step1.py | 89 +------------------ tvb_library/tvb/datasets/base.py | 28 +----- tvb_library/tvb/datasets/tvb_data.py | 46 +++++----- tvb_library/tvb/datasets/zenodo.py | 14 +-- .../datasets/tvb_zenodo_dataset_test.py | 14 --- 5 files changed, 30 insertions(+), 161 deletions(-) diff --git a/tvb_build/build_step1.py b/tvb_build/build_step1.py index 3ecadacb92..44532af9b5 100644 --- a/tvb_build/build_step1.py +++ b/tvb_build/build_step1.py @@ -44,7 +44,6 @@ import requests import tvb_bin -from tvb.datasets import TVBZenodoDataset from subprocess import Popen, PIPE # source paths @@ -53,95 +52,13 @@ FW_FOLDER = os.path.join(TVB_ROOT, 'tvb_framework') LICENSE_PATH = os.path.join(FW_FOLDER, 'LICENSE') RELEASE_NOTES_PATH = os.path.join(TVB_ROOT, 'tvb_documentation', 'RELEASE_NOTES') -dataset = TVBZenodoDataset() -DATA_SRC_FOLDER = dataset.extract_dir / 'tvb_data' + + DEMOS_MATLAB_FOLDER = os.path.join(TVB_ROOT, 'tvb_documentation', 'matlab') # dest paths DIST_FOLDER = 
os.path.join(os.path.dirname(__file__), 'build', 'TVB_Distribution') -DATA_INSIDE_FOLDER = os.path.join(DIST_FOLDER, '_tvb_data') - -INCLUDED_INSIDE_DATA = [ - "__init__.py", - "Default_Project.zip", - - "connectivity/connectivity_76.zip", - "connectivity/paupau.zip", - "connectivity/connectivity_66.zip", - "connectivity/connectivity_192.zip", - "connectivity/__init__.py", - - "projectionMatrix/projection_eeg_62_surface_16k.mat", - "projectionMatrix/projection_eeg_65_surface_16k.npy", - "projectionMatrix/projection_meg_276_surface_16k.npy", - "projectionMatrix/projection_seeg_588_surface_16k.npy", - "projectionMatrix/__init__.py", - - "regionMapping/__init__.py", - "regionMapping/regionMapping_16k_76.txt", - "regionMapping/regionMapping_80k_80.txt", - "regionMapping/regionMapping_16k_192.txt", - - "sensors/eeg_unitvector_62.txt.bz2", - "sensors/eeg_brainstorm_65.txt", - "sensors/meg_151.txt.bz2", - "sensors/meg_brainstorm_276.txt", - "sensors/seeg_39.txt.bz2", - "sensors/seeg_brainstorm_960.txt", - "sensors/seeg_588.txt", - "sensors/__init__.py", - - "surfaceData/__init__.py", - "surfaceData/cortex_80k.zip", - "surfaceData/cortex_16384.zip", - "surfaceData/outer_skin_4096.zip", - "surfaceData/inner_skull_4096.zip", - "surfaceData/outer_skull_4096.zip", - "surfaceData/scalp_1082.zip", - "surfaceData/face_8614.zip", - - "local_connectivity/__init__.py", - "local_connectivity/local_connectivity_16384.mat", - "local_connectivity/local_connectivity_80k.mat", - - "obj/__init__.py", - "obj/face_surface.obj", - "obj/eeg_cap.obj", - - "mouse/allen_2mm/Connectivity.h5", - "mouse/allen_2mm/Volume.h5", - "mouse/allen_2mm/StructuralMRI.h5", - "mouse/allen_2mm/RegionVolumeMapping.h5", -] - -def fetch_data_to_include(filenames_list, dataset): - for i in filenames_list: - dataset.fetch_data("tvb_data/"+i) - - -def _copy_dataset(dataset_files, dataset_destination): - for pth in dataset_files: - rel_pth = pth.split('/') - origin = os.path.join(DATA_SRC_FOLDER, *rel_pth) - 
destination = os.path.join(dataset_destination, *rel_pth) - destination_folder = os.path.dirname(destination) - if not os.path.exists(destination_folder): - os.makedirs(destination_folder) - print("Copying %s into %s" % (origin, destination)) - shutil.copyfile(origin, destination) - - -def copy_distribution_dataset(): - """ - Copy the required data file from tvb_data folder: - - inside TVB library package (for internal usage). - Will be used during TVB functioning: import default project, - load default for console profile, or code update events - - in tvb_data folder, as example for users. - """ - _copy_dataset(INCLUDED_INSIDE_DATA, DATA_INSIDE_FOLDER) - def _copy_demos_collapsed(to_copy): """ @@ -235,8 +152,6 @@ def build_step1(): shutil.copytree(DEMOS_MATLAB_FOLDER, os.path.join(DIST_FOLDER, 'matlab'), ignore=shutil.ignore_patterns('.svn', '*.rst')) - fetch_data_to_include(INCLUDED_INSIDE_DATA, dataset) - copy_distribution_dataset() _copy_demos_collapsed({os.path.join("..", "tvb_documentation", "demos"): os.path.join(DIST_FOLDER, "demo_scripts"), os.path.join("..", "tvb_documentation", "tutorials"): diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py index abbb173537..a84bb2cba2 100644 --- a/tvb_library/tvb/datasets/base.py +++ b/tvb_library/tvb/datasets/base.py @@ -49,37 +49,13 @@ def __init__(self, version : str , extract_dir : str =None) -> None: self.extract_dir = Path(extract_dir).expanduser() - def fetch_data(self, file_name:str) -> str: - if Path(file_name).is_absolute(): - self.log.warning("Given file name is an absolute path. No operations are done. 
The path is returned as it is") - return file_name - - return self._fetch_data(file_name) - + def fetch_data(self) : + raise NotImplemented def get_version(self) -> str: return self.version def delete_data(self): raise NotImplemented - - def _read_zipfile_structure(self, file_path): - """ - Reads the zipfile structure and returns the dictionary containing file_names as keys and list of relative paths having same file name. - """ - with ZipFile(file_path) as zf: - file_names_in_zip = zf.namelist() - zf.close() - - file_names_dict = {} - for i in file_names_in_zip: - if str(Path(i).name) not in file_names_dict.keys(): - file_names_dict[str(Path(i).name)] = [i] - else: - file_names_dict[str(Path(i).name)].append(i) - return file_names_dict - - def _fetch_data(self,file_name): - raise NotImplemented def _download(self): raise NotImplemented diff --git a/tvb_library/tvb/datasets/tvb_data.py b/tvb_library/tvb/datasets/tvb_data.py index 0a2b710f35..e749320cde 100644 --- a/tvb_library/tvb/datasets/tvb_data.py +++ b/tvb_library/tvb/datasets/tvb_data.py @@ -28,7 +28,6 @@ .. moduleauthor:: Abhijit Deo """ -import os import requests import json @@ -77,7 +76,7 @@ def __init__(self, version : str= "2.7", extract_dir : str = None): self.rec = Record(self._read_cached_response()[self.version]) - def _fetch_data(self, file_name): + def fetch_data(self, file_name:str)->str: """ Function to fetch the file having `file_name` as name of the file. The function checks if the dataset is downloaded or not. If not, function downloads the dataset and then extracts/unzip the file. @@ -89,7 +88,10 @@ def _fetch_data(self, file_name): returns: str path of the extracted/Unzipped file. """ - + if Path(file_name).is_absolute(): + self.log.warning("Given `file_name` is an absolute path. No operations are done. 
The `file_name` is returned as it is") + return file_name + extract_dir = self.extract_dir try: @@ -128,35 +130,20 @@ def _fetch_data(self, file_name): file_name should be one of the following paths: {self.files_in_zip_dict[file_name]}""") raise NameError(f"file name should be one of the {self.files_in_zip_dict[file_name]}, but got {file_name}") - def describe(self): + def describe(self)-> str: """ Returns the project description mentioned on the zenodo website. """ return self.rec.describe() - def get_recordid(self): + def get_recordid(self) -> str: """ returns record id of the dataset """ return self.recid - def fetch_all_data(self): - - if self.files_in_zip_dict == None: - self._download(path = self.cached_dir, fname=f"tvb_data_{self.version}.zip") - self.files_in_zip_dict = self._read_zipfile_structure(self.rec.file_loc['tvb_data.zip']) - - - for file_paths in self.files_in_zip_dict.values(): - for file_path in file_paths: - self.fetch_data(file_path) - - if self.extract_dir.is_absolute(): - return str(self.extract_dir) - return str(Path.cwd()/self.extract_dir) - - def delete_data(self): + def delete_data(self)->None: """ Deletes the `tvb_data` folder in the `self.extract_dir` directory. """ @@ -218,3 +205,20 @@ def _read_cached_response(self): responses = dict(responses) return responses + + def _read_zipfile_structure(self, file_path): + """ + Reads the zipfile structure and returns the dictionary containing file_names as keys and list of relative paths having same file name. 
+ """ + with ZipFile(file_path) as zf: + file_names_in_zip = zf.namelist() + zf.close() + + file_names_dict = {} + for i in file_names_in_zip: + if str(Path(i).name) not in file_names_dict.keys(): + file_names_dict[str(Path(i).name)] = [i] + else: + file_names_dict[str(Path(i).name)].append(i) + return file_names_dict + diff --git a/tvb_library/tvb/datasets/zenodo.py b/tvb_library/tvb/datasets/zenodo.py index 055381b206..9a3c95eb07 100644 --- a/tvb_library/tvb/datasets/zenodo.py +++ b/tvb_library/tvb/datasets/zenodo.py @@ -32,7 +32,6 @@ import requests -import re import pooch from typing import List, Dict from pathlib import Path @@ -67,8 +66,7 @@ def download(self, path: str = None, fname=None) -> None: if path == None: path = pooch.os_cache("tvb") - - #convert pathlib.Path objects to strings. + path = str(path) for file in self.data["files"]: @@ -80,7 +78,6 @@ def download(self, path: str = None, fname=None) -> None: self.file_loc[f'{file_name}'] = file_path - print(f"file {file_name} is downloaded at {file_path}") @@ -139,14 +136,10 @@ def get_versions_info(self, recid) -> Dict: """ # needs ineternet - recid = self.get_record(recid).data['metadata']['relations']['version'][0]['parent']['pid_value'] - versions = {} - url = f"{self.base_url}records?q=conceptrecid:{recid}&all_versions=true" - for hit in requests.get(url).json()['hits']['hits']: version = hit['metadata']['version'] @@ -155,9 +148,4 @@ def get_versions_info(self, recid) -> Dict: continue versions[version] = recid - return versions - - - - diff --git a/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py b/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py index fdc511bdb2..2abcccb464 100644 --- a/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py +++ b/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py @@ -158,17 +158,3 @@ def test_file_name_variants(self): assert data.is_file() dataset.delete_data() - - - - - - - - - - - - - - #TODO add no 
interenet tests From 6bef08775158bd54b1d10bba2bbf57233e3503e1 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Tue, 22 Aug 2023 07:11:55 +0530 Subject: [PATCH 82/84] fix test fix --- .../datasets/tvb_zenodo_dataset_test.py | 25 ------------------- 1 file changed, 25 deletions(-) diff --git a/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py b/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py index 2abcccb464..b06e01cd5d 100644 --- a/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py +++ b/tvb_library/tvb/tests/library/datasets/tvb_zenodo_dataset_test.py @@ -66,7 +66,6 @@ def test_extract(self): dataset.delete_data() assert not matfile_dir.is_file() - excel_extract = Path(dataset.fetch_data(" ConnectivityTable_regions.xls")) assert excel_extract.is_file() @@ -74,13 +73,6 @@ def test_extract(self): assert not excel_extract.is_file() - - all_extract =Path(dataset.fetch_all_data()) - assert all_extract.is_dir() - assert all_extract - - dataset.delete_data() - def test_check_content(self): #check if connectivity_66 contains expected files. 
@@ -102,23 +94,6 @@ def test_check_content(self): assert "weights.txt" in zipfile.ZipFile(connectivity66_dir).namelist() dataset.delete_data() - dataset = TVBZenodoDataset(version="2.0.3", extract_dir="dataset") - extract_dir = Path(dataset.fetch_all_data()) - - assert (extract_dir/"tvb_data").is_dir() - assert (extract_dir/"tvb_data/mouse/allen_2mm/Connectivity.h5").is_file() - assert (extract_dir/"tvb_data/surfaceData/inner_skull_4096.zip").is_file() - - connectivity66 = extract_dir/"tvb_data/connectivity/connectivity_96.zip" - assert connectivity66.is_file() - - assert "centres.txt" in zipfile.ZipFile(connectivity66).namelist() - assert "info.txt" in zipfile.ZipFile(connectivity66).namelist() - assert "tract_lengths.txt" in zipfile.ZipFile(connectivity66).namelist() - assert "weights.txt" in zipfile.ZipFile(connectivity66).namelist() - - dataset.delete_data() - def test_file_name_variants(self): dataset = TVBZenodoDataset(version= "2.0.3", extract_dir="~/dataset") From bddded2fd0b3a94928124d74aaf71adf0af5ee4b Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sat, 26 Aug 2023 16:59:24 +0530 Subject: [PATCH 83/84] update base.py --- tvb_library/tvb/datasets/base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py index a84bb2cba2..5a49cacbbd 100644 --- a/tvb_library/tvb/datasets/base.py +++ b/tvb_library/tvb/datasets/base.py @@ -50,12 +50,12 @@ def __init__(self, version : str , extract_dir : str =None) -> None: def fetch_data(self) : - raise NotImplemented + return NotImplemented def get_version(self) -> str: return self.version def delete_data(self): - raise NotImplemented + return NotImplemented def _download(self): - raise NotImplemented + return NotImplemented From dea53d3852135a02208e3e861c05d3b3a498fc68 Mon Sep 17 00:00:00 2001 From: abhi_win Date: Sat, 26 Aug 2023 17:04:40 +0530 Subject: [PATCH 84/84] update base.py --- tvb_library/tvb/datasets/base.py | 6 +++--- 1 file 
changed, 3 insertions(+), 3 deletions(-) diff --git a/tvb_library/tvb/datasets/base.py b/tvb_library/tvb/datasets/base.py index 5a49cacbbd..ae030098f0 100644 --- a/tvb_library/tvb/datasets/base.py +++ b/tvb_library/tvb/datasets/base.py @@ -50,12 +50,12 @@ def __init__(self, version : str , extract_dir : str =None) -> None: def fetch_data(self) : - return NotImplemented + raise NotImplementedError def get_version(self) -> str: return self.version def delete_data(self): - return NotImplemented + raise NotImplementedError def _download(self): - return NotImplemented + raise NotImplementedError