Skip to content

Commit

Permalink
Try to handle API breaking changes in Memory
Browse files Browse the repository at this point in the history
  • Loading branch information
gmatteo committed Jul 22, 2024
1 parent 272d046 commit b0405a9
Show file tree
Hide file tree
Showing 3 changed files with 27 additions and 91 deletions.
5 changes: 4 additions & 1 deletion abipy/electrons/arpes.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,10 @@ def model_from_ebands(cls, ebands, tmesh=(0, 300, 600), poorman_polaron=False):
#aw: [nwr, ntemp, max_nbcalc, nkcalc, nsppol] array
#aw_meshes: [max_nbcalc, nkcalc, nsppol] array with energy mesh in eV
from abipy.tools.numtools import lorentzian
from scipy.integrate import cumtrapz
try :
from scipy.integrate import cumulative_trapezoid as cumtrapz
except ImportError:
from scipy.integrate import cumtrapz
for spin in ebands.spins:
for ik, kpt in enumerate(ebands.kpoints):
for band in range(ebands.nband_sk[spin, ik]):
Expand Down
85 changes: 0 additions & 85 deletions abipy/flowtk/flows.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@
from monty.termcolor import cprint, colored, cprint_map, get_terminal_size
from monty.inspect import find_top_pyfile
from monty.json import MSONable
from pymatgen.core.units import Memory
from abipy.tools.iotools import AtomicFile
from abipy.tools.serialization import pmg_pickle_load, pmg_pickle_dump, pmg_serialize
from abipy.tools.typing import Figure, TYPE_CHECKING
Expand Down Expand Up @@ -2479,90 +2478,6 @@ def make_light_tarfile(self, name=None):
name = os.path.basename(self.workdir) + "-light.tar.gz" if name is None else name
return self.make_tarfile(name=name, exclude_dirs=["outdata", "indata", "tmpdata"])

def make_tarfile(self, name=None, max_filesize=None, exclude_exts=None, exclude_dirs=None, verbose=0, **kwargs):
    """
    Create a gzipped tarball of the flow working directory.

    Args:
        name: Name of the tarball file. Set to ``os.path.basename(flow.workdir) + ".tar.gz"`` if name is None.
        max_filesize (int or string with unit): a file is included in the tar file only if its size <= max_filesize.
            Can be specified in bytes e.g. ``max_filesize=1024`` or with a string with unit e.g. ``max_filesize="1 Mb"``.
            No check is done if max_filesize is None.
        exclude_exts: List of file extensions to be excluded from the tar file.
        exclude_dirs: List of directory basenames to be excluded.
        verbose (int): Verbosity level.
        kwargs: keyword arguments passed to the :class:`TarFile` constructor.

    Returns: The name of the tarfile.
    """
    def any2bytes(s):
        """Convert string or number to memory in bytes."""
        if not is_string(s):
            return int(s)
        # Support both the deprecated (from_string) and the new (from_str) pymatgen Memory API.
        try:
            return int(Memory.from_string(s).to("b"))
        except Exception:
            return int(Memory.from_str(s).to("b"))

    if max_filesize is not None:
        # BUGFIX: the previous version computed the value for string input but never
        # returned it, so max_filesize silently became None and the filter was disabled.
        max_filesize = any2bytes(max_filesize)

    if exclude_exts:
        # Add/remove ".nc" so that we can simply pass "GSR" instead of "GSR.nc".
        # Moreover this trick allows one to treat WFK.nc and WFK file on the same footing.
        exts = []
        for e in list_strings(exclude_exts):
            exts.append(e)
            if e.endswith(".nc"):
                exts.append(e.replace(".nc", ""))
            else:
                exts.append(e + ".nc")
        exclude_exts = exts

    def filter_tarinfo(tarinfo):
        """
        Take a TarInfo object and return it (unchanged) if the member should be archived.
        Return None to exclude the member from the archive.
        """
        # Skip links.
        if tarinfo.issym() or tarinfo.islnk():
            if verbose: print("Excluding link: %s" % tarinfo.name)
            return None

        # Check size in bytes.
        if max_filesize is not None and tarinfo.size > max_filesize:
            if verbose: print("Excluding %s due to max_filesize" % tarinfo.name)
            return None

        # Filter filenames by extension.
        if exclude_exts and any(tarinfo.name.endswith(ext) for ext in exclude_exts):
            if verbose: print("Excluding %s due to extension" % tarinfo.name)
            return None

        # Exclude directories (use dir basenames).
        if exclude_dirs and any(dir_name in exclude_dirs for dir_name in tarinfo.name.split(os.path.sep)):
            if verbose: print("Excluding %s due to exclude_dirs" % tarinfo.name)
            return None

        return tarinfo

    back = os.getcwd()
    os.chdir(os.path.join(self.workdir, ".."))

    import tarfile
    name = os.path.basename(self.workdir) + ".tar.gz" if name is None else name
    try:
        with tarfile.open(name=name, mode='w:gz', **kwargs) as tar:
            tar.add(os.path.basename(self.workdir), arcname=None, recursive=True, filter=filter_tarinfo)

            # Add the script used to generate the flow.
            if self.pyfile is not None and os.path.exists(self.pyfile):
                tar.add(self.pyfile)
    finally:
        # Restore the original working directory even if the archive creation failed.
        os.chdir(back)

    return name

def explain(self, what="all", nids=None, verbose=0) -> str:
"""
Return string with the docstrings of the works/tasks in the Flow grouped by class.
Expand Down
28 changes: 23 additions & 5 deletions abipy/flowtk/qadapters.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,11 +215,19 @@ def __init__(self, **kwargs):

# Convert memory to megabytes.
m = str(kwargs.pop("mem_per_node"))
# Support for old pymatgen API

try:
self.mem_per_node = int(Memory.from_string(m).to("Mb"))
# Support for old pymatgen API
if hasattr(Memory, "from_string"):
self.mem_per_node = int(Memory.from_string(m).to("Mb"))
else:
self.mem_per_node = int(Memory.from_str(m).to("Mb"))
except:
self.mem_per_node = int(Memory.from_str(m).to("Mb"))
d = {"Kb": "KB", "Mb": "MB", "Gb": "GB", "Tb": "TB"}
for old, new in d.items():
m = m.replace(old, new)

self.mem_per_node = int(Memory.from_str(m).to("MB"))

if self.mem_per_node <= 0 or self.sockets_per_node <= 0 or self.cores_per_socket <= 0:
raise ValueError("invalid parameters: %s" % kwargs)
Expand Down Expand Up @@ -259,10 +267,17 @@ def divmod_node(self, mpi_procs: int, omp_threads: int) -> tuple[int, int]:
return divmod(mpi_procs * omp_threads, self.cores_per_node)

def as_dict(self) -> dict:
    """Return a JSON-serializable dict representation of the hardware specs."""
    # Old pymatgen Memory accepts 'Mb'; the new API requires 'MB'.
    # Narrow except clause: a bare `except:` would also swallow KeyboardInterrupt/SystemExit.
    try:
        mem_per_node = str(Memory(val=self.mem_per_node, unit='Mb'))
    except Exception:
        mem_per_node = str(Memory(val=self.mem_per_node, unit='MB'))

    return {'num_nodes': self.num_nodes,
            'sockets_per_node': self.sockets_per_node,
            'cores_per_socket': self.cores_per_socket,
            'mem_per_node': mem_per_node}

@classmethod
def from_dict(cls, d: dict) -> Hardware:
Expand Down Expand Up @@ -901,7 +916,10 @@ def set_master_mem_overhead(self, mem_mb):
@property
def total_mem(self) -> Memory:
    """Total memory required by the job in megabytes (per-proc memory times MPI procs plus master overhead)."""
    mem_mb = self.mem_per_proc * self.mpi_procs + self.master_mem_overhead
    # Old pymatgen Memory accepts 'Mb'; the new API spells the unit 'MB'.
    # Narrow except clause: a bare `except:` would also swallow KeyboardInterrupt/SystemExit.
    try:
        return Memory(mem_mb, "Mb")
    except Exception:
        return Memory(mem_mb, "MB")

@abc.abstractmethod
def cancel(self, job_id: int) -> int:
Expand Down

0 comments on commit b0405a9

Please sign in to comment.