Rotations in KKRimp code #136

Merged · 5 commits · Oct 31, 2023
9 changes: 1 addition & 8 deletions aiida_kkr/calculations/kkrimp.py
@@ -22,7 +22,7 @@
__copyright__ = (u'Copyright (c), 2018, Forschungszentrum Jülich GmbH, '
'IAS-1/PGI-1, Germany. All rights reserved.')
__license__ = 'MIT license, see LICENSE.txt file'
-__version__ = '0.8.2'
+__version__ = '0.9.0'
__contributors__ = (u'Philipp Rüßmann', u'Fabian Bertoldo')

#TODO: implement 'ilayer_center' consistency check
@@ -171,13 +171,6 @@ def define(cls, spec):
Note: The length of the theta, phi and fix_dir lists have to be equal to the number of atoms in the impurity cluster.
"""
)
-spec.input(
-'cleanup_outfiles',
-valid_type=Bool,
-required=False,
-default=lambda: Bool(False),
-help='Cleanup and compress output (works only in aiida-core<2.0 and breaks caching ability).'
-)

# define outputs
spec.output('output_parameters', valid_type=Dict, required=True, help='results of the KKRimp calculation')
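The help text retained in the hunk above describes the `initial_noco_angles` input of the KKRimp calculation: per-atom `theta`, `phi` and `fix_dir` lists whose length must equal the number of atoms in the impurity cluster. A minimal sketch of building such a node, using the `Dict(dict=...)` form that appears elsewhere in this diff; the angle values are purely illustrative and the unit convention should be checked against the plugin documentation:

```python
# Illustrative only: a Dict with one theta/phi/fix_dir entry per atom of a
# three-atom impurity cluster, matching the constraint quoted in the help text.
from aiida.orm import Dict

initial_noco_angles = Dict(
    dict={
        'theta': [90.0, 90.0, 0.0],      # polar angles, one value per impurity atom
        'phi': [0.0, 180.0, 0.0],        # azimuthal angles, one value per impurity atom
        'fix_dir': [True, True, False],  # whether each moment direction is kept fixed
    }
)
```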
20 changes: 1 addition & 19 deletions aiida_kkr/parsers/kkr.py
@@ -18,7 +18,7 @@
__copyright__ = (u'Copyright (c), 2017, Forschungszentrum Jülich GmbH, '
'IAS-1/PGI-1, Germany. All rights reserved.')
__license__ = 'MIT license, see LICENSE.txt file'
-__version__ = '0.7.0'
+__version__ = '0.8.0'
__contributors__ = ('Jens Broeder', u'Philipp Rüßmann')


@@ -229,21 +229,3 @@ def parse(self, debug=False, **kwargs):

if not success:
return self.exit_codes.ERROR_KKR_PARSING_FAILED
-else: # cleanup after parsing (only if parsing was successful)
-# cleanup only works below aiida-core v2.0
-if int(aiida_core_version.split('.')[0]) < 2:
-# delete completely parsed output files
-self.remove_unnecessary_files()
-# then (maybe) tar the output to save space
-# TODO needs implementing (see kkrimp parser)
-
-def remove_unnecessary_files(self):
-"""
-Remove files that are not needed anymore after parsing
-The information is completely parsed (i.e. in outdict of calculation)
-and keeping the file would just be a duplication.
-"""
-files_to_delete = [KkrCalculation._POTENTIAL, KkrCalculation._SHAPEFUN]
-for fileid in files_to_delete:
-if fileid in self.retrieved.list_object_names():
-self.retrieved.delete_object(fileid, force=True)
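The deleted branch was gated on the aiida-core major version, since this post-parsing cleanup only worked before aiida-core 2.0. A small sketch of that gate; the import alias `aiida_core_version` mirrors the name used in the removed code, and the exact import statement in the parser module is an assumption:

```python
# Sketch of the version gate used by the removed cleanup branch; the cleanup
# itself only made sense on aiida-core < 2.0, where retrieved files could
# still be deleted after parsing.
from aiida import __version__ as aiida_core_version

if int(aiida_core_version.split('.')[0]) < 2:
    print('aiida-core < 2.0: post-parsing cleanup would have been attempted')
else:
    print('aiida-core >= 2.0: no cleanup, retrieved files are left untouched')
```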
91 changes: 4 additions & 87 deletions aiida_kkr/parsers/kkrimp.py
@@ -21,7 +21,7 @@
__copyright__ = (u'Copyright (c), 2018, Forschungszentrum Jülich GmbH, '
'IAS-1/PGI-1, Germany. All rights reserved.')
__license__ = 'MIT license, see LICENSE.txt file'
-__version__ = '0.5.0'
+__version__ = '0.6.0'
__contributors__ = ('Philipp Rüßmann')


@@ -42,7 +42,7 @@ def __init__(self, calc):

# pylint: disable=protected-access

-def parse(self, debug=False, **kwargs):
+def parse(self, debug=False, ignore_nan=True, **kwargs): # pylint: disable=unexpected-keyword-arg
"""
Parse output data folder, store results in database.

@@ -113,7 +113,7 @@ def parse(self, debug=False, **kwargs):

# now we can parse the output files
success, msg_list, out_dict = KkrimpParserFunctions().parse_kkrimp_outputfile(
-out_dict, named_file_handles, debug=debug
+out_dict, named_file_handles, debug=debug, ignore_nan=ignore_nan
)

out_dict['parser_errors'] = msg_list
@@ -132,22 +132,7 @@ def parse(self, debug=False, **kwargs):
# create output node and link
self.out('output_parameters', Dict(dict=out_dict))

-# cleanup after parsing (only if parsing was successful), only works below aiida-core v2.0
-if success:
-if int(aiida_core_version.split('.')[0]) < 2:
-# check if we should do the cleanup or not
-cleanup_outfiles = False
-if 'cleanup_outfiles' in self.node.inputs:
-cleanup_outfiles = self.node.inputs.cleanup_outfiles.value
-if cleanup_outfiles:
-# reduce size of timing file
-self.cleanup_outfiles(files['out_timing'], ['Iteration number', 'time until scf starts'])
-# reduce size of out_log file
-self.cleanup_outfiles(files['out_log'], ['Iteration Number'])
-# delete completely parsed output files and create a tar ball to reduce size
-self.remove_unnecessary_files()
-self.final_cleanup()
-else:
+if not success:
return self.exit_codes.ERROR_PARSING_KKRIMPCALC

def _check_file_existance(self, files, keyname, fname, icrit, file_errors):
@@ -168,71 +153,3 @@ def _check_file_existance(self, files, keyname, fname, icrit, file_errors):
raise ValueError('icrit should be either 1 or 2')
file_errors.append((icrit, crit_level + f" File '{fname}' not found."))
files[keyname] = None

-def cleanup_outfiles(self, fileidentifier, keyslist):
-"""open file and remove unneeded output"""
-if fileidentifier is not None:
-lineids = []
-with self.retrieved.open(fileidentifier) as tfile:
-txt = tfile.readlines()
-for iline in range(len(txt)):
-for key in keyslist: # go through all keys
-if key in txt[iline]: # add line id to list if key has been found
-lineids.append(iline)
-# rewrite file deleting the middle part
-if len(lineids) > 1: # cut only if more than one iteration was found
-txt = txt[:lineids[0]] + \
-['# ... [removed output except for last iteration] ...\n'] + \
-txt[lineids[-1]:]
-with self.retrieved.open(fileidentifier, 'w') as tfilenew:
-tfilenew.writelines(txt)
-
-def remove_unnecessary_files(self):
-"""
-Remove files that are not needed anymore after parsing
-The information is completely parsed (i.e. in outdict of calculation)
-and keeping the file would just be a duplication.
-"""
-# first delete unused files (completely in parsed output)
-files_to_delete = [
-KkrimpCalculation._OUT_ENERGYSP_PER_ATOM, KkrimpCalculation._OUT_ENERGYTOT_PER_ATOM,
-KkrimpCalculation._SHAPEFUN
-]
-for fileid in files_to_delete:
-if fileid in self.retrieved.list_object_names():
-self.retrieved.delete_object(fileid, force=True)
-
-def final_cleanup(self):
-"""Create a tarball of the rest."""
-
-# short name for retrieved folder
-ret = self.retrieved
-
-# Now create tarball of output
-#
-# check if output has been packed to tarfile already
-# only if tarfile is not there we create the output tar file
-if KkrimpCalculation._FILENAME_TAR not in ret.list_object_names():
-# first create dummy file which is used to extract the full path that is given to tarfile.open
-with ret.open(KkrimpCalculation._FILENAME_TAR, 'w') as f:
-filepath_tar = f.name
-
-# now create tarfile and loop over content of retrieved directory
-to_delete = []
-with tarfile.open(filepath_tar, 'w:gz') as tf:
-for f in ret.list_object_names():
-with ret.open(f) as ftest:
-filesize = os.stat(ftest.name).st_size
-ffull = ftest.name
-if (
-f != KkrimpCalculation._FILENAME_TAR # ignore tar file
-and filesize > 0 # ignore empty files
-# ignore files starting with '.' like '.nfs...'
-and f[0] != '.'
-):
-tf.add(ffull, arcname=os.path.basename(ffull))
-to_delete.append(f)
-
-# finally delete files that have been added to tarfile
-for f in to_delete:
-ret.delete_object(f, force=True)
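The new `ignore_nan` flag is simply forwarded to `KkrimpParserFunctions().parse_kkrimp_outputfile`; its internal handling is not part of this diff. Purely as a generic illustration of what such a switch typically toggles (not aiida-kkr's actual implementation), screening NaN entries out of parsed values can look like this:

```python
# Generic NaN-screening helper, shown only to illustrate the kind of behaviour
# an ignore_nan flag usually controls; this is not aiida-kkr code.
import numpy as np


def screen_nan(values, ignore_nan=True, fill_value=0.0):
    """Return a list with NaN/inf entries replaced when ignore_nan is True."""
    arr = np.asarray(values, dtype=float)
    if ignore_nan:
        arr = np.nan_to_num(arr, nan=fill_value, posinf=fill_value, neginf=fill_value)
    return arr.tolist()


print(screen_nan([1.0, float('nan'), 2.5]))  # -> [1.0, 0.0, 2.5]
```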
22 changes: 19 additions & 3 deletions aiida_kkr/workflows/imp_BdG.py
@@ -144,7 +144,7 @@ def define(cls, spec):
spec.expose_inputs(
kkr_imp_wc,
namespace='imp_scf',
-include=('startpot', 'wf_parameters', 'gf_writeout', 'scf.params_overwrite')
+include=('startpot', 'wf_parameters', 'gf_writeout', 'scf.params_overwrite', 'scf.initial_noco_angles')
)
spec.inputs['imp_scf']['gf_writeout']['kkr'].required = False
spec.input('imp_scf.options', required=False, help='computer options for impurity scf step')
@@ -157,7 +157,11 @@
)

# inputs for impurity BdG scf
-spec.expose_inputs(kkr_imp_wc, namespace='BdG_scf', include=('startpot', 'remote_data_gf', 'gf_writeout'))
+spec.expose_inputs(
+kkr_imp_wc,
+namespace='BdG_scf',
+include=('startpot', 'remote_data_gf', 'gf_writeout', 'scf.initial_noco_angles')
+)
spec.inputs['BdG_scf']['gf_writeout']['kkr'].required = False
spec.input('BdG_scf.options', required=False, help='computer options for BdG impurity scf step')

@@ -166,7 +170,11 @@
)

# inputs for impurity dos
-spec.expose_inputs(kkr_imp_dos_wc, namespace='dos', include=('wf_parameters', 'gf_dos_remote', 'gf_writeout'))
+spec.expose_inputs(
+kkr_imp_dos_wc,
+namespace='dos',
+include=('wf_parameters', 'gf_dos_remote', 'gf_writeout', 'initial_noco_angles')
+)

spec.input(
'dos.gf_writeout.host_remote',
@@ -275,6 +283,8 @@ def imp_pot_calc(self):
builder.options = self.inputs.imp_scf.options
else:
builder.options = self.inputs.options
+if 'initial_noco_angles' in self.inputs.imp_scf:
+builder.scf.initial_noco_angles = self.inputs.imp_scf.initial_noco_angles # pylint: disable=no-member

if 'gf_writeout' in self.inputs.imp_scf:
if 'options' in self.inputs.imp_scf.gf_writeout:
@@ -319,6 +329,8 @@ def imp_BdG_calc(self):
builder.params_kkr_overwrite = self.inputs.BdG_scf.gf_writeout.params_kkr_overwrite
if 'kkr' in self.inputs:
builder.gf_writeout.kkr = builder.kkr # pylint: disable=no-member
+if 'initial_noco_angles' in self.inputs.BdG_scf:
+builder.scf.initial_noco_angles = self.inputs.BdG_scf.initial_noco_angles # pylint: disable=no-member

builder.remote_data_host = self.inputs.BdG_scf.remote_data_host

@@ -384,6 +396,10 @@ def DOS_calc(self):
else:
builder.options = self.inputs.options

+# set nonco angles
+if 'initial_noco_angles' in self.inputs.dos:
+builder.initial_noco_angles = self.inputs.dos.initial_noco_angles

# skip BdG step and just use the starting potential instead?
# faster and same accuracy?!
if 'startpot' in self.inputs.BdG_scf:
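With the ports exposed above, the non-collinear angles can be supplied per step of the BdG workflow. A hedged usage sketch for the DOS step, whose `dos` namespace exposes `initial_noco_angles` directly; the workflow class name `kkrimp_BdG_wc`, the builder path and a configured AiiDA profile are assumptions based on this file, and the `imp_scf` and `BdG_scf` namespaces accept the same kind of `Dict` via their ports added in this diff:

```python
# Usage sketch (class name and entry module assumed from this PR); only the
# dos.initial_noco_angles port added above is shown, with illustrative values.
from aiida.orm import Dict
from aiida_kkr.workflows.imp_BdG import kkrimp_BdG_wc

builder = kkrimp_BdG_wc.get_builder()
builder.dos.initial_noco_angles = Dict(
    dict={
        'theta': [90.0],     # one entry per impurity-cluster atom
        'phi': [0.0],
        'fix_dir': [True],
    }
)
```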
35 changes: 5 additions & 30 deletions aiida_kkr/workflows/kkr_imp.py
@@ -13,7 +13,7 @@
from aiida_kkr.tools import test_and_get_codenode, neworder_potential_wf, update_params_wf
from aiida_kkr.workflows.gf_writeout import kkr_flex_wc
from aiida_kkr.workflows.voro_start import kkr_startpot_wc
-from aiida_kkr.workflows.kkr_imp_sub import kkr_imp_sub_wc, clean_sfd
+from aiida_kkr.workflows.kkr_imp_sub import kkr_imp_sub_wc
import numpy as np
from aiida_kkr.tools.save_output_nodes import create_out_dict_node

@@ -101,7 +101,8 @@ def define(cls, spec):
# 'kkrimp',
'options',
# 'wf_parameters',
-'params_overwrite'
+'params_overwrite',
+'initial_noco_angles'
)
)

@@ -338,7 +339,6 @@ def start(self):
self.ctx.hfield = wf_dict.get('hfield', self._wf_default['hfield'])
self.ctx.init_pos = wf_dict.get('init_pos', self._wf_default['init_pos'])
self.ctx.accuracy_params = wf_dict.get('accuracy_params', self._wf_default['accuracy_params'])
-self.ctx.do_final_cleanup = wf_dict.get('do_final_cleanup', self._wf_default['do_final_cleanup'])
# set up new parameter dict to pass to kkrimp subworkflow later
self.ctx.kkrimp_params_dict = Dict({
'nsteps': self.ctx.nsteps,
@@ -354,15 +354,11 @@
'hfield': self.ctx.hfield,
'init_pos': self.ctx.init_pos,
'accuracy_params': self.ctx.accuracy_params,
-'do_final_cleanup': self.ctx.do_final_cleanup
})

# retrieve option for kkrlfex files
self.ctx.retrieve_kkrflex = wf_dict.get('retrieve_kkrflex', self._wf_default['retrieve_kkrflex'])

-# list of things that are cleaned if everything ran through
-self.ctx.sfd_final_cleanup = []

# report the chosen parameters to the user
self.report(
'INFO: use the following parameter:\n'
@@ -738,8 +734,6 @@ def construct_startpot(self):

# add starting potential for kkrimp calculation to context
self.ctx.startpot_kkrimp = startpot_kkrimp
-# add to list for final cleanup
-self.ctx.sfd_final_cleanup.append(startpot_kkrimp)

self.report(
'INFO: created startpotential (pid: {}) for the impurity calculation '
@@ -793,6 +787,8 @@ def run_kkrimp_scf(self):
builder.params_overwrite = self.inputs.scf.params_overwrite
if 'options' in self.inputs.scf:
builder.options = self.inputs.scf.options
+if 'initial_noco_angles' in self.inputs.scf:
+builder.initial_noco_angles = self.inputs.scf.initial_noco_angles
builder.wf_parameters = kkrimp_params
future = self.submit(builder)

@@ -853,10 +849,6 @@ def return_results(self):
self.out('converged_potential', self.ctx.kkrimp_scf_sub.outputs.host_imp_pot)
self.out('remote_data_gf', self.ctx.gf_remote)

-# cleanup things that are not needed anymore
-if self.ctx.do_final_cleanup:
-self.final_cleanup()

# print final message before exiting
self.report('INFO: created 3 output nodes for the KKR impurity workflow.')
self.report(
@@ -869,23 +861,6 @@
self.report(self.exit_codes.ERROR_KKRIMP_SUB_WORKFLOW_FAILURE) # pylint: disable=no-member
return self.exit_codes.ERROR_KKRIMP_SUB_WORKFLOW_FAILURE # pylint: disable=no-member

-def final_cleanup(self):
-"""
-Remove unneeded files to save space
-"""
-for sfd in self.ctx.sfd_final_cleanup:
-clean_sfd(sfd)
-if self.ctx.create_startpot:
-kkr_startpot = self.ctx.last_voro_calc
-vorocalc = kkr_startpot.outputs.last_voronoi_remote.get_incoming(link_label_filter=u'remote_folder'
-).first().node
-ret = vorocalc.outputs.retrieved
-for fname in ret.list_object_names():
-if fname not in [VoronoiCalculation._OUTPUT_FILE_NAME, VoronoiCalculation._OUT_POTENTIAL_voronoi]:
-# delete all except vor default output file
-with ret.open(fname) as f:
-ret.delete_object(fname, force=True)

def error_handler(self):
"""Capture errors raised in validate_input"""
if self.ctx.exit_code is not None:
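The kkr_imp_wc changes follow the same pattern: the `scf` namespace now exposes `initial_noco_angles` from the sub-workflow, and `run_kkrimp_scf` forwards it to the builder. A short sketch of setting it on the top-level workflow, with illustrative values and assuming a configured AiiDA profile:

```python
# Sketch: attach non-collinear starting angles to the kkr_imp_wc scf namespace,
# following the port added in the define() hunk and forwarded in run_kkrimp_scf.
from aiida.orm import Dict
from aiida_kkr.workflows.kkr_imp import kkr_imp_wc

builder = kkr_imp_wc.get_builder()
builder.scf.initial_noco_angles = Dict(
    dict={'theta': [90.0], 'phi': [0.0], 'fix_dir': [True]}  # one entry per impurity atom
)
```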