mirror of https://github.com/gsi-upm/soil
Compare commits
7 Commits
a3ea434f23
...
b0add8552e
Author | SHA1 | Date |
---|---|---|
J. Fernando Sánchez | b0add8552e | 5 years ago |
J. Fernando Sánchez | 1cf85ea450 | 5 years ago |
J. Fernando Sánchez | c32e167fb8 | 5 years ago |
J. Fernando Sánchez | 5f68b5321d | 5 years ago |
J. Fernando Sánchez | 2a2843bd19 | 5 years ago |
J. Fernando Sánchez | d1006bd55c | 5 years ago |
J. Fernando Sánchez | 9bc036d185 | 5 years ago |
@ -1,4 +1,7 @@
|
|||||||
test:
|
quick-test:
|
||||||
docker-compose exec dev python -m pytest -s -v
|
docker-compose exec dev python -m pytest -s -v
|
||||||
|
|
||||||
.PHONY: test
|
test:
|
||||||
|
docker run -t -v $$PWD:/usr/src/app -w /usr/src/app python:3.7 python setup.py test
|
||||||
|
|
||||||
|
.PHONY: test
|
||||||
|
@ -0,0 +1,29 @@
|
|||||||
|
---
|
||||||
|
vars:
|
||||||
|
bounds:
|
||||||
|
x1: [0, 1]
|
||||||
|
x2: [1, 2]
|
||||||
|
fixed:
|
||||||
|
x3: ["a", "b", "c"]
|
||||||
|
sampler: "SALib.sample.morris.sample"
|
||||||
|
samples: 10
|
||||||
|
template: |
|
||||||
|
group: simple
|
||||||
|
num_trials: 1
|
||||||
|
interval: 1
|
||||||
|
max_time: 2
|
||||||
|
seed: "CompleteSeed!"
|
||||||
|
dump: false
|
||||||
|
network_params:
|
||||||
|
generator: complete_graph
|
||||||
|
n: 10
|
||||||
|
network_agents:
|
||||||
|
- agent_type: CounterModel
|
||||||
|
weight: {{ x1 }}
|
||||||
|
state:
|
||||||
|
id: 0
|
||||||
|
- agent_type: AggregatedCounter
|
||||||
|
weight: {{ 1 - x1 }}
|
||||||
|
environment_params:
|
||||||
|
name: {{ x3 }}
|
||||||
|
skip_test: true
|
@ -1,8 +1,10 @@
|
|||||||
nxsim
|
nxsim>=0.1.2
|
||||||
simpy
|
simpy
|
||||||
networkx>=2.0
|
networkx>=2.0
|
||||||
numpy
|
numpy
|
||||||
matplotlib
|
matplotlib
|
||||||
pyyaml
|
pyyaml>=5.1
|
||||||
pandas
|
pandas>=0.23
|
||||||
scipy
|
scipy==1.2.1 # scipy 1.3.0rc1 is not compatible with salib
|
||||||
|
SALib>=1.3
|
||||||
|
Jinja2
|
||||||
|
@ -1 +1 @@
|
|||||||
0.13.8
|
0.14.0
|
||||||
|
@ -1,18 +0,0 @@
|
|||||||
from . import BaseAgent
|
|
||||||
|
|
||||||
import os.path
|
|
||||||
import matplotlib
|
|
||||||
import matplotlib.pyplot as plt
|
|
||||||
import networkx as nx
|
|
||||||
|
|
||||||
|
|
||||||
class DrawingAgent(BaseAgent):
|
|
||||||
"""
|
|
||||||
Agent that draws the state of the network.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def step(self):
|
|
||||||
# Outside effects
|
|
||||||
f = plt.figure()
|
|
||||||
nx.draw(self.env.G, node_size=10, width=0.2, pos=nx.spring_layout(self.env.G, scale=100), ax=f.add_subplot(111))
|
|
||||||
f.savefig(os.path.join(self.env.get_path(), "graph-"+str(self.env.now)+".png"))
|
|
@ -0,0 +1,175 @@
|
|||||||
|
import os
|
||||||
|
import time
|
||||||
|
from io import BytesIO
|
||||||
|
|
||||||
|
import matplotlib.pyplot as plt
|
||||||
|
import networkx as nx
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
from .serialization import deserialize
|
||||||
|
from .utils import open_or_reuse, logger, timer
|
||||||
|
|
||||||
|
|
||||||
|
from . import utils
|
||||||
|
|
||||||
|
|
||||||
|
def for_sim(simulation, names, *args, **kwargs):
    '''Return the set of exporters for a simulation, given the exporter names'''
    instances = []
    for exporter_name in names:
        cls = deserialize(exporter_name, known_modules=['soil.exporters'])
        instances.append(cls(simulation, *args, **kwargs))
    return instances
|
||||||
|
|
||||||
|
|
||||||
|
class DryRunner(BytesIO):
    '''
    In-memory replacement for an output file, used in dry-run mode.

    Nothing ever reaches disk: writes are buffered in memory and,
    optionally, mirrored to ``copy_to`` prefixed with the target file
    name. On close, the buffered content is logged instead of saved.
    '''

    def __init__(self, fname, *args, copy_to=None, **kwargs):
        super().__init__(*args, **kwargs)
        self.__fname = fname
        self.__copy_to = copy_to

    def write(self, txt):
        if self.__copy_to:
            self.__copy_to.write('{}:::{}'.format(self.__fname, txt))
        # BytesIO only accepts bytes; encode text transparently.
        if isinstance(txt, str):
            super().write(bytes(txt, 'utf-8'))
        else:
            super().write(txt)

    def close(self):
        content = self.getvalue().decode()
        logger.info('**Not** written to {} (dry run mode):\n\n{}\n\n'.format(self.__fname,
                                                                             content))
        super().close()
|
||||||
|
|
||||||
|
|
||||||
|
class Exporter:
    '''
    Base class for all exporters. It defines the hooks a simulation
    invokes (``start``, ``trial_end``, ``end``) plus an ``output`` helper
    to open result files. Subclasses override only what they need.
    '''

    def __init__(self, simulation, outdir=None, dry_run=None, copy_to=None):
        self.sim = simulation
        base = outdir or os.getcwd()
        # Results live under <outdir>/<group>/<simulation name>
        self.outdir = os.path.join(base,
                                   simulation.group or '',
                                   simulation.name)
        self.dry_run = dry_run
        self.copy_to = copy_to

    def start(self):
        '''Method to call when the simulation starts'''

    def end(self):
        '''Method to call when the simulation ends'''

    def trial_end(self, env):
        '''Method to call when a trial ends'''

    def output(self, f, mode='w', **kwargs):
        '''Open an output target; in dry-run mode nothing touches disk.'''
        if self.dry_run:
            target = DryRunner(f, copy_to=self.copy_to)
        else:
            target = f
            try:
                if not os.path.isabs(target):
                    target = os.path.join(self.outdir, target)
            except TypeError:
                # `f` was already a file-like object, not a path
                pass
        return open_or_reuse(target, mode=mode, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class Default(Exporter):
    '''Default exporter. Writes CSV and sqlite results, as well as the simulation YAML'''

    def start(self):
        if self.dry_run:
            logger.info('NOT dumping results')
            return
        logger.info('Dumping results to %s', self.outdir)
        self.sim.dump_yaml(outdir=self.outdir)

    def trial_end(self, env):
        if self.dry_run:
            return
        label = 'Dumping simulation {} trial {}'.format(self.sim.name,
                                                        env.name)
        with timer(label):
            with self.output('{}.sqlite'.format(env.name), mode='wb') as f:
                env.dump_sqlite(f)
|
||||||
|
|
||||||
|
|
||||||
|
class CSV(Exporter):
    '''Dump the environment history of every trial as a CSV file.'''

    def trial_end(self, env):
        if self.dry_run:
            return
        label = '[CSV] Dumping simulation {} trial {}'.format(self.sim.name,
                                                              env.name)
        with timer(label):
            with self.output('{}.csv'.format(env.name)) as f:
                env.dump_csv(f)
|
||||||
|
|
||||||
|
|
||||||
|
class Gexf(Exporter):
    '''Dump the network of every trial as a GEXF file.'''

    def trial_end(self, env):
        if not self.dry_run:
            # Bug fix: the timer label wrongly said '[CSV]' (copy-paste
            # from the CSV exporter); it now reports '[GEXF]'.
            with timer('[GEXF] Dumping simulation {} trial {}'.format(self.sim.name,
                                                                      env.name)):
                with self.output('{}.gexf'.format(env.name), mode='wb') as f:
                    env.dump_gexf(f)
|
||||||
|
|
||||||
|
|
||||||
|
class Dummy(Exporter):
    '''Debug exporter: writes simulation milestones to a file named `dummy`.'''

    def start(self):
        with self.output('dummy', 'w') as f:
            f.write('simulation started @ {}\n'.format(time.time()))

    def trial_end(self, env):
        # NOTE(review): mode 'w' truncates the file on every trial, so only
        # the last trial's history survives — confirm this is intentional.
        with self.output('dummy', 'w') as f:
            for record in env.history_to_tuples():
                f.write(','.join(map(str, record)))
                f.write('\n')

    def end(self):
        with self.output('dummy', 'a') as f:
            f.write('simulation ended @ {}\n'.format(time.time()))
|
||||||
|
|
||||||
|
|
||||||
|
class Distribution(Exporter):
    '''
    Write the distribution of agent states at the end of each trial,
    the mean value, and its deviation.

    Numeric attributes are aggregated as means; attributes whose mean
    cannot be computed (non-numeric) fall back to value counts.
    Fix: removed the dead locals `vc` and `stats` that were assigned
    but never used.
    '''

    def start(self):
        # Accumulated across trials; flushed to CSV in end()
        self.means = []
        self.counts = []

    def trial_end(self, env):
        # env[None, None, None].df() — full history as a pandas DataFrame;
        # assumes columns are a MultiIndex keyed by attribute name (level 0)
        # — TODO confirm against the environment's history API.
        df = env[None, None, None].df()
        ix = df.index[-1]  # final snapshot of the trial
        attrs = df.columns.levels[0]
        for a in attrs:
            t = df.loc[(ix, a)]
            try:
                self.means.append(('mean', a, t.mean()))
            except TypeError:
                # Non-numeric attribute: record per-value counts instead
                for name, count in t.value_counts().iteritems():
                    self.counts.append(('count', a, name, count))

    def end(self):
        dfm = pd.DataFrame(self.means, columns=['metric', 'key', 'value'])
        dfc = pd.DataFrame(self.counts, columns=['metric', 'key', 'value', 'count'])
        dfm = dfm.groupby(by=['key']).agg(['mean', 'std', 'count', 'median', 'max', 'min'])
        dfc = dfc.groupby(by=['key', 'value']).agg(['mean', 'std', 'count', 'median', 'max', 'min'])
        with self.output('counts.csv') as f:
            dfc.to_csv(f)
        with self.output('metrics.csv') as f:
            dfm.to_csv(f)
|
||||||
|
|
||||||
|
class GraphDrawing(Exporter):
    '''Draw the final network of each trial as a PNG image.'''

    def trial_end(self, env):
        # Outside effects
        fig = plt.figure()
        nx.draw(env.G, node_size=10, width=0.2,
                pos=nx.spring_layout(env.G, scale=100),
                ax=fig.add_subplot(111))
        # Bug fix: the original shadowed the figure variable `f` with the
        # file handle and then called `f.savefig(f)` on the *file* object;
        # it also used bare open() (read mode by default). Use the
        # exporter's output helper in binary-write mode instead, which
        # also honors outdir and dry-run mode like the other exporters.
        with self.output('graph-{}.png'.format(env.name), mode='wb') as out:
            fig.savefig(out)
|
@ -0,0 +1,201 @@
|
|||||||
|
import os
|
||||||
|
import logging
|
||||||
|
import ast
|
||||||
|
import sys
|
||||||
|
import importlib
|
||||||
|
from glob import glob
|
||||||
|
from itertools import product, chain
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
import networkx as nx
|
||||||
|
|
||||||
|
from jinja2 import Template
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger('soil')
|
||||||
|
logger.setLevel(logging.INFO)
|
||||||
|
|
||||||
|
|
||||||
|
def load_network(network_params, dir_path=None):
    '''
    Build a networkx graph from a configuration dictionary.

    Resolution order:
      * ``None`` params -> empty ``nx.Graph()``.
      * ``path`` key -> read the file with the ``nx.readwrite`` reader
        matching its extension (GEXF gets version/node_type tweaks).
      * ``generator`` key -> deserialize and call a generator function
        with the remaining params as keyword arguments.
      * otherwise -> empty ``nx.Graph()``.

    Raises AttributeError when the file extension has no known reader.
    '''
    if network_params is None:
        return nx.Graph()
    path = network_params.get('path', None)
    if path:
        if dir_path and not os.path.isabs(path):
            path = os.path.join(dir_path, path)
        extension = os.path.splitext(path)[1][1:]
        kwargs = {}
        if extension == 'gexf':
            kwargs['version'] = '1.2draft'
            kwargs['node_type'] = int
        try:
            method = getattr(nx.readwrite, 'read_' + extension)
        except AttributeError:
            # Fix: include the offending extension in the error message
            raise AttributeError('Unknown format: {}'.format(extension))
        return method(path, **kwargs)

    net_args = network_params.copy()
    if 'generator' not in net_args:
        return nx.Graph()

    net_gen = net_args.pop('generator')

    # Fix: only touch sys.path when a real directory was given; the
    # original appended None to sys.path when dir_path was unset.
    if dir_path and dir_path not in sys.path:
        sys.path.append(dir_path)

    method = deserializer(net_gen,
                          known_modules=['networkx.generators', ])

    return method(**net_args)
|
||||||
|
|
||||||
|
|
||||||
|
def load_file(infile):
    '''Read every configuration in a YAML file, expanding any templates.'''
    with open(infile, 'r') as f:
        docs = load_string(f)
        return list(chain.from_iterable(expand_template(doc) for doc in docs))
|
||||||
|
|
||||||
|
|
||||||
|
def load_string(string):
    '''Lazily parse every YAML document in a string or stream.'''
    for document in yaml.load_all(string, Loader=yaml.FullLoader):
        yield document
|
||||||
|
|
||||||
|
|
||||||
|
def expand_template(config):
    '''
    Expand a templated configuration into concrete configurations.

    A config without a ``template`` key is yielded unchanged. Otherwise
    the Jinja2 template is rendered once per parameter combination:
    numeric parameters are drawn by a SALib-style sampler from
    ``vars.bounds``, crossed with every combination of the categorical
    values in ``vars.lists``.

    Raises ValueError when ``vars`` is missing, when the template yields
    more than one document, or when it contains a ``name`` key.
    '''
    if 'template' not in config:
        yield config
        return
    if 'vars' not in config:
        raise ValueError(('You must provide a definition of variables'
                          ' for the template.'))

    template = Template(config['template'])

    sampler_name = config.get('sampler', 'SALib.sample.morris.sample')
    n_samples = int(config.get('samples', 100))
    sampler = deserializer(sampler_name)
    bounds = config['vars']['bounds']

    problem = {
        'num_vars': len(bounds),
        'names': list(bounds.keys()),
        'bounds': list(v for v in bounds.values())
    }
    samples = sampler(problem, n_samples)

    # Categorical variables are not sampled: every combination is used.
    # Fix: the example configs declare these under `vars.fixed`, while the
    # code only read `vars.lists`; accept `fixed` as a backward-compatible
    # alias so those examples actually take effect.
    lists = config['vars'].get('lists', config['vars'].get('fixed', {}))
    names = list(lists.keys())
    values = list(lists.values())
    combs = list(product(*values))

    allnames = names + problem['names']
    allvalues = [(list(i[0]) + list(i[1])) for i in product(combs, samples)]
    params = list(map(lambda x: dict(zip(allnames, x)), allvalues))

    # Render once with dummy values to validate the template's shape
    # before running the full expansion.
    blank_str = template.render({k: 0 for k in allnames})
    blank = list(load_string(blank_str))
    if len(blank) > 1:
        raise ValueError('Templates must not return more than one configuration')
    if 'name' in blank[0]:
        raise ValueError('Templates cannot be named, use group instead')

    # (removed dead `confs = []` accumulator that was never used)
    for ps in params:
        string = template.render(ps)
        for c in load_string(string):
            yield c
|
||||||
|
|
||||||
|
|
||||||
|
def load_files(*patterns, **kwargs):
    '''Yield (config, abspath) pairs for every file matching the glob patterns.'''
    for pattern in patterns:
        for match in glob(pattern, **kwargs):
            abspath = os.path.abspath(match)
            for config in load_file(match):
                config.setdefault('dir_path', os.path.dirname(abspath))
                yield config, abspath
|
||||||
|
|
||||||
|
|
||||||
|
def load_config(config):
    '''Normalize a configuration source into (config, path) pairs.

    A dict is yielded as-is (with no path); anything else is treated as
    a file name / glob pattern and loaded from disk.
    '''
    if not isinstance(config, dict):
        yield from load_files(config)
    else:
        yield config, None
|
||||||
|
|
||||||
|
|
||||||
|
builtins = importlib.import_module('builtins')
|
||||||
|
|
||||||
|
def name(value, known_modules=None):
    '''Return a name that can be imported, to serialize/deserialize an object.

    Builtins, __main__ members, and classes found in one of
    `known_modules` are returned unqualified; everything else is
    returned as 'module.ClassName'. Instances are mapped to their class
    first. `None` serializes to the string 'None'.

    Fix: replaced the mutable default argument `known_modules=[]` with a
    None sentinel (behavior unchanged; the list was only read, but the
    shared-default pattern is a latent hazard).
    '''
    if value is None:
        return 'None'
    if not isinstance(value, type):  # Get the class name first
        value = type(value)
    tname = value.__name__
    if hasattr(builtins, tname):
        return tname
    modname = value.__module__
    if modname == '__main__':
        return tname
    known_modules = known_modules or []
    if modname in known_modules:
        return tname
    for kmod in known_modules:
        if not kmod:
            continue
        module = importlib.import_module(kmod)
        if hasattr(module, tname):
            return tname
    return '{}.{}'.format(modname, tname)
|
||||||
|
|
||||||
|
|
||||||
|
def serializer(type_):
    '''Return a function that turns a value of the named type into text.

    Builtin types (except str) serialize via repr(); everything else is
    passed through unchanged.
    '''
    if type_ == 'str':
        return lambda x: x
    if hasattr(builtins, type_):
        return repr
    return lambda x: x
|
||||||
|
|
||||||
|
|
||||||
|
def serialize(v, known_modules=[]):
    '''Get a text representation of an object.'''
    tname = name(v, known_modules=known_modules)
    return serializer(tname)(v), tname
|
||||||
|
|
||||||
|
def deserializer(type_, known_modules=[]):
    '''Return a callable that builds an object of the named type from text.

    Non-string inputs are assumed to be already-deserialized callables.
    'str' and 'None' get identity/constant factories; builtin type names
    are parsed with ast.literal_eval; anything else is looked up in
    known_modules (or as a fully-qualified 'module.Class' path), and its
    `deserialize` attribute is preferred when present.
    '''
    if type(type_) != str:  # Already deserialized
        return type_
    if type_ == 'str':
        return lambda x='': x
    if type_ == 'None':
        return lambda x=None: None
    if hasattr(builtins, type_):  # Check if it's a builtin type
        cls = getattr(builtins, type_)
        return lambda x=None: ast.literal_eval(x) if x is not None else cls()
    # Otherwise, see if we can find the module and the class
    options = [(mod, type_) for mod in (known_modules or []) if mod]

    if '.' in type_:  # Fully qualified module
        module, type_ = type_.rsplit(".", 1)
        options.append((module, type_))

    errors = []
    for modname, tname in options:
        try:
            candidate = getattr(importlib.import_module(modname), tname)
            return getattr(candidate, 'deserialize', candidate)
        except (ModuleNotFoundError, AttributeError) as ex:
            errors.append((modname, tname, ex))
    raise Exception('Could not find type {}. Tried: {}'.format(type_, errors))
|
||||||
|
|
||||||
|
|
||||||
|
def deserialize(type_, value=None, **kwargs):
    '''Get an object from a text representation'''
    if not isinstance(type_, str):
        return type_
    factory = deserializer(type_, **kwargs)
    return factory if value is None else factory(value)
|
@ -0,0 +1,110 @@
|
|||||||
|
import os
|
||||||
|
import io
|
||||||
|
import tempfile
|
||||||
|
import shutil
|
||||||
|
from time import time
|
||||||
|
|
||||||
|
from unittest import TestCase
|
||||||
|
from soil import exporters
|
||||||
|
from soil.utils import safe_open
|
||||||
|
from soil import simulation
|
||||||
|
|
||||||
|
|
||||||
|
class Dummy(exporters.Exporter):
    '''Exporter stub that records, at class level, which hooks ran.'''

    # Class-level flags/counters so the test can inspect them after the run
    started = False
    trials = 0
    ended = False
    total_time = 0

    def start(self):
        type(self).started = True

    def trial_end(self, env):
        assert env
        type(self).trials += 1
        type(self).total_time += env.now

    def end(self):
        type(self).ended = True
|
||||||
|
|
||||||
|
|
||||||
|
class Exporters(TestCase):
    '''Integration tests for the built-in exporters.'''

    def test_basic(self):
        '''A custom exporter should see every hook during a simulation.'''
        config = {
            'name': 'exporter_sim',
            'network_params': {},
            'agent_type': 'CounterModel',
            'max_time': 2,
            'num_trials': 5,
            'environment_params': {}
        }
        sim = simulation.from_config(config)
        sim.run_simulation(exporters=[Dummy], dry_run=True)
        assert Dummy.started
        assert Dummy.ended
        assert Dummy.trials == 5
        assert Dummy.total_time == 2 * 5

    def test_distribution(self):
        '''The distribution exporter should write the number of agents in each state'''
        config = {
            'name': 'exporter_sim',
            'network_params': {
                'generator': 'complete_graph',
                'n': 4
            },
            'agent_type': 'CounterModel',
            'max_time': 2,
            'num_trials': 5,
            'environment_params': {}
        }
        captured = io.StringIO()
        sim = simulation.from_config(config)
        sim.run_simulation(exporters=[exporters.Distribution],
                           dry_run=True,
                           exporter_params={'copy_to': captured})
        result = captured.getvalue()
        assert 'count' in result
        assert 'SEED,Noneexporter_sim_trial_3,1,,1,1,1,1' in result

    def test_writing(self):
        '''Try to write CSV, GEXF, sqlite and YAML (without dry_run)'''
        n_trials = 5
        config = {
            'name': 'exporter_sim',
            'network_params': {
                'generator': 'complete_graph',
                'n': 4
            },
            'agent_type': 'CounterModel',
            'max_time': 2,
            'num_trials': n_trials,
            'environment_params': {}
        }
        captured = io.StringIO()
        sim = simulation.from_config(config)
        tmpdir = tempfile.mkdtemp()
        envs = sim.run_simulation(exporters=[
            exporters.Default,
            exporters.CSV,
            exporters.Gexf,
            exporters.Distribution,
        ],
            outdir=tmpdir,
            exporter_params={'copy_to': captured})
        result = captured.getvalue()

        simdir = os.path.join(tmpdir, sim.group or '', sim.name)
        with open(os.path.join(simdir, '{}.dumped.yml'.format(sim.name))) as f:
            result = f.read()
            assert result

        try:
            for env in envs:
                with open(os.path.join(simdir, '{}.gexf'.format(env.name))) as f:
                    result = f.read()
                    assert result

                with open(os.path.join(simdir, '{}.csv'.format(env.name))) as f:
                    result = f.read()
                    assert result
        finally:
            shutil.rmtree(tmpdir)
|
Loading…
Reference in New Issue