Mirror of https://github.com/gsi-upm/soil (synced 2025-09-19 06:32:21 +00:00)
Compare commits
8 Commits
d1006bd55c ... 0.14.6

Commits (SHA1):
c8b8149a17
6690b6ee5f
97835b3d10
b0add8552e
1cf85ea450
c32e167fb8
5f68b5321d
2a2843bd19
@@ -1,2 +1,4 @@
 **/soil_output
 .*
+__pycache__
+*.pyc
21 CHANGELOG.md
@@ -3,6 +3,27 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.14.6]
+### Fixed
+* Bug with newer versions of networkx (2.4), where the Graph.node attribute has been removed. We have updated our calls, but the code in nxsim is not under our control, so we have pinned the networkx version until that issue is solved.
+### Changed
+* Explicit yaml.SafeLoader to avoid deprecation warnings when using yaml.load. It should not break any existing setups, but we could move to the FullLoader in the future if needed.
+
+## [0.14.4]
+### Fixed
+* Bug in `agent.get_agents()` when `state_id` is passed as a string. The tests have been modified accordingly.
+## [0.14.3]
+### Fixed
+* Incompatibility with py3.3-3.6 due to ModuleNotFoundError and TypeError in DryRunner
+## [0.14.2]
+### Fixed
+* Output path for exporters is now soil_output
+### Changed
+* CSV output to stdout in dry_run mode
+## [0.14.1]
+### Changed
+* Exporter names in lower case
+* Add default exporter in runs
 ## [0.14.0]
 ### Added
 * Loading configuration from template definitions in the yaml, in preparation for SALib support.
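The two [0.14.6] entries above describe library-level API changes: networkx 2.4 removed the old `Graph.node` accessor in favour of `Graph.nodes`, and PyYAML warns when `yaml.load` is called without an explicit Loader. A rough standalone sketch of both changes (not part of this diff; the graph and YAML document are made up):

    import networkx as nx
    import yaml

    G = nx.complete_graph(3)
    G.nodes[0]['agent'] = 'CounterModel'   # G.node[0] worked before networkx 2.4; G.nodes[0] works across 2.x
    print(G.nodes[0])                      # {'agent': 'CounterModel'}

    # Passing an explicit SafeLoader silences the deprecation warning from a bare yaml.load().
    config = yaml.load("name: demo\nmax_time: 2", Loader=yaml.SafeLoader)
    print(config)                          # {'name': 'demo', 'max_time': 2}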
7 Makefile
@@ -1,4 +1,7 @@
-test:
+quick-test:
 	docker-compose exec dev python -m pytest -s -v
 
+test:
+	docker run -t -v $$PWD:/usr/src/app -w /usr/src/app python:3.7 python setup.py test
+
 .PHONY: test
@@ -323,7 +323,7 @@ Let's run our simulation:
 .. code:: ipython3
 
-    soil.simulation.run_from_config(config, dump=False)
+    soil.simulation.run_from_config(config)
 
 
 .. parsed-literal::
@@ -59,7 +59,7 @@ class Patron(FSM):
     2) Look for a bar where the agent and other agents in the same group can get in.
     3) While in the bar, patrons only drink, until they get drunk and taken home.
     '''
-    level = logging.INFO
+    level = logging.DEBUG
 
     defaults = {
         'pub': None,
@@ -113,7 +113,8 @@ class Patron(FSM):
     @state
     def at_home(self):
        '''The end'''
-        self.debug('Life sucks. I\'m home!')
+        others = self.get_agents(state_id=Patron.at_home.id, limit_neighbors=True)
+        self.debug('I\'m home. Just like {} of my friends'.format(len(others)))
 
     def drink(self):
         self['pints'] += 1
@@ -1,10 +1,10 @@
 nxsim>=0.1.2
 simpy
-networkx>=2.0
+networkx>=2.0,<2.4
 numpy
 matplotlib
-pyyaml
+pyyaml>=5.1
 pandas>=0.23
-scipy>=1.2
+scipy==1.2.1  # scipy 1.3.0rc1 is not compatible with salib
 SALib>=1.3
 Jinja2
@@ -1 +1 @@
-0.13.8
+0.14.6
@@ -57,11 +57,11 @@ def main():
     logging.info('Loading config file: {}'.format(args.file))
 
     try:
-        exporters = list(args.exporter or [])
+        exporters = list(args.exporter or ['default', ])
         if args.csv:
-            exporters.append('CSV')
+            exporters.append('csv')
         if args.graph:
-            exporters.append('Gexf')
+            exporters.append('gexf')
         exp_params = {}
         if args.dry_run:
             exp_params['copy_to'] = sys.stdout
@@ -171,7 +171,7 @@ class BaseAgent(nxsim.BaseAgent):
 
     def info(self, *args, **kwargs):
         return self.log(*args, level=logging.INFO, **kwargs)
 
     def __getstate__(self):
         '''
         Serializing an agent will lose all its running information (you cannot
@@ -476,11 +476,8 @@ class Geo(NetworkAgent):
 
 
 def select(agents, state_id=None, agent_type=None, ignore=None, iterator=False, **kwargs):
 
-    if state_id is not None:
-        try:
-            state_id = tuple(state_id)
-        except TypeError:
-            state_id = tuple([state_id])
+    if state_id is not None and not isinstance(state_id, (tuple, list)):
+        state_id = tuple([state_id])
 
     if agent_type is not None:
         try:
             agent_type = tuple(agent_type)
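The rewritten guard in `select()` above is what makes a bare string `state_id` (e.g. `'normal'`) behave like a one-element tuple, which is the bug described in the [0.14.4] changelog entry. A minimal, hedged sketch of just that normalization outside of soil (the helper name is made up):

    def normalize_state_id(state_id):
        # Wrap a single id (such as a string) in a tuple, but leave
        # tuples and lists untouched so both call styles keep working.
        if state_id is not None and not isinstance(state_id, (tuple, list)):
            state_id = tuple([state_id])
        return state_id

    print(normalize_state_id('normal'))                # ('normal',)
    print(normalize_state_id(('normal', 'at_home')))   # ('normal', 'at_home')
    print(normalize_state_id(None))                    # None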
@@ -20,7 +20,7 @@ def _read_data(pattern, *args, from_csv=False, process_args=None, **kwargs):
         process_args = {}
     for folder in glob.glob(pattern):
         config_file = glob.glob(join(folder, '*.yml'))[0]
-        config = yaml.load(open(config_file))
+        config = yaml.load(open(config_file), Loader=yaml.SafeLoader)
         df = None
         if from_csv:
             for trial_data in sorted(glob.glob(join(folder,
@@ -133,7 +133,7 @@ def get_count(df, *keys):
 def get_value(df, *keys, aggfunc='sum'):
     if keys:
         df = df[list(keys)]
-    return df.groupby(axis=1, level=0).agg(aggfunc, axis=1)
+    return df.groupby(axis=1, level=0).agg(aggfunc)
 
 
 def plot_all(*args, **kwargs):
@@ -87,7 +87,7 @@ class Environment(nxsim.NetworkEnvironment):
     @property
     def network_agents(self):
         for i in self.G.nodes():
-            node = self.G.node[i]
+            node = self.G.nodes[i]
             if 'agent' in node:
                 yield node['agent']
 
@@ -212,12 +212,12 @@ class Environment(nxsim.NetworkEnvironment):
         return self[key] if key in self else default
 
     def get_agent(self, agent_id):
-        return self.G.node[agent_id]['agent']
+        return self.G.nodes[agent_id]['agent']
 
     def get_agents(self, nodes=None):
         if nodes is None:
             return list(self.agents)
-        return [self.G.node[i]['agent'] for i in nodes]
+        return [self.G.nodes[i]['agent'] for i in nodes]
 
     def dump_csv(self, f):
         with utils.open_or_reuse(f, 'w') as f:
@@ -231,9 +231,9 @@ class Environment(nxsim.NetworkEnvironment):
         # Workaround for geometric models
         # See soil/soil#4
         for node in G.nodes():
-            if 'pos' in G.node[node]:
-                G.node[node]['viz'] = {"position": {"x": G.node[node]['pos'][0], "y": G.node[node]['pos'][1], "z": 0.0}}
-                del (G.node[node]['pos'])
+            if 'pos' in G.nodes[node]:
+                G.nodes[node]['viz'] = {"position": {"x": G.nodes[node]['pos'][0], "y": G.nodes[node]['pos'][1], "z": 0.0}}
+                del (G.nodes[node]['pos'])
 
         nx.write_gexf(G, f, version="1.2draft")
 
@@ -21,6 +21,7 @@ def for_sim(simulation, names, *args, **kwargs):
         exporters.append(mod(simulation, *args, **kwargs))
     return exporters
 
 
 class DryRunner(BytesIO):
     def __init__(self, fname, *args, copy_to=None, **kwargs):
         super().__init__(*args, **kwargs)
@@ -49,7 +50,7 @@ class Exporter:
 
     def __init__(self, simulation, outdir=None, dry_run=None, copy_to=None):
         self.sim = simulation
-        outdir = outdir or os.getcwd()
+        outdir = outdir or os.path.join(os.getcwd(), 'soil_output')
         self.outdir = os.path.join(outdir,
                                    simulation.group or '',
                                    simulation.name)
@@ -77,8 +78,8 @@ class Exporter:
         return open_or_reuse(f, mode=mode, **kwargs)
 
 
-class Default(Exporter):
-    '''Default exporter. Writes CSV and sqlite results, as well as the simulation YAML'''
+class default(Exporter):
+    '''Default exporter. Writes sqlite results, as well as the simulation YAML'''
 
     def start(self):
         if not self.dry_run:
@@ -95,25 +96,29 @@ class Default(Exporter):
             env.dump_sqlite(f)
 
 
-class CSV(Exporter):
+class csv(Exporter):
+    '''Export the state of each environment (and its agents) in a separate CSV file'''
     def trial_end(self, env):
-        if not self.dry_run:
-            with timer('[CSV] Dumping simulation {} trial {}'.format(self.sim.name,
-                                                                     env.name)):
-                with self.output('{}.csv'.format(env.name)) as f:
-                    env.dump_csv(f)
+        with timer('[CSV] Dumping simulation {} trial {} @ dir {}'.format(self.sim.name,
+                                                                          env.name,
+                                                                          self.outdir)):
+            with self.output('{}.csv'.format(env.name)) as f:
+                env.dump_csv(f)
 
 
-class Gexf(Exporter):
+class gexf(Exporter):
     def trial_end(self, env):
-        if not self.dry_run:
-            with timer('[CSV] Dumping simulation {} trial {}'.format(self.sim.name,
-                                                                     env.name)):
-                with self.output('{}.gexf'.format(env.name), mode='wb') as f:
-                    env.dump_gexf(f)
+        if self.dry_run:
+            logger.info('Not dumping GEXF in dry_run mode')
+            return
+
+        with timer('[GEXF] Dumping simulation {} trial {}'.format(self.sim.name,
+                                                                  env.name)):
+            with self.output('{}.gexf'.format(env.name), mode='wb') as f:
+                env.dump_gexf(f)
 
 
-class Dummy(Exporter):
+class dummy(Exporter):
 
     def start(self):
         with self.output('dummy', 'w') as f:
@@ -130,7 +135,7 @@ class Dummy(Exporter):
         f.write('simulation ended @ {}\n'.format(time.time()))
 
 
-class Distribution(Exporter):
+class distribution(Exporter):
     '''
     Write the distribution of agent states at the end of each trial,
     the mean value, and its deviation.
@@ -164,7 +169,7 @@ class Distribution(Exporter):
         with self.output('metrics.csv') as f:
             dfm.to_csv(f)
 
-class GraphDrawing(Exporter):
+class graphdrawing(Exporter):
 
     def trial_end(self, env):
         # Outside effects
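After the renames above, exporters are addressed by their lowercase class names ('default', 'csv', 'gexf', 'distribution', 'graphdrawing'), and 'default' is used when nothing else is requested. A hedged usage sketch, reusing the configuration style of the new tests/test_exporters.py shown further down (CounterModel and the config keys come from that file):

    from soil import simulation, exporters

    config = {
        'name': 'exporter_sim',
        'network_params': {'generator': 'complete_graph', 'n': 4},
        'agent_type': 'CounterModel',
        'max_time': 2,
        'num_trials': 5,
        'environment_params': {},
    }

    s = simulation.from_config(config)
    # Exporter classes can be passed directly, as the tests do; the CLI builds the
    # equivalent list of lowercase names ('default', 'csv', 'gexf') from its flags.
    s.run_simulation(exporters=[exporters.default, exporters.csv], dry_run=True)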
@@ -11,6 +11,7 @@ logger = logging.getLogger(__name__)
 from collections import UserDict, namedtuple
 
 from . import serialization
+from .utils import open_or_reuse
 
 
 class History:
@@ -236,7 +237,7 @@ class History:
 
     def dump(self, f):
         self._close()
-        for line in open(self.db_path, 'rb'):
+        for line in open_or_reuse(self.db_path, 'rb'):
             f.write(line)
 
@@ -55,7 +55,7 @@ def load_file(infile):
 
 
 def load_string(string):
-    yield from yaml.load_all(string)
+    yield from yaml.load_all(string, Loader=yaml.FullLoader)
 
 
 def expand_template(config):
@@ -186,7 +186,7 @@ def deserializer(type_, known_modules=[]):
             module = importlib.import_module(modname)
             cls = getattr(module, tname)
             return getattr(cls, 'deserialize', cls)
-        except (ModuleNotFoundError, AttributeError) as ex:
+        except (ImportError, AttributeError) as ex:
             errors.append((modname, tname, ex))
     raise Exception('Could not find type {}. Tried: {}'.format(type_, errors))
 
@@ -153,11 +153,11 @@ class Simulation(NetworkSimulation):
                         **kwargs)
 
     def _run_simulation_gen(self, *args, parallel=False, dry_run=False,
-                            exporters=None, outdir=None, exporter_params={}, **kwargs):
+                            exporters=['default', ], outdir=None, exporter_params={}, **kwargs):
         logger.info('Using exporters: %s', exporters or [])
         logger.info('Output directory: %s', outdir)
         exporters = exporters_for_sim(self,
-                                      exporters or [],
+                                      exporters,
                                       dry_run=dry_run,
                                       outdir=outdir,
                                       **exporter_params)
@@ -2,6 +2,8 @@ import logging
 import time
 import os
 
+from shutil import copyfile
+
 from contextlib import contextmanager
 
 logger = logging.getLogger('soil')
@@ -23,15 +25,26 @@ def timer(name='task', pre="", function=logger.info, to_object=None):
         to_object.end = end
 
 
-def safe_open(path, *args, **kwargs):
+def safe_open(path, mode='r', backup=True, **kwargs):
     outdir = os.path.dirname(path)
     if outdir and not os.path.exists(outdir):
         os.makedirs(outdir)
-    return open(path, *args, **kwargs)
+    if backup and 'w' in mode and os.path.exists(path):
+        creation = os.path.getctime(path)
+        stamp = time.strftime('%Y-%m-%d_%H:%M', time.localtime(creation))
+
+        backup_dir = os.path.join(outdir, stamp)
+        if not os.path.exists(backup_dir):
+            os.makedirs(backup_dir)
+        newpath = os.path.join(backup_dir, os.path.basename(path))
+        if os.path.exists(newpath):
+            newpath = '{}@{}'.format(newpath, time.time())
+        copyfile(path, newpath)
+    return open(path, mode=mode, **kwargs)
 
 
 def open_or_reuse(f, *args, **kwargs):
     try:
         return safe_open(f, *args, **kwargs)
-    except TypeError:
+    except (AttributeError, TypeError):
         return f
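The new `safe_open` above creates missing output directories and, when overwriting an existing file with `backup=True` (the default), first copies the old file into a subdirectory named after its creation timestamp. A small usage sketch under those assumptions (the path is made up):

    from soil.utils import safe_open

    # First call creates soil_output/ if needed and writes the file.
    with safe_open('soil_output/demo.csv', mode='w') as f:
        f.write('step,value\n0,1\n')

    # A second write should first copy the existing file to
    # soil_output/<YYYY-MM-DD_HH:MM>/demo.csv, then reopen the path for writing.
    with safe_open('soil_output/demo.csv', mode='w') as f:
        f.write('step,value\n0,2\n')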
@@ -118,9 +118,9 @@ class SocketHandler(tornado.websocket.WebSocketHandler):
         elif msg['type'] == 'download_gexf':
             G = self.trials[ int(msg['data']) ].history_to_graph()
             for node in G.nodes():
-                if 'pos' in G.node[node]:
-                    G.node[node]['viz'] = {"position": {"x": G.node[node]['pos'][0], "y": G.node[node]['pos'][1], "z": 0.0}}
-                    del (G.node[node]['pos'])
+                if 'pos' in G.nodes[node]:
+                    G.nodes[node]['viz'] = {"position": {"x": G.nodes[node]['pos'][0], "y": G.nodes[node]['pos'][1], "z": 0.0}}
+                    del (G.nodes[node]['pos'])
             writer = nx.readwrite.gexf.GEXFWriter(version='1.2draft')
             writer.add_graph(G)
             self.write_message({'type': 'download_gexf',
@@ -130,9 +130,9 @@ class SocketHandler(tornado.websocket.WebSocketHandler):
         elif msg['type'] == 'download_json':
             G = self.trials[ int(msg['data']) ].history_to_graph()
             for node in G.nodes():
-                if 'pos' in G.node[node]:
-                    G.node[node]['viz'] = {"position": {"x": G.node[node]['pos'][0], "y": G.node[node]['pos'][1], "z": 0.0}}
-                    del (G.node[node]['pos'])
+                if 'pos' in G.nodes[node]:
+                    G.nodes[node]['viz'] = {"position": {"x": G.nodes[node]['pos'][0], "y": G.nodes[node]['pos'][1], "z": 0.0}}
+                    del (G.nodes[node]['pos'])
             self.write_message({'type': 'download_json',
                                 'filename': self.config['name'] + '_trial_' + str(msg['data']),
                                 'data': nx.node_link_data(G) })
@@ -271,4 +271,4 @@ def main():
     parser.add_argument('--verbose', '-v', help='verbose mode', action='store_true')
     args = parser.parse_args()
 
     run(name=args.name, port=(args.port[0] if isinstance(args.port, list) else args.port), verbose=args.verbose)
110 tests/test_exporters.py (new file)
@@ -0,0 +1,110 @@
+import os
+import io
+import tempfile
+import shutil
+from time import time
+
+from unittest import TestCase
+from soil import exporters
+from soil.utils import safe_open
+from soil import simulation
+
+
+class Dummy(exporters.Exporter):
+    started = False
+    trials = 0
+    ended = False
+    total_time = 0
+
+    def start(self):
+        self.__class__.started = True
+
+    def trial_end(self, env):
+        assert env
+        self.__class__.trials += 1
+        self.__class__.total_time += env.now
+
+    def end(self):
+        self.__class__.ended = True
+
+
+class Exporters(TestCase):
+    def test_basic(self):
+        config = {
+            'name': 'exporter_sim',
+            'network_params': {},
+            'agent_type': 'CounterModel',
+            'max_time': 2,
+            'num_trials': 5,
+            'environment_params': {}
+        }
+        s = simulation.from_config(config)
+        s.run_simulation(exporters=[Dummy], dry_run=True)
+        assert Dummy.started
+        assert Dummy.ended
+        assert Dummy.trials == 5
+        assert Dummy.total_time == 2*5
+
+    def test_distribution(self):
+        '''The distribution exporter should write the number of agents in each state'''
+        config = {
+            'name': 'exporter_sim',
+            'network_params': {
+                'generator': 'complete_graph',
+                'n': 4
+            },
+            'agent_type': 'CounterModel',
+            'max_time': 2,
+            'num_trials': 5,
+            'environment_params': {}
+        }
+        output = io.StringIO()
+        s = simulation.from_config(config)
+        s.run_simulation(exporters=[exporters.distribution], dry_run=True, exporter_params={'copy_to': output})
+        result = output.getvalue()
+        assert 'count' in result
+        assert 'SEED,Noneexporter_sim_trial_3,1,,1,1,1,1' in result
+
+    def test_writing(self):
+        '''Try to write CSV, GEXF, sqlite and YAML (without dry_run)'''
+        n_trials = 5
+        config = {
+            'name': 'exporter_sim',
+            'network_params': {
+                'generator': 'complete_graph',
+                'n': 4
+            },
+            'agent_type': 'CounterModel',
+            'max_time': 2,
+            'num_trials': n_trials,
+            'environment_params': {}
+        }
+        output = io.StringIO()
+        s = simulation.from_config(config)
+        tmpdir = tempfile.mkdtemp()
+        envs = s.run_simulation(exporters=[
+                                    exporters.default,
+                                    exporters.csv,
+                                    exporters.gexf,
+                                    exporters.distribution,
+                                ],
+                                outdir=tmpdir,
+                                exporter_params={'copy_to': output})
+        result = output.getvalue()
+
+        simdir = os.path.join(tmpdir, s.group or '', s.name)
+        with open(os.path.join(simdir, '{}.dumped.yml'.format(s.name))) as f:
+            result = f.read()
+            assert result
+
+        try:
+            for e in envs:
+                with open(os.path.join(simdir, '{}.gexf'.format(e.name))) as f:
+                    result = f.read()
+                    assert result
+
+                with open(os.path.join(simdir, '{}.csv'.format(e.name))) as f:
+                    result = f.read()
+                    assert result
+        finally:
+            shutil.rmtree(tmpdir)
@@ -1,6 +1,7 @@
 from unittest import TestCase
 
 import os
+import io
 import yaml
 import pickle
 import networkx as nx
@@ -15,10 +16,15 @@ ROOT = os.path.abspath(os.path.dirname(__file__))
 EXAMPLES = join(ROOT, '..', 'examples')
 
 
-class CustomAgent(agents.BaseAgent):
-    def step(self):
-        self.state['neighbors'] = self.count_agents(state_id=0,
+class CustomAgent(agents.FSM):
+    @agents.default_state
+    @agents.state
+    def normal(self):
+        self.state['neighbors'] = self.count_agents(state_id='normal',
                                                     limit_neighbors=True)
+    @agents.state
+    def unreachable(self):
+        return
 
 class TestMain(TestCase):
 
@@ -133,8 +139,7 @@ class TestMain(TestCase):
             },
             'network_agents': [{
                 'agent_type': CustomAgent,
-                'weight': 1,
-                'state': {'id': 0}
+                'weight': 1
             }],
             'max_time': 10,
@@ -144,6 +149,9 @@ class TestMain(TestCase):
         s = simulation.from_config(config)
         env = s.run_simulation(dry_run=True)[0]
         assert env.get_agent(0).state['neighbors'] == 1
+        assert env.get_agent(0).state['neighbors'] == 1
+        assert env.get_agent(1).count_agents(state_id='normal') == 2
+        assert env.get_agent(1).count_agents(state_id='normal', limit_neighbors=True) == 1
 
     def test_torvalds_example(self):
         """A complete example from a documentation should work."""
@@ -178,7 +186,7 @@ class TestMain(TestCase):
         with utils.timer('serializing'):
             serial = s.to_yaml()
         with utils.timer('recovering'):
-            recovered = yaml.load(serial)
+            recovered = yaml.load(serial, Loader=yaml.SafeLoader)
         with utils.timer('deleting'):
             del recovered['topology']
         assert config == recovered
@@ -217,7 +225,8 @@ class TestMain(TestCase):
         """
         G = nx.random_geometric_graph(20, 0.1)
         env = Environment(topology=G)
-        env.dump_gexf('/tmp/dump-gexf/prueba.gexf')
+        f = io.BytesIO()
+        env.dump_gexf(f)
 
     def test_save_graph(self):
         '''
@@ -231,7 +240,7 @@ class TestMain(TestCase):
         env[0, 0, 'testvalue'] = 'start'
         env[0, 10, 'testvalue'] = 'finish'
         nG = env.history_to_graph()
-        values = nG.node[0]['attr_testvalue']
+        values = nG.nodes[0]['attr_testvalue']
         assert ('start', 0, 10) in values
         assert ('finish', 10, None) in values