mirror of https://github.com/gsi-upm/soil synced 2025-09-13 19:52:20 +00:00

Compare commits

5 Commits

Author                SHA1         Message                                                                                 Date
J. Fernando Sánchez   6adc8d36ba   minor change in docs                                                                    2020-03-13 12:50:05 +01:00
J. Fernando Sánchez   c8b8149a17   Updated to 0.14.6 (Fix compatibility issues with newer networkx and pandas versions)   2020-03-11 16:17:14 +01:00
J. Fernando Sánchez   6690b6ee5f   Fix incompatibility and bug in get_agents                                               2019-05-16 19:59:46 +02:00
J. Fernando Sánchez   97835b3d10   Clean up exporters                                                                      2019-05-03 13:17:27 +02:00
J. Fernando Sánchez   b0add8552e   Tag version 0.14.0                                                                      2019-04-30 16:26:08 +02:00
20 changed files with 158 additions and 95 deletions

View File

@@ -1,2 +1,4 @@
**/soil_output
.*
__pycache__
*.pyc

View File

@@ -3,6 +3,32 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.14.7]
### Changed
* Minor change to traceback handling in async simulations
### Fixed
* Incomplete example in the docs (example.yml) caused an exception
## [0.14.6]
### Fixed
* Bug with newer versions of networkx (2.4) where the Graph.node attribute has been removed. We have updated our calls, but the code in nxsim is not under our control, so we have pinned the networkx version until that issue is solved.
### Changed
* Explicit yaml.SafeLoader to avoid deprecation warnings when using yaml.load. It should not break any existing setups, but we could move to the FullLoader in the future if needed.
## [0.14.4]
### Fixed
* Bug in `agent.get_agents()` when `state_id` is passed as a string. The tests have been modified accordingly.
## [0.14.3]
### Fixed
* Incompatibility with py3.3-3.6 due to ModuleNotFoundError and TypeError in DryRunner
## [0.14.2]
### Fixed
* Output path for exporters is now soil_output
### Changed
* CSV output to stdout in dry_run mode
## [0.14.1]
### Changed
* Exporter names in lower case
* Add default exporter in runs
## [0.14.0]
### Added
* Loading configuration from template definitions in the yaml, in preparation for SALib support.
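
The explicit `yaml.SafeLoader` mentioned under 0.14.6 is the pattern used throughout this changeset (see the soil/analysis.py and test hunks below). A minimal sketch, not part of the diff, with an illustrative config path:

import yaml

with open('config.yml') as f:
    # yaml.load without a Loader argument warns on PyYAML >= 5.1;
    # SafeLoader also avoids constructing arbitrary Python objects.
    config = yaml.load(f, Loader=yaml.SafeLoader)
print(config['name'])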

View File

@@ -8,32 +8,8 @@ The advantage of a configuration file is that it is a clean declarative descript
Simulation configuration files can be formatted in ``json`` or ``yaml`` and they define all the parameters of a simulation.
Here's an example (``example.yml``).
.. code:: yaml

    ---
    name: MyExampleSimulation
    max_time: 50
    num_trials: 3
    interval: 2
    network_params:
        generator: barabasi_albert_graph
        n: 100
        m: 2
    network_agents:
        - agent_type: SISaModel
          weight: 1
          state:
            id: content
        - agent_type: SISaModel
          weight: 1
          state:
            id: discontent
        - agent_type: SISaModel
          weight: 8
          state:
            id: neutral
    environment_params:
        prob_infect: 0.075

.. literalinclude:: example.yml
   :language: yaml
This example configuration will run three trials (``num_trials``) of a simulation containing a randomly generated network (``network_params``).

docs/example.yml Normal file (35 lines)
View File

@@ -0,0 +1,35 @@
---
name: MyExampleSimulation
max_time: 50
num_trials: 3
interval: 2
network_params:
    generator: barabasi_albert_graph
    n: 100
    m: 2
network_agents:
    - agent_type: SISaModel
      weight: 1
      state:
        id: content
    - agent_type: SISaModel
      weight: 1
      state:
        id: discontent
    - agent_type: SISaModel
      weight: 8
      state:
        id: neutral
environment_params:
    prob_infect: 0.075
    neutral_discontent_spon_prob: 0.1
    neutral_discontent_infected_prob: 0.3
    neutral_content_spon_prob: 0.3
    neutral_content_infected_prob: 0.4
    discontent_neutral: 0.5
    discontent_content: 0.5
    variance_d_c: 0.2
    content_discontent: 0.2
    variance_c_d: 0.2
    content_neutral: 0.2
    standard_variance: 1
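
For reference, a sketch of loading and running this file with the `simulation.from_config` / `run_simulation` calls that appear in the test hunks further down (assumes soil 0.14.x is installed and `example.yml` is in the working directory):

import yaml
from soil import simulation

with open('example.yml') as f:
    config = yaml.load(f, Loader=yaml.SafeLoader)

s = simulation.from_config(config)
# dry_run skips writing results to disk, mirroring the tests below
envs = s.run_simulation(dry_run=True)
print(len(envs))  # one environment per trial (num_trials: 3)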

View File

@@ -323,7 +323,7 @@ Let's run our simulation:
.. code:: ipython3

    soil.simulation.run_from_config(config, dump=False)
    soil.simulation.run_from_config(config)

.. parsed-literal::

View File

@@ -59,7 +59,7 @@ class Patron(FSM):
    2) Look for a bar where the agent and other agents in the same group can get in.
    3) While in the bar, patrons only drink, until they get drunk and taken home.
    '''
    level = logging.INFO
    level = logging.DEBUG

    defaults = {
        'pub': None,
@@ -113,7 +113,8 @@ class Patron(FSM):
    @state
    def at_home(self):
        '''The end'''
        self.debug('Life sucks. I\'m home!')
        others = self.get_agents(state_id=Patron.at_home.id, limit_neighbors=True)
        self.debug('I\'m home. Just like {} of my friends'.format(len(others)))

    def drink(self):
        self['pints'] += 1

View File

@@ -1,6 +1,6 @@
nxsim>=0.1.2
simpy
networkx>=2.0
networkx>=2.0,<2.4
numpy
matplotlib
pyyaml>=5.1

View File

@@ -1 +1 @@
0.13.8
0.14.7

View File

@@ -57,11 +57,11 @@ def main():
    logging.info('Loading config file: {}'.format(args.file))
    try:
        exporters = list(args.exporter or [])
        exporters = list(args.exporter or ['default', ])
        if args.csv:
            exporters.append('CSV')
            exporters.append('csv')
        if args.graph:
            exporters.append('Gexf')
            exporters.append('gexf')
        exp_params = {}
        if args.dry_run:
            exp_params['copy_to'] = sys.stdout

View File

@@ -171,7 +171,7 @@ class BaseAgent(nxsim.BaseAgent):
    def info(self, *args, **kwargs):
        return self.log(*args, level=logging.INFO, **kwargs)

    def __getstate__(self):
        '''
        Serializing an agent will lose all its running information (you cannot
@@ -476,11 +476,8 @@ class Geo(NetworkAgent):
def select(agents, state_id=None, agent_type=None, ignore=None, iterator=False, **kwargs):
    if state_id is not None:
        try:
            state_id = tuple(state_id)
        except TypeError:
            state_id = tuple([state_id])
    if state_id is not None and not isinstance(state_id, (tuple, list)):
        state_id = tuple([state_id])

    if agent_type is not None:
        try:
            agent_type = tuple(agent_type)
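
The `select` fix above (the 0.14.4 `get_agents` bug) comes down to strings being iterable; a standalone illustration:

# tuple() silently splits a string into characters instead of raising
# the TypeError the old code relied on:
state_id = 'content'
print(tuple(state_id))    # ('c', 'o', 'n', 't', 'e', 'n', 't')

# The fixed version wraps non-sequence values explicitly:
if state_id is not None and not isinstance(state_id, (tuple, list)):
    state_id = tuple([state_id])
print(state_id)           # ('content',)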

View File

@@ -20,7 +20,7 @@ def _read_data(pattern, *args, from_csv=False, process_args=None, **kwargs):
        process_args = {}
    for folder in glob.glob(pattern):
        config_file = glob.glob(join(folder, '*.yml'))[0]
        config = yaml.load(open(config_file))
        config = yaml.load(open(config_file), Loader=yaml.SafeLoader)
        df = None
        if from_csv:
            for trial_data in sorted(glob.glob(join(folder,
@@ -133,7 +133,7 @@ def get_count(df, *keys):

def get_value(df, *keys, aggfunc='sum'):
    if keys:
        df = df[list(keys)]
    return df.groupby(axis=1, level=0).agg(aggfunc, axis=1)
    return df.groupby(axis=1, level=0).agg(aggfunc)


def plot_all(*args, **kwargs):
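
The `get_value` change drops the extra `axis` argument that was passed to `agg` and caused problems with newer pandas. A small sketch of the intended aggregation over the top column level, valid for the pandas versions this release targets (axis-based groupby was removed in later pandas):

import pandas as pd

columns = pd.MultiIndex.from_tuples([('id', 'content'),
                                     ('id', 'neutral'),
                                     ('pints', 'value')])
df = pd.DataFrame([[1, 0, 2],
                   [0, 1, 3]], columns=columns)

# Group columns by their first level and aggregate, as get_value does
print(df.groupby(axis=1, level=0).agg('sum'))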

View File

@@ -87,7 +87,7 @@ class Environment(nxsim.NetworkEnvironment):
    @property
    def network_agents(self):
        for i in self.G.nodes():
            node = self.G.node[i]
            node = self.G.nodes[i]
            if 'agent' in node:
                yield node['agent']
@@ -212,12 +212,12 @@ class Environment(nxsim.NetworkEnvironment):
        return self[key] if key in self else default

    def get_agent(self, agent_id):
        return self.G.node[agent_id]['agent']
        return self.G.nodes[agent_id]['agent']

    def get_agents(self, nodes=None):
        if nodes is None:
            return list(self.agents)
        return [self.G.node[i]['agent'] for i in nodes]
        return [self.G.nodes[i]['agent'] for i in nodes]

    def dump_csv(self, f):
        with utils.open_or_reuse(f, 'w') as f:
@@ -231,9 +231,9 @@ class Environment(nxsim.NetworkEnvironment):
        # Workaround for geometric models
        # See soil/soil#4
        for node in G.nodes():
            if 'pos' in G.node[node]:
                G.node[node]['viz'] = {"position": {"x": G.node[node]['pos'][0], "y": G.node[node]['pos'][1], "z": 0.0}}
                del (G.node[node]['pos'])
            if 'pos' in G.nodes[node]:
                G.nodes[node]['viz'] = {"position": {"x": G.nodes[node]['pos'][0], "y": G.nodes[node]['pos'][1], "z": 0.0}}
                del (G.nodes[node]['pos'])

        nx.write_gexf(G, f, version="1.2draft")
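
These are the `Graph.node` to `Graph.nodes` renames behind the 0.14.6 changelog entry. A quick standalone check against networkx 2.x:

import networkx as nx

G = nx.barabasi_albert_graph(100, 2)   # same generator as example.yml
G.nodes[0]['agent'] = 'agent-0'        # attribute access through the NodeView
print(G.nodes[0])                      # {'agent': 'agent-0'}

# G.node[0] still worked up to networkx 2.3 but was removed in 2.4,
# which is why requirements.txt now pins networkx>=2.0,<2.4.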

View File

@@ -50,7 +50,7 @@ class Exporter:
    def __init__(self, simulation, outdir=None, dry_run=None, copy_to=None):
        self.sim = simulation
        outdir = outdir or os.getcwd()
        outdir = outdir or os.path.join(os.getcwd(), 'soil_output')
        self.outdir = os.path.join(outdir,
                                   simulation.group or '',
                                   simulation.name)
@@ -78,8 +78,8 @@ class Exporter:
        return open_or_reuse(f, mode=mode, **kwargs)


class Default(Exporter):
    '''Default exporter. Writes CSV and sqlite results, as well as the simulation YAML'''
class default(Exporter):
    '''Default exporter. Writes sqlite results, as well as the simulation YAML'''

    def start(self):
        if not self.dry_run:
@@ -96,25 +96,29 @@ class Default(Exporter):
                env.dump_sqlite(f)


class CSV(Exporter):
class csv(Exporter):
    '''Export the state of each environment (and its agents) in a separate CSV file'''

    def trial_end(self, env):
        if not self.dry_run:
            with timer('[CSV] Dumping simulation {} trial {}'.format(self.sim.name,
                                                                     env.name)):
                with self.output('{}.csv'.format(env.name)) as f:
                    env.dump_csv(f)
            with timer('[CSV] Dumping simulation {} trial {} @ dir {}'.format(self.sim.name,
                                                                              env.name,
                                                                              self.outdir)):
                with self.output('{}.csv'.format(env.name)) as f:
                    env.dump_csv(f)


class Gexf(Exporter):
class gexf(Exporter):

    def trial_end(self, env):
        if not self.dry_run:
            with timer('[CSV] Dumping simulation {} trial {}'.format(self.sim.name,
                                                                     env.name)):
                with self.output('{}.gexf'.format(env.name), mode='wb') as f:
                    env.dump_gexf(f)
        if self.dry_run:
            logger.info('Not dumping GEXF in dry_run mode')
            return
        with timer('[GEXF] Dumping simulation {} trial {}'.format(self.sim.name,
                                                                  env.name)):
            with self.output('{}.gexf'.format(env.name), mode='wb') as f:
                env.dump_gexf(f)


class Dummy(Exporter):
class dummy(Exporter):

    def start(self):
        with self.output('dummy', 'w') as f:
@@ -131,7 +135,7 @@ class Dummy(Exporter):
            f.write('simulation ended @ {}\n'.format(time.time()))


class Distribution(Exporter):
class distribution(Exporter):
    '''
    Write the distribution of agent states at the end of each trial,
    the mean value, and its deviation.
@@ -165,7 +169,7 @@ class Distribution(Exporter):
        with self.output('metrics.csv') as f:
            dfm.to_csv(f)


class GraphDrawing(Exporter):
class graphdrawing(Exporter):

    def trial_end(self, env):
        # Outside effects
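
With the renames above, exporters can be referred to by their lowercase names (as soil/__main__.py now does) or by passing the classes directly (as the tests below do). A hedged sketch of the name-based form, reusing example.yml:

import yaml
from soil import simulation

with open('example.yml') as f:
    config = yaml.load(f, Loader=yaml.SafeLoader)

s = simulation.from_config(config)
envs = s.run_simulation(
    exporters=['default', 'csv', 'gexf'],   # lowercase names, matching the CLI flags
    outdir='soil_output',                   # the new default base directory
)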

View File

@@ -11,6 +11,7 @@ logger = logging.getLogger(__name__)
from collections import UserDict, namedtuple

from . import serialization
from .utils import open_or_reuse


class History:
@@ -236,7 +237,7 @@ class History:
    def dump(self, f):
        self._close()
        for line in open(self.db_path, 'rb'):
        for line in open_or_reuse(self.db_path, 'rb'):
            f.write(line)

View File

@@ -186,7 +186,7 @@ def deserializer(type_, known_modules=[]):
            module = importlib.import_module(modname)
            cls = getattr(module, tname)
            return getattr(cls, 'deserialize', cls)
        except (ModuleNotFoundError, AttributeError) as ex:
        except (ImportError, AttributeError) as ex:
            errors.append((modname, tname, ex))
    raise Exception('Could not find type {}. Tried: {}'.format(type_, errors))
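
Context for the switch to `ImportError`: `ModuleNotFoundError` only exists on Python >= 3.6 and is a subclass of `ImportError`, so catching `ImportError` keeps the deserializer working on the older interpreters mentioned in the 0.14.3 entry. A standalone illustration:

import importlib

try:
    importlib.import_module('a_module_that_does_not_exist')  # hypothetical name
except ImportError as ex:
    # On Python 3.6+ this is actually a ModuleNotFoundError instance,
    # but this except clause catches it on every supported version.
    print(type(ex).__name__)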

View File

@@ -153,11 +153,11 @@ class Simulation(NetworkSimulation):
                                   **kwargs)

    def _run_simulation_gen(self, *args, parallel=False, dry_run=False,
                            exporters=None, outdir=None, exporter_params={}, **kwargs):
                            exporters=['default', ], outdir=None, exporter_params={}, **kwargs):
        logger.info('Using exporters: %s', exporters or [])
        logger.info('Output directory: %s', outdir)
        exporters = exporters_for_sim(self,
                                      exporters or [],
                                      exporters,
                                      dry_run=dry_run,
                                      outdir=outdir,
                                      **exporter_params)
@@ -216,9 +216,10 @@ class Simulation(NetworkSimulation):
        try:
            return self.run_trial(*args, **kwargs)
        except Exception as ex:
            c = ex.__cause__
            c.message = ''.join(traceback.format_exception(type(c), c, c.__traceback__)[:])
            return c
            if ex.__cause__ is not None:
                ex = ex.__cause__
            ex.message = ''.join(traceback.format_exception(type(ex), ex, ex.__traceback__)[:])
            return ex

    def to_dict(self):
        return self.__getstate__()
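
The trial wrapper now falls back to the exception itself when there is no chained `__cause__`, instead of assuming one is always present. Roughly the same pattern in isolation (the helper name is illustrative):

import traceback

def format_failure(ex):
    # Prefer the chained cause when the error was raised with 'raise ... from ...'
    if ex.__cause__ is not None:
        ex = ex.__cause__
    return ''.join(traceback.format_exception(type(ex), ex, ex.__traceback__))

try:
    try:
        1 / 0
    except ZeroDivisionError as inner:
        raise RuntimeError('trial failed') from inner
except RuntimeError as outer:
    print(format_failure(outer))   # prints the ZeroDivisionError traceback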

View File

@@ -2,6 +2,8 @@ import logging
import time
import os

from shutil import copyfile
from contextlib import contextmanager

logger = logging.getLogger('soil')
@@ -23,15 +25,26 @@ def timer(name='task', pre="", function=logger.info, to_object=None):
        to_object.end = end


def safe_open(path, *args, **kwargs):
def safe_open(path, mode='r', backup=True, **kwargs):
    outdir = os.path.dirname(path)
    if outdir and not os.path.exists(outdir):
        os.makedirs(outdir)
    return open(path, *args, **kwargs)
    if backup and 'w' in mode and os.path.exists(path):
        creation = os.path.getctime(path)
        stamp = time.strftime('%Y-%m-%d_%H:%M', time.localtime(creation))
        backup_dir = os.path.join(outdir, stamp)
        if not os.path.exists(backup_dir):
            os.makedirs(backup_dir)
        newpath = os.path.join(backup_dir, os.path.basename(path))
        if os.path.exists(newpath):
            newpath = '{}@{}'.format(newpath, time.time())
        copyfile(path, newpath)
    return open(path, mode=mode, **kwargs)


def open_or_reuse(f, *args, **kwargs):
    try:
        return safe_open(f, *args, **kwargs)
    except TypeError:
    except (AttributeError, TypeError):
        return f
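
A short usage sketch of the new backup behaviour and of `open_or_reuse` accepting either a path or an already-open object (assuming both helpers are importable from soil.utils; the output path is illustrative):

import io
from soil.utils import safe_open, open_or_reuse

buf = io.StringIO()
assert open_or_reuse(buf) is buf    # file-like objects are returned unchanged

with safe_open('soil_output/demo.txt', mode='w') as f:   # missing directories are created
    f.write('first run\n')

# Writing to the same path again moves the previous file into a
# timestamped backup directory before reopening it.
with safe_open('soil_output/demo.txt', mode='w') as f:
    f.write('second run\n')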

View File

@@ -118,9 +118,9 @@ class SocketHandler(tornado.websocket.WebSocketHandler):
        elif msg['type'] == 'download_gexf':
            G = self.trials[ int(msg['data']) ].history_to_graph()
            for node in G.nodes():
                if 'pos' in G.node[node]:
                    G.node[node]['viz'] = {"position": {"x": G.node[node]['pos'][0], "y": G.node[node]['pos'][1], "z": 0.0}}
                    del (G.node[node]['pos'])
                if 'pos' in G.nodes[node]:
                    G.nodes[node]['viz'] = {"position": {"x": G.nodes[node]['pos'][0], "y": G.nodes[node]['pos'][1], "z": 0.0}}
                    del (G.nodes[node]['pos'])
            writer = nx.readwrite.gexf.GEXFWriter(version='1.2draft')
            writer.add_graph(G)
            self.write_message({'type': 'download_gexf',
@@ -130,9 +130,9 @@ class SocketHandler(tornado.websocket.WebSocketHandler):
        elif msg['type'] == 'download_json':
            G = self.trials[ int(msg['data']) ].history_to_graph()
            for node in G.nodes():
                if 'pos' in G.node[node]:
                    G.node[node]['viz'] = {"position": {"x": G.node[node]['pos'][0], "y": G.node[node]['pos'][1], "z": 0.0}}
                    del (G.node[node]['pos'])
                if 'pos' in G.nodes[node]:
                    G.nodes[node]['viz'] = {"position": {"x": G.nodes[node]['pos'][0], "y": G.nodes[node]['pos'][1], "z": 0.0}}
                    del (G.nodes[node]['pos'])
            self.write_message({'type': 'download_json',
                                'filename': self.config['name'] + '_trial_' + str(msg['data']),
                                'data': nx.node_link_data(G) })
@@ -271,4 +271,4 @@ def main():
    parser.add_argument('--verbose', '-v', help='verbose mode', action='store_true')
    args = parser.parse_args()

    run(name=args.name, port=(args.port[0] if isinstance(args.port, list) else args.port), verbose=args.verbose)
    run(name=args.name, port=(args.port[0] if isinstance(args.port, list) else args.port), verbose=args.verbose)

View File

@@ -60,7 +60,7 @@ class Exporters(TestCase):
        }
        output = io.StringIO()
        s = simulation.from_config(config)
        s.run_simulation(exporters=[exporters.Distribution], dry_run=True, exporter_params={'copy_to': output})
        s.run_simulation(exporters=[exporters.distribution], dry_run=True, exporter_params={'copy_to': output})
        result = output.getvalue()

        assert 'count' in result
        assert 'SEED,Noneexporter_sim_trial_3,1,,1,1,1,1' in result
@@ -83,10 +83,10 @@ class Exporters(TestCase):
        s = simulation.from_config(config)
        tmpdir = tempfile.mkdtemp()
        envs = s.run_simulation(exporters=[
            exporters.Default,
            exporters.CSV,
            exporters.Gexf,
            exporters.Distribution,
            exporters.default,
            exporters.csv,
            exporters.gexf,
            exporters.distribution,
        ],
                                outdir=tmpdir,
                                exporter_params={'copy_to': output})

View File

@@ -16,10 +16,15 @@ ROOT = os.path.abspath(os.path.dirname(__file__))
EXAMPLES = join(ROOT, '..', 'examples')


class CustomAgent(agents.BaseAgent):
    def step(self):
        self.state['neighbors'] = self.count_agents(state_id=0,
class CustomAgent(agents.FSM):
    @agents.default_state
    @agents.state
    def normal(self):
        self.state['neighbors'] = self.count_agents(state_id='normal',
                                                    limit_neighbors=True)

    @agents.state
    def unreachable(self):
        return


class TestMain(TestCase):
@@ -134,8 +139,7 @@ class TestMain(TestCase):
            },
            'network_agents': [{
                'agent_type': CustomAgent,
                'weight': 1,
                'state': {'id': 0}
                'weight': 1
            }],
            'max_time': 10,
@@ -145,6 +149,9 @@ class TestMain(TestCase):
        s = simulation.from_config(config)
        env = s.run_simulation(dry_run=True)[0]
        assert env.get_agent(0).state['neighbors'] == 1
        assert env.get_agent(0).state['neighbors'] == 1
        assert env.get_agent(1).count_agents(state_id='normal') == 2
        assert env.get_agent(1).count_agents(state_id='normal', limit_neighbors=True) == 1

    def test_torvalds_example(self):
        """A complete example from a documentation should work."""
@@ -179,7 +186,7 @@ class TestMain(TestCase):
        with utils.timer('serializing'):
            serial = s.to_yaml()
        with utils.timer('recovering'):
            recovered = yaml.load(serial)
            recovered = yaml.load(serial, Loader=yaml.SafeLoader)
        with utils.timer('deleting'):
            del recovered['topology']
        assert config == recovered
@@ -233,7 +240,7 @@ class TestMain(TestCase):
        env[0, 0, 'testvalue'] = 'start'
        env[0, 10, 'testvalue'] = 'finish'
        nG = env.history_to_graph()
        values = nG.node[0]['attr_testvalue']
        values = nG.nodes[0]['attr_testvalue']
        assert ('start', 0, 10) in values
        assert ('finish', 10, None) in values