diff --git a/VERSION b/VERSION index 3eefcb9d..6e8bf73a 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.0.0 +0.1.0 diff --git a/build_docs/conf.py b/build_docs/conf.py index 3f7b3800..957b5a82 100644 --- a/build_docs/conf.py +++ b/build_docs/conf.py @@ -54,9 +54,9 @@ # built documents. # # The short X.Y version. -version = '1.0' +version = '0.1' # The full version, including alpha/beta/rc tags. -release = '1.0.0' +release = '0.1.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -270,7 +270,7 @@ epub_copyright = u'2019, Brian Drawert, Evie Hilton' # The basename for the epub file. It defaults to the project name. -#epub_basename = u'PyURDME' +#epub_basename = u'SpatialPy' # The HTML theme for the epub output. Since the default themes are not optimized # for small screen space, using the same theme for HTML and epub output is diff --git a/build_docs/examples_coral_reef.rst b/build_docs/examples_coral_reef.rst index fd198701..8e9196fa 100644 --- a/build_docs/examples_coral_reef.rst +++ b/build_docs/examples_coral_reef.rst @@ -1,4 +1,4 @@ -PyURDME Example: Coral Reef +SpatialPy Example: Coral Reef ============================= Introduction diff --git a/build_docs/index.rst b/build_docs/index.rst index 190c2c77..54d9c511 100644 --- a/build_docs/index.rst +++ b/build_docs/index.rst @@ -1,14 +1,14 @@ -.. PyURDME documentation master file, created by +.. SpatialPy documentation master file, created by sphinx-quickstart on Mon Apr 21 11:09:45 2014. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. -Welcome to PyURDME's documentation! +Welcome to SpatialPy's documentation! =================================== -PyURDME is a general software framework for modeling and simulation of stochastic reaction-diffusion processes on unstructured, tetrahedral (3D) and triangular (2D) meshes. Unstructured meshes allow for a more flexible handling of complex geometries compared to structured, Cartesian meshes. The current core simulation algorithm is based on the mesoscopic reaction-diffusion master equation (RDME) model. +SpatialPy is a general software framework for modeling and simulation of stochastic reaction-diffusion processes on unstructured, tetrahedral (3D) and triangular (2D) meshes. Unstructured meshes allow for a more flexible handling of complex geometries compared to structured, Cartesian meshes. The current core simulation algorithm is based on the mesoscopic reaction-diffusion master equation (RDME) model. -PyURDME was originally based on the URDME software package, but has been completely rewritten in python and updated. It depends on the FEniCS libraries for mesh generation and assembly, see http://fenicsproject.org/. The core simulation routines are implemented in C, and requires GCC for compilation. The default solver is an efficient implementation of the Next Subvolume Method (NSM). +SpatialPy was originally based on the PyURDME software package, and before that the URDME software. It has been rewritten to use the Smoothed Dissipative Particle Dynamics (SDPD) formulation for the discretization of the diffusion equation. The core simulation routines are implemented in C, and require GCC for compilation. The default solver is an efficient implementation of the Next Subvolume Method (NSM).
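A model is assembled by subclassing spatialpy.Model and registering species, parameters, and reactions; a minimal sketch, following the cylinder demo notebook added later in this diff (class and method names are taken from that example, not from a finalized public API, and the model name, species, and rate values are illustrative only)::

    import spatialpy

    class MinimalModel(spatialpy.Model):
        def __init__(self):
            spatialpy.Model.__init__(self, "minimal_model")
            # One diffusing species.
            A = spatialpy.Species(name="A", diffusion_constant=0.1)
            self.add_species([A])
            # Geometry comes from an XML mesh file, as in the cylinder demo.
            self.mesh = spatialpy.Mesh.read_xml_mesh('cylinder.xml')
            # Degradation reaction A -> 0 at rate k_deg.
            k_deg = spatialpy.Parameter(name="k_deg", expression=0.1)
            self.add_parameter([k_deg])
            self.add_reaction([spatialpy.Reaction(reactants={A: 1}, products=None, rate=k_deg)])
            self.timespan(range(100))

    result = MinimalModel().run()

The cylinder demo below exercises the same calls with two species, subdomains on the cylinder ends, and creation reactions restricted to those subdomains.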
Contents: diff --git a/build_docs/pyurdme.rst b/build_docs/pyurdme.rst deleted file mode 100644 index 4eb757d8..00000000 --- a/build_docs/pyurdme.rst +++ /dev/null @@ -1,29 +0,0 @@ -PyURDME API -=============== - -pyurdme module ----------------------- - -.. automodule:: pyurdme.pyurdme - :members: - :undoc-members: - :show-inheritance: - - -pyurdme.model module --------------------- - -.. automodule:: pyurdme.model - :members: - :undoc-members: - :show-inheritance: - -pyurdme.nsmsolver module ------------------------- - -.. automodule:: pyurdme.nsmsolver - :members: - :undoc-members: - :show-inheritance: - - diff --git a/build_docs/spatialpy.rst b/build_docs/spatialpy.rst new file mode 100644 index 00000000..e9914583 --- /dev/null +++ b/build_docs/spatialpy.rst @@ -0,0 +1,21 @@ +SpatialPy API +=============== + +spatialpy module +---------------------- + +.. automodule:: spatialpy.spatialpy + :members: + :undoc-members: + :show-inheritance: + + +spatialpy.nsmsolver module +------------------------ + +.. automodule:: spatialpy.nsmsolver + :members: + :undoc-members: + :show-inheritance: + + diff --git a/examples/cylinderDemo/SpatialPy_cylinderDemo3D.ipynb b/examples/cylinderDemo/SpatialPy_cylinderDemo3D.ipynb new file mode 100644 index 00000000..d9294ab9 --- /dev/null +++ b/examples/cylinderDemo/SpatialPy_cylinderDemo3D.ipynb @@ -0,0 +1,430 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import sys\n", + "from collections import OrderedDict" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "sys.path.append(\"../..\")" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "import spatialpy" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "import matplotlib.pyplot as plt\n", + "import numpy\n", + "\n", + "# Global Constants\n", + "MAX_X_DIM = 5.0\n", + "MIN_X_DIM = -5.0\n", + "TOL = 1e-9" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "class Edge1(spatialpy.SubDomain):\n", + " def inside(self, x, on_boundary):\n", + " return abs(x[0] - MAX_X_DIM) < 0.05\n", + "class Edge2(spatialpy.SubDomain):\n", + " def inside(self, x, on_boundary):\n", + " return abs(x[0] - MIN_X_DIM) < 0.05\n" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "metadata": {}, + "outputs": [], + "source": [ + "class cylinderDemo3D(spatialpy.Model):\n", + " def __init__(self, model_name=\"cylinder_demo3d\"):\n", + " spatialpy.Model.__init__(self, model_name)\n", + "\n", + " # System constants\n", + " D_const = 0.1\n", + "\n", + " # Define Species\n", + " A = spatialpy.Species(name=\"A\", diffusion_constant=D_const)\n", + " B = spatialpy.Species(name=\"B\", diffusion_constant=D_const)\n", + " self.add_species([A, B])\n", + "\n", + " # Define Geometry\n", + " self.mesh = spatialpy.Mesh.read_xml_mesh('cylinder.xml')\n", + "\n", + " # Define Subdomains\n", + " self.add_subdomain(Edge1(), 2)\n", + " self.add_subdomain(Edge2(), 3)\n", + " \n", + " # Restrict the movement of Chemical Species\n", + " self.restrict(A,[1,2])\n", + " self.restrict(B,[1,3])\n", + "\n", + " vol = self.mesh.get_vol()\n", + " sd = self.sd\n", + " left = 
numpy.sum(vol[sd == 2])\n", + " right = numpy.sum(vol[sd == 3])\n", + " print(\"left \"+str(left)+\" right \"+str(right))\n", + " \n", + " k_react = spatialpy.Parameter(name=\"k_react\", expression=1.0)\n", + " k_creat1 = spatialpy.Parameter(name=\"k_creat1\", \n", + " expression=100/left)\n", + " k_creat2 = spatialpy.Parameter(name=\"k_creat2\", \n", + " expression=100/right)\n", + " self.add_parameter([k_react, k_creat1,k_creat2])\n", + "\n", + "\n", + " # Define Reactions\n", + " R1 = spatialpy.Reaction(reactants=None, products={A:1}, \n", + " rate=k_creat1, restrict_to=2)\n", + " R2 = spatialpy.Reaction(reactants=None, products={B:1}, \n", + " rate=k_creat2, restrict_to=3)\n", + " R3 = spatialpy.Reaction(reactants={A:1, B:1}, products=None, \n", + " rate=k_react)\n", + " self.add_reaction([R1, R2, R3])\n", + "\n", + " # Define simulation timespan\n", + " #self.set_timesteps(1, 200)\n", + " self.timespan(range(500))" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "left 0.509201383306 right 0.505804729089\n" + ] + } + ], + "source": [ + "model = cylinderDemo3D()" + ] + }, + { + "cell_type": "code", + "execution_count": 104, + "metadata": {}, + "outputs": [], + "source": [ + "from spatialpy.nsmsolver import NSMSolver" + ] + }, + { + "cell_type": "code", + "execution_count": 113, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "h=0.8\n", + "Compiling Solver. Build dir: /tmp/tmp1sl4ksb_\n", + "Creating propensity file /tmp/tmp1sl4ksb_/cylinder_demo3d_generated_model.c\n", + "cmd: cd /tmp/tmp1sl4ksb_ ; make -f /home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine/build/Makefile.nsm ROOT=/home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine MODEL=/tmp/tmp1sl4ksb_/cylinder_demo3d_generated_model.c BUILD=/tmp/tmp1sl4ksb_\n", + "\n", + "gcc -c -o linked_list.o /home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine/src/linked_list.c -I/home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine/include/ -O3 -Wall \n", + "gcc -c -o particle.o /home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine/src/particle.c -I/home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine/include/ -O3 -Wall \n", + "gcc -c -o simulate.o /home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine/src/simulate.c -I/home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine/include/ -O3 -Wall \n", + "gcc -c -o count_cores.o /home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine/src/count_cores.c -I/home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine/include/ -O3 -Wall \n", + "gcc -c -o output.o /home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine/src/output.c -I/home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine/include/ -O3 -Wall \n", + "gcc -c -o simulate_rdme.o /home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine/src/simulate_rdme.c -I/home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine/include/ -O3 -Wall \n", + "gcc -c -o binheap.o /home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine/src/binheap.c -I/home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine/include/ -O3 -Wall \n", + 
"gcc -c -o main.o /tmp/tmp1sl4ksb_/cylinder_demo3d_generated_model.c -I/home/brian/Desktop/research/SpatialPy/spatialpy/ssa_sdpd-c-simulation-engine/include/ -O3 -Wall \n", + "gcc -o ssa_sdpd linked_list.o particle.o simulate.o count_cores.o output.o simulate_rdme.o binheap.o main.o -pthread -lm\n", + "\n", + "\n", + "CPU times: user 19.1 ms, sys: 4.13 ms, total: 23.2 ms\n", + "Wall time: 1.64 s\n" + ] + } + ], + "source": [ + "#result = model.run(report_level=2)\n", + "sol = NSMSolver(model, report_level=2)\n", + "sol.h = 0.8\n", + "print(\"h=\"+str(sol.h))\n", + "%time sol.compile()" + ] + }, + { + "cell_type": "code", + "execution_count": 84, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "cmd: cd /tmp/tmpra_oadd6;/tmp/tmpleetkdl9/ssa_sdpd\n", + "\n", + "CPU times: user 17.2 ms, sys: 8.21 ms, total: 25.4 ms\n", + "Wall time: 6.76 s\n" + ] + } + ], + "source": [ + "%time result = sol.run()" + ] + }, + { + "cell_type": "code", + "execution_count": 85, + "metadata": { + "scrolled": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "21241.0\n", + "21386.0\n", + "0.509201383306\n", + "0.505804729089\n" + ] + }, + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 85, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAAFpCAYAAAB3UOSMAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4xLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvAOZPmwAAIABJREFUeJzt3Xl4lNXZ+PHvnQUSNsMuEjWogIgsYgQiCrgBoq/iVq0LtFXjUqu82rJo31d/alVoq9a6YOpSeEXQuiBaLKVAQGBYgqCySlCEsBvCagLJ5Pz+OM/ImMxM9nlmuT/XlWsy5zkzc55kZu7n7GKMQSmllPKX4HYBlFJKRR4NDkoppSrR4KCUUqoSDQ5KKaUq0eCglFKqEg0OSimlKtHgoJRSqhINDkoppSrR4KCUUqoSDQ5KKaUqSXK7ALXVpk0bk5GR4XYxlFIqqqxcufJ7Y0zbqvJFbXDIyMggLy/P7WIopVRUEZHvqpNPm5WUUkpVosFBKaVUJRoclFJKVRK1fQ6BlJaWUlBQQElJidtFCSklJYX09HSSk5PdLopSKkocOgRffAEbNsAddzT868VUcCgoKKB58+ZkZGQgIm4XJyBjDIWFhRQUFNCpUye3i6OUcpkxsHs3bNwIBw/CkSP259Ah2LsX1qyBL1eXs2Xr8Yaea66B1q0btlwxFRxKSkoiOjAAiAitW7dm7969bhdFKdUAjIFt22DHDmjSBPbvhz27DTsLvOzYYSjc7aVpq8YUfHuM3QVlrNuUROGhxgGfK5EyuiR9S7+yldzBV/TiC85NWE1r+RJo1aDnEVPBAYjowOATDWVUSgW3bx98+y0cOAAH95ezZ6eXXd8dZefGA3y88AS2729W4RECJJHMMVqxn4O04GS20Z7dXMsGzmYN3WQjLdsm0eT0DjTr2pFmh3bSQg6RlJoMXbtC9/Mg/Ro4+2xISWnwc4y54BAJPvzwQ6699lrWr1/PmWee6XZxlFLV5PXappzt2+2V/65dtoknPx+OHYONGw0b1pSxZ59/f2GC85NME4ShzOaylnmc0iON4sZppDX30rZlGSe29dK26Q8kNEmxL3DSSdCuHXTPhFZDoEMHaBy4BuEGDQ4NYNq0aVxwwQVMnz6dxx57zO3iKKUcxsDmzbB2LRQV2Q7evBXl7Nlt+OEH2Lk7Aa+3cs2+uRymkZRyplnPf5l1nMkGziCflu0b0+L8s2l5agvSO3hJOPssEs7qA6deA1HeQlBlcBCRk4EpwIlAOZBjjPmLiLQC3gEygC3Az4wxRWLbTP4CDAd+AH5hjPncea5RwO+dp37SGDPZST8X+DuQCswCHjDGmHo6x7A6fPgwixcvZv78+Vx11VUaHJRyQVkZJCbCpk2weLHt7N205iiLlwi7ixr9mC81oYRzzOf0Nttowg+cxA46st3eNi7kxGaHSWmeTNtz0u0TZmRAjx4w8D5IT4ek2L2+rs6ZlQEPGWM+F5HmwEoRmQP8AphrjHlGRMYB44CxwOVAZ+enH/AK0M8JJo8CmYBxnmemMabIyZMNLMUGh2HAp3U6s9GjYfXqOj1FJb17w/PPh8wyY8YMhg0bRpcuXWjVqhWff/45ffr0qd9yKBXnDhyAzz+3TT4bNoDHYzuB9+yBPXsMxcVCSuNySo7aET7JHOMUtjGEJQxgMX0araXlKc3JSC8j6dxe9os+MRFS2kF6Hzj1VOjcGeJ4uHmVwcEYsxPY6fx+SETWAx2Bq4HBTrbJQC42OFwNTHGu/JeKSJqIdHDyzjHG7ANwAswwEckFWhhjPE76FGAEdQ0OLpk2bRqjR48G4KabbmLatGkaHJSqhW3b7Bf//v0wd6790k8w5WzYAOs2JmDM8Wab01O3c0bSFs707qJd8Raac5ADR0+gG+u54IQ1dL2wHQkXDoBzzoGuj9hgkKBzgEOpUZ1IRDKAc4BlQHsncGCM2Ski7ZxsHYFtfg8rcNJCpRcESK+bKq7wG0JhYSHz5s1jzZo1iAherxcRYeLEiTpCSakgjIGCAtv8s20bLF8O+fmG1auPf2aaJ/3ASWY75V5DVzZyA3lk4SGN/ZxykpcTO6VCq1b2JyMDTjvdjujpOdyO9NHPX41VOziISDPgfWC0MeZgiC+7QAdMLdIDlSEb2/zEKaecUlWRw+
69995j5MiRvPrqqz+mDRo0iEWLFnHhhRe6WDKl3FdWBl9+Cd98A6tWwfJFR9m4EQr3J/LD0eNfRR2Td3OGbOYPfMIAFpPCUXplHCGlf2/o1g1SU+HkHnD2z+G006BRoxCvqmqrWsFBRJKxgWGqMeYDJ3m3iHRwag0dgD1OegFwst/D04EdTvrgCum5Tnp6gPyVGGNygByAzMzMiOuwnjZtGuPGjftJ2nXXXcfbb7+twUHFjZIS2wG8dq3tC8jLg6Iiw/YCw+EjtiknkTJ6sYbBrKUN33Ma33Be2+/IaHWQ9qemQFoaDBoE50604/qbNnX5rOJPdUYrCfA6sN4Y86zfoZnAKOAZ5/Yjv/T7RGQ6tkP6gBNAZgNPiUhLJ98QYLwxZp+IHBKR/tjmqpHAX+vh3MIuNze3Utr9998f/oIoFQZeL6xfD8uW2aagzZtts1B+vqG83DYINEoo5fzUVfQ88h1D2EkWHro338Zp57Wm2cV9oU8faH82nHiTHeevzT8Rozo1hwHAbcBXIuIb/vMwNii8KyK3A1uBG5xjs7DDWPOxQ1l/CeAEgSeAFU6+x32d08A9HB/K+ilR2hmtVCwzBrZsgX/+E95/t4y8PDhcbL9C0pIO0a3xt/RI3MWN5cvozhq6s5Yu6UdpdHYX6NULTj4ZutwBAwfG9SigaFGd0UqLCNwvAHBJgPwG+HWQ53oDeCNAeh5wdlVlUUqFx4EDsGIFLF0KX31ZzvoVR9hVmMTeQ6kA9GQdo1hIP5bRT1ZwRs/mJLRrY8f9n3sunDcSzjvPzgBWUSl2Z3AopaqlvBzmz4d16+DLLwyeRWWs+zrpx6GipyVspWv5OjLZRV+Wk9W5kJ5Xd0IuvghOucjWCFq0cPksVH3T4KBUnDl0yHYSL1tqWLagmKXLE9hVZBdyayn76W883IiH/rKc8zrvJy2rG1x1FWReCk2ugjZtXD4DFQ4aHJSKYcXFdsTQ/v2wILecuf8qZV1+I6dWIJzBdi5hGVfKLAZ32UH7vqcimefCucOg11hoVnF1URUvNDgoFSOMge++syOHfCOIVuaVU1ziW0KijEuYxw0spV+LDZw3vC2tB5xp+wh6vWY3H1DKocGhniUmJtKjRw+MMSQmJvLiiy9y/vnnu10sFaOKiuCDD2DGDFi+3LBnj+0naJxwjD7JX5F99DMuS5hHx+5pnNy/I63P7wrnXGvnDiQmulx6Fck0ONSz1NRUVjsL/s2ePZvx48ezYMECl0ulYsnevbBgAXz6KUydajh6VDij6Q6Gs5i+zKMvy+l5ppfkbmfABRfAzybZvQOUqgENDg3o4MGDtGzZsuqMSlVh2zZ46SX4+GM7qgigaWIxo8xb3MZkBnTYg/TqCZddBlf+HjrWfXkyFd9iNji4tGI3xcXF9O7dm5KSEnbu3Mm8efPqtxAqbhhj+w4mTbI1BFNuuKT5Cm5jBoOZz7mn7if52v+CayZCVpbOLlb1KmaDg1v8m5U8Hg8jR478cZVWpapSVmaHmf7jH/DhO0f5dntjmsgP3GNe40GeJeP01nDNNTDiNejeXQOCajAxGxxcWLG7kqysLL7//nv27t1LO50pqkIoKoLp0+HR//GytzCRBLxczhx+LzO4/oJdtLhhKFy9ECJwNWIVm2I2OESCDRs24PV6ad26tdtFURHo2DE73PSN5w4w7eOmlJQm0Zc8/sqzXHLiOtr8921w+wTQ949ygQaHeubrcwAwxjB58mQSdcigcpSWwkeTi3j7pf38Z+2JHCpNpSmJjOI17jzxY/rcmYlc9zD07KlNRspVGhzqmdfrdbsIKsJ4vbDgzW/4x1938f7aruz1tiadw9yQ8gFDehRwxQ1NaXbdUDg9W7euVBFDg4NSDaC8HJa8t4O3XyriQ097dpWeRhPac+WJeYwcXsiw33QmsdfNWjtQEUuDg1L1aM3iAzwzeidzVrVhj/ckmnACw5st5Gc3lXHFk1k0OWWQ20VUqlo0OChVR3sWb+KD57cyde6JLCrqzgkYrmi9lMsvF0aMzqBZn2FaQ1BRJ+aCgzEm4ucU2P2QVDTb9m0ZH/zpGz6YfoxF+7pRTmfOarSJP/T/mDuezKDdJcPcLqJSdRJTwSElJYXCwkJat24dsQHCGENhYSEpKSluF0XVkDGwYt4h3nl8I39d2JNSunB24jp+P3gx147tTM+hnRHp7HYxlaoXMRUc0tPTKSgoYO/evW4XJaSUlBTS09PdLoaqpn374E8P72PKW8L2Iy1Johc/a7eARx8po8uvLoBmZ7ldRKXqXUwFh+TkZDp16uR2MVQMMAZW5hn+9vgOpn7aih+8aVwps3jqgh1c+WQWrQZd6nYRlWpQMRUclKoLY2CZp5z3X97NR7OS2VTUhhRacXPKB4y+Yz89HrsOTjzR7WIqFRYaHFTc274d/v63Uia/coRNe9JIpjUXkcvvzlzD9fe2o+Wd14P2Eak4o8FBxa3PPoM/PnaEf85PpdwkM5AvGN9pIdf+7nROuHEYtBridhGVck2Vc/VF5A0R2SMia/zSHhOR7SKy2vkZ7ndsvIjki8hGERnqlz7MScsXkXF+6Z1EZJmIbBKRd0SkUX2eoFL+jIH5c8q4ouc2Bg6E5fMOMcZMYNNF2SxYIPxy8+854Z6boVUrt4uqlKuqU3P4O/AiMKVC+nPGmD/5J4jIWcBNQHfgJOA/ItLFOfwScBlQAKwQkZnGmHXABOe5povIJOB24JVano9SAZWVwXuv7edPjx9h5c6OtKUxT6T9mQdHl9PkthvhtNPcLqJSEaXK4GCMWSgiGdV8vquB6caYo8C3IpIP9HWO5RtjvgEQkenA1SKyHrgYuNnJMxl4DA0Oqp4UF8PfHt/Jcy8mseVwW7qwm1e7v8BtfziT1Kse1JnLSgVRlyUg7xORL51mJ99GyR2BbX55Cpy0YOmtgf3GmLIK6UrVSVmp4f1Hv6Rnm+088EwHOv6wiRmXv8r6DQlkr7mf1KuHaGBQKoTaBodXgNOB3sBO4M9OeqBPm6lFekAiki0ieSKSF+kT3ZQ79uYf4OkBn3Ba6g6uf7wncrSEOb+YyqI9Xbl61l0kdNUZzEpVR62CgzFmtzHGa4wpB/7G8aajAuBkv6zpwI4Q6d8DaSKSVCE92OvmGGMyjTGZbdu2rU3RVYxaMWsvo875kvTOKTy85Eq6tPqeGff9h/UHOnLpm7fobmpK1VCthrKKSAdjzE7n7jWAbyTTTOBtEXkW2yHdGViOrSF0FpFOwHZsp/XNxhgjIvOB64HpwCjgo9qejIov5eXwTs4Bnn/yMMu3d6QZKdx50ix+/Xxnut3Qy+3iKRXVqgwOIjINGAy0EZEC4FFgsIj0xjYBbQHuAjDGrBWRd4F1QBnwa2OM13me+4DZQCLwhjFmrfMSY4HpIvIksAp4vd7OTsWm8nKWPL+c0U+2ZkVRZ7qyk7/2yGHk5Etocc41bpdOqZgg0bp8dGZmpsnLy3O7GCqcDh9m23PvMe6ZE3j7h2s4KWEnz1w2j1ueyyShW1e3S6dUVBCRlcaYzKry6QxpFfk2bSL/y
em89k5zXjiajZEEfn/1V4x9oyvNWt3idumUikkaHFTk2r6d3eOe476p/XnP/A8JUs4Nl+zjmdfakJHRw+3SKRXTNDioyGMMha/PYOLoHbx85FFKE1P43wcOk/1gMzp2bON26ZSKCxocVEQpXbaS/7v134zJv5N9tOKmyw/y6HPJdO2a7HbRlIordZkhrVT92bKFOUP/RI/+Tbk9fzydT/PyxSrD27PS6Kp9zUqFnQYH5a7Nm1l5xf9yXqe9DPn3bylLa8OMqUdYvKk9PXonul06peKWNispd2zbRsHYv/LI9LP5P/MY7Zoc5sXxRdz+2za6r45SEUCDgwqv7dsp+dOLPPdiEk+WPUpZQiN+d88PPPxUC044we3CKaV8NDio8Pj6a8yTf+CdqWWML3+SLXTimqFH+PMryXTqpJ3NSkUaDQ6qYW3bBk88weLXN/Agf2Z5+Xn0Pusoc/4Cl17a1O3SKaWC0A5p1TC+/x4efJCiM87jztf6cUH5Qra378Pf/w4rv2rMpZe6XUClVCgaHFT9Ki2F556j/LQzmPx8EV0TvubNhF8xZgxs3JTIqFGQoO86pSKeNiup+jN/Ptx3H/nrjnJHy4UsMD3JOgdefhl693a7cEqpmtBrOFV3W7fCjTfivfhS/rTrFno02sjq8h787W+waJEGBqWikdYcVO0VF8Njj8Ff/sIXpid3dtzKiu0dueoqeOUVOOkktwuolKotDQ6qdjZvhttuY51nP2M6LuWf23vT5ihMnw4/+xlIoN3BlVJRQ5uVVM2Ul8PLL7OvxyB+lXcvZ8taFh/pzRNPwIYNcOONGhiUigVac1DVt2MHRT+/l2cXnstfEjfyA0146CFh7FhooytpKxVTNDio6vnnP5l181uMOvg3vqctN1xrePRRoXt3twumlGoIGhxUaEePsuf+J7knpzcfMI2eXUv4zzvQq5e2HSkVyzQ4qOC+/pqPhr3Cnd+O52BiS574fSkPjU0hNdXtgimlGpoGB1WZMRx8dRqjf1PGm2XP0bvTAeZ/nKxNSErFkSpHK4nIGyKyR0TW+KW1EpE5IrLJuW3ppIuIvCAi+SLypYj08XvMKCf/JhEZ5Zd+roh85TzmBREd6+KqI0fwDH+CXvdkMbnsFh6+7yDLNpyggUGpOFOdoax/B4ZVSBsHzDXGdAbmOvcBLgc6Oz/ZwCtggwnwKNAP6As86gsoTp5sv8dVfC0VJsVfbuLx0/7Ohf96GNLS+Gwh/OGvLWjUyO2SKaXCrcrgYIxZCOyrkHw1MNn5fTIwwi99irGWAmki0gEYCswxxuwzxhQBc4BhzrEWxhiPMcYAU/yeS4WJMfDOfy+lS+9UHt3za24YvJdV37bk/At1m06l4lVtJ8G1N8bsBHBu2znpHYFtfvkKnLRQ6QUB0lWYFBYUc1PnPG56vj/tmxwi953dTJvfgbQ0t0umlHJTfc+QDtRfYGqRHvjJRbJFJE9E8vbu3VvLIiqfT1/dSo9Oh/hwc0+euvBTlu45nUE/a+92sZRSEaC2wWG30ySEc7vHSS8ATvbLlw7sqCI9PUB6QMaYHGNMpjEms23btrUsujp8yHDP4PUMv/sUWplClr+Ux/iFl5PURDsXlFJWbYPDTMA34mgU8JFf+khn1FJ/4IDT7DQbGCIiLZ2O6CHAbOfYIRHp74xSGun3XKoBrJh/mD4n7eLVBV357Snvkrcpjd73nu92sZRSEaY6Q1mnAR6gq4gUiMjtwDPAZSKyCbjMuQ8wC/gGyAf+BtwLYIzZBzwBrHB+HnfSAO4BXnMesxn4tH5OTfnzeuHpe7dx/sWNKTlcyrw7pvHHb68npVMHt4umlIpAYgcJRZ/MzEyTl5fndjGiwv595dyY9R3//roTP2vyMZPeb0fLYf3cLpZSygUistIYk1lVPp0hHeM8s4q49foSthan8+o5k7jzPzcirVpW/UClVFzT/Rxi2Bu//4bBVzTBFJcw/8FPyF55lwYGpVS1aM0hBpWVGh4auoYX5vfg0pRFvPNpC1oNvsbtYimloogGhxhzcHcx153zDf/Z2YPRp37IH5cPIqldK7eLpZSKMhocYsjWRVu5dshhvijuwutXfcSvPrgKEnUJDKVUzWmfQ4z41xMr6DnwBNYXn8qMR1fzq4+u1sCglKo1rTlEudKj5fzPpR4mLBrAWY038/GnSZx20XluF0spFeU0OESxom+KuCpzO4uKBnBX53n8ZVl/Grds4naxlFIxQJuVotSehRu4qNtOlhd1Zuov/8OkjRdpYFBK1RsNDlFo57ufMfgi+PpYJz5+/htufuNS0A30lFL1SINDlPlm4nsMurE9WzmFWdMPMuSBbm4XSSkVg7TPIVqUlzP71v/j1mnD8SY1ZvYn5QwYqnsvKKUahtYcooA5cJCJ3Sdz+bTbaJ92jKWrUxkwtJnbxVJKxTCtOUS4o9/u4J5zl/Nm0S/5WZ983lx4Ok2aav+CUqphac0hgu1YmM/gbrt4s2gE/3vLZqatOEMDg1IqLDQ4RKjNz3/MgMFJfHW0K/94ZjP/763TSdD/llIqTLRZKQKt/f00LvvDII4mNSV3xmEyrzjd7SIppeKMBocIs+I3Uxj24hU0biwsXJJC9z4nuF0kpVQc0oaKSGEMC0a9wSUvjuCEpmUs+qI53fs0drtUSqk4pcEhEhjDRyPeYOiUm0lPO8xn69pwWtdkt0ullIpjGhzcdvQorw14k2tn/oLe7XexcOOJdDxFl9pWSrlLg4OLzP4DPHXmFO70/IrLTv+Wufmn0qad/kuUUu7TbyKXmD17Gdt1Bo9suZNbBnzLzHVn0LSZzmFQSkWGOgUHEdkiIl+JyGoRyXPSWonIHBHZ5Ny2dNJFRF4QkXwR+VJE+vg9zygn/yYRGVW3U4p8pmA7Y7rN5I97RnHvFd8xZWEnGjVyu1RKKXVcfdQcLjLG9DbGZDr3xwFzjTGdgbnOfYDLgc7OTzbwCthgAjwK9AP6Ao/6AkosMvmbGdv9E/6073Z+PWI7L358qk5uU0pFnIb4WroamOz8PhkY4Zc+xVhLgTQR6QAMBeYYY/YZY4qAOcCwBiiX+776isfP+YA/HryLe67bw18/6KjbMCilIlJdg4MB/i0iK0Uk20lrb4zZCeDctnPSOwLb/B5b4KQFS48pZsFCxp/3Hx47/Dt+MaKIF99tp4FBKRWx6jpDeoAxZoeItAPmiMiGEHkDfRWaEOmVn8AGoGyAU045paZldU3ZPz7krpv280b5f3P3LYd4cXJLbUpSSkW0On1FGWN2OLd7gA+xfQa7neYinNs9TvYC4GS/h6cDO0KkB3q9HGNMpjEms23btnUpetgU/3Me192YxBvlv+TRMcW8/H/NSdRpDEqpCFfr4CAiTUWkue93YAiwBpgJ+EYcjQI+cn6fCYx0Ri31Bw44zU6zgSEi0tLpiB7ipEW9olkehlzVmI/NFbw08QiPTUjVpiSlVFSoS7NSe+BDsd92ScDbxph/icgK4F0RuR3YCtzg5J8FDAfygR+AXwIYY/aJyBPACiff48aYfXUoV0TY/umXDPuv
FnxtOvPO3w5xwx26gJ5SKnqIMQGb9yNeZmamycvLc7sYAW38+GuGjkih0LTio2nFXHxjdDSBKaVin4is9Jt6EJQu2V3Pvvl0A4OvbolXEsn9oIhzR5xc9YOUUirCaHCoR998uoEhVzbmGI347JODnHX5qW4XSSmlakUHVNaTfUs2MPTKRhSZNGa9e1gDg1IqqmlwqAcH1mxj2OAStpZ3ZOa0I/S7XpuSlFLRTYNDHR3+Zg/Dz9vLqtLuvP/CDgbcmO52kZRSqs40ONTBwS37GN5zG8tKejL9yc1c+ZtObhdJKaXqhQaHWvp+y2EuPns3niM9efuRdVz3yJluF0kppeqNjlaqhcM7DnJ5j+2sOdKJGb9fyRVP9He7SEopVa+05lBDpXv3c/1Za/n8cBf+MSZPA4NSKiZpcKiBku2F3NB5NbMPZPHqr7/kvyZc4HaRlFKqQWizUjUd2VrIiO6b+M/hwfz17rXc8eI5bhdJKaUajAaHajiwpYgrzt6K58h5vPngV/zizz3cLpJSSjUoDQ5VOLJtH8PP3sryI2cz/eEvueEPWmNQSsU+7XMIoWTHPq7uvomlR3ow7eE1GhiUUnFDg0MQpfuPcGP3r5h7qB9vPLiW6zUwKKXiiAaHALxHyxjVPY+Z+wfx0l1fMurPPd0uklJKhZUGhwqMt5y7ey5h2o5BPDNiKfdO0sCglIo/Ghz8mNIyHurxb177eiCPXLCAsR/qBDelVHzS4OAw5YZH+s7hufXDuL//cp5YOMjtIimllGt0KCs2MPy2/yKeXX052b2X89zivoi4XSqllHJP3Nccyr2G3/RdxrMrLuQ3Z89jUl4mCXH/V1FKxbu4/hos9xruylzJSyv789ues/nL6sFIYlz/SZRSCoig4CAiw0Rko4jki8i4hn49Y+DOzFW8tjqTR3p9wsTPL9PAoJRSjoj4NhSRROAl4HLgLODnInJWQ72eKTeMuWAJb6zuw//2/ognPx+ugUEppfxEyjdiXyDfGPONMeYYMB24ukFeyevld33m8qcl53Nv17k8tuJKtJNBKaV+KlK+FTsC2/zuFzhp9coYeOqSufz5i0u5t/cSXlx7EZKUWN8vo5RSUS9SgkOggaOmUiaRbBHJE5G8vXv31vhFyopLmb2qHbecsZTnl5+vTUlKKRVEpMxzKABO9rufDuyomMkYkwPkAGRmZlYKHlVJbpLMrC1nkZpiSEiubVGVUir2Rcql8wqgs4h0EpFGwE3AzIZ4oaYtG5GQ2rghnloppWJGRNQcjDFlInIfMBtIBN4wxqx1uVhKKRW3IiI4ABhjZgGz3C6HUkqpyGlWUkopFUE0OCillKpEjKnxoJ+IICJ7ge9q+fA2wPf1WJxooOccH/Sc40NdzvlUY0zbqjJFbXCoCxHJM8Zkul2OcNJzjg96zvEhHOeszUpKKaUq0eCglFKqkngNDjluF8AFes7xQc85PjT4Ocdln4NSSqnQ4rXmoJRSKoS4Cg7h3m0unETkDRHZIyJr/NJaicgcEdnk3LZ00kVEXnD+Dl+KSB/3Sl47InKyiMwXkfUislZEHnDSY/mcU0RkuYh84Zzz/3PSO4nIMuec33HWJ0NEGjv3853jGW6Wvy5EJFFEVonIJ879mD5nEdkiIl+JyGoRyXPSwvrejpvgEO7d5lzwd2BYhbRxwFxjTGdgrnMf7N+gs/OTDbwSpjLWpzLgIWNMN6A/8Gvn/xnL53wUuNgY0wvoDQwTkf7ABOA555yLgNud/LcDRcaYM4DnnHzR6gFgvd/9eDjni4wxvf2GrIb3vW2MiYsfIAuY7Xd/PDDe7XLV8zlmAGv87m8EOjgCWbu5AAAgAElEQVS/dwA2Or+/Cvw8UL5o/QE+Ai6Ll3MGmgCfA/2wk6GSnPQf3+fYhSyznN+TnHzidtlrca7p2C/Di4FPsPu/xPo5bwHaVEgL63s7bmoOhGm3uQjT3hizE8C5beekx9Tfwmk6OAdYRoyfs9O8shrYA8wBNgP7jTFlThb/8/rxnJ3jB4DW4S1xvXgeGAOUO/dbE/vnbIB/i8hKEcl20sL63o6YVVnDoFq7zcWJmPlbiEgz4H1gtDHmoEigU7NZA6RF3TkbY7xAbxFJAz4EugXK5txG/TmLyJXAHmPMShEZ7EsOkDVmztkxwBizQ0TaAXNEZEOIvA1yzvFUc6jWbnMxZreIdABwbvc46THxtxCRZGxgmGqM+cBJjulz9jHG7Adysf0taSLiu9DzP68fz9k5fgKwL7wlrbMBwFUisgWYjm1aep7YPmeMMTuc2z3Yi4C+hPm9HU/BIWy7zUWQmcAo5/dR2HZ5X/pIZ5RDf+CAr7oaLcRWEV4H1htjnvU7FMvn3NapMSAiqcCl2E7a+cD1TraK5+z7W1wPzDNOo3S0MMaMN8akG2MysJ/ZecaYW4jhcxaRpiLS3Pc7MARYQ7jf2253vIS5k2c48DW2nfYRt8tTz+c2DdgJlGKvJG7HtrXOBTY5t62cvIIdubUZ+ArIdLv8tTjfC7BV5y+B1c7P8Bg/557AKuec1wD/66SfBiwH8oF/AI2d9BTnfr5z/DS3z6GO5z8Y+CTWz9k5ty+cn7W+76pwv7d1hrRSSqlK4qlZSSmlVDVpcFBKKVWJBgellFKVaHBQSilViQYHpZRSlWhwUEopVYkGB6WUUpVocFBKKVWJBgellFKVaHBQSilViQYHpZRSlWhwUEopVYkGB6WUUpVocFBKKVVJ1G4T2qZNG5ORkeF2MZRSKqqsXLnye2NM26ryRW1wyMjIIC8vz+1iKKVUVBGR76qTT5uVlFJKVaLBQSmlVCVxFxw8Hnj6aXurlFIqsKjtc6gNjwcuuQSOHYNGjWDuXMjKcrtUSikVeeKq5pCbCyUl4PXa29xct0uklFKRKa6Cw9q1YIz93Rh7XymlVGVxFRyWLQt9XymllBVXweG000LfV0opZcVVcNi/P/R9pZRSVlwFh5SU0PeVUkpZcRUcdu4MfV8ppZQVV8Fh27bQ95VSSllxFRzKykLfV0opZVUZHEQkRUSWi8gXIrJWRP6fk95JRJaJyCYReUdEGjnpjZ37+c7xDL/nGu+kbxSRoX7pw5y0fBEZV/+naWlwUEqp6qlOzeEocLExphfQGxgmIv2BCcBzxpjOQBFwu5P/dqDIGHMG8JyTDxE5C7gJ6A4MA14WkUQRSQReAi4HzgJ+7uRVSqnY57/gW04O9OsH11zj+gJwVa6tZIwxwGHnbrLzY4CLgZud9MnAY8ArwNXO7wDvAS+KiDjp040xR4FvRSQf6OvkyzfGfAMgItOdvOvqcmJKKRXxPB4YPBhKS48v3+DzySewcKFrC8BVq8/BucJfDewB5gCbgf3GGF/DTAHQ0fm9I7ANwDl+AGjtn17hMcHSlVIqtk2ZYlcCrRgYwLZ7u7gAXLWCgzHGa4zpDaRjr/a7Bcrm3EqQYzVNr0REskUkT0Ty9u7dW3XBK2jcOPR9pZQKq127Qh8fPDgsxQikRqOVjDH7gVygP5AmIr5mqXRgh/N7AXAygHP8BGCff3qFxwRLD/T6Oca
YTGNMZtu2VW6BWkm/fqHvK6VUWO3bF/xY586u7ilQndFKbUUkzfk9FbgUWA/MB653so0CPnJ+n+ncxzk+z+m3mAnc5Ixm6gR0BpYDK4DOzuinRthO65n1cXIVPfMMJDnhLCnJ3ldKKVfk5MBnnwU/Pnly+MoSQHU2++kATHZGFSUA7xpjPhGRdcB0EXkSWAW87uR/Hfg/p8N5H/bLHmPMWhF5F9vRXAb82hjjBRCR+4DZQCLwhjGmQRbTzsqy/Tu5uba2phv9KKVc4fHA3XcH7msA6NvX9S8oMcEKF+EyMzNNXl6e28VQSqmaGzTIXqkG8+qrkJ3dIC8tIiuNMZlV5YurbUKVUspVHo9tuli1KnieW25psMBQExoclFIqHHyb2BcXB88zZgxMmBC+MoUQV2srheI/STHQfaWUqpPc3KgJDKA1B+B4QD92DBISoGNH2LrV9hUlJsJLL0VELU8pFc1atw6c3rQpPPtsxH3JaHDABvRjx8DrtT9bthw/VlYG994LPXq4PnhAKRWNcnLg9ddhz57AxyMwMIA2KwF2WGujRsGPe70werQ2MSmlamjsWLjrLli+/KdXnT4DB0ZkYAANDoCtEcydCyeeGDzPihW26UkDhFKqWnJyYOLE4McbN47ombgaHBxZWaHXWjIGjh51dR0spVS08HhsjSGQpCQ7AW7+/Ihuq9Y+Bz9Nm4Y+Xl4evE9JKaV+NC7InmVpaTBrVkQHBR+tOfh54IGq8zz+eMOXQykVxXJygs9+njAhKgIDaHD4iexsO2u9b19o1ixwnu3bI2KTJqVUpPF47LIYgZqTWrdu0CUxGoIGhwqys2HZMvjzn4PnmTEDLrxQg4RSyuHxwPnnB68xPPRQVAUG0OAQVHa2nbAYjNdrg8TAgRoglIp7P/tZ8GOpqa5u2lNbGhxCmDABliyxAUAC7VeHnSQ3ZUp4y6WUiiA5OVBQEPhY3752nHyU9DP40+BQhawsWLAAFi+2/+dA5s4Nb5mUUhHA47Fty7/5TeDjCQnw/PNRGRhAg0O1ZWVBnz6Bj23apFuOKhVXPB7bpDBjhl17pyIReOWVqA0MoMGhRkaOtMtsBGpiWr4c2rWDe+7RPgilYprHA489ZtuUAxGBSZOirgO6It0JroZ8e3Xk5sK//x04T3KybYqK4osGpVQgviWcS0qCb/E5YgR8+GF4y1UD1d0JTmsONZSVBePHw+zZ0K1b4DylpaGXVFFKRanc3NCBoXHj0MMco4gGhzq47bbgx2bMgFtvDV9ZlFJh0Lp18MAwcGDEr5dUExoc6mDwYDuEOZipU+2ESe2DUCrK5eTASSfZTsWKmja1tYUYa0vW4FAHvqW+n3oqeJBYuNBeUOTkhLdsSqk68u0V3K+fXRJj5067+mZFt90WUdt71hddlbWOsrLsz/79wfsZdDc5paKMr+M51J7PYEefjBwZnjKFmdYc6smECXDLLcGPe73aSa1U1PB1PIcycGDMNSX50+BQj956K/RABe2kVioK5OTYn2Adz74VVmM4MIA2K9W7CRPg669tIAhk6lTo2DEmmyiVin45OcF3cIOIn8NQn7Tm0ADGjLEzqYN54QUdwaRURHr99eDHRGJmDkN1aHBoAFlZtsny7rtts2RFJSV2PwgNEEpFEI8Htm4NfnzSpJhuRqpIg0MDycqy624tWGCX/U5J+elxrxeuvVYDhFIRIScHBgyAXbsCHx8zJurXSqopDQ5hkJUF999fOX3XLrt5lM6BUMolOTnQqZPtZwjUAd2xo+18jsNOQu2QDhPfe+uPf6z8Hrz7bli1yg6XjqNaq1LuGjs29PjypCT4xz/i9kOpNYcwmjABbr65croxtjlTaxFKhYnHEzowDBxolzeI08AA1QgOInKyiMwXkfUislZEHnDSW4nIHBHZ5Ny2dNJFRF4QkXwR+VJE+vg91ygn/yYRGeWXfq6IfOU85gWRYJtyRr+33oLevYMfv+su7YdQqsGNGxf8WAyuk1QbVe7nICIdgA7GmM9FpDmwEhgB/ALYZ4x5RkTGAS2NMWNFZDjwG2A40A/4izGmn4i0AvKATMA4z3OuMaZIRJYDDwBLgVnAC8aYT0OVK9B+DqWlpRQUFFBS1cxGlxUXpzBkSDqFhckBj591Frz2Wty/N5WqX9XZjGXMmJjvX6jufg5V9jkYY3YCO53fD4nIeqAjcDUw2Mk2GcgFxjrpU4yNOktFJM0JMIOBOcaYfU4B5wDDRCQXaGGM8TjpU7DBJ2RwCKSgoIDmzZuTkZFBpFY+jDEUFhby3nsFXHRRp4B51q2zq7nqxYtS9cTjscsoB9rS0ycOAkNN1KjPQUQygHOAZUB7J3D4Akg7J1tHYJvfwwqctFDpBQHSa6ykpITWrVtHbGAAEBFat25N+/YlvPpq4C1HwW4YdMcd2sSkVL2YODF0YABISwtPWaJEtUcriUgz4H1gtDHmYIgv4EAHTC3SA5UhG8gGOOWUU4KVM1i5IoavjL5h0/fcE3gl4HXr7GS5l1+GwkJ74aM1CaVqwOOBKVOCr2fj06iR/YCpH1Wr5iAiydjAMNUY84GTvNtpLvL1S+xx0guAk/0eng7sqCI9PUB6JcaYHGNMpjEms23bttUpuis+/PBDRIQNGzZUmTc7GxYtCt5J7fXaTuqHH9Z9IZSqEY8HLrrIDgUMZcQI2w+hV14/UZ3RSgK8Dqw3xjzrd2gm4BtxNAr4yC99pDNqqT9wwGl2mg0MEZGWzsimIcBs59ghEenvvNZIv+eKStOmTeOCCy5g+vTp1cqflWXnOQRaasOfb18IbWpSqhrGjYOjR4MfF7ET3D78UANDANWpOQwAbgMuFpHVzs9w4BngMhHZBFzm3Ac72ugbIB/4G3AvgNMR/QSwwvl53Nc5DdwDvOY8ZjO16IyOFIcPH2bx4sW8/vrr1Q4OPs88A4mJofPovhBKVcPQoXaeQkVJSZCRYWsLixfH3ZIYNVGd0UqLCNwvAHBJgPwG+HWQ53oDeCNAeh5wdlVlaRC+4W311KA/Y8YMhg0bRpcuXWjVqhWff/45ffr0qfqB2Jd/+WVbO/B6g+ebOdMWWy92lKrA47EfoNWrAx9/8EEdkVRN8T1D2rcV4P/8j72th/aaadOmcdNNNwFw0003MW3atBo9PjsbPvsMhgwJnqe83PaxKaX8eDx2BEewwHDGGRoYaiC+11bKzbXD27xee1vHTqnCwkLmzZvHmjVrEBG8Xi8iwsSJE2s0iiorCx57zAaJYFvYTpoE33wDp51m7+u6TCou+df8x40LXuVOTNQrqhqK7+AweLAdwnbsWL0MZXvvvfcYOXIkr7766o9pgwYNYtGiRVx44YU1eq6sLJg7177vly8PPBLPf5Ln66/rpDkVZzweO1u0tNR2Lgdb7WHgQNuhpx+OGonvZiXfN/ATT9jbOr55pk2bxjXXXPOTtOuuu46333671sUbP95O3EyqIoyXlsLo0TqSScWRiRPtGx8CB4aBA+1mKnrVVCtVrq0UqQKtrbR+/Xq6devmUolqpqZlzcmB++47/lkIJjW1XuKcUp
EvJSX4UNWBA21QUJVUd22l+K45RJHsbPteP+OM0PmKi21/hdYgVEzyeODpp+1eDMECQ3KybUZSdRLffQ5RJisLfvc7O2M6lDlzbGe21iBUTPHNeA41sU37F+qNBoco45uz8/rrsGJF4KZWY2wNYsoU/YyoGFLVjGdtSqpX2qwUhbKzYdkyO5w11AjZnBxdi0lFMV8Tksdjm5ECzXj2SUzUpqR6pjWHKJadDZ9+GnzByfJy3Z9aRSnfBNWjR+0VUKD5Cy1aQKtWdtXKMWP0DV7PtOYQ5aoa5urbn/rCC7UWoaJIbi6UlNgrnGAT29q1g2+/1YXzGogGh3qWmJhI79696dWrF3369GHJkiUN+npZWba2PWJE6Hy+pb/Hjm3Q4ihVP9auDT6pzefaa8NTljilwaGepaamsnr1ar744guefvppxo8f3+CvmZVlL57GjKk678SJWoNQEW7oUJg6NXSeW27RdZIamAaHBnTw4EFatmwZttebMMEuT9+tG5x4IgR76dGjtQahIpDHAyef/NN1YSrq0MHOen7rrfCVK07FfYd0Pa/YTXFxMb1796akpISdO3cyb968uj9pDWRnHx/u6vHA+ecHKqOtQWzfrp8xFSFycuzoiVBNSY0awfvva/9CmMR1cPANiPCtu1cfk8Z8zUr2+T2MHDnyx1Vawy0ryzY1BdscaOpUOzRc9ztRrvLtwRAoMLRqBXfcAWlpuol6mMV1s1KgFbvrU1ZWFt9//z179+6t3yeugQkToHPn4MfvussubKnLbSjXTJkSfETSJ5/YN/H48RoYwiyug4Nvxe7ExHpZsbuSDRs24PV6ad26df0+cQ1Nnhx6+9GFC20NQgOECivfkttvvhn4uEj9X7GpaovrZiX/PRPqu88BwBjD5MmTSaxqY+gGlpVl11qaODH4hLmyMrj5Znj7bb1AUw3I18m3dq19s4XqY0hJqf8rNlVtcR0cwH4R1ueXoTfU5s8u8g13DdXvt2WL7cC+5RbtqFYNwOOxX/bHjoXOl5BgO8J0Wr+r4j44xBtf5/O99wZv5p061e4+N3myfjZVPfDVFt59N3RgaNYMbr1Vg0KE0OAQh7KzoUcP2w/40Uewc2flPJs2wYABdukNHc2kas03JLCkpOoZz3/+s77ZIkhcd0jHs6wseOUVO2w8OTlwHmPgnnvsj3ZWq1rJzbUTa0IFhoQEO+ZaA0NEibngEA3bnkZSGbOy7BL4d98d+Hh5uS7cp2opJ8fWBoIZMgSeegoWLdKlMCJQTDUrpaSkUFhYSOvWrV2ZdFYdxhgKCwtJSUlxuyg/8nXK/+tftlM6EN/CfaAXeCoEX//C/v3BZ1+CnXwze3bYiqVqLqaCQ3p6OgUFBa5OOquOlJQU0tPT3S5GJePHV70FqQYIFZT/kgPl5aHzTp4cnjKpWoup4JCcnEynTp3cLkbUys6GzZvhT38K/dm+6y7bVzF4sK5ooPxMmWL7F6qiG/NEBYmk9u+ayMzMNHl5eW4XIyZ5PKEnzPlLTrZ9FvpZj1Mejw0Ku3ZV7w0zZoz2L7hMRFYaYzKryhdTNQdVP3wT5m69tepl9UtL7b7vuq97HPItf1FaGjpfhw7Qr5/WGKJMzI1WUvXnrbfs57ldO7vMTTALF+pIprg0cmTowHDWWXaDkR07dCvPKKTBQYU0YQLs3g2LF9vPejB3361zIeJKRgbk5wc/PmSIXT9JRy5ErSqDg4i8ISJ7RGSNX1orEZkjIpuc25ZOuojICyKSLyJfikgfv8eMcvJvEpFRfunnishXzmNekEgdgxrnsrLgtdfsfKVAjIGrr7Y7zD39tAaKmNavH3z3XfDjffvqMNUYUGWHtIgMBA4DU4wxZztpE4F9xphnRGQc0NIYM1ZEhgO/AYYD/YC/GGP6iUgrIA/IBAywEjjXGFMkIsuBB4ClwCzgBWPMp1UVXDuk3VGdDbsAUlPrZ/Mk5TLfvIXWrWHVKli3zrYjBjNkiAaGCFdvHdLGmIUiklEh+WpgsPP7ZCAXGOukTzE24iwVkTQR6eDknWOM2ecUbg4wTERygRbGGI+TPgUYAVQZHJQ7qrNwH9gRjVOmaHCIah4PXHQRHD1add62beHJJ7UZKYbUts+hvTFmJ4Bz285J7whs88tX4KSFSi8IkK4iWHa23R/i7rtDbyI0aZLuMhfVpkypXmBITbUrOGpgiCn13SEdqL/A1CI98JOLZItInojkRfos6FjnW7jvs89gxIjg+RYutKu76mimKOPxVD1vISPDro2k7YcxqbbBYbfTXIRzu8dJLwBO9suXDuyoIj09QHpAxpgcY0ymMSazbdu2tSy6qk++ORFLlgSvRRhjZ1Xfemt4y6ZqKSfHRvRdu0LnGz9e93aOYbUNDjMB34ijUcBHfukjnVFL/YEDTrPTbGCIiLR0RjYNAWY7xw6JSH9nlNJIv+dSUSQrCx56KHSeqVPtemvazBTBcnJsJA814qBRIzt/QZuRYlqVHdIiMg3bodxGRAqAR4FngHdF5HZgK3CDk30WdqRSPvAD8EsAY8w+EXkCWOHke9zXOQ3cA/wdSMV2RGtndJTyrYqQk2MX5QwkPx8GDrTNTXrBGWGqMyUe4K9/1cAQB3RtJdUgcnLgt7+FQ4cCH2/XDn7xC0hL08X7IsLYscGX2O7Y0S6ilZoKo0drYIhy1R3KqsFBNZjqLr3TqJEdSq8BwgVjx8Lbb9slLgItxavNRzFHF95TrvPtMpeba1dSCNZiceyYLt7niqqakUaM0MAQaXyTEsNQ3dbgoBqUb5c5sH0NwTYT8i3ep99FYdKvHyxfHvy4iF11UUUO/82UGjVq8CHEuvCeCpvsbBsggnnoIbjnHh3N1GA8HrvwVUZG8MDQtCn07m1XWtR2vsiSm2sDg9drb3NzG/TlNDiosAq1suvhw3ZW9YUXwjXXaKCoVzk59g/78MPBF80bM8b+E1at0sAQiQYPtjWGxER7O3hwg76cdkirsPJ47Hv62LHq5U9MhJdf1uamOvF44PzzQ+cZMcLOZlSRLSfH7tF73XW1/lBUt0Naaw4qrLKybG347rvt6MiqeL12kT+tQdTBqFGhjycmav9CNPB44P77Yc4ce9vAHwoNDirsfOsyLVhgl+YZMiR0fq8Xbr9dA0SNeDy2XW7QINi0KXCeZs1sJ9Bnn2kzUjTwLYRojL2dMqVBX06Dg3JNVpZdmmf27KovXNevhwsu0AX8qmXsWNuMNGlS8L0XbrnFzlBcsEADgwpIg4OKCBMm2KamUMrLdQG/kHyzDoPNdPYZMsRuEK6iS4sWoe/XMw0OKmKMHGkHYVRl6lTo0kWbmX7Ct5JqsJpCWhp062ZnPOtObdGp4tBVHcqq4oV/Z/WIEXa4fTCbNtmWk2uuifMg4asthFpJNTERZs2yW3zqsK/oVXGIX3WH/NWSzpBWEcV/RjXA0KHw738Hzz9jBsycCYsWxWHTeXU29G7RAv71rzj848SgitXq6lSz60BrDiqi+Tqr09KC5ykvtxfPY8eGr1yu841GChUYRDQwxJKKH4JQH4p6oMFBRbwJE
6CoyDaXN2sWOE9pqe2Hbd48xoNETo6tTo0cGXgVVZ/ERDtaSQND7Fi1KvT9eqbBQUWN7GzbxJQQ4l17+LANEuecE2N9ER6PPam77rJ/hPz8wPkSE21T02efaf9CrKl4MRDq4qAeaJ+DiipZWbZ/YeJEOzBn377A+VavtvMiYqIvojrLX9xyC3TvrjsnxbLmzaGw8Kf3G5DWHFTUycqyywAVFoaeXV1eDldeGeU1iJwc26ESypgxdt7C+PEaGGJZxfVmqrP+TB1ocFBRbfZsOOOM4Mf37bMX3VE3cS4nBzp0sM1IobbSGzHi+ObdKrZt3x76fj3T4KCi3pQp0Lhx6DxTp9oLrajorL71VhsUdu0KnU8XzIsvFS8Sqtp/t440OKiol5UF8+fbRfxCfVeWldm+ioiuRVS1dSfYtmZdMC/+VLwCquqKqI60Q1rFBP/Jc6efbjc827IlcF7fd2/ELS9U1Yw/sNWf2bM1KMSjI0dC369nWnNQMSc7G779FpYsgTZtAueZOtU26Y8dawOJa53WY8dCejq0axc6MIjY2oKuohq/Kk54bOCN2rTmoGJWVhbs3WtbaqZPt/tC+Nu16/gCpqmpDb5fe2Vjx1a9gmp6uh1yNXKkBoV416IFHDz40/sNSGsOKua99ZZtng+luLjB9045rrpLa48ZA9u22Z2RNDCoiy8Ofb+eaXBQcSEry476DGXSJFvLaLBmJl9QOP/84EtrA/TqZdvEdIiq8jdmzPG5DcnJDT5STUwDt1s1lMzMTJOXl+d2MVQU8XjgoovsDotVadzYjoCqtwt2j8futxDq89atG7z+utYSVHAej13Xvg4z4UVkpTEms8p8GhxUPPF9tvbvh+efD70k/sCBkJIC111Xx2WKcnLgv/8bfvghdL4lSzQwqAanwUGpKlTnYt5nxAhbi6/xd3dOjp3QFkp6Orz7rgYGFRbVDQ7a56DiVlYWLF4MnTtXnXfGDFuTr3ZfRE4O9OsH998fPI+IXYd82zYNDCriaHBQcS0rC77+2n5Hn3hi6LzHjsG4cVU8ocdjN7i+6y5Yvjx4B0dCgu0B12W1VYTS4KAU9jt6586qB4AsXGgnz+XkVDiQkwOdOtmRSJs2hX6SESPsWuIaGFQEi5jgICLDRGSjiOSLSFXXZ0o1iAkTqq5F7NplKwYpKXZk6j3pH+O5683g63X49O5tO50//FCbkVTEi4jgICKJwEvA5cBZwM9F5Cx3S6XiVfVqEYajRw0LFxombb+SASxiLE8FzjpihA0Kq1ZpUFBRIyJGK4lIFvCYMWaoc388gDHm6WCPqfVoJZHjv/ufu2+M46uvwnffBX5sYiKce669QiwutusxlJbaXWUSE+193xoNSUn20vLYMfuajRvbvImJdq2GQ4fs6xtj031lSUy0+RMS7K3Xe/yYiJ0yn5hox2I2bny8HGAnxvjSwK7e2aULrFtnF+kyBho1ssuTgi2f/+slJ9sddHznc8IJUFJin0/Ebmju9dop/L4yJSRA06a2XPv2HX/tpKSf/o0TE+0G0C1a2LLs3Xv875qUZP8mXq/9e5WXH/+bGWPTjDn+v0tIOP63S0y0ab4xqY0a2bSjR+35NG1q9w71eo+fq9d7/P9RVnb8uRISjv8dmjSBY8fwHO7BRPMQH3E15sdrKb/30PE3k3OkDEEQvDThKKd3TmDvD83Ytcu+hO/UGjU6/tYRgY4d7TpQ+fm26L5iJSbaH9/pidj0pk3hpJPsnzEhweb3/Yt9PyLQvj1ccgnMnGnfcgBt29q3T2mpzZOYaFvEjh2zb/2EhON71xcX23y+t2Fysn1blZba1/PtVCly/KdJk+N9Obt32+f1X7pExJ5rkya2/KWl9lwKC+2/ypenUSP7mikpNs33egkJ9m0tYt/aPp0728cXF9tyFhXZv4vvuVq0sH8D/4+pr/yJiTZPaqp9THGxfW3fR8V3bgkJx9+Wvv/NkSP2mP+w6NRU+z86ePD4/7JNGzuVZfXq4/+LYF+/bdvCRx9VvpYI9vVVE1E1lFVErgeGGWPucO7fBvQzxtwX7DG1Cg4S4ENtjA0Ml1xy/ItNqQo89GccT7OQC+EnQcJwPFhU/CwFCoR8PuIAAAOlSURBVCJKVZ//1JdgX181FW1DWYNfivlnEskWkTwRydvrf+VZV7m5oWdDqbiXxVIWcBFLuIA0fPv4BgoG/j9K1U1urnuvHSnBoQA42e9+OrCjYiZjTI4xJtMYk9m2bdv6e/XBg22dUqkqZLGUItoyhmdIZxvgxQYJ349S9WfwYPdeO1KalZKAr4FLgO3ACuBmY8zaYI/RPgftcwhHn8OPDd1JSTY9Odn+TRISYOBAPE0uYeLqy5i752x+OJr84yn72t5PP92eqvY5/PQjqH0OP/14VKR9Dn5EZDjwPJAIvGGM+UOo/Lp8hlJK1Vx1g0PEbPZjjJkFzHK7HEoppSKnz0EppVQE0eCglFKqkojpc6gpEdkLBOk5jlhtgO/dLkSY6TnHBz3n6HGqMabK4Z5RGxyikYjkVacjKJboOccHPefYo81KSimlKtHgoJRSqhINDuFVcReAeKDnHB/0nGOM9jkopZSqRGsOSimlKtHg4BIR+a2IGBFp43ZZGpqI/FFENojIlyLyoYikuV2mhhJvOxqKyMkiMl9E1ovIWhF5wO0yhYOIJIrIKhH5xO2yNBQNDi4QkZOBy4CtbpclTOYAZxtjemIXWBzvcnkaRJzuaFgGPGSM6Qb0B34dB+cM8ACw3u1CNCQNDu54DhhDnKzxbIz5tzHGWd+Spdgl2WNRXyDfGPONMeYYMB242uUyNShjzE5jzOfO74ewX5gd3S1VwxKRdOAK4DW3y9KQNDiEmYhcBWw3xnzhdllc8ivgU7cL0UA6Atv87hcQ41+U/kQkAzgHWOZuSRrc89iLu3K3C9KQImZV1lgiIv8BTgxw6BHgYWBIeEvU8EKdszHmIyfPI9hmiKnhLFsYVWtHw1gkIs2A94HRxpiDbpenoYjIlcAeY8xKERnsdnkakgaHBmCMuTRQuoj0ADoBX4jdtSMd+FxE+hpjdoWxiPUu2Dn7iMgo4ErgEhO746ertaNhrBGRZGxgmGqM+cDt8jSwAcBVzv4zKUALEXnLGHOry+WqdzrPwUUisgXINMZE4+Jd1SYiw4BngUHGmHrc/Duy1GZHw2gn9ipnMrDPGDPa7fKEk1Nz+K0x5kq3y9IQtM9BhcOLQHNgjoisFpFJbheoITid7vcBs7Eds+/GcmBwDABuAy52/rernatqFeW05qCUUqoSrTkopZSqRIODUkqpSjQ4KKWUqkSDg1JKqUo0OCillKpEg4NSSqlKNDgopZSqRIODUkqpSv4/+S9Jfnj+DjwAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Plot of the time-average spatial 
concentration.\n", + "x_vals = model.mesh.coordinates()[:, 0]\n", + "A_vals = numpy.sum(result.get_species(\"A\", concentration=False), axis=0)\n", + "B_vals = numpy.sum(result.get_species(\"B\", concentration=False), axis=0)\n", + "\n", + "A_sum = numpy.sum(result.get_species(\"A\"), axis=1)\n", + "B_sum = numpy.sum(result.get_species(\"B\"), axis=1)\n", + "print(A_sum[-1])\n", + "print(B_sum[-1])\n", + "plt.figure(figsize=(6,6))\n", + "plt.subplot(2,1,1)\n", + "plt.plot(result.get_timespan(),A_sum,'-r',label=\"A\")\n", + "plt.plot(result.get_timespan(),B_sum,'-b',label=\"B\")\n", + "plt.legend(loc='best')\n", + "plt.subplot(2,1,2)\n", + "\n", + "vol = model.vol\n", + "sd = model.sd\n", + "print(numpy.sum(vol[sd == 2]))\n", + "print(numpy.sum(vol[sd == 3]))\n", + "\n", + "\n", + "plt.plot(x_vals,A_vals,'.r',x_vals,B_vals,'.b')\n", + "plt.legend(['A', 'B'],loc='best')\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 114, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "cmd: cd /tmp/tmpuwdb50nt;/tmp/tmp1sl4ksb_/ssa_sdpd\n", + "\n", + "Elapsed seconds: 5.73\n", + "\n", + "CPU times: user 5.21 ms, sys: 16.3 ms, total: 21.5 ms\n", + "Wall time: 5.74 s\n" + ] + } + ], + "source": [ + "%time r2 = sol.run()" + ] + }, + { + "cell_type": "code", + "execution_count": 115, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "find_h = 0.8703111740074692\n", + "CPU times: user 4.21 s, sys: 40 ms, total: 4.25 s\n", + "Wall time: 10.9 s\n" + ] + }, + { + "data": { + "text/plain": [ + "{'Status': 'Success'}" + ] + }, + "execution_count": 115, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "%time model.run()" + ] + }, + { + "cell_type": "code", + "execution_count": 118, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "cmd: cd /tmp/tmpp7zuwtvu;/tmp/tmp1sl4ksb_/ssa_sdpd\n", + "\n" + ] + }, + { + "ename": "SimulationTimeout", + "evalue": "b''None", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mTimeoutExpired\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m/home/brian/Desktop/research/SpatialPy/spatialpy/Solver.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, number_of_trajectories, seed, timeout)\u001b[0m\n\u001b[1;32m 142\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mtimeout\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 143\u001b[0;31m \u001b[0mstdout\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mstderr\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mprocess\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcommunicate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtimeout\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtimeout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 144\u001b[0m 
\u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.6/subprocess.py\u001b[0m in \u001b[0;36mcommunicate\u001b[0;34m(self, input, timeout)\u001b[0m\n\u001b[1;32m 842\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 843\u001b[0;31m \u001b[0mstdout\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstderr\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_communicate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mendtime\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtimeout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 844\u001b[0m \u001b[0;32mfinally\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.6/subprocess.py\u001b[0m in \u001b[0;36m_communicate\u001b[0;34m(self, input, endtime, orig_timeout)\u001b[0m\n\u001b[1;32m 1514\u001b[0m \u001b[0mready\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mselector\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mselect\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtimeout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1515\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_check_timeout\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mendtime\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0morig_timeout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1516\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.6/subprocess.py\u001b[0m in \u001b[0;36m_check_timeout\u001b[0;34m(self, endtime, orig_timeout)\u001b[0m\n\u001b[1;32m 870\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0m_time\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m>\u001b[0m \u001b[0mendtime\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 871\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mTimeoutExpired\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0morig_timeout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 872\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mTimeoutExpired\u001b[0m: Command 'cd /tmp/tmpp7zuwtvu;/tmp/tmp1sl4ksb_/ssa_sdpd' timed out after 2 seconds", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[0;31mSimulationTimeout\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mget_ipython\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmagic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'time r2 = sol.run(timeout=2)'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/usr/lib/python3/dist-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mmagic\u001b[0;34m(self, arg_s)\u001b[0m\n\u001b[1;32m 2158\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0marg_s\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpartition\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m' 
'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2159\u001b[0m \u001b[0mmagic_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlstrip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprefilter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mESC_MAGIC\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2160\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_line_magic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2161\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2162\u001b[0m \u001b[0;31m#-------------------------------------------------------------------------\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3/dist-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mrun_line_magic\u001b[0;34m(self, magic_name, line)\u001b[0m\n\u001b[1;32m 2079\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'local_ns'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_getframe\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstack_depth\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mf_locals\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2080\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbuiltin_trap\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2081\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2082\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2083\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3/dist-packages/IPython/core/magic.py\u001b[0m in \u001b[0;36m\u001b[0;34m(f, *a, **k)\u001b[0m\n\u001b[1;32m 186\u001b[0m \u001b[0;31m# but it's overkill for just that one bit of state.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 187\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mmagic_deco\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 188\u001b[0;31m \u001b[0mcall\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 189\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 190\u001b[0m \u001b[0;32mif\u001b[0m 
\u001b[0mcallable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3/dist-packages/IPython/core/magics/execution.py\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n\u001b[1;32m 1191\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1192\u001b[0m \u001b[0mst\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1193\u001b[0;31m \u001b[0mexec\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mglob\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlocal_ns\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1194\u001b[0m \u001b[0mend\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1195\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m/home/brian/Desktop/research/SpatialPy/spatialpy/Solver.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, number_of_trajectories, seed, timeout)\u001b[0m\n\u001b[1;32m 152\u001b[0m \u001b[0mos\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mkillpg\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprocess\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpid\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msignal\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mSIGINT\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# send signal to the process group\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 153\u001b[0m \u001b[0mstdout\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mstderr\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mprocess\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcommunicate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 154\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mSimulationTimeout\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"{0}{1}\"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mformat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstdout\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mstderr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 155\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mOSError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 156\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"Error, execution of solver raised an exception: {0}\"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mformat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mSimulationTimeout\u001b[0m: b''None" + ] + } + ], + "source": [ + "%time r2 = sol.run(timeout=2)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 
+ }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/examples/cylinderDemo/cylinder.xml b/examples/cylinderDemo/cylinder.xml new file mode 100644 index 00000000..96b3a8ea --- /dev/null +++ b/examples/cylinderDemo/cylinder.xml @@ -0,0 +1,5619 @@ [cylinder.xml: 5,619 added lines of XML mesh data; the markup was stripped during extraction, leaving only empty "+" markers, and the content is not reproduced here]
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/cylinderDemo/ADFSP_cylinderDemo_tests.ipynb b/examples/cylinderDemo/old/ADFSP_cylinderDemo_tests.ipynb similarity index 100% rename from examples/cylinderDemo/ADFSP_cylinderDemo_tests.ipynb rename to examples/cylinderDemo/old/ADFSP_cylinderDemo_tests.ipynb diff --git a/examples/cylinderDemo/Cylinder_test.ipynb b/examples/cylinderDemo/old/Cylinder_test.ipynb similarity index 100% rename from examples/cylinderDemo/Cylinder_test.ipynb rename to examples/cylinderDemo/old/Cylinder_test.ipynb diff --git a/examples/cylinderDemo/cylinder_demo3D.py b/examples/cylinderDemo/old/cylinder_demo3D.py similarity index 100% rename from examples/cylinderDemo/cylinder_demo3D.py rename to examples/cylinderDemo/old/cylinder_demo3D.py diff --git a/examples/cylinderDemo/ensemble_test.py b/examples/cylinderDemo/old/ensemble_test.py similarity index 100% rename from examples/cylinderDemo/ensemble_test.py rename to examples/cylinderDemo/old/ensemble_test.py diff --git a/examples/cylinderDemo/pickle_test.py b/examples/cylinderDemo/old/pickle_test.py similarity index 100% rename from examples/cylinderDemo/pickle_test.py rename to examples/cylinderDemo/old/pickle_test.py diff --git a/install_ubuntu.sh b/install_ubuntu.sh index f5d570d5..45e5414c 100644 --- a/install_ubuntu.sh +++ b/install_ubuntu.sh @@ -1,13 +1,7 @@ #!/usr/bin/env bash apt-get -y update -apt-get -y install git apt-get -y install build-essential python-dev apt-get -y install python-setuptools apt-get -y install python-matplotlib python-numpy python-scipy apt-get -y install make -apt-get -y install python-software-properties -add-apt-repository ppa:fenics-packages/fenics -apt-get update -apt-get -y install fenics -apt-get -y install cython python-h5py python setup.py install diff --git a/setup.py b/setup.py index e2ca904e..f598e570 100644 --- a/setup.py +++ b/setup.py @@ -1,23 +1,89 @@ -from setuptools import setup +from setuptools import setup, find_packages +from setuptools.command.develop import develop +from setuptools.command.install import install +from setuptools.command.bdist_egg import bdist_egg +from setuptools.command.easy_install import easy_install +import os + +SETUP_DIR = os.path.dirname(os.path.abspath(__file__)) + + +def stoch_path(command_subclass): + """ + A decorator for classes subclassing one of the setuptools commands. + It modifies the run() method. 
+ """ + orig_run = command_subclass.run + + def modified_run(self): + success = False + orig_run(self) + + command_subclass.run = modified_run + return command_subclass + + +# update all install classes with our new class +@stoch_path +class develop_new(develop): + pass + + +@stoch_path +class install_new(install): + pass + + +@stoch_path +class bdist_egg_new(bdist_egg): + pass + + +@stoch_path +class easy_install_new(easy_install): + pass + + +with open('README.md', 'r') as fh: + full_description = fh.read() + + setup(name="spatialpy", - version="1.1.1", + version="0.1.0", packages=['spatialpy'], + + include_package_data = True, + #package_data={'spatialpy':['data/*.c','data/three.js_templates/js/*','data/three.js_templates/*.html','spatialpy/AUTHORS','spatialpy/LICENCE','spatialpy/bin/*','spatialpy/build/*','spatialpy/include/*','spatialpy/src/*.c','spatialpy/src/nsm/*']}, - #include_package_data = True, - package_data={'spatialpy':['data/*.c','data/three.js_templates/js/*','data/three.js_templates/*.html','spatialpy/AUTHORS','spatialpy/LICENCE','spatialpy/bin/*','spatialpy/build/*','spatialpy/include/*','spatialpy/src/*.c','spatialpy/src/nsm/*']}, - + description='Python Interface for Spatial Stochastic Biochemical Simulations', + install_requires = ["numpy", - "scipy", - "h5py"], + "scipy"], - author="Brian Drawert", - author="Evie Hilton", - author_email=["briandrawert@gmail.com"], - author_email=["mhilton@unca.edu"], + author="Brian Drawert, Evie Hilton", + author_email="briandrawert@gmail.com", license = "GPL", keywords = "spatialpy, spatial stochastic simulation, RDME", - - + url="https://spatialpy.github.io/SpatialPy/", + download_url="https://github.com/SpatialPy/SpatialPy/tarball/master/", + + + cmdclass={'bdist_egg': bdist_egg_new, + 'install': install_new, + 'develop': develop_new, + 'easy_install': easy_install_new}, + classifiers=[ + 'Development Status :: 5 - Production/Stable', + 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3', + 'Topic :: Scientific/Engineering', + 'Topic :: Scientific/Engineering :: Chemistry', + 'Topic :: Scientific/Engineering :: Mathematics', + 'Topic :: Scientific/Engineering :: Medical Science Apps.', + 'Intended Audience :: Science/Research' + ], + ) diff --git a/spatialpy/DataFunction.py b/spatialpy/DataFunction.py new file mode 100644 index 00000000..0827be41 --- /dev/null +++ b/spatialpy/DataFunction.py @@ -0,0 +1,22 @@ +import spatialpy + + +class DataFunction(): + """ Abstract class used to constuct the data vector. """ + name = None + def __init__(self, name=None): + if name is not None: + self.name = name + if self.name is None: + raise Exception("DataFunction must have a 'name'") + + def map(self, x): + """ map() takes the coordinate 'x' and returns a double. + Args: + x: a list of 3 ints. + Returns: + A float + """ + raise Exception("DataFunction.map() not implemented.") + + diff --git a/spatialpy/InitialCondition.py b/spatialpy/InitialCondition.py new file mode 100644 index 00000000..72ce341d --- /dev/null +++ b/spatialpy/InitialCondition.py @@ -0,0 +1,45 @@ +import numpy + + +class InitialCondition(): + """ Class used to defined initial conditions in SpatialPy. + SubClasses must implement the 'apply(model)' method, which + direction modifies the model.u0[species,voxel] matrix. 
+ """ + + def apply(self, model): + raise ModelError("spatialpy.InitialCondition subclasses must implement apply()") + + +class ScatterInitialCondition(InitialCondition): + + def __init__(self, species, count, subdomains=None): + """ Scatter 'count' of 'species' randomly over the list of subdomains + (all subdomains if None).""" + self.species = species + self.count = count + self.subdomains = subdomains + + def apply(self, model): + + spec_name = self.species.name + for spec_ndx in range(len(model.listOfSpecies)): + if model.listOfSpecies[spec_ndx] = self.species: break + + if self.subdomains is None: + nvox = model.mesh.get_num_voxels() + for mol in range(self.count): + vtx = numpy.random.randint(0, nvox) + self.u0[spec_ndx, vtx] += 1 + else: + allowed_voxels = [] + for i in range(model.mesh.get_num_voxels()): + if model.sd[i] in self.subdomains: + allowed_voxels.append(i) + nvox = len(alowed_voxels) + if nvox==0: raise ModelError("ScatterInitialCondition has zero voxels to scatter in") + for mol in range(self.count): + v_ndx = numpy.random.randint(0, nvox) + vtx = allowed_voxels[v_ndx] + self.u0[spec_ndx, vtx] += 1 + diff --git a/spatialpy/Mesh.py b/spatialpy/Mesh.py new file mode 100644 index 00000000..e90e0d3c --- /dev/null +++ b/spatialpy/Mesh.py @@ -0,0 +1,117 @@ +import numpy + +class Mesh(): + """ Mesh class for spatial py """ + + + def __init__(self): + self.vertices = numpy.zeros((0)) + self.tetrahedrons = numpy.zeros((0,),dtype=int) + self.vol = None + + + def coordinates(self): + return self.vertices + + def get_num_voxels(self): + return self.vertices.shape[0] + + def find_h(self): + max_dist = None + #print("find_h") + for i in range(self.vertices.shape[0]): + d = self.dist_to_closest_neighbor(i) + #print("\tdist_to_closest_neighbor({0})={1}".format(i,d)) + if max_dist is None or d > max_dist: + max_dist = d + h = 2.2*max_dist + #print("find_h = {0}".format(h)) + return h + + + def dist_to_closest_neighbor(self, v_ndx): + min_dist=None + for i in range(self.vertices.shape[0]): + if i==v_ndx: continue + d = numpy.linalg.norm( self.vertices[i,:]-self.vertices[v_ndx,:] ) + if d > 0 and (min_dist is None or d < min_dist): + min_dist = d + return min_dist + + def get_bounding_box(self): + xhi=None + xlo=None + yhi=None + ylo=None + zhi=None + zlo=None + for i in range(self.vertices.shape[0]): + if xhi is None or xhi < self.vertices[i,0]: xhi = self.vertices[i,0] + if xlo is None or xlo > self.vertices[i,0]: xlo = self.vertices[i,0] + if yhi is None or yhi < self.vertices[i,1]: yhi = self.vertices[i,1] + if ylo is None or ylo > self.vertices[i,1]: ylo = self.vertices[i,1] + if zhi is None or zhi < self.vertices[i,2]: zhi = self.vertices[i,2] + if zlo is None or zlo > self.vertices[i,2]: zlo = self.vertices[i,2] + return xhi,xlo,yhi,ylo,zhi,zlo + + def get_vol(self): + if self.vol is None: + self.vol = numpy.zeros((self.vertices.shape[0]),dtype=float) + for t_ndx in range(self.tetrahedrons.shape[0]): + v1,v2,v3,v4 = self.tetrahedrons[t_ndx] + a = self.vertices[v1,:] + b = self.vertices[v2,:] + c = self.vertices[v3,:] + d = self.vertices[v4,:] + #https://en.wikipedia.org/wiki/Tetrahedron#Volume + t_vol = numpy.abs(numpy.dot( (a-d), numpy.cross( (b-d),(c-d) ) )/6 ) + self.vol[v1] += t_vol/4 + self.vol[v2] += t_vol/4 + self.vol[v3] += t_vol/4 + self.vol[v4] += t_vol/4 + + return self.vol + + + @classmethod + def read_xml_mesh(cls, filename): + """ Read a FEniCS/dolfin style XML mesh file""" + import xml.etree.ElementTree as ET + root = ET.parse(filename).getroot() + if not root.tag == 
'dolfin': raise MeshException("Not a dolfin mesh.") + mesh = root[0] + if mesh.tag != 'mesh' or \ + mesh.attrib['celltype'] != 'tetrahedron' or \ + mesh.attrib['dim'] != '3': + raise MeshException("XML mesh format error") + # + vertices = mesh[0] + cells = mesh[1] + # create mesh object + obj = Mesh() + #vertices + obj.vertices = numpy.zeros(( len(vertices), 3), dtype=float) + for v in vertices: + obj.vertices[ int(v.attrib['index']),0] = float(v.attrib['x']) + obj.vertices[ int(v.attrib['index']),1] = float(v.attrib['y']) + obj.vertices[ int(v.attrib['index']),2] = float(v.attrib['z']) + #tetrahedrons + obj.tetrahedrons = numpy.zeros(( len(cells), 4), dtype=int) + for c in cells: + obj.tetrahedrons[ int(c.attrib['index']),0] = int(c.attrib['v0']) + obj.tetrahedrons[ int(c.attrib['index']),1] = int(c.attrib['v1']) + obj.tetrahedrons[ int(c.attrib['index']),2] = int(c.attrib['v2']) + obj.tetrahedrons[ int(c.attrib['index']),3] = int(c.attrib['v3']) + # return model ref + return obj + + + + + + + + + +class MeshException(Exception): + pass diff --git a/spatialpy/model.py b/spatialpy/Model.py similarity index 52% rename from spatialpy/model.py rename to spatialpy/Model.py index 9abdf7f5..6d92b2b8 100644 --- a/spatialpy/model.py +++ b/spatialpy/Model.py @@ -1,27 +1,26 @@ #This module defines a model that simulates a discrete, stoachastic, mixed biochemical reaction network in python. -""" + +from __future__ import division # is this still necessary? import uuid -from __future__ import division from collections import OrderedDict -#from spatialpy.core.spatialpySolver import SpatialPySolver -#from spatialpy.core.spatialpyError import * -import numpy as np +from spatialpy.Solver import Solver +import numpy +import scipy class Model(): - """ Representation of a well mixed biochemical model. Interfaces to solvers in StochSS - should attempt to extend Model. """ + """ Representation of a spatial biochemical model. """ + reserved_names = ['vol'] + special_characters = ['[', ']', '+', '-', '*', '/', '.', '^'] + def __init__(self, name="", volume=1.0): - """ Create an empty model. """ + """ Create an empty SpatialPy model. """ # The name that the model is referenced by (should be a String) self.name = name - # Optional decription of the model (string) - self.annotation = "" - # Dictionaries with Species, Reactions and Parameter objects. # Species,Reactio and Paramter names are used as keys. self.listOfParameters = OrderedDict() @@ -35,6 +34,116 @@ def __init__(self, name="", volume=1.0): # evaluation of expressions in the scope of the model. self.namespace = OrderedDict([]) + ###################### + self.mesh = None + self.sd = None + self.listOfSubdomainIDs = [1] # starts with subdomain '1' + self.listOfDiffusionRestrictions = {} + self.timestep_size = None + self.num_timesteps = None + self.listOfDataFunctions = OrderedDict() + self.listOfInitialConditions = OrderedDict() + self.species_map = {} + + + def run(self, number_of_trajectories=1, solver=None, seed=None, report_level=0): + """ Simulate the model. + Args: + solver: A str or class type that is a subclass of SpatialPy.Solver. Default: NSM solver. + number_of_trajectories: How many trajectories should be run. + seed: An int, the random seed given to the solver. + report_level: An int, Level of output from the solver: 0, 1, or 2. Default: 0. + Returns: + A SpatialPY.Result object with the results of the simulation. 
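A hedged usage sketch of the Mesh helpers added above; 'my_mesh.xml' is a placeholder for a FEniCS/dolfin style XML mesh file.

    from spatialpy.Mesh import Mesh

    mesh = Mesh.read_xml_mesh('my_mesh.xml')
    print(mesh.get_num_voxels())      # number of vertices in the mesh
    print(mesh.get_vol().sum())       # total volume accumulated from the tetrahedra
    print(mesh.get_bounding_box())    # (xhi, xlo, yhi, ylo, zhi, zlo)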
+ """ + if solver is not None: + if ((isinstance(solver, type) + and issubclass(solver, Solver))) or issubclass(type(solver), Solver): + sol = solver(self, report_level=report_level) + else: + from spatialpy.nsmsolver import NSMSolver + sol = NSMSolver(self, report_level=report_level) + + return sol.run(number_of_trajectories=number_of_trajectories, seed=seed) + + + def set_timesteps(self, step_size, num_steps): + """" Set the simlation time span parameters + Args: + step_size: float, size of each timestep in seconds + num_steps: int, total number of steps to take + + Note: the number of output times will be num_steps+1 as the first + output will be at time zero. + """ + #TODO, add checking + self.timestep_size = step_size + self.num_timesteps = num_steps + + def timespan(self, time_span): + """ + Set the time span of simulation. The SSA-SDPD engine does not support + non-uniform timespans. + + tspan : numpy ndarray + Evenly-spaced list of times at which to sample the species + populations during the simulation. + """ + + items_diff = numpy.diff(time_span) + items = map(lambda x: round(x, 10), items_diff) + isuniform = (len(set(items)) == 1) + + if isuniform: + self.timestep_size = items_diff[0] + self.num_timesteps = len(items_diff) + else: + raise InvalidModelError("Only uniform timespans are supported") + + + + def add_subdomain(self, subdomain, domain_id): + """ Add a subdomain definition to the model. By default, all regions are set to + subdomain 1. + Args: + subdomain: an instance of a 'spatialpy.SubDomain' subclass. The 'inside()' method + of this object will be used to assign domain_id to points. + domain_id: the identifier for this subdomain (usually an int). + Return values: + Number of mesh points that were tagged with this domain_id + """ + if self.mesh is None: + raise Exception("SpatialPy models must have a mesh before subdomains can be attached"); + if domain_id not in self.listOfSubdomainIDs: + # index is the "particle type", value is the "subdomain ID" + self.listOfSubdomainIDs.append(domain_id) + if self.sd is None: + self.sd = numpy.ones(self.mesh.get_num_voxels()) + # apply the subdomain to all points, set sd for any points that match + count =0 + for v_ndx in range(self.mesh.get_num_voxels()): + if subdomain.inside( self.mesh.coordinates()[v_ndx,:], False): + self.sd[v_ndx] = domain_id + count +=1 + return count + + def restrict(self, species, listOfSubDomains): + """ Set the diffusion coefficient to zero for 'species' in all subdomains not in + 'listOfSubDomains'. This effectively restricts the movement of 'species' to + the subdomains specified in 'listOfSubDomains'. + Args: + species: an instance of a 'spatialpy.Species'. + listOfSubdomains: a list, each object in the list should be a 'domain_id' + """ + x = Species() + #if not isinstance(species, Species): + if str(type(x)) != str(type(species)): + raise ModelError("First argument to restrict() must be a Species object") + if not isinstance(listOfSubDomains,list): + raise ModelError("First argument to restrict() must be a list of subdomain_id") + self.listOfDiffusionRestrictions[species] = listOfSubDomains + + def update_namespace(self): """ Create a dict with flattened parameter and species objects. """ @@ -53,19 +162,30 @@ def get_all_species(self): return self.listOfSpecies def add_species(self, obj): - """ - Add a Species to listOfSpecies. Accepts input either as a single Species object, or - as a list of Species objects. 
""" - if isinstance(obj, Species): - if obj.name in self.listOfSpecies: - raise ModelError("Can't add species. A species with that name already exists.") - self.listOfSpecies[obj.name] = obj; - else: # obj is a list of species + Adds a species, or list of species to the model. + + Attributes + ---------- + obj : Species, or list of Species + The species or list of species to be added to the model object. + """ + + #if isinstance(obj, Species): + x = Species() + if str(type(x)) == str(type(obj)): + problem = self.problem_with_name(obj.name) + if problem is not None: + raise problem + self.species_map[obj] = len(self.listOfSpecies) + self.listOfSpecies[obj.name] = obj + elif isinstance(obj, list): for S in obj: - if S.name in self.listOfSpecies: - raise ModelError("Can't add species. A species with that name already exists.") - self.listOfSpecies[S.name] = S; + self.add_species(S) + else: + raise ModelError("Unexpected parameter for add_species. Parameter must be Species or list of Species.") + return obj + def delete_species(self, obj): """ Remove a Species from model.listOfSpecies. """ @@ -83,23 +203,43 @@ def get_parameter(self,pname): def get_all_parameters(self): return self.listOfParameters - + + def problem_with_name(self, name): + if name in Model.reserved_names: + return ModelError('Name "{}" is unavailable. It is reserved for internal GillesPy use. Reserved Names: ({}).'.format(name, Model.reserved_names)) + if name in self.listOfSpecies: + return ModelError('Name "{}" is unavailable. A species with that name exists.'.format(name)) + if name in self.listOfParameters: + return ModelError('Name "{}" is unavailable. A parameter with that name exists.'.format(name)) + if name.isdigit(): + return ModelError('Name "{}" is unavailable. Names must not be numeric strings.'.format(name)) + for special_character in Model.special_characters: + if special_character in name: + return ModelError('Name "{}" is unavailable. Names must not contain special characters: {}.'.format(name, Model.special_characters)) + + + def add_parameter(self,params): """ Add Parameter(s) to model.listOfParameters. Input can be either a single Parameter object or a list of Parameter objects. """ - # TODO, make sure that you don't overwrite an existing parameter?? - if isinstance(params,list): + if isinstance(params,list): for p in params: self.add_parameter(p) else: - if isinstance(params, Parameter): + #if isinstance(params, type(Parameter())): + x = Parameter() + if str(type(params)) == str(type(x)): problem = self.problem_with_name(params.name) if problem is not None: raise problem + # make sure that you don't overwrite an existing parameter?? + if params.name in self.listOfParameters.keys(): + raise ParameterError("Parameter '{0}' has already been added to the model.".format(params.name)) self.listOfParameters[params.name] = params else: - raise ParameterError("Could not resolve Parameter expression {} to a scalar value.".format(params)) + #raise ParameterError("Could not resolve Parameter expression {} to a scalar value.".format(params)) + raise ParameterError("Parameter '{0}' needs to be of type '{2}', it is of type '{1}'".format(params.name,str(type(params)),str(type(x)))) return params def delete_parameter(self, obj): @@ -168,6 +308,106 @@ def __eq__(self, other): self.listOfReactions == other.listOfReactions and \ self.name == other.name) + def create_stoichiometric_matrix(self): + """ Generate a stoichiometric matrix in sparse CSC format. 
""" + + if self.get_num_reactions() > 0: + ND = numpy.zeros((self.get_num_species(), self.get_num_reactions())) + for i, r in enumerate(self.listOfReactions): + R = self.listOfReactions[r] + reactants = R.reactants + products = R.products + + for s in reactants: + ND[self.species_map[s], i] -= reactants[s] + for s in products: + ND[self.species_map[s], i] += products[s] + + N = scipy.sparse.csc_matrix(ND) + else: + N = numpy.zeros((self.get_num_species(), self.get_num_reactions())) + + return N + + + def create_dependency_graph(self): + """ Construct the sparse dependency graph. """ + # We cannot safely generate a dependency graph (without attempting to analyze the + # propensity string itself) if the model contains custom propensities. + mass_action_model = True + for name, reaction in self.listOfReactions.items(): + if not reaction.massaction: + GF = numpy.ones((self.get_num_reactions(), + self.get_num_reactions() + self.get_num_species())) + mass_action_model = False + + if mass_action_model: + GF = numpy.zeros((self.get_num_reactions(), + self.get_num_reactions() + self.get_num_species())) + species_map = self.species_map + + involved_species = [] + reactants = [] + for name, reaction in self.listOfReactions.items(): + temp = [] + temp2 = [] + for s in reaction.reactants: + temp.append(species_map[s]) + temp2.append(species_map[s]) + for s in reaction.products: + temp.append(species_map[s]) + involved_species.append(temp) + reactants.append(temp2) + + species_to_reactions = [] + for sname,species in self.listOfSpecies.items(): + temp = [] + for j, x in enumerate(reactants): + if species_map[species] in x: + temp.append(j) + species_to_reactions.append(temp) + + reaction_to_reaction = [] + for name, reaction in self.listOfReactions.items(): + temp = [] + for s in reaction.reactants: + if species_to_reactions[species_map[s]] not in temp: + temp = temp+species_to_reactions[species_map[s]] + + for s in reaction.products: + if species_to_reactions[species_map[s]] not in temp: + temp = temp+ species_to_reactions[species_map[s]] + + temp = list(set(temp)) + reaction_to_reaction.append(temp) + + # Populate G + for j, spec in enumerate(species_to_reactions): + for s in spec: + GF[s, j] = 1 + + for i,reac in enumerate(reaction_to_reaction): + for r in reac: + GF[r, self.get_num_species()+i] = 1 + + + try: + G = scipy.sparse.csc_matrix(GF) + except Exception as e: + G = GF + + return G + + def apply_initial_conditions(self): + """ Initalize the u0 matrix (zeros) and then apply each initial condition""" + # initalize + ns = self.get_num_species() + nv = self.mesh.get_num_voxels() + self.u0 = numpy.zeros((ns, nv)) + # apply initial condition functions + + + class Species(): """ Model of a biochemical species. """ @@ -209,7 +449,8 @@ def __init__(self,name="",expression=None,value=None): # might not be evaluable in the namespace of this parameter, but defined # in the context of a model or reaction. if self.expression == None: - raise TypeError + #raise TypeError + self.value = 0 if self.value is None: self.evaluate() @@ -330,23 +571,25 @@ def create_mass_action(self): # There is no theoretical justification for higher order propensities. # Users can still create such propensities if they really want to, # but should then use a custom propensity. 
- total_stoch=0 + total_stoch = 0 for r in self.reactants: - total_stoch+=self.reactants[r] - if total_stoch>2: - raise ReactionError("Reaction: " +self.name + "A mass-action reaction cannot involve more than two species.") - + total_stoch += self.reactants[r] + if total_stoch > 2: + raise ReactionError("Reaction: A mass-action reaction cannot involve more than two of one species or one " + "of two species.") # Case EmptySet -> Y - propensity_function = self.marate.name; - + + propensity_function = self.marate.name + # There are only three ways to get 'total_stoch==2': for r in self.reactants: # Case 1: 2X -> Y if self.reactants[r] == 2: - propensity_function = "0.5*" +propensity_function+ "*"+r+"*("+r+"-1)/vol" + propensity_function = ("0.5*" + propensity_function + + "*" + str(r) + "*(" + str(r) + "-1)/vol") else: - # Case 3: X1, X2 -> Y; - propensity_function += "*"+r + # Case 3: X1, X2 -> Y; + propensity_function += "*" + str(r) # Set the volume dependency based on order. order = len(self.reactants) @@ -355,7 +598,6 @@ def create_mass_action(self): elif order == 0: propensity_function += "*vol" - self.propensity_function = propensity_function def set_type(self,type): @@ -376,6 +618,10 @@ def annotate(self,annotation): # Module exceptions +class SimulationError(Exception): + pass +class SimulationTimeout(SimulationError): + pass class ModelError(Exception): pass diff --git a/spatialpy/Result.py b/spatialpy/Result.py index 6290a501..5200cf60 100644 --- a/spatialpy/Result.py +++ b/spatialpy/Result.py @@ -13,14 +13,14 @@ import scipy.io import scipy.sparse -from model import * +from spatialpy.Model import * import inspect - import pickle import json -import functools + +import vtk # module-level variable to for javascript export in IPython/Jupyter notebooks __pyurdme_javascript_libraries_loaded = False @@ -39,193 +39,102 @@ def load_pyurdme_javascript_libraries(): IPython.display.display(IPython.display.HTML('')) -def deprecated(func): - '''This is a decorator which can be used to mark functions - as deprecated. It will result in a warning being emitted - when the function is used.''' - - @functools.wraps(func) - def new_func(*args, **kwargs): - warnings.warn_explicit( - "Call to deprecated function {}.".format(func.__name__), - category=DeprecationWarning, - filename=func.func_code.co_filename, - lineno=func.func_code.co_firstlineno + 1 - ) - return func(*args, **kwargs) - return new_func - - -# Set log level to report only errors or worse -#dolfin.set_log_level(dolfin.ERROR) -import logging -logging.getLogger('FFC').setLevel(logging.ERROR) -logging.getLogger('UFL').setLevel(logging.ERROR) -class URDMEResult(dict): +class Result(dict): """ Result object for a URDME simulation, extends the dict object. 
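For concreteness, the create_mass_action() translation above produces propensity strings such as the following, assuming a rate parameter named k and a species named X (only the cases visible in this hunk are listed):

    # 2X -> (anything):        "0.5*k*X*(X-1)/vol"
    # EmptySet -> (anything):  "k*vol"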
""" - def __init__(self, model=None, filename=None, loaddata=False): + def __init__(self, model=None, result_dir=None, loaddata=False): self.model = model - self.sol = None self.U = None self.tspan = None self.data_is_loaded = False - self.sol_initialized = False - self.filename = filename - if filename is not None and loaddata: - self.read_solution() self.stdout = None self.stderr = None - - def __ne__(self, other): - return not self.__eq__(other) - - def __eq__(self, other, verbose=False): - try: - tspan = self.get_timespan() - if numpy.any(tspan != other.get_timespan()): - if verbose: print "tspan does not match" - return False - for t in tspan: - for sname in self.model.listOfSpecies: - if numpy.any(self.get_species(sname, timepoints=t) != other.get_species(sname, timepoints=t)): - if verbose: print "Species {0} does not match at t={1}".format(sname, t) - return False - return True - except ValueError as e: - if verbose: print "value error: {0}".format(e) - return False - - - def get_endtime_model(self): - """ Return a URDME model object with the initial conditions set to the final time point of the - result object. - """ - if self.model is None: - raise Exception("can not continue a result with no model") - # create a soft copy - model_str = pickle.dumps(self.model) - model2 = pickle.loads(model_str) - # set the initial conditions - model2.u0 = numpy.zeros(self.model.u0.shape) - for s, sname in enumerate(self.model.listOfSpecies): - model2.u0[s,:] = self.get_species(sname, timepoints=-1) - return model2 - - - def __getstate__(self): - """ Used by pickle to get state when pickling. We need to read the contents of the - output file since we can't pickle file objects. """ - - try: - with open(self.filename,mode='rb') as fh: - filecontents = fh.read() - except Exception as e: - raise Exception(("Error pickling model. Failed to read result file:",str(e))) - - state = self.__dict__ - state["filecontents"] = filecontents - - state["v2d"] = self.get_v2d() - state["d2v"] = self.get_d2v() - - return state - - - def __setstate__(self, state): - """ Used by pickle to set state when unpickling. """ - - # If the object contains filecontents, write those to a new tmp file. - try: - filecontents = state.pop("filecontents",None) - fd = tempfile.NamedTemporaryFile(delete=False, dir=os.environ.get('PYURDME_TMPDIR')) - with open(fd.name, mode='wb') as fh: - fh.write(filecontents) - state["filename"] = fd.name - except Exception as e: - print "Error unpickling model, could not recreate the solution file." - raise e - - for k,v in state.items(): - self.__dict__[k] = v - - def get_v2d(self): - """ Return the vertex-to-dof mapping. """ - if not hasattr(self, 'v2d'): - fs = self.model.mesh.get_function_space() - self.v2d = dolfin.vertex_to_dof_map(fs) - - return self.v2d - - def get_d2v(self): - """ Return the dof-to-vertex mapping. """ - if not hasattr(self, 'd2v'): - fs = self.model.mesh.get_function_space() - self.d2v = dolfin.dof_to_vertex_map(fs) - - return self.d2v - - def _reorder_dof_to_voxel(self, M, num_species=None): - """ Reorder the colums of M from dof ordering to vertex ordering. 
""" - - v2d = self.get_v2d() - if len(M.shape) == 1: - num_timepoints = 1 - else: - num_timepoints = M.shape[0] - num_vox = self.model.mesh.get_num_voxels() - if num_species is None: - num_species = self.model.get_num_species() - num_dofs = num_vox*num_species - C = numpy.zeros((num_timepoints, num_dofs), dtype=numpy.float64) - - for vox_ndx in range(num_vox): - for cndx in range(num_species): - try: - if len(M.shape) == 1: - C[:, vox_ndx*num_species+cndx] = M[v2d[vox_ndx]*num_species+cndx] - else: - C[:, vox_ndx*num_species+cndx] = M[:, v2d[vox_ndx]*num_species+cndx] - except IndexError as e: - import traceback - #traceback.print_stack() - print traceback.format_exc() - print "C.shape: ", C.shape - print "M.shape: ", M.shape - print "num_timepoints: ", num_timepoints - print "vox_ndx={0},num_species={1},cndx={2}".format(vox_ndx,num_species,cndx) - print "v2d[vox_ndx]={0}".format(v2d[vox_ndx]) - print "vox_ndx*num_species+cndx={0}".format(vox_ndx*num_species+cndx) - print "v2d[vox_ndx]*num_species+cndx={0}".format(v2d[vox_ndx]*num_species+cndx) - raise e - return C - - def read_solution(self): - """ Read the tspan and U matrix into memory. """ - - resultfile = h5py.File(self.filename, 'r') - U = resultfile['U'] - U = numpy.array(U) - - tspan = resultfile['tspan'] - tspan = numpy.array(tspan).flatten() - resultfile.close() - - # Reorder the dof from dof ordering to voxel ordering - U = self._reorder_dof_to_voxel(U) - - self.U = U - self.tspan = tspan - self.data_is_loaded = True + self.result_dir = result_dir + + + +# def get_endtime_model(self): +# """ Return a URDME model object with the initial conditions set to the final time point of the +# result object. +# """ +# if self.model is None: +# raise Exception("can not continue a result with no model") +# # create a soft copy +# model_str = pickle.dumps(self.model) +# model2 = pickle.loads(model_str) +# # set the initial conditions +# model2.u0 = numpy.zeros(self.model.u0.shape) +# for s, sname in enumerate(self.model.listOfSpecies): +# model2.u0[s,:] = self.get_species(sname, timepoints=-1) +# return model2 + + +# def __getstate__(self): +# """ Used by pickle to get state when pickling. We need to read the contents of the +# output file since we can't pickle file objects. """ +# +# try: +# with open(self.filename,mode='rb') as fh: +# filecontents = fh.read() +# except Exception as e: +# raise Exception(("Error pickling model. Failed to read result file:",str(e))) +# +# state = self.__dict__ +# state["filecontents"] = filecontents +# +# state["v2d"] = self.get_v2d() +# state["d2v"] = self.get_d2v() +# +# return state +# +# +# def __setstate__(self, state): +# """ Used by pickle to set state when unpickling. """ +# +# # If the object contains filecontents, write those to a new tmp file. +# try: +# filecontents = state.pop("filecontents",None) +# fd = tempfile.NamedTemporaryFile(delete=False, dir=os.environ.get('PYURDME_TMPDIR')) +# with open(fd.name, mode='wb') as fh: +# fh.write(filecontents) +# state["filename"] = fd.name +# except Exception as e: +# print "Error unpickling model, could not recreate the solution file." +# raise e +# +# for k,v in state.items(): +# self.__dict__[k] = v + + + def read_step(self, step_num): + """ Read the data for simulation step 'step_num'. 
""" + reader = vtk.vtkGenericDataObjectReader() + filename = os.path.join(self.result_dir, "output{0}.vtk".format(step_num)) + #print("read_step({0}) opening '{1}'".format(step_num, filename)) + reader.SetFileName(filename) + reader.Update() + data = reader.GetOutput() + points = numpy.array( data.GetPoints().GetData() ) + pd = data.GetPointData() + vtk_data = {} + for i in range(pd.GetNumberOfArrays()): + if pd.GetArrayName(i) is None: break + #print(i,pd.GetArrayName(i)) + vtk_data[ pd.GetArrayName(i)] = numpy.array(pd.GetArray(i)) + return (points, vtk_data) + + +# def read_solution(self): +# """ Read the tspan and U matrix into memory. """ +# raise Exception("todo") +## self.U = U +## self.tspan = tspan +## self.data_is_loaded = True def get_timespan(self): - if self.tspan is not None: - resultfile = h5py.File(self.filename, 'r') - tspan = resultfile['tspan'] - tspan = numpy.array(tspan).flatten() - resultfile.close() - self.tspan = tspan + self.tspan = numpy.linspace(0,self.model.num_timesteps, + num=self.model.num_timesteps+1) * self.model.timestep_size return self.tspan def get_species(self, species, timepoints="all", concentration=False): @@ -238,127 +147,74 @@ def get_species(self, species, timepoints="all", concentration=False): if set to True, the concentration (=copy_number/volume) is returned. """ - if isinstance(species, Species): - spec_name = species.name - else: + if isinstance(species,str): spec_name = species - - species_map = self.model.get_species_map() - num_species = self.model.get_num_species() - - spec_indx = species_map[spec_name] - - resultfile = h5py.File(self.filename, 'r') - #Ncells = self.model.mesh.num_vertices() # Need dof ordering numVoxels - U = resultfile['U'] - Ncells = U.shape[1]/num_species - - if timepoints == "all": - Uslice= U[:,(spec_indx*Ncells):(spec_indx*Ncells+Ncells)] else: - Uslice = U[timepoints,(spec_indx*Ncells):(spec_indx*Ncells+Ncells)] - - if concentration: - Uslice = self._copynumber_to_concentration(Uslice) - - # Reorder the dof from dof ordering to voxel ordering - Uslice = self._reorder_dof_to_voxel(Uslice, num_species=1) - - # Make sure we return 1D slices as flat arrays - dims = numpy.shape(Uslice) - if dims[0] == 1: - Uslice = Uslice.flatten() - - resultfile.close() - return Uslice - + spec_name = species.name - def __setattr__(self, k, v): - if k in self.keys(): - self[k] = v - elif not hasattr(self, k): - self[k] = v - else: - raise AttributeError, "Cannot set '%s', cls attribute already exists" % ( k, ) - - def __setupitems__(self, k): - if k == 'sol' and not self.sol_initialized: - self._initialize_sol() - elif (k == 'U' or k == 'tspan') and not self.data_is_loaded: - if self.filename is None: - raise AttributeError("This result object has no data file.") - self.read_solution() - - def __getitem__(self, k): - self.__setupitems__(k) - if k in self.keys(): - return self.get(k) - raise KeyError("Object has no attribute {0}".format(k)) - - def __getattr__(self, k): - self.__setupitems__(k) - if k in self.keys(): - return self.get(k) - raise AttributeError("Object has no attribute {0}".format(k)) + species_map = self.model.species_map + num_species = self.model.get_num_species() + num_voxel = self.model.mesh.get_num_voxels() + + t_index_arr = numpy.linspace(0,self.model.num_timesteps, + num=self.model.num_timesteps+1, dtype=int) + + if timepoints != "all": + t_index_arr = t_index_arr[timepoints] + + ret = numpy.zeros( (len(t_index_arr), num_voxel)) + for ndx, t_ndx in enumerate(t_index_arr): + (_, step) = self.read_step(t_ndx) + 
if concentration: + # concentration = (copy_number/volume) + # volume = (mass/density) + ret[ndx,:] = step['C['+spec_name+']'] / (step['mass'] / step['rho'] ) + else: + ret[ndx,:] = step['C['+spec_name+']'] + + return ret + + +# def __setattr__(self, k, v): +# if k in self.keys(): +# self[k] = v +# elif not hasattr(self, k): +# self[k] = v +# else: +# raise AttributeError("Cannot set '%s', cls attribute already exists" % ( k, )) +# +# def __setupitems__(self, k): +# if (k == 'U' or k == 'tspan') and not self.data_is_loaded: +# if self.result_dir is None: +# raise AttributeError("This result object has no data file.") +# self.read_solution() +# +# def __getitem__(self, k): +# self.__setupitems__(k) +# if k in self.keys(): +# return self.get(k) +# raise KeyError("Object has no attribute {0}".format(k)) +# +# def __getattr__(self, k): +# self.__setupitems__(k) +# if k in self.keys(): +# return self.get(k) +# raise AttributeError("Object has no attribute {0}".format(k)) def __del__(self): """ Deconstructor. """ - # if not self.data_is_loaded: + # if not self.data_is_loaded: try: - # Clean up data file - os.remove(self.filename) - except OSError as e: - #print "URDMEResult.__del__: Could not delete result file'{0}': {1}".format(self.filename, e) + if self.result_dir is not None: + try: + shutil.rmtree(self.result_dir) + except OSError as e: + print("Could not delete '{0}'".format(self.result_dir)) + except Exception as e: pass - @deprecated - def _initialize_sol(self): - """ Initialize the sol variable. This is a helper function for export_to_vtk(). """ - - # Create Dolfin Functions for all the species - sol = {} - - if self.model is None: - raise URDMEError("URDMEResult.model must be set before the sol attribute can be accessed.") - numvox = self.model.mesh.num_vertices() - fs = self.model.mesh.get_function_space() - vertex_to_dof_map = self.get_v2d() - dof_to_vertex_map = self.get_d2v() - - # The result is loaded in dolfin Functions, one for each species and time point - for i, spec in enumerate(self.model.listOfSpecies): - - species = self.model.listOfSpecies[spec] - spec_name = species.name - - spec_sol = {} - for j, time in enumerate(self.tspan): - - func = dolfin.Function(fs) - func_vector = func.vector() - - S = self.get_species(spec, [j]) - - for voxel in range(numvox): - ix = vertex_to_dof_map[voxel] - try: - func_vector[ix] = float(S[voxel]/self.model.dofvol[ix]) - except IndexError as e: - print "func_vector.size(): ", func_vector.size() - print "dolfvox: ",dolfvox - print "S.shape: ",S.shape - print "voxel: ",voxel - print "vertex_to_dof_map[voxel]", vertex_to_dof_map[voxel] - print "self.model.dofvol.shape: ", self.model.dofvol.shape - raise e - spec_sol[time] = func - - sol[spec] = spec_sol - self.sol = sol - self.sol_initialized = True - return sol def export_to_csv(self, folder_name): """ Dump trajectory to a set CSV files, the first specifies the mesh (mesh.csv) and the rest specify trajectory data for each species (species_S.csv for species named 'S'). @@ -394,18 +250,19 @@ def export_to_vtk(self, species, folder_name): """ Dump the trajectory to a collection of vtk files in the folder folder_name (created if non-existant). The exported data is #molecules/volume, where the volume unit is implicit from the mesh dimension. 
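A hedged sketch of pulling data back out of a Result returned by Model.run(), assuming the solver has written one output{n}.vtk file per output time as read_step() expects:

    # copy numbers of species "A" at every output time, shape (num_timesteps+1, num_voxels)
    A_counts = result.get_species("A")

    # concentration (copy_number/volume, with volume = mass/rho) at the final time point
    A_conc = result.get_species("A", concentration=True)[-1, :]

    # raw point coordinates and per-point VTK arrays for a single output step
    points, arrays = result.read_step(result.model.num_timesteps)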
""" - #self._initialize_sol() - subprocess.call(["mkdir", "-p", folder_name]) - fd = dolfin.File(os.path.join(folder_name, "trajectory.xdmf").encode('ascii', 'ignore')) - func = dolfin.Function(self.model.mesh.get_function_space()) - func_vector = func.vector() - vertex_to_dof_map = self.get_v2d() - - for i, time in enumerate(self.tspan): - solvector = self.get_species(species,i,concentration=True) - for j, val in enumerate(solvector): - func_vector[vertex_to_dof_map[j]] = val - fd << func +# #self._initialize_sol() +# subprocess.call(["mkdir", "-p", folder_name]) +# fd = dolfin.File(os.path.join(folder_name, "trajectory.xdmf").encode('ascii', 'ignore')) +# func = dolfin.Function(self.model.mesh.get_function_space()) +# func_vector = func.vector() +# vertex_to_dof_map = self.get_v2d() +# +# for i, time in enumerate(self.tspan): +# solvector = self.get_species(species,i,concentration=True) +# for j, val in enumerate(solvector): +# func_vector[vertex_to_dof_map[j]] = val +# fd << func + raise Exception("todo") def export_to_xyx(self, filename, species=None, file_format="VMD"): """ Dump the solution attached to a model as a xyz file. This format can be @@ -549,16 +406,36 @@ def _copynumber_to_concentration(self,copy_number_data): for t in range(dims[0]): timeslice = scaled_sol[t,:] for i,cn in enumerate(timeslice): - scaled_sol[t, i] = float(cn)/(6.022e23*self.model.dofvol[i]) + scaled_sol[t, i] = float(cn)/(6.022e23*self.model.vol[i]) return scaled_sol + @classmethod + def _compute_colors(cls, x): + import matplotlib.cm + + # Get RGB color map proportional to the concentration. + cm = matplotlib.cm.ScalarMappable() + crgba= cm.to_rgba(x, bytes = True) + # Convert RGB to HEX + colors= [] + for row in crgba: + # get R,G,B of RGBA + colors.append(_rgb_to_hex(tuple(list(row[0:3])))) + + # Convert Hex to Decimal + for i,c in enumerate(colors): + colors[i] = int(c,0) + + + return colors + def _compute_solution_colors(self,species, time_index): """ Create a color list for species at time. """ timeslice = self.get_species(species,time_index, concentration = True) - colors = _compute_colors(timeslice) + colors = self._compute_colors(timeslice) return colors def display_particles(self,species, time_index, width=500): @@ -575,18 +452,20 @@ def display_particles(self,species, time_index, width=500): def display(self, species, time_index, opacity=1.0, wireframe=True, width=500, camera=[0,0,1]): """ Plot the trajectory as a PDE style plot. """ - load_pyurdme_javascript_libraries() - data = self.get_species(species,time_index,concentration=True) - fun = DolfinFunctionWrapper(self.model.mesh.get_function_space()) - vec = fun.vector() - (nd,) = numpy.shape(data) - if nd == len(vec): - for i in range(nd): - vec[i]=data[i] - else: - #v2d= self.get_v2d() - for i in range(len(vec)): - vec[i] = data[i] # shouldn't we use v2d or d2v here? But it doesn't work if I do. - fun.display(opacity=opacity, wireframe=wireframe, width=width, camera=camera) +# load_pyurdme_javascript_libraries() +# data = self.get_species(species,time_index,concentration=True) +# fun = DolfinFunctionWrapper(self.model.mesh.get_function_space()) +# vec = fun.vector() +# (nd,) = numpy.shape(data) +# if nd == len(vec): +# for i in range(nd): +# vec[i]=data[i] +# else: +# #v2d= self.get_v2d() +# for i in range(len(vec)): +# vec[i] = data[i] # shouldn't we use v2d or d2v here? But it doesn't work if I do. 
+# fun.display(opacity=opacity, wireframe=wireframe, width=width, camera=camera) + raise Exception("TODO") + diff --git a/spatialpy/Solver.py b/spatialpy/Solver.py index 10b8ba58..22590395 100644 --- a/spatialpy/Solver.py +++ b/spatialpy/Solver.py @@ -1,287 +1,110 @@ import os -import re import shutil +import signal import subprocess import sys import tempfile -import types -import warnings -import uuid - - -import numpy -import scipy.io -import scipy.sparse - -from model import * - -import inspect - -try: - # This is only needed if we are running in an Ipython Notebook - import IPython.display -except: - pass - -try: - import h5py -except: - raise Exception("SpatialPy requires h5py.") - -import pickle -import json -import functools - -# module-level variable to for javascript export in IPython/Jupyter notebooks -__spatialpy_javascript_libraries_loaded = False -def load_pyurdme_javascript_libraries(): - global __spatialpy_javascript_libraries_loaded - if not __spatialpy_javascript_libraries_loaded: - __spatialpy_javascript_libraries_loaded = True - import os.path - import IPython.display - with open(os.path.join(os.path.dirname(__file__),'data/three.js_templates/js/three.js')) as fd: - bufa = fd.read() - with open(os.path.join(os.path.dirname(__file__),'data/three.js_templates/js/render.js')) as fd: - bufb = fd.read() - with open(os.path.join(os.path.dirname(__file__),'data/three.js_templates/js/OrbitControls.js')) as fd: - bufc = fd.read() - IPython.display.display(IPython.display.HTML('')) - - -def deprecated(func): - '''This is a decorator which can be used to mark functions - as deprecated. It will result in a warning being emitted - when the function is used.''' - - @functools.wraps(func) - def new_func(*args, **kwargs): - warnings.warn_explicit( - "Call to deprecated function {}.".format(func.__name__), - category=DeprecationWarning, - filename=func.func_code.co_filename, - lineno=func.func_code.co_firstlineno + 1 - ) - return func(*args, **kwargs) - return new_func - - -class SpatialPySolver: +import time + + +from spatialpy.Model import * +from spatialpy.Result import * + + + +class Solver: """ Abstract class for spatialpy solvers. """ - def __init__(self, model, solver_path=None, report_level=0, model_file=None, sopts=None): + def __init__(self, model, report_level=0): """ Constructor. 
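As a usage sketch of this constructor (and of the timeout handling added to run() further down), the NSM solver can also be instantiated directly, mirroring what Model.run() does; 'model' stands for an already assembled SpatialPy model.

    from spatialpy.nsmsolver import NSMSolver
    from spatialpy.Model import SimulationTimeout

    sol = NSMSolver(model, report_level=1)
    try:
        result = sol.run(number_of_trajectories=1, seed=1, timeout=600)
    except SimulationTimeout:
        print("solver did not finish within 600 seconds")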
""" - if not isinstance(model, SpatialPyModel): - raise SpatialPyError("URDMEsolver constructors must take a SpatialPyModel as an argument.") - if not issubclass(self.__class__, URDMESolver): - raise SpatialPyError("Solver classes must be a subclass of SpatialPySolver.") + #TODO: fix class checking + #if not isinstance(model, Model): + # raise SimulationError("Solver constructors must take a Model as an argument.") + #if not issubclass(self.__class__, Solver): + # raise SimulationError("Solver classes must be a subclass of SpatialPy.Solver.") if not hasattr(self, 'NAME'): - raise SpatialPyError("Solver classes must implement a NAME attribute.") + raise SimulationError("Solver classes must implement a NAME attribute.") self.model = model self.is_compiled = False self.report_level = report_level - self.model_file = model_file - self.infile_name = None - self.delete_infile = False self.model_name = self.model.name - self.solver_base_dir = None - if sopts is None: - self.sopts = [0,0,0] - else: - self.sopts = sopts + self.build_dir = None + self.executable_name = 'ssa_sdpd' + self.h = None # basis function width - # For the remote execution - self.temp_urdme_root = None - - self.SpatialPy_ROOT = os.path.dirname(os.path.abspath(__file__))+"/spatialpy" - - #print "solver_path={0}".format(solver_path) - if solver_path is None or solver_path == "": - self.SpatialPy_BUILD = self.SpatialPy_ROOT + '/build/' - else: - self.SpatialPy_BUILD = solver_path + '/build/' - os.environ['SOLVER_ROOT'] = solver_path - - def __getstate__(self): - """ Save the state of the solver, saves all instance variables - and reads all the files necessary to compile the solver off - of the file system and stores it in a separate state variable. - If the solver model files is specified, it saves that too. - This is used by Pickle. - """ - ret = {} - # Save the instance variables - ret['vars'] = self.__dict__.copy() - # The model object is not picklabe due to the Swig-objects from Dolfin - #ret['vars']['model'] = None - ret['vars']['is_compiled'] = False - # Create temp root - tmproot = tempfile.mkdtemp(dir=os.environ.get('PYURDME_TMPDIR')) - # Get the propensity file - model_file = tmproot+'/'+self.model_name + '_pyurdme_generated_model'+ '.c' - ret['model_file'] = os.path.basename(model_file) - if self.model_file == None: - self.create_propensity_file(file_name=model_file) - else: - subprocess.call('cp '+self.model_file+' '+model_file, shell=True) - # Get the solver source files - os.mkdir(tmproot+'/include') - os.mkdir(tmproot+'/src') - os.mkdir(tmproot+'/src/'+self.NAME) - #TODO: what if solverdir is not the same as URDME_ROOT ? - subprocess.call('cp '+self.URDME_ROOT+'/src/*.c '+tmproot+'/src/', shell=True) - subprocess.call('cp '+self.URDME_ROOT+'/src/'+self.NAME+'/*.* '+tmproot+'/src/'+self.NAME+'/', shell=True) - subprocess.call('cp '+self.URDME_ROOT+'/include/*.h '+tmproot+'/include/', shell=True) - #TODO: get the include files from solvers not in the default path (none currently implement this). 
- # Get the Makefile - os.mkdir(tmproot+'/build') - subprocess.call('cp '+self.URDME_BUILD+'Makefile.'+self.NAME+' '+tmproot+'/build/Makefile.'+self.NAME, shell=True) - # Get the input file - input_file = tmproot+'/model_input.mat' - ret['input_file'] = os.path.basename(input_file) - self.serialize(filename=input_file, report_level=self.report_level) - ## - origwd = os.getcwd() - os.chdir(tmproot) - tarname = tmproot+'/'+self.NAME+'.tar.gz' - subprocess.call('tar -czf '+tarname+' src include build '+os.path.basename(input_file)+' '+os.path.basename(model_file), shell=True) - with open(tarname, 'r') as f: - ret['SolverFiles'] = f.read() - os.chdir(origwd) - shutil.rmtree(tmproot) - # return the state - return ret - - def __setstate__(self, state): - """ Set all instance variables for the object, and create a unique temporary - directory to store all the solver files. URDME_BUILD is set to this dir, - and is_compiled is always set to false. This is used by Pickle. - """ - # 0. restore the instance variables - for key, val in state['vars'].iteritems(): - self.__dict__[key] = val - # 1. create temporary directory = SPATIALPY_ROOT - self.temp_spatialpy_root = tempfile.mkdtemp(dir=os.environ.get('SPATIALPY_TMPDIR')) - self.SPATIALPY_ROOT = self.temp_spatialpy_root - self.SPATIALPY_BUILD = self.temp_spatialpy_root+'/build/' - origwd = os.getcwd() - os.chdir(self.temp_spatialpy_root) - tarname = self.temp_spatialpy_root+'/'+self.NAME+'.tar.gz' - with open(tarname, 'wd') as f: - f.write(state['SolverFiles']) - subprocess.call('tar -zxf '+tarname, shell=True) - os.chdir(origwd) - # Model File - self.model_file = self.temp_spatialpy_root+'/'+state['model_file'] - # Input File - self.infile_name = self.temp_spatialpy_root+'/'+state['input_file'] + self.SpatialPy_ROOT = os.path.dirname(os.path.abspath(__file__))+"/ssa_sdpd-c-simulation-engine" def __del__(self): """ Deconstructor. Removes the compiled solver.""" - if self.delete_infile: - try: - os.remove(self.infile_name) - except OSError as e: - print "Could not delete '{0}'".format(self.infile_name) - if self.solver_base_dir is not None: - try: - shutil.rmtree(self.solver_base_dir) - except OSError as e: - print "Could not delete '{0}'".format(self.solver_base_dir) - if self.temp_spatialpy_root is not None: - try: - shutil.rmtree(self.temp_spatialpy_root) - except OSError as e: - print "Could not delete '{0}'".format(self.temp_urdme_root) - - - def serialize(self, filename=None, report_level=0, sopts=None): - """ Write the datastructures needed by the the core URDME solvers to a .mat input file. """ - spatialpy_solver_data = self.model.get_solver_datastructure() - spatialpy_solver_data['report'] = report_level - if sopts is None: - spatialpy_solver_data['sopts'] = self.sopts - else: - spatialpy_solver_data['sopts'] = sopts + try: + if self.build_dir is not None: + try: + shutil.rmtree(self.build_dir) + except OSError as e: + print("Could not delete '{0}'".format(self.solver_base_dir)) + except Exception as e: + pass - self.model.validate(spatialpy_solver_data) - scipy.io.savemat(filename, spatialpy_solver_data, oned_as='column') def compile(self): """ Compile the model.""" # Create a unique directory each time call to compile. 
- self.solver_base_dir = tempfile.mkdtemp(dir=os.environ.get('SPATIALPY_TMPDIR')) - self.solver_dir = self.solver_base_dir + '/.spatialpy/' - #print "URDMESolver.compile() self.solver_dir={0}".format(self.solver_dir) + self.build_dir = tempfile.mkdtemp(dir=os.environ.get('SPATIALPY_TMPDIR')) if self.report_level >= 1: - print "Compiling Solver" - - if os.path.isdir(self.solver_dir): - try: - shutil.rmtree(self.solver_dir) - except OSError as e: - pass - try: - os.mkdir(self.solver_dir) - except Exception as e: - pass + print("Compiling Solver. Build dir: {0}".format(self.build_dir)) # Write the propensity file - self.propfilename = self.model_name + '_pyurdme_generated_model' - if self.model_file == None: - prop_file_name = self.solver_dir + self.propfilename + '.c' - if self.report_level > 1: - print "Creating propensity file {0}".format(prop_file_name) - self.create_propensity_file(file_name=prop_file_name) - else: - cmd = " ".join(['cp', self.model_file, self.solver_dir + self.propfilename + '.c']) - if self.report_level > 1: - print cmd - subprocess.call(cmd, shell=True) + self.propfilename = self.model_name + '_generated_model' + self.prop_file_name = self.build_dir + '/' + self.propfilename + '.c' + if self.report_level > 1: + print("Creating propensity file {0}".format(self.prop_file_name)) + self.create_propensity_file(file_name=self.prop_file_name) # Build the solver - makefile = 'Makefile.' + self.NAME - cmd = " ".join([ 'cd', self.solver_base_dir , ';', 'make', '-f', self.URDME_BUILD + makefile, 'URDME_ROOT=' + self.URDME_ROOT, 'URDME_MODEL=' + self.propfilename]) + makefile = self.SpatialPy_ROOT+'/build/Makefile.'+self.NAME + cmd = " ".join([ 'cd', self.build_dir , ';', 'make', '-f', makefile, 'ROOT=' + self.SpatialPy_ROOT, 'MODEL=' + self.prop_file_name,'BUILD='+self.build_dir]) if self.report_level > 1: - print "cmd: {0}\n".format(cmd) + print("cmd: {0}\n".format(cmd)) try: handle = subprocess.Popen(cmd, stdout = subprocess.PIPE, stderr=subprocess.PIPE, shell=True) return_code = handle.wait() except OSError as e: - print "Error, execution of compilation raised an exception: {0}".format(e) - print "cmd = {0}".format(cmd) - raise URDMEError("Compilation of solver failed") + print("Error, execution of compilation raised an exception: {0}".format(e)) + print("cmd = {0}".format(cmd)) + raise SimulationError("Compilation of solver failed") if return_code != 0: try: - print handle.stdout.read() - print handle.stderr.read() + print(handle.stdout.read().decode("utf-8")) + print(handle.stderr.read().decode("utf-8")) except Exception as e: pass - raise URDMEError("Compilation of solver failed, return_code={0}".format(return_code)) + raise SimulationError("Compilation of solver failed, return_code={0}".format(return_code)) if self.report_level > 1: - print handle.stdout.read() - print handle.stderr.read() + print(handle.stdout.read().decode("utf-8")) + print(handle.stderr.read().decode("utf-8")) self.is_compiled = True - def run(self, number_of_trajectories=1, seed=None, input_file=None, loaddata=False): + def run(self, number_of_trajectories=1, seed=None, timeout=None): """ Run one simulation of the model. - number_of_trajectories: How many trajectories should be run. - seed: the random number seed (incremented by one for multiple runs). - input_file: the filename of the solver input data file . - loaddata: boolean, should the result object load the data into memory on creation. + Args: + number_of_trajectories: (int) How many trajectories should be simulated. 
+ seed: (int) the random number seed (incremented by one for multiple runs). + timeout: (int) maximum number of seconds the solver can run. + + Returns: - URDMEResult object. + Result object. or, if number_of_trajectories > 1 - a list of URDMEResult objects + a list of Result objects """ if number_of_trajectories > 1: result_list = [] @@ -289,80 +112,81 @@ def run(self, number_of_trajectories=1, seed=None, input_file=None, loaddata=Fal if not self.is_compiled: self.compile() - if input_file is None: - if self.infile_name is None or not os.path.exists(self.infile_name): - # Get temporary input and output files - infile = tempfile.NamedTemporaryFile(delete=False, dir=os.environ.get('PYURDME_TMPDIR')) - - # Write the model to an input file in .mat format - self.serialize(filename=infile, report_level=self.report_level) - infile.close() - self.infile_name = infile.name - self.delete_infile = True - else: - self.infile_name = input_file - self.delete_infile = False - - if not os.path.exists(self.infile_name): - raise URDMEError("input file not found.") - # Execute the solver for run_ndx in range(number_of_trajectories): - outfile = tempfile.NamedTemporaryFile(delete=False, dir=os.environ.get('PYURDME_TMPDIR')) - outfile.close() - urdme_solver_cmd = [self.solver_dir + self.propfilename + '.' + self.NAME, self.infile_name, outfile.name] + outfile = tempfile.mkdtemp(dir=os.environ.get('SPATIALPY_TMPDIR')) + result = Result(self.model, outfile) + solver_cmd = 'cd {0}'.format(outfile) + ";" + os.path.join(self.build_dir, self.executable_name) if seed is not None: - urdme_solver_cmd.append(str(seed+run_ndx)) + solver_cmd += " "+str(seed+run_ndx) if self.report_level > 1: - print 'cmd: {0}\n'.format(urdme_solver_cmd) + print('cmd: {0}\n'.format(solver_cmd)) stdout = '' stderr = '' +# try: +# if self.report_level >= 1: #stderr & stdout to the terminal +# handle = subprocess.Popen(solver_cmd, shell=True) +# else: +# handle = subprocess.Popen(solver_cmd, stderr=subprocess.PIPE, +# stdout=subprocess.PIPE, shell=True) +# stdout, stderr = handle.communicate() +# return_code = handle.wait() +# except OSError as e: +# print("Error, execution of solver raised an exception: {0}".format(e)) +# print("cmd = {0}".format(solver_cmd)) try: - if self.report_level >= 1: #stderr & stdout to the terminal - handle = subprocess.Popen(urdme_solver_cmd) - else: - handle = subprocess.Popen(urdme_solver_cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE) - stdout, stderr = handle.communicate() - return_code = handle.wait() + start = time.monotonic() + with subprocess.Popen(solver_cmd, shell=True, stdout=subprocess.PIPE, preexec_fn=os.setsid) as process: + try: + if timeout is not None: + stdout,stderr = process.communicate(timeout=timeout) + else: + stdout,stderr = process.communicate() + return_code = process.wait() + if self.report_level >= 1: #stderr & stdout to the terminal + print('Elapsed seconds: {:.2f}'.format(time.monotonic() - start)) + if stdout is not None: print(stdout.decode('utf-8')) + if stderr is not None: print(stderr.decode('utf-8')) + except subprocess.TimeoutExpired: + os.killpg(process.pid, signal.SIGINT) # send signal to the process group + stdout,stderr = process.communicate() + message = "SpatialPy solver timeout exceded. 
" + if stdout is not None: message += stdout.decode('utf-8') + if stderr is not None: message += stderr.decode('utf-8') + raise SimulationTimeout(message) except OSError as e: - print "Error, execution of solver raised an exception: {0}".format(e) - print "urdme_solver_cmd = {0}".format(urdme_solver_cmd) + print("Error, execution of solver raised an exception: {0}".format(e)) + print("cmd = {0}".format(solver_cmd)) if return_code != 0: if self.report_level >= 1: try: - print stderr, stdout + print(stderr) + print(stdout) except Exception as e: pass - print "urdme_solver_cmd = {0}".format(urdme_solver_cmd) - raise URDMEError("Solver execution failed, return code = {0}".format(return_code)) + print("solver_cmd = {0}".format(solver_cmd)) + raise SimulationError("Solver execution failed, return code = {0}".format(return_code)) - #Load the result from the hdf5 output file. - try: - result = URDMEResult(self.model, outfile.name, loaddata=loaddata) - result["Status"] = "Sucess" - result.stderr = stderr - result.stdout = stdout - if number_of_trajectories > 1: - result_list.append(result) - else: - return result - except Exception as e: - exc_info = sys.exc_info() - os.remove(outfile.name) - raise exc_info[1], None, exc_info[2] + result["Status"] = "Success" + if stdout is not None: result.stdout = stdout.decode('utf-8') + if stderr is not None: result.stderr = stderr.decode('utf-8') + if number_of_trajectories > 1: + result_list.append(result) + else: + return result return result_list def create_propensity_file(self, file_name=None): - """ Generate the C propensity file that is used to compile the URDME solvers. + """ Generate the C propensity file that is used to compile the solvers. Only mass action propensities are supported. """ - template = open(os.path.abspath(os.path.dirname(__file__)) + '/data/propensity_file_template.c', 'r') + template = open(os.path.abspath(os.path.dirname(__file__)) + '/ssa_sdpd-c-simulation-engine/propensity_file_template.c', 'r') propfile = open(file_name, "w") propfilestr = template.read() @@ -370,7 +194,6 @@ def create_propensity_file(self, file_name=None): i = 0 for S in self.model.listOfSpecies: speciesdef += "#define " + S + " " + "x[" + str(i) + "]" + "\n" - speciesdef += "#define " + S + "_INDEX " + str(i) + "\n" i += 1 propfilestr = propfilestr.replace("__DEFINE_SPECIES__", speciesdef) @@ -384,7 +207,7 @@ def create_propensity_file(self, file_name=None): i = 0 for d in self.model.listOfDataFunctions: if d.name is None: - raise URDMEError("DataFunction {0} does not have a name attributed defined.".format(i)) + raise SimulationError("DataFunction {0} does not have a name attributed defined.".format(i)) data_fn_str += "#define " + d.name + " data[" + str(i) + "]\n" i += 1 propfilestr = propfilestr.replace("__DEFINE_DATA_FUNCTIONS__", str(data_fn_str)) @@ -409,7 +232,9 @@ def create_propensity_file(self, file_name=None): rname = self.model.listOfReactions[R].name func += funheader.replace("__NAME__", rname) + "\n{\n" if self.model.listOfReactions[R].restrict_to == None or (isinstance(self.model.listOfReactions[R].restrict_to, list) and len(self.model.listOfReactions[R].restrict_to) == 0): + func += "return " func += self.model.listOfReactions[R].propensity_function + func += ";" else: func += "if(" if isinstance(self.model.listOfReactions[R].restrict_to, list) and len(self.model.listOfReactions[R].restrict_to) > 0: @@ -419,79 +244,180 @@ def create_propensity_file(self, file_name=None): elif isinstance(self.model.listOfReactions[R].restrict_to, int): func += 
"sd == " + str(self.model.listOfReactions[R].restrict_to) else: - raise URDMEError("When restricting reaction to subdomains, you must specify either a list or an int") + raise SimulationError("When restricting reaction to subdomains, you must specify either a list or an int") func += "){\n" + func += "return " func += self.model.listOfReactions[R].propensity_function - + func += ";" func += "\n}else{" func += "\n\treturn 0.0;}" func += "\n}" funcs += func + "\n\n" - funcinits += " ptr[" + str(i) + "] = " + rname + ";\n" + funcinits += " ptr[" + str(i) + "] = (PropensityFun) " + rname + ";\n" i += 1 propfilestr = propfilestr.replace("__DEFINE_REACTIONS__", funcs) propfilestr = propfilestr.replace("__DEFINE_PROPFUNS__", funcinits) + # End of pyurdme replacements + # SSA-SDPD values here + init_particles = "" + for i in range(len(self.model.sd)): + init_particles += " init_create_particle(sys,id++,{0},{1},{2},{3});".format(self.model.mesh.coordinates()[i,0],self.model.mesh.coordinates()[i,1],self.model.mesh.coordinates()[i,2],self.model.sd[i])+ "\n" + propfilestr = propfilestr.replace("__INIT_PARTICLES__", init_particles) + + + # process initial conditions here + self.model.apply_initial_conditions() + nspecies = self.model.u0.shape[0] + ncells = self.model.u0.shape[1] + + input_constants = "" + + outstr = "static unsigned int input_u0[{0}] = ".format(nspecies*ncells) + outstr+="{" + for i in range(ncells): + for s in range(nspecies): + if i+s>0: outstr+=',' + outstr+= str(self.model.u0[s,i]) + outstr+="};" + input_constants += outstr + "\n" + # attache the vol to the model as well, for backwards compatablity + self.model.vol = self.model.mesh.get_vol() + outstr = "static double input_vol[{0}] = ".format(self.model.mesh.get_vol().shape[0]) + outstr+="{" + for i in range(self.model.mesh.get_vol().shape[0]): + if i>0: outstr+=',' + outstr+= str(self.model.mesh.get_vol()[i]) + outstr+="};" + input_constants += outstr + "\n" + outstr = "static int input_sd[{0}] = ".format(self.model.sd.shape[0]) + outstr+="{" + for i in range(self.model.sd.shape[0]): + if i>0: outstr+=',' + outstr+= str(self.model.sd[i]) + outstr+="};" + input_constants += outstr + "\n" + + if len(self.model.listOfDataFunctions) == 0: + outstr = "static int input_dsize = 1;" + input_constants += outstr + "\n" + outstr = "static double input_data[{0}] = ".format(ncells) + outstr += "{" + ",".join(['0']*80) + "};" + input_constants += outstr + "\n" + else: + outstr = "static int input_dsize = {0};".format(len(self.model.listOfDataFunctions)) + input_constants += outstr + "\n" + outstr = "static double input_data[{0}] = ".format(ncells*len(self.model.listOfDataFunctions)) + outstr+="{" + for v_ndx in range(ncells): + for ndf in range(len(self.model.listOfDataFunctions)): + if ndf+v_ndx>0: outstr+=',' + outstr+= "{0}".format( self.model.listOfDataFunctions[ndf].map( self.model.mesh.coordinate()[v_ndx,:] ) ) + outstr+="};" + input_constants += outstr + "\n" + + + N = self.model.create_stoichiometric_matrix() + outstr = "static size_t input_irN[{0}] = ".format(len(N.indices)) + outstr+="{" + for i in range(len(N.indices)): + if i>0: outstr+=',' + outstr+= str(N.indices[i]) + outstr+="};" + input_constants += outstr + "\n" + + outstr = "static size_t input_jcN[{0}] = ".format(len(N.indptr)) + outstr+="{" + for i in range(len(N.indptr)): + if i>0: outstr+=',' + outstr+= str(N.indptr[i]) + outstr+="};" + input_constants += outstr + "\n" + + outstr = "static int input_prN[{0}] = ".format(len(N.data)) + outstr+="{" + for i in 
range(len((N.data))): + if i>0: outstr+=',' + outstr+= str(N.data[i]) + outstr+="};" + input_constants += outstr + "\n" + + + G = self.model.create_dependency_graph() + outstr = "static size_t input_irG[{0}] = ".format(len(G.indices)) + outstr+="{" + for i in range(len(G.indices)): + if i>0: outstr+=',' + outstr+= str(G.indices[i]) + outstr+="};" + input_constants += outstr + "\n" + + outstr = "static size_t input_jcG[{0}] = ".format(len(G.indptr)) + outstr+="{" + for i in range(len(G.indptr)): + if i>0: outstr+=',' + outstr+= str(G.indptr[i]) + outstr+="};" + input_constants += outstr + "\n" + outstr = "const char* const input_species_names[] = {" + for i,s in enumerate(self.model.listOfSpecies.keys()): + if i>0: outstr+="," + outstr+='"'+s+'"' + outstr+=", 0};" + input_constants += outstr + "\n" + num_subdomains = len(self.model.listOfSubdomainIDs) + outstr = "const int input_num_subdomain = {0};".format(num_subdomains) + input_constants += outstr + "\n" + outstr = "const double input_subdomain_diffusion_matrix[{0}] = ".format(len(self.model.listOfSpecies)*num_subdomains) + outstr+="{" + for i, sname in enumerate(self.model.listOfSpecies.keys()): + s = self.model.listOfSpecies[sname] + #print sname, + for j, sd_id in enumerate(range(num_subdomains)): + #print sd_ndx, + if i+j>0: outstr+=',' + if sd_id not in self.model.listOfDiffusionRestrictions[s]: + outstr+= "{0}".format(s.diffusion_constant) + else: + outstr+= "0.0" + outstr+="};" + input_constants += outstr + "\n" + propfilestr = propfilestr.replace("__INPUT_CONSTANTS__", input_constants) + + system_config = "" + system_config +="system->dt = {0};\n".format(self.model.timestep_size) + system_config +="system->nt = {0};\n".format(self.model.num_timesteps) + system_config +="system->output_freq = 1;\n" + if self.h is None: + self.h = self.model.mesh.find_h() + if self.h == 0.0: + raise ModelError('h (basis function width) can not be zero.') + system_config +="system->h = {0};\n".format(self.h) + system_config +="system->rho0 = 1.0;\n" + system_config +="system->c0 = 10;\n" + system_config +="system->P0 = 10;\n" + #// bounding box + bounding_box = self.model.mesh.get_bounding_box() + system_config +="system->xlo = {0};\n".format(bounding_box[0]) + system_config +="system->xhi = {0};\n".format(bounding_box[1]) + system_config +="system->ylo = {0};\n".format(bounding_box[2]) + system_config +="system->yhi = {0};\n".format(bounding_box[3]) + system_config +="system->zlo = {0};\n".format(bounding_box[4]) + system_config +="system->zhi = {0};\n".format(bounding_box[5]) + + propfilestr = propfilestr.replace("__SYSTEM_CONFIG__", system_config) + + + #### Write the data to the file #### propfile.write(propfilestr) propfile.close() -def spatialpy(model=None, solver='nsm', solver_path="", model_file=None, input_file=None, seed=None, report_level=0): - """ SPATIALPY solver interface. - Similar to model.run() the urdme() function provides an interface that is backwards compatiable with the - previous URDME implementation. - After sucessful execution, urdme returns a URDMEResults object with the following members: - U: the raw copy number output in a matrix with dimension (Ndofs, num_time_points) - tspan: the time span vector containing the time points that corresponds to the columns in U - status: Sucess if the solver executed without error - stdout: the standard ouput stream from the call to the core solver - stderr: the standard error stream from the call to the core solver - """ - - - #If solver is a subclass of URDMESolver, use it directly. 
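The generated propensity file embeds the stoichiometric matrix as three C static arrays (input_irN, input_jcN, input_prN) that mirror scipy's compressed-sparse-column layout, as the replacement code above shows. Below is a minimal, self-contained sketch of that serialization pattern; csc_to_c_arrays is an illustrative helper, not part of the SpatialPy API.

import scipy.sparse

def csc_to_c_arrays(name, matrix):
    # Emit C static-array initializers mirroring the input_irN/jcN/prN pattern.
    csc = scipy.sparse.csc_matrix(matrix)
    lines = []
    for prefix, ctype, values in (("ir", "size_t", csc.indices),
                                  ("jc", "size_t", csc.indptr),
                                  ("pr", "int", csc.data)):
        body = ",".join(str(v) for v in values)
        lines.append("static {0} input_{1}{2}[{3}] = {{{4}}};".format(
            ctype, prefix, name, len(values), body))
    return "\n".join(lines)

# Example: A -> B in a 2-species, 1-reaction system.
print(csc_to_c_arrays("N", [[-1], [1]]))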
- if isinstance(solver, (type, types.ClassType)) and issubclass(solver, URDMESolver): - sol = solver(model, solver_path, report_level, model_file=model_file) - elif type(solver) is str: - if solver == 'nsm': - from nsmsolver import NSMSolver - sol = NSMSolver(model, solver_path, report_level, model_file=model_file) - elif solver == 'nem': - from nemsolver import NEMSolver - sol = NEMSolver(model, solver_path, report_level, model_file=model_file) - else: - raise URDMEError("Unknown solver: {0}".format(solver_name)) - else: - raise URDMEError("solver argument to spatialpy() must be a string or a spatialpysolver class object.") - - sol.compile() - return sol.run(seed=seed, input_file=input_file) - - -class SPATIALPYDataFunction(): - """ Abstract class used to constuct the URDME data vector. """ - name = None - def __init__(self, name=None): - if name is not None: - self.name = name - if self.name is None: - raise Exception("URDMEDataFunction must have a 'name'") - - def map(self, x): - """ map() takes the coordinate 'x' and returns a list of doubles. - Args: - x: a list of 3 ints. - Returns: - a list of floats. - """ - raise Exception("URDMEDataFunction.map() not implemented.") -if __name__ == '__main__': - """ Command line interface to URDME. Execute URDME given a model file. """ diff --git a/spatialpy/SpatialPy.py b/spatialpy/SpatialPy.py deleted file mode 100644 index f2068bfa..00000000 --- a/spatialpy/SpatialPy.py +++ /dev/null @@ -1,931 +0,0 @@ -# pylint: disable-msg=C0301 -# pylint: disable-msg=C0103 - -import os -import re -import shutil -import subprocess -import sys -import tempfile -import types -import warnings -import uuid - - -import numpy -import scipy.io -import scipy.sparse - -from model import * - -import inspect - -try: - # This is only needed if we are running in an Ipython Notebook - import IPython.display -except: - pass - -try: - import h5py -except: - raise Exception("SpatialPy requires h5py.") - -import pickle -import json -import functools - -# module-level variable to for javascript export in IPython/Jupyter notebooks -__pyurdme_javascript_libraries_loaded = False -def load_pyurdme_javascript_libraries(): - global __pyurdme_javascript_libraries_loaded - if not __pyurdme_javascript_libraries_loaded: - __pyurdme_javascript_libraries_loaded = True - import os.path - import IPython.display - with open(os.path.join(os.path.dirname(__file__),'data/three.js_templates/js/three.js')) as fd: - bufa = fd.read() - with open(os.path.join(os.path.dirname(__file__),'data/three.js_templates/js/render.js')) as fd: - bufb = fd.read() - with open(os.path.join(os.path.dirname(__file__),'data/three.js_templates/js/OrbitControls.js')) as fd: - bufc = fd.read() - IPython.display.display(IPython.display.HTML('')) - - -def deprecated(func): - '''This is a decorator which can be used to mark functions - as deprecated. 
It will result in a warning being emitted - when the function is used.''' - - @functools.wraps(func) - def new_func(*args, **kwargs): - warnings.warn_explicit( - "Call to deprecated function {}.".format(func.__name__), - category=DeprecationWarning, - filename=func.func_code.co_filename, - lineno=func.func_code.co_firstlineno + 1 - ) - return func(*args, **kwargs) - return new_func - - -# Set log level to report only errors or worse -#dolfin.set_log_level(dolfin.ERROR) -#import logging -#logging.getLogger('FFC').setLevel(logging.ERROR) -#logging.getLogger('UFL').setLevel(logging.ERROR) - -class SPATIALPYModel(Model): - """ - An URDME Model extends Model with spatial information and methods to create URDME solver input. - TODO: Documentiation. - """ - - def __init__(self, name=""): - Model.__init__(self, name) - - # Currently not used - self.geometry = None - # - self.sd = [] - self.sd_initialized = False - - self.mesh = None - self.xmesh = None - self.stiffness_matrices = None - self.mass_matrices = None - - # subdomains is a list of MeshFunctions with subdomain marker information - self.subdomains = OrderedDict() - self.old_style_subdomain = False - - # This dictionary hold information about the subdomains each species is active on - self.species_to_subdomains = {} - self.tspan = None - - # URDMEDataFunction objects to construct the data vector. - self.listOfDataFunctions = [] - - # Volume of each voxel in the dolfin dof ordering (not vertex ordering). - self.dofvol = None - - def __getstate__(self): - """ Used by pickle to get state when pickling. Because we - have Swig wrappers to extension modules, we need to remove some instance variables - for the object to pickle. """ - - # Filter out any instance variable that is not picklable... - state = {} - for key, item in self.__dict__.items(): - if key == "subdomains": - sddict = OrderedDict() - for sdkey, sd_func in item.items(): - tmpfile = tempfile.NamedTemporaryFile(suffix=".xml") - dolfin.File(tmpfile.name) << sd_func - tmpfile.seek(0) - sddict[sdkey] = tmpfile.read() - tmpfile.close() - - state[key] = sddict - elif key in ["stiffness_matrices", "mass_matrices", "xmesh"]: - state[key] = None - else: - state[key] = item - - - return state - - def __setstate__(self, state): - """ Used by pickle to set state when unpickling. """ - - self.__dict__ = state - - if 'subdomains' in state: - # Recreate the subdomain functions - try: - sddict = OrderedDict() - for sdkey, sd_func_str in state["subdomains"].items(): - fd = tempfile.NamedTemporaryFile(suffix=".xml") - fdname = fd.name - fd.write(sd_func_str) - fd.seek(0) - fd_in = dolfin.File(fdname) - func = dolfin.MeshFunction("size_t", self.__dict__["mesh"]) - fd_in >> func - sddict[sdkey] = func - fd.close() - self.__dict__["subdomains"] = sddict - except Exception as e: - raise Exception("Error unpickling model, could not recreate the subdomain functions"+str(e)) - - self.create_extended_mesh() - - def add_data_function(self, data_function): - """ Add a URDMEDataFunction object to this object. """ - if isinstance(data_function, URDMEDataFunction): - self.listOfDataFunctions.append(data_function) - else: - raise Exception("data_function not of type URDMEDataFunction") - - def __initialize_species_map(self): - i = 0 - self.species_map = {} - for S in self.listOfSpecies: - self.species_map[S] = i - i = i + 1 - - def get_species_map(self): - """ Get the species map, name to index. 
""" - if not hasattr(self, 'species_map'): - self.__initialize_species_map() - - return self.species_map - - def add_subdomain(self, subdomain, subdomain_id=None): - """ Add a subdomain definition to the model. By default, all regions are set to - subdomain 1. - Args: - subdomain: an instance of a 'dolfin.SubDomain' subclass. - id: an int, the identifier for this subdomain. - """ - if subdomain_id is None and isinstance(subdomain, dolfin.cpp.mesh.MeshFunctionSizet): - # Old style, for backwards compatability - if not subdomain.dim() in self.subdomains.keys(): - self.subdomains[subdomain.dim()] = subdomain - self.old_style_subdomain = True - else: - raise ModelException("Failed to add subdomain function of dim "+str(subdomain.dim())+". Only one subdomain function of a given dimension is allowed.") - else: - # New style - if self.old_style_subdomain: - raise ModelException("Can't mix old and new style subdomains") - if not issubclass(subdomain.__class__, dolfin.SubDomain): - raise ModelException("'subdomain' argument to add_subdomain() must be a subclass of dolfin.SubDomain") - if subdomain_id is None or not isinstance(subdomain_id, int): - raise ModelException("'id' argument to add_subdomain() must be an int") - self.subdomains[subdomain_id] = subdomain - - - ################################ - - def _subdomains_to_threejs(self, subdomains={1:'blue'}): - """ Export threejs code to plot the mesh with edges colored for the listed subdomains. - Input is a dictionary with subdomain index:color pairs, output is a single json three.js mesh - with the subdomains colored according to the input colors. """ - sd = self.get_subdomain_vector() - c = ['black']*len(sd) - - for i, s in enumerate(sd): - try: - c[i] = subdomains[int(s)] - except KeyError: - pass - - jsondoc = self.mesh.export_to_three_js(colors = c) - return jsondoc - - def _subdomains_to_html(self, filename, sd=1): - sd = self.get_subdomain_vector() - c = _compute_colors(sd) - self.mesh._ipython_display_(filename, colors=c) - - def write_stochss_subdomain_file(self, filename="stochss_sdfile.txt"): - # Write mesh and subdomain files for the StochSS UI - sd = self.get_subdomain_vector() - with open(filename,'w') as fd: - for ndx, val in enumerate(sd): - fd.write("{0},{1}\n".format(ndx, val)) - - def display_mesh(self, subdomains, width=500, height=375, camera=[0,0,1]): - ''' WebGL display of the wireframe mesh.''' - load_pyurdme_javascript_libraries() - if isinstance(subdomains, int): - jstr = self._subdomains_to_threejs(subdomains={1:'blue', subdomains:'red'}) - elif isinstance(subdomains, list): - sd_in = {1:'blue'} - for i in subdomains: - sd_in[i] = 'red' - jstr = self._subdomains_to_threejs(subdomains=sd_in) - elif isinstance(subdomains, dict): - jstr = self._subdomains_to_threejs(subdomains=subdomains) - hstr = None - with open(os.path.dirname(os.path.abspath(__file__))+"/data/three.js_templates/mesh.html", 'r') as fd: - hstr = fd.read() - if hstr is None: - raise Exception("could note open template mesh.html") - hstr = hstr.replace('###PYURDME_MESH_JSON###', jstr) - # Create a random id for the display div. This is to avioid multiple plots ending up in the same - # div in Ipython notebook - displayareaid = str(uuid.uuid4()) - hstr = hstr.replace('###DISPLAYAREAID###', displayareaid) - # ###CAMERA_X###, ###CAMERA_Y###, ###CAMERA_Z### - hstr = hstr.replace('###CAMERA_X###',str(camera[0])) - hstr = hstr.replace('###CAMERA_Y###',str(camera[1])) - hstr = hstr.replace('###CAMERA_Z###',str(camera[2])) - html = '
'.format(width, height, displayareaid) - IPython.display.display(IPython.display.HTML(html+hstr)) - - - - ################################ - - def create_stoichiometric_matrix(self): - """ Generate a stoichiometric matrix in sparse CSC format. """ - - if not hasattr(self, 'species_map'): - self.__initialize_species_map() - if self.get_num_reactions() > 0: - ND = numpy.zeros((self.get_num_species(), self.get_num_reactions())) - for i, r in enumerate(self.listOfReactions): - R = self.listOfReactions[r] - reactants = R.reactants - products = R.products - - for s in reactants: - ND[self.species_map[s], i] -= reactants[s] - for s in products: - ND[self.species_map[s], i] += products[s] - - N = scipy.sparse.csc_matrix(ND) - else: - N = numpy.zeros((self.get_num_species(), self.get_num_reactions())) - - return N - - def create_dependency_graph(self): - """ Construct the sparse dependency graph. """ - # We cannot safely generate a dependency graph (without attempting to analyze the propensity string itself) - # if the model contains custom propensities. - mass_action_model = True - for name, reaction in self.listOfReactions.items(): - if not reaction.massaction: - GF = numpy.ones((self.get_num_reactions(), self.get_num_reactions() + self.get_num_species())) - mass_action_model = False - - if mass_action_model: - GF = numpy.zeros((self.get_num_reactions(), self.get_num_reactions() + self.get_num_species())) - species_map = self.get_species_map() - - involved_species = [] - reactants = [] - for name, reaction in self.listOfReactions.items(): - temp = [] - temp2 = [] - for s in reaction.reactants: - temp.append(species_map[s]) - temp2.append(species_map[s]) - for s in reaction.products: - temp.append(species_map[s]) - involved_species.append(temp) - reactants.append(temp2) - - species_to_reactions = [] - for species in self.listOfSpecies: - temp = [] - for j, x in enumerate(reactants): - if species_map[species] in x: - temp.append(j) - species_to_reactions.append(temp) - - - reaction_to_reaction = [] - for name, reaction in self.listOfReactions.items(): - temp = [] - for s in reaction.reactants: - if species_to_reactions[species_map[s]] not in temp: - temp = temp+species_to_reactions[species_map[s]] - - for s in reaction.products: - if species_to_reactions[species_map[s]] not in temp: - temp = temp+ species_to_reactions[species_map[s]] - - temp = list(set(temp)) - reaction_to_reaction.append(temp) - - # Populate G - for j, spec in enumerate(species_to_reactions): - for s in spec: - GF[s, j] = 1 - - for i,reac in enumerate(reaction_to_reaction): - for r in reac: - GF[r, self.get_num_species()+i] = 1 - - - try: - G = scipy.sparse.csc_matrix(GF) - except Exception as e: - G = GF - - return G - - - def timespan(self, tspan): - """ Set the time span of simulation. """ - self.tspan = tspan - - - def _initialize_default_subdomain(self): - """" Create a default subdomain function. The default is all voxels belong - to subdomain 1. - """ - - subdomain = dolfin.MeshFunction("size_t", self.mesh, self.mesh.topology().dim()-1) - subdomain.set_all(1) - self.add_subdomain(subdomain) - - def _initialize_species_to_subdomains(self): - """ Initialize the species mapping to subdomains. The default - is that a species is active in all the defined subdomains. - """ - - sds = list(numpy.unique(self.get_subdomain_vector())) - # This conversion is necessary for UFL not to choke on the subdomain ids. 
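For reference, the stoichiometry bookkeeping performed by create_stoichiometric_matrix() can be sketched with plain dictionaries standing in for the Species and Reaction model objects (the names below are illustrative, not the SpatialPy API):

import numpy
import scipy.sparse

species_map = {"A": 0, "B": 1, "C": 2}
# reaction name -> (reactants, products), each a dict of species name -> count
reactions = {
    "r1": ({"A": 1, "B": 1}, {"C": 1}),   # A + B -> C
    "r2": ({"C": 1}, {"A": 1, "B": 1}),   # C -> A + B
}

ND = numpy.zeros((len(species_map), len(reactions)))
for j, (reactants, products) in enumerate(reactions.values()):
    for s, count in reactants.items():
        ND[species_map[s], j] -= count    # consumed species
    for s, count in products.items():
        ND[species_map[s], j] += count    # produced species

N = scipy.sparse.csc_matrix(ND)           # same CSC format that is serialized to C
print(N.toarray())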
- for i, sd in enumerate(sds): - sds[i] = int(sd) - try: - sds.remove(0) - except ValueError: - pass - - # If a species is not present as key in the species_to_subdomain mapping, - # we label it as active in all subdomains - for spec_name in self.listOfSpecies: - species = self.listOfSpecies[spec_name] - if species not in self.species_to_subdomains.keys(): - self.species_to_subdomains[species] = sds - - - def restrict(self, species, subdomains): - """ Restrict the diffusion of a species to a subdomain. """ - self.species_to_subdomains[species] = subdomains - - - def set_subdomain_vector(self, sd): - """ Explicitly set the subdomain vector from an array. """ - self.sd = sd - self.sd_initialized = True - - def get_subdomain_vector(self, subdomains=None): - """ Create the 'sd' vector. 'subdomains' is a dolfin FacetFunction, - and if no subdomain input is specified, they voxels default to - subdomain 1. """ - if self.sd_initialized: - return self.sd - - # We need to make sure that the highest dimension is applied - # first, otherwise the cell level will overwrite all markings - # applied on boundaries. - if not hasattr(self,'xmesh'): - self.create_extended_mesh() - - self.mesh.init() - - sd = numpy.ones(self.mesh.get_num_voxels()) - - if len(self.subdomains) == 0: - self.sd = sd - else: - if self.old_style_subdomain: - subdomains = self.subdomains - else: - subdomains = OrderedDict() - sdvec = dolfin.MeshFunction("size_t", self.mesh, self.mesh.topology().dim()-1) - sdvec.set_all(1) - for id, inst in self.subdomains.iteritems(): - inst.mark(sdvec, id) - subdomains[sdvec.dim()] = sdvec - - for dim, subdomain in subdomains.items(): - if dim == 0: - # If we define subdomains on vertices, ONLY use those. - # Then it is a direct copy to the sd - for ndx, val in enumerate(subdomain): - sd[ndx] = val - break - else: - # Map all facet labels to vertex labels - tovertex = self.mesh.topology()(dim, 0) - for i in range(subdomain.size()): - for vtx in tovertex(i): - if subdomain[i] != 0: # TODO: Temporary hack to fix issue with Gmesh facet_region files. - sd[vtx] = subdomain[i] - - self.sd = sd - self.sd_initialized = True - return self.sd - - def initialize_initial_condition(self): - """ Create all-zeros inital condition matrix. """ - - ns = self.get_num_species() - if self.xmesh == None: - self.create_extended_mesh() - nv = self.mesh.get_num_voxels() - self.u0 = numpy.zeros((ns, nv)) - - def create_extended_mesh(self): - """ Extend the primary mesh with information about the degrees of freedom. """ - - xmesh = URDMEXmesh() - # Construct a species map (dict mapping model species name to an integer index) - species_map = self.get_species_map() - # Initialize the function spaces and dof maps. - for spec in self.listOfSpecies: - species = self.listOfSpecies[spec] - spec_name = species.name - spec_index = species_map[spec_name] - xmesh.function_space[spec_name] = self.mesh.get_function_space() - xmesh.vertex_to_dof_map[spec_name] = dolfin.vertex_to_dof_map(xmesh.function_space[spec_name]) - xmesh.vertex_to_dof_map[spec_name] = len(self.listOfSpecies) * xmesh.vertex_to_dof_map[spec_name] + spec_index - xmesh.dof_to_vertex_map[spec_name] = dolfin.dof_to_vertex_map(xmesh.function_space[spec_name]) - - - xmesh.vertex = self.mesh.coordinates() - self.xmesh = xmesh - - - # Some utility routines to set initial conditions - def set_initial_condition_scatter(self, spec_init, subdomains=None): - """ Scatter an initial number of molecules over the voxels in a subdomain. 
""" - - if not hasattr(self,"u0"): - self.initialize_initial_condition() - - if not hasattr(self, 'xmesh'): - self.create_extended_mesh() - - self._initialize_species_to_subdomains() - - self.get_subdomain_vector() - - for species in spec_init: - - if subdomains is None: - subdomains = self.species_to_subdomains[species] - - spec_name = species.name - num_spec = spec_init[species] - species_map = self.get_species_map() - specindx = species_map[spec_name] - - sd = self.sd - table = [] - for i, ind in enumerate(sd): - if ind in subdomains: - table.append(i) - - ltab = len(table) - if ltab == 0: - raise ModelException("set_initial_condition_scatter: No voxel in the given subdomains "+str(subdomains)+", check subdomain marking.") - - for mol in range(num_spec): - vtx = numpy.random.randint(0, ltab) - ind = table[vtx] - self.u0[specindx, ind] += 1 - - def set_initial_condition_distribute_uniformly(self, spec_init, subdomains=None): - """ Place the same number of molecules of the species in each voxel. """ - if not hasattr(self, "u0"): - self.initialize_initial_condition() - - if not hasattr(self, 'xmesh'): - self.create_extended_mesh() - - self._initialize_species_to_subdomains() - - species_map = self.get_species_map() - for spec in spec_init: - if subdomains is None: - subdomains = self.species_to_subdomains[spec] - spec_name = spec.name - num_spec = spec_init[spec] - specindx = species_map[spec_name] - for ndx in range(len(self.sd)): - if self.sd[ndx] in subdomains: - self.u0[specindx, ndx] = num_spec - - - def set_initial_condition_place_near(self, spec_init, point=None, add=False): - """ Place all molecules of kind species in the voxel nearest a given point. The species existing previously in this voxel are reset if add is set to False""" - - - if not hasattr(self, "u0"): - self.initialize_initial_condition() - - if not hasattr(self, 'xmesh'): - self.create_extended_mesh() - - for spec in spec_init: - spec_name = spec.name - num_spec = spec_init[spec] - - # Find the voxel with center (vertex) nearest to the point - ix = self.mesh.closest_vertex(point) - species_map = self.get_species_map() - specindx = species_map[spec_name] - self.u0[specindx, ix] = (self.u0[specindx, ix] if add else 0) + num_spec - - def set_initial_condition_place_voxel(self, spec_init, voxel,add=False): - """Place all molecules of kind species in the given voxel. The species existing previously in this voxel are reset if add is set to False""" - - if not hasattr(self, "u0"): - self.initialize_initial_condition() - - if not hasattr(self, 'xmesh'): - self.create_extended_mesh() - - for spec in spec_init: - spec_name = spec.name - num_spec = spec_init[spec] - - species_map = self.get_species_map() - specindx = species_map[spec_name] - self.u0[specindx, voxel] = (self.u0[specindx,voxel] if add else 0) + num_spec - - def create_system_matrix(self): - """ Create the system (diffusion) matrix for input to the URDME solvers. The matrix - is built by concatenating the individually assembled matrices for each of the species, - and multiplying with the lumped mass matrix (which define the volume of the voxels). - Negative off-diagonal elements in the matrix are set to zero, and the diagonal is renormalized - in order to assure that the returned matrix is a Markov transition matrix. - Returns a dictionary containing the volumes of the subvolumes, the system diffusion matrix - and the fraction of the mass of the negative off-diagonal elements that has been filtered out. 
- """ - - import time - - # Check if the individual stiffness and mass matrices (per species) have been assembled, otherwise assemble them. - if self.stiffness_matrices is not None and self.mass_matrices is not None: - stiffness_matrices = self.stiffness_matrices - mass_matrices = self.mass_matrices - else: - if self.mesh is None: - raise ModelException("This model has no mesh, can not create system matrix.") - matrices = self.assemble() - self.stiffness_matrices = matrices['K'] - self.mass_matrices = matrices['M'] - stiffness_matrices = self.stiffness_matrices - mass_matrices = self.mass_matrices - - # Make a dok matrix of dimension (Ndofs,Ndofs) for easier manipulation - i = 1 - Mspecies = len(self.listOfSpecies) - if Mspecies == 0: - raise ModelException("The model has no species, can not create system matrix.") - - # Use dolfin 'dof' number of voxels, not the number of verticies - Nvoxels = self.mesh.get_num_dof_voxels() - Ndofs = Nvoxels*Mspecies - - # Create the volume vector by lumping the mass matrices - vol = numpy.zeros((Ndofs, 1)) - spec = 0 - - xmesh = self.xmesh - - for species, M in mass_matrices.iteritems(): - - rows, cols, vals = dolfin.as_backend_type(M).data() - SM = scipy.sparse.csr_matrix((vals, cols, rows)) - vols = SM.sum(axis=1) - - spec = self.species_map[species] - for j in range(len(vols)): - vx = j - dof = Mspecies*vx+spec - vol[dof, 0] = vols[j] - - # This is necessary in order for the array to have the right dimension (Ndofs,1) - vol = vol.flatten() - - # Assemble one big matrix from the individual stiffness matrices. Multiply by the inverse of - # the lumped mass matrix, filter out any entries with the wrong sign and renormalize the columns. - spec = 0 - positive_mass = 0.0 - total_mass = 0.0 - - - sd = self.get_subdomain_vector() - sd_vec_dof = numpy.zeros(self.mesh.get_num_dof_voxels()) - vertex_to_dof = dolfin.vertex_to_dof_map(self.mesh.get_function_space()) - for ndx, sd_val in enumerate(sd): - sd_vec_dof[vertex_to_dof[ndx]] = sd_val - sd = sd_vec_dof - - tic = time.time() - # If a volume is zero, we need to set it to 1. - vi = vol+(vol<=0.0) - - S = scipy.sparse.dok_matrix((Ndofs, Ndofs)) - - - keys = [] - values = [] - - for species, K in stiffness_matrices.iteritems(): - - rows, cols, vals = dolfin.as_backend_type(K).data() - - # Filter the matrix: get rid of all elements < 0 (inlcuding the diagonal) - vals *= vals < 0 - Kcrs = scipy.sparse.csr_matrix((vals, cols, rows)) - - sdmap = self.species_to_subdomains[self.listOfSpecies[species]] - - # Filter the matrix: get rid of all elements < 0 (inlcuding the diagonal) - Kdok = Kcrs.todok() - - - for ind, val in Kdok.iteritems(): - - ir = ind[0] - ij = ind[1] - - # Check if this is an edge that the species should diffuse along, - # if not, set the diffusion coefficient along this edge to zero. This is - # equivalent to how boundary species are handled in legacy URDME. - if sd[ir] not in sdmap: - val = 0.0 - - S[Mspecies*ir+spec, Mspecies*ij+spec] = -val/vi[Mspecies*ij+spec] - - spec = spec + 1 - - sumcol = S.tocsr().sum(axis=0) - S.setdiag(-numpy.array(sumcol).flatten()) - - D = S.tocsc() - - if total_mass == 0.0: - return {'vol':vol, 'D':D, 'relative_positive_mass':None} - else: - return {'vol':vol, 'D':D, 'relative_positive_mass':positive_mass/total_mass} - - - def validate(self, urdme_solver_data): - """ Validate the model data structures. - validate should be called prior to writing the model to the solver input file. - The core solvers do very limited error checking of the input. 
- """ - - for spec_name, species in self.listOfSpecies.items(): - if 0 in self.species_to_subdomains[species]: - raise ModelException("Subdomain number 0 is reserved. Please check your model.") - - # Check that all the columns of the system matrix sums to zero (or close to zero). If not, it does - # not define a Markov process and the solvers might segfault or produce erraneous results. - colsum = numpy.abs(urdme_solver_data['D'].sum(axis=0)) - colsum = colsum.flatten() - maxcolsum = numpy.argmax(colsum) - if colsum[0, maxcolsum] > 1e-10: - D = urdme_solver_data["D"] - raise InvalidSystemMatrixException("Invalid diffusion matrix. The sum of the columns does not sum to zero. " + str(maxcolsum) + ' ' + str(colsum[0,maxcolsum]) + "\nThis can be caused by a large difference between the largest and smallest diffusion coefficients.") - - def create_connectivity_matrix(self): - """ Assemble a connectivity matrix in CCS format. """ - - fs = self.mesh.get_function_space() - trial_function = dolfin.TrialFunction(fs) - test_function = dolfin.TestFunction(fs) - a_K = -1*dolfin.inner(dolfin.nabla_grad(trial_function), dolfin.nabla_grad(test_function)) * dolfin.dx - K = dolfin.assemble(a_K) - rows, cols, vals = dolfin.as_backend_type(K).data() - Kcrs = scipy.sparse.csc_matrix((vals, cols, rows)) - return Kcrs - - def get_solver_datastructure(self): - """ Return the datastructures needed by the URDME solvers. - get_solver_datastructure() creates and populates a dictionary, urdme_solver_data, - containing the mandatory input data structures of the core NSM solver in URDME. - Those data structures are - D - the Diffusion matrix - N - the stochiometry matrix - G - the dependency graph - vol - the volume vector - sd - the subdomain vector - data - the data vector - u0 - the intial condition - The follwing data is also returned, unlike in the legacy URDME interface: - p - the vertex coordinates - K - a (Nvoxel x Nvoxel) connectivity matrix - """ - - urdme_solver_data = {} - num_species = self.get_num_species() - - # Stoichimetric matrix - N = self.create_stoichiometric_matrix() - urdme_solver_data['N'] = N - # Dependency Graph - G = self.create_dependency_graph() - urdme_solver_data['G'] = G - - # Volume vector - result = self.create_system_matrix() - vol = result['vol'] - urdme_solver_data['dofvolumes'] = vol - - #TODO: Make use of all dofs values, requires modification of the core URDME solver. - self.dofvol = vol[::len(self.listOfSpecies)] - urdme_solver_data['vol'] = self.dofvol - - D = result['D'] - urdme_solver_data['D'] = D - - num_dofvox = self.dofvol.shape[0] - - # Get vertex to dof ordering - vertex_to_dof = dolfin.vertex_to_dof_map(self.mesh.get_function_space()) - dof_to_vertex = dolfin.dof_to_vertex_map(self.mesh.get_function_space()) - - vertex_to_dof_to_vertex = dof_to_vertex[vertex_to_dof] - - # Subdomain vector - # convert to dof ordering - sd_vec_dof = numpy.zeros(num_dofvox) - for ndx, sd_val in enumerate(self.get_subdomain_vector()): - sd_vec_dof[vertex_to_dof[ndx]] = sd_val - urdme_solver_data['sd'] = sd_vec_dof - - # Data vector. If not present in model, it defaults to a vector with all elements zero. 
- # convert to dof ordering - data = numpy.zeros((1, num_dofvox)) - if len(self.listOfDataFunctions) > 0: - data = numpy.zeros((len(self.listOfDataFunctions), num_dofvox)) - coords = self.mesh.coordinates() - for ndf, df in enumerate(self.listOfDataFunctions): - for ndx in range(len(coords)): - vox_coords = numpy.zeros(3) - for cndx in range(len(coords[ndx])): - vox_coords[cndx] = coords[ndx][cndx] - data[ndf][vertex_to_dof[ndx]] = df.map(vox_coords) - - urdme_solver_data['data'] = data - - if not hasattr(self,'u0'): - self.initialize_initial_condition() - - # Initial Conditions, convert to dof ordering - u0_dof = numpy.zeros((num_species, num_dofvox)) - for vox_ndx in range(self.mesh.get_num_voxels()): - dof_ndx = vertex_to_dof[vox_ndx] - # With periodic BCs the same dof_ndx voxel will get written to twice - # which may overwrite the value. We need to check for this case. - if vertex_to_dof_to_vertex[vox_ndx] != vox_ndx: - vox_ndx2 = vertex_to_dof_to_vertex[vox_ndx] - for cndx in range(num_species): - if self.u0[cndx, vox_ndx] == 0 or self.u0[cndx, vox_ndx] == self.u0[cndx, vox_ndx2]: - u0_dof[cndx, dof_ndx] = self.u0[cndx, vox_ndx] - elif self.u0[cndx, vox_ndx2] == 0 and vox_ndx < vox_ndx2: - self.u0[cndx, vox_ndx2] = self.u0[cndx, vox_ndx] - elif self.u0[cndx, vox_ndx2] == 0 and vox_ndx > vox_ndx2: - u0_dof[cndx, dof_ndx] = self.u0[cndx, vox_ndx] - else: - sys.stderr.write("Warning: the initial condition for species {0} in voxel {1} will be discarded due to periodic boundary conditions.\n".format(self.listOfSpecies.keys()[cndx], vox_ndx)) - else: - for cndx in range(num_species): - u0_dof[cndx, dof_ndx] = self.u0[cndx, vox_ndx] - urdme_solver_data['u0'] = u0_dof - - tspan = numpy.asarray(self.tspan, dtype=numpy.float) - urdme_solver_data['tspan'] = tspan - - # Vertex coordinates - # convert to dof ordering - p_dof = numpy.zeros((num_dofvox, 3)) - for vox_ndx, row in enumerate(self.mesh.get_voxels()): - p_dof[vertex_to_dof[vox_ndx], :len(row)] = row - urdme_solver_data['p'] = p_dof - - # Connectivity matrix - urdme_solver_data['K'] = self.create_connectivity_matrix() - - urdme_solver_data['report']=0 - - return urdme_solver_data - - - def assemble(self): - """ Assemble the mass and stiffness matrices using Dolfin. - Returns: A dictionary containing two dictionaries, one for the stiffness matrices - and one for the mass matrices. Those dictionaries has the species names as keys and - the matrices are in CSR format. 
- """ - - if self.xmesh == None: - self.create_extended_mesh() - - self._initialize_species_to_subdomains() - - function_space = self.xmesh.function_space - trial_functions = OrderedDict() - test_functions = OrderedDict() - stiffness_matrices = OrderedDict() - mass_matrices = OrderedDict() - - # The maximum dimension that a species is active on (currently not used) - maxdim = 1 - for spec in self.listOfSpecies: - dim = self.listOfSpecies[spec].dim() - if dim > maxdim: - maxdim = dim - - for spec in self.listOfSpecies: - trial_functions[spec] = dolfin.TrialFunction(function_space[spec]) - test_functions[spec] = dolfin.TestFunction(function_space[spec]) - - - weak_form_K = {} - weak_form_M = {} - - ndofs = None - - # Set up the forms - for spec_name, species in self.listOfSpecies.items(): - - # Find out what subdomains this species is active on - weak_form_K[spec_name] = dolfin.inner(dolfin.nabla_grad(trial_functions[spec_name]), dolfin.nabla_grad(test_functions[spec_name]))*dolfin.dx - weak_form_M[spec_name] = trial_functions[spec_name]*test_functions[spec_name]*dolfin.dx - - # Assemble the matrices - for spec_name, species in self.listOfSpecies.items(): - stiffness_matrices[spec_name] = dolfin.assemble(weak_form_K[spec_name]) - if ndofs is None: - ndofs = stiffness_matrices[spec_name].size(0) - self.mesh.set_num_dof_voxels(ndofs) - - # We cannot include the diffusion constant in the assembly, dolfin does not seem to deal well - # with small diffusion constants (it drops small elements), so we multiply here. - stiffness_matrices[spec_name] = species.diffusion_constant * stiffness_matrices[spec_name] - mass_matrices[spec_name] = dolfin.assemble(weak_form_M[spec_name]) - - - return {'K':stiffness_matrices, 'M':mass_matrices} - - def run(self, number_of_trajectories=1, solver='nsm', seed=None, report_level=0): - """ Simulate the model. - Args: - solver: A str or class type that is a subclass of URDMESolver. Default: NSM solver. - number_of_trajectories: How many trajectories should be run. - seed: An int, the random seed given to the solver. - report_level: An int, Level of output from the solver: 0, 1, or 2. Default: 0. - Returns: - A URDMEResult object with the results of the simulation. - """ - - #If solver is a subclass of URDMESolver, use it directly. 
- if isinstance(solver, (type, types.ClassType)) and issubclass(solver, URDMESolver): - sol = solver(self, report_level=report_level) - elif type(solver) is str: - if solver == 'nsm': - from nsmsolver import NSMSolver - sol = NSMSolver(self, report_level=report_level) - else: - raise URDMEError("Unknown solver: {0}".format(solver_name)) - else: - raise URDMEError("solver argument to urdme() must be a string or a URDMESolver class object.") - - return sol.run(number_of_trajectories=number_of_trajectories, seed=seed) - - diff --git a/spatialpy/Subdomain.py b/spatialpy/Subdomain.py new file mode 100644 index 00000000..55f9e5da --- /dev/null +++ b/spatialpy/Subdomain.py @@ -0,0 +1,14 @@ + + +class SubDomain: + """ SubDomain class provides a method for taging parts of the spatial domain as seperate parts""" + + def __init__(self): + pass + + + def inside(self, x, on_boundary): + raise Exception("Subclasses of spatialpy.SubDomain must implement the inside() metho") + + + diff --git a/spatialpy/_init_.py b/spatialpy/__init__.py similarity index 62% rename from spatialpy/_init_.py rename to spatialpy/__init__.py index 57720680..4861c6ee 100644 --- a/spatialpy/_init_.py +++ b/spatialpy/__init__.py @@ -1,10 +1,14 @@ #__all__=['model','spatialpy'] -from .spatialpy import * +#from .spatialpy import * import sys if (sys.version_info < (3,0)): raise Exception("SpatialPy only works in Python 3.0 and higher") +from spatialpy.Model import * +from spatialpy.Subdomain import * +from spatialpy.Mesh import * +from spatialpy.DataFunction import * #from spatialpy.core import * #import spatialpy.sbml diff --git a/spatialpy/adfsp_solver.py b/spatialpy/adfsp_solver.py deleted file mode 100644 index 169503dc..00000000 --- a/spatialpy/adfsp_solver.py +++ /dev/null @@ -1,14 +0,0 @@ -""" ADFSP solver. """ -import os -import pyurdme - -class ADFSPSolver(pyurdme.URDMESolver): - """ ADFSP solver class. """ - NAME = 'adaptive_dfsp' - - def __init__(self, model, solver_path=None, report_level=0, tau_d=-1, error_tolarance=1e-03, max_jump=-1): - if solver_path is None or solver_path == "": - solver_path = os.path.dirname(os.path.abspath(__file__))+"/urdme" - pyurdme.URDMESolver.__init__(self, model, solver_path=solver_path, report_level=report_level) - self.sopts = [tau_d, max_jump, error_tolarance] - diff --git a/spatialpy/data/propensity_file_template.c b/spatialpy/data/propensity_file_template.c deleted file mode 100644 index 10ea8d42..00000000 --- a/spatialpy/data/propensity_file_template.c +++ /dev/null @@ -1,37 +0,0 @@ -/* URDME model file, automatically generated by pyURDME. */ -#include -#include -#include "propensities.h" -#include - -/* Species names */ -__DEFINE_SPECIES__ - -/* Number of reactions */ -#define NR __NUMBER_OF_REACTIONS__ // Depricated, will remove in future version. -#define NUM_REACTIONS __NUMBER_OF_REACTIONS__ -#define NUM_SPECIES __NUMBER_OF_SPECIES__ -#define NUM_VOXELS __NUMBER_OF_VOXELS__ - -/* Define DataFunctions */ -__DEFINE_DATA_FUNCTIONS__ - -/* Parameter definitions */ -__DEFINE_PARAMETERS__ - -/* Reaction definitions */ -__DEFINE_REACTIONS__ - -PropensityFun *ALLOC_propensities(void) -{ - PropensityFun *ptr = (PropensityFun *)malloc(sizeof(PropensityFun)*NUM_REACTIONS); - -__DEFINE_PROPFUNS__ - return ptr; -} - -void FREE_propensities(PropensityFun* ptr) -{ - free(ptr); -} - diff --git a/spatialpy/nsmsolver.py b/spatialpy/nsmsolver.py index 0e107a9d..f1c53917 100644 --- a/spatialpy/nsmsolver.py +++ b/spatialpy/nsmsolver.py @@ -1,7 +1,7 @@ """ NSM solver. 
""" #from pyurdme import pyurdme -import pyurdme +import spatialpy -class NSMSolver(pyurdme.URDMESolver): +class NSMSolver(spatialpy.Solver): """ NSM solver class. """ NAME = 'nsm' diff --git a/spatialpy/ssa_sdpd-c-simulation-engine/include/binheap.h b/spatialpy/ssa_sdpd-c-simulation-engine/include/binheap.h new file mode 100644 index 00000000..5fee9e12 --- /dev/null +++ b/spatialpy/ssa_sdpd-c-simulation-engine/include/binheap.h @@ -0,0 +1,17 @@ +/* binheap.h */ + +/* B. Drawert 2010-12-12 */ +/* J. Cullhed 2008-06-18. */ + +#ifndef BINHEAP__H +#define BINHEAP__H + +void initialize_heap(double *data,int *INDEX,int *INDEX2,int N); +void percolate_down(int node,double *data,int *INDEX,int *INDEX2,int size); +void percolate_up(int node,double *data,int *INDEX,int *INDEX2,int N); +void update(int node,double *data,int *INDEX,int *INDEX2,int N); +int test_heap_prty(double *data,int *INDEX,int N); +void print_heap(double *data,int N); +void test_min_prty(double *data,int N); + +#endif /* BINHEAP__H */ diff --git a/spatialpy/ssa_sdpd-c-simulation-engine/include/count_cores.h b/spatialpy/ssa_sdpd-c-simulation-engine/include/count_cores.h new file mode 100644 index 00000000..d17b2a3d --- /dev/null +++ b/spatialpy/ssa_sdpd-c-simulation-engine/include/count_cores.h @@ -0,0 +1,13 @@ +/* ***************************************************************************** +SSA-SDPD simulation engine +Copyright 2018 Brian Drawert (UNCA) + +This program is distributed under the terms of the GNU General Public License. +See the file LICENSE.txt for details. +***************************************************************************** */ +#ifndef count_cores_h +#define count_cores_h + +int get_num_processors(); + +#endif //count_cores_h diff --git a/spatialpy/ssa_sdpd-c-simulation-engine/include/linked_list.h b/spatialpy/ssa_sdpd-c-simulation-engine/include/linked_list.h new file mode 100644 index 00000000..15cc04c1 --- /dev/null +++ b/spatialpy/ssa_sdpd-c-simulation-engine/include/linked_list.h @@ -0,0 +1,64 @@ +/* ***************************************************************************** +SSA-SDPD simulation engine +Copyright 2018 Brian Drawert (UNCA) + +This program is distributed under the terms of the GNU General Public License. +See the file LICENSE.txt for details. 
+***************************************************************************** */ +#ifndef linked_list_h +#define linked_list_h +#include + +typedef struct linked_list_t linked_list; +typedef struct node_t node; +#include "particle.h" + +// Create data structure for a node of the list +struct node_t { + particle_t* data; + node* next; + node* prev; +}; + +// Data structre for the linked list type + +struct linked_list_t { + node* head; + node* tail; + int count; + pthread_mutex_t mutex; +}; + +// Functions to manipulate the linked list + +//constructor +linked_list* create_linked_list(); + +void empty_linked_list( linked_list*ll); + +// destructor +void destroy_linked_list( linked_list* ll ); + +// add a new node to the end of the linked list +node* linked_list_add( linked_list* ll, particle_t* data_in); + +// Delete a node from the linked list +void linked_list_delete( linked_list* ll, node* to_delete); + +// search for a node by it's data field +//node* linked_list_search( linked_list* ll, char* search_string ); + +// get node by index +node* linked_list_get( linked_list* ll, int index); + +// get first node on list and remove it from list +node * linked_list_pop( linked_list * ll); + +// free a node +void free_node(node*n); + +// in-place bubble sort +void linked_list_sort(linked_list*ll, int sort_ndx); + + +#endif /* linked_list_h*/ diff --git a/spatialpy/ssa_sdpd-c-simulation-engine/include/output.h b/spatialpy/ssa_sdpd-c-simulation-engine/include/output.h new file mode 100644 index 00000000..a2c512e5 --- /dev/null +++ b/spatialpy/ssa_sdpd-c-simulation-engine/include/output.h @@ -0,0 +1,20 @@ +/* ***************************************************************************** +SSA-SDPD simulation engine +Copyright 2018 Brian Drawert (UNCA) + +This program is distributed under the terms of the GNU General Public License. +See the file LICENSE.txt for details. +***************************************************************************** */ +#ifndef output_h +#define output_h +#include "particle.h" + + + +void output_csv(system_t*system, int current_step); + +void output_vtk__sync_step(system_t*system, int current_step); +void output_vtk__async_step(); + +#endif // output_h + diff --git a/spatialpy/ssa_sdpd-c-simulation-engine/include/particle.h b/spatialpy/ssa_sdpd-c-simulation-engine/include/particle.h new file mode 100644 index 00000000..c03f4a78 --- /dev/null +++ b/spatialpy/ssa_sdpd-c-simulation-engine/include/particle.h @@ -0,0 +1,62 @@ +/* ***************************************************************************** +SSA-SDPD simulation engine +Copyright 2018 Brian Drawert (UNCA) + +This program is distributed under the terms of the GNU General Public License. +See the file LICENSE.txt for details. 
+***************************************************************************** */ +#ifndef particle_h +#define particle_h + +typedef struct __particle_t particle_t; +typedef struct __system_t system_t; +#include "linked_list.h" +#include "simulate_rdme.h" + +struct __particle_t { + unsigned int id; + double x[3]; + double v[3]; + int type; + double mass; + double rho; + double nu; + int solidTag; + // below here for simulation + node* x_index; + linked_list*neighbors; +}; + +struct __system_t { + int dimension; + double dt; + unsigned int nt; + unsigned int current_step; + unsigned int output_freq; + double xlo, xhi, ylo, yhi, zlo, zhi; + double h; + double c0; + double rho0; + double P0; + linked_list* particle_list; + linked_list* x_index; + char boundary_conditions[3]; + rdme_t*rdme; + int static_domain; +}; + + +linked_list* find_neighbors(particle_t* me, system_t* system); +system_t* create_system(); +particle_t* create_particle(int id); +void add_particle(particle_t* me, system_t* system); +double particle_dist(particle_t* p1, particle_t*p2); +double particle_dist_sqrd(particle_t* p1, particle_t*p2); + + +// Global flags +extern int debug_flag; + + +#endif //particle_h + diff --git a/spatialpy/ssa_sdpd-c-simulation-engine/include/propensities.h b/spatialpy/ssa_sdpd-c-simulation-engine/include/propensities.h new file mode 100644 index 00000000..e4148566 --- /dev/null +++ b/spatialpy/ssa_sdpd-c-simulation-engine/include/propensities.h @@ -0,0 +1,28 @@ +/* propensities.h */ + +/* B. Drawert 2019-06-04 */ +/* J. Cullhed 2008-06-18. */ +/* A. Hellander 2010-01-10. */ +/* A. Hellander 2012-06-05 (rev). */ +/* B. Drawert 2012-09-08 */ + + +#ifndef PROPENSITIES__H +#define PROPENSITIES__H + +/* Global variable that can be used to pass parameters to the propensity functions. */ +extern double *parameters; + +/* Definition of the propensity function. */ +// double rfun(const int *x, double t, const double vol, const double *data, int sd, int voxel, int *xx, const size_t *irK, const size_t *jcK, const double *prK) +//typedef double (*PropensityFun)(const int *, double, double, const double *, int, int, int *, const size_t *, const size_t *, const double *); +// double rfun(const int *x, double t, const double vol, const double *data, int sd) +typedef double (*PropensityFun)(const unsigned int *, double, double, const double *, int); + +/* Declaration of allocation and deallocation of propensity list. */ +PropensityFun *ALLOC_propensities(void); +void FREE_propensities(PropensityFun* ptr); + + +#endif +/* PROPENSITIES__H */ diff --git a/spatialpy/ssa_sdpd-c-simulation-engine/include/simulate.h b/spatialpy/ssa_sdpd-c-simulation-engine/include/simulate.h new file mode 100644 index 00000000..1f9b473c --- /dev/null +++ b/spatialpy/ssa_sdpd-c-simulation-engine/include/simulate.h @@ -0,0 +1,14 @@ +/* ***************************************************************************** +SSA-SDPD simulation engine +Copyright 2018 Brian Drawert (UNCA) + +This program is distributed under the terms of the GNU General Public License. +See the file LICENSE.txt for details. 
+***************************************************************************** */ +#ifndef simulate_h +#define simulate_h +#include "particle.h" + +void run_simulation(int num_threads, system_t* system); + +#endif // simulate_h diff --git a/spatialpy/ssa_sdpd-c-simulation-engine/include/simulate_rdme.h b/spatialpy/ssa_sdpd-c-simulation-engine/include/simulate_rdme.h new file mode 100644 index 00000000..a533dbc7 --- /dev/null +++ b/spatialpy/ssa_sdpd-c-simulation-engine/include/simulate_rdme.h @@ -0,0 +1,89 @@ +/* ***************************************************************************** +SSA-SDPD simulation engine +Copyright 2018 Brian Drawert (UNCA) + +This program is distributed under the terms of the GNU General Public License. +See the file LICENSE.txt for details. +***************************************************************************** */ +#ifndef simulate_rdme_h +#define simulate_rdme_h +#include "particle.h" +#include "propensities.h" + + +typedef struct __rdme_data_t rdme_t; + +struct __rdme_data_t { + size_t *irD; + size_t *jcD; + double *prD; + const size_t *irN; + const size_t *jcN; + const int *prN; + const size_t *irG; + const size_t *jcG; + double *vol; + int *sd; + const double *data; + int num_subdomains; + const double*subdomain_diffusion_matrix; + size_t dsize; + size_t Ncells; + size_t Mspecies; + size_t Mreactions; + size_t Ndofs; + unsigned int *xx; + int initialized; + PropensityFun *rfun; + double *srrate; + double *rrate; + double *sdrate; + double *Ddiag; + double *rtimes; + int *node; + int *heap; + long int total_reactions; + long int total_diffusion; + char** species_names; +}; + + + +void initialize_rdme(system_t*system, const int Ncells, const int Mspecies, + const int Mreactions, const double*vol, const int*sd, + const double*data, size_t dsize, + size_t *irN, size_t *jcN,int *prN,size_t *irG,size_t *jcG, + const char* const species_names[], const unsigned int*u0, + const int num_subdomains, const double*subdomain_diffusion_matrix); +void simulate_rdme(system_t*system, unsigned int step); +void destroy_rdme(system_t*system); + + +/******************************************************************/ + + + +rdme_t* nsm_core__create(system_t*system, const int Ncells, const int Mspecies, + const int Mreactions, const double*vol, const int*sd, + const double*data, size_t dsize, + size_t *irN, size_t *jcN,int *prN,size_t *irG,size_t *jcG, + const char* const species_names[], + const int num_subdomains, const double*subdomain_diffusion_matrix); +void nsm_core__destroy(rdme_t*rdme); + +void nsm_core__initialize_chem_populations(rdme_t* rdme, const unsigned int*u0); + +void nsm_core__initialize_rxn_propensities(rdme_t* rdme); +void nsm_core__initialize_diff_propensities(rdme_t* rdme); +void nsm_core__initialize_heap(rdme_t* rdme); + + +void nsm_core__build_diffusion_matrix(rdme_t*rdme,system_t*system); +void nsm_core__destroy_diffusion_matrix(rdme_t*rdme); + +void nsm_core__take_step(rdme_t* rdme, double current_time, double step_size); + + + +#endif /* simulate_rdme_h */ + diff --git a/spatialpy/ssa_sdpd-c-simulation-engine/propensity_file_template.c b/spatialpy/ssa_sdpd-c-simulation-engine/propensity_file_template.c new file mode 100644 index 00000000..b88c3520 --- /dev/null +++ b/spatialpy/ssa_sdpd-c-simulation-engine/propensity_file_template.c @@ -0,0 +1,109 @@ +/* SSA-SDPD model file, automatically generated by SpatialPy */ +#include +#include +#include +#include +#include +#include +#include +#include +#include "count_cores.h" +#include "particle.h" 
+#include "propensities.h" +#include "simulate.h" + +/* Species names */ +__DEFINE_SPECIES__ + +/* Number of reactions */ +#define NR __NUMBER_OF_REACTIONS__ // Depricated, will remove in future version. +#define NUM_REACTIONS __NUMBER_OF_REACTIONS__ +#define NUM_SPECIES __NUMBER_OF_SPECIES__ +#define NUM_VOXELS __NUMBER_OF_VOXELS__ + +/* Define DataFunctions */ +__DEFINE_DATA_FUNCTIONS__ + +/* Parameter definitions */ +__DEFINE_PARAMETERS__ + +/* Reaction definitions */ +__DEFINE_REACTIONS__ + +PropensityFun *ALLOC_propensities(void) +{ + PropensityFun *ptr = (PropensityFun *)malloc(sizeof(PropensityFun)*NUM_REACTIONS); + +__DEFINE_PROPFUNS__ + return ptr; +} + +void FREE_propensities(PropensityFun* ptr) +{ + free(ptr); +} + + +__INPUT_CONSTANTS__ + +int debug_flag; + +void init_create_particle(system_t* sys, int id, double x, double y, double z, int type){ + particle_t* p = create_particle(id); + p->x[0] = x; + p->x[1] = y; + p->x[2] = z; + p->id = id; + p->type = type; + p->rho = sys->rho0; + p->mass = input_vol[id]; + add_particle(p, sys); +} + + +int init_all_particles(system_t* sys){ + int id=0; + __INIT_PARTICLES__ + return id; +} + + +int main(int argc, char**argv){ + debug_flag = 0; + system_t* system = create_system(); + // Fix particles in space + system->static_domain = 1; + //CONFIG = + //system->dt = 1; + //system->nt = 101; + //system->output_freq = 1; + //system->h = 0.5; + //system->rho0 = 1.0; + //system->c0 = 10; + //system->P0 = 10; + // bounding box + //system->xlo = -5.1; + //system->xhi = 5.1; + //system->ylo = -1.1; + //system->yhi = 1.1; + //system->zlo = -1.1; + //system->zhi = 1.1; + __SYSTEM_CONFIG__ + // create all particles in system + init_all_particles(system); + // Setup chemical reaction system + initialize_rdme(system, NUM_VOXELS, NUM_SPECIES, NUM_REACTIONS, input_vol, input_sd, + input_data, input_dsize, input_irN, input_jcN, input_prN, input_irG, + input_jcG, input_species_names, input_u0, input_num_subdomain, + input_subdomain_diffusion_matrix); + + if(argc>1){ + srand48(atol(argv[1])); + }else{ + srand48((long int)time(NULL)+(long int)(1e9*clock())); + } + int num_threads = get_num_processors(); + run_simulation(num_threads, system); + exit(0); +} + diff --git a/spatialpy/ssa_sdpd-c-simulation-engine/src/binheap.c b/spatialpy/ssa_sdpd-c-simulation-engine/src/binheap.c new file mode 100644 index 00000000..2c378dc8 --- /dev/null +++ b/spatialpy/ssa_sdpd-c-simulation-engine/src/binheap.c @@ -0,0 +1,122 @@ +/* binheap.c */ + +/* B. Drawert 2010-12-12 */ +/* J. Cullhed 2008-08-04. */ + +/* Binary heap needed by the NRM and NSM. */ + +#include +#include +#include +#include "binheap.h" + + +/* This is an indexed heap. If node is the index of an element in + "data", the place in the heap is given by INDEX[node]. The place in + the "data" held by node n in the heap is given by INDEX2[INDEX[n]]. +*/ + +void initialize_heap(double *data,int *INDEX,int *INDEX2,int N) +/*** ? ***/ +{ + int i; + for (i=(N-1)>>1; i>=0; i--) + percolate_down(i,data,INDEX,INDEX2,N); +} + +void percolate_down(int n1,double *data,int *INDEX,int *INDEX2,int N) +/*** ? ***/ +{ + int child; + int node=n1; + double key=data[node]; + int j=INDEX[node]; + int rxn; + + while ((child = (node<<1)+1)>1; + + if(key0); + + data[node]=key; + INDEX[node]=j; + INDEX2[j]=node; +} + +void update(int node,double *data,int *INDEX,int *INDEX2,int N) +/*** ? 
***/ +{ + int parent=(node-1)>>1; + + if(node>0 && data[node]data[2*i+1] || data[i]>data[2*i+2]) + return -1; + return 0; +} + +void print_heap(double *data,int N) +{ + int i; + printf("\n"); + for(i=0; i<(N-1)/2; i++) + printf("%.10f %.10f %.10f\n",data[i],data[2*i+1],data[2*i+2]); +} + +void test_min_prty(double *data,int N) +{ + int i; + double min=data[0]; + for(i=1;i +#elif MACOS +#include +#include +#else +#include +#endif + +// Credit: Dirk-Jan Kroon +// Ref: https://www.cprogramming.com/snippets/source-code/find-the-number-of-cpu-cores-for-windows-mac-or-linux + +int get_num_processors() { +#ifdef WIN32 + SYSTEM_INFO sysinfo; + GetSystemInfo(&sysinfo); + return sysinfo.dwNumberOfProcessors; +#elif MACOS + int nm[2]; + size_t len = 4; + uint32_t count; + + nm[0] = CTL_HW; nm[1] = HW_AVAILCPU; + sysctl(nm, 2, &count, &len, NULL, 0); + + if(count < 1) { + nm[1] = HW_NCPU; + sysctl(nm, 2, &count, &len, NULL, 0); + if(count < 1) { count = 1; } + } + return count; +#else + return sysconf(_SC_NPROCESSORS_ONLN); +#endif +} diff --git a/spatialpy/ssa_sdpd-c-simulation-engine/src/linked_list.c b/spatialpy/ssa_sdpd-c-simulation-engine/src/linked_list.c new file mode 100644 index 00000000..dcef6b10 --- /dev/null +++ b/spatialpy/ssa_sdpd-c-simulation-engine/src/linked_list.c @@ -0,0 +1,182 @@ +/* ***************************************************************************** +SSA-SDPD simulation engine +Copyright 2018 Brian Drawert (UNCA) + +This program is distributed under the terms of the GNU General Public License. +See the file LICENSE.txt for details. +***************************************************************************** */ +#include // for strcmp and strcpy +#include // for malloc and free +#include // for printf +#include "linked_list.h" +#include "particle.h" + + +//constructor +linked_list* create_linked_list(){ + linked_list* ll = (linked_list*) malloc( sizeof(linked_list)); + ll->count = 0; + ll->head = NULL; + ll->tail = NULL; + return ll; +} + +// destructor +void destroy_linked_list( linked_list* ll ){ + // empty the linked list + empty_linked_list(ll); + // un-allocate the memory + free(ll); +} + +void empty_linked_list( linked_list*ll){ + while( ll->count > 0){ + linked_list_delete( ll, ll->head ); + } +} + +// add a new node to the end of the linked list +node* linked_list_add( linked_list* ll, particle_t* data_in){ + node* n = (node *) malloc( sizeof(node) ); + n->data = data_in; + n->next = NULL; + n->prev = NULL; + // Traverse the list to find the end node + if(ll->head == NULL){ + ll->head = n; + ll->tail = n; + }else{ + ll->tail->next = n; + n->prev = ll->tail; + ll->tail = n; + } + // increase the size of the list + ll->count++; + return n; +} + +// Delete a node from the linked list +void linked_list_delete( linked_list* ll, node* to_delete){ + node* prev_node; + if( ll->head == NULL){ + printf("Error, linked_list_delete() empty list\n"); + return; + }else if( to_delete == ll->head ){ + ll->head = ll->head->next; + }else{ + for( prev_node=ll->head; prev_node->next!=NULL; prev_node=prev_node->next ){ + if(prev_node->next == to_delete){ + break; + } + } + if( prev_node->next == NULL){ + printf("Error, linked_list_delete(), could not find item in list\n"); + return; + } + prev_node->next = to_delete->next; // connect the list + if(prev_node->next != NULL){ prev_node->next->prev = prev_node; } + } + + //free and reduce size + ll->count--; + free_node(to_delete); +} + +// search for a node by it's data field +/* +node* linked_list_search( linked_list* ll, char* 
search_string ){ + node* n; + for( n=ll->head; n != NULL; n = n->next ){ + if( strcmp( n->data, search_string) == 0 ){ + break; + } + } + if( n == NULL){ + return NULL; + } + // success, found the element + return n; +}*/ + +// get node by index +node* linked_list_get( linked_list* ll, int index){ + int count = 0; + node* n = ll->head; + if( ll->head == NULL){ + printf("error, linked_list_get() empty list\n"); + return NULL; + } + while( count < index ){ + if(n->next == NULL){ + printf("Error, linked_list_get() list shorted than %i \n", index); + return NULL; + } + n = n->next; + count++; + } + return n; + +} + +// remove and return first node on list +node * linked_list_pop( linked_list * ll){ + node*n = ll->head; + if( ll->head == NULL){ + return NULL; + } + ll->head = ll->head->next; + ll->head->prev = NULL; + ll->count--; + return n; +} + +void free_node(node*n){ + free(n); +} + +node* linked_list_sort__sub(node* head, int sort_ndx){ + node* min_node = head; + node* before = NULL; + node* ptr; + node* tmp; + if(head->next == NULL){ + return head; + } + for(ptr = head; ptr->next != NULL; ptr = ptr->next){ + if( ptr->next->data->x[sort_ndx] < min_node->data->x[sort_ndx] ){ + min_node = ptr->next; + before = ptr; + } + } + if( min_node != head ){ + tmp = head; + head = min_node; + before->next = min_node->next; + if(min_node->next != NULL){ min_node->next->prev = before;} + head->next = tmp; + tmp->prev = head; + head->prev = NULL; + } + head->next = linked_list_sort__sub(head->next,sort_ndx); + if(head->next != NULL){ + head->next->prev = head; + } + return head; +} + +void linked_list_sort(linked_list*ll, int sort_ndx){ + ll->head = linked_list_sort__sub(ll->head, sort_ndx); +} + + + + + + + + + + + + + diff --git a/spatialpy/ssa_sdpd-c-simulation-engine/src/output.c b/spatialpy/ssa_sdpd-c-simulation-engine/src/output.c new file mode 100644 index 00000000..6e8802e3 --- /dev/null +++ b/spatialpy/ssa_sdpd-c-simulation-engine/src/output.c @@ -0,0 +1,175 @@ +#include "output.h" +#include "particle.h" +#include +#include +#include + + + +void output_csv(system_t*system, int current_step){ + char filename[256]; + node* n; + particle_t* p; + sprintf(filename,"output_%u.csv",current_step); + FILE*fp = fopen(filename,"w+"); + fprintf(fp, "id, x, y, z, vx, vy, vz, type, mass, rho\n"); + for(n=system->particle_list->head; n!=NULL; n=n->next){ + p = n->data; + fprintf(fp,"%u, %lf, %lf, %lf, %lf, %lf, %lf, %i, %lf, %lf\n", + p->id, p->x[0], p->x[1], p->x[2], p->v[0], p->v[1], p->v[2], + p->type, p->mass, p->rho); + } + fclose(fp); +} + + +particle_t*output_buffer; +int output_buffer_size = 0; +int output_buffer_current_step; +int output_buffer_current_num_particles; +unsigned int* output_buffer_xx; +int output_buffer_xx_size = 0; + +void output_vtk__sync_step(system_t*system, int current_step){ + output_buffer_current_step = current_step; + if(output_buffer_size == 0){ + output_buffer_size = system->particle_list->count; + output_buffer = (particle_t*) malloc(sizeof(particle_t)*output_buffer_size); + }else if(output_buffer_size < system->particle_list->count){ + output_buffer = realloc(output_buffer, sizeof(particle_t)*output_buffer_size); + } + int i=0; + node*n; + for(n=system->particle_list->head; n!=NULL; n=n->next){ + memcpy( (void *) &output_buffer[i++], (void *) n->data, sizeof(particle_t) ); + } + output_buffer_current_num_particles = i; + if(system->rdme != NULL){ + // make a copy of the RDME state vector xx + if(output_buffer_xx_size==0){ + output_buffer_xx_size = 
output_buffer_current_num_particles*system->rdme->Mspecies; + output_buffer_xx = (unsigned int*) malloc(sizeof(unsigned int)*output_buffer_xx_size); + }else if(output_buffer_xx_size < output_buffer_current_num_particles*system->rdme->Mspecies){ + output_buffer_xx_size = output_buffer_current_num_particles*system->rdme->Mspecies; + output_buffer_xx = realloc(output_buffer_xx, sizeof(unsigned int)*output_buffer_xx_size); + } + /* + printf("system->rdme->Mspecies*output_buffer_current_num_particles = %i\n",system->rdme->Mspecies*output_buffer_current_num_particles); + printf("output_buffer_current_num_particles = %i\n",output_buffer_current_num_particles); + printf("system->rdme->Mspecies = %i\n",system->rdme->Mspecies); + int i; + printf("xx = [ "); + for(i=0;irdme->Mspecies*output_buffer_current_num_particles;i++){ + printf("%i ",system->rdme->xx[i]); + } + printf("]\n"); + printf("output_buffer_xx = [ "); + for(i=0;irdme->Mspecies*output_buffer_current_num_particles;i++){ + printf("%i ",output_buffer_xx[i]); + } + printf("]\n"); + */ + //memcpy( (void*) &output_buffer_xx, (void*) system->rdme->xx, + // sizeof(unsigned int)*system->rdme->Mspecies*output_buffer_current_num_particles); + for(i=0;irdme->Mspecies*output_buffer_current_num_particles;i++){ + //memcpy( (void*) &output_buffer_xx[i], (void*) system->rdme->xx[i], sizeof(unsigned int)); + output_buffer_xx[i] = system->rdme->xx[i]; + } + } +} +void output_vtk__async_step(system_t*system){ + FILE*fp; + int i; + char filename[256]; + int np = output_buffer_current_num_particles; + if(output_buffer_current_step == 0){ + sprintf(filename, "output0_boundingBox.vtk"); + if(debug_flag){printf("Writing file '%s'\n", filename);} + if((fp = fopen(filename,"w+"))==NULL){ + perror("Can't write 'output0_boundingBox.vtk'");exit(1); + } + fprintf(fp, "# vtk DataFile Version 4.1\n"); + fprintf(fp, "Generated by ssa_sdpd\n"); + fprintf(fp, "ASCII\n"); + fprintf(fp, "DATASET RECTILINEAR_GRID\n"); + fprintf(fp, "DIMENSIONS 2 2 2\n"); + fprintf(fp, "X_COORDINATES 2 double\n"); + fprintf(fp, "%lf %lf\n", system->xlo, system->xhi); + fprintf(fp, "Y_COORDINATES 2 double\n"); + fprintf(fp, "%lf %lf\n", system->ylo, system->yhi); + fprintf(fp, "Z_COORDINATES 2 double\n"); + fprintf(fp, "%lf %lf\n", system->zlo, system->zhi); + fclose(fp); + } + sprintf(filename,"output%u.vtk",output_buffer_current_step); + if(debug_flag){printf("Writing file '%s'\n", filename);} + if((fp = fopen(filename,"w+"))==NULL){ + perror("Can't write output vtk file");exit(1); + } + fprintf(fp, "# vtk DataFile Version 4.1\n"); + fprintf(fp, "Generated by ssa_sdpd\n"); + fprintf(fp, "ASCII\n"); + fprintf(fp, "DATASET POLYDATA\n"); + fprintf(fp, "POINTS %i float\n",np); + for(i=0;irdme != NULL){ + num_fields += system->rdme->Mspecies; + } + fprintf(fp,"FIELD FieldData %i\n",num_fields);// + fprintf(fp,"id 1 %i int\n", np); + for(i=0;irdme != NULL){ + int s; + for(s=0;srdme->Mspecies;s++){ + fprintf(fp,"C[%s] 1 %i int\n", system->rdme->species_names[s], np); + for(i=0;irdme->Mspecies+s] ); + if((i+1)%9==0){ fprintf(fp,"\n"); } + } + fprintf(fp,"\n"); + } + } + + fclose(fp); + +} + diff --git a/spatialpy/ssa_sdpd-c-simulation-engine/src/particle.c b/spatialpy/ssa_sdpd-c-simulation-engine/src/particle.c new file mode 100644 index 00000000..d010491e --- /dev/null +++ b/spatialpy/ssa_sdpd-c-simulation-engine/src/particle.c @@ -0,0 +1,178 @@ +/* ***************************************************************************** +SSA-SDPD simulation engine +Copyright 2018 Brian Drawert (UNCA) + +This 
program is distributed under the terms of the GNU General Public License. +See the file LICENSE.txt for details. +***************************************************************************** */ +#include "linked_list.h" +#include "particle.h" +#include +#include +#include + + +linked_list* find_neighbors(particle_t* me, system_t* system){ + node*n; + //clean out previous neighbors + //printf("find_neighbors.empty_linked_list\n"); + empty_linked_list(me->neighbors); + // search for points forward: (assumes the list is sorted ascending) + //printf("find_neighbors.search forward\n"); + for(n = me->x_index->next; n!=NULL; n=n->next){ + if(n->data->x[0] > (me->x[0] + system->h)) break; //stop searching + if( (n->data->x[1] > (me->x[1] + system->h)) || (n->data->x[1] < (me->x[1] - system->h) ) ) continue; + if( (n->data->x[2] > (me->x[2] + system->h)) || (n->data->x[2] < (me->x[2] - system->h) ) ) continue; + linked_list_add(me->neighbors, n->data); + if(debug_flag){ + printf("find_neighbors(%i) forward found %i dist: %e dx: %e dy: %e dz: %e\n", + me->id,n->data->id, particle_dist(me,n->data), + me->x[0] - n->data->x[0], + me->x[1] - n->data->x[1], + me->x[2] - n->data->x[2]); + } + } + // check for wrap around points x + /*if(n==NULL && system->boundary_conditions[0] == 'p'){ + double max_x = system->h - (me->x[0] - system->xhi); + for(n = system->x_index->head; n!=NULL; n=n->next){ + if(n->data->x[0] > max_x) break; //stop searching + if( (n->data->x[1] > (me->x[1] + system->h)) || (n->data->x[1] < (me->x[1] - system->h) ) ) continue; + if( (n->data->x[2] > (me->x[2] + system->h)) || (n->data->x[2] < (me->x[2] - system->h) ) ) continue; + linked_list_add(me->neighbors, n->data); + if(debug_flag){ + printf("find_neighbors(%i) forward wrap around found %i dist: %e dx: %e dy: %e dz: %e\n", + me->id,n->data->id, particle_dist(me,n->data), + me->x[0] - n->data->x[0], + me->x[1] - n->data->x[1], + me->x[2] - n->data->x[2]); + } + } + } + // check for wrap around points y + if(system->boundary_conditions[1] == 'p' && (me->x[1] + system->h) > system->yhi){ + //double max_y = system->ylo + ((me->x[1] + system->h) - system->yhi); + fprintf(stderr,"periodic boundary in y not supported yet\n"); + exit(1); + //TODO + } + if(system->boundary_conditions[1] == 'p' && (me->x[1] - system->h) < system->ylo){ + fprintf(stderr,"periodic boundary in y not supported yet\n"); + exit(1); + //TODO + } + // check for wrap around points z + if(system->boundary_conditions[2] == 'p' && (me->x[2] + system->h) > system->zhi){ + fprintf(stderr,"periodic boundary in z not supported yet\n"); + exit(1); + //TODO + } + if(system->boundary_conditions[2] == 'p' && (me->x[2] - system->h) < system->zlo){ + fprintf(stderr,"periodic boundary in z not supported yet\n"); + exit(1); + //TODO + } + */ + + //search for points backward + for(n = me->x_index->prev; n!=NULL; n=n->prev){ + //if(n->data->x[0] > (me->x[0] + system->h)) break; //stop searching forward + /*if(me->id == 0){ + printf("find_neighbors(%i) backwards looking at %i dist: %e dx: %e dy: %e dz: %e\n", + me->id,n->data->id, particle_dist(me,n->data), + me->x[0] - n->data->x[0], + me->x[1] - n->data->x[1], + me->x[2] - n->data->x[2]); + }*/ + if(n->data->x[0] < (me->x[0] - system->h)){ + //if(me->id==0){ printf("\tnode x is less than x[me]-h\n");} + break; //stop searching backward + } + if( (n->data->x[1] > (me->x[1] + system->h)) || (n->data->x[1] < (me->x[1] - system->h) ) ){ + //if(me->id==0){ printf("\tnode y is outside\n");} + continue; + } + if( (n->data->x[2] > 
(me->x[2] + system->h)) || (n->data->x[2] < (me->x[2] - system->h) ) ){ + //if(me->id==0){ printf("\tnode z is outside\n");} + continue; + } + linked_list_add(me->neighbors, n->data); + if(debug_flag){ + printf("find_neighbors(%i) backwards found %i dist: %e dx: %e dy: %e dz: %e\n", + me->id,n->data->id, particle_dist(me,n->data), + me->x[0] - n->data->x[0], + me->x[1] - n->data->x[1], + me->x[2] - n->data->x[2]); + } + } + /*if(n==NULL && system->boundary_conditions[0] == 'p'){ + double min_x = system->xhi - (system->h - (system->xlo - me->x[0])); + for(n = system->x_index->tail; n!=NULL; n=n->prev){ + if(n->data->x[0] < min_x) break; //stop searching + if( (n->data->x[1] > (me->x[1] + system->h)) || (n->data->x[1] < (me->x[1] - system->h) ) ) continue; + if( (n->data->x[2] > (me->x[2] + system->h)) || (n->data->x[2] < (me->x[2] - system->h) ) ) continue; + linked_list_add(me->neighbors, n->data); + if(debug_flag){ + printf("find_neighbors(%i) backwards wrap around found %i dist: %e dx: %e dy: %e dz: %e\n", + me->id,n->data->id, particle_dist(me,n->data), + me->x[0] - n->data->x[0], + me->x[1] - n->data->x[1], + me->x[2] - n->data->x[2]); + } + } + }*/ + // return the neighbor list + return me->neighbors; +} + + +system_t* create_system(){ + system_t*s = malloc(sizeof(system_t)); + s->particle_list = create_linked_list(); + s->x_index = create_linked_list(); + //s->y_index = create_linked_list(); + //s->z_index = create_linked_list(); + s->dimension = 3; + s->boundary_conditions[0] = 'n'; + s->boundary_conditions[1] = 'n'; + s->boundary_conditions[2] = 'n'; + s->rdme = NULL; + s->static_domain = 0; + return s; +} + +particle_t* create_particle(int id){ + particle_t* me = malloc(sizeof(particle_t)); + me->id = id; + me->nu = 1e-6; + me->mass = 1; + me->rho = 1; + me->solidTag = 0; + me->x[0] = me->x[1] = me->x[2] = 0.0; + me->v[0] = me->v[1] = me->v[2] = 0.0; + return me; +} + +void add_particle(particle_t* me, system_t* system){ + linked_list_add(system->particle_list, me); + me->x_index = linked_list_add(system->x_index, me); + //me->y_index = linked_list_add(system->y_index, me); + //me->z_index = linked_list_add(system->z_index, me); + me->neighbors = create_linked_list(); +} + +double particle_dist(particle_t* p1, particle_t*p2){ + double a = p1->x[0] - p2->x[0]; + double b = p1->x[1] - p2->x[1]; + double c = p1->x[2] - p2->x[2]; + return sqrt( a*a + b*b + c*c); +} +double particle_dist_sqrd(particle_t* p1, particle_t*p2){ + double a = p1->x[0] - p2->x[0]; + double b = p1->x[1] - p2->x[1]; + double c = p1->x[2] - p2->x[2]; + return ( a*a + b*b + c*c); +} + + + diff --git a/spatialpy/ssa_sdpd-c-simulation-engine/src/simulate.c b/spatialpy/ssa_sdpd-c-simulation-engine/src/simulate.c new file mode 100644 index 00000000..c0024eff --- /dev/null +++ b/spatialpy/ssa_sdpd-c-simulation-engine/src/simulate.c @@ -0,0 +1,235 @@ +#include "linked_list.h" +#include "output.h" +#include "particle.h" +#include "simulate_rdme.h" +#include +#include +#include +#include +#include +#include + + + +void take_step(particle_t* me, system_t*system, unsigned int step){ + if(system->static_domain){ + if(step==0){ + find_neighbors(me,system); + } + return; + } +} + +struct arg { + system_t* system; + int thread_id; + int num_threads; + int num_my_particles; + node*my_first_particle; +}; + +pthread_barrier_t begin_step_barrier; +pthread_barrier_t end_step_barrier; +pthread_barrier_t begin_sort_barrier; +pthread_barrier_t end_sort_barrier; +pthread_barrier_t begin_output_barrier; +pthread_barrier_t 
end_output_barrier; +unsigned int current_step; + +void* output_system_thread(void* targ){ + system_t* system = (system_t*) targ; + while(1){ + pthread_barrier_wait(&begin_output_barrier); + //output_csv(system, current_step); + if(debug_flag) printf("[OUT] start output_vtk__sync_step()\n"); + output_vtk__sync_step(system, current_step); + if(debug_flag) printf("[OUT] done output_vtk__sync_step()\n"); + pthread_barrier_wait(&end_output_barrier); + if(debug_flag) printf("[OUT] start output_vtk__async_step()\n"); + output_vtk__async_step(system); + if(debug_flag) printf("[OUT] done output_vtk__async_step()\n"); + } +} + +struct sarg { + linked_list*ll; + int sort_ndx; +}; + +void* sort_index_thread(void* targ_in){ + struct sarg* targ = (struct sarg*) targ_in; + while(1){ + pthread_barrier_wait(&begin_sort_barrier); + if(debug_flag) printf("[SORT] begin sort\n"); + linked_list_sort(targ->ll,targ->sort_ndx); + if(debug_flag) printf("[SORT] sort complete\n"); + pthread_barrier_wait(&end_sort_barrier); + } +} + +void* run_simulation_thread(void *targ_in){ + struct arg* targ = (struct arg*)targ_in; + system_t* system = targ->system; + unsigned int step; + node*n; + int i; + int count = 0; + for(step=0; step < system->nt; step++){ + count = 0; + // block on the begin barrier + if(debug_flag) printf("[WORKER %i] waiting to begin step %i\n",targ->thread_id,step); + pthread_barrier_wait(&begin_step_barrier); + // each thread will take a step with each of it's particles + n=targ->my_first_particle; + for(i=0; inum_my_particles; i++){ + if(n==NULL) break; + take_step(n->data,system,step); + count++; + n=n->next; + } + // block on the end barrier + if(debug_flag) printf("[WORKER %i] completed step %i, processed %i particles\n",targ->thread_id,step,count); + pthread_barrier_wait(&end_step_barrier); + } //end for(step) + return NULL; +} + +void run_simulation(int num_threads, system_t* system){ + // + int i,j; + int num_particles_per_thread = system->particle_list->count / num_threads; + // start worked threads + struct arg*targs = (struct arg*) malloc(sizeof(struct arg)*num_threads); + pthread_t* thread_handles = (pthread_t*) malloc(sizeof(pthread_t)*num_threads); + // create barrier that unblocks when "num_threads+1" call wait() on it + pthread_barrier_init(&begin_step_barrier, NULL, num_threads+1); + pthread_barrier_init(&end_step_barrier, NULL, num_threads+1); + // create all the worker threads + int num_particles_left = system->particle_list->count; + node*particle_list_ittr = system->particle_list->head; + for (i=0; i < num_threads; i++) { + targs[i].system = system; + targs[i].num_threads = num_threads; + targs[i].thread_id = i; + targs[i].my_first_particle = particle_list_ittr; + if(i==num_threads-1){ + targs[i].num_my_particles = num_particles_left; + }else{ + targs[i].num_my_particles = num_particles_per_thread; + num_particles_left -= num_particles_per_thread; + for(j=0;jnext; + if(particle_list_ittr == NULL){ + printf("ERROR: particle_list_ittr was unexpectly NULL\n"); + exit(1); + } + } + } + if(debug_flag) printf("Creating thread to process %i particles\n", targs[i].num_my_particles); + pthread_create(&thread_handles[i], NULL, run_simulation_thread, &targs[i]); + } + // Create threads to sort indexes + pthread_t* sort_thread_handles = (pthread_t*) malloc(sizeof(pthread_t)*3); + pthread_barrier_init(&begin_sort_barrier, NULL, 2); + pthread_barrier_init(&end_sort_barrier, NULL, 2); + struct sarg sort_args[1]; + sort_args[0].ll = system->x_index; + sort_args[0].sort_ndx = 0; + 
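+    // Note: the sort thread re-orders x_index by x-coordinate at every step so that
+    // find_neighbors() only has to sweep the window of width 2*h around a particle.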
pthread_create(&sort_thread_handles[0], NULL, sort_index_thread, &sort_args[0]); + if(debug_flag) printf("Creating thread to update x-position index\n"); + /*sort_args[1].ll = system->y_index; + sort_args[1].sort_index = 1; + pthread_create(&sort_thread_handles[1], NULL, sort_index_thread, &sort_args[1]); + sort_args[2].ll = system->z_index; + sort_args[2].sort_index = 2; + pthread_create(&sort_thread_handles[2], NULL, sort_index_thread, &sort_args[2]);*/ + + // Create thread to handle output + pthread_t* output_thread_handles = (pthread_t*) malloc(sizeof(pthread_t)*1); + pthread_barrier_init(&begin_output_barrier, NULL, 2); + pthread_barrier_init(&end_output_barrier, NULL, 2); + pthread_create(&output_thread_handles[0], NULL, output_system_thread, system); + if(debug_flag) printf("Creating thread to create output files\n"); + + // Start simulation, coordinate simulation + int step,next_output_step = 0; + for(step=0; step < system->nt; step++){ + // Release the Sort Index threads + if(debug_flag) printf("[%i] Starting the Sort Index threads\n",step); + pthread_barrier_wait(&begin_sort_barrier); + // Output state + if(step >= next_output_step){ + // Release output thread + current_step = step; + if(debug_flag) printf("[%i] Starting the Output threads\n",step); + pthread_barrier_wait(&begin_output_barrier); + next_output_step += system->output_freq; + // Wait until output threads are done + pthread_barrier_wait(&end_output_barrier); + if(debug_flag) printf("[%i] Output threads finished\n",step); + + } + // Wait until Sort Index threads are done + pthread_barrier_wait(&end_sort_barrier); + if(debug_flag) printf("[%i] Sort Index threads finished\n",step); + if(debug_flag && step==0){ + printf("x_index = ["); + node*n; + for(n = system->x_index->head; n!=NULL; n=n->next){ + printf("%e ",n->data->x[0]); + } + printf("]\n"); + printf("x_index_id = ["); + for(n = system->x_index->head; n!=NULL; n=n->next){ + printf("%i ",n->data->id); + } + printf("]\n"); + } + + // Release the worker threads to take step + if(debug_flag) printf("[%i] Starting the Worker threads\n",step); + pthread_barrier_wait(&begin_step_barrier); + // Wait until worker threads are done + pthread_barrier_wait(&end_step_barrier); + if(debug_flag) printf("[%i] Worker threads finished\n",step); + // Solve RDME + if(debug_flag) printf("[%i] starting RDME simulation\n",step); + simulate_rdme(system, step); + if(debug_flag) printf("[%i] Finish RDME simulation\n",step); + + } + // Record final timepoint + current_step = step; + if(debug_flag) printf("[%i] Starting the Output threads\n",step); + pthread_barrier_wait(&begin_output_barrier); + pthread_barrier_wait(&end_output_barrier); + if(debug_flag) printf("[%i] Output threads finished\n",step); + if(debug_flag) printf("[%i] Waiting for Async Output threads\n",step); + pthread_barrier_wait(&begin_output_barrier); // wait for the async + if(debug_flag) printf("[%i] Async Output threads finished\n",step); + + //clean up + if(debug_flag) printf("Cleaning up RDME\n"); + destroy_rdme(system); + + // Kill threads and wait for them to finish + /* + if(debug_flag) printf("Cleaning up threads\n"); + for(i=0;i<3;i++){ + pthread_kill(sort_thread_handles[i], SIGINT); + pthread_join(sort_thread_handles[i], NULL); + } + pthread_kill(output_thread_handles[0], SIGINT); + pthread_join(output_thread_handles[0], NULL); + for (i=0; i < num_threads; i++) { + pthread_kill(thread_handles[i], SIGINT); + pthread_join(thread_handles[i], NULL); + } + */ + // done + if(debug_flag) printf("Simulation 
complete\n");; + return; +} + + + diff --git a/spatialpy/ssa_sdpd-c-simulation-engine/src/simulate_rdme.c b/spatialpy/ssa_sdpd-c-simulation-engine/src/simulate_rdme.c new file mode 100644 index 00000000..0e06d9de --- /dev/null +++ b/spatialpy/ssa_sdpd-c-simulation-engine/src/simulate_rdme.c @@ -0,0 +1,746 @@ +#include "linked_list.h" +#include +#include "output.h" +#include "particle.h" +#include "simulate_rdme.h" +#include +#include +#include +#include +#include +#include +#include +#include + +#include "propensities.h" +#include "binheap.h" + + +/**************************************************************************/ +void initialize_rdme(system_t*system, const int Ncells, const int Mspecies, + const int Mreactions, const double*vol, const int*sd, + const double*data, size_t dsize, + size_t *irN, size_t *jcN,int *prN,size_t *irG,size_t *jcG, + const char* const species_names[], + const unsigned int*u0, + const int num_subdomains, const double*subdomain_diffusion_matrix + ){ + if(debug_flag){printf("*************** initialize_rdme ******************\n");} + rdme_t*rdme = nsm_core__create(system,Ncells,Mspecies,Mreactions,vol,sd,data, + dsize,irN,jcN,prN,irG,jcG,species_names, + num_subdomains, subdomain_diffusion_matrix); + + + nsm_core__initialize_chem_populations(rdme, u0); + + rdme->initialized=0; + + system->rdme = rdme; +} + +/**************************************************************************/ +void simulate_rdme(system_t*system,unsigned int step){ + rdme_t*rdme = system->rdme; + if(!system->static_domain || !rdme->initialized){ + // if the domain is not static, rebuild the diffusion matrix after movement + if(!rdme->initialized){ + if(debug_flag) printf("Building diffusion matrix\n"); + if(debug_flag) printf("\tsm_core__build_diffusion_matrixn"); + nsm_core__build_diffusion_matrix(rdme,system); + rdme->initialized=1; + }else{ + if(debug_flag) printf("Rebuilding diffusion matrix\n"); + if(debug_flag) printf("\tnsm_core__destroy_diffusion_matrix\n"); + nsm_core__destroy_diffusion_matrix(rdme); + if(debug_flag) printf("\tsm_core__build_diffusion_matrixn"); + nsm_core__build_diffusion_matrix(rdme,system); + } + if(debug_flag) printf("\tnsm_core__initialize_rxn_propensities\n"); + nsm_core__initialize_rxn_propensities(rdme); + if(debug_flag) printf("\tnsm_core__initialize_diff_propensities\n"); + nsm_core__initialize_diff_propensities(rdme); + if(debug_flag) printf("\tnsm_core__initialize_heap\n"); + nsm_core__initialize_heap(rdme); + } + if(debug_flag) printf("Simulating RDME for %e seconds\n",system->dt); + nsm_core__take_step(rdme, system->dt*step, system->dt); +} +/**************************************************************************/ +void destroy_rdme(system_t*system){ + if(debug_flag) printf("NSM: total # reacton events %lu\n",system->rdme->total_reactions); + if(debug_flag) printf("NSM: total # diffusion events %lu\n",system->rdme->total_diffusion); + nsm_core__destroy(system->rdme); +} + + + + +//=================================================== +// Adapted from PyURDME's nsmcore.c +//=================================================== + +void print_current_state(int subvol, unsigned int*xx,const size_t Mspecies){ + int i; + printf("Current state in voxel %i:\n",subvol); + for(i=0;iNcells = Ncells; + rdme->Mspecies = Mspecies; + rdme->Mreactions = Mreactions; + rdme->data =data; + rdme->dsize = dsize; + rdme->irN = irN; + rdme->jcN = jcN; + rdme->prN = prN; + rdme->irG = irG; + rdme->jcG = jcG; + rdme->species_names = (char**) species_names; + 
rdme->total_reactions = 0; + rdme->total_diffusion = 0; + rdme->num_subdomains = num_subdomains; + rdme->subdomain_diffusion_matrix = subdomain_diffusion_matrix; + rdme->initialized = 0; + + rdme->Ndofs = rdme->Ncells*rdme->Mspecies; + rdme->rfun = ALLOC_propensities(); + + // create sd vector + rdme->sd = (int*)malloc(rdme->Ncells*sizeof(int)); + rdme->vol = (double*)malloc(rdme->Ncells*sizeof(double)); + + node*n; + particle_t*p; + //int i=0; + for(n=system->particle_list->head; n!=NULL; n=n->next){ + p = n->data; + rdme->sd[p->id] = p->type; + rdme->vol[p->id] = p->mass / p->rho; + } + + + + /* Create reaction rate matrix (Mreactions X Ncells) and total rate + vector. In rrate we store all propensities for chemical rections, + and in srrate the sum of propensities in every subvolume. */ + rdme->rrate = (double *)malloc(rdme->Mreactions*rdme->Ncells*sizeof(double)); + rdme->srrate = (double *)malloc(rdme->Ncells*sizeof(double)); + + //nsm_core__initialize_rxn_propensities(rdme); + + /* Total diffusion rate vector (length Mcells). It will hold + the total diffusion rates in each subvolume. */ + rdme->sdrate = (double *)malloc(rdme->Ncells*sizeof(double)); + /* The diagonal value of the D-matrix is used frequently. For + efficiency, we store the negative of D's diagonal in Ddiag. */ + rdme->Ddiag = (double *)malloc(rdme->Ndofs*sizeof(double)); + + //nsm_core__initialize_diff_propensities(rdme); + + /* Create binary (min)heap. */ + rdme->rtimes = (double *)malloc(rdme->Ncells*sizeof(double)); + rdme->node = (int *)malloc(rdme->Ncells*sizeof(int)); + rdme->heap = (int *)malloc(rdme->Ncells*sizeof(int)); + + //nsm_core__initialize_heap(rdme); + + /* return rdme structure */ + return rdme; +} + +/**************************************************************************/ +void nsm_core__initialize_rxn_propensities(rdme_t* rdme){ + int i,j; + /* Calculate the propensity for every reaction and every + subvolume. Store the sum of the reaction intensities in each + subvolume in srrate. */ + for (i = 0; i < rdme->Ncells; i++) { + rdme->srrate[i] = 0.0; + for (j = 0; j < rdme->Mreactions; j++) { + //rrate[i*Mreactions+j] = + //(*rfun[j])(&xx[i*Mspecies],tt,vol[i],&data[i*dsize],sd[i],i,xx,irK,jcK,prK); + //srrate[i] += rrate[i*Mreactions+j]; + rdme->rrate[i*rdme->Mreactions+j] = (*rdme->rfun[j])(&rdme->xx[i*rdme->Mspecies],0.0,rdme->vol[i],&rdme->data[i*rdme->dsize],rdme->sd[i]); + rdme->srrate[i] += rdme->rrate[i*rdme->Mreactions+j]; + } + } +} + +/**************************************************************************/ +void nsm_core__initialize_diff_propensities(rdme_t* rdme){ + int i,j; + + for (i = 0; i < rdme->Ndofs; i++) { + rdme->Ddiag[i] = 0.0; + for (j = rdme->jcD[i]; j < rdme->jcD[i+1]; j++) + if (rdme->irD[j] == i) rdme->Ddiag[i] = -1*rdme->prD[j]; + } + + /* Calculate the total diffusion rate for each subvolume. */ + for(i = 0; i < rdme->Ncells; i++) { + rdme->sdrate[i] = 0.0; + for(j = 0; j < rdme->Mspecies; j++) + rdme->sdrate[i] += rdme->Ddiag[i*rdme->Mspecies+j]*rdme->xx[i*rdme->Mspecies+j]; + } +} + +/**************************************************************************/ +void nsm_core__initialize_heap(rdme_t* rdme){ + int i; + /* Calculate times to next event (reaction or diffusion) + in each subvolume and initialize heap. 
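Each initial time is drawn as an exponential waiting time with total rate srrate[i]+sdrate[i], as in the Next Subvolume Method.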
*/ + for (i = 0; i < rdme->Ncells; i++) { + rdme->rtimes[i] = -log(1.0-drand48())/(rdme->srrate[i]+rdme->sdrate[i]); + rdme->heap[i] = rdme->node[i] = i; + } + initialize_heap(rdme->rtimes,rdme->node,rdme->heap,rdme->Ncells); +} + +/**************************************************************************/ +void nsm_core__destroy(rdme_t*rdme){ + FREE_propensities(rdme->rfun); + free(rdme->xx); + free(rdme->heap); + free(rdme->node); + free(rdme->rtimes); + free(rdme->Ddiag); + free(rdme->sdrate); + free(rdme->srrate); + free(rdme->rrate); + free(rdme); +} + +/**************************************************************************/ +void nsm_core__initialize_chem_populations(rdme_t* rdme, const unsigned int*u0){ + /* Set xx to the initial state. xx will always hold the current solution. */ + //printf("malloc Ndofs = %li\n",rdme->Ndofs); + rdme->xx = (unsigned int *)malloc(rdme->Ndofs*sizeof(unsigned int)); + memcpy(rdme->xx,u0,rdme->Ndofs*sizeof(unsigned int)); + //printf(" Ndofs = %li\n",rdme->Ndofs); + //printf("xx = [ "); + //int i; + //for(i=0;iNdofs;i++){ + // printf("%u ",rdme->xx[i]); + //} + //printf("]\n"); +} + + +/**************************************************************************/ +void nsm_core__build_diffusion_matrix(rdme_t*rdme,system_t*system){ + //printf("***************** build_diffusion_matrix *****************\n");fflush(stdout); + double off_diag_sum,diff_const,dist2; + node *n,*n2; + particle_t *p1,*p2; + int s_ndx; + double D_i_j; + double ih,ihsq,wfd; + double h = system->h; + + size_t jcD_length = rdme->Ncells + 1; + size_t irD_length = 0; + size_t prD_length = 0; + // find total length of jc & pr arrays: O(n) + for(n=system->particle_list->head; n!=NULL; n=n->next){ + p1 = n->data; + irD_length += (p1->neighbors->count + 1); + //printf("node %i # neighbors %i\n",p1->id,p1->neighbors->count); + // update the volume + rdme->vol[p1->id] = p1->mass / p1->rho; + } + prD_length = irD_length; + //printf("irD_length= %i\n",irD_length);fflush(stdout); + //printf("jcD_length= %i\n",jcD_length);fflush(stdout); + // allocate space for each array + //printf("MALLOC rdme->irD [%i]\n",irD_length*rdme->Mspecies); + rdme->irD = (size_t*) malloc(sizeof(size_t)*irD_length*rdme->Mspecies); + size_t irD_ndx = 0; + //printf("MALLOC rdme->jcD [%i]\n",jcD_length*rdme->Mspecies); + rdme->jcD = (size_t*) malloc(sizeof(size_t)*jcD_length*rdme->Mspecies); + size_t jcD_ndx = 0; + rdme->jcD[jcD_ndx++] = 0; + //printf("MALLOC rdme->prD [%i]\n",prD_length*rdme->Mspecies); + rdme->prD = (double*) malloc(sizeof(double)*prD_length*rdme->Mspecies); + size_t prD_ndx = 0; + // for each particle p, look at each neighbor p2 + for(n=system->particle_list->head; n!=NULL; n=n->next){ + p1 = n->data; + //printf("p1 = %i\n",p1->id);fflush(stdout); + //TODO: the ordering is very inefficient here. Should do all species for a single p2, only + // calculate dist once. Requires reordering. 
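+    // Assembly note: for every particle and species this loop emits one CSC column of
+    // the diffusion matrix D; the off-diagonal entries are the SPH-kernel-weighted
+    // jump rates to each neighbor (Eqs. 13-14 and 28 of Drawert et al. 2019, applied
+    // below) and the diagonal entry is minus their sum, so each column sums to zero.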
+ for(s_ndx=0; s_ndxMspecies; s_ndx++){ + // keep track of the total off diagonal sum + //printf("s_ndx=%i\n",s_ndx);fflush(stdout); + off_diag_sum = 0.0; + for(n2=p1->neighbors->head; n2!=NULL; n2=n2->next){ + p2 = n2->data; + //printf("p2=%i\n",p2->id);fflush(stdout); + diff_const = rdme->subdomain_diffusion_matrix[s_ndx*rdme->num_subdomains + (p2->type-1)]; + // + dist2 = particle_dist_sqrd(p1,p2); + // Eq (13-14), Drawert et al 2019 + ih = 1.0 / h; + ihsq = ih * ih; + wfd = h - sqrt(dist2); + if(wfd <= 0.0){ + continue; // outside support of basis function + } + wfd = -25.066903536973515383e0 * wfd * wfd * ihsq * ihsq * ihsq * ih; //3D + // Eq 28 of Drawert et al 2019, Tartakovsky et. al., 2007, JCP + D_i_j = -2.0*(p1->mass*p2->mass)/(p1->mass+p2->mass)*(p1->rho+p2->rho)/(p1->rho*p2->rho) * dist2 * wfd / (dist2+0.01*h*h); + + if(diff_const > 0.0){ + rdme->irD[irD_ndx++] = p2->id*rdme->Mspecies + s_ndx; + rdme->prD[prD_ndx++] = diff_const * D_i_j; + off_diag_sum += diff_const * D_i_j; + } + } + rdme->irD[irD_ndx++] = p1->id*rdme->Mspecies + s_ndx; + rdme->prD[prD_ndx++] = -1*off_diag_sum; + + rdme->jcD[jcD_ndx++] = prD_ndx; + } + } + if(debug_flag){ + printf("irD_ndx (%li) length rdme->irD (%li)\n",irD_ndx,irD_length*rdme->Mspecies); + printf("jcD_ndx (%li) length rdme->jcD (%li)\n",jcD_ndx,jcD_length*rdme->Mspecies); + printf("prD_ndx (%li) length rdme->prD (%li)\n",prD_ndx,prD_length*rdme->Mspecies); + if( prD_ndx != irD_ndx){ + printf("Assembly: prD_ndx (%zu) != irD_ndx (%zu)\n",prD_ndx,irD_ndx); + } + if( irD_ndx != irD_length*rdme->Mspecies ){ + printf("Assembly: irD_ndx (%zu) != irD_length*Mspecies (%li)\n", irD_ndx, irD_length*rdme->Mspecies); + } + char filename[256]; + time_t seconds; + size_t i; + seconds = time(NULL); + sprintf(filename,"diffusion_matrix_%ld", seconds); + printf("Writing out diffusion matrix to '%s'\n",filename); + FILE*fp = fopen(filename,"w+"); + fprintf(fp, "irD = ["); + for(i=0;i0){ fprintf(fp,",");} + fprintf(fp, "%zu",rdme->irD[i]); + } + fprintf(fp, "]\n"); + fprintf(fp, "jcD = ["); + for(i=0;i0){ fprintf(fp,",");} + fprintf(fp, "%zu",rdme->jcD[i]); + } + fprintf(fp, "]\n"); + fprintf(fp, "prD = ["); + for(i=0;i0){ fprintf(fp,",");} + fprintf(fp, "%e",rdme->prD[i]); + } + fprintf(fp, "]\n"); + fprintf(fp, "D = scipy.sparse.csc_matrix(prD,irD,jcD)\n"); + fclose(fp); + + } + +// int num_subdomains; +// const double*subdomain_diffusion_matrix; + + +} + +/**************************************************************************/ +void nsm_core__destroy_diffusion_matrix(rdme_t*rdme){ + free(rdme->irD); + free(rdme->jcD); + free(rdme->prD); +} + + +/**************************************************************************/ +void nsm_core__take_step(rdme_t* rdme, double current_time, double step_size){ + double tt=current_time; + double end_time = current_time + step_size; + double totrate,cum,rdelta,rrdelta; + int subvol,event,re,spec,errcode = 0; + size_t i,j = 0; + size_t to_node,to_vol = 0; + int dof,col; + double old_rrate = 0.0,old_drate = 0.0; + double rand1,rand2,cum2,old; + + /* Main loop. */ + while(tt <= end_time){ + + /* Get the subvolume in which the next event occurred. + This subvolume is on top of the heap. */ + //told = tt; + tt = rdme->rtimes[0]; + subvol = rdme->node[0]; + //if(debug_flag){printf("nsm: tt=%e subvol=%i\n",tt,subvol);} + /* First check if it is a reaction or a diffusion event. 
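The choice is made by comparing a uniform random number against the reaction fraction srrate[subvol]/totrate.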
*/
+        totrate = rdme->srrate[subvol]+rdme->sdrate[subvol];
+
+        if(totrate <= 0){ // Sanity check: is there any non-zero reaction or diffusion propensity left?
+            totrate = rdme->srrate[subvol]+rdme->sdrate[subvol];
+            if (totrate > 0.0)
+                rdme->rtimes[0] = -log(1.0-drand48())/totrate+tt;
+            else
+                rdme->rtimes[0] = INFINITY;
+            /* Update the heap. */
+            update(0,rdme->rtimes,rdme->node,rdme->heap,rdme->Ncells);
+            // go to the next element
+            continue;
+        }
+
+        rand1 = drand48();
+
+        if (rand1 <= rdme->srrate[subvol]/totrate) { // use normalized floating point comparison
+            /* Reaction event. */
+            event = 0;
+
+            /* a) Determine the reaction re that did occur (direct SSA). */
+            double rand_rval = rand1 * rdme->srrate[subvol];
+            for (re = 0, cum = rdme->rrate[subvol*rdme->Mreactions]; re < rdme->Mreactions && rand_rval > cum; re++, cum += rdme->rrate[subvol*rdme->Mreactions+re])
+                ;
+            if(re >= rdme->Mreactions){
+                if(cum != rdme->srrate[subvol]){
+                    printf("Reaction propensity mismatch in voxel %i. re=%i, srrate[subvol]=%e cum=%e rand_rval=%e\n",subvol,re,rdme->srrate[subvol],cum,rand_rval);
+                    rdelta = 0.0;
+                    for (j=0;j<rdme->Mreactions; j++) {
+                        rdelta += (rdme->rrate[subvol*rdme->Mreactions+j] = (*rdme->rfun[j])(&rdme->xx[subvol*rdme->Mspecies],tt,rdme->vol[subvol],&rdme->data[subvol*rdme->dsize],rdme->sd[subvol]));
+                    }
+                    rdme->srrate[subvol] = rdelta;
+                }
+                if(rdme->srrate[subvol] == 0.0){ continue; }
+
+                double rand_rval2 = rand1 * rdme->srrate[subvol]; // the cached sum did not match the individual propensities; re-roll against the recomputed sum
+                for (re = 0, cum = rdme->rrate[subvol*rdme->Mreactions]; re < rdme->Mreactions && rand_rval2 > cum; re++, cum += rdme->rrate[subvol*rdme->Mreactions+re])
+                    ;
+                if(re >= rdme->Mreactions){ // failed twice, problems!
+                    printf("Propensity sum overflow, rand=%e rand_rval=%e rand_rval2=%e srrate[%i]=%e cum=%e\n",rand1,rand_rval,rand_rval2,subvol,rdme->srrate[subvol],cum);
+                    exit(1);
+                }
+            }
+            if(debug_flag){printf("nsm: tt=%e subvol=%i sd=%i",tt,subvol,rdme->sd[subvol]);}
+            if(debug_flag){printf("Rxn %i \n",re);}
+            /* b) Update the state of the subvolume subvol and sdrate[subvol]. */
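+            /* Each nonzero entry of the stoichiometry column N(:,re) changes one species' copy number; the matching change in that species' diffusion propensity is folded into sdrate[subvol]. */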
+            for (i = rdme->jcN[re]; i < rdme->jcN[re+1]; i++) {
+                int prev_val = rdme->xx[subvol*rdme->Mspecies+rdme->irN[i]];
+                rdme->xx[subvol*rdme->Mspecies+rdme->irN[i]] += rdme->prN[i];
+                if (rdme->xx[subvol*rdme->Mspecies+rdme->irN[i]] < 0){
+                    errcode = 1;
+                    printf("Negative state detected after reaction %i, subvol %i, species %zu at time %e (was %i now %i)\n",re,subvol,rdme->irN[i],tt,prev_val,rdme->xx[subvol*rdme->Mspecies+rdme->irN[i]]);
+                    //printf("re decremented=%i \n",re_decremented);
+                    printf("rand1 = %e \n",rand1);
+                    printf("cum = %e \n",cum);
+                    printf("re = %i\n", re);
+                    printf("subvol = %i\n",subvol);
+                    printf("rrate[%zu] = %e \n",subvol*rdme->Mreactions+re,rdme->rrate[subvol*rdme->Mreactions+re]);
+                    printf("srrate[%i] = %e \n",subvol,rdme->srrate[subvol]);
+                    printf("sdrate[%i] = %e \n",subvol,rdme->sdrate[subvol]);
+                    printf("totrate = %e \n",totrate);
+                    printf("Mreactions = %li\n",rdme->Mreactions);
+                    printf("total_reactions = %li\n",rdme->total_reactions);
+                    printf("total_diffusion = %li\n",rdme->total_diffusion);
+                    int jj;
+                    for(jj=0;jj<rdme->Mspecies;jj++){
+                        printf("xx[%i] = %i\n",jj,rdme->xx[subvol*rdme->Mspecies+jj]);
+                    }
+                    double jj_cumsum=0.0;
+                    for(jj=0;jj<rdme->Mreactions;jj++){
+                        jj_cumsum += rdme->rrate[subvol*rdme->Mreactions+jj];
+                        printf("rxn%i rrate[%li]=%e cumsum=%e\n",jj,subvol*rdme->Mreactions+jj,rdme->rrate[subvol*rdme->Mreactions+jj],jj_cumsum);
+                        printf("\trxn%i_propensity = %e\n",jj,(*rdme->rfun[jj])(&rdme->xx[subvol*rdme->Mspecies],tt,rdme->vol[subvol],&rdme->data[subvol*rdme->dsize],rdme->sd[subvol]));
+                    }
+                    for (i = rdme->jcG[rdme->Mspecies+re]; i < rdme->jcG[rdme->Mspecies+re+1]; i++) {
+                        printf("G[%li,%li]\n",rdme->Mspecies+re, rdme->irG[i]);
+                    }
+                    for (i = rdme->jcN[re]; i < rdme->jcN[re+1]; i++) {
+                        printf("N[%i,%li]=%i\n",re,rdme->irN[i],rdme->prN[i]);
+                    }
+                    print_current_state(subvol,rdme->xx,rdme->Mspecies);
+                    exit(1);
+                }
+                rdme->sdrate[subvol] += rdme->Ddiag[subvol*rdme->Mspecies+rdme->irN[i]]*rdme->prN[i];
+            }
+
+            /* c) Recalculate srrate[subvol] using dependency graph. */
+            for (i = rdme->jcG[rdme->Mspecies+re], rdelta = 0.0; i < rdme->jcG[rdme->Mspecies+re+1]; i++) {
+                old = rdme->rrate[subvol*rdme->Mreactions+rdme->irG[i]];
+                j = rdme->irG[i];
+                rdelta +=
+                    (rdme->rrate[subvol*rdme->Mreactions+j] =
+                        (*rdme->rfun[j])(&rdme->xx[subvol*rdme->Mspecies],tt,rdme->vol[subvol],&rdme->data[subvol*rdme->dsize],rdme->sd[subvol])
+                    )-old;
+            }
+            rdme->srrate[subvol] += rdelta;
+
+            rdme->total_reactions++; /* counter */
+        }
+        else {
+            /* Diffusion event. */
+            event = 1;
+
+            /* a) Determine which species... */
+            double diff_rand = rand1 * rdme->sdrate[subvol];
+
+            for (spec = 0, dof = subvol*rdme->Mspecies, cum = rdme->Ddiag[dof]*rdme->xx[dof];
+                 spec < rdme->Mspecies && diff_rand > cum;
+                 spec++, cum += rdme->Ddiag[dof+spec]*rdme->xx[dof+spec]);
+            if(spec >= rdme->Mspecies){
+                //printf("Diffusion species overflow\n");
+                // try again, 'cum' is a better estimate of the propensity sum
+                if(cum != rdme->srrate[subvol]){
+                    printf("Diffusion propensity mismatch in voxel %i. 
spec=%i, sdrate[subvol]=%e cum=%e diff_rand=%e\n",subvol,spec,rdme->sdrate[subvol],cum,diff_rand); + rdelta = 0.0; + for(j = 0; j < rdme->Mspecies; j++){ + rdelta += rdme->Ddiag[subvol*rdme->Mspecies+j]*rdme->xx[subvol*rdme->Mspecies+j]; + } + rdme->sdrate[subvol] = rdelta; + } + if(rdme->sdrate[subvol] == 0.0){ continue; } + + diff_rand = cum *rand1; + for (spec = 0, dof = subvol*rdme->Mspecies, cum = rdme->Ddiag[dof]*rdme->xx[dof]; + spec < rdme->Mspecies && diff_rand > cum; + spec++, cum += rdme->Ddiag[dof+spec]*rdme->xx[dof+spec]); + if(spec >= rdme->Mspecies){ + spec--; + while(rdme->xx[dof+spec] <= 0){ + spec--; + if(spec <=0){ + printf("Error: diffusion event in voxel %i was selected, but no molecues to move\n",subvol); + print_current_state(subvol,rdme->xx,rdme->Mspecies); + exit(1); + } + } + } + } + + + /* b) and then the direction of diffusion. */ + col = dof+spec; + double r2 = drand48(); + rand2 = r2*rdme->Ddiag[col]; + + /* Search for diffusion direction. */ + for (i = rdme->jcD[col], cum2 = 0.0; i < rdme->jcD[col+1]; i++) + if (rdme->irD[i] != col && (cum2 += rdme->prD[i]) > rand2) + break; + + /* paranoia fix: */ + // This paranoia fix creates errors if the final rate has a zero propensity. It can cause negative populations. + if (i >= rdme->jcD[col+1]){ + //printf("Diffusion direction overflow\n"); + // try again, 'cum2' is a better estimate of propensity sum + rand2 = r2*cum2; + for (i = rdme->jcD[col], cum2 = 0.0; i < rdme->jcD[col+1]; i++) + if (rdme->irD[i] != col && (cum2 += rdme->prD[i]) > rand2) + break; + if (i >= rdme->jcD[col+1]){ + i--; + } + } + + to_node = rdme->irD[i]; + to_vol = to_node/rdme->Mspecies; + + /* c) Execute the diffusion event (check for negative elements). */ + rdme->xx[subvol*rdme->Mspecies+spec]--; + if (rdme->xx[subvol*rdme->Mspecies+spec] < 0){ + errcode = 1; + printf("Negative state detected after diffusion, voxel %i -> %zu, species %i at time %e\n",subvol,to_node,spec,tt); + printf("total_reactions = %li\n",rdme->total_reactions); + printf("total_diffusion = %li\n",rdme->total_diffusion); + printf("rand1 = %e\n",rand1); + printf("rand2 = %e\n",rand2); + printf("cum = %e\n",cum); + printf("cum2 = %e\n",cum2); + printf("dof = %i\n",dof); + printf("col = %i\n",col); + printf("i = %zu jcD[col]=%zu jcD[col+1]=%zu\n",i,rdme->jcD[col],rdme->jcD[col+1]); + print_current_state(subvol,rdme->xx,rdme->Mspecies); + exit(1); + } + rdme->xx[to_node]++; + + + if(debug_flag){printf("nsm: tt=%e subvol=%i sd=%i",tt,subvol,rdme->sd[subvol]);} + if(debug_flag){printf("Diff %i->%li\n",subvol,to_vol);} + + /* Save reaction and diffusion rates. */ + old_rrate = rdme->srrate[to_vol]; + old_drate = rdme->sdrate[to_vol]; + /* Recalculate the reaction rates using dependency graph G. */ + if (rdme->Mreactions > 0){ + for (i = rdme->jcG[spec], rdelta = 0.0, rrdelta = 0.0; i < rdme->jcG[spec+1]; i++) { + + j = rdme->irG[i]; + old = rdme->rrate[subvol*rdme->Mreactions+j]; + + rdelta += + (rdme->rrate[subvol*rdme->Mreactions+j] = + (*rdme->rfun[j])(&rdme->xx[subvol*rdme->Mspecies],tt,rdme->vol[subvol],&rdme->data[subvol*rdme->dsize],rdme->sd[subvol]) + )-old; + old = rdme->rrate[to_vol*rdme->Mreactions+j]; + + rrdelta += (rdme->rrate[to_vol*rdme->Mreactions+j] = + (*rdme->rfun[j])(&rdme->xx[to_vol*rdme->Mspecies],tt,rdme->vol[to_vol],&rdme->data[to_vol*rdme->dsize],rdme->sd[to_vol]) + )-old; + } + + rdme->srrate[subvol] += rdelta; + rdme->srrate[to_vol] += rrdelta; + } + + /* Adjust diffusion rates. 
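The source voxel loses the per-molecule diffusion rate Ddiag of species spec and the destination voxel gains it.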
*/ + rdme->sdrate[subvol] -= rdme->Ddiag[subvol*rdme->Mspecies+spec]; + rdme->sdrate[to_vol] += rdme->Ddiag[to_vol*rdme->Mspecies+spec]; + + rdme->total_diffusion++; /* counter */ + + } + + /* Compute time to new event for this subvolume. */ + totrate = rdme->srrate[subvol]+rdme->sdrate[subvol]; + if(totrate > 0.0){ + rdme->rtimes[0] = -log(1.0-drand48())/totrate+tt; + }else{ + rdme->rtimes[0] = INFINITY; + } + /* Update the heap. */ + update(0,rdme->rtimes,rdme->node,rdme->heap,rdme->Ncells); + + /* If it was a diffusion event, also update the other affected + node. */ + if(event) { + totrate = rdme->srrate[to_vol]+rdme->sdrate[to_vol]; + if(totrate > 0.0) { + if(!isinf(rdme->rtimes[rdme->heap[to_vol]])){ + rdme->rtimes[rdme->heap[to_vol]] = + (old_rrate+old_drate)/totrate*(rdme->rtimes[rdme->heap[to_vol]]-tt)+tt; + }else{ + /* generate a new waiting time */ + rdme->rtimes[rdme->heap[to_vol]] = -log(1.0-drand48())/totrate+tt; + } + }else{ + rdme->rtimes[rdme->heap[to_vol]] = INFINITY; + } + + update(rdme->heap[to_vol],rdme->rtimes,rdme->node,rdme->heap,rdme->Ncells); + } + + /* Check for error codes. */ + if (errcode) { + /* Cannot continue. Clear this solution and exit. */ + printf("Exiting due to errcode %i\n",errcode); + print_current_state(subvol, rdme->xx,rdme->Mspecies); + exit(1); + } + } + + + +} + +/**************************************************************************/ + diff --git a/pyurdme_init b/spatialpy_init similarity index 76% rename from pyurdme_init rename to spatialpy_init index 1b1e4b8c..6d2ea061 100644 --- a/pyurdme_init +++ b/spatialpy_init @@ -16,7 +16,4 @@ else fi export SPATIALPYHOME=$MY_PATH export PYTHONPATH="$SPATIALPYHOME:$PYTHONPATH" -export C_INCLUDE_PATH=/usr/lib/openmpi/include -export C_INCLUDE_PATH=/usr/lib/openmpi/include -export INSTANT_SYSTEM_CALL_METHOD=SUBPROCESS diff --git a/test/test_adfsp.py b/test/test_adfsp.py deleted file mode 100644 index 52ecf9f8..00000000 --- a/test/test_adfsp.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python -from pyurdme.adfsp_solver import ADFSPSolver -from examples.cylinder_demo.cylinder_demo3D import cylinderDemo3D -import matplotlib.pyplot as plt -import numpy -import pyurdme -import time - -model = cylinderDemo3D() -sol = ADFSPSolver(model, report_level=1, error_tolarance=0.05) -print "Attempting to compile" -sol.compile() -print "Beginning simulation" -t1 = time.time() -result = sol.run() -print "Simulation complete in {0}s".format(time.time()-t1) -print "Plotting solution" -# Plot of the time-average spatial concentration. -x_vals = model.mesh.coordinates()[:, 0] -A_vals = numpy.mean(result.get_species("A", concentration=True), axis=0) -B_vals = numpy.mean(result.get_species("B", concentration=True), axis=0) -plt.plot(x_vals,A_vals,'.r',x_vals,B_vals,'.b') -plt.legend(['A', 'B']) -plt.show()
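Note: propensity_file_template.c above is only a skeleton; SpatialPy substitutes the __DEFINE_*__ placeholders when it generates a model-specific file. As a reading aid, the sketch below shows one hypothetical expansion for a single mass-action degradation reaction (A -> 0, rate constant k1). The reaction, the name k1, and the species index 0 are invented for illustration; only the PropensityFun signature and the ALLOC_propensities/FREE_propensities entry points come from propensities.h.

/* Illustrative only -- not generated by SpatialPy. Assumes one reaction and
   that species A occupies index 0 of a voxel's state vector x. */
#include <stdlib.h>

typedef double (*PropensityFun)(const unsigned int *, double, double, const double *, int);

#define NUM_REACTIONS 1
static const double k1 = 0.1;   /* hypothetical degradation rate constant */

/* Propensity of A -> 0 in one voxel: k1 * (copy number of A). */
static double rfun0(const unsigned int *x, double t, double vol, const double *data, int sd)
{
    return k1 * x[0];
}

PropensityFun *ALLOC_propensities(void)
{
    PropensityFun *ptr = (PropensityFun *)malloc(sizeof(PropensityFun) * NUM_REACTIONS);
    ptr[0] = rfun0;
    return ptr;
}

void FREE_propensities(PropensityFun *ptr)
{
    free(ptr);
}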