Commit ee8e4669 by Jigyasa Watwani

with nematic, most minimal model

parent a0492c5d
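# ---- file: growing_domain.py (name inferred from the driver script's "from growing_domain import Growing_Domain") ----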
import dolfin as df
import numpy as np
import progressbar
import meshzoo
from ufl.constantvalue import PermutationSymbol as epsilon
df.set_log_level(df.LogLevel.ERROR)
df.parameters['form_compiler']['optimize'] = True
class Growing_Domain(object):
def __init__(self, parameters, mesh = None):
# read in parameters
for key in parameters:
setattr(self, key, parameters[key])
# if no mesh is given as initial condition, create one
        if mesh is None:
points, cells = meshzoo.disk(10,8)
self.mesh = df.Mesh()
e = df.MeshEditor()
e.open(self.mesh, type='triangle', tdim=2, gdim=2)
e.init_vertices(len(points))
e.init_cells(len(cells))
for i in range(len(points)):
e.add_vertex(i, points[i])
for i in range(len(cells)):
e.add_cell(i, cells[i])
e.close()
else:
self.mesh = df.Mesh(mesh)
def setup(self):
scalar_element = df.FiniteElement('P', self.mesh.ufl_cell(), 1)
vector_element = df.VectorElement('P', self.mesh.ufl_cell(), 1)
tensor_element = df.TensorElement('P', self.mesh.ufl_cell(), 1)
# u, v, rho, Q, l, L
mixed_element = df.MixedElement([vector_element, vector_element, scalar_element,
tensor_element, scalar_element, scalar_element,
])
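        # l and L are scalar Lagrange multipliers that enforce tr(Q) = 0 and a
        # symmetric Q (zero antisymmetric part); see lform and Lform in setup_weak_forms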
# define function space with this mixed element
self.function_space = df.FunctionSpace(self.mesh, mixed_element)
# dirichlet boundaries for rho
self.bc = df.DirichletBC(self.function_space.sub(2), df.Constant(0.0), 'on_boundary')
# define the reqd functions on this function space
self.f = df.Function(self.function_space) # f at current time
self.f0 = df.Function(self.function_space) # f at t =0
def advection(self, conc, vel, tconc):
return df.inner(df.div(vel * conc), tconc)
def active_stress(self, rho, q):
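        # active stress: rho/(rho + saturation_rho) * (lamda_rho * I + lamda_q * Q),
        # an isotropic plus a nematic-aligned contribution that saturates in rho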
return (rho /(rho + self.saturation_rho)
* (self.lamda_rho * df.Identity(self.dimension)
+ self.lamda_q * q)
)
def epsilon(self, v):
return df.sym(df.nabla_grad(v))
def passive_stress(self, u, v, q):
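        # passive stress: linear elastic and viscous parts built from the strain and
        # strain rate, plus a nematic stress built from the molecular-field combination
        # H = H_polynomial_terms(q) - Kq * div(grad q): the antisymmetric part q.H - H.q
        # and a -stretch * H contribution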
eps_u, eps_v = self.epsilon(u), self.epsilon(v)
elastic_stress = (2 * self.shear_elasticity * eps_u +
(self.bulk_elasticity - self.shear_elasticity) *
df.tr(eps_u) * df.Identity(self.dimension))
viscous_stress = (2 * self.shear_viscosity * eps_v +
(self.bulk_viscosity - self.shear_viscosity) *
df.tr(eps_v) * df.Identity(self.dimension))
nematic_stress = (df.dot(q, self.H_polynomial_terms(q) - self.Kq * df.div(df.nabla_grad(q)))
- df.dot(self.H_polynomial_terms(q) - self.Kq * df.div(df.nabla_grad(q)), q)
- self.stretch * (self.H_polynomial_terms(q)
- self.Kq * df.div(df.nabla_grad(q)))
)
if self.neglect_orientational_stress:
return viscous_stress + elastic_stress
else:
return (elastic_stress + viscous_stress + nematic_stress)
def stress(self, u, v, rho, q):
return (self.passive_stress(u, v, q) + self.active_stress(rho, q))
def reaction_rho(self, rho, trho):
return -self.turnover_rho * df.inner(rho * (1 - rho/self.average_rho), trho)
def diffusion_reaction_rho(self, rho, trho):
return (self.diffusion_rho * df.inner(df.nabla_grad(rho), df.nabla_grad(trho))
+ self.reaction_rho(rho, trho))
def antisymm_gradv(self, v):
return df.skew(df.nabla_grad(v))
def symm_gradv(self, v):
return df.sym(df.nabla_grad(v))
def corotation_stretch_term(self, v, q):
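        # co-rotation (Jaumann) terms Omega.q - q.Omega, with Omega the antisymmetric
        # part of grad v, plus a stretch/flow-alignment term proportional to the
        # traceless strain rate scaled by |q|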
return ( df.dot(self.antisymm_gradv(v), q)
- df.dot(q, self.antisymm_gradv(v))
- self.stretch * (self.symm_gradv(v) - df.tr(self.symm_gradv(v)) * df.Identity(2)/2)
* df.sqrt(df.inner(q, q)
)
)
def H_polynomial_terms(self, q):
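        # local polynomial part of the molecular field: A q + B q.q + C |q|^2 q + D q.q.q;
        # the gradient part -Kq * laplacian(q) is added where this is used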
return (self.A * q
+ self.B * df.dot(q, q)
+ self.C * q * df.inner(q, q)
+ self.D * df.dot(df.dot(q, q), q))
def refine_mesh(self, mesh):
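        # flag and refine every cell whose area exceeds 0.1 (= 2 * 0.05), so the
        # resolution stays roughly uniform as the domain grows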
cell_markers = df.MeshFunction("bool", mesh, mesh.topology().dim())
cell_markers.set_all(False)
for cell in df.cells(mesh):
if cell.volume() > 2 * 0.05:
cell_markers[cell] = True
mesh = df.refine(mesh, cell_markers)
return mesh
def setup_initial_conditions(self):
# ic for u
self.zero_vector = df.Constant((0.0, 0.0))
u0 = df.interpolate(self.zero_vector, self.function_space.sub(0).collapse())
# add noise
noise_u = self.noise_level * (2*np.random.random(u0.vector().size())-1)
u0.vector()[:] = u0.vector()[:] + noise_u
rho0 = '(a/2)*(1-tanh((sqrt(x[0]*x[0]+x[1]*x[1]) - r0)/w))'
# rho0 = '(a/2)*(1 - tanh((sqrt(x[0]*x[0]+x[1]*x[1]) - 1/(sqrt(3*sin(atan2(x[1], x[0]))*sin(atan2(x[1], x[0]))+1)) )/w))'
# for ellipse IC
# rho0 = '(a/2)*(1+tanh((sqrt(x[0]*x[0]+x[1]*x[1]) - r0)/w))'
rho0 = df.interpolate(df.Expression(rho0, a=self.average_rho, r0=0.5, w=0.1, degree=1),
self.function_space.sub(2).collapse())
noise_rho = self.noise_level * (2 * np.random.random(rho0.vector().size()) - 1)
rho0.vector()[:] = rho0.vector()[:] + noise_rho
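        # initial Q: a small random symmetric, traceless 2x2 tensor; l0 and L0 are set
        # to its trace and antisymmetric component (both essentially zero), consistent
        # with the constraints they enforce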
a = np.random.randn(2, 2)
a = (a + a.T)/2
a -= np.trace(a) * np.eye(2)/2
a = 0.01 * a
q0 = df.Constant(((a[0, 0], a[0, 1]), (a[1, 0], a[1, 1])))
q0 = df.interpolate(q0, self.function_space.sub(3).collapse())
l0 = df.interpolate(df.Constant(a[0, 0] + a[1, 1]), self.function_space.sub(4).collapse())
L0 = df.interpolate(df.Constant(a[0, 1] - a[1, 0]), self.function_space.sub(5).collapse())
# initial flows
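        # solve the quasi-static force balance, friction * v = div(stress(u0, v, rho0, q0)),
        # in weak form to obtain an initial velocity consistent with the initial fields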
VFS = df.VectorFunctionSpace(self.mesh, 'P', 1)
vi = df.Function(VFS)
tvi = df.TestFunction(VFS)
viform = (self.friction * df.inner(vi, tvi)
+ df.inner(self.stress(u0, vi, rho0, q0), df.nabla_grad(tvi))
) * df.dx
df.solve(viform == 0, vi)
vi = df.interpolate(vi, self.function_space.sub(0).collapse())
df.assign(self.f0, [u0, vi, rho0, q0, l0, L0])
def setup_weak_forms(self):
u0, v0, rho0, q0, l0, L0 = df.split(self.f0)
u, v, rho, q, l, L = df.split(self.f)
tu, tv, trho, tq, tl, tL = df.TestFunctions(self.function_space)
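        # semi-implicit time stepping: backward-Euler time derivatives, with the
        # advection, co-rotation and stretch terms evaluated at the previous step
        # (v0, rho0, q0) and the remaining terms taken implicitly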
uform = (df.inner((u - u0)/self.timestep, tu)
- df.inner(v, tu)
)
vform = (self.friction * df.inner(v, tv)
+ df.inner(self.stress(u, v, rho, q), df.nabla_grad(tv))
)
rhoform = (df.inner((rho - rho0)/self.timestep, trho)
+ self.advection(rho0, v0, trho)
+ self.diffusion_reaction_rho(rho, trho)
)
qform = (df.inner((q - q0)/self.timestep, tq)
+ df.inner(df.dot(v0, df.nabla_grad(q0)), tq)
+ df.inner(self.corotation_stretch_term(v0, q0), tq)
+ self.mobility * df.inner(self.H_polynomial_terms(q), tq)
+ self.mobility * self.Kq * df.inner(df.nabla_grad(q), df.nabla_grad(tq))
+ l * df.inner(df.Identity(2), tq)
+ L * df.inner(epsilon(2), tq))
lform = df.inner(tl, df.tr(q))
Lform = df.inner(tL, df.inner(epsilon(2), q))
self.form = (uform + vform + rhoform + qform + lform + Lform) * df.dx
def solve(self, initu=None, initv=None, initrho=None, initq = None, initTime = 0, extend=False):
# create function space, functions, bc
self.setup()
# create files if they don't exist, open them if they exist
self.uFile = df.XDMFFile('%s_displacement.xdmf' % self.timestamp)
self.vFile = df.XDMFFile('%s_velocity.xdmf' % self.timestamp)
self.rhoFile = df.XDMFFile('%s_density.xdmf' % self.timestamp)
self.qFile = df.XDMFFile('%s_q.xdmf' % self.timestamp)
# time-variables
self.time = initTime
savesteps = round(self.savetime/self.timestep)
maxsteps = round(self.maxtime/self.timestep)
if not extend:
# find the initial velocity
self.setup_initial_conditions()
# save the ic
u0, v0, rho0, q0, l0, L0 = self.f0.split(deepcopy=True)
self.uFile.write_checkpoint(u0, 'displacement', self.time)
self.vFile.write_checkpoint(v0, 'velocity', self.time)
self.rhoFile.write_checkpoint(rho0, 'density', self.time)
self.qFile.write_checkpoint(q0, 'q', self.time)
# move the mesh with this initial velocity
dr0 = df.project(v0 * self.timestep, self.function_space.sub(0).collapse())
df.ALE.move(self.mesh, dr0)
        else:
            # assign fields to f0
            u0 = df.project(initu, self.function_space.sub(0).collapse())
            v0 = df.project(initv, self.function_space.sub(1).collapse())
            rho0 = df.project(initrho, self.function_space.sub(2).collapse())
            q0 = df.project(initq, self.function_space.sub(3).collapse())
            # the Lagrange multipliers l and L are not written to file, so restart
            # them from zero (Q is symmetric and traceless up to discretization error)
            l0 = df.interpolate(df.Constant(0.0), self.function_space.sub(4).collapse())
            L0 = df.interpolate(df.Constant(0.0), self.function_space.sub(5).collapse())
            df.assign(self.f0, [u0, v0, rho0, q0, l0, L0])
# move the mesh with this initial velocity
dr0 = df.project(v0 * self.timestep, self.function_space.sub(0).collapse())
df.ALE.move(self.mesh, dr0)
self.setup_weak_forms()
bar = progressbar.ProgressBar(maxval=maxsteps)
for steps in bar(range(1, maxsteps + 1)):
self.time += self.timestep
# solve to get fields at next timestep
df.solve(self.form == 0, self.f, self.bc)
# save the fields
u, v, rho, q, l, L = self.f.split(deepcopy=True)
if steps % savesteps == 0:
self.uFile.write_checkpoint(u, 'displacement', self.time, append=True)
self.vFile.write_checkpoint(v, 'velocity', self.time, append=True)
self.rhoFile.write_checkpoint(rho, 'density', self.time, append=True)
self.qFile.write_checkpoint(q, 'q', self.time, append=True)
# assign the calculated func to a newly-defined function to be used later
old_function = df.Function(self.function_space)
old_function.assign(self.f)
# move mesh
dr = df.project(v * self.timestep, self.function_space.sub(0).collapse())
df.ALE.move(self.mesh, dr)
# refine the mesh
self.mesh = self.refine_mesh(self.mesh)
# new function space, bc, functions
self.setup()
# new function0 should be the old function in the old function space projected on the new function space
old_function.set_allow_extrapolation(True)
self.f0 = df.project(old_function, self.function_space)
# new weak form
self.setup_weak_forms()
self.uFile.close()
self.vFile.close()
self.rhoFile.close()
self.qFile.close()
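# ---- file: parameters.json (name inferred from the driver script's default --jsonfile) ----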
{
"A": -1,
"B": 0.0,
"C":1.0,
"D": 0.0,
"Kq": 1,
"average_rho": 1.0,
"bulk_elasticity": 1,
"bulk_viscosity": 1.0,
"diffusion_rho": 0.1,
"dimension": 2,
"friction": 1.0,
"lamda_q": -2,
"lamda_rho": 0,
"maxtime": 10.0,
"mesh": "None",
"mobility": 1.0,
"noise_level": 0.0,
"saturation_rho": 1.0,
"savetime": 0.1,
"shear_elasticity": 1,
"shear_viscosity": 1.0,
"stretch": -2,
"timestep": 0.01,
"neglect_orientational_stress": false,
"turnover_rho": 1.0
}
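# A minimal usage sketch of how a parameter file like the one above drives a fresh
# run, mirroring the driver script further down; 'parameters.json' is the driver's
# default file name, 'example-run' is a placeholder timestamp, and Growing_Domain
# is imported from growing_domain.py.
import json
from growing_domain import Growing_Domain

with open('parameters.json') as f:
    params = json.load(f)
params['timestamp'] = 'example-run'   # used to name the *_displacement.xdmf etc. outputs
tissue = Growing_Domain(params)       # no mesh passed, so a meshzoo disk mesh is built
tissue.solve()                        # fresh run starting from the tanh density profile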
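# ---- file: boundary-quantification script (file name not shown in this excerpt) ----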
import os
import numpy as np
import h5py
import dolfin as df
import progressbar
import matplotlib.pyplot as plt
from scipy.spatial.distance import euclidean
from scipy.spatial import ConvexHull
def get_mesh(params, DIR=''):
savesteps = round(params['maxtime'] / params['savetime'])
times = np.arange(savesteps + 1) * params['savetime']
# Read mesh geometry from h5 file
var = 'density'
h5 = h5py.File(os.path.join(DIR, '%s_%s.h5' % (params['timestamp'], var)), "r")
# NOTE: geometry and topology are lists of length len(times)
# NOTE: geometry[k] is a numpy array of shape (Num of vertices, 2) for timestep k
# NOTE: topology[k] is a numpy array of shape (Num of cells, 3) for timestep k
topology = []
geometry = []
boundary_points = []
for i in range(len(times)):
geometry.append(np.array(h5['%s/%s_%d/mesh/geometry' % (var, var, i)]))
topology.append(np.array(h5['%s/%s_%d/mesh/topology' % (var, var, i)]))
h5.close()
for steps in progressbar.ProgressBar()(range(savesteps + 1)):
# Create a mesh for the current timestep
mesh = df.Mesh()
editor = df.MeshEditor()
editor.open(mesh,
type='triangle' if params['dimension'] == 2 else 'tetrahedron',
tdim=params['dimension'], gdim=params['dimension'])
editor.init_vertices(len(geometry[steps]))
editor.init_cells(len(topology[steps]))
for j in range(len(geometry[steps])):
editor.add_vertex(j, geometry[steps][j])
for j in range(len(topology[steps])):
editor.add_cell(j, topology[steps][j])
editor.close()
boundary = df.BoundaryMesh(mesh, "exterior", False)
boundary_coords = boundary.coordinates()
boundary_coords = boundary_coords[np.argsort(np.arctan2(boundary_coords[:, 1] - np.mean(boundary_coords[:, 1]),
boundary_coords[:, 0] - np.mean(boundary_coords[:, 0])))]
boundary_points.append(boundary_coords)
return (times, boundary_points)
def quantify_bdry(params, DIR='', offscreen=False):
times, bdry = get_mesh(params, DIR)
# Plot the initial, intermediate, final boundary
plt.figure(figsize=(8, 8))
plt.plot(bdry[0][:, 0], bdry[0][:, 1], c='blue', label='Initial Boundary', alpha=0.7)
plt.plot(bdry[-1][:, 0], bdry[-1][:, 1], c='red', label='Final Boundary', alpha=0.7)
plt.xlabel('X')
plt.ylabel('Y')
plt.axis('equal')
plt.legend()
plt.grid(True)
    plt.savefig(os.path.join(DIR, '%s_boundary_shapes.png' % params['timestamp']), dpi=1000)
    plt.show()
area = []
aspect_ratio = []
perimeter = []
roundness = []
for i in range(len(times)):
x = bdry[i][:, 0]
y = bdry[i][:, 1]
# Area by shoelace formula
ar = 0.5 * np.abs(np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1)))
area.append(ar)
# Aspect ratio by max distance to min distance from center of shape
        # NOTE: The following may not work if remeshing is not sufficient
# asp_rat = (np.max(np.sqrt((x - np.mean(x))**2 + (y - np.mean(y))**2))
# / np.min(np.sqrt((x - np.mean(x))**2 + (y - np.mean(y))**2)))
# aspect_ratio.append(asp_rat)
hull = ConvexHull(bdry[i])
hull_points = bdry[i][hull.vertices]
x_min, x_max = np.min(hull_points[:, 0]), np.max(hull_points[:, 0])
y_min, y_max = np.min(hull_points[:, 1]), np.max(hull_points[:, 1])
width = x_max - x_min
height = y_max - y_min
asp_rat = max(width, height) / min(width, height)
aspect_ratio.append(asp_rat)
# Perimeter calculation
per = sum(euclidean(bdry[i][j], bdry[i][(j + 1) % len(bdry[i])]) for j in range(len(bdry[i])))
perimeter.append(per)
        # Roundness = 4*pi*area / perimeter^2 (equals 1 for a circle)
roundness.append((4 * np.pi * ar) / (per**2))
fig, ax = plt.subplots(1, 2, figsize=(6, 3))
ax[0].grid(True)
ax[1].grid(True)
ax[0].plot(times, area)
ax[0].set_xlabel('Time')
ax[0].set_ylabel('Area')
ax[1].plot(times, aspect_ratio)
ax[1].set_xlabel('Time')
ax[1].set_ylabel('Aspect Ratio')
plt.tight_layout()
# # ax[1, 0].plot(times, perimeter)
# # ax[1, 0].set_xlabel('Time')
# # ax[1, 0].set_ylabel('Perimeter')
# # ax[1, 1].plot(times, roundness)
# # ax[1, 1].set_xlabel('Time')
# # ax[1, 1].set_ylabel('Roundness')
    plt.savefig(os.path.join(DIR, '%s_boundary_quantities.png' % params['timestamp']), dpi=1000)
    plt.show()
fig1, ax1 = plt.subplots(1, 1, figsize=(6, 3))
ax1.grid(True)
    ax1.semilogy(times, np.gradient(area, times), label='$dA/dt$')
ax1.set_xlabel('Time')
ax1.set_ylabel('Rate of Change of Area')
plt.tight_layout()
    plt.savefig(os.path.join(DIR, '%s_boundary_area_rate.png' % params['timestamp']), dpi=1000)
    plt.show()
if __name__ == '__main__':
import argparse, json
parser = argparse.ArgumentParser()
parser.add_argument('-j', '--jsonfile', help='parameters file', required=True)
args = parser.parse_args()
with open(args.jsonfile) as jsonFile:
params = json.load(jsonFile)
    quantify_bdry(params, DIR=os.path.dirname(args.jsonfile))
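# ---- file: run/extend driver script (file name not shown in this excerpt) ----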
import json, datetime
import os
from growing_domain import Growing_Domain
from viz_growing_domain_2 import visualize
import argparse
import dolfin as df
import h5py
import numpy as np
parser = argparse.ArgumentParser()
parser.add_argument('-j','--jsonfile', help='data file',
default='parameters.json')
parser.add_argument('-t','--time', help='time to run', type=float)
args = parser.parse_args()
assert os.path.isfile(args.jsonfile), '%s file not found' % args.jsonfile
with open(args.jsonfile) as jsonFile:
parameters = json.load(jsonFile)
if args.jsonfile=='parameters.json':
extend = False
print('Fresh run...')
timestamp = datetime.datetime.now().strftime("%d%m%y-%H%M%S")
parameters['timestamp'] = timestamp
parametersFile = timestamp + '_parameters.json'
initu = None
initv = None
initrho = None
initq = None
initTime = 0.0
else:
extend = True
print('Extending run %s...' % parameters['timestamp'])
parametersFile = args.jsonfile
savesteps = round(parameters['maxtime']/parameters['savetime'])
# read geometry and topology at the last timepoint from h5 file
var = 'density'
    h5 = h5py.File('%s_%s.h5' % (parameters['timestamp'], var), 'r')
geometry = np.array(h5['%s/%s_%d/mesh/geometry'%(var,var,savesteps)])
topology = np.array(h5['%s/%s_%d/mesh/topology'%(var,var,savesteps)])
# create mesh with this geometry and topology
mesh = df.Mesh()
editor = df.MeshEditor()
editor.open(mesh,
type='triangle' if parameters['dimension']==2 else 'tetrahedron',
tdim=parameters['dimension'], gdim=parameters['dimension'])
editor.init_vertices(len(geometry))
editor.init_cells(len(topology))
for j in range(len(geometry)):
editor.add_vertex(j, geometry[j])
for j in range(len(topology)):
editor.add_cell(j, topology[j])
editor.close()
# Read field values at the last time point
SFS = df.FunctionSpace(mesh, 'P', 1)
VFS = df.VectorFunctionSpace(mesh, 'P', 1)
TFS = df.TensorFunctionSpace(mesh, 'P', 1)
initu, initv, initrho, initq = df.Function(VFS), df.Function(VFS), df.Function(SFS), df.Function(TFS)
uFile = df.XDMFFile('%s_displacement.xdmf' % parameters['timestamp'])
vFile = df.XDMFFile('%s_velocity.xdmf' % parameters['timestamp'])
rhoFile = df.XDMFFile('%s_density.xdmf' % parameters['timestamp'])
qFile = df.XDMFFile('%s_q.xdmf' % parameters['timestamp'])
uFile.read_checkpoint(initu, 'displacement', savesteps)
vFile.read_checkpoint(initv, 'velocity', savesteps)
rhoFile.read_checkpoint(initrho, 'density', savesteps)
qFile.read_checkpoint(initq, 'q', savesteps)
uFile.close()
vFile.close()
rhoFile.close()
qFile.close()
initTime = parameters['maxtime']
parameters['maxtime'] = args.time
# when extending, reuse the mesh read back from the checkpoint so that the reloaded
# fields live on the correct (grown) geometry; otherwise build the default disk mesh
tissue = Growing_Domain(parameters, mesh) if extend else Growing_Domain(parameters)
tissue.solve(initu, initv, initrho, initq, initTime, extend)
if extend:
parameters['maxtime'] = initTime + parameters['maxtime']
with open(parametersFile, "w") as jsonFile:
json.dump(parameters, jsonFile, indent=4, sort_keys=True)
visualize(parameters, DIR='')
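# ---- file: viz_growing_domain_2.py (name inferred from the driver script's "from viz_growing_domain_2 import visualize") ----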
import dolfin as df
import numpy as np
import vedo as vd
import os
import h5py
import progressbar
import matplotlib.pyplot as plt
from tempfile import TemporaryDirectory
def get_data(params, DIR=''):
savesteps = round(params['maxtime']/params['savetime'])
times = np.arange(savesteps+1) * params['savetime']
# Read mesh geometry from h5 file
var = 'density'
h5 = h5py.File(os.path.join(DIR, '%s_%s.h5' % (params['timestamp'], var)), "r")
# NOTE: geometry and topology are lists of length len(times)
# NOTE: geometry[k] is a numpy array of shape (Num of vertices, 2) for timestep k
# NOTE: topology[k] is a numpy array of shape (Num of cells, 3) for timestep k
topology = []
geometry = []
for i in range(len(times)):
geometry.append(np.array(h5['%s/%s_%d/mesh/geometry'%(var,var,i)]))
topology.append(np.array(h5['%s/%s_%d/mesh/topology'%(var,var,i)]))
h5.close()
# create a mesh at every timestep with this geometry and topology
meshes = []
print('Making the mesh..')
for k in range(len(times)):
mesh = df.Mesh()
editor = df.MeshEditor()
editor.open(mesh,
type='triangle' ,
tdim=2, gdim=2)
editor.init_vertices(len(geometry[k]))
editor.init_cells(len(topology[k]))
for j in range(len(geometry[k])):
editor.add_vertex(j, geometry[k][j])
for j in range(len(topology[k])):
editor.add_cell(j, topology[k][j])
editor.close()
meshes.append(mesh)
# Read field values
u = []
v = []
rho = []
q = []
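    # NOTE: u[k] and v[k] below are (num_vertices, 2) arrays, rho[k] is (num_vertices,),
    # NOTE: and q[k] is a (num_vertices, 2, 2) array, all at timestep k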
uFile = df.XDMFFile(os.path.join(DIR, '%s_displacement.xdmf' % params['timestamp']))
vFile = df.XDMFFile(os.path.join(DIR, '%s_velocity.xdmf' % params['timestamp']))
rhoFile = df.XDMFFile(os.path.join(DIR, '%s_density.xdmf' % params['timestamp']))
qFile = df.XDMFFile(os.path.join(DIR, '%s_q.xdmf' % params['timestamp']))
bar = progressbar.ProgressBar(maxval=savesteps)
for steps in bar(range(savesteps+1)):
SFS = df.FunctionSpace(meshes[steps], 'P', 1)
VFS = df.VectorFunctionSpace(meshes[steps], 'P', 1)
TFS = df.TensorFunctionSpace(meshes[steps], 'P', 1)
ui, vi, rhoi, qi = df.Function(VFS), df.Function(VFS), df.Function(SFS), df.Function(TFS)
uFile.read_checkpoint(ui, 'displacement', steps)
vFile.read_checkpoint(vi, 'velocity', steps)
rhoFile.read_checkpoint(rhoi, 'density', steps)
qFile.read_checkpoint(qi, 'q', steps)
u_vec = ui.compute_vertex_values(meshes[steps])
u.append(u_vec.reshape(2, int(u_vec.shape[0]/2)).T)
v_vec = vi.compute_vertex_values(meshes[steps])
v.append(v_vec.reshape(2, int(v_vec.shape[0]/2)).T)
rho.append(rhoi.compute_vertex_values(meshes[steps]))
qvals = qi.compute_vertex_values(meshes[steps])
qvals = qvals.reshape(2, 2, int(qvals.shape[0]/2**2)).T
q.append(qvals.transpose(0, 2, 1))
uFile.close()
vFile.close()
rhoFile.close()
qFile.close()
return (times, topology, geometry, u, v, rho, q)
def visualize(params, DIR='', offscreen=False):
(times, topology, geometry, u, v, rho, q) = get_data(params, DIR)
qmag = []
director = []
for i in range(len(times)):
qmag.append(np.sqrt(np.einsum('ijk, ijk -> i', q[i], q[i])))
        # director: principal eigenvector of Q at each vertex, scaled by |Q|
        # (avoid reusing the names `dir`, `l`, `v`, which shadow a builtin and the fields)
        nematic_dir = np.zeros((len(qmag[i]), 2))
        for j in range(len(qmag[i])):
            evals, evecs = np.linalg.eig(q[i][j])
            idx = np.argmax(evals)
            nematic_dir[j] = np.real(evecs[:, idx]) * qmag[i][j]
        director.append(nematic_dir)
qmin, qmax = min(min(sublist) for sublist in qmag), max(max(sublist) for sublist in qmag)
rhomin, rhomax = min(min(sublist) for sublist in rho), max(max(sublist) for sublist in rho)
plotter = vd.plotter.Plotter(axes=0)
poly = vd.utils.buildPolyData(geometry[0], topology[0])
scalar_actor = vd.mesh.Mesh(poly)
scalar_actor.pointdata['density'] = rho[0]
scalar_actor.cmap('viridis', rho[0], vmin=rhomin, vmax=rhomax)
scalar_actor.add_scalarbar(title = r'$\rho$',
pos=[[0.8, 0.04], [0.9, 0.4]], nlabels=2,
# titleYOffset=15, titleFontSize=28, size=(100, 600)
)
plotter += scalar_actor
scale = 0.1
points = geometry[0]
startpoints = points - scale * director[0]/2
endpoints = points + scale * director[0]/2
lines = vd.shapes.Lines(startpoints, endpoints, lw=3, c='red')
plotter += lines
def update(idx):
nonlocal plotter, scalar_actor, lines
poly = vd.utils.buildPolyData(geometry[idx], topology[idx])
new_scalar_actor = vd.mesh.Mesh(poly)
new_scalar_actor.pointdata['density'] = rho[idx]
new_scalar_actor.cmap('viridis', rho[idx], vmin=rhomin, vmax=rhomax)
new_scalar_actor.add_scalarbar(title = r'$\rho/\rho_0$',
pos=[[0.8, 0.04], [0.9, 0.4]], nlabels=2,
# titleYOffset=15, titleFontSize=28, size=(100, 600)
)
plotter.remove(scalar_actor)
plotter += new_scalar_actor
scalar_actor = new_scalar_actor
points = geometry[idx]
startpoints = points - scale * director[idx]/2
endpoints = points + scale * director[idx]/2
new_lines = vd.shapes.Lines(startpoints, endpoints, lw=3, c='red')
plotter.remove(lines)
plotter += new_lines
lines = new_lines
def slider_update(widget, event):
value = widget.GetRepresentation().GetValue()
idx = (abs(times-value)).argmin()
update(idx)
slider = plotter.add_slider(slider_update, pos=[(0.1,0.94), (0.5,0.94)],
xmin=times[0], xmax=times.max(),
value=times[0], title=r"$t/\tau$")
slider.GetRepresentation().SetLabelFormat('%0.1f')
vd.show([scalar_actor], interactive=False, zoom=0.8)
FPS = 50
movFile = '%s_density.mov' % params['timestamp']
fps = float(FPS)
command = "ffmpeg -y -r"
options = "-b:v 3600k -qscale:v 4 -vcodec mpeg4"
tmp_dir = TemporaryDirectory()
get_filename = lambda x: os.path.join(tmp_dir.name, x)
for tt in range(len(times)):
idx = (abs(times-times[tt])).argmin()
update(idx)
slider.GetRepresentation().SetValue(times[tt])
fr = get_filename("%03d.png" % tt)
vd.screenshot(fr)
os.system(command + " " + str(fps)
+ " -i " + tmp_dir.name + os.sep
+ "%03d.png " + options + " " + movFile)
tmp_dir.cleanup()
# radial coordinate
# r = np.zeros(len(times))
# for i in range(len(times)):
# r[i] = np.max(np.sqrt(np.sum(np.square(geometry[i]), axis=1)))
# plt.plot(times, r)
# plt.xlabel('time')
# plt.ylabel('radius')
# plt.savefig('%s_radius.png' % params['timestamp'])
# plt.show()
# drdt = np.gradient(r, times)
# print(drdt)
# plt.plot(times, drdt)
# plt.xlabel('time')
# plt.ylabel('dr/dt')
# plt.savefig('%s_drdt.png' % params['timestamp'])
# plt.show()
if __name__ == '__main__':
import argparse, json
parser = argparse.ArgumentParser()
parser.add_argument('-j','--jsonfile', help='parameters file', required=True)
parser.add_argument('--onscreen', action=argparse.BooleanOptionalAction)
args = parser.parse_args()
with open(args.jsonfile) as jsonFile:
params = json.load(jsonFile)
visualize(params, DIR=os.path.dirname(args.jsonfile), offscreen=(not args.onscreen))