fluidparticles / MigFlow · Commits

Commit 02f3a33f, authored Nov 29, 2021 by Jonathan Lambrechts

    p2 in same file + paraview plugin

parent 5d5c6f69
Pipeline #9979 failed with stages in 2 minutes and 41 seconds
Changes 8 · Pipelines 1
fluid/fluid_problem.c
-/*
- * MigFlow - Copyright (C) <2010-2020>
+/* MigFlow - Copyright (C) <2010-2020>
  * <Universite catholique de Louvain (UCL), Belgium
  * Universite de Montpellier, France>
  *
...
...
@@ -293,6 +292,7 @@ void fluid_problem_compute_stab_parameters(FluidProblem *problem, double dt) {
        + (problem->advection ? pow2(2*normu/h) : 0.)
        + pow2(4*nu/pow2(h)));
    problem->tauc[iel] = problem->advection ? h*normu*fmin(h*normu/(6*nu), 0.5) : 0.;
    problem->taup[iel] = problem->tauc[iel] = 0;
  }
}
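For readers skimming the hunk above: the visible terms match standard residual-based stabilization parameters. A minimal Python sketch of the same expressions follows; the helper names and the omission of the leading term (which sits outside the displayed context) are assumptions, not part of the commit.

def pow2(x):
    return x * x

def stab_terms(normu, h, nu, advection=True):
    # Mirrors only what the hunk shows: an advective term (2*|u|/h)^2 gated by
    # `advection`, plus a viscous term (4*nu/h^2)^2, added to an unseen first term.
    tau_tail = (pow2(2 * normu / h) if advection else 0.0) + pow2(4 * nu / pow2(h))
    # tauc exactly as on the following line of the hunk.
    tauc = h * normu * min(h * normu / (6 * nu), 0.5) if advection else 0.0
    return tau_tail, tauc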
...
...
@@ -2090,4 +2090,4 @@ void fluid_problem_local_map(FluidProblem *p, int *idx) {
    int localsize = p->fields->local_size;
    fe_fields_local_map(p->fields, p->mesh, iel, &(idx[iel*localsize]));
  }
-}
\ No newline at end of file
+}
paraview_utils/Makefile
...
...
@@ -19,6 +19,6 @@
# along with this program (see COPYING and COPYING.LESSER files). If not,
# see <http://www.gnu.org/licenses/>.
-MigFlow.xml: python_filter_generator.py contacts-extract.py ids-extract.py concentration-dg.py names.py stress-tensor.py
-	python python_filter_generator.py $@ contacts-extract.py ids-extract.py concentration-dg.py names.py stress-tensor.py
+MigFlow.xml: python_filter_generator.py contacts-extract.py ids-extract.py concentration-dg.py names.py stress-tensor.py p2.py
+	python python_filter_generator.py $@ contacts-extract.py ids-extract.py concentration-dg.py names.py stress-tensor.py p2.py
paraview_utils/MigFlow.xml
...
...
@@ -63,7 +63,7 @@
          name="Script"
          command="SetScript"
          number_of_elements="1"
          default_values="import numpy as np

in_particles,in_bnd,in_periodic,in_contacts = list(inputs[0])

n_disks = np.count_nonzero(in_bnd.CellTypes == 1)
n_segments = np.count_nonzero(in_bnd.CellTypes == 7)+np.count_nonzero(in_bnd.CellTypes==3)
n_triangles = np.count_nonzero(in_bnd.CellTypes == 13)+np.count_nonzero(in_bnd.CellTypes==5)

vals = []
vals_t = []
vals_s = [] if ("particle_particle_s" in in_contacts.FieldData.keys()) else None
points = []

#particle-particle contacts
vals.append(in_contacts.FieldData["particle_particle"])
vals_t.append(in_contacts.FieldData["particle_particle_t"])
if vals_s is not None:
    vals_s.append(in_contacts.FieldData["particle_particle_s"])
contacts = in_contacts.FieldData["particle_particle_idx"]
points.append(in_particles.Points[contacts,:])
#particle-disk contacts
if n_disks :
    disks = in_bnd.Cells[1:2*n_disks:2]
    vals.append(in_contacts.FieldData["particle_disk"])
    vals_t.append(in_contacts.FieldData["particle_disk_t"])
    if vals_s is not None:
        vals_s.append(in_contacts.FieldData["particle_disk_s"])
    contacts = in_contacts.FieldData["particle_disk_idx"]
    points_d = np.ndarray((contacts.shape[0],2,3))
    points_d[:,0,:] = in_particles.Points[contacts[:,1],:]
    points_d[:,1,:] = in_bnd.Points[disks[contacts[:,0]],:]
    points.append(points_d)

#particle-segments contacts
if n_segments :
    segments = in_bnd.Cells[2*n_disks:2*n_disks+3*n_segments].reshape([-1,3])[:,1:]
    vals.append(in_contacts.FieldData["particle_segment"])
    vals_t.append(in_contacts.FieldData["particle_segment_t"])
    if vals_s is not None:
        vals_s.append(in_contacts.FieldData["particle_segment_s"])
    contacts = in_contacts.FieldData["particle_segment_idx"]
    points_s = np.ndarray((contacts.shape[0],2,3))
    points_s[:,0,:] = in_particles.Points[contacts[:,1],:]
    s = in_bnd.Points[segments[contacts[:,0],:]]
    t = s[:,1,:]-s[:,0,:]
    t /= ((t[:,0]**2+t[:,1]**2+t[:,2]**2)**0.5)[:,None]
    d = points_s[:,0,:]-s[:,0,:]
    l = d[:,0]*t[:,0]+d[:,1]*t[:,1]+d[:,2]*t[:,2]
    points_s[:,1,:] = s[:,0,:]+l[:,None]*t
    points.append(points_s)

#particle-triangle contacts
if n_triangles :
    triangles = in_bnd.Cells[2*n_disks+3*n_segments:2*n_disks+3*n_segments+4*n_triangles].reshape([-1,4])[:,1:]
    vals.append(in_contacts.FieldData["particle_triangle"])
    vals_t.append(in_contacts.FieldData["particle_triangle_t"])
    if vals_s is not None:
        vals_s.append(in_contacts.FieldData["particle_triangle_s"])
    contacts = in_contacts.FieldData["particle_triangle_idx"]
    points_t = np.ndarray((contacts.shape[0],2,3))
    points_t[:,0,:] = in_particles.Points[contacts[:,1],:]
    points_t[:,1,:] = np.mean(in_bnd.Points[triangles[contacts[:,0],:]],axis=1)
    points.append(points_t)

#merge everything
points = np.vstack(points)
vals = np.hstack(vals)
vals_t = np.hstack(vals_t)
if vals_s is not None :
    vals_s = np.hstack(vals_s)
#generate tubes
t = points[:,0,:]-points[:,1,:]
t /= np.sqrt(t[:,0,None]**2+t[:,1,None]**2+t[:,2,None]**2)
ez = np.where(np.abs(t[:,1,None])>np.abs(t[:,2,None]),np.array([[0,0,1]]),np.array([[0,1,0]]))
n1 = np.cross(t,ez)
n2 = np.cross(t,n1)
alphas = np.arange(0,2*np.pi, 2*np.pi/nf)
r = rf*vals**(1./2)*1

opoints = points[:,None,:,:] \
    +n1[:,None,None,:]*r[:,None,None,None]*np.sin(-alphas)[None,:,None,None] \
    +n2[:,None,None,:]*r[:,None,None,None]*np.cos(-alphas)[None,:,None,None]
output.Points = opoints.reshape(-1,3)
output.PointData.append(np.repeat(vals,2*nf),"Reaction")
output.PointData.append(np.repeat(vals_t,2*nf),"Reaction_t")
if vals_s is not None :
    output.PointData.append(np.repeat(vals_s,2*nf),"Reaction_s")
n = points.shape[0]
pattern = np.ndarray([nf,4], np.int32)
for i in range(nf) :
    j = (i+1)%nf
    pattern[i,:] = (i*2,i*2+1,j*2+1,j*2)
types = np.full([n*nf],9,np.uint8)
locations = np.arange(0,5*n*nf,5,np.uint32)
cells = np.ndarray([n,nf,5],np.uint32)
cells[:,:,0] = 4
cells[:,:,1:] = np.arange(0,n*nf*2,nf*2,np.uint32)[:,None,None]+pattern[None,:,:]
output.SetCells(types, locations, cells.reshape([-1]))
"
          default_values="import numpy as np
in_particles,in_bnd,in_periodic,in_contacts = list(inputs[0])

n_disks = np.count_nonzero(in_bnd.CellTypes == 1) + np.count_nonzero(in_periodic.CellTypes == 1)
n_segments = (np.count_nonzero(in_bnd.CellTypes == 7)+np.count_nonzero(in_bnd.CellTypes==3)
    + np.count_nonzero(in_periodic.CellTypes == 7) + np.count_nonzero(in_periodic.CellTypes == 3))
n_triangles = (np.count_nonzero(in_bnd.CellTypes == 13)+np.count_nonzero(in_bnd.CellTypes==5)
    + np.count_nonzero(in_periodic.CellTypes == 13)+np.count_nonzero(in_periodic.CellTypes==5))

vals = []
vals_t = []
vals_s = [] if ("particle_particle_s" in in_contacts.FieldData.keys()) else None
points = []

#particle-particle contacts
vals.append(in_contacts.FieldData["particle_particle"])
vals_t.append(in_contacts.FieldData["particle_particle_t"])
if vals_s is not None:
    vals_s.append(in_contacts.FieldData["particle_particle_s"])
contacts = in_contacts.FieldData["particle_particle_idx"]
points.append(in_particles.Points[contacts,:])
#particle-disk contacts
if n_disks :
    disks = in_bnd.Cells[1:2*n_disks:2]
    vals.append(in_contacts.FieldData["particle_disk"])
    vals_t.append(in_contacts.FieldData["particle_disk_t"])
    if vals_s is not None:
        vals_s.append(in_contacts.FieldData["particle_disk_s"])
    contacts = in_contacts.FieldData["particle_disk_idx"]
    points_d = np.ndarray((contacts.shape[0],2,3))
    points_d[:,0,:] = in_particles.Points[contacts[:,1],:]
    points_d[:,1,:] = in_bnd.Points[disks[contacts[:,0]],:]
    points.append(points_d)

#particle-segments contacts
if n_segments :
    segments = in_bnd.Cells[2*n_disks:2*n_disks+3*n_segments].reshape([-1,3])[:,1:]
    vals.append(in_contacts.FieldData["particle_segment"])
    vals_t.append(in_contacts.FieldData["particle_segment_t"])
    if vals_s is not None:
        vals_s.append(in_contacts.FieldData["particle_segment_s"])
    contacts = in_contacts.FieldData["particle_segment_idx"]
    points_s = np.ndarray((contacts.shape[0],2,3))
    points_s[:,0,:] = in_particles.Points[contacts[:,1],:]
    s = in_bnd.Points[segments[contacts[:,0],:]]
    t = s[:,1,:]-s[:,0,:]
    t /= ((t[:,0]**2+t[:,1]**2+t[:,2]**2)**0.5)[:,None]
    d = points_s[:,0,:]-s[:,0,:]
    l = d[:,0]*t[:,0]+d[:,1]*t[:,1]+d[:,2]*t[:,2]
    points_s[:,1,:] = s[:,0,:]+l[:,None]*t
    points.append(points_s)

#particle-triangle contacts
if n_triangles :
    triangles = in_bnd.Cells[2*n_disks+3*n_segments:2*n_disks+3*n_segments+4*n_triangles].reshape([-1,4])[:,1:]
    vals.append(in_contacts.FieldData["particle_triangle"])
    vals_t.append(in_contacts.FieldData["particle_triangle_t"])
    if vals_s is not None:
        vals_s.append(in_contacts.FieldData["particle_triangle_s"])
    contacts = in_contacts.FieldData["particle_triangle_idx"]
    points_t = np.ndarray((contacts.shape[0],2,3))
    points_t[:,0,:] = in_particles.Points[contacts[:,1],:]
    d0 = in_bnd.Points[triangles[contacts[:,0],:]][:,1,:] - in_bnd.Points[triangles[contacts[:,0],:]][:,0,:]
    d1 = in_bnd.Points[triangles[contacts[:,0],:]][:,2,:] - in_bnd.Points[triangles[contacts[:,0],:]][:,0,:]
    N = np.cross(d0,d1)
    N /= np.linalg.norm(N,axis=1)[:,newaxis]
    dd = in_bnd.Points[triangles[contacts[:,0],:]][:,0,:] - in_particles.Points[contacts[:,1],:]
    dist = einsum('ij,ij->i', N, dd)
    points_t[:,1,:] = in_particles.Points[contacts[:,1],:] + N*dist[:,newaxis]
    points.append(points_t)

#merge everything
points = np.vstack(points)
vals = np.hstack(vals)
vals_t = np.hstack(vals_t)
if vals_s is not None :
    vals_s = np.hstack(vals_s)
#generate tubes
t = points[:,0,:]-points[:,1,:]
t /= np.sqrt(t[:,0,None]**2+t[:,1,None]**2+t[:,2,None]**2)
ez = np.where(np.abs(t[:,1,None])>np.abs(t[:,2,None]),np.array([[0,0,1]]),np.array([[0,1,0]]))
n1 = np.cross(t,ez)
n2 = np.cross(t,n1)
alphas = np.arange(0,2*np.pi, 2*np.pi/nf)
r = rf*vals**(1./2)*1

opoints = points[:,None,:,:] \
    +n1[:,None,None,:]*r[:,None,None,None]*np.sin(-alphas)[None,:,None,None] \
    +n2[:,None,None,:]*r[:,None,None,None]*np.cos(-alphas)[None,:,None,None]
output.Points = opoints.reshape(-1,3)
output.PointData.append(np.repeat(vals,2*nf),"Reaction")
output.PointData.append(np.repeat(vals_t,2*nf),"Reaction_t")
if vals_s is not None :
    output.PointData.append(np.repeat(vals_s,2*nf),"Reaction_s")
n = points.shape[0]
pattern = np.ndarray([nf,4], np.int)
for i in range(nf) :
    j = (i+1)%nf
    pattern[i,:] = (i*2,i*2+1,j*2+1,j*2)
types = np.full([n*nf],9,np.uint8)
locations = np.arange(0,5*n*nf,5,np.uint32)
cells = np.ndarray([n,nf,5],np.uint32)
cells[:,:,0] = 4
cells[:,:,1:] = np.arange(0,n*nf*2,nf*2,np.uint32)[:,None,None]+pattern[None,:,:]
output.SetCells(types, locations, cells.reshape([-1]))
"
          panel_visibility="advanced">
          <Hints>
            <Widget type="multi_line"/>
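Besides also counting periodic-boundary cells, the substantive difference between the old and new scripts above is the particle-triangle contact point: the old script used the triangle centroid, the new one projects the particle onto the triangle's plane. A small self-contained sketch of that projection (hypothetical helper name, scalar version of the vectorized code above):

import numpy as np

def project_on_triangle_plane(p, a, b, c):
    # Unit normal of triangle (a, b, c), then shift p along it by the signed
    # distance to the plane, as the new particle-triangle branch does in bulk.
    n = np.cross(b - a, c - a)
    n = n / np.linalg.norm(n)
    return p + n * np.dot(n, a - p)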
...
...
@@ -339,6 +339,74 @@
      </StringVectorProperty>
      <Hints>
        <ShowInMenu category="MigFlow"/>
      </Hints>
    </SourceProxy>
  </ProxyGroup>
  <ProxyGroup name="filters">
    <SourceProxy name="MFP2" class="vtkPythonProgrammableFilter" label="MigFlow p2 velocity">
      <Documentation
        long_help="Generate p2 representation of velocity from field data"
        short_help="Generate p2 representation of velocity from field data">
      </Documentation>
      <InputProperty name="Input" command="SetInputConnection">
        <ProxyGroupDomain name="groups">
          <Group name="sources"/>
          <Group name="filters"/>
        </ProxyGroupDomain>
        <DataTypeDomain name="input_type">
          <DataType value="vtkUnstructuredGrid"/>
        </DataTypeDomain>
      </InputProperty>
      <IntVectorProperty
        name="SubdivisionLevel"
        label="SubdivisionLevel"
        initial_string="SubdivisionLevel"
        command="SetParameter"
        animateable="1"
        default_values="4"
        number_of_elements="1">
        <Documentation></Documentation>
      </IntVectorProperty>
      <!-- Output data type: "vtkUnstructuredGrid" -->
      <IntVectorProperty
        command="SetOutputDataSetType"
        default_values="4"
        name="OutputDataSetType"
        number_of_elements="1"
        panel_visibility="never">
        <Documentation>
          The value of this property determines the dataset type
          for the output of the programmable filter.
        </Documentation>
      </IntVectorProperty>
      <StringVectorProperty
        name="Script"
        command="SetScript"
        number_of_elements="1"
        default_values="import numpy as np
ipt = inputs[0].Points
nel = inputs[0].CellTypes.shape[0]
mapping = inputs[0].CellData["velocity_map"]
v = inputs[0].FieldData["velocity"]
f = np.array([[[0,0],[1,0],[0,1]]])
cut = np.array([
    [[1,0,0],[0.5,0.5,0],[0.5,0,0.5]],
    [[0,1,0],[0,0.5,0.5],[0.5,0.5,0]],
    [[0,0,1],[0.5,0,0.5],[0,0.5,0.5]],
    [[0.5,0.5,0],[0,0.5,0.5],[0.5,0,0.5]]])

def split(f):
    return np.einsum("cnm, emf -> ecnf", cut, f).reshape(-1,f.shape[1],f.shape[2])
for i in range(SubdivisionLevel):
    f = split(f)
xi = f[:,:,0]
eta = f[:,:,1]
fp1 = np.stack([1-xi-eta,xi,eta],axis=1)
fp2 = np.stack([1-3*(xi+eta)+2*(xi+eta)*(xi+eta), xi*(2*xi-1), eta*(2*eta-1), 4*xi*(1-xi-eta), 4*xi*eta, 4*eta*(1-xi-eta)], axis=1)
x = np.einsum("end, snm -> esmd", ipt[mapping[:,:3]], fp1).reshape(-1,3,3)
v = np.einsum("end, snm -> esmd", v[mapping], fp2).reshape(-1,3,2)
output.Points = x.reshape(-1,3)
ne = v.shape[0]
tri = np.zeros((ne,4))
tri[:,0] = 3
tri[:,1:] = np.arange(0,ne*3).reshape(-1,3)
loc = np.arange(0, 4*(ne+1), ne)
output.SetCells(np.full(ne,5,np.ubyte),loc,tri)
output.PointData.append(v.reshape(-1,2),"velocity")
"
        panel_visibility="advanced">
        <Hints>
          <Widget type="multi_line"/>
        </Hints>
        <Documentation>
          This property contains the text of a python program that
          the programmable source runs.
        </Documentation>
      </StringVectorProperty>
      <Hints>
        <ShowInMenu category="MigFlow"/>
      </Hints>
...
...
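Once MigFlow.xml has been regenerated with the MFP2 proxy above, the filter can be applied from a pvpython session roughly as sketched below; the reader type and file name are assumptions, not taken from this commit.

from paraview.simple import LoadPlugin, XMLUnstructuredGridReader

# Register the proxies declared in the generated XML, including MFP2.
LoadPlugin("MigFlow.xml", remote=False, ns=globals())

# Hypothetical fluid output file; adapt to the actual files written by python/fluid.py.
fluid = XMLUnstructuredGridReader(FileName=["output/fluid_00000.vtu"])

p2 = MFP2(Input=fluid)      # name injected into globals() by LoadPlugin
p2.SubdivisionLevel = 4     # default declared in the plugin XML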
paraview_utils/p2.py (new file, mode 100644)
# MigFlow - Copyright (C) <2010-2020>
# <Universite catholique de Louvain (UCL), Belgium
# Universite de Montpellier, France>
#
# List of the contributors to the development of MigFlow: see AUTHORS file.
# Description and complete License: see LICENSE file.
#
# This program (MigFlow) is free software:
# you can redistribute it and/or modify it under the terms of the GNU Lesser General
# Public License as published by the Free Software Foundation, either version
# 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program (see COPYING and COPYING.LESSER files). If not,
# see <http://www.gnu.org/licenses/>.
Name = 'MFP2'
Label = 'MigFlow p2 velocity'
Help = 'Generate p2 representation of velocity from field data'

NumberOfInput = 1
InputDataType = 'vtkUnstructuredGrid'
OutputDataType = 'vtkUnstructuredGrid'
ExtraXml = ''

Properties = dict(SubdivisionLevel=4)

def RequestData():
    import numpy as np
    ipt = inputs[0].Points
    nel = inputs[0].CellTypes.shape[0]
    mapping = inputs[0].CellData["velocity_map"]
    v = inputs[0].FieldData["velocity"]
    f = np.array([[[0,0],[1,0],[0,1]]])
    cut = np.array([
        [[1,0,0],[0.5,0.5,0],[0.5,0,0.5]],
        [[0,1,0],[0,0.5,0.5],[0.5,0.5,0]],
        [[0,0,1],[0.5,0,0.5],[0,0.5,0.5]],
        [[0.5,0.5,0],[0,0.5,0.5],[0.5,0,0.5]]])
    def split(f):
        return np.einsum("cnm, emf -> ecnf", cut, f).reshape(-1,f.shape[1],f.shape[2])
    for i in range(SubdivisionLevel):
        f = split(f)
    xi = f[:,:,0]
    eta = f[:,:,1]
    fp1 = np.stack([1-xi-eta,xi,eta],axis=1)
    fp2 = np.stack([1-3*(xi+eta)+2*(xi+eta)*(xi+eta), xi*(2*xi-1), eta*(2*eta-1),
                    4*xi*(1-xi-eta), 4*xi*eta, 4*eta*(1-xi-eta)], axis=1)
    x = np.einsum("end, snm -> esmd", ipt[mapping[:,:3]], fp1).reshape(-1,3,3)
    v = np.einsum("end, snm -> esmd", v[mapping], fp2).reshape(-1,3,2)
    output.Points = x.reshape(-1,3)
    ne = v.shape[0]
    tri = np.zeros((ne,4))
    tri[:,0] = 3
    tri[:,1:] = np.arange(0,ne*3).reshape(-1,3)
    loc = np.arange(0, 4*(ne+1), ne)
    output.SetCells(np.full(ne,5,np.ubyte),loc,tri)
    output.PointData.append(v.reshape(-1,2),"velocity")
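As a quick cross-check of the quadratic basis used in RequestData, each shape function should be one at its own node and zero at the others. The sketch below assumes the usual vertex-then-edge-midpoint node ordering, which the commit does not state explicitly.

import numpy as np

# Reference-triangle P2 nodes: three vertices, then midpoints of edges
# (0-1), (1-2), (2-0); ordering assumed to match the fp2 stack above.
nodes = np.array([[0,0],[1,0],[0,1],[0.5,0],[0.5,0.5],[0,0.5]])
xi, eta = nodes[:,0], nodes[:,1]
fp2 = np.stack([1-3*(xi+eta)+2*(xi+eta)*(xi+eta), xi*(2*xi-1), eta*(2*eta-1),
                4*xi*(1-xi-eta), 4*xi*eta, 4*eta*(1-xi-eta)], axis=0)
print(np.allclose(fp2, np.eye(6)))   # True: Lagrange property at the six nodes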
python/fluid.py
...
...
@@ -375,29 +375,14 @@ class FluidProblem :
        connectivity = self.elements()
        types = np.repeat([5 if self._dim == 2 else 10], connectivity.shape[0])
        offsets = np.cumsum(np.repeat([self._dim+1], connectivity.shape[0])).astype(np.int32)
        VTK.write(output_dir+"/fluid", it, t, (types, connectivity, offsets), self.coordinates(), data, field_data, cell_data)
        v = self.velocity()
        if self._dim == 2 :
            v = np.insert(v, self._dim, 0, axis=1)
        data = [("velocity", v)]
        if np.max(self.order_fields) == 1 :
            connectivity = self.elements()
            types = np.repeat([5 if self._dim == 2 else 10], connectivity.shape[0])
            offsets = np.cumsum(np.repeat([self._dim+1], connectivity.shape[0])).astype(np.int32)
            VTK.write(output_dir+"/velocity_p1", it, t, (types, connectivity, offsets), self.coordinates(), data, None, None)
        elif np.max(self.order_fields) == 2 :
            connectivity = self.set_numbering([2]).reshape(self.n_elements(), -1)
            types = np.repeat([22 if self._dim == 2 else 24], connectivity.shape[0])
            offsets = np.cumsum(np.repeat([connectivity.shape[1]], connectivity.shape[0])).astype(np.int32)
            xp2 = self.coordinates_p2()
            VTK.write(output_dir+"/velocity_p2", it, t, (types, connectivity, offsets), xp2, data, None, None)
        isp2p1 = np.max(self.order_fields) == 2
        if isp2p1 :
            field_data.append((b"velocity", self.velocity()))
            idx = self.set_numbering([2])
            cell_data.append(("velocity_map", idx.reshape(self.n_elements(), -1)))
        else :
            raise ValueError("Order unavailable !")
        data.append(("velocity", self.velocity()))
        VTK.write(output_dir+"/fluid", it, t, (types, connectivity, offsets), self.coordinates(), data, field_data, cell_data)

    def read_vtk(self, dirname, i):
        """Reads output file to reload computation.
...
...
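In short, the p2 output path above now stores the full quadratic velocity once as field data together with a per-element "velocity_map" in cell data, which is exactly what the MFP2 filter indexes into. A sketch of the intended lookup; the array sizes are placeholders for a 2D P2 triangle mesh, not values from the commit.

import numpy as np

# Hypothetical stand-ins for the arrays written by this commit.
velocity = np.zeros((100, 2))                      # field data "velocity": one row per P2 node
velocity_map = np.zeros((40, 6), dtype=np.int32)   # cell data "velocity_map": 6 P2 node indices per triangle

# The MFP2 filter recovers per-element nodal velocities with a single fancy index,
# the v[mapping] step in its script.
v_per_element = velocity[velocity_map]             # shape (40, 6, 2)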
python/petsc4pylsys.py
...
...
@@ -106,4 +106,4 @@ class LinearSystemBAIJ :
        else :
            return x[:self.csr.ndof]
-LinearSystem = LinearSystemAIJ
\ No newline at end of file
+LinearSystem = LinearSystemAIJ
validation/cavity/cavity.py
...
...
@@ -68,7 +68,7 @@ class Poiseuille(unittest.TestCase) :
#Object fluid creation + Boundary condition of the fluid (field 0 is horizontal velocity; field 1 is vertical velocity; field 2 is pressure)
#Format: strong_boundaries = [(Boundary tag, Fluid field, Value)
-        fluid = mbfluid.FluidProblem(2, g, nu*rho, rho, solver="petsc4py", solver_options="pc_type lu", order=[2, 2, 1])
+        fluid = mbfluid.FluidProblem(2, g, nu*rho, rho, solver="petsc4py", solver_options="-pc_type lu", order=[2, 2, 1])
        fluid.set_mean_pressure(10000)
        fluid.load_msh("mesh.msh")
...
...
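The only change in this test and in validation/poiseuille/poiseuille.py below (apart from the mesh size there) is the solver_options string gaining a leading dash, matching PETSc's command-line option syntax. A small petsc4py sketch of why the dash matters, assuming petsc4py is what ultimately parses the string, as solver="petsc4py" suggests:

from petsc4py import PETSc

opts = PETSc.Options()
opts.insertString("-pc_type lu")            # dash-prefixed, as on the corrected line
print(opts.getString("pc_type", "unset"))   # -> "lu"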
validation/poiseuille/poiseuille.py
...
...
@@ -40,7 +40,7 @@ class Poiseuille(unittest.TestCase) :
        if not os.path.isdir(outputdir) :
            os.makedirs(outputdir)
-        subprocess.call(["gmsh", "-2", "mesh.geo", "-clscale", "2"])
+        subprocess.call(["gmsh", "-2", "mesh.geo", "-clscale", "20"])
        t = 0
        ii = 0
...
...
@@ -64,7 +64,7 @@ class Poiseuille(unittest.TestCase) :
        outf = 1 # number of iterations between output files
        #Object fluid creation + Boundary condition of the fluid (field 0 is horizontal velocity; field 1 is vertical velocity; field 2 is pressure)
-        fluid = mbfluid.FluidProblem(2, g, nu*rho, rho, order=[2, 2, 1], solver_options="pc_type lu")
+        fluid = mbfluid.FluidProblem(2, g, nu*rho, rho, order=[2, 2, 1], solver_options="-pc_type lu")
        fluid.load_msh("mesh.msh")
        fluid.set_open_boundary("LeftUp", velocity=[lambda x : 1/(20*mu)*x[:,1]*(1-x[:,1]), 0])
        fluid.set_open_boundary("LeftDown", velocity=[lambda x : 1/(20*mu)*x[:,1]*(1-x[:,1]), 0])
...
...