diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 395fe74c6..2f60ee686 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -179,8 +179,12 @@ jobs:
sudo echo "deb https://apt.repos.intel.com/oneapi all main" | sudo tee /etc/apt/sources.list.d/oneAPI.list
sudo apt-get update
echo ""
- echo "packages intel oneapi:"
- sudo apt-get install -y intel-oneapi-compiler-fortran intel-oneapi-compiler-dpcpp-cpp-and-cpp-classic intel-oneapi-mpi intel-oneapi-mpi-devel
+ # info
+ #sudo -E apt-cache pkgnames intel | grep intel-oneapi
+ #echo ""
+ echo "installing packages intel oneapi:"
+ sudo apt-get install -y intel-oneapi-compiler-fortran-2023.2.2 intel-oneapi-compiler-dpcpp-cpp-and-cpp-classic-2023.2.2 intel-oneapi-mpi intel-oneapi-mpi-devel
+ echo ""
- name: compiler infos
run: |
@@ -188,22 +192,54 @@ jobs:
source /opt/intel/oneapi/setvars.sh
echo ""
echo "compiler versions:"
+ echo "icx --version"
+ which icx
+ icx --version
+ echo ""
echo "icc --version"
+ which icc
icc --version
+ echo ""
+ echo "ifx --version"
+ which ifx
+ ifx --version
+ echo ""
echo "ifort --version"
+ which ifort
ifort --version
+ echo ""
echo "mpiifort --version"
+ which mpiifort
mpiifort --version
+ echo ""
echo "mpif90 --version"
+ which mpif90
mpif90 --version
echo ""
+ # infos
+ which ifort
+ which icc
+ which mpiifort
+ echo "mpirun:"
+ which mpirun
+ echo ""
+ # intel setup for running tests
+ echo ""
+ echo "replacing mpif90 with mpiifort link:"
+ sudo ln -sf $(which mpiifort) $(which mpif90)
+ mpif90 --version
+ echo ""
+ # debug
+ #export I_MPI_DEBUG=5,pid,host
+ #export I_MPI_LIBRARY_KIND=debug
+      # remove -ftrapuv flag which leads to issues when running tests
+ sed -i "s/-ftrapuv//g" flags.guess
+ # environment setting
export TERM=xterm
+ # export info
echo "exports:"
export
echo ""
- which ifort
- which icc
- which mpiifort
echo ""
printenv >> $GITHUB_ENV
echo "CXX=icpc" >> $GITHUB_ENV
@@ -212,7 +248,8 @@ jobs:
echo ""
- name: configure serial debug
- run: ./configure --enable-debug --without-mpi FC=ifort CC=icc
+ run: |
+ ./configure --enable-debug --without-mpi FC=ifort CC=icc
- name: make serial debug
run: |
@@ -221,7 +258,8 @@ jobs:
make clean
- name: configure serial
- run: ./configure --without-mpi FC=ifort CC=icc
+ run: |
+ ./configure --without-mpi FC=ifort CC=icc
- name: make serial
run: |
@@ -229,7 +267,8 @@ jobs:
make clean
- name: configure parallel debug
- run: ./configure --enable-debug --with-mpi FC=ifort CC=icc MPIFC=mpiifort MPI_INC="${I_MPI_ROOT}/include"
+ run: |
+ ./configure --enable-debug --with-mpi FC=ifort CC=icc MPIFC=mpiifort MPI_INC="${I_MPI_ROOT}/include"
- name: make parallel debug
run: |
@@ -238,16 +277,18 @@ jobs:
make clean
- name: configure parallel
- run: ./configure --with-mpi FC=ifort CC=icc MPIFC=mpiifort MPI_INC="${I_MPI_ROOT}/include"
+ run: |
+ ./configure --with-mpi FC=ifort CC=icc MPIFC=mpiifort MPI_INC="${I_MPI_ROOT}/include"
- name: make parallel
run: |
make -j2 all
make clean
- # check: fails due to intel MPI issue with mpif90 using gfortran as default on virtual nodes
- #- name: make tests
- # run: make tests
+    # note: tests fail when compiled with the -ftrapuv flag due to an MPI issue on virtual nodes (flag removed above)
+ - name: make tests
+ run: |
+ make tests
linuxTest_0:
name: Test run example 0 - make tests
diff --git a/EXAMPLES/applications/coffee_mug_with_fluid_inside/run_boundary_definition.py b/EXAMPLES/applications/coffee_mug_with_fluid_inside/run_boundary_definition.py
index 637b64686..d5c33af56 100755
--- a/EXAMPLES/applications/coffee_mug_with_fluid_inside/run_boundary_definition.py
+++ b/EXAMPLES/applications/coffee_mug_with_fluid_inside/run_boundary_definition.py
@@ -28,7 +28,9 @@
###### This is boundary_definition.py of GEOCUBIT
#..... which extracts the bounding faces and defines them into blocks
-reload(boundary_definition)
+import importlib
+importlib.reload(boundary_definition)
+
boundary_definition.entities=['face']
boundary_definition.define_bc(boundary_definition.entities,parallel=True)
diff --git a/EXAMPLES/applications/coffee_mug_with_fluid_inside/run_cubit2specfem3d.py b/EXAMPLES/applications/coffee_mug_with_fluid_inside/run_cubit2specfem3d.py
index 27fbde649..32f6b3c95 100755
--- a/EXAMPLES/applications/coffee_mug_with_fluid_inside/run_cubit2specfem3d.py
+++ b/EXAMPLES/applications/coffee_mug_with_fluid_inside/run_cubit2specfem3d.py
@@ -27,7 +27,8 @@
###### This is boundary_definition.py of GEOCUBIT
#..... which extracts the bounding faces and defines them into blocks
-#reload(boundary_definition)
+#import importlib
+#importlib.reload(boundary_definition)
#boundary_definition.entities=['face']
#boundary_definition.define_bc(boundary_definition.entities,parallel=True)
@@ -35,7 +36,9 @@
#### Export to SESAME format using cubit2specfem3d.py of GEOCUBIT
os.system('mkdir -p MESH')
-reload(cubit2specfem3d)
+import importlib
+importlib.reload(cubit2specfem3d)
+
cubit2specfem3d.export2SPECFEM3D('MESH')
# all files needed by SCOTCH are now in directory MESH
diff --git a/doc/USER_MANUAL/manual_SPECFEM3D_Cartesian.pdf b/doc/USER_MANUAL/manual_SPECFEM3D_Cartesian.pdf
index 53b6dc72a..3576fa59e 100644
Binary files a/doc/USER_MANUAL/manual_SPECFEM3D_Cartesian.pdf and b/doc/USER_MANUAL/manual_SPECFEM3D_Cartesian.pdf differ
diff --git a/flags.guess b/flags.guess
index 3714edeee..01baf0dc8 100644
--- a/flags.guess
+++ b/flags.guess
@@ -92,12 +92,11 @@ case $my_FC in
#
# warnings about external function calls can be suppressed by "-warn all,noexternal" for version > 2018
# optimization report: "-vec-report0" is old and will be replaced by "-qopt-report0 -qopt-report-phase=vec" for v >=15.0
- DEF_FFLAGS="-xHost -fpe0 -ftz -assume buffered_io -assume byterecl -align sequence -std08 -diag-disable 6477 -implicitnone -gen-interfaces -warn all" # -mcmodel=medium -shared-intel
+ DEF_FFLAGS="-xHost -fpe0 -ftz -assume buffered_io -assume byterecl -align sequence -std08 -diag-disable 6477 -implicitnone -gen-interfaces -warn all,noexternal" # -mcmodel=medium -shared-intel
OPT_FFLAGS="-O3 -check nobounds"
DEBUG_FFLAGS="-check all -debug -g -O0 -fp-stack-check -traceback -ftrapuv"
# option "-openmp" is soon deprecated and replaced by "-qopenmp" for versions > 17.x
OMP_FFLAGS="-qopenmp"
- #
;;
gfortran|*/gfortran|f95|*/f95)
#
diff --git a/src/auxiliaries/combine_surf_data.f90 b/src/auxiliaries/combine_surf_data.f90
index 2d969b183..2fdb6cf75 100644
--- a/src/auxiliaries/combine_surf_data.f90
+++ b/src/auxiliaries/combine_surf_data.f90
@@ -277,7 +277,7 @@ program combine_surf_data
read(27) zstore
close(27)
- do ispec_surf=1,nspec_surf
+ do ispec_surf = 1,nspec_surf
ispec = ibelm_surf(ispec_surf)
k = 1
do j = 1, NGLLY, iny
@@ -345,7 +345,7 @@ program combine_surf_data
np = npoint * (it-1)
-! surface file
+ ! surface file
local_ibool_surf_file = trim(prname) // 'ibelm_' //trim(surfname)// '.bin'
open(unit = 28,file = trim(local_ibool_surf_file),status='old', iostat = ier, form='unformatted')
read(28) nspec_surf
@@ -354,7 +354,7 @@ program combine_surf_data
read(28) ibelm_surf
close(28)
-! ibool file
+ ! ibool file
local_ibool_file = trim(prname) // 'ibool' // '.bin'
open(unit = 28,file = trim(local_ibool_file),status='old', iostat = ier, form='unformatted')
read(28) ibool
diff --git a/src/generate_databases/create_regions_mesh.f90 b/src/generate_databases/create_regions_mesh.f90
index 32ae45ef8..808699f60 100644
--- a/src/generate_databases/create_regions_mesh.f90
+++ b/src/generate_databases/create_regions_mesh.f90
@@ -1857,7 +1857,7 @@ subroutine crm_setup_mesh_surface()
nfaces_surface = NSPEC2D_TOP
endif
-! number of surface faces for all partitions together
+ ! number of surface faces for all partitions together
call sum_all_i(nfaces_surface,nfaces_surface_glob_ext_mesh)
deallocate(ibool_interfaces_ext_mesh_dummy)
diff --git a/src/generate_databases/lts_generate_databases.F90 b/src/generate_databases/lts_generate_databases.F90
index 753b310c1..83554890f 100644
--- a/src/generate_databases/lts_generate_databases.F90
+++ b/src/generate_databases/lts_generate_databases.F90
@@ -847,12 +847,12 @@ subroutine lts_generate_databases()
do ispec = 1,NSPEC_AB
! test p_elem correctness
p_elem_counter = 0
- do ilevel=1,num_p_level
+ do ilevel = 1,num_p_level
if (p_elem(ispec,ilevel) .eqv. .true.) p_elem_counter = p_elem_counter + 1
enddo
if (p_elem_counter /= 1) then
print *, "ERROR: p_counter:",p_elem_counter
- call exit_mpi(myrank,"ASSERT(p_elem(ispec,:) only has 1 true) FAIL")
+ call exit_mpi(myrank,"ASSERT(p_elem(ispec,:) should only have 1 true) FAIL")
endif
enddo
@@ -869,9 +869,11 @@ subroutine lts_generate_databases()
do i = 1,NGLLX
iglob = ibool(i,j,k,ispec)
if (iglob_p_refine(iglob) < p) then
+ ! contains coarser nodes
boundary_elem(ispec,ilevel) = .true.
else if (iglob_p_refine(iglob) > p) then
- call exit_mpi(myrank,"ASSERT(p-elem == .true. should if contains this p-level or coarser nodes)")
+ ! finer nodes should not be possible, otherwise p_elem flag is wrongly set
+        call exit_mpi(myrank,"ASSERT(p-elem == .true. should only contain this p-level or coarser nodes) FAIL")
endif
enddo
enddo
@@ -884,12 +886,12 @@ subroutine lts_generate_databases()
do ispec = 1,NSPEC_AB
! test p_elem correctness
p_elem_counter = 0
- do ilevel=1,num_p_level
+ do ilevel = 1,num_p_level
if (boundary_elem(ispec,ilevel) .eqv. .true.) p_elem_counter = p_elem_counter + 1
enddo
if (p_elem_counter > 1) then
print *, "ERROR: p_counter:",p_elem_counter
- call exit_mpi(myrank,"ASSERT(boundary_elem(ispec,:) only has 1 true) FAIL")
+      call exit_mpi(myrank,"ASSERT(boundary_elem(ispec,:) should have at most 1 true) FAIL")
endif
enddo
@@ -972,13 +974,15 @@ subroutine lts_generate_databases()
! re-order global nodes, puts nodes of same p-level together
allocate(p_level_iglob_start(num_p_level), &
- p_level_iglob_end(num_p_level), &
- stat=ier)
+ p_level_iglob_end(num_p_level),stat=ier)
if (ier /= 0) stop 'Error allocating array p_level_iglob_start,..'
! initializes start/end index arrays
p_level_iglob_start(:) = 0
p_level_iglob_end(:) = 0
+ ! re-orders iglob values to have a contiguous range for different p-levels
+ ! (this will make the copy of wavefields for different p-levels faster as it accesses
+ ! contiguous memory blocks of the total array)
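+  ! (e.g., the level-copy statement displ_p(:,is0:ie,ilevel) = displ_p(:,is0:ie,ilevel+1)
+  !  in lts_global_step.F90 then operates on a single contiguous memory block)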
call lts_reorder_iglob_by_p_level()
! stores arrays in databases for solver
@@ -1018,22 +1022,20 @@ subroutine lts_reorder_iglob_by_p_level()
use constants, only: CUSTOM_REAL,NGLLX,NGLLY,NGLLZ,myrank
use generate_databases_par, only: NSPEC_AB,NGLOB_AB, &
- ibool,num_interfaces_ext_mesh,ibool_interfaces_ext_mesh,nibool_interfaces_ext_mesh
+ ibool
use create_regions_mesh_ext_par, only: &
xstore => xstore_unique, &
ystore => ystore_unique, &
zstore => zstore_unique
- use fault_generate_databases, only: ANY_FAULT,ANY_FAULT_IN_THIS_PROC
-
! LTS module
use lts_generate_databases_par, only: p_level_iglob_start,p_level_iglob_end,num_p_level,iglob_p_refine,p_level
implicit none
! local parameters
- integer :: ispec, iglob, iglob_new, ip, p, ier, i,j,k, iinterface
+ integer :: ispec, iglob, iglob_new, ip, p, ier, i, j, k
integer, dimension(:), allocatable :: iglob_touched
integer, dimension(:,:,:,:), allocatable :: ibool_new
integer, dimension(:), allocatable :: iglob_field_new
@@ -1044,8 +1046,6 @@ subroutine lts_reorder_iglob_by_p_level()
integer, dimension(:), allocatable :: num_p
integer, dimension(:), allocatable :: p_lookup
- integer, dimension(:,:), allocatable :: ibool_interfaces_ext_mesh_new
-
! allocates temporary arrays
allocate(num_p(num_p_level),stat=ier)
if (ier /= 0) stop 'Error allocate num_p,etc'
@@ -1070,10 +1070,6 @@ subroutine lts_reorder_iglob_by_p_level()
if (ier /= 0) stop 'Error allocating iglob_touched'
iglob_touched(:) = 0
- allocate(ibool_interfaces_ext_mesh_new(size(ibool_interfaces_ext_mesh,1),size(ibool_interfaces_ext_mesh,2)),stat=ier)
- if (ier /= 0) stop 'Error allocating ibool_interfaces_ext_mesh_new'
- ibool_interfaces_ext_mesh_new(:,:) = 0
-
allocate(p_lookup(maxval(p_level)),stat=ier)
if (ier /= 0) stop 'Error allocating p_lookup'
p_lookup(:) = 0
@@ -1209,26 +1205,15 @@ subroutine lts_reorder_iglob_by_p_level()
! copy new values into original arrays
ibool(:,:,:,:) = ibool_new(:,:,:,:)
- ! fix MPI interface
- do iinterface = 1, num_interfaces_ext_mesh
- do i = 1, nibool_interfaces_ext_mesh(iinterface)
- iglob = ibool_interfaces_ext_mesh(i,iinterface)
- ibool_interfaces_ext_mesh_new(i,iinterface) = iglob_touched(iglob)
- enddo
- enddo
- ibool_interfaces_ext_mesh(:,:) = ibool_interfaces_ext_mesh_new(:,:)
-
- ! fix fault interfaces
- if (ANY_FAULT) then
- ! re-orders global values stored in ibulk1 & ibulk2 for fault split nodes
- if (ANY_FAULT_IN_THIS_PROC) call lts_fault_reorder_ibulk(iglob_touched)
- endif
-
! tests to make sure all arrays are correct
if (ANY(iglob_touched(:) == -1)) stop 'Error: some iglobs not touched!'
if (ANY(iglob_p_refine(:) < 1)) stop 'Error: some iglobs listed as p < 1!'
if (ANY(ibool(:,:,:,:) < 1)) stop 'Error: some ibool still listed as -1!'
+ ! re-orders ibool/iglob arrays needed by solver with new iglob ordering
+ call lts_reorder_iglob_solver_arrays(iglob_touched)
+
+ ! free memory
deallocate(ibool_new)
deallocate(iglob_field_new)
deallocate(iglob_field_new_cr)
@@ -1330,6 +1315,9 @@ subroutine lts_save_databases()
character(len=MAX_STRING_LEN) :: prname
! VTK-file output for debugging
+ logical,dimension(:),allocatable :: tmp_flag
+ character(len=2) :: str_level
+ integer :: ilevel
logical,parameter :: DEBUG_VTK_OUTPUT = .false.
!#TODO: LTS database not stored yet in HDF5 / ADIOS2 format
@@ -1369,10 +1357,27 @@ subroutine lts_save_databases()
if (SAVE_MESH_FILES) then
if (DEBUG_VTK_OUTPUT) then
! p-refinements
- filename = trim(prname) // 'ispec_p_refine'
+ filename = trim(prname) // 'lts_ispec_p_refine'
call write_VTK_data_elem_i(NSPEC_AB,NGLOB_AB,xstore,ystore,zstore,ibool,ispec_p_refine,filename)
if (myrank == 0 ) then
write(IMAIN,*) ' written file: ',trim(filename)//'.vtk'
+ endif
+ ! boundary elements
+ allocate(tmp_flag(NSPEC_AB),stat=ier)
+ if (ier /= 0) stop 'Error allocating array tmp_flag'
+ do ilevel = 1,num_p_level
+ tmp_flag(:) = boundary_elem(:,ilevel)
+ ! file name
+ write(str_level,"(i2.2)") ilevel
+ filename = trim(prname) // 'lts_boundary_elem_level_' // trim(str_level)
+ call write_VTK_data_elem_l(NSPEC_AB,NGLOB_AB,xstore,ystore,zstore,ibool,tmp_flag,filename)
+ if (myrank == 0 ) then
+ write(IMAIN,*) ' written file: ',trim(filename)//'.vtk'
+ endif
+ enddo
+ deallocate(tmp_flag)
+ ! end user output
+ if (myrank == 0 ) then
write(IMAIN,*)
call flush_IMAIN()
endif
@@ -1381,6 +1386,79 @@ subroutine lts_save_databases()
end subroutine lts_save_databases
+
+!------------------------------------------------------------------------------------------------
+
+ subroutine lts_reorder_iglob_solver_arrays(iglob_touched)
+
+ use generate_databases_par, only: NGLOB_AB, &
+ iglob_is_surface_external_mesh
+
+ ! MPI interfaces
+ use generate_databases_par, only: num_interfaces_ext_mesh, &
+ nibool_interfaces_ext_mesh,ibool_interfaces_ext_mesh
+
+ use fault_generate_databases, only: ANY_FAULT,ANY_FAULT_IN_THIS_PROC
+
+ implicit none
+ ! interface
+ integer,dimension(NGLOB_AB),intent(in) :: iglob_touched
+
+ ! local parameters
+ integer :: iglob,iglob_new,ier
+ ! MPI interfaces
+ integer, dimension(:,:), allocatable :: ibool_interfaces_ext_mesh_new
+ integer :: i,iinterface
+ ! surface flags
+ logical, dimension(:), allocatable :: iglob_is_surface_external_mesh_new
+ logical :: flag_l
+
+ ! re-orders MPI interfaces
+ allocate(ibool_interfaces_ext_mesh_new(size(ibool_interfaces_ext_mesh,1),size(ibool_interfaces_ext_mesh,2)),stat=ier)
+ if (ier /= 0) stop 'Error allocating ibool_interfaces_ext_mesh_new'
+ ibool_interfaces_ext_mesh_new(:,:) = 0
+
+ ! fix MPI interface
+ do iinterface = 1, num_interfaces_ext_mesh
+ do i = 1, nibool_interfaces_ext_mesh(iinterface)
+ ! old iglob value
+ iglob = ibool_interfaces_ext_mesh(i,iinterface)
+ ! newly ordered iglob value
+ iglob_new = iglob_touched(iglob)
+ ! sets newly ordered values
+ ibool_interfaces_ext_mesh_new(i,iinterface) = iglob_new
+ enddo
+ enddo
+ ibool_interfaces_ext_mesh(:,:) = ibool_interfaces_ext_mesh_new(:,:)
+
+ ! free memory
+ deallocate(ibool_interfaces_ext_mesh_new)
+
+ ! re-orders flags on iglob array needed for surface points
+ allocate(iglob_is_surface_external_mesh_new(NGLOB_AB),stat=ier)
+ if (ier /= 0) stop 'Error allocating iglob_is_surface_external_mesh_new'
+ iglob_is_surface_external_mesh_new(:) = .false.
+
+ do iglob = 1,NGLOB_AB
+ ! gets flag for point
+ flag_l = iglob_is_surface_external_mesh(iglob)
+ ! new flag on new point location
+ iglob_new = iglob_touched(iglob)
+ iglob_is_surface_external_mesh_new(iglob_new) = flag_l
+ enddo
+ iglob_is_surface_external_mesh(:) = iglob_is_surface_external_mesh_new(:)
+
+ ! free memory
+ deallocate(iglob_is_surface_external_mesh_new)
+
+ ! fix fault interfaces
+ if (ANY_FAULT) then
+ ! re-orders global values stored in ibulk1 & ibulk2 for fault split nodes
+ if (ANY_FAULT_IN_THIS_PROC) call lts_fault_reorder_ibulk(iglob_touched)
+ endif
+
+ end subroutine lts_reorder_iglob_solver_arrays
+
!------------------------------------------------------------------------------------------------
!
! additional fault routines
diff --git a/src/generate_databases/save_arrays_solver.F90 b/src/generate_databases/save_arrays_solver.F90
index f5fe1f580..6a966bc2f 100644
--- a/src/generate_databases/save_arrays_solver.F90
+++ b/src/generate_databases/save_arrays_solver.F90
@@ -574,13 +574,13 @@ subroutine save_arrays_solver_files()
! saves free surface points
if (num_free_surface_faces > 0) then
! saves free surface interface points
- allocate( iglob_tmp(NGLLSQUARE*num_free_surface_faces),stat=ier)
+ allocate(iglob_tmp(NGLLSQUARE*num_free_surface_faces),stat=ier)
if (ier /= 0) call exit_MPI_without_rank('error allocating array 652')
if (ier /= 0) stop 'error allocating array iglob_tmp'
inum = 0
iglob_tmp(:) = 0
- do i=1,num_free_surface_faces
- do j=1,NGLLSQUARE
+ do i = 1,num_free_surface_faces
+ do j = 1,NGLLSQUARE
inum = inum+1
iglob_tmp(inum) = ibool(free_surface_ijk(1,j,i), &
free_surface_ijk(2,j,i), &
@@ -601,7 +601,7 @@ subroutine save_arrays_solver_files()
if (ACOUSTIC_SIMULATION .and. ELASTIC_SIMULATION) then
! saves points on acoustic-elastic coupling interface
num_points = NGLLSQUARE*num_coupling_ac_el_faces
- allocate( iglob_tmp(num_points),stat=ier)
+ allocate(iglob_tmp(num_points),stat=ier)
if (ier /= 0) call exit_MPI_without_rank('error allocating array 653')
if (ier /= 0) stop 'error allocating array iglob_tmp'
inum = 0
diff --git a/src/shared/detect_surface.f90 b/src/shared/detect_surface.f90
index 7eaf3a910..d3f5dabd5 100644
--- a/src/shared/detect_surface.f90
+++ b/src/shared/detect_surface.f90
@@ -79,8 +79,8 @@ subroutine detect_surface(NPROC,nglob,nspec,ibool, &
do i = 1, NGLLX
iglob = ibool(i,j,k,ispec)
if (iglob < 1 .or. iglob > nglob) then
- print *,'error valence iglob:',iglob,i,j,k,ispec
- stop 'error valence'
+ print *,'Error: valence setup found iglob:',iglob,i,j,k,ispec
+ stop 'Error: valence setup found invalid iglob index'
endif
valence(iglob) = valence(iglob) + 1
enddo
@@ -96,7 +96,6 @@ subroutine detect_surface(NPROC,nglob,nspec,ibool, &
! determines spectral elements containing surface points
do ispec = 1, nspec
-
! loops over GLL points not on edges or corners
do k = 1, NGLLZ
do j = 1, NGLLY
@@ -116,7 +115,6 @@ subroutine detect_surface(NPROC,nglob,nspec,ibool, &
enddo
enddo
enddo
-
enddo ! nspec
! safety check
diff --git a/src/specfem3D/comp_source_time_function.f90 b/src/specfem3D/comp_source_time_function.f90
index 40f9547be..ef93ea9b3 100644
--- a/src/specfem3D/comp_source_time_function.f90
+++ b/src/specfem3D/comp_source_time_function.f90
@@ -266,7 +266,7 @@ end function comp_source_time_function_d2rck
double precision function comp_source_time_function_brune(t,f0)
use constants, only: PI
-
+
implicit none
double precision, intent(in) :: t,f0
@@ -276,19 +276,19 @@ double precision function comp_source_time_function_brune(t,f0)
! Brune source-time function
! Moment function
- !if(t .lt. 0.d0)then
+ !if (t < 0.d0) then
! comp_source_time_function_brune = 0.d0
- !else
+ !else
! omegat = 2.d0*PI*f0*t
- ! comp_source_time_function_brune = 1.d0 - exp( -omegat ) * (1.0d0+omegat)
+ ! comp_source_time_function_brune = 1.d0 - exp( -omegat ) * (1.0d0+omegat)
!endif
! Moment rate function
- if(t .lt. 0.d0)then
+ if (t < 0.d0) then
comp_source_time_function_brune = 0.d0
- else
+ else
omega = 2.d0*PI*f0
omegat = omega*t
- comp_source_time_function_brune = omega*omegat*exp( -omegat )
+ comp_source_time_function_brune = omega*omegat*exp( -omegat )
endif
end function comp_source_time_function_brune
@@ -298,7 +298,7 @@ end function comp_source_time_function_brune
!
double precision function comp_source_time_function_smooth_brune(t,f0)
-
+
use constants, only: PI
implicit none
@@ -312,27 +312,27 @@ double precision function comp_source_time_function_smooth_brune(t,f0)
! Brune source-time function
! Moment function
!omegat = 2.d0*PI*f0*t
- !if (t .lt. 0.d0) then
+ !if (t < 0.d0) then
! comp_source_time_function_smooth_brune = 0.d0
- !elseif (omegat .ge. 0.d0 .and. omegat .lt. tau0)then
+ !else if (omegat >= 0.d0 .and. omegat < tau0) then
! comp_source_time_function_smooth_brune = 1.d0 - exp(-omegat)*( 1.0d0 + omegat + &
! 0.5d0*omegat**2 - (1.5d0*omegat**3)/tau0 + (1.5d0*omegat**4)/(tau0**2) - &
! (0.5d0*omegat**5)/(tau0**3) )
- !else ! (omegat .gt. tau0)then
+ !else ! (omegat > tau0) then
! comp_source_time_function_smooth_brune = 1.d0 - exp( -omegat ) *
- ! (1.0d0+omegat)
+ ! (1.0d0+omegat)
!endif
! Moment rate function
omega = 2.d0*PI*f0
omegat = omega*t
- if (t .lt. 0.d0) then
+ if (t < 0.d0) then
comp_source_time_function_smooth_brune = 0.d0
- elseif (omegat .ge. 0.d0 .and. omegat .lt. tau0)then
+ else if (omegat >= 0.d0 .and. omegat < tau0) then
comp_source_time_function_smooth_brune = ( 0.5d0*omega*(omegat**2)* &
exp(-omegat)/tau0**3 ) * ( tau0**3 - 3.d0*(tau0**2)*(omegat-3.d0) + &
3.d0*tau0*omegat*(omegat-4.d0) - (omegat**2)*(omegat-5.d0) )
- else ! (omegat .gt. tau0)then
- comp_source_time_function_smooth_brune = omega*omegat*exp( -omegat )
+ else ! (omegat > tau0) then
+ comp_source_time_function_smooth_brune = omega*omegat*exp( -omegat )
endif
end function comp_source_time_function_smooth_brune
diff --git a/src/specfem3D/locate_source.F90 b/src/specfem3D/locate_source.F90
index 107f9b994..7d346cfc8 100644
--- a/src/specfem3D/locate_source.F90
+++ b/src/specfem3D/locate_source.F90
@@ -94,6 +94,7 @@ subroutine locate_source()
logical,dimension(:),allocatable :: is_CPML_source,is_CPML_source_all
integer :: ispec,ier
+ integer :: num_output_info
logical :: is_done_sources
! LTS
@@ -119,6 +120,10 @@ subroutine locate_source()
write(IMAIN,*)
endif
call flush_IMAIN()
+
+    ! output frequency for large number of sources
+    ! chosen to print about ~50 progress lines, rounded up to the next multiple of 500
+ num_output_info = max(500,int(ceiling(ceiling(NSOURCES/50.0)/500.0)*500))
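+    ! example: for NSOURCES = 100000, ceiling(100000/50.0) = 2000 is already a multiple
+    ! of 500, so progress gets printed every 2000 sources, i.e., about 50 times in total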
endif
! allocates temporary arrays
@@ -257,8 +262,9 @@ subroutine locate_source()
! user output progress
if (myrank == 0 .and. NSOURCES > 1000) then
- if (mod(isource,500) == 0) then
- write(IMAIN,*) ' located source ',isource,'out of',NSOURCES
+ if (mod(isource,num_output_info) == 0) then
+ tCPU = wtime() - tstart
+ write(IMAIN,*) ' located source ',isource,'out of',NSOURCES,' - elapsed time: ',sngl(tCPU),'s'
call flush_IMAIN()
endif
endif
diff --git a/src/specfem3D/lts_global_step.F90 b/src/specfem3D/lts_global_step.F90
index 8117d2f3a..5b741d8a4 100644
--- a/src/specfem3D/lts_global_step.F90
+++ b/src/specfem3D/lts_global_step.F90
@@ -507,7 +507,7 @@ subroutine lts_set_finer_initial_condition(displ_p,ilevel)
integer,intent(in) :: ilevel
! local parameters
- integer :: is, ie, iglob_n, iglob
+ integer :: is0, ie, iglob_n, iglob
! checks that current level is not coarsest one
if (ilevel >= num_p_level) return
@@ -522,14 +522,14 @@ subroutine lts_set_finer_initial_condition(displ_p,ilevel)
!
! fast/partial memory copy of displacement array
! gets start index from finest level up to end index of current level
- is = p_level_iglob_start(1)
+ is0 = p_level_iglob_start(1)
ie = p_level_iglob_end(ilevel)
! checks
- if (ie < is) stop 'Error lts_set_finer_initial_condition: invalid start/end index'
+ if (ie < is0) stop 'Error lts_set_finer_initial_condition: invalid start/end index'
! copies over displacement from coarser level
- displ_p(:,is:ie,ilevel) = displ_p(:,is:ie,ilevel+1)
+ displ_p(:,is0:ie,ilevel) = displ_p(:,is0:ie,ilevel+1)
! cancels out coarser node displacements
do iglob_n = 1,num_p_level_coarser_to_update(ilevel)
diff --git a/src/specfem3D/lts_newmark_update.F90 b/src/specfem3D/lts_newmark_update.F90
index 955a8fdf1..a30ece09b 100644
--- a/src/specfem3D/lts_newmark_update.F90
+++ b/src/specfem3D/lts_newmark_update.F90
@@ -118,7 +118,7 @@ subroutine lts_newmark_update(veloc_p,displ_p,accel,m,ilevel,num_p_level,deltat_
use specfem_par_lts, only: iglob_p_refine, p_level_iglob_start, p_level_iglob_end, &
num_p_level_coarser_to_update, p_level_coarser_to_update, lts_current_m, &
cmassxyz,rmassxyz,rmassxyz_mod, &
- accel_collected
+ accel_collected,mask_ibool_collected,use_accel_collected
! debug
use specfem_par, only: it,xstore,ystore,zstore
@@ -322,22 +322,72 @@ subroutine lts_newmark_update(veloc_p,displ_p,accel,m,ilevel,num_p_level,deltat_
endif
   ! collects acceleration a_n+1 = B u_n+1 from different levels
- if (allocated(accel_collected)) then
+ if (use_accel_collected) then
! collects acceleration B u_n+1 from this current level ilevel
! this is computed in the first local iteration (m==1) where the initial condition sets u_0 = u_n+1
if (ilevel < num_p_level) then
+ ! multiple levels
! only store if accel was computed the very first time this level was called
!(i.e., lts_current_m(ilevel+1) is still at 1)
if (m == 1 .and. lts_current_m(ilevel+1) == 1) then
- if (STACEY_ABSORBING_CONDITIONS) then
- ! uses same mass matrix as on coarsest level
- accel_collected(:,is:ie) = rmassxyz_mod(:,is:ie)/rmassxyz(:,is:ie) * accel(:,is:ie)
- else
- accel_collected(:,is:ie) = accel(:,is:ie)
- endif
+ ! note: an element in the current p-level can contain nodes that are shared with coarser levels.
+ ! these elements are flagged as boundary elements and updated in the current (finer) level.
+            !       their acceleration is computed by the current level during the boundary contribution.
+            !       however, those acceleration values will be zeroed out again when the next coarser level
+            !       starts and sets its accel(:,is:ie) array entries to zero.
+ ! thus, if we set
+ ! accel_collected(:,is:ie) = accel(:,is:ie)
+ ! the next time we call this from the coarser level, the values on the boundary nodes that have been computed
+ ! during the finer p-level are zeroed out. thus, the boundary nodes would have zeroes stored in
+ ! the accel_collected(:,:) array.
+ !
+ ! we need to make sure to only update nodes from the current level that are
+ ! not boundary nodes. for that purpose, we use the mask_ibool_collected flags.
+ !
+ ! without distinction of boundary node updates, this could be done:
+ !
+ ! ! uses same mass matrix as on coarsest level
+ ! accel_collected(:,is:ie) = rmassxyz_mod(:,is:ie)/rmassxyz(:,is:ie) * accel(:,is:ie)
+ !
+ ! with consideration of boundary nodes:
+ !
+ ! loop over current level nodes
+ do iglob = is,ie
+ if (.not. mask_ibool_collected(iglob)) then
+ ! uses same mass matrix as on coarsest level
+ accel_collected(:,iglob) = rmassxyz_mod(:,iglob)/rmassxyz(:,iglob) * accel(:,iglob)
+ mask_ibool_collected(iglob) = .true.
+ endif
+ enddo
+ ! boundary points belonging to coarser levels (but are updated in current level)
+ do iglob_n = 1,num_p_level_coarser_to_update(ilevel)
+ iglob = p_level_coarser_to_update(iglob_n,ilevel)
+ if (.not. mask_ibool_collected(iglob)) then
+ if (iglob_p_refine(iglob) > 1) then
+ ! node belongs to finer p-levels
+ ! uses same mass matrix as on coarsest level
+ accel_collected(:,iglob) = rmassxyz_mod(:,iglob)/rmassxyz(:,iglob) * accel(:,iglob)
+ else
+ ! node belongs to coarsest level
+ accel_collected(:,iglob) = accel(:,iglob)
+ endif
+ mask_ibool_collected(iglob) = .true.
+ endif
+ enddo
endif
else
- accel_collected(:,is:ie) = accel(:,is:ie)
+ ! coarsest p-level (ilevel == num_p_level)
+ ! only loop over current level nodes
+ ! as there should be no boundary points belonging to coarser levels
+ do iglob = is,ie
+ if (.not. mask_ibool_collected(iglob)) then
+ accel_collected(:,iglob) = accel(:,iglob)
+ mask_ibool_collected(iglob) = .true.
+ endif
+ enddo
+ ! in case of only a single level (num_p_level==1)
+ ! no need to distinguish between boundary nodes and p-level nodes, just update level points
+ ! accel_collected(:,is:ie) = accel(:,is:ie)
endif
endif
@@ -366,8 +416,10 @@ subroutine lts_newmark_update(veloc_p,displ_p,accel,m,ilevel,num_p_level,deltat_
! sets accel to collected accel wavefield for (possible) seismogram outputs or shakemaps
! note: for the next time loop iteration, accel will be zeroed out again,
! thus this change will have no effect on the time marching.
- if (allocated(accel_collected)) then
+ if (use_accel_collected) then
accel(:,:) = accel_collected(:,:)
+      ! resets flags on global nodes (for next LTS stepping)
+ mask_ibool_collected(:) = .false.
endif
endif
diff --git a/src/specfem3D/lts_setup.F90 b/src/specfem3D/lts_setup.F90
index a723e7636..1290c6797 100644
--- a/src/specfem3D/lts_setup.F90
+++ b/src/specfem3D/lts_setup.F90
@@ -382,11 +382,16 @@ subroutine lts_setup()
if (FIX_UNDERFLOW_PROBLEM) displ_p(:,:,:) = VERYSMALLVAL
! collected acceleration
+ use_accel_collected = .false.
! only needed for seismograms and shakemaps
if (SAVE_SEISMOGRAMS_ACCELERATION .or. CREATE_SHAKEMAP) then
- allocate(accel_collected(NDIM,NGLOB_AB),stat=ier)
+ allocate(accel_collected(NDIM,NGLOB_AB), &
+ mask_ibool_collected(NGLOB_AB),stat=ier)
if (ier /= 0) call exit_MPI(myrank,'Error allocating working LTS fields displ_p,veloc_p')
accel_collected(:,:) = 0.0_CUSTOM_REAL
+ mask_ibool_collected(:) = .false.
+ ! sets flag to use array
+ use_accel_collected = .true.
endif
! user output
diff --git a/src/specfem3D/setup_movie_meshes.f90 b/src/specfem3D/setup_movie_meshes.f90
index 55f9aff26..e12f21aa9 100644
--- a/src/specfem3D/setup_movie_meshes.f90
+++ b/src/specfem3D/setup_movie_meshes.f90
@@ -155,6 +155,8 @@ subroutine setup_movie_meshes()
allocate(faces_surface_offset(NPROC),stat=ier)
if (ier /= 0) call exit_MPI_without_rank('error allocating array 2119')
if (ier /= 0) stop 'error allocating array for movie faces'
+ nfaces_perproc_surface(:) = 0
+ faces_surface_offset(:) = 0
! number of faces per slice
call gather_all_singlei(nfaces_surface,nfaces_perproc_surface,NPROC)
diff --git a/src/specfem3D/specfem3D_par.F90 b/src/specfem3D/specfem3D_par.F90
index c025dc695..136d2b8d2 100644
--- a/src/specfem3D/specfem3D_par.F90
+++ b/src/specfem3D/specfem3D_par.F90
@@ -871,6 +871,8 @@ module specfem_par_lts
! collected acceleration wavefield
real(kind=CUSTOM_REAL), dimension(:,:),allocatable :: accel_collected
+ logical,dimension(:), allocatable :: mask_ibool_collected
+ logical :: use_accel_collected
! for stacey absorbing boundary conditions
real(kind=CUSTOM_REAL), dimension(:,:), allocatable :: cmassxyz, rmassxyz
diff --git a/utils/Visualization/Blender/README.md b/utils/Visualization/Blender/README.md
new file mode 100644
index 000000000..7b206611a
--- /dev/null
+++ b/utils/Visualization/Blender/README.md
@@ -0,0 +1,12 @@
+# Blender visualization
+
+
+Blender is an open-source, cross-platform suite of tools for 3D graphics creation:
+https://www.blender.org/
+
+
+Examples:
+- python_blender/ - python script example to create a Blender image
+
+
+
diff --git a/utils/Visualization/Blender/python_blender/README.md b/utils/Visualization/Blender/python_blender/README.md
new file mode 100644
index 000000000..5958e0f6b
--- /dev/null
+++ b/utils/Visualization/Blender/python_blender/README.md
@@ -0,0 +1,74 @@
+# Blender scripting example
+
+Blender for 3D graphics creation
+https://www.blender.org/
+
+
+## Installation
+
+The python script `plot_with_blender.py` uses Blender's python module. To use the script, we also need some routines from vtk which are not provided in the default python version that Blender uses internally. One possibility to use the system's python frameworks is to set the environment variable `BLENDER_SYSTEM_PYTHON`. For example, on Mac with python installed through [MacPorts](https://www.macports.org), one can set
+```
+export BLENDER_SYSTEM_PYTHON='/opt/local/Library/Frameworks/Python.framework/Versions/3.10/'
+```
+For this to work, the python version must match Blender's internal python version. For example, Blender version 3.6 bundles python version 3.10.
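+
+You can check which python version your Blender build bundles, for example with:
+```
+blender --background --python-expr "import sys; print(sys.version)"
+```
+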
+
+Another option is to install vtk into the provided Blender python version. For example, on Mac this can be used:
+```
+/Applications/Blender.app/Contents/Resources/3.6/python/bin/python3.10 -m ensurepip
+/Applications/Blender.app/Contents/Resources/3.6/python/bin/python3.10 -m pip install vtk==9.2.6
+```
+In this latter case, to avoid problems with Blender loading the matplotlib rendering modules, comment out the lines related to vtkRenderingMatplotlib in the corresponding vtk module files. For example, on Mac this would be needed:
+```
+## correct import problem with vtk in blender
+# see: https://devtalk.blender.org/t/python-console-undefined-symbol-py-main-upon-import/21143/6
+cd /Applications/Blender.app/Contents/Resources/3.6/python/lib/python3.10/site-packages
+
+sed -i '' "s:from vtkmodules.vtkRenderingMatplotlib import *:#from vtkmodules.vtkRenderingMatplotlib import *:" ./vtk.py
+sed -i '' "s:from .vtkRenderingMatplotlib import *:#from .vtkRenderingMatplotlib import *:" ./vtkmodules/all.py
+```
+This should work. You can test it by running Blender and importing vtk in its scripting environment, or by running Blender with a short test script `my_test.py`:
+```
+#my test script
+import sys
+for path in sys.path: print(path)
+import vtk
+print("vtk: ",vtk.__path__)
+```
+and run blender with:
+```
+blender -noaudio --background --python my_test.py
+```
+
+
+## Simulation setup
+
+First, run a simulation example, e.g., in `EXAMPLES/applications/simple_model/`, and turn on the shakemap flag in the `DATA/Par_file`:
+ ```
+ CREATE_SHAKEMAP = .true.
+ ```
+
+ This will create a file `shakingdata` in the example's `OUTPUT_FILES/` folder.
+ Then, run the `xcreate_movie_shakemap_AVS_DX_GMT` binary from the root directory to create an AVS UCD format `.inp` output file
+ ```
+ ../../../../bin/xcreate_movie_shakemap_AVS_DX_GMT << lon_max or lat_min > lat_max:
+ print("Wrong mesh area format, please provide in a format (lon_min,lat_min,lon_max,lat_max)")
+ sys.exit(1)
+
+ print(" lat min/max: ",lat_min,lat_max)
+ print(" lon min/max: ",lon_min,lon_max)
+ print("")
+
+ # bounding box with lon/lat format
+ suffix = "/items?bbox={},{},{},{}".format(lon_min,lat_min,lon_max,lat_max)
+
+ # get list of tiles
+ url = url_base + suffix
+ f = urllib.request.urlopen(url)
+
+ txt = f.read().decode('utf-8')
+ json_result = json.loads(txt)
+
+ #print("json: ",json_result)
+
+ # gets tile urls
+ tile_list = []
+ for item in json_result['features']:
+ for k,dic in item['assets'].items():
+ href = dic['href']
+ # keep .dxf files
+ # for example: SWISSBUILDINGS3D_2_0_CHLV95LN02_1047-32.dxf.zip
+ #print("href: ",href)
+ if href[-8:] == '.dxf.zip':
+ # makes sure the tile names have a format like: swissbuildings3d_2_2019-04_1067-44_2056_5728.dxf.zip
+ # contains 6 items when split by '_', like: ['swissbuildings3d', '2', '2019-04', '1067-44', '2056', '5728.dxf.zip']
+ name_items = dic['href'].split('/')[-1].split('_')
+ #print("name: ",name_items)
+ if len(name_items) == 6:
+ tile_list.append(dic['href'])
+
+ num_tiles = len(tile_list)
+ print(" data tiles: number of tiles found = ",num_tiles)
+ print("")
+
+ if num_tiles == 0:
+ print("No data tiles found, please check...")
+ sys.exit(1)
+
+ # download files
+ datafiles = []
+ for i,url in enumerate(tile_list):
+ name = url.split('/')[-1]
+ # zip name
+ # example: ./swissbuildings3d_2_2019-04_1067-44_2056_5728.dxf.zip
+ filename_zip = os.path.join("./",name)
+
+ # check if file already downloaded
+ # file name
+ # example: SWISSBUILDINGS3D_2_0_CHLV95LN02_1067-44.dxf
+ pref = 'SWISSBUILDINGS3D_2_0_CHLV95LN02_'
+ suf = '.dxf'
+ # split: swissbuildings3d _ 2 _ 2019-04 _ 1067-44 _ 2056 _ 5728.dxf.zip
+ # to get 1067-44 identifier
+ name_dxf = pref + name.split("_")[3] + suf
+ filename = os.path.join("./",name_dxf)
+
+ print(" tile {} out of {}".format(i+1,len(tile_list)))
+ print(" zip name: ",filename_zip)
+ print(" name : ",filename)
+
+ if not os.path.isfile(filename):
+ # download file
+ print(" downloading...")
+ #print(" url : ",url)
+ x = urllib.request.urlopen(url)
+ with open(filename_zip,'wb') as f:
+ f.write(x.read())
+
+ # de-zip file
+ with ZipFile(filename_zip, 'r') as zipObj:
+ # Extract all the contents of zip file in current directory
+ zipObj.extractall("./")
+
+ # remove zip file
+ os.remove(filename_zip)
+ else:
+ print(" file exists...")
+ print("")
+
+ datafiles.append(filename)
+
+ print("downloaded files: ")
+ for file in datafiles: print(" ",file)
+ print("")
+
+ return datafiles
+
+
+def read_dxf_file(filename,index):
+ # reads in DXF file
+ print(" reading in DXF file: ",filename)
+ print("")
+
+ # Read the DXF file using ezdxf
+ doc = ezdxf.readfile(filename)
+
+ print(f" DXF version: {doc.dxfversion}")
+ print(f" DXF database contains {len(doc.entitydb)} entities")
+ print("")
+
+ # Recommended: audit & repair DXF document before rendering
+ auditor = doc.audit()
+
+ if auditor.has_errors:
+ auditor.print_error_report(auditor.errors)
+        raise Exception("Error: The DXF document is damaged and can't be converted!")
+ else:
+ print(" auditor is ok")
+ print("")
+
+ # image plot (pretty slow...)
+ plot_image = False
+ if plot_image:
+ print(" plotting as image png...")
+ name = './' + "tmp_{}.png".format(index)
+ matplotlib.qsave(doc.modelspace(), name)
+ print(" written to: ",name)
+ print("")
+
+ # model space info
+ msp = doc.modelspace()
+
+ num_lines = len(msp.query("LINE"))
+ num_polylines = len(msp.query("POLYLINE"))
+ num_lwpolylines = len(msp.query("LWPOLYLINE"))
+ #num_polyfaces = len(msp.query("POLYFACE")) # swiss buildings dxf contains polylines as polyface meshes
+
+ polyfaces = [polyline for polyline in msp.query('POLYLINE') if polyline.is_poly_face_mesh]
+ num_polyfaces = len(polyfaces)
+
+ print(" modelspace:")
+ print(" lines : ",num_lines)
+ print(" lightweight polylines : ",num_lwpolylines)
+ print(" polylines : ",num_polylines)
+ print(" polyfaces (from polylines): ",num_polyfaces)
+ print("")
+
+ # check
+ if num_polyfaces == 0:
+ print("Error: no polyfaces found in file, exiting...")
+ sys.exit(1)
+
+ return polyfaces
+
+
+def get_minmax_coordinates_in_LV95(mesh_area):
+ # pyproj coordinate system info:
+ # SwissTopo LV95 == CRS / EPSG:2056
+ # WGS84 == EPSG:4326
+ #
+ # WGS84 (GPS) lat/lon -> LV95 (SwissTopo)
+ transformer_to_lv95 = Transformer.from_crs("EPSG:4326","EPSG:2056")
+
+ # mesh area min/max
+ lon_min = mesh_area[0]
+ lat_min = mesh_area[1]
+ lon_max = mesh_area[2]
+ lat_max = mesh_area[3]
+
+ x_min,y_min = transformer_to_lv95.transform(lat_min,lon_min)
+ x_max,y_max = transformer_to_lv95.transform(lat_max,lon_max)
+
+ # user output
+ print(" mesh area:")
+ print(" lat min/max = ",lat_min,lat_max)
+ print(" lon min/max = ",lon_min,lon_max)
+ print(" -> LV95 x min/max = ",x_min,x_max)
+ print(" y min/max = ",y_min,y_max)
+ print("")
+
+ return x_min,x_max,y_min,y_max
+
+
+def convert_coordinates_LV95_to_UTM(mesh):
+ # user output
+ print("converting LV95 coordinates to UTM...")
+ print("")
+ # pyproj coordinate system info:
+ # SwissTopo LV95 == CRS / EPSG:2056
+ # WGS84 == EPSG:4326
+ # spherical mercator, google maps, openstreetmap == EPSG:3857
+ #
+ # we first need to determine the correct UTM zone to get the EPSG code.
+ # for this, we take the mesh origin point and convert it to lat/lon (GPS) position.
+ # we can then query the corresponding UTM zone for this position.
+
+ # LV95 (SwissTopo) -> WGS84 (GPS) lat/lon
+ transformer_to_latlon = Transformer.from_crs("EPSG:2056", "EPSG:4326")
+
+ # mesh origin position
+ orig_x = mesh.diagnose().bbox.extmin.x + 0.5 * mesh.diagnose().bbox.size.x
+ orig_y = mesh.diagnose().bbox.extmin.y + 0.5 * mesh.diagnose().bbox.size.y
+
+ orig_lat,orig_lon = transformer_to_latlon.transform(orig_x,orig_y)
+
+ # user output
+ print(" origin: x/y = ",orig_x,orig_y)
+ print(" -> lat/lon = ",orig_lat,orig_lon)
+ print("")
+
+ # gets list of UTM codes
+ utm_crs_list = pyproj.database.query_utm_crs_info(
+ datum_name="WGS 84",
+ area_of_interest=pyproj.aoi.AreaOfInterest(west_lon_degree=orig_lon,
+ south_lat_degree=orig_lat,
+ east_lon_degree=orig_lon,
+ north_lat_degree=orig_lat))
+ utm_code = utm_crs_list[0].code
+ utm_epsg_code = "EPSG:{}".format(utm_code)
+
+ print(" UTM code:", utm_epsg_code)
+
+ # direct transformation from LV95 to UTM zone
+ transformer_to_utm = Transformer.from_crs("EPSG:2056", utm_epsg_code)
+
+ #debug
+ #print(transformer_to_utm)
+
+ # user info
+ utm_x,utm_y = transformer_to_utm.transform(orig_x,orig_y)
+ print(" -> utm x/y = ",utm_x,utm_y)
+ print(" backward check: orig x/y = ",transformer_to_utm.transform(utm_x,utm_y,direction='INVERSE'))
+ print("")
+
+ # converts mesh point coordinates
+ vertices = mesh.vertices
+ num_vertices = len(vertices)
+ print(" mesh:")
+ print(" number of vertices = ",num_vertices)
+ print("")
+
+ for i,vertex in enumerate(mesh.vertices):
+ # gets LV95 location
+ x = vertex.x
+ y = vertex.y
+ z = vertex.z
+
+ # converts to UTM location (x and y)
+ #point = np.array([x,y])
+ x_utm,y_utm = transformer_to_utm.transform(x,y)
+
+ # sets new coordinates
+ vertex_new = ezdxf.math.Vec3(x_utm,y_utm,z)
+ mesh.vertices[i] = vertex_new
+
+ print(" new UTM mesh dimensions:")
+ print(" bounding box : ",mesh.diagnose().bbox)
+ orig_x = mesh.diagnose().bbox.extmin.x + 0.5 * mesh.diagnose().bbox.size.x
+ orig_y = mesh.diagnose().bbox.extmin.y + 0.5 * mesh.diagnose().bbox.size.y
+ print(" origin X/Y : ",orig_x,orig_y)
+ dim_x = mesh.diagnose().bbox.size.x
+ dim_y = mesh.diagnose().bbox.size.y
+ dim_z = mesh.diagnose().bbox.size.z
+ print(" dimensions : ",dim_x,dim_y,dim_z)
+ print("")
+
+ return mesh
+
+
+def convert_autocad_dxf_to_mesh(input_file,mesh_origin=None,mesh_scale=None,mesh_area=None):
+ global use_coordinate_transform_LV95_to_UTM
+
+ # user output
+ print("")
+ print("convert AutoCAD DXF file(s)")
+ print(" input file : ",input_file)
+ if not mesh_origin is None:
+ print(" mesh_origin : ",mesh_origin)
+ if not mesh_scale is None:
+ print(" mesh_scale : ",mesh_scale)
+ if not mesh_area is None:
+ print(" mesh_area : ",mesh_area)
+
+ print("")
+
+ # current directory
+ dir = os.getcwd()
+ print(" current directory: ",dir)
+ print("")
+
+ # input data
+ if len(input_file) == 0:
+ # download buildings
+ datafiles = download_mesh_area_buildings(mesh_area)
+ else:
+ # check if multiple files specified with wildcard (e.g., SWISSBUILDINGS3D*.dxf)
+ datafiles = []
+ if '*' in input_file:
+ print(" files expanded:")
+ files = glob.glob(input_file)
+ files.sort()
+ for filename in files:
+ if ".dae" in filename or ".shp" in filename or ".dbf" in filename or ".gdb" in filename:
+ # skip file name (e.g. SWISSBUILDINGS3D_2_0_CHLV95LN02_1047-32.dae)
+ continue
+ else:
+ # keep file
+ print(" ",filename)
+ datafiles.append(filename)
+ print("")
+ else:
+ datafiles.append(input_file)
+
+ print(" number of data files: ",len(datafiles))
+ print("")
+ if len(datafiles) == 0:
+ print("no data files found, please check input...")
+ sys.exit(1)
+
+ # checks if file(s) exists
+ for filename in datafiles:
+ # checks if file exists
+ if not os.path.isfile(filename):
+ print("Please check if file exists: ",filename)
+ sys.exit(1)
+
+ # to check if mesh buildings are in specified mesh_area
+ if not mesh_area is None:
+ # convert lat/lon from mesh_area to SwissTopo LV95
+ area_x_min,area_x_max,area_y_min,area_y_max = get_minmax_coordinates_in_LV95(mesh_area)
+
+ # overall document layout
+ doc = ezdxf.new()
+ msp = doc.modelspace()
+
+ # create a new mesh object
+ mesh = MeshBuilder()
+
+ # reads in DXF data
+ icount_file = 0
+ for filename in datafiles:
+ # reads in model file
+ icount_file += 1
+ print("")
+ print("***************************************************")
+ print("file {} out of {}".format(icount_file,len(datafiles)))
+ print("")
+ print("data file: ",filename)
+ print("***************************************************")
+ print("")
+
+ polyfaces = read_dxf_file(filename,icount_file)
+
+ # adds polyface meshes
+ print("adding meshes...")
+ icount_added = 0
+ for i,polyface in enumerate(polyfaces):
+ layer = polyface.dxf.layer
+ color = polyface.dxf.color
+
+ # user output
+ # number to output in steps of 10/100/1000 depending on how large the number of polyfaces is
+ nout = min(1000,int(10**np.floor(np.log10(len(polyfaces)))))
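+        # e.g., 500 polyfaces -> 10**floor(log10(500)) = 100 -> nout = min(1000,100) = 100,
+        # so progress is printed every 100 polyfaces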
+ if (i+1) % nout == 0:
+ print(" polyface: {} out of {} - layer: {}".format(i+1,len(polyfaces),layer)) # " - color: ",color
+
+ # creates object mesh
+ b = MeshVertexMerger.from_polyface(polyface)
+ #b.render(msp1, dxfattribs={
+ # 'layer': polyface.dxf.layer,
+ # 'color': polyface.dxf.color,
+ #})
+ b.render_polyface(msp, dxfattribs={
+ 'layer': layer,
+ 'color': color,
+ })
+
+ # checks number of vertices in mesh
+ if len(b.vertices) == 0: continue
+
+ # checks if mesh in mesh_area
+ if not mesh_area is None:
+ # gets bounding box min/max from this building
+ bbox = b.diagnose().bbox
+ x_min = bbox.extmin.x
+ x_max = bbox.extmax.x
+ y_min = bbox.extmin.y
+ y_max = bbox.extmax.y
+
+ if x_min < area_x_min or x_max > area_x_max or \
+ y_min < area_y_min or y_max > area_y_max:
+ # building outside of mesh area
+ #print("building: ",x_min,x_max,"/",y_min,y_max,"area: ",area_x_min,area_x_max,"/",area_y_min,area_y_max)
+ # we won't add it to the total mesh object, continue with next polyface
+ continue
+
+ # removes faces with null normals (degenerate faces),
+ # otherwise will lead to problems/crashes when saving as .stl and reading the .ply file
+ faces = []
+ for j,face in enumerate(b.faces):
+ #print("face ",j,face)
+ normal = b.get_face_normal(j)
+ if normal == ezdxf.math.NULLVEC:
+ #debug
+ #print("mesh problem w/ normal from get_face_normal() ",j,normal,face)
+ # we will omit this face
+ continue
+ else:
+ faces.append(face)
+
+ # re-sets mesh faces array
+ b.faces = faces
+
+ # checks number of new faces
+ if len(b.faces) == 0: continue
+
+ # checks again face normals to avoid problems with degenerate faces when saving to file
+ do_add_mesh = True
+ for j,normal in enumerate(b.diagnose().face_normals):
+ if normal == ezdxf.math.NULLVEC:
+ #debug
+ #print("mesh problem w/ face normal ",j,normal)
+ # we will skip this mesh
+ do_add_mesh = False
+ break
+
+ # adds object to mesh
+ if do_add_mesh:
+ mesh.add_mesh(mesh=b)
+ # counter
+ icount_added += 1
+
+ #debug
+ #if i == 2: break
+ print(" done. added {} out of {}".format(icount_added,len(polyfaces)))
+
+ # check
+ if len(mesh.vertices) == 0:
+ print("")
+ print("Mesh is empty, there's nothing to save. Please check your inputs...")
+ print("")
+ sys.exit(1)
+
+ # user output
+ print("")
+ print("mesh dimensions:")
+ print(" bounding box : ",mesh.diagnose().bbox)
+ orig_x = mesh.diagnose().bbox.extmin.x + 0.5 * mesh.diagnose().bbox.size.x
+ orig_y = mesh.diagnose().bbox.extmin.y + 0.5 * mesh.diagnose().bbox.size.y
+ print(" origin X/Y : ",orig_x,orig_y)
+ dim_x = mesh.diagnose().bbox.size.x
+ dim_y = mesh.diagnose().bbox.size.y
+ dim_z = mesh.diagnose().bbox.size.z
+ print(" dimensions : ",dim_x,dim_y,dim_z)
+ print("")
+
+ # transform mesh coordinate from SwissTopo to UTM
+ if use_coordinate_transform_LV95_to_UTM:
+ mesh = convert_coordinates_LV95_to_UTM(mesh)
+
+ ## write out UTM mesh as binary .ply file
+ name = 'output_dxf_utm.ply'
+ filename = './' + name
+ with open(filename, "wb") as fp:
+ fp.write(meshex.ply_dumpb(mesh))
+ print(" UTM mesh written: ",filename)
+ print("")
+
+ # moves mesh coordinates by origin point to position around (0,0,0) for blender
+ if not mesh_origin is None:
+ print("mesh: moving by origin = ",mesh_origin)
+ print("")
+
+ translation_vector = - ezdxf.math.Vec3(mesh_origin)
+
+ mesh2 = MeshTransformer.from_builder(mesh)
+ mesh2.translate(translation_vector)
+
+ mesh = mesh2
+
+ print(" bounding box after : ",mesh.diagnose().bbox)
+ orig_x = mesh.diagnose().bbox.extmin.x + 0.5 * mesh.diagnose().bbox.size.x
+ orig_y = mesh.diagnose().bbox.extmin.y + 0.5 * mesh.diagnose().bbox.size.y
+ print(" origin X/Y after : ",orig_x,orig_y)
+ dim_x = mesh.diagnose().bbox.size.x
+ dim_y = mesh.diagnose().bbox.size.y
+ dim_z = mesh.diagnose().bbox.size.z
+ print(" dimensions after : ",dim_x,dim_y,dim_z)
+ print("")
+
+ # scales mesh coordinates by uniform scaling factor to scale between +/- 1 for blender
+ if not mesh_scale is None:
+ print("mesh: scaling by factor = ",mesh_scale)
+ print("")
+
+ scale_factor = mesh_scale
+
+ mesh2 = MeshTransformer.from_builder(mesh)
+ mesh2.scale_uniform(scale_factor)
+
+ mesh = mesh2
+
+ print(" bounding box after : ",mesh.diagnose().bbox)
+ dim_x = mesh.diagnose().bbox.size.x
+ dim_y = mesh.diagnose().bbox.size.y
+ dim_z = mesh.diagnose().bbox.size.z
+ print(" dimensions after : ",dim_x,dim_y,dim_z)
+ print("")
+
+ ## write out ASCII .stl file
+ #name = 'output_dxf.stl'
+ #filename = './' + name
+ #with open(filename, "wt") as fp:
+ # fp.write(meshex.stl_dumps(mesh))
+
+ ## write out binary .stl file
+ #name = 'output_dxf.stl'
+ #filename = './' + name
+ #with open(filename, "wb") as fp:
+ # fp.write(meshex.stl_dumpb(mesh))
+
+ ## write out ASCII .obj file
+ #name = 'output_dxf.obj'
+ #filename = './' + name
+ #with open(filename, "wt") as fp:
+ # fp.write(meshex.obj_dumps(mesh))
+
+ ## write out binary .ply file
+ name = 'output_dxf.ply'
+ filename = './' + name
+ with open(filename, "wb") as fp:
+ fp.write(meshex.ply_dumpb(mesh))
+ print("written: ",filename)
+ print("")
+
+
+
+def usage():
+    print("usage: ./convert_dxf_to_mesh.py [--dxf_file=file] [--mesh_origin=(x0,y0,z0)] [--mesh_scale=factor] [--mesh_area=(lon_min,lat_min,lon_max,lat_max)]")
+ print(" with")
+ print(" --dxf_file - input mesh file (.dxf, .dwg)")
+ print(" --mesh_origin - translate mesh by origin position")
+ print(" --mesh_scale - scale mesh by a factor")
+ print(" --mesh_area - downloads/limits buildings for area specified by (lon_min,lat_min,lon_max,lat_max)")
+ sys.exit(1)
+
+
+if __name__ == '__main__':
+ # init
+ input_file = ""
+ mesh_origin = None
+ mesh_scale = None
+ mesh_area = None
+
+ # reads arguments
+ #print("\nnumber of arguments: " + str(len(sys.argv)))
+ if len(sys.argv) <= 1: usage()
+
+ i = 0
+ for arg in sys.argv:
+ i += 1
+ #print("argument "+str(i)+": " + arg)
+ # get arguments
+ if "--help" in arg:
+ usage()
+ elif "--dxf_file=" in arg:
+ input_file = arg.split('=')[1]
+ elif "--dwg_file=" in arg:
+ input_file = arg.split('=')[1]
+ elif "--mesh_origin=" in arg:
+ str_array = arg.split('=')[1]
+ mesh_origin = np.array([float(val) for val in str_array.strip('()[]').split(',')])
+ elif "--mesh_scale=" in arg:
+ mesh_scale = float(arg.split('=')[1])
+ elif "--mesh_area=" in arg:
+ str_array = arg.split('=')[1]
+ mesh_area = np.array([float(val) for val in str_array.strip('()[]').split(',')])
+        elif i > 1:
+ print("argument not recognized: ",arg)
+
+ # logging
+ cmd = " ".join(sys.argv)
+ filename = './convert_dxf_to_mesh.log'
+ with open(filename, 'a') as f:
+ print("command call --- " + str(datetime.datetime.now()),file=f)
+ print(cmd,file=f)
+ print("command logged to file: " + filename)
+
+ # main routine
+ convert_autocad_dxf_to_mesh(input_file,mesh_origin,mesh_scale,mesh_area)
diff --git a/utils/Visualization/Blender/python_blender/plot_with_blender.py b/utils/Visualization/Blender/python_blender/plot_with_blender.py
new file mode 100755
index 000000000..53583391d
--- /dev/null
+++ b/utils/Visualization/Blender/python_blender/plot_with_blender.py
@@ -0,0 +1,1690 @@
+#!/usr/bin/env blender --background --python-use-system-env --python plot_with_blender.py --
+#
+# or for example on Mac:
+# /usr/bin/env /Applications/Blender.app/Contents/MacOS/Blender --background --python-use-system-env --python plot_blender_sphere.py --
+#
+#
+# run with: > ./plot_with_blender.py --help
+#
+###############################################################################################
+
+import sys
+import os
+import time
+import datetime
+
+# blender
+try:
+ import bpy
+except:
+ print("Failed importing bpy. Please make sure to have Blender python working properly.")
+ sys.exit(1)
+
+print("")
+print("Blender: version ",bpy.app.version_string)
+print("")
+
+# blender meshing
+import bmesh
+
+# adds path
+#sys.path.append('/Applications/Blender.app/Contents/Resources/3.6/python/lib/python3.10/site-packages')
+
+try:
+ import vtk
+except:
+ print("Failed importing vtk. Please make sure to have Blender python with vtk working properly.")
+ print("system path:")
+ for path in sys.path:
+ print(" ",path)
+ sys.exit(1)
+
+print("VTK: version ",vtk.vtkVersion().GetVTKVersion())
+print("")
+
+try:
+ import numpy as np
+except:
+ print("Failed importing numpy. Please make sure to have Blender python with numpy working properly.")
+ sys.exit(1)
+
+from mathutils import Vector,Matrix
+from math import radians,atan2,sin,cos
+
+###############################################################################################
+## USER parameters
+
+## renderer
+# render image size
+blender_img_resolution_X = 2400
+blender_img_resolution_Y = 1600
+
+# cycles rendering (for better glass effect of buildings)
+use_cycles_renderer = False
+
+###############################################################################################
+
+# Constants
+PI = 3.141592653589793
+DEGREE_TO_RAD = PI / 180.0
+
+# Global variables
+mesh_scale_factor = 1.0
+mesh_origin = [0.0,0.0,0.0]
+
+
+# class to avoid long stdout output by renderer
+# see: https://stackoverflow.com/questions/24277488/in-python-how-to-capture-the-stdout-from-a-c-shared-library-to-a-variable/29834357
+class SuppressStream(object):
+ def __init__(self, stream=sys.stderr,suppress=False):
+ # turns on/off suppressing stdout of renderer process
+ self.SUPPRESS_STDOUT = suppress
+
+ if self.SUPPRESS_STDOUT:
+ self.orig_stream_fileno = stream.fileno()
+
+ def __enter__(self):
+ if self.SUPPRESS_STDOUT:
+ self.orig_stream_dup = os.dup(self.orig_stream_fileno)
+ self.devnull = open(os.devnull, 'w')
+ os.dup2(self.devnull.fileno(), self.orig_stream_fileno)
+
+ def __exit__(self, type, value, traceback):
+ if self.SUPPRESS_STDOUT:
+ os.close(self.orig_stream_fileno)
+ os.dup2(self.orig_stream_dup, self.orig_stream_fileno)
+ os.close(self.orig_stream_dup)
+ self.devnull.close()
+
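+# usage sketch (assumed pattern): silences the low-level output of a noisy call, e.g.
+#   with SuppressStream(sys.stderr, suppress=True):
+#       bpy.ops.render.render(write_still=True)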
+
+def convert_vtk_to_obj(vtk_file,colormap=0,color_max=None):
+ global mesh_scale_factor,mesh_origin
+
+ # Path to your .vtu file
+ print("converting vtk file: ",vtk_file)
+
+ # check file
+ if len(vtk_file) == 0:
+ print("Error: no vtk file specified...")
+ sys.exit(1)
+
+ if not os.path.exists(vtk_file):
+ print("Error: vtk file specified not found...")
+ sys.exit(1)
+
+ # gets file extension
+ extension = os.path.splitext(vtk_file)[1]
+ # reads the vtk file
+ if extension == '.vtk':
+ # .vtk file
+ reader = vtk.vtkDataSetReader()
+ reader.SetFileName(vtk_file)
+ reader.Update()
+ elif extension == '.vtu':
+ # .vtu
+ reader = vtk.vtkXMLUnstructuredGridReader()
+ reader.SetFileName(vtk_file)
+ reader.Update()
+ elif extension == '.inp':
+ # AVS .inp
+ #reader = vtk.vtkSimplePointsReader()
+ reader = vtk.vtkAVSucdReader()
+ reader.SetFileName(vtk_file)
+ reader.Update()
+ else:
+ print("unknown vtk file extension ",extension," - exiting...")
+ sys.exit(1)
+
+ #debug
+ #print(reader)
+
+ ## scale coordinates to be in range [-1,1] for visualization
+ # gets the points (vertices) from the dataset
+ points = reader.GetOutput().GetPoints()
+
+ # number of points
+ num_points = points.GetNumberOfPoints()
+
+ #print(points)
+ #print(points.GetBounds())
+ xmin,xmax,ymin,ymax,zmin,zmax = points.GetBounds()
+
+ # defines the scaling factors to fit within +/- 1
+ #min_coords = np.array(points.GetPoint(0))
+ #max_coords = np.array(points.GetPoint(0))
+ #for i in range(1, num_points):
+ # point = np.array(points.GetPoint(i))
+ # min_coords = np.minimum(min_coords, point)
+ # max_coords = np.maximum(max_coords, point)
+
+ min_coords = np.array([xmin,ymin,zmin])
+ max_coords = np.array([xmax,ymax,zmax])
+ dimensions = max_coords - min_coords
+
+ # info
+ print(" minimum coordinates:", min_coords)
+ print(" maximum coordinates:", max_coords)
+ print(" dimensions :", dimensions)
+ print("")
+
+ # gets data values on nodes
+ data_array = None
+ if reader.GetOutput().GetPointData().GetNumberOfArrays() > 0:
+ data_array = reader.GetOutput().GetPointData().GetArray(0) # Example: Accessing the first data array
+ #debug
+ #print(data_array)
+ #info
+ print(" data: ")
+ print(" range = ",data_array.GetRange())
+ print("")
+
+
+ # determines origin of mesh to move it back to (0,0,0) and scales it to [-1,1] for easier placement in blender
+ mesh_origin = min_coords + 0.5 * (max_coords - min_coords)
+
+ # z-coordinate: leaves sea level at 0 for mesh origin
+ mesh_origin[2] = 0.0
+
+ # takes maximum size in x/y direction
+ dim_max = np.maximum(dimensions[0],dimensions[1])
+
+ if np.abs(dim_max) > 0.0:
+ mesh_scale_factor = 2.0 / dim_max
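+ # example: a mesh spanning 40 km in x/y (dim_max = 40000.0 m)
+ # gets mesh_scale_factor = 2.0 / 40000.0 = 5.e-5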
+
+ print(" mesh scaling:")
+ print(" origin :",mesh_origin)
+ print(" scale factor :",mesh_scale_factor)
+ print("")
+
+ # creates an array to store scaled points
+ scaled_points = vtk.vtkPoints()
+
+ # Loop through each point, scale its coordinates, and add it to the new points array
+ for i in range(num_points):
+ # translation by origin
+ point = np.array(points.GetPoint(i)) - mesh_origin
+ # uniform scaling
+ scaled_point = point * mesh_scale_factor
+ # stores updated points
+ scaled_points.InsertNextPoint(scaled_point)
+
+ # creates a new polydata with the scaled points
+ scaled_polydata = vtk.vtkPolyData()
+ scaled_polydata.SetPoints(scaled_points)
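+ # note: for very large meshes, the per-point loop above could be replaced by a
+ # vectorized version via vtk.util.numpy_support (a sketch, untested here):
+ # from vtk.util.numpy_support import vtk_to_numpy, numpy_to_vtk
+ # coords = vtk_to_numpy(points.GetData())                 # (num_points,3) array
+ # scaled = (coords - mesh_origin) * mesh_scale_factor
+ # scaled_points.SetData(numpy_to_vtk(scaled, deep=True))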
+
+ # vertex data
+ if data_array:
+ num_points = data_array.GetNumberOfTuples()
+ num_components = data_array.GetNumberOfComponents()
+ data_min,data_max = data_array.GetRange()
+
+ print(" data array: ")
+ print(" number of points = ",num_points)
+ print(" number of components = ",num_components)
+ print(" range min/max = ",data_min,"/",data_max)
+ print("")
+
+ # checks if fixing maximum value
+ if color_max:
+ # Convert VTK data array to NumPy array
+ array = np.zeros((num_points, num_components))
+ for i in range(num_points):
+ for j in range(num_components):
+ value = data_array.GetComponent(i, j)
+ array[i,j] = value
+
+ # limit size
+ if 1 == 0:
+ ## determines maximum value as a multiple of 10
+ # reduce first by 10%
+ total_max = abs(array).max()
+ total_max = 0.9 * total_max # sets limit at 90% of the maximum
+
+ # get maximum value in power of 10
+ if total_max != 0.0:
+ total_max = 1.0 * 10**(int(np.log10(total_max))) # example: 2.73e-11 limits to 1.e-11
+ #total_max = 1.0 * 10**(int(np.log10(total_max))-1) # example: 2.73e-11 limits to 1.e-11
+ #total_max = 1.0 * 10**(int(np.log10(total_max))-2) # example: 2.73e-11 limits to 1.e-12
+ else:
+ total_max = 0.0
+ print(" data: color data min/max = ",array.min(),array.max())
+ print(" data: zero color data - nothing to show")
+ # nothing left to do
+ #sys.exit(1)
+
+ # checks if fixing maximum value
+ if color_max:
+ total_max = color_max
+
+ print(" limiting color data range:")
+ print(" data min/max = ",array.min(),array.max())
+ if color_max:
+ print(" data total max = ",total_max," (fixed)")
+ else:
+ print(" data total max = ",total_max)
+ print("")
+
+ # limits range [-total_max,total_max]
+ array = np.where(array < -total_max, -total_max, array)
+ array = np.where(array > total_max, total_max, array)
+
+ # in case color-max is larger than actual range, this sets an arbitrary point to the maximum value
+ # to get the correct range value when plotting the data
+ if abs(array).max() < total_max:
+ array[0,0] = total_max
+
+ # for shakemaps, start range with a minimum value of 0
+ if 'shaking' in vtk_file or 'shakemap' in vtk_file:
+ array[1,0] = 0.0
+
+ # sets updated range back to vtk array
+ print(" new data: color data min/max = ",array.min(),array.max())
+ for i in range(num_points):
+ for j in range(num_components):
+ value = array[i,j]
+ data_array.SetComponent(i, j, value)
+
+ # Inform VTK that the array has been modified
+ data_array.Modified()
+ data_min,data_max = data_array.GetRange()
+ print(" new data: range min/max = ",data_min,"/",data_max)
+ print("")
+
+ # sets vertex data
+ scaled_polydata.GetPointData().SetScalars(data_array)
+
+ # updates the points in the original dataset with the scaled points
+ reader.GetOutput().SetPoints(scaled_points)
+ reader.Update()
+
+ # output scaled bounds
+ points = reader.GetOutput().GetPoints()
+ xmin,xmax,ymin,ymax,zmin,zmax = points.GetBounds()
+ print(" mesh dimensions after scaling: x min/max = ",xmin,xmax)
+ print(" y min/max = ",ymin,ymax)
+ print(" z min/max = ",zmin,zmax)
+ print("")
+
+ # Get the unstructured grid data
+ unstructured_grid = reader.GetOutput()
+
+ # Extract colors if available
+ colors_array = None
+ if unstructured_grid.GetPointData().GetNumberOfArrays() > 0:
+ print(" grid: point data arrays = ",unstructured_grid.GetPointData().GetNumberOfArrays())
+ print("")
+ colors_array = unstructured_grid.GetPointData().GetArray(0) # Assuming colors are in the first array
+ print(" colors: name = ",colors_array.GetName())
+ print(" range = ",colors_array.GetRange())
+ print("")
+
+ #debug
+ #print("colors_array: ",colors_array)
+
+ # convert to .ply data file
+ # Path to your generated .ply file
+ obj_file = 'output.ply'
+
+ # convert the data to polydata
+ geometry_filter = vtk.vtkGeometryFilter()
+ geometry_filter.SetInputData(unstructured_grid)
+ geometry_filter.Update()
+
+ # creates a new polydata object
+ polydata = geometry_filter.GetOutput()
+
+ print(" polydata: initial number of points",polydata.GetNumberOfPoints())
+ print(" polydata: initial number of verts",polydata.GetNumberOfVerts())
+ print("")
+
+ # checks if we have points
+ # for proc****_free_surface.vtk files, only points are stored in the .vtk file
+ # and the geometry_filter won't fill the polydata object.
+ # here we check that points & verts are filled; otherwise we assume the .vtk file contains points only
+ # and we will try to build a connectivity by Delaunay triangulation
+ if polydata.GetNumberOfPoints() == 0:
+ print(" unstructured grid: number of points",unstructured_grid.GetNumberOfPoints())
+ if unstructured_grid.GetNumberOfPoints() > 0:
+ points = unstructured_grid.GetPoints()
+ polydata.SetPoints(points)
+
+ if polydata.GetNumberOfVerts() == 0:
+ print(" unstructured grid: number of cells",unstructured_grid.GetNumberOfCells())
+ if unstructured_grid.GetNumberOfCells() > 0:
+ verts = unstructured_grid.GetCells()
+ polydata.SetVerts(verts)
+ else:
+ # Perform Delaunay triangulation to generate connectivity
+ print(" getting Delaunay 2D connectivity...")
+ delaunay = vtk.vtkDelaunay2D()
+ delaunay.SetInputData(polydata)
+ delaunay.Update()
+
+ # Get the output triangles
+ triangles = delaunay.GetOutput()
+ #print(triangles)
+ print(" triangles: number of verts",triangles.GetNumberOfVerts())
+ print(" triangles: number of cells",triangles.GetNumberOfCells())
+ polydata = triangles
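+ # note: vtkDelaunay2D triangulates in the xy-plane (z-values are carried along),
+ # which suits these surface point clouds such as the free-surface/topography files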
+
+ print(" polydata: number of points",polydata.GetNumberOfPoints())
+ print(" polydata: number of verts",polydata.GetNumberOfVerts())
+ print(" polydata: number of cells",polydata.GetNumberOfCells())
+ print(" polydata: number of strips",polydata.GetNumberOfStrips())
+ print(" polydata: number of data arrays",polydata.GetPointData().GetNumberOfArrays())
+ print("")
+
+ # we need to set a default lookup table for the polydata set,
+ # otherwise the color float values on the points won't get stored
+ #
+ # set lookup table for depth values to colors
+ if colors_array:
+ lut = vtk.vtkLookupTable()
+ lut.SetTableRange(data_min, data_max)
+ lut.SetNumberOfTableValues(256) # Set the number of table values
+ #lut.SetRampToLinear()
+ #lut.SetRampToSQRT()
+
+ # VTK by default maps the value range to colors from red to blue
+ # determine custom type
+ if colormap == 0:
+ print(" color map: default VTK")
+ # nothing special to add, we just let vtk do it internally
+ # colormap is going from red to white to blue
+ elif colormap == 1:
+ # topo
+ print(" color map: topo")
+ colors_rgb = [
+ [0.3, 0.3, 0.3], # gray
+ [0.1, 0.1, 0.4], # blue
+ [0.2, 0.5, 0.2],
+ [0.25, 0.625, 0.5],
+ [0.0, 0.5, 0.25],
+ [0.5, 0.365, 0.0],
+ [0.75, 0.625, 0.25],
+ [1.0, 0.75, 0.625],
+ [1.0, 0.75, 0.5],
+ [1, 1, 1], # white
+ ]
+
+ elif colormap == 2:
+ # Scientific Colour Map Categorical Palette
+ # https://www.fabiocrameri.ch/colourmaps/
+ # Crameri, F. (2018). Scientific colour maps. Zenodo. http://doi.org/10.5281/zenodo.1243862
+ print(" color map: lisbon")
+ # lisbon 10 Swatches
+ colors_rgb255 = [
+ [230, 229, 255], # lisbon-1 #E6E5FF
+ # or start with a less white color
+ #[200, 208, 237], # lisbon-12 #C8D0ED
+ [155, 175, 211], # lisbon-29 #9BAFD3
+ [ 81, 119, 164], # lisbon-58 #5177A4
+ [ 30, 67, 104], # lisbon-86 #1E4368
+ [ 17, 30, 44], # lisbon-114 #111E2C
+ [ 39, 37, 26], # lisbon-143 #27251A
+ [ 87, 81, 52], # lisbon-171 #575134
+ [141, 133, 86], # lisbon-199 #8D8556
+ [201, 195, 144], # lisbon-228 #C9C390
+ [255, 255, 217], # lisbon-256 #FFFFD9
+ ]
+ # converts the colors from 0-255 range to 0-1 range
+ colors_rgb = [[comp / 255.0 for comp in color] for color in colors_rgb255]
+
+ elif colormap == 3:
+ # Scientific Colour Map Categorical Palette
+ # https://www.fabiocrameri.ch/colourmaps/
+ # Crameri, F. (2018). Scientific colour maps. Zenodo. http://doi.org/10.5281/zenodo.1243862
+ print(" color map: lajolla")
+ # lajolla 10 Swatches
+ colors_rgb255 = [
+ [ 25, 25, 0], # lajolla-1 #191900
+ [ 51, 34, 15], # lajolla-29 #33220F
+ [ 91, 48, 35], # lajolla-58 #5B3023
+ [143, 64, 61], # lajolla-86 #8F403D
+ [199, 80, 75], # lajolla-114 #C7504B
+ [224, 114, 79], # lajolla-143 #E0724F
+ [231, 148, 82], # lajolla-171 #E79452
+ [238, 181, 85], # lajolla-199 #EEB555
+ [248, 223, 124], # lajolla-228 #F8DF7C
+ [255, 254, 203], # lajolla-256 #FFFECB
+ ]
+ # converts the colors from 0-255 range to 0-1 range
+ colors_rgb = [[comp / 255.0 for comp in color] for color in colors_rgb255]
+
+ elif colormap == 4:
+ # Scientific Colour Map Categorical Palette
+ # https://www.fabiocrameri.ch/colourmaps/
+ # Crameri, F. (2018). Scientific colour maps. Zenodo. http://doi.org/10.5281/zenodo.1243862
+ print(" color map: lipari")
+ # lipari 10 Swatches
+ colors_rgb255 = [
+ [ 3, 19, 38], # lipari-1 #031326
+ [ 19, 56, 90], # lipari-29 #13385A
+ [ 71, 88, 122], # lipari-58 #47587A
+ [107, 95, 118], # lipari-86 #6B5F76
+ [142, 97, 108], # lipari-114 #8E616C
+ [188, 100, 97], # lipari-143 #BC6461
+ [229, 123, 98], # lipari-171 #E57B62
+ [231, 162, 121], # lipari-199 #E7A279
+ [233, 201, 159], # lipari-228 #E9C99F
+ [253, 245, 218], # lipari-256 #FDF5DA
+ ]
+ # converts the colors from 0-255 range to 0-1 range
+ colors_rgb = [[comp / 255.0 for comp in color] for color in colors_rgb255]
+
+ elif colormap == 5:
+ # Scientific Colour Map Categorical Palette
+ # https://www.fabiocrameri.ch/colourmaps/
+ # Crameri, F. (2018). Scientific colour maps. Zenodo. http://doi.org/10.5281/zenodo.1243862
+ print(" color map: davos")
+ # davos 10 Swatches
+ colors_rgb255 = [
+ [ 0, 5, 74], # davos-1 #00054A
+ [ 17, 44, 113], # davos-29 #112C71
+ [ 41, 82, 145], # davos-58 #295291
+ [ 67, 112, 157], # davos-86 #43709D
+ [ 94, 133, 152], # davos-114 #5E8598
+ [121, 150, 141], # davos-143 #79968D
+ [153, 173, 136], # davos-171 #99AD88
+ [201, 210, 158], # davos-199 #C9D29E
+ [243, 243, 210], # davos-228 #F3F3D2
+ [254, 254, 254], # davos-256 #FEFEFE
+ ]
+ # converts the colors from 0-255 range to 0-1 range
+ colors_rgb = [[comp / 255.0 for comp in color] for color in colors_rgb255]
+
+ elif colormap == 6:
+ # Scientific Colour Map Categorical Palette
+ # https://www.fabiocrameri.ch/colourmaps/
+ # Crameri, F. (2018). Scientific colour maps. Zenodo. http://doi.org/10.5281/zenodo.1243862
+ print(" color map: turku")
+ # turku 10 Swatches
+ colors_rgb255 = [
+ [ 0, 0, 0], # turku-1 #000000
+ [ 36, 36, 32], # turku-29 #242420
+ [ 66, 66, 53], # turku-58 #424235
+ [ 95, 95, 68], # turku-86 #5F5F44
+ [126, 124, 82], # turku-114 #7E7C52
+ [169, 153, 101], # turku-143 #A99965
+ [207, 166, 124], # turku-171 #CFA67C
+ [234, 173, 152], # turku-199 #EAAD98
+ [252, 199, 195], # turku-228 #FCC7C3
+ [255, 230, 230], # turku-256 #FFE6E6
+ ]
+ # converts the colors from 0-255 range to 0-1 range
+ colors_rgb = [[comp / 255.0 for comp in color] for color in colors_rgb255]
+
+ elif colormap == 7:
+ # Scientific Colour Map Categorical Palette
+ # https://www.fabiocrameri.ch/colourmaps/
+ # Crameri, F. (2018). Scientific colour maps. Zenodo. http://doi.org/10.5281/zenodo.1243862
+ print(" color map: berlin")
+ # berlin 10 Swatches
+ colors_rgb255 = [
+ [158, 176, 255], # berlin-1 #9EB0FF
+ [ 91, 164, 219], # berlin-29 #5BA4DB
+ [ 45, 117, 151], # berlin-58 #2D7597
+ [ 26, 66, 86], # berlin-86 #1A4256
+ [ 17, 25, 30], # berlin-114 #11191E
+ [ 40, 13, 1], # berlin-143 #280D01
+ [ 80, 24, 3], # berlin-171 #501803
+ [138, 63, 42], # berlin-199 #8A3F2A
+ [196, 117, 106], # berlin-228 #C4756A
+ [255, 173, 173], # berlin-256 #FFADAD
+ ]
+ # converts the colors from 0-255 range to 0-1 range
+ colors_rgb = [[comp / 255.0 for comp in color] for color in colors_rgb255]
+
+ elif colormap == 8:
+ # Scientific Colour Map Categorical Palette
+ # https://www.fabiocrameri.ch/colourmaps/
+ # Crameri, F. (2018). Scientific colour maps. Zenodo. http://doi.org/10.5281/zenodo.1243862
+ print(" color map: grayC")
+ colors_rgb255 = [
+ [0, 0, 0], # grayC-1 #000000
+ [35, 35, 35], # grayC-29 #232323
+ [61, 61, 61], # grayC-58 #3D3D3D
+ [86, 86, 86], # grayC-86 #565656
+ [108, 108, 108],# grayC-114 #6C6C6C
+ [130, 130, 130],# grayC-143 #828282
+ [154, 154, 154],# grayC-171 #9A9A9A
+ [182, 182, 182],# grayC-199 #B6B6B6
+ [216, 216, 216],# grayC-228 #D8D8D8
+ [255, 255, 255],# grayC-256 #FFFFFF
+ ]
+ # converts the colors from 0-255 range to 0-1 range
+ colors_rgb = [[comp / 255.0 for comp in color] for color in colors_rgb255]
+
+ elif colormap == 9:
+ # custom snow
+ print(" color map: snow")
+ colors_rgb255 = [
+ [204, 204, 204], # gray
+ [153, 178, 204],
+ [ 71, 88, 122], # lipari-58 #47587A
+ [107, 95, 118], # lipari-86 #6B5F76
+ [142, 97, 108], # lipari-114 #8E616C
+ [188, 100, 97], # lipari-143 #BC6461
+ [229, 123, 98], # lipari-171 #E57B62
+ [231, 162, 121], # lipari-199 #E7A279
+ [255, 229, 204],
+ [255, 255, 255], # white
+ ]
+ # converts the colors from 0-255 range to 0-1 range
+ colors_rgb = [[comp / 255.0 for comp in color] for color in colors_rgb255]
+
+ elif colormap == 10:
+ # custom shakeGreen
+ print(" color map: shakeGreen")
+ colors_rgb = [
+ [0.8, 0.8, 0.8], # gray
+ [0.5, 0.5, 0.4],
+ [0.5, 0.4, 0.2],
+ [0.6, 0.6, 0.0], # green
+ [0.72, 0.25, 0.0 ], # orange
+ [0.81, 0.5, 0.0 ],
+ [0.9, 0.74, 0.0 ], # yellow
+ [1.0, 0.99, 0.0 ],
+ [1.0, 0.99, 0.25],
+ [1.0, 1.0, 1.0 ], # white
+ ]
+
+ elif colormap == 11:
+ # custom shakeRed
+ print(" color map: shakeRed")
+ colors_rgb = [
+ [0.85, 0.85, 0.85], # gray
+ [0.7, 0.7, 0.7 ],
+ [0.5, 0.5, 0.5 ],
+ [0.63, 0.0, 0.0 ], # red
+ [0.72, 0.25, 0.0 ], # orange
+ [0.81, 0.5, 0.0 ],
+ [0.9, 0.74, 0.0 ], # yellow
+ [1.0, 0.99, 0.0 ],
+ [1.0, 0.99, 0.25],
+ [1.0, 1.0, 1.0 ], # white
+ ]
+
+ elif colormap == 12:
+ # custom shakeUSGS
+ # taken from a shakemap plot of the USGS
+ # https://earthquake.usgs.gov/earthquakes/eventpage/us6000lqf9/shakemap/intensity
+ print(" color map: shakeUSGS")
+ colors_rgb = [
+ [1.0, 1.0, 1.0 ], # I: white
+ [0.8, 0.8, 0.8 ],
+ [0.77, 0.81, 1.0 ], # II-III : light purple
+ [0.5, 1.0, 0.98], # IV: turquoise
+ [0.5, 1.0, 0.54], # V : green
+ [1.0, 0.98, 0.0 ], # VI : yellow
+ [1.0, 0.77, 0.0 ], # VII : orange
+ [0.99, 0.52, 0.0 ], # VIII: darkorange
+ [0.98, 0.0, 0.0 ], # IX : red
+ [0.78, 0.0, 0.0 ], # X+ : darkred
+ ]
+
+ elif colormap == 13:
+ # custom shakeUSGSgray
+ # starts with darker gray than the default USGS
+ print(" color map: shakeUSGSgray")
+ colors_rgb = [
+ [0.8, 0.8, 0.8 ], # gray
+ [0.5, 0.5, 0.5 ],
+ [0.77, 0.81, 1.0 ], # II-III : light purple
+ [0.5, 1.0, 0.98], # IV: turquoise
+ [0.5, 1.0, 0.54], # V : green
+ [1.0, 0.98, 0.0 ], # VI : yellow
+ [1.0, 0.77, 0.0 ], # VII : orange
+ [0.99, 0.52, 0.0 ], # VIII: darkorange
+ [0.98, 0.0, 0.0 ], # IX : red
+ [0.5, 0.0, 0.0 ], # X+ : darkred
+ ]
+
+ else:
+ print("Warning: colormap with type {} is not supported, exiting...".format(colormap))
+ sys.exit(1)
+
+ # sets lookup table entries
+ if colormap != 0:
+ # Create a vtkColorTransferFunction
+ color_transfer_func = vtk.vtkColorTransferFunction()
+ # add specific scalar values in the color transfer function
+ for i, color in enumerate(colors_rgb):
+ val = i / (len(colors_rgb) - 1.0)
+ color_transfer_func.AddRGBPoint(val, color[0], color[1], color[2])
+ # Calculate the color values for the lookup table by interpolating from the color transfer function
+ for i in range(256):
+ scalar = i / 255.0 # Normalized scalar value from 0 to 1
+ color = color_transfer_func.GetColor(scalar)
+ lut.SetTableValue(i, color[0], color[1], color[2], 1.0)
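+ # example: with 10 swatches, table entry i=128 (scalar ~0.502) falls between
+ # the swatches at positions 4/9 (~0.444) and 5/9 (~0.556) and gets an interpolated color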
+ print("")
+
+ # build lookup table
+ lut.Build()
+ colors_array.SetLookupTable(lut)
+
+ # Write the data to PLY format
+ writer = vtk.vtkPLYWriter()
+ writer.SetInputData(polydata)
+
+ # Include vertex colors if available
+ if colors_array:
+ writer.SetArrayName(colors_array.GetName())
+ writer.SetLookupTable(lut)
+ # info
+ print(" writer: color mode = ",writer.GetColorMode())
+ print(" writer: color array name = ",writer.GetArrayName())
+ print(" writer: color component = ",writer.GetComponent())
+ print("")
+
+ os.system('rm -f output.ply')
+
+ writer.SetFileName(obj_file)
+ writer.Write()
+
+ if not os.path.exists(obj_file):
+ print("Error writing file ",obj_file)
+ sys.exit(1)
+
+ print("")
+ print(" converted to: ",obj_file)
+ print("")
+
+ # work-around for .obj files
+ # however, an .obj file can by default only store the mesh, not the color data on the vertices
+ # we thus prefer to work with the .ply file format above.
+ #
+ ## save mesh as .obj file
+ ## Path to your generated .obj file
+ #obj_file = 'output.obj'
+ #
+ ## Convert the data to polydata
+ #geometry_filter = vtk.vtkGeometryFilter()
+ #geometry_filter.SetInputConnection(reader.GetOutputPort())
+ #geometry_filter.Update()
+ #
+ ## Write the data to .obj format
+ #writer = vtk.vtkOBJWriter()
+ #writer.SetInputConnection(geometry_filter.GetOutputPort())
+ #
+ #os.system('rm -f output.obj')
+ #
+ #writer.SetFileName(obj_file) # Output .obj file path
+ #writer.Write()
+ #
+ #if not os.path.exists(obj_file):
+ # print("Error writing file ",obj_file)
+ # sys.exit(1)
+ #
+ ## appends data lines
+ ##..
+ ##d val
+ #if data_array:
+ # min_val = data_array.GetValue(0)
+ # max_val = data_array.GetValue(0)
+ #
+ # with open(obj_file, 'a') as f:
+ # f.write("# Associated data values:\n")
+ # for i in range(data_array.GetNumberOfTuples()):
+ # data_value = data_array.GetValue(i)
+ # min_val = np.minimum(min_val,data_value)
+ # max_val = np.maximum(max_val,data_value)
+ # f.write(f"# vertex {i}: {data_value}\n")
+ # #f.write(f"d {data_value}\n")
+ # print(" appended data: min/max = ",min_val,max_val)
+ # data_min = min_val
+ # data_max = max_val
+ #
+ #print("")
+ #print(" converted to: ",obj_file)
+
+ return obj_file
+
+def create_blender_setup(obj_file=""):
+ ## Blender setup
+ print("blender setup:")
+ print("")
+
+ # clears all existing objects in the scene
+ bpy.ops.object.select_all(action='SELECT')
+ bpy.ops.object.delete()
+
+ # clears only default Cube object and any previous output & output_dfx object,
+ # leaves Camera and Light object
+ #print(" objects: ",bpy.data.objects.keys())
+ #objs = bpy.data.objects
+ #for obj in objs:
+ # print(" objects: ",obj.name)
+ # if obj.name == "Cube": objs.remove(obj, do_unlink=True)
+ # if "output" in obj.name: objs.remove(obj, do_unlink=True)
+
+ print(" objects: ",bpy.data.objects.keys())
+ objs = bpy.data.objects
+ for obj in objs:
+ print(" objects: ",obj.name)
+ print("")
+
+ # import mesh object into blender
+ if len(obj_file) > 0:
+ print(" importing mesh file: ",obj_file)
+ # gets file extension
+ extension = os.path.splitext(obj_file)[1]
+ # reads the mesh object file
+ if extension == '.ply':
+ # Import .ply file
+ # New experimental, but much faster
+ bpy.ops.wm.ply_import(filepath=obj_file)
+ # or standard/legacy ply importer
+ #bpy.ops.import_mesh.ply(filepath=obj_file)
+ elif extension == '.obj':
+ # Import .obj file into Blender
+ bpy.ops.import_scene.obj(filepath=obj_file)
+ else:
+ print("unknown mesh object file extension ",extension," - exiting...")
+ sys.exit(1)
+
+ print(" imported in blender: ",obj_file)
+ print("")
+
+ # blender info
+ print(" scenes : ",bpy.data.scenes.keys())
+ print(" objects: ",bpy.data.objects.keys())
+ for obj in bpy.data.objects:
+ print(" object: ",obj.name)
+ print("")
+
+ ## mesh object
+ #obj = bpy.context.object
+ #print(obj.name, ":", obj)
+ #objs = bpy.context.selected_objects
+ #print(", ".join(o.name for o in objs))
+ # Select the imported object
+ obj = bpy.data.objects.get('output')
+ if obj is None:
+ print("Error: no mesh object in blender available, exiting...")
+ sys.exit(1)
+
+ # object is a mesh
+ mesh = obj.data
+
+ #debug
+ #print(" obj: ",obj)
+ print(" obj type: ",obj.type)
+ print(" obj data: ",mesh)
+ print(" obj polygons: ",mesh.polygons)
+ #print(" obj polygon 0: ",mesh.polygons[0])
+ #print(" obj polygon 0 loop: ",mesh.polygons[0].loop_indices)
+ #print(" obj data loops: ",mesh.loops)
+ #print(" obj data loops: ",mesh.loops[0])
+ print(" obj data vertex_colors: ",mesh.vertex_colors)
+ #print(" obj data vertex_layers_float: ",mesh.vertex_layers_float)
+ #print(" obj data vertex_layers_int: ",mesh.vertex_layers_int)
+ #print(" obj data vertex_layers_string: ",mesh.vertex_layers_string)
+ #print(" obj data vertex_colors: ",mesh.vertex_colors.get("a"))
+ #for loop_index in mesh.polygons[0].loop_indices:
+ # index = mesh.loops[loop_index].vertex_index
+ # print(" loop: index",loop_index,obj.data.loops[loop_index].vertex_index)
+ # color = vertex_colors[loop_index].color
+ #print(" obj data polygon_layers_float: ",mesh.polygon_layers_float)
+ #print(" obj data polygon_layers_int: ",mesh.polygon_layers_int)
+ #print(" obj data uv_layers: ",mesh.uv_layers)
+ #print(" obj data vertices",mesh.vertices)
+ #print(" obj data vertex 0",mesh.vertices[0])
+ #print(" obj data vertex 0 coord",mesh.vertices[0].co)
+ #print(" obj data vertex 0 keys",mesh.vertices.keys())
+ #print(" obj data vertex 0 get",mesh.vertices.items())
+ print("")
+
+ #print("obj data vertex_colors 0: ",obj.data.vertex_colors[0])
+ #print("obj data vertex_colors active data: ",obj.data.vertex_colors.active.data)
+
+ if obj is not None:
+ # Ensure the object has a mesh and vertex colors
+ if obj.type == 'MESH' and obj.data.vertex_colors is not None:
+ print(" Object 'output' has vertex colors and is a mesh.")
+ # Access vertex color data
+ #vertex_colors = obj.data.vertex_colors.active.data
+ # Iterate through vertex color data
+ #for poly in obj.data.polygons:
+ # for loop_index in poly.loop_indices:
+ # vertex_index = obj.data.loops[loop_index].vertex_index
+ # color = vertex_colors[loop_index].color
+ # # Print information about vertex colors
+ # #print(f"Vertex {vertex_index}: Color {color}")
+ else:
+ print(" Object 'output' does not have vertex colors or is not a mesh.")
+ else:
+ print(" Object 'output' not found.")
+ sys.exit(1)
+
+ print("")
+ print(" mesh: setting up shader nodes...")
+ print("")
+
+ # assigns new material
+ mat = bpy.data.materials.new(name="VertexColorMaterial")
+ mat.use_nodes = True
+ # node-graph
+ nodes = mat.node_tree.nodes
+ links = mat.node_tree.links
+
+ # Clear default nodes
+ #for node in nodes:
+ # nodes.remove(node)
+
+ # Create Attribute node to fetch vertex color
+ color_attribute = nodes.new(type='ShaderNodeAttribute')
+ color_attribute.attribute_name = "Col" # Use "Col" as it's the default name for vertex color
+ # Set the Color Attribute node to use vertex colors
+ color_attribute.attribute_type = 'GEOMETRY' # 'COLOR'
+
+ # Create Principled BSDF shader node
+ # checks default node
+ bsdf = nodes["Principled BSDF"]
+ if bsdf == None:
+ bsdf = nodes.new(type='ShaderNodeBsdfPrincipled')
+
+ bsdf.inputs['Metallic'].default_value = 0.4 # 0.1
+ bsdf.inputs['Roughness'].default_value = 0.5 # 0.8
+ bsdf.inputs['Specular'].default_value = 0.2 # 0.3
+
+ # (default) color map from vtk file
+ links.new(bsdf.inputs['Base Color'],color_attribute.outputs['Color'])
+
+ # no emission
+ bsdf.inputs['Emission Strength'].default_value = 0.0
+ # w/ emission (default) for brighter colors
+ #links.new(bsdf.inputs['Emission'],color_attribute.outputs['Color'])
+ #links.new(bsdf.inputs['Emission Strength'],color_attribute.outputs['Fac'])
+
+ # custom mesh coloring w/ color ramp node
+ if 1 == 0:
+ # creates a custom color ramp node to highlight shaking regions
+ ramp = nodes.new(type='ShaderNodeValToRGB')
+ ramp.color_ramp.interpolation = 'LINEAR'
+ #ramp.color_ramp.interpolation = 'B_SPLINE'
+ # default 2 slots
+ ramp.color_ramp.elements[0].position = 0.0
+ ramp.color_ramp.elements[0].color = [1,1,1,1]
+ ramp.color_ramp.elements[1].position = 0.4
+ ramp.color_ramp.elements[1].color = [0.5,0.5,0.5,1] # gray
+ # add color slot
+ ramp.color_ramp.elements.new(0.5)
+ ramp.color_ramp.elements[2].color = [0.2,0.2,0.3,1] # dark blue
+ # add color slot
+ ramp.color_ramp.elements.new(0.6)
+ ramp.color_ramp.elements[3].color = [0.8,0.0,0.0,1] # red
+ # add color slot
+ ramp.color_ramp.elements.new(0.65)
+ ramp.color_ramp.elements[4].color = [1.0,0.6,0.0,1] # yellow
+ # add color slot
+ ramp.color_ramp.elements.new(0.7)
+ ramp.color_ramp.elements[5].color = [1,1,1,1] # white
+
+ # Link Math node output to Color Ramp factor input
+ links.new(ramp.inputs["Fac"],color_attribute.outputs["Fac"])
+ # custom Color Ramp output to Principled BSDF node
+ links.new(bsdf.inputs['Base Color'],ramp.outputs['Color'])
+
+ # adds additional emission
+ if 1 == 0:
+ # Create Math node to manipulate grayscale value
+ math_node = nodes.new(type='ShaderNodeMath')
+ math_node.operation = 'GREATER_THAN'
+ math_node.inputs[1].default_value = 0.7 # Set threshold value
+ links.new(math_node.inputs['Value'],color_attribute.outputs['Fac'])
+
+ # Create RGB to BW node to convert color to black/white float value
+ #rgb_to_bw = nodes.new(type='ShaderNodeRGBToBW')
+ #links.new(rgb_to_bw.inputs['Color'],color_attribute.outputs['Color'])
+ #links.new(math_node.inputs['Value'],rgb_to_bw.outputs['Val'])
+
+ # emission node
+ emission = nodes.new('ShaderNodeEmission')
+ emission.name = "Emission"
+ links.new(emission.inputs['Color'],color_attribute.outputs['Color'])
+ links.new(emission.inputs['Strength'],math_node.outputs['Value'])
+
+ # mix light emission and main image
+ mix = nodes.new('ShaderNodeMixShader')
+ mix.name = "Mix Shader"
+ #links.new(mix.inputs['Fac'], ramp.outputs['Alpha'])
+ # takes output from main BSDF node
+ links.new(mix.inputs[1], bsdf.outputs[0])
+ links.new(mix.inputs[2], emission.outputs[0])
+
+ # link mixer to final material output
+ material_output = nodes["Material Output"]
+ links.new(material_output.inputs["Surface"], mix.outputs["Shader"])
+
+ # Assign the material to the object
+ if obj.data.materials:
+ obj.data.materials[0] = mat
+ else:
+ obj.data.materials.append(mat)
+
+ print(" blender mesh done")
+ print("")
+
+
+def add_blender_buildings(buildings_file=""):
+ global mesh_origin,mesh_scale_factor
+ global use_cycles_renderer
+
+ # checks if anything to do
+ if len(buildings_file) == 0: return
+
+ print("buildings file: ",buildings_file)
+ print("")
+
+ # check file
+ if not os.path.exists(buildings_file):
+ print("Error: buildings file specified not found...")
+ sys.exit(1)
+
+ # buildings need to be given as .ply (Stanford) format file
+ print(" reading in .ply mesh...")
+
+ # Enable the experimental .ply importer
+ # New experimental, but much faster
+ bpy.ops.wm.ply_import(filepath=buildings_file)
+ # or standard/legacy ply importer
+ #bpy.ops.import_mesh.ply(filepath=buildings_file)
+
+ # Print the names of imported objects
+ print("")
+ for obj in bpy.context.selected_objects:
+ print(" imported object:", obj.name," - type: ",obj.type)
+ # select buildings mesh (name must contain dxf)
+ #if obj.type == 'MESH' and 'dxf' in obj.name:
+ # bpy.context.view_layer.objects.active = obj
+ # obj_buildings = obj
+ # break
+ print("")
+
+ # gets active object
+ obj = bpy.context.view_layer.objects.active
+
+ if obj is None:
+ print(" Object for buildings not found.")
+ sys.exit(1)
+
+ # note: due to different resolution of the meshes, the mesh elevation of SPECFEM mesh (e.g., AVS_shaking_map.inp)
+ # and the buildings (e.g., from SwissTopo) might be slightly off by a few meters.
+
+ # moves and scales UTM mesh
+ if 'utm' in buildings_file:
+ # need to translate and scale the UTM mesh to place it within the vtk mesh
+ print(" UTM mesh: moving & scaling mesh...")
+ print(" mesh origin = ",mesh_origin)
+ print(" mesh scale factor = ",mesh_scale_factor)
+ print("")
+
+ # Get the mesh data
+ mesh = obj.data
+
+ # Create a BMesh object
+ bm = bmesh.new()
+ bm.from_mesh(mesh)
+
+ # Translate the vertices (example: translating by (1, 0, 0))
+ translation_vector = Vector(mesh_origin)
+ for vert in bm.verts:
+ vert.co -= translation_vector
+
+ # Scale the vertices (example: scaling by 1.5 in all axes)
+ scale_factor = mesh_scale_factor
+ for vert in bm.verts:
+ vert.co *= scale_factor
+
+ # Update the mesh with the modified vertices
+ bm.to_mesh(mesh)
+ bm.free()
+
+ # Create a new material
+ mat = bpy.data.materials.new('buildingsMaterial')
+
+ # enable node-graph edition mode
+ mat.use_nodes = True
+
+ nodes = mat.node_tree.nodes
+ links = mat.node_tree.links
+
+ # Clear default nodes
+ for node in nodes:
+ nodes.remove(node)
+
+ # gets scene
+ scene = bpy.context.scene
+
+ if use_cycles_renderer:
+ # Set render engine to Cycles
+ scene.render.engine = 'CYCLES'
+
+ # Create a Glass BSDF node
+ mat_node = nodes.new(type='ShaderNodeBsdfGlass')
+ # Set IOR (Index of Refraction) and Roughness values
+ mat_node.inputs['IOR'].default_value = 1.5 # (e.g., 1.5 for typical glass)
+ mat_node.inputs['Roughness'].default_value = 0.6 # Roughness value (0.0 for perfectly smooth)
+
+ else:
+ # BLENDER_EEVEE renderer
+ scene.render.engine = 'BLENDER_EEVEE'
+
+ # create a glossy node
+ mat_node = nodes.new(type='ShaderNodeBsdfGlossy')
+ mat_node.inputs['Roughness'].default_value = 0.5 # Roughness value (0.0 for perfectly smooth)
+ mat_node.inputs['Color'].default_value = (0.8, 0.74, 0.56, 1)
+
+ # create a glass node
+ # note: the transparency effect with eevee for buildings doesn't look good enough yet...
+ # glass buildings need the cycles renderer.
+ # thus, we use glossy buildings to get some environment colors onto the buildings.
+ #mat_node = nodes.new(type='ShaderNodeBsdfGlass')
+ # node color
+ #mat_node.inputs['Color'].default_value = (0.6, 0.6, 0.6, 1)
+ # enable transparency for eevee
+ # material transparency
+ #mat.blend_method = 'BLEND' # 'OPAQUE', 'BLEND', ..
+ #mat.shadow_method = 'HASHED'
+ #mat.use_backface_culling = False
+ #mat.show_transparent_back = True
+ #mat.use_screen_refraction = True
+ #mat.refraction_depth = 0.01
+
+ # Create Output node
+ output_node = nodes.new(type='ShaderNodeOutputMaterial')
+ output_node.location = 400, 0
+
+ # Link nodes
+ links.new(mat_node.outputs['BSDF'], output_node.inputs['Surface'])
+
+ # Assign the material to the object
+ if obj.data.materials:
+ obj.data.materials[0] = mat
+ else:
+ obj.data.materials.append(mat)
+
+ # blender info
+ print(" scenes : ",bpy.data.scenes.keys())
+ print(" objects: ",bpy.data.objects.keys())
+ for obj in bpy.data.objects:
+ print(" object: ",obj.name)
+ print("")
+
+ print(" blender buildings mesh done")
+ print("")
+
+def get_mesh_elevation_at_origin():
+ # gets vtk mesh object
+ obj = bpy.data.objects.get('output')
+ if obj is None:
+ print("Error: no mesh object in blender available, exiting...")
+ sys.exit(1)
+
+ # object is a mesh
+ mesh = obj.data
+
+ # Define the point of interest (X, Y, Z coordinates)
+ point_of_interest = Vector((0.0, 0.0, 0.0)) # origin
+
+ # Define the origin of the ray (above the mesh)
+ ray_origin = Vector((point_of_interest.x, point_of_interest.y, 10.0)) # Adjust the Z coordinate as needed
+
+ # Get the world matrix of the object
+ matrix_world = obj.matrix_world
+
+ # Calculate the direction of the ray (pointing downwards)
+ ray_direction = Vector((0, 0, -1))
+
+ # Transform the ray direction to world space
+ ray_direction.rotate(matrix_world.to_quaternion())
+
+ # Perform the ray casting
+ success, location, _, _ = obj.ray_cast(ray_origin, ray_direction)
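+ # note: Object.ray_cast() works in object space; this assumes the imported
+ # 'output' mesh keeps the identity world transform from the .ply import above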
+
+ if success:
+ # Get the elevation (Z coordinate) of the mesh at the point of interest
+ elevation = location.z
+ print(" mesh elevation at origin = ", elevation)
+ print("")
+ else:
+ # sets a default height
+ elevation = 0.1
+ print(" mesh elevation at origin: no intersection found")
+ print(" setting default = ",elevation)
+ print("")
+
+ return elevation
+
+
+def save_blender_scene(title="",animation=False,close_up_view=False):
+ global blender_img_resolution_X,blender_img_resolution_Y
+ global use_cycles_renderer
+
+ ## blender scene setup
+ print("Setting up blender scene...")
+ print("")
+
+ # determine elevation from mesh at origin point to position the camera
+ elevation = get_mesh_elevation_at_origin()
+
+ # elevation offset for camera positioning
+ elevation_offset = 0.15
+ camera_angle_depth_degree = 75.0
+ if title == 'Lauterbrunnen' or title == 'Zermatt':
+ elevation_offset = 0.25
+ camera_angle_depth_degree = 70.0
+
+ # gets scene
+ scene = bpy.context.scene
+
+ ## camera
+ print(" adding camera")
+ # adds camera position
+ bpy.ops.object.camera_add(enter_editmode=False, align='VIEW')
+ # current object
+ cam = bpy.context.object
+ cam.name = "Camera" #cam = bpy.data.objects["Camera"]
+ scene.camera = cam
+
+ if close_up_view:
+ ## close-up view
+ print(" camera location relative to mesh elevation: ",elevation)
+ # Set camera translation
+ scene.camera.location = (0, -0.7, elevation + elevation_offset)
+ # Set camera rotation in euler angles
+ scene.camera.rotation_mode = 'XYZ'
+ scene.camera.rotation_euler = (camera_angle_depth_degree * DEGREE_TO_RAD, 0, 0)
+ # Set camera fov in degrees
+ scene.camera.data.angle = float(30.0 * DEGREE_TO_RAD)
+ else:
+ ## overview
+ # Set camera translation
+ scene.camera.location = (0, -4, 4)
+ # Set camera rotation in euler angles
+ scene.camera.rotation_mode = 'XYZ'
+ scene.camera.rotation_euler = (44.0 * DEGREE_TO_RAD, 0, 0)
+ # Set camera fov in degrees
+ scene.camera.data.angle = float(30.0 * DEGREE_TO_RAD)
+
+ ## light
+ print(" adding light")
+ # adds sun position
+ bpy.ops.object.light_add(type='AREA')
+ # current object
+ light = bpy.context.object
+ light.name = "Light" #light = bpy.data.objects["Light"]
+
+ light.location = (1.5, -0.5, 1.3)
+ light.rotation_mode = 'XYZ'
+ light.rotation_euler = (0, 40.0 * DEGREE_TO_RAD, 0)
+ # sets light to rectangular (plane)
+ light.data.type = 'AREA'
+ light.data.shape = 'SQUARE'
+ # Change the light's color
+ light.data.color = (1, 1, 1) # Set light color to white
+ # intensity
+ light.data.energy = 80 # W
+ # Set the light's size
+ light.data.size = 0.1
+ light.data.use_contact_shadow = True # Enable contact shadows
+
+ ## background plane
+ print(" adding sea-level plane")
+ # Create a mesh plane (to capture shadows and indicate sea level)
+ bpy.ops.mesh.primitive_plane_add(size=10, enter_editmode=False, location=(0, 0, 0))
+
+ # Get the created plane object
+ plane_object = bpy.context.object
+
+ # Set the object's material to white
+ mat = bpy.data.materials.new(name="White")
+ if close_up_view:
+ mat.diffuse_color = (0.8, 0.8, 0.8, 1) # similar to the background color to get an infinite-background look
+ else:
+ mat.diffuse_color = (0.135, 0.135, 0.135, 1) # gray for better contrast
+ plane_object.data.materials.append(mat)
+
+ # text object
+ if len(title) > 0:
+ print(" adding text object:")
+ print(" title = ",title)
+
+ # Create a new text object
+ bpy.ops.object.text_add(location=(-0.3, -1.2, 0.01)) # Adjust the location as needed
+ text_object = bpy.context.object
+ text_object.data.body = title # Set the text content
+
+ # Set text properties (font, size, etc.)
+ text_object.data.size = 0.2 # Adjust the font size
+
+ #debug
+ #print(" blender fonts available: ",bpy.data.fonts.keys())
+
+ if 'Bfont' in bpy.data.fonts:
+ text_object.data.font = bpy.data.fonts['Bfont'] # Use a specific default font
+ elif 'Bfont Regular' in bpy.data.fonts:
+ text_object.data.font = bpy.data.fonts['Bfont Regular'] # Use a specific default font
+ elif 'Arial Regular' in bpy.data.fonts:
+ text_object.data.font = bpy.data.fonts['Arial Regular']
+
+ #text_object.data.font = bpy.data.fonts.load("/path/to/your/font.ttf") # Replace with your font path
+
+ # Set text material
+ text_material = bpy.data.materials.new(name="TextMaterial")
+ text_material.use_nodes = True
+ text_material.node_tree.nodes["Principled BSDF"].inputs["Base Color"].default_value = (0.5, 0.5, 0.5, 1)
+
+ text_object.data.materials.append(text_material)
+
+ # save scene and render options
+ print("")
+ print(" saving blender scene...")
+ print("")
+
+ # renderer options
+ if use_cycles_renderer:
+ # Set render engine to Cycles
+ scene.render.engine = 'CYCLES'
+
+ ## adjust settings for faster rendering w/ cycles
+ #scene.cycles.device = 'GPU'
+ # Set tile size
+ scene.cycles.tile_size = 512 # 256
+ # Lower the number of samples
+ scene.cycles.samples = 128 # Adjust the number of samples as needed
+ # Enable denoising
+ scene.cycles.use_denoising = True
+
+ # Adjust light path bounces
+ scene.cycles.max_bounces = 4 # Adjust bounce values for diffuse, glossy, transmission, etc.
+
+ ## additional parameters to check for better performance:
+ # Use Branched Path Tracing integrator
+ #scene.cycles.progressive = 'BRANCHED_PATH'
+ # Adjust Branched Path Tracing settings
+ #scene.cycles.use_square_samples = False # Enable square samples for the branched path tracing
+ #scene.cycles.diffuse_samples = 3 # Adjust samples per type (diffuse, glossy, etc.)
+ #scene.cycles.glossy_samples = 3
+ #scene.cycles.transmission_samples = 3
+ #scene.cycles.ao_samples = 3
+ #scene.cycles.mesh_light_samples = 3
+ #scene.cycles.subsurface_samples = 3
+
+ else:
+ # Set render engine to Eevee
+ scene.render.engine = 'BLENDER_EEVEE'
+
+ # ambient occlusion
+ scene.eevee.use_gtao = True
+ scene.eevee.gtao_distance = 0.01
+ scene.eevee.gtao_factor = 1.0
+
+ # turns on screen space reflections
+ scene.eevee.use_ssr = True
+ # refraction
+ scene.eevee.use_ssr_refraction = True
+ scene.eevee.ssr_thickness = 0.1
+
+ # turns on bloom
+ #scene.eevee.use_bloom = True
+
+ # render resolution
+ scene.render.resolution_x = blender_img_resolution_X
+ scene.render.resolution_y = blender_img_resolution_Y
+ # Set the render percentage (optional, for scaling down the final output)
+ #scene.render.resolution_percentage = 50 # Adjust as needed
+
+ # sets white background color
+ bpy.data.worlds["World"].node_tree.nodes["Background"].inputs[0].default_value = (1, 1, 1, 1)
+
+ print(" renderer: ",scene.render.engine)
+ print("")
+
+ if animation:
+ print("animation:")
+
+ # initializes frames
+ number_of_keyframes = 0
+ total_frames = 0
+
+ start_frame = -1
+ end_frame = -1
+
+ scene.frame_start = -1
+ scene.frame_end = -1
+
+ ## dive-in keyframes
+ if 1 == 1:
+ # setup keyframes
+ number_of_keyframes += 4
+ keyframe_interval = 20
+
+ total_frames = (number_of_keyframes-1) * keyframe_interval
+
+ print(" dive-in:")
+ print(" keyframe interval = ",keyframe_interval)
+ print(" number of keyframes = ",number_of_keyframes)
+ print("")
+
+ # define a frame timeline
+ start_frame = 0
+ inter_frame_1 = keyframe_interval
+ inter_frame_2 = 2*keyframe_interval
+ end_frame = total_frames
+
+ scene.frame_start = start_frame
+ scene.frame_end = end_frame
+
+ # moves camera
+ cam = bpy.data.objects["Camera"]
+
+ # start frame
+ cam.keyframe_insert("location", frame=start_frame) # (0, -4, 4)
+ cam.keyframe_insert("rotation_euler", frame=start_frame) # (44.0 * DEGREE_TO_RAD, 0, 0)
+
+ # intermediate frame
+ cam.location = (0, -2.5, 1)
+ cam.rotation_euler = (60.0 * DEGREE_TO_RAD, 0, 0)
+ cam.keyframe_insert("location", frame=inter_frame_1)
+ cam.keyframe_insert("rotation_euler", frame=inter_frame_1)
+
+ # intermediate frame
+ cam.location = (0, -1.2, elevation + elevation_offset)
+ cam.rotation_euler = (camera_angle_depth_degree * DEGREE_TO_RAD, 0, 0)
+ cam.keyframe_insert("location", frame=inter_frame_2)
+ cam.keyframe_insert("rotation_euler", frame=inter_frame_2)
+
+ # end frame
+ cam.location = (0, -0.7, elevation + elevation_offset) # Ending position
+ cam.rotation_euler = (camera_angle_depth_degree * DEGREE_TO_RAD, 0, 0)
+ cam.keyframe_insert("location", frame=end_frame)
+ cam.keyframe_insert("rotation_euler", frame=end_frame)
+
+ ## rotation
+ if 1 == 1:
+ # appends frames for rotation
+ frames = 60 # Number of frames for the animation
+ keyframe_interval = 5
+ number_of_keyframes += frames
+
+ total_frames += frames * keyframe_interval
+
+ print(" rotation:")
+ print(" keyframe interval = ",keyframe_interval)
+ print(" number of keyframes = ",frames)
+ print("")
+
+ if start_frame == -1:
+ start_frame = 0
+ else:
+ start_frame = end_frame + 1 * keyframe_interval
+
+ if end_frame == -1:
+ end_frame = total_frames
+ else:
+ end_frame = total_frames + 1 * keyframe_interval
+
+ # updates end frame count
+ scene.frame_end = end_frame
+
+ # Set the rotation parameters
+ rotation_degrees = 360 # Total rotation in degrees
+
+ # moves camera around the z-axis
+ cam = bpy.data.objects['Camera']
+
+ #if scene.frame_start == -1:
+ # # add start frame
+ # scene.frame_start = start_frame
+ # # start frame
+ # cam.keyframe_insert("location", frame=start_frame) # (0, -4, 4)
+ # cam.keyframe_insert("rotation_euler", frame=start_frame) # (44.0 * DEGREE_TO_RAD, 0, 0)
+
+ # camera offset from z-axis
+ x_0 = cam.location[0]
+ y_0 = cam.location[1]
+ z_0 = cam.location[2]
+
+ offset_distance = (x_0**2 + y_0**2)**0.5
+ angle_0 = atan2(y_0,x_0)
+
+ # Set keyframes for rotation
+ for frame in range(0,frames+1):
+ angle = angle_0 + radians(frame * (rotation_degrees / frames))
+
+ # Calculate camera position around the Z-axis
+ x = offset_distance * cos(angle)
+ y = offset_distance * sin(angle)
+ camera_location = Vector((x, y, z_0)) # Maintain Z position
+
+ # Set camera rotation towards the origin
+ look_at = Vector((0, 0, elevation)) # Origin point on mesh
+ direction = look_at - camera_location
+ rot_quat = direction.to_track_quat('-Z', 'Y') # Point -Z axis to direction
+ camera_rotation = rot_quat.to_euler()
+
+ # there seems to be a problem with frame interpolation when angles pass 2 PI:
+ # to_euler() returns angles in [-pi,pi], so interpolating a step from +pi to -pi
+ # sweeps backwards through the scene. to avoid this, we keep the angle
+ # increasing within [0,2pi[
+ #
+ # in our case here, we change & increase only the third euler angle, while the first 2 stay fixed
+ # when the camera rotates around the z-axis.
+ if camera_rotation[2] < 0: camera_rotation[2] += 2.0 * PI
+
+ frame_number = start_frame + frame * keyframe_interval
+ scene.frame_set(frame_number)
+
+ # Set camera location and keyframe for each frame
+ cam.rotation_euler = camera_rotation
+ cam.location = camera_location
+
+ cam.keyframe_insert("location", frame=frame_number)
+ cam.keyframe_insert("rotation_euler", frame=frame_number)
+
+ #print("frame: ",frame,frame_number,"angle",angle,cam.location,"rotation",cam.rotation_euler)
+
+ print(" total number of keyframes = ",number_of_keyframes)
+ print(" total number of frames = ",total_frames)
+ print("")
+
+ # smoothing move
+ #for fcurve in cam.animation_data.action.fcurves:
+ # for keyframe_point in fcurve.keyframe_points:
+ # keyframe_point.interpolation = 'LINEAR'
+
+ # render settings
+ # see: https://docs.blender.org/api/current/bpy.types.FFmpegSettings.html#bpy.types.FFmpegSettings
+ # ffmpeg
+ scene.render.image_settings.file_format = 'FFMPEG' # 'AVI_JPEG', 'FFMPEG'
+ # codec
+ scene.render.ffmpeg.codec = 'H264' # compression: 'MPEG4', 'H264'
+ #scene.render.ffmpeg.constant_rate_factor = 'HIGH'
+ scene.render.ffmpeg.format = 'MPEG4' # output file container format
+ # frames per second (blender default is 24)
+ scene.render.fps = 20
+
+ # user output
+ print(" movie fps = ",scene.render.fps)
+ print(" movie format = ",scene.render.ffmpeg.format )
+ print("")
+
+ # output movie
+ name = './out.anim'
+ filename = name + '.mp4'
+ scene.render.filepath = filename
+ scene.render.use_file_extension = False
+
+ # timing
+ tic = time.perf_counter()
+
+ # redirect stdout to null
+ print("rendering animation: {} ...".format(filename))
+ # to avoid long stdout output by the renderer:
+ # Fra:1 Mem:189.14M (Peak 190.26M) | Time:00:00.68 | Syncing Sun
+ # Fra:1 Mem:189.14M (Peak 190.26M) | Time:00:00.68 | Syncing Camera
+ # ..
+ suppress = False
+ with SuppressStream(sys.stdout,suppress):
+ # render animation
+ bpy.ops.render.render(animation=True)
+ print(" written to: ",filename)
+ print("")
+
+ # timing
+ toc = time.perf_counter()
+ if toc - tic < 100.0:
+ print("elapsed time for animation render is {:0.4f} seconds\n".format(toc - tic))
+ else:
+ minutes = int((toc-tic)/60.0)
+ sec = (toc - tic) - minutes * 60
+ print("elapsed time for animation render is {} min {:0.4f} sec\n".format(minutes,sec))
+
+ else:
+ # output image
+ scene.render.image_settings.file_format = 'JPEG' # 'PNG'
+ name = './out'
+ filename = name + '.jpg'
+ scene.render.filepath = filename
+ scene.render.use_file_extension = False
+
+ # timing
+ tic = time.perf_counter()
+
+ # redirect stdout to null
+ print(" rendering image: {} ...".format(filename))
+ # to avoid long stdout output by the renderer:
+ # Fra:1 Mem:462.85M (Peak 462.85M) | Time:00:00.04 | ..
+ # ..
+ suppress = False
+ with SuppressStream(sys.stdout,suppress):
+ # Render Scene and store the scene
+ bpy.ops.render.render(write_still=True)
+ print(" written to: ",filename)
+ print("")
+
+ # timing
+ toc = time.perf_counter()
+ if toc - tic < 100.0:
+ print("elapsed time for image render is {:0.4f} seconds\n".format(toc - tic))
+ else:
+ minutes = int((toc-tic)/60.0)
+ sec = (toc - tic) - minutes * 60
+ print("elapsed time for image render is {} min {:0.4f} sec\n".format(minutes,sec))
+
+ # save blend file
+ dir = os.getcwd()
+ name = 'out.blend'
+
+ filename = dir + "/" + name
+ bpy.ops.wm.save_as_mainfile(filepath=filename)
+
+ print("")
+ print(" saved blend file: ",filename)
+ print("")
+
+
+# main routine
+def plot_with_blender(vtk_file="",image_title="",colormap=0,color_max=None,buildings_file="",animation=False,close_up_view=False):
+ """
+ renders image for (earth) sphere with textures
+ """
+ # set current directory, in case we need it to load files
+ dir = os.getcwd()
+ print("current directory: ",dir)
+ print("")
+
+ # converts .vtu to .obj file for blender to read in
+ obj_file = convert_vtk_to_obj(vtk_file,colormap,color_max)
+
+ # setup mesh node with shaders
+ create_blender_setup(obj_file)
+
+ # add buildings
+ add_blender_buildings(buildings_file)
+
+ # save blender scene
+ save_blender_scene(image_title,animation,close_up_view)
+
+
+def usage():
+ print("usage: ./plot_with_blender.py [--vtk_file=file] [--title=my_mesh_name] [--colormap=val] [--color-max=val] [--buildings=file]")
+ print(" [--with-cycles/--no-cycles] [--help]")
+ print(" with")
+ print(" --vtk_file - input mesh file (.vtk, .vtu, .inp)")
+ print(" --title - title text (added to image rendering)")
+ print(" --colormap - color map type: 0==VTK / 1==topo / 2==lisbon / 3==lajolla / 4==lipari")
+ print(" 5==davos / 6==turku / 7==berlin / 8==grayC / 9==snow")
+ print(" 10==shakeGreen /11==shakeRed /12==shakeUSGS /13==shakeUSGSgray")
+ print(" (default is shakeUSGSgray for shakemaps)")
+ print(" --color-max - fixes maximum value of colormap for moviedata to val, e.g., 1.e-7)")
+ print(" --buildings - mesh file (.ply) with buildings to visualize for the area")
+ print(" --with-cycles/--no-cycles - turns on/off CYCLES renderer (default is off, using BLENDER_EEVEE)")
+ print(" --small - turns on small images size (400x600px) for preview")
+ print(" --anim - turns on movie animation (dive-in and rotation)")
+ print(" --help - this help for usage...")
+ sys.exit(1)
+
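+# example usage (hypothetical file names):
+# ./plot_with_blender.py --vtk_file=OUTPUT_FILES/shakemap.vtu --title="Shakemap" --colormap=12
+# ./plot_with_blender.py --vtk_file=mesh.vtk --buildings=buildings_utm.ply --closeup --anim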
+
+if __name__ == '__main__':
+ # init
+ vtk_file = ""
+ image_title = ""
+ color_max = None
+ colormap = -1
+ buildings_file = ""
+ animation = False
+ close_up_view = False
+
+ # reads arguments
+ #print("\nnumber of arguments: " + str(len(sys.argv)))
+ i = 0
+ for arg in sys.argv:
+ i += 1
+ #print("argument "+str(i)+": " + arg)
+ # get arguments
+ if "--help" in arg:
+ usage()
+ elif "--anim" in arg:
+ animation = True
+ elif "--buildings=" in arg:
+ buildings_file = arg.split('=')[1]
+ elif "--closeup" in arg:
+ close_up_view = True
+ elif "--color-max" in arg:
+ color_max = float(arg.split('=')[1])
+ elif "--colormap" in arg:
+ colormap = int(arg.split('=')[1])
+ elif "--with-cycles" in arg:
+ use_cycles_renderer = True
+ elif "--no-cycles" in arg:
+ use_cycles_renderer = False
+ elif "--small" in arg:
+ blender_img_resolution_X = 600
+ blender_img_resolution_Y = 400
+ elif "--title=" in arg:
+ image_title = arg.split('=')[1]
+ elif "--vtk_file=" in arg:
+ vtk_file = arg.split('=')[1]
+ elif i >= 8:
+ print("argument not recognized: ",arg)
+
+ # sets default colormap
+ if colormap == -1:
+ # for shakemaps
+ if 'shaking' in vtk_file or 'shakemap' in vtk_file:
+ colormap = 13 # shakeUSGSgray
+ else:
+ colormap = 0 # VTK diverging red-blue
+
+ # logging
+ cmd = " ".join(sys.argv)
+ filename = './plot_with_blender.log'
+ with open(filename, 'a') as f:
+ print("command call --- " + str(datetime.datetime.now()),file=f)
+ print(cmd,file=f)
+ print("command logged to file: " + filename)
+
+ # main routine
+ plot_with_blender(vtk_file,image_title,colormap,color_max,buildings_file,animation,close_up_view)
diff --git a/utils/scripts/run_get_simulation_topography.py b/utils/scripts/run_get_simulation_topography.py
new file mode 100755
index 000000000..12ee4ef45
--- /dev/null
+++ b/utils/scripts/run_get_simulation_topography.py
@@ -0,0 +1,1346 @@
+#!/usr/bin/env python
+#
+# script to setup topography for a given region
+#
+# required python modules:
+# - utm - version 0.4.2
+# - elevation - version 1.0.5
+#
+# required external packages:
+# - GMT - version 5.4.2
+# - gdal - version 2.2.3
+#
+from __future__ import print_function
+
+import os
+import sys
+import subprocess
+import math
+import datetime
+
+## elevation package
+# see: https://github.com/bopen/elevation
+# http://elevation.bopen.eu/en/stable/
+# install by: > sudo pip install elevation
+try:
+ import elevation
+except:
+ print("Error importing module `elevation`")
+ print("install by: > pip install -U elevation")
+ sys.exit(1)
+
+## elevation
+# check
+elevation.util.selfcheck(elevation.TOOLS)
+
+## UTM zones
+# see: https://github.com/Turbo87/utm
+# install by: > sudo pip install utm
+try:
+ import utm
+except:
+ print("Error importing module `utm`")
+ print("install by: > pip install -U utm")
+ sys.exit(1)
+
+## DEM analysis (slope, topographic wetness index, ..)
+# http://conference.scipy.org/proceedings/scipy2015/pdfs/mattheus_ueckermann.pdf
+# install by: > sudo pip install pyDEM
+
+
+## GMT
+## http://gmt.soest.hawaii.edu
+## install by: > sudo apt install gmt
+## setup gmt functions by:
+## > source /usr/share/gmt/tools/gmt_functions.sh
+try:
+ cmd = 'gmt --version'
+ print("> ",cmd)
+ version = subprocess.check_output(cmd, shell=True)
+except:
+ print("Error using `gmt`")
+ print("install by: > sudo apt install gmt")
+ sys.exit(1)
+# avoid bytes string issues with strings like b'Hello', converts to text string
+if isinstance(version, (bytes, bytearray)): version = version.decode("utf-8")
+version = version.strip()
+print("GMT version: %s" % (version))
+print("")
+# get version numbers for later (grdconvert command format changes between version 5.3 and 5.4)
+elem = version.split(".")
+gmt_major = int(elem[0])
+gmt_minor = int(elem[1])
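+# example: version string '6.4.0' gives gmt_major = 6, gmt_minor = 4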
+
+# GMT python interface
+# todo: not used yet, calling gmt commands directly as shell commands...
+#
+#try:
+# import pygmt
+#except:
+# print("Error importing module `pygmt`")
+# print("install by: > pip install -U pygmt")
+# sys.exit(1)
+
+
+## GDAL/OGR
+## https://www.gdal.org
+try:
+ cmd = 'gdalinfo --version'
+ print("> ",cmd)
+ version = subprocess.check_output(cmd, shell=True)
+except:
+ print("Error using `gdalinfo`")
+ print("install by: > sudo apt install gdal-bin")
+ sys.exit(1)
+# avoid bytes string issues with strings like b'Hello', converts to text string
+if isinstance(version, (bytes, bytearray)): version = version.decode("utf-8")
+version = version.strip()
+print("GDAL version: %s" % (version.strip()))
+print("")
+
+# GDAL python interface
+# todo: not used yet, calling gdal commands directly as shell commands...
+#
+#try:
+# from osgeo import gdal
+#except:
+# print("Error importing module `gdal`")
+# print("install by: > pip install -U gdal")
+# sys.exit(1)
+
+
+#########################################################################
+## USER PARAMETERS
+
+## SRTM data
+# type: 'low' == SRTM 90m / 'high' == SRTM 30m / else e.g. 'etopo' == topo30 (30-arc seconds)
+SRTM_type = 'low'
+
+## GMT grid sampling (in degrees)
+# to convert increment to km: incr_dx * math.pi/180.0 * 6371.0 -> 1 degree = 111.1949 km
+# sampling coarse (~5.5km)
+#incr_dx = 0.05
+# sampling fine (~1.1km)
+#incr_dx = 0.01
+# sampling fine (~500m)
+incr_dx = 0.0045
+# sampling fine (~110m)
+#incr_dx = 0.001
+
+# topography shift for 2nd interface (shifts original topography downwards)
+toposhift = 8000.0
+#toposhift = 1000.0 (small, local meshes)
+
+# scaling factor for topography variations
+toposcale = 0.1
+
+## local data directory
+datadir = 'topo_data'
+
+#########################################################################
+
+# globals
+utm_zone = 0
+gmt_region = ""
+
+
+def get_topo_DEM(region,filename_path,res='low'):
+ """
+ downloads and creates tif-file with elevation data from SRTM 1-arc second or SRTM 3-arc seconds
+ """
+ # check
+ if len(filename_path) == 0:
+ print("error invalid filename",filename_path)
+ sys.exit(1)
+
+ # region format: #lon_min #lat_min #lon_max #lat_max (left bottom right top) in degrees
+ # for example: region = (12.35, 41.8, 12.65, 42.0)
+ print("region:")
+ print(" longitude min/max = ",region[0],"/",region[2],"(deg)")
+ print(" latitude min/max = ",region[1],"/",region[3],"(deg)")
+ print("")
+
+ # earth radius
+ earth_radius = 6371000.0
+ # earth circumference
+ earth_d = 2.0 * math.pi * earth_radius # in m ~ 40,000 km
+ # lengths
+ length_deg = earth_d * 1./360 # length of 1 degree ~ 111 km
+ length_arcsec = length_deg * 1.0/3600 # length of 1 arcsecond ~ 30.89 m
+
+ # range / lengths (in km)
+ range_lon = region[2]-region[0] # in degrees
+ range_lat = region[3]-region[1]
+ length_lon = range_lon * length_deg / 1000.0 # in km
+ length_lat = range_lat * length_deg / 1000.0
+ print(" longitude range: ",length_lon,"(km)")
+ print(" latitude range: ",length_lat,"(km)")
+ print("")
+
+ # maximum height of earth curvature
+ # e.g., http://earthcurvature.com
+ #
+ # ***
+ # *** | ***
+ # *** h | ***
+ # A**-------|-------**B curvature height (h) between point A and B
+ #
+ # formula: alpha = (A + B)/2 mid-distance (in degree) as angle
+ # h = R * ( 1 - cos(alpha) ) with R = earth radius 6371.0 km, assuming spherical earth
+ alpha = 0.5 * max(range_lon,range_lat)
+ alpha = alpha * math.pi/180.0 # in rad
+ h = earth_radius * (1.0 - math.cos(alpha) )
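+ # example: for a 1 x 1 degree region, alpha = 0.5 deg = 0.0087 rad and
+ # h = 6371 km * (1 - cos(0.5 deg)) ~ 243 m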
+ print(" maximum Earth curvature height = ",h,"(m)")
+ print("")
+
+ # resolution
+ if res == 'low':
+ product = 'SRTM3'
+ length = 3.0 * length_arcsec
+ else:
+ product = 'SRTM1'
+ length = length_arcsec
+
+ print("resolution: ",res,product)
+ print(" step length: ",length,"(m)")
+ print("")
+ print("elevation module:")
+ elevation.info(product=product)
+ print("")
+
+ # tiles
+ if res == 'low':
+ ilon,ilat = elevation.datasource.srtm3_tile_ilonlat(region[0],region[1])
+ if ilon < 0 or ilat < 0:
+ print("invalid tile number: ",ilon,ilat,"please check if lon/lat order in your input is correct")
+ sys.exit(1)
+ tiles = list(elevation.datasource.srtm3_tiles_names(region[0],region[1],region[2],region[3]))
+ else:
+ ilon,ilat = elevation.datasource.srtm1_tile_ilonlat(region[0],region[1])
+ if ilon < 0 or ilat < 0:
+ print("invalid tile number: ",ilon,ilat,"please check if lon/lat order in your input is correct")
+ sys.exit(1)
+ tiles = list(elevation.datasource.srtm1_tiles_names(region[0],region[1],region[2],region[3]))
+ print("tiles:",len(tiles))
+ for name in tiles:
+ print(" ",name)
+ print("")
+
+ # note: in case elevation fails to download files, check in folder:
+ # /opt/local/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/elevation/datasource.py
+ # SRTM 3 files have new address: http://srtm.csi.cgiar.org/wp-content/uploads/files/srtm_5x5/tiff
+ # test with:
+ # > curl -s -o srtm_16_09.zip.html http://srtm.csi.cgiar.org/wp-content/uploads/files/srtm_5x5/tiff/srtm_16_09.zip
+
+ # get data, bounds: left bottom right top
+ print("getting topography DEM file:")
+ print(" region : ",region)
+ print(" path : ",filename_path)
+ print(" product: ",product)
+ elevation.clip(bounds=region,output=filename_path,product=product)
+
+ # cleans cache ( ~/Library/Caches/elevation/)
+ elevation.clean(product=product)
+
+ # check
+ if not os.path.isfile(filename_path):
+ print("error getting topo DEM file: ",filename_path)
+ sys.exit(1)
+
+ print(" done")
+ print("")
+
+
+#
+#-----------------------------------------------------------------------------
+#
+
+def get_topo(lon_min,lat_min,lon_max,lat_max):
+ """
+ gets topography data for given region and stores it in file ptopo.xyz
+ """
+ global incr_dx
+ global SRTM_type
+ global gmt_region
+
+ # region format: #lon_min #lat_min #lon_max #lat_max (left bottom right top) in degrees
+ # for example: region = (12.35, 41.8, 12.65, 42.0)
+ region = (lon_min, lat_min, lon_max, lat_max)
+
+ print("get topo:")
+ print(" region: ",region)
+ print("")
+
+ ## gmt
+ # region format: e.g. -R123.0/132.0/31.0/40.0
+ gmt_region = '-R' + str(lon_min) + '/' + str(lon_max) + '/' + str(lat_min) + '/' + str(lat_max)
+    # grid sampling interval (set by incr_dx)
+ gmt_interval = '-I' + str(incr_dx) + '/' + str(incr_dx)
+
+ # current directory
+ dir = os.getcwd()
+
+ name = 'ptopo-DEM.tif'
+ filename = dir + '/' + name
+
+ print("current directory:",dir)
+ print("topo file name :",filename)
+ print("")
+
+ ## get topo file
+ if SRTM_type == 'low' or SRTM_type == 'high':
+ # enlarge data region slightly to fit desired locations
+ #eps = 0.0001
+ #region_extended = ( lon_min - eps, lat_min - eps, lon_max + eps, lat_max + eps)
+ # SRTM data
+ get_topo_DEM(region,filename,res=SRTM_type)
+    elif SRTM_type in ('etopo2', 'etopo1',
+                       'topo30', 'srtm30s', 'srtm_1km',
+                       'topo15', 'srtm15s', 'srtm_500m',
+                       'topo3', 'srtm3s', 'srtm_100m',
+                       'topo1', 'srtm1s', 'srtm_30m'):
+ # gmt grid
+ gridfile = 'ptopo-DEM.grd'
+ if gmt_major >= 6:
+ # new version uses grdcut and earth relief grids from server
+ # http://gmt.soest.hawaii.edu/doc/latest/datasets.html
+ if SRTM_type == 'etopo2':
+ # ETOPO2 (2-arc minutes)
+ cmd = 'gmt grdcut @earth_relief_02m ' + gmt_region + ' -G' + gridfile
+                # set sampling interval to match this data resolution
+ incr_dx = 0.05 # coarse (~5.5km)
+ gmt_interval = '-I' + str(incr_dx) + '/' + str(incr_dx)
+ #cmd += '; gmt blockmean ' + gridfile + ' -I2m -G' + gridfile
+ elif SRTM_type == 'etopo1':
+ # ETOPO1 (1-arc minute)
+ cmd = 'gmt grdcut @earth_relief_01m ' + gmt_region + ' -G' + gridfile
+                # set sampling interval to match this data resolution
+ incr_dx = 0.01 # fine (~1.1km)
+ gmt_interval = '-I' + str(incr_dx) + '/' + str(incr_dx)
+ #cmd += '; gmt blockmean ' + gridfile + ' -I0.00833 -G' + gridfile
+ elif SRTM_type == 'topo30' or SRTM_type == 'srtm30s' or SRTM_type == 'srtm_1km':
+ # srtm 30s (30-arc seconds) ~ 1km resolution
+ cmd = 'gmt grdcut @earth_relief_30s ' + gmt_region + ' -G' + gridfile
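+                # e.g., region (12.35,41.8,12.65,42.0) assembles the command:
+                #   gmt grdcut @earth_relief_30s -R12.35/12.65/41.8/42.0 -Gptopo-DEM.grd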
+                # set sampling interval to match this data resolution
+ incr_dx = 0.009 # fine (~1km)
+ gmt_interval = '-I' + str(incr_dx) + '/' + str(incr_dx)
+ #cmd += '; gmt blockmean ' + gridfile + ' -I0.00833 -G' + gridfile
+ elif SRTM_type == 'topo15' or SRTM_type == 'srtm15s' or SRTM_type == 'srtm_500m':
+ # srtm 15s (15-arc seconds) ~ 0.5km resolution
+ cmd = 'gmt grdcut @earth_relief_15s ' + gmt_region + ' -G' + gridfile
+                # set sampling interval to match this data resolution
+ incr_dx = 0.0045 # fine (~500m)
+ gmt_interval = '-I' + str(incr_dx) + '/' + str(incr_dx)
+ #cmd += '; gmt blockmean ' + gridfile + ' -I0.004166 -G' + gridfile
+ elif SRTM_type == 'topo3' or SRTM_type == 'srtm3s' or SRTM_type == 'srtm_100m':
+ # srtm 3s (3-arc seconds) ~ 100m resolution
+ cmd = 'gmt grdcut @earth_relief_03s ' + gmt_region + ' -G' + gridfile
+                # set sampling interval to match this data resolution
+ incr_dx = 0.00083 # fine (~100m)
+ gmt_interval = '-I' + str(incr_dx) + '/' + str(incr_dx)
+ #cmd += '; gmt blockmean ' + gridfile + ' -I0.0008 -G' + gridfile
+ elif SRTM_type == 'topo1' or SRTM_type == 'srtm1s' or SRTM_type == 'srtm_30m':
+ # srtm 1s (1-arc seconds) ~ 30m resolution
+ cmd = 'gmt grdcut @earth_relief_01s ' + gmt_region + ' -G' + gridfile
+                # set sampling interval to match this data resolution
+ incr_dx = 0.0003 # fine (~30m)
+ gmt_interval = '-I' + str(incr_dx) + '/' + str(incr_dx)
+ #cmd += '; gmt blockmean ' + gridfile + ' -I0.0003 -G' + gridfile
+ else:
+ print("Error invalid SRTM_type " + SRTM_type)
+ sys.exit(1)
+ else:
+ # older version < 6, like 5.4 ...
+ # or use grdraster for lower resolution
+ if SRTM_type == 'etopo2':
+ # ETOPO2 (2-arc minutes)
+ cmd = 'gmt grdraster ETOPO2 ' + gmt_region + ' -I2m -G' + gridfile
+ elif SRTM_type == 'topo30':
+ # topo30 (30-arc seconds) ~ 1km resolution
+ cmd = 'gmt grdraster topo30 ' + gmt_region + ' -I0.00833 -G' + gridfile
+ elif SRTM_type == 'srtm_500m':
+ # srtm 500m resolution
+ cmd = 'gmt grdraster srtm_500m ' + gmt_region + ' -I0.004166 -G' + gridfile
+ else:
+ print("Error invalid SRTM_type " + SRTM_type)
+ sys.exit(1)
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+
+ print("")
+ # convert gmt grid-file to gdal GTiff for shading
+        if gmt_major >= 6 or (gmt_major == 5 and gmt_minor >= 4):
+            # version >= 5.4
+            cmd = 'gmt grdconvert ' + gridfile + ' -G' + filename + '=gd:Gtiff'
+        else:
+            # older versions < 5.4
+            cmd = 'gmt grdconvert ' + gridfile + ' ' + filename + '=gd:Gtiff'
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ else:
+ print("Error invalid SRTM_type " + SRTM_type)
+ sys.exit(1)
+
+ print("GMT:")
+ print(" region : ",gmt_region)
+ print(" interval: ",gmt_interval)
+ print("")
+
+ # topography info
+ cmd = 'gmt grdinfo ' + filename
+ print("topography file info:")
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ # hillshade
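+    # note: with -of GTiff, gdaldem writes a GeoTIFF despite the .gif file suffix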
+ gif_file = filename + '.hillshaded.gif'
+ cmd = 'gdaldem hillshade ' + filename + ' ' + gif_file + ' -of GTiff'
+ print("hillshade figure:")
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ # defines model region
+ ## resampling
+ # even sampling
+    # note: if the region is slightly off, this resampling can lead to wrong values;
+    #       check the output of the gmt command to see which region it actually uses
+ #
+ # e.g. grdsample etopo.grd $region -I$dx/$dx -Getopo.sampled.grd
+ #cmd = 'grdsample ' + filename + ' ' + gmt_region + ' ' + gmt_interval + ' -Getopo.sampled.grd'
+ # uses region specified from grid file
+ gridfile = 'ptopo.sampled.grd'
+ cmd = 'gmt grdsample ' + filename + ' ' + gmt_interval + ' -G' + gridfile
+ print("resampling topo data")
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ cmd = 'gmt grdinfo ' + gridfile
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ # converts to xyz format
+ xyz_file = 'ptopo.xyz'
+ cmd = 'gmt grd2xyz ' + gridfile + ' > ' + xyz_file
+ print("converting to xyz")
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ # gif
+ cmd = 'gdaldem hillshade ' + xyz_file + ' ' + xyz_file + '.hillshaded1.gif -of GTiff'
+ print("creating gif-image ...")
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ # mesh interpolation (only needed when different gridding interval)
+ mean_file = 'ptopo.mean.xyz'
+    # note: this can lead to problems if the mean interval is similar to the grid interval
+ #cmd = 'gmt blockmean ' + gmt_interval + ' ptopo.xyz ' + gmt_region + ' > ptopo.mean.xyz'
+ gmt_interval2 = '-I' + str(incr_dx/10.0) + '/' + str(incr_dx/10.0)
+
+ cmd = 'gmt blockmean ' + gmt_interval2 + ' ' + xyz_file + ' ' + gmt_region + ' > ' + mean_file
+ print("mesh interpolation")
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ cmd = 'mv -v ptopo.xyz ptopo.xyz.org; mv -v ptopo.mean.xyz ptopo.xyz'
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ # gif
+ cmd = 'gdaldem hillshade ' + xyz_file + ' ' + xyz_file + '.hillshaded2.gif -of GTiff'
+ print("creating gif-image ...")
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ # map
+ plot_map(gmt_region)
+
+ return xyz_file
+
+#
+#-----------------------------------------------------------------------------
+#
+
+def plot_map(gmt_region,gridfile="ptopo.sampled.grd"):
+ global datadir
+
+ print("plotting map ...")
+
+ # current directory
+ dir = os.getcwd()
+ print("current directory:",dir)
+ print("")
+
+ #cmd = 'cd ' + datadir + '/' + ';'
+ #status = subprocess.call(cmd, shell=True)
+ #check_status(status)
+
+ ps_file = "map.ps"
+ pdf_file = "map.pdf"
+
+ # gmt plotting
+ cmd = 'gmt pscoast ' + gmt_region + ' -JM6i -Dh -G220 -P -K > ' + ps_file + ';'
+ # topography shading
+ #makecpt -Cgray -T0/1/0.01 > topo.cpt
+ #cmd += 'makecpt -Cglobe -T-2500/2500/100 > topo.cpt' + ';'
+ #cmd += 'makecpt -Cterra -T-2500/2500/100 > ptopo.cpt' + ';'
+ #cmd += 'makecpt -Ctopo -T-2500/2500/100 > ptopo.cpt' + ';'
+ cmd += 'gmt makecpt -Crelief -T-2500/2500/100 > ptopo.cpt' + ';'
+ cmd += 'gmt grdgradient ' + gridfile + ' -Nt1 -A45 -Gptopogradient.grd -V' + ';'
+ cmd += 'gmt grdimage ' + gridfile + ' -Iptopogradient.grd -J -R -Cptopo.cpt -V -O -K >> ' + ps_file + ';'
+ cmd += 'gmt pscoast -R -J -Di -N1/1.5p,gray40 -A1000 -W1 -O -K >> ' + ps_file + ';'
+ cmd += 'gmt psbasemap -O -R -J -Ba1g1:"Map": -P -V >> ' + ps_file + ';'
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("map plotted in file: ",ps_file)
+
+ # imagemagick converts ps to pdf
+ cmd = 'convert ' + ps_file + ' ' + pdf_file
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("map plotted in file: ",pdf_file)
+ print("")
+ return
+
+#
+#-----------------------------------------------------------------------------
+#
+
+def create_AVS_file():
+ global datadir
+ global utm_zone
+ global gmt_region
+
+ print("creating AVS border file ...")
+
+ # current directory
+ dir = os.getcwd()
+ #print("current directory:",dir)
+ #print("")
+
+ # GMT segment file
+ name = "map_segment.dat"
+ cmd = 'gmt pscoast ' + gmt_region + ' -Dh -W -M > ' + name + ';'
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("GMT segment file plotted in file: ",name)
+
+ # note: GMT segment file has format
+ # > Shore Bin # .., Level ..
+ # lon1 lat1
+ # lon2 lat2
+ # > Shore Bin # .., Level ..
+ # ..
+
+ print("Getting boundaries from file %s ..." % name)
+
+ # reads gmt boundary file
+ with open(name,'r') as f:
+ content = f.readlines()
+
+ if len(content) == 0: sys.exit("Error no file content")
+
+ # counts segments and points
+ numsegments = 0
+ numpoints = 0
+ for line in content:
+ if ">" in line:
+ # new segment
+ numsegments += 1
+ else:
+ # point
+ numpoints += 1
+
+ print("There are %i contours" % numsegments)
+ print("There are %i data points" % numpoints)
+
+ # read the GMT file to get the number of individual line segments
+ currentelem = 0
+ previous_was_comment = 1
+ for line in content:
+ line = line.strip()
+ #debug
+ #print("currentelem %i %i %s" % (currentelem,previous_was_comment,line))
+ # skip comment lines
+ if line[0:1] == "#": continue
+ # get line marker (comment in file)
+ if ">" in line:
+ previous_was_comment = 1
+ else:
+ if previous_was_comment == 0: currentelem += 1
+ previous_was_comment = 0
+
+ num_individual_lines = currentelem
+ print("There are %i individual line segments" % num_individual_lines)
+
+
+ avsfile = "AVS_boundaries_utm.inp"
+ with open(avsfile,'w') as f:
+ # write header for AVS (with point data)
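+        # AVS UCD header fields: #nodes #cells #node-data-values #cell-data-values #model-data-values
+        # here: one scalar data value per node (Zcoord, written below), no cell or model data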
+ f.write("%i %i 1 0 0\n" % (numpoints,num_individual_lines))
+
+ # read the GMT file to get the points
+ currentpoint = 0
+ for line in content:
+ line = line.strip()
+ # skip comment lines
+ if line[0:1] == "#": continue
+
+ # get point only if line is not a comment
+ if ">" not in line:
+ currentpoint += 1
+ elem = line.split()
+
+ ## global lon/lat coordinates
+ # longitude is the number before the white space
+ lon = float(elem[0])
+ # latitude is the number after the white space
+ lat = float(elem[1])
+
+ # perl example
+ # convert geographic latitude to geocentric colatitude and convert to radians
+ # $pi = 3.14159265;
+ # $theta = $pi/2. - atan2(0.99329534 * tan($latitude * $pi / 180.),1) ;
+ # $phi = $longitude * $pi / 180. ;
+ # compute the Cartesian position of the receiver (ignore ellipticity for AVS)
+ # assume a sphere of radius one
+ # $r_target = 1. ;
+ ## DK DK make the radius a little bit bigger to make sure it is
+ ## DK DK correctly superimposed to the mesh in final AVS figure
+ # $r_target = 1.015 ;
+ # $x_target = $r_target*sin($theta)*cos($phi) ;
+ # $y_target = $r_target*sin($theta)*sin($phi) ;
+ # $z_target = $r_target*cos($theta) ;
+
+ ## UTM
+ # utm_x is the number before the white space
+ #utm_x = float(elem[0])
+ # utm_y is the number after the white space
+ #utm_y = float(elem[1])
+ #x_target = utm_x
+ #y_target = utm_y
+
+ # converts to UTM x/y
+ x,y = geo2utm(lon,lat,utm_zone)
+
+ # location
+ x_target = x
+ y_target = y
+ z_target = 0.0 # assuming models use depth in negative z-direction
+
+ f.write("%i %f %f %f\n" % (currentpoint,x_target,y_target,z_target))
+
+ # read the GMT file to get the lines
+ currentline = 0
+ currentelem = 0
+ currentpoint = 0
+ previous_was_comment = 1
+ for line in content:
+ line = line.strip()
+ # skip comment lines
+ if line[0:1] == "#": continue
+ # get line marker (comment in file)
+ if ">" in line:
+ currentline += 1
+ currentpoint += 1
+ previous_was_comment = 1
+ #print("processing contour %i named %s" % (currentline,line))
+ else:
+ if previous_was_comment == 0:
+ previouspoint = currentpoint
+ currentelem +=1
+ currentpoint +=1
+ # new line
+ f.write("%i %i line %i %i\n" % (currentelem,currentline,previouspoint,currentpoint))
+ previous_was_comment = 0
+
+        # node data header: number of components and their sizes ("1 1"),
+        # followed by the component label and unit
+ f.write(" 1 1\n")
+ f.write(" Zcoord, meters\n")
+ # create data values for the points
+ for currentpoint in range(1,numpoints+1):
+ f.write("%i 255.\n" % (currentpoint))
+
+ print("see file: %s" % avsfile)
+ print("")
+ return
+
+#
+#-----------------------------------------------------------------------------
+#
+
+def topo_extract(filename):
+ global toposhift
+ global toposcale
+
+ # ./topo_extract.sh ptopo.mean.xyz
+ #cmd = './topo_extract.sh ptopo.mean.xyz'
+ print("extracting interface data for xmeshfem3D ...")
+
+ import numpy
+
+ ## shift/downscale topography
+ print(" topo shift = ",toposhift,"(m)")
+ print(" scale factor = ",toposcale)
+ print("")
+
+ file1 = filename + '.1.dat'
+ file2 = filename + '.2.dat'
+
+ # statistics
+ cmd = 'gmt gmtinfo ' + filename
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ # cleanup
+ cmd = 'rm -f ' + file1 + ';'
+ cmd += 'rm -f ' + file2 + ';'
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ # reads in lon/lat/elevation
+ print("reading file " + filename + " ...")
+ data = numpy.loadtxt(filename)
+ #debug
+ #print(data)
+ elevation = data[:,2]
+
+ # extracts only elevation data
+ with open(file1,'w') as f:
+ for i in range(0,len(elevation)):
+ f.write("%f\n" % (elevation[i]) )
+
+    # scales topography and shifts the surface downwards
+ with open(file2,'w') as f:
+ for i in range(0,len(elevation)):
+ f.write("%f\n" % (elevation[i] * toposcale - toposhift) )
+
+ print("")
+ print("check: ",file1,file2)
+ print("")
+
+ cmd = 'gmt gmtinfo ' + file1 + ';'
+ cmd += 'gmt gmtinfo ' + file2 + ';'
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ print("number of points along x (NXI) and y (NETA)")
+ x0 = data[0,0]
+ y0 = data[0,1]
+ i0 = 1
+ nx = 0; ny = 0
+ xmin = x0; xmax = x0
+ ymin = y0; ymax = y0
+
+ for i in range(1,len(data)):
+ x = data[i,0]
+ y = data[i,1]
+ dx = x - x0
+ x0 = x
+ if x < xmin: xmin = x
+ if x > xmax: xmax = x
+ if y < ymin: ymin = y
+ if y > ymax: ymax = y
+ if dx < 0.0:
+ ii = i + 1
+ if nx > 0 and ii - i0 != nx:
+ print("non-regular nx: ",nx,ii-i0,"on line ",i+1)
+ nx = ii - i0
+ ny += 1
+ deltay = y - y0
+ y0 = y
+ i0 = ii
+ else:
+ deltax = dx
+
+ ii = len(data) + 1
+ if nx > 0 and ii - i0 != nx:
+ print("non-regular nx: ",nx,ii-i0,"on line ",ii)
+ nx = ii - i0
+ ny += 1
+ print("--------------------------------------------")
+ print("NXI = ",nx)
+ print("NETA = ",ny)
+ print("xmin/xmax = ",xmin,xmax)
+ print("ymin/ymax = ",ymin,ymax)
+ print("deltax = ",deltax,"average = ",(xmax-xmin)/(nx-1))
+ print("deltay = ",deltay,"average = ",(ymax-ymin)/(ny-1))
+ print("--------------------------------------------")
+ print("")
+ return nx,ny,deltax,deltay
+
+
+
+#
+#-----------------------------------------------------------------------------
+#
+
+def check_status(status):
+ if status != 0:
+ print("error: status returned ",status)
+ sys.exit(status)
+ return
+
+#
+#-----------------------------------------------------------------------------
+#
+
+def update_Mesh_Par_file(dir,lon_min,lat_min,lon_max,lat_max,nx,ny,dx,dy,xyz_file):
+ global datadir
+ global utm_zone
+
+    # change working directory to DATA/meshfem3D_files/
+ path = dir + '/' + 'DATA/meshfem3D_files/'
+ os.chdir(path)
+
+ print("updating Mesh_Par_file ...")
+ print(" working directory: ",os.getcwd())
+ print(" min lon/lat = ",lon_min,lat_min," max lon/lat = ",lon_max,lat_max)
+ print("")
+
+ cmd = 'echo "";'
+ cmd += 'echo "setting lat min/max = ' + str(lat_min) + ' ' + str(lat_max) + '";'
+ cmd += 'echo " lon min/max = ' + str(lon_min) + ' ' + str(lon_max) + '";'
+ cmd += 'echo " utm zone = ' + str(utm_zone) + '";'
+ cmd += 'echo "";'
+ cmd += 'sed -i "s:^LATITUDE_MIN .*:LATITUDE_MIN = ' + str(lat_min) + ':" Mesh_Par_file' + ';'
+ cmd += 'sed -i "s:^LATITUDE_MAX .*:LATITUDE_MAX = ' + str(lat_max) + ':" Mesh_Par_file' + ';'
+ cmd += 'sed -i "s:^LONGITUDE_MIN .*:LONGITUDE_MIN = ' + str(lon_min) + ':" Mesh_Par_file' + ';'
+ cmd += 'sed -i "s:^LONGITUDE_MAX .*:LONGITUDE_MAX = ' + str(lon_max) + ':" Mesh_Par_file' + ';'
+ cmd += 'sed -i "s:^UTM_PROJECTION_ZONE .*:UTM_PROJECTION_ZONE = ' + str(utm_zone) + ':" Mesh_Par_file' + ';'
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ # interfaces.dat file
+ nxi = nx
+ neta = ny
+
+ dxi = dx
+ deta = dy
+
+ if dx < 0.0:
+ # negative increment, starts from maximum location
+ lon = lon_max
+ else:
+ # positive increment, starts from minimum location
+ lon = lon_min
+
+ if dy < 0.0:
+ # negative increment, starts from maximum location
+ lat = lat_max
+ else:
+ # positive increment, starts from minimum location
+ lat = lat_min
+
+ # format:
+ # #SUPPRESS_UTM_PROJECTION #NXI #NETA #LONG_MIN #LAT_MIN #SPACING_XI #SPACING_ETA
+ # .false. 901 901 123.d0 40.0d0 0.01 -0.01
+ cmd = 'echo "";'
+ cmd += 'echo "interfaces nxi = ' + str(nxi) + ' neta = ' + str(neta) + '";'
+ cmd += 'echo " long = ' + str(lon) + ' lat = ' + str(lat) + '";'
+ cmd += 'echo " spacing_xi = ' + str(dxi) + ' spacing_eta = ' + str(deta) + '";'
+ cmd += 'echo "";'
+ line = '.false. ' + str(nxi) + ' ' + str(neta) + ' ' + str(lon) + ' ' + str(lat) + ' ' + str(dxi) + ' ' + str(deta)
+ cmd += 'sed -i "s:^.false. .*:' + line + ':g" interfaces.dat' + ';'
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ # link to topography files
+ topodir = '../../' + datadir
+ xyz_file1 = xyz_file + '.1.dat'
+ xyz_file2 = xyz_file + '.2.dat'
+ cmd = 'rm -f ' + xyz_file1 + ' ' + xyz_file2 + ';'
+ cmd += 'ln -s ' + topodir + '/' + xyz_file1 + ';'
+ cmd += 'ln -s ' + topodir + '/' + xyz_file2 + ';'
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ print("file updated: %s/Mesh_Par_file" % path)
+ print("")
+
+ return
+
+#
+#-----------------------------------------------------------------------------
+#
+
+
+def update_Par_file(dir):
+ global datadir
+ global utm_zone
+
+ # change working directory back to DATA/
+ path = dir + '/' + 'DATA/'
+ os.chdir(path)
+
+ print("updating Par_file ...")
+ print(" working directory: ",os.getcwd())
+ print(" utm_zone = ",utm_zone)
+ print("")
+
+ cmd = 'sed -i "s:^UTM_PROJECTION_ZONE .*:UTM_PROJECTION_ZONE = ' + str(utm_zone) + ':" Par_file' + ';'
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ print("file updated: %s/Par_file" % path)
+ print("")
+
+ return
+
+
+#
+#----------------------------------------------------------------------------------------
+#
+
+
+def geo2utm(lon,lat,zone):
+ """
+ from utm_geo.f90
+
+ convert geodetic longitude and latitude to UTM, and back
+ use iway = ILONGLAT2UTM for long/lat to UTM, IUTM2LONGLAT for UTM to lat/long
+ a list of UTM zones of the world is available at www.dmap.co.uk/utmworld.htm
+
+ implicit none
+
+ include "constants.h"
+
+ -----CAMx v2.03
+
+ UTM_GEO performs UTM to geodetic (long/lat) translation, and back.
+
+ This is a Fortran version of the BASIC program "Transverse Mercator
+ Conversion", Copyright 1986, Norman J. Berls (Stefan Musarra, 2/94)
+ Based on algorithm taken from "Map Projections Used by the USGS"
+ by John P. Snyder, Geological Survey Bulletin 1532, USDI.
+
+ Input/Output arguments:
+
+ rlon Longitude (deg, negative for West)
+ rlat Latitude (deg)
+ rx UTM easting (m)
+ ry UTM northing (m)
+ UTM_PROJECTION_ZONE UTM zone
+ iway Conversion type
+ ILONGLAT2UTM = geodetic to UTM
+ IUTM2LONGLAT = UTM to geodetic
+ """
+ PI = math.pi
+ degrad = PI/180.0
+ raddeg = 180.0/PI
+ semimaj = 6378206.4
+ semimin = 6356583.8
+ scfa = 0.9996
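+    # note: semimaj/semimin are the Clarke 1866 reference ellipsoid parameters,
+    #       as used in the original utm_geo.f90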
+
+ # some extracts about UTM:
+ #
+ # There are 60 longitudinal projection zones numbered 1 to 60 starting at 180 W.
+ # Each of these zones is 6 degrees wide, apart from a few exceptions around Norway and Svalbard.
+ # There are 20 latitudinal zones spanning the latitudes 80 S to 84 N and denoted
+    # by the letters C to X, omitting the letter O.
+ # Each of these is 8 degrees south-north, apart from zone X which is 12 degrees south-north.
+ #
+ # To change the UTM zone and the hemisphere in which the
+ # calculations are carried out, need to change the fortran code and recompile. The UTM zone is described
+ # actually by the central meridian of that zone, i.e. the longitude at the midpoint of the zone, 3 degrees
+ # from either zone boundary.
+ # To change hemisphere need to change the "north" variable:
+ # - north=0 for northern hemisphere and
+ # - north=10000000 (10000km) for southern hemisphere. values must be in metres i.e. north=10000000.
+ #
+ # Note that the UTM grids are actually Mercators which
+ # employ the standard UTM scale factor 0.9996 and set the
+ # Easting Origin to 500,000;
+ # the Northing origin in the southern
+ # hemisphere is kept at 0 rather than set to 10,000,000
+ # and this gives a uniform scale across the equator if the
+ # normal convention of selecting the Base Latitude (origin)
+ # at the equator (0 deg.) is followed. Northings are
+ # positive in the northern hemisphere and negative in the
+ # southern hemisphere.
+ north = 0.0
+ east = 500000.0
+
+ IUTM2LONGLAT = 1
+ ILONGLAT2UTM = 2
+
+ #---------------------------------------------------------------
+ # use conversion to UTM
+ iway = ILONGLAT2UTM
+
+ # zone
+ UTM_PROJECTION_ZONE = zone
+
+ # lon/lat
+ rlon = lon
+ rlat = lat
+
+ rx = 0.0
+ ry = 0.0
+
+ #---------------------------------------------------------------
+
+
+ # save original parameters
+ rlon_save = rlon
+ rlat_save = rlat
+ rx_save = rx
+ ry_save = ry
+
+ # define parameters of reference ellipsoid
+ e2 = 1.0 - (semimin/semimaj)**2
+ e4 = e2 * e2
+ e6 = e2 * e4
+ ep2 = e2/(1.0 - e2)
+
+ if iway == IUTM2LONGLAT:
+ xx = rx
+ yy = ry
+ else:
+ dlon = rlon
+ dlat = rlat
+
+ #----- Set Zone parameters
+ zone = UTM_PROJECTION_ZONE
+ cm = zone * 6.0 - 183.0 # set central meridian for this zone
+ cmr = cm*degrad
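+    # e.g., zone 33 has central meridian cm = 33 * 6 - 183 = 15.0 deg E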
+
+ #---- Lat/Lon to UTM conversion
+ if iway == ILONGLAT2UTM:
+
+ rlon = degrad*dlon
+ rlat = degrad*dlat
+
+ delam = dlon - cm
+ if delam < -180.0: delam = delam + 360.0
+ if delam > 180.0: delam = delam - 360.0
+
+ delam = delam*degrad
+
+ f1 = (1. - e2/4.0 - 3.0*e4/64.0 - 5.0*e6/256.0)*rlat
+ f2 = 3.0*e2/8.0 + 3.0*e4/32.0 + 45.0*e6/1024.0
+ f2 = f2 * math.sin(2.0*rlat)
+ f3 = 15.0*e4/256.0 + 45.0*e6/1024.0
+ f3 = f3 * math.sin(4.0*rlat)
+ f4 = 35.0*e6/3072.0
+ f4 = f4 * math.sin(6.0*rlat)
+ rm = semimaj*(f1 - f2 + f3 - f4)
+
+ if dlat == 90.0 or dlat == -90.0:
+ xx = 0.0
+ yy = scfa*rm
+ else:
+ rn = semimaj/math.sqrt(1.0 - e2*math.sin(rlat)**2)
+ t = math.tan(rlat)**2
+ c = ep2 * math.cos(rlat)**2
+ a = math.cos(rlat) * delam
+
+ f1 = (1.0 - t + c) * a**3/6.0
+ f2 = 5.0 - 18.0*t + t**2 + 72.0*c - 58.0*ep2
+ f2 = f2 * a**5/120.0
+ xx = scfa*rn*(a + f1 + f2)
+ f1 = a**2/2.0
+ f2 = 5.0 - t + 9.0*c + 4.0*c**2
+ f2 = f2*a**4/24.0
+ f3 = 61.0 - 58.0*t + t**2 + 600.0*c - 330.0*ep2
+ f3 = f3 * a**6/720.0
+ yy = scfa*(rm + rn*math.tan(rlat)*(f1 + f2 + f3))
+
+ xx = xx + east
+ yy = yy + north
+
+ else:
+ #---- UTM to Lat/Lon conversion
+ xx = xx - east
+ yy = yy - north
+ e1 = math.sqrt(1.0 - e2)
+ e1 = (1.0 - e1)/(1.0 + e1)
+ rm = yy/scfa
+ u = 1.0 - e2/4.0 - 3.0*e4/64.0 - 5.0*e6/256.0
+ u = rm/(semimaj*u)
+
+ f1 = 3.0*e1/2.0 - 27.0*e1**3.0/32.0
+ f1 = f1*math.sin(2.0*u)
+ f2 = 21.0*e1**2/16.0 - 55.0*e1**4/32.0
+ f2 = f2*math.sin(4.0*u)
+ f3 = 151.0*e1**3/96.0
+ f3 = f3*math.sin(6.0*u)
+ rlat1 = u + f1 + f2 + f3
+ dlat1 = rlat1*raddeg
+
+ if dlat1 >= 90.0 or dlat1 <= -90.0:
+ dlat1 = min(dlat1,90.0)
+ dlat1 = max(dlat1,-90.0)
+ dlon = cm
+ else:
+ c1 = ep2*math.cos(rlat1)**2
+ t1 = math.tan(rlat1)**2
+ f1 = 1.0 - e2*math.sin(rlat1)**2
+ rn1 = semimaj/math.sqrt(f1)
+ r1 = semimaj*(1.0 - e2)/math.sqrt(f1**3)
+ d = xx/(rn1*scfa)
+
+ f1 = rn1*math.tan(rlat1)/r1
+ f2 = d**2/2.0
+ f3 = 5.0 + 3.0*t1 + 10.0*c1 - 4.0*c1**2 - 9.0*ep2
+ f3 = f3*d**2*d**2/24.0
+ f4 = 61.0 + 90.0*t1 + 298.0*c1 + 45.0*t1**2 - 252.0*ep2 - 3.0*c1**2
+ f4 = f4*(d**2)**3/720.0
+ rlat = rlat1 - f1*(f2 - f3 + f4)
+ dlat = rlat*raddeg
+
+ f1 = 1.0 + 2.0*t1 + c1
+ f1 = f1*d**2*d/6.0
+ f2 = 5.0 - 2.0*c1 + 28.0*t1 - 3.0*c1**2 + 8.0*ep2 + 24.0*t1**2
+ f2 = f2*(d**2)**2*d/120.0
+ rlon = cmr + (d - f1 + f2)/math.cos(rlat1)
+ dlon = rlon*raddeg
+ if dlon < -180.0: dlon = dlon + 360.0
+ if dlon > 180.0: dlon = dlon - 360.0
+
+ if iway == IUTM2LONGLAT:
+ rlon = dlon
+ rlat = dlat
+ rx = rx_save
+ ry = ry_save
+ return rlon,rlat
+
+ else:
+ rx = xx
+ ry = yy
+ rlon = rlon_save
+ rlat = rlat_save
+ return rx,ry
+
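+# illustrative example (approximate values, Rome in UTM zone 33):
+#   x,y = geo2utm(12.5,41.9,33)   # -> x ~ 2.9e5 m easting, y ~ 4.6e6 m northing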
+
+#
+#----------------------------------------------------------------------------------------
+#
+
+def convert_lonlat2utm(file_in,zone,file_out):
+ """
+ converts file with lon/lat/elevation to output file utm_x/utm_y/elevation
+ """
+ print("converting lon/lat to UTM: " + file_in + " ...")
+ print(" zone: %i " % zone)
+
+ # checks argument
+    if zone < 1 or zone > 60: sys.exit("error: zone %i is not a valid UTM zone (1-60)" % zone)
+
+ # grab all the locations in file
+ with open(file_in,'r') as f:
+ content = f.readlines()
+
+ nlines = len(content)
+ print(" number of lines: %i" % nlines)
+ print("")
+ print(" output file: " + file_out)
+ print(" format: #UTM_x #UTM_y #elevation")
+
+ # write out converted file
+ with open(file_out,'w') as f:
+ for line in content:
+ # reads lon/lat coordinates
+ items = line.split()
+ lon = float(items[0])
+ lat = float(items[1])
+ ele = float(items[2])
+
+ # converts to UTM x/y
+ x,y = geo2utm(lon,lat,zone)
+ #print("%18.8f\t%18.8f\t%18.8f" % (x,y,ele))
+ f.write("%18.8f\t%18.8f\t%18.8f\n" % (x,y,ele))
+
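+# example call (file names as used in setup_simulation below):
+#   convert_lonlat2utm('ptopo.xyz',33,'ptopo.utm')
+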
+#
+#-----------------------------------------------------------------------------
+#
+
+
+def setup_simulation(lon_min,lat_min,lon_max,lat_max):
+ """
+ sets up directory for a SPECFEM3D simulation
+ """
+ global datadir
+ global utm_zone
+ global incr_dx,toposhift,toposcale
+ # current directory
+ dir = os.getcwd()
+
+ # creates data directory
+ cmd = 'mkdir -p ' + datadir
+ print("> ",cmd)
+ status = subprocess.call(cmd, shell=True)
+ check_status(status)
+ print("")
+
+ # change working directory to ./topo_data/
+ path = dir + '/' + datadir
+ os.chdir(path)
+ print("working directory: ",os.getcwd())
+ print(" topo : ",SRTM_type)
+ print(" grid sampling interval: ",incr_dx,"(deg) ",incr_dx * math.pi/180.0 * 6371.0, "(km)")
+ print(" topo down shift : ",toposhift)
+ print(" topo down scaling : ",toposcale)
+ print("")
+
+ # check min/max is given in correct order
+ if lat_min > lat_max:
+ tmp = lat_min
+ lat_min = lat_max
+ lat_max = tmp
+
+ if lon_min > lon_max:
+ tmp = lon_min
+ lon_min = lon_max
+ lon_max = tmp
+
+ # get topography data
+ xyz_file = get_topo(lon_min,lat_min,lon_max,lat_max)
+
+ ## UTM zone
+ midpoint_lat = (lat_min + lat_max)/2.0
+ midpoint_lon = (lon_min + lon_max)/2.0
+
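+    # note: the utm package expects (latitude,longitude) argument order here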
+ utm_zone = utm.latlon_to_zone_number(midpoint_lat,midpoint_lon)
+ print("region midpoint lat/lon: %f / %f " %(midpoint_lat,midpoint_lon))
+ print("UTM zone: %d" % utm_zone)
+ print("")
+
+ # converting to utm
+ utm_file = 'ptopo.utm'
+ convert_lonlat2utm(xyz_file,utm_zone,utm_file)
+ #script version
+ #cmd = '../topo/convert_lonlat2utm.py ' + xyz_file + ' ' + str(utm_zone) + ' > ' + utm_file
+ #print("converting lon/lat to UTM: ptopo.utm ...")
+ #print("> ",cmd)
+ #status = subprocess.call(cmd, shell=True)
+ #check_status(status)
+ print("")
+
+ # AVS UCD file with region borders
+ create_AVS_file()
+
+ # extracts interface data for xmeshfem3D
+ # uses file with degree increments to determine dx,dy in degrees, as needed for interfaces.dat
+ nx,ny,dx,dy = topo_extract(xyz_file)
+
+ # creates parameter files
+ update_Par_file(dir)
+ update_Mesh_Par_file(dir,lon_min,lat_min,lon_max,lat_max,nx,ny,dx,dy,xyz_file)
+
+ print("")
+ print("topo output in directory: ",datadir)
+ print("all done")
+ print("")
+ return
+
+#
+#-----------------------------------------------------------------------------
+#
+
+def usage():
+ global incr_dx,toposhift,toposcale
+
+ # default increment in km
+ incr_dx_km = incr_dx * math.pi/180.0 * 6371.0
+
+ print("usage: ./run_get_simulation_topography.py lon_min lat_min lon_max lat_max [SRTM] [incr_dx] [toposhift] [toposcale]")
+ print(" where")
+ print(" lon_min lat_min lon_max lat_max - region given by points: left bottom right top")
+ print(" for example: 12.35 42.0 12.65 41.8 (Rome)")
+ print(" SRTM - (optional) name options are:")
+ print(" 'low' == SRTM 90m ")
+ print(" 'high' == SRTM 30m")
+ print(" 'etopo2' == ETOPO2 (2-arc minutes)")
+ print(" 'topo30' / 'srtm30s' / 'srtm_1km' == SRTM topo 30s (30-arc seconds)")
+ print(" 'topo15' / 'srtm15s' / 'srtm_500m' == SRTM topo 15s (15-arc seconds)")
+ print(" 'topo3' / 'srtm3s' / 'srtm_100m' == SRTM topo 3s (3-arc seconds)")
+ print(" 'topo1' / 'srtm1s' / 'srtm_30m' == SRTM topo 1s (1-arc seconds)")
+ print(" incr_dx - (optional) GMT grid sampling (in degrees)")
+ print(" e.g., 0.01 == (~1.1km) [default %f degrees ~ %f km]" % (incr_dx,incr_dx_km))
+ print(" toposhift - (optional) topography shift for 2nd interface (in m)")
+ print(" to shift original topography downwards [default %f m]" % toposhift)
+ print(" toposcale - (optional) scalefactor to topography for shifted 2nd interface (e.g., 0.1) [default %f]" %toposcale)
+ return
+
+if __name__ == '__main__':
+ # gets arguments
+ if len(sys.argv) < 5:
+ usage()
+ sys.exit(1)
+ else:
+ lon_min = float(sys.argv[1])
+ lat_min = float(sys.argv[2])
+ lon_max = float(sys.argv[3])
+ lat_max = float(sys.argv[4])
+
+    # type: 'low' == SRTM 90m / 'high' == SRTM 30m / else e.g. 'topo30' == SRTM topo 30s (30-arc seconds)
+ if len(sys.argv) >= 6:
+ SRTM_type = sys.argv[5]
+
+ # GMT grid sampling interval
+ if len(sys.argv) >= 7:
+ incr_dx = float(sys.argv[6])
+
+ # topography shift for 2nd interface
+    if len(sys.argv) >= 8:
+ toposhift = float(sys.argv[7])
+
+ # topography scalefactor
+    if len(sys.argv) >= 9:
+ toposcale = float(sys.argv[8])
+
+ # logging
+ cmd = " ".join(sys.argv)
+ filename = './run_get_simulation_topography.log'
+ with open(filename, 'a') as f:
+ print("command call --- " + str(datetime.datetime.now()),file=f)
+ print(cmd,file=f)
+ print("command logged to file: " + filename)
+
+ # initializes
+ setup_simulation(lon_min,lat_min,lon_max,lat_max)
+