diff --git a/src/generate_databases/setup_mesh_adjacency.f90 b/src/generate_databases/setup_mesh_adjacency.f90 index 6d53bf806..571b308ef 100644 --- a/src/generate_databases/setup_mesh_adjacency.f90 +++ b/src/generate_databases/setup_mesh_adjacency.f90 @@ -220,8 +220,7 @@ subroutine setup_mesh_adjacency() ! user output if (myrank == 0) then - write(IMAIN,*) ' using kd-tree search radius = ',sngl(r_search) - write(IMAIN,*) + write(IMAIN,*) ' using kd-tree search radius = ',sngl(r_search) call flush_IMAIN() endif @@ -530,6 +529,17 @@ subroutine setup_mesh_adjacency() deallocate(kdtree_search_index) endif + ! user output progress + if (myrank == 0) then + if (mod(ispec_ref,max(NSPEC_AB/10,1)) == 0) then + tCPU = wtime() - time1 + ! elapsed + write(IMAIN,*) " ",int(ispec_ref/(max(NSPEC_AB/10,1)) * 10)," %", & + " - elapsed time:",sngl(tCPU),"s" + ! flushes file buffer for main output file (IMAIN) + call flush_IMAIN() + endif + endif enddo ! ispec_ref ! frees temporary array @@ -646,11 +656,12 @@ subroutine setup_mesh_adjacency() if (myrank == 0) then ! elapsed time since beginning of neighbor detection tCPU = wtime() - time1 + write(IMAIN,*) write(IMAIN,*) ' maximum search elements = ',num_elements_max write(IMAIN,*) ' maximum of actual search elements (after distance criterion) = ',num_elements_actual_max write(IMAIN,*) write(IMAIN,*) ' estimated maximum element size = ',elemsize_max_glob - write(IMAIN,*) ' maximum distance between neighbor centers = ',sqrt(dist_squared_max) + write(IMAIN,*) ' maximum distance between neighbor centers = ',real(sqrt(dist_squared_max),kind=CUSTOM_REAL) if (use_kdtree_search) then if (sqrt(dist_squared_max) > r_search - 0.5*elemsize_max_glob) then write(IMAIN,*) '***' @@ -665,7 +676,7 @@ subroutine setup_mesh_adjacency() endif write(IMAIN,*) ' total number of neighbors = ',num_neighbors_all write(IMAIN,*) - write(IMAIN,*) ' Elapsed time for detection of neighbors in seconds = ',tCPU + write(IMAIN,*) ' Elapsed time for detection of neighbors in seconds = ',sngl(tCPU) write(IMAIN,*) call flush_IMAIN() endif diff --git a/src/meshfem3D/check_mesh_quality.f90 b/src/meshfem3D/check_mesh_quality.f90 index d9cd9c64a..88786b60a 100644 --- a/src/meshfem3D/check_mesh_quality.f90 +++ b/src/meshfem3D/check_mesh_quality.f90 @@ -348,7 +348,7 @@ subroutine check_mesh_quality(VP_MAX,NGLOB,NSPEC,x,y,z,ibool, & endif ! vtk file output - call write_VTK_data_elem_cr_meshfem(nspec,nglob,x,y,z,ibool,tmp1,filename) + call write_VTK_data_elem_cr_meshfem(nspec,nglob,NGLLX_M,x,y,z,ibool,tmp1,filename) deallocate(tmp1) endif diff --git a/src/shared/write_VTK_data.f90 b/src/shared/write_VTK_data.f90 index 8c78a6b4b..135b7d6c3 100644 --- a/src/shared/write_VTK_data.f90 +++ b/src/shared/write_VTK_data.f90 @@ -963,19 +963,19 @@ end subroutine write_VTK_data_ngnod_elem_i ! !------------------------------------------------------------------------------------ - subroutine write_VTK_data_elem_cr_meshfem(nspec,nglob,xstore_db,ystore_db,zstore_db,ibool, & + subroutine write_VTK_data_elem_cr_meshfem(nspec,nglob,NGLL,xstore_db,ystore_db,zstore_db,ibool, & elem_data,filename) ! special routine for meshfem3D with simpler mesh arrays - use constants, only: CUSTOM_REAL,MAX_STRING_LEN,NGNOD_EIGHT_CORNERS,IOUT_VTK + use constants, only: CUSTOM_REAL,MAX_STRING_LEN,IOUT_VTK implicit none - integer :: nspec,nglob + integer, intent(in) :: nspec,nglob,NGLL ! 
global coordinates - integer, dimension(NGNOD_EIGHT_CORNERS,NSPEC) :: ibool + integer,dimension(NGLL,NGLL,NGLL,nspec),intent(in) :: ibool double precision, dimension(nglob) :: xstore_db,ystore_db,zstore_db ! element flag array @@ -1004,8 +1004,10 @@ subroutine write_VTK_data_elem_cr_meshfem(nspec,nglob,xstore_db,ystore_db,zstore write(IOUT_VTK,'(a,i12,i12)') "CELLS ",nspec,nspec*9 do ispec = 1,nspec write(IOUT_VTK,'(9i12)') 8, & - ibool(1,ispec)-1,ibool(2,ispec)-1,ibool(4,ispec)-1,ibool(3,ispec)-1, & - ibool(5,ispec)-1,ibool(6,ispec)-1,ibool(8,ispec)-1,ibool(7,ispec)-1 + ibool(1,1,1,ispec)-1,ibool(NGLL,1,1,ispec)-1, & + ibool(NGLL,NGLL,1,ispec)-1,ibool(1,NGLL,1,ispec)-1, & + ibool(1,1,NGLL,ispec)-1,ibool(NGLL,1,NGLL,ispec)-1, & + ibool(NGLL,NGLL,NGLL,ispec)-1,ibool(1,NGLL,NGLL,ispec)-1 enddo write(IOUT_VTK,*) '' diff --git a/src/specfem3D/locate_receivers.f90 b/src/specfem3D/locate_receivers.f90 index cff3a40aa..d8b4cb7b1 100644 --- a/src/specfem3D/locate_receivers.f90 +++ b/src/specfem3D/locate_receivers.f90 @@ -32,7 +32,7 @@ subroutine locate_receivers(rec_filename,utm_x_source,utm_y_source) use constants - use specfem_par, only: USE_SOURCES_RECEIVERS_Z,SUPPRESS_UTM_PROJECTION,INVERSE_FWI_FULL_PROBLEM,SU_FORMAT, & + use specfem_par, only: USE_SOURCES_RECEIVERS_Z,SUPPRESS_UTM_PROJECTION,INVERSE_FWI_FULL_PROBLEM, & ibool,myrank,NSPEC_AB,NGLOB_AB, & xstore,ystore,zstore, & nrec,islice_selected_rec,ispec_selected_rec, & @@ -113,6 +113,7 @@ subroutine locate_receivers(rec_filename,utm_x_source,utm_y_source) if (USE_SOURCES_RECEIVERS_Z) then write(IMAIN,*) 'using sources/receivers Z:' write(IMAIN,*) ' (depth) becomes directly (z) coordinate' + write(IMAIN,*) endif call flush_IMAIN() @@ -132,8 +133,10 @@ subroutine locate_receivers(rec_filename,utm_x_source,utm_y_source) call read_stations(rec_filename) ! checks if station locations already available - if (SU_FORMAT .and. (.not. INVERSE_FWI_FULL_PROBLEM) ) then - call read_stations_SU_from_previous_run(is_done_stations) + if (.not. INVERSE_FWI_FULL_PROBLEM) then + ! reads stations_info.bin if available in OUTPUT_FILES/ folder + call read_stations_from_previous_run(is_done_stations) + ! check if done if (is_done_stations) then ! user output @@ -141,7 +144,7 @@ subroutine locate_receivers(rec_filename,utm_x_source,utm_y_source) ! elapsed time since beginning of mesh generation tCPU = wtime() - tstart write(IMAIN,*) - write(IMAIN,*) 'Elapsed time for receiver detection in seconds = ',tCPU + write(IMAIN,*) 'Elapsed time for receiver detection in seconds = ',sngl(tCPU) write(IMAIN,*) write(IMAIN,*) 'End of receiver detection - done' write(IMAIN,*) @@ -317,7 +320,7 @@ subroutine locate_receivers(rec_filename,utm_x_source,utm_y_source) endif ! limits user output if too many receivers - if (nrec < 1000 .and. (.not. SU_FORMAT )) then + if (nrec < 1000) then ! receiver info write(IMAIN,*) @@ -432,11 +435,11 @@ subroutine locate_receivers(rec_filename,utm_x_source,utm_y_source) close(IOUT_SU) ! stores station infos for later runs - if (SU_FORMAT) call write_stations_SU_for_next_run(x_found,y_found,z_found) + call write_stations_for_next_run(x_found,y_found,z_found) ! 
elapsed time since beginning of mesh generation tCPU = wtime() - tstart - write(IMAIN,*) 'Elapsed time for receiver detection in seconds = ',tCPU + write(IMAIN,*) 'Elapsed time for receiver detection in seconds = ',sngl(tCPU) write(IMAIN,*) write(IMAIN,*) 'End of receiver detection - done' write(IMAIN,*) diff --git a/src/specfem3D/locate_source.F90 b/src/specfem3D/locate_source.F90 index af0c0b4a3..de2fdfb6c 100644 --- a/src/specfem3D/locate_source.F90 +++ b/src/specfem3D/locate_source.F90 @@ -703,7 +703,7 @@ subroutine locate_source() if (myrank == 0) then tCPU = wtime() - tstart write(IMAIN,*) - write(IMAIN,*) 'Elapsed time for detection of sources in seconds = ',tCPU + write(IMAIN,*) 'Elapsed time for detection of sources in seconds = ',sngl(tCPU) write(IMAIN,*) write(IMAIN,*) 'End of source detection - done' write(IMAIN,*) diff --git a/src/specfem3D/read_stations.f90 b/src/specfem3D/read_stations.f90 index b12c9d578..a612372a9 100644 --- a/src/specfem3D/read_stations.f90 +++ b/src/specfem3D/read_stations.f90 @@ -291,7 +291,7 @@ end subroutine read_stations_find_duplets !------------------------------------------------------------------------------------------- ! - subroutine read_stations_SU_from_previous_run(is_done_stations) + subroutine read_stations_from_previous_run(is_done_stations) use constants, only: NDIM,IOUT_SU,IMAIN,OUTPUT_FILES,MAX_STRING_LEN @@ -299,7 +299,7 @@ subroutine read_stations_SU_from_previous_run(is_done_stations) nrec,station_name,network_name, & islice_selected_rec,ispec_selected_rec, & xi_receiver,eta_receiver,gamma_receiver, & - nu_rec + nu_rec, stations_hashsum implicit none @@ -308,80 +308,120 @@ subroutine read_stations_SU_from_previous_run(is_done_stations) ! local parameters integer :: ier,irec - ! SU_FORMAT parameters - logical :: SU_station_file_exists + ! file parameters + logical :: station_file_exists double precision, allocatable, dimension(:) :: x_found,y_found,z_found character(len=MAX_STRING_LEN) :: filename + character(len=32) :: hashsum_read_from_file ! initializes is_done_stations = .false. - filename = trim(OUTPUT_FILES)//'/SU_stations_info.bin' + ! only check for existing file if number of stations is big enough + if (nrec < 1000) return + + filename = trim(OUTPUT_FILES)//'/stations_info.bin' ! checks if file with station infos located from previous run exists - inquire(file=trim(filename),exist=SU_station_file_exists) + inquire(file=trim(filename),exist=station_file_exists) - if (SU_station_file_exists) then + ! checks if something to do + if (.not. station_file_exists) return - ! main reads in available station information - if (myrank == 0) then - open(unit=IOUT_SU,file=trim(filename),status='old',action='read',form='unformatted',iostat=ier) - if (ier /= 0) call exit_mpi(myrank,'error opening file '//trim(filename)) + ! main reads in available station information + if (myrank == 0) then + ! user output + write(IMAIN,*) 'reading station details:' + write(IMAIN,*) ' from file: stations_info.bin' + call flush_IMAIN() - write(IMAIN,*) 'station details from SU_stations_info.bin' - call flush_IMAIN() + open(unit=IOUT_SU,file=trim(filename),status='old',action='read',form='unformatted',iostat=ier) + if (ier /= 0) call exit_mpi(myrank,'error opening file '//trim(filename)) - allocate(x_found(nrec),y_found(nrec),z_found(nrec),stat=ier) - if (ier /= 0) call exit_MPI_without_rank('error allocating array 1971') + ! 
temporary arrays + allocate(x_found(nrec),y_found(nrec),z_found(nrec),stat=ier) + if (ier /= 0) call exit_MPI_without_rank('error allocating array 1971') - ! reads in station infos - read(IOUT_SU) islice_selected_rec,ispec_selected_rec - read(IOUT_SU) xi_receiver,eta_receiver,gamma_receiver - read(IOUT_SU) x_found,y_found,z_found - read(IOUT_SU) nu_rec - close(IOUT_SU) + ! reads hash key + read(IOUT_SU) hashsum_read_from_file - ! write the locations of stations, so that we can load them and write them to SU headers later - open(unit=IOUT_SU,file=trim(OUTPUT_FILES)//'/output_list_stations.txt', & - status='unknown',action='write',iostat=ier) - if (ier /= 0) & - call exit_mpi(myrank,'error opening file '//trim(OUTPUT_FILES)//'/output_list_stations.txt') + ! user output + write(IMAIN,*) ' hash key : ',hashsum_read_from_file + write(IMAIN,*) + call flush_IMAIN() - do irec = 1,nrec - write(IOUT_SU,*) station_name(irec),network_name(irec),x_found(irec),y_found(irec),z_found(irec) - enddo + ! get a hash checksum from STATIONS infos + if (len_trim(stations_hashsum) == 0) then + call create_stations_checksum(stations_hashsum) + endif + + !debug + !print *,'debug: hash from STATIONS = ',stations_hashsum + !print *,'debug: hash from file = ',hashsum_read_from_file,' match: ',(stations_hashsum==hashsum_read_from_file) + + ! checks hash keys + if (stations_hashsum /= hashsum_read_from_file) then + write(IMAIN,*) ' hash checksum does not match:' + write(IMAIN,*) ' from STATIONS file: ',stations_hashsum + write(IMAIN,*) ' from stations_info.bin file: ',hashsum_read_from_file + write(IMAIN,*) ' will fall back to locate stations again...' + ! closes file close(IOUT_SU) - deallocate(x_found,y_found,z_found) + + ! return w/out infos + is_done_stations = .false. + return endif - ! main process broadcasts the results to all the slices - call bcast_all_i(islice_selected_rec,nrec) - call bcast_all_i(ispec_selected_rec,nrec) - call bcast_all_dp(xi_receiver,nrec) - call bcast_all_dp(eta_receiver,nrec) - call bcast_all_dp(gamma_receiver,nrec) - call bcast_all_dp(nu_rec,NDIM*NDIM*nrec) - call synchronize_all() - - ! everything done - is_done_stations = .true. + ! reads in station infos + read(IOUT_SU) islice_selected_rec,ispec_selected_rec + read(IOUT_SU) xi_receiver,eta_receiver,gamma_receiver + read(IOUT_SU) x_found,y_found,z_found + read(IOUT_SU) nu_rec + close(IOUT_SU) + + ! write the locations of stations, so that we can load them and write them to SU headers later + open(unit=IOUT_SU,file=trim(OUTPUT_FILES)//'/output_list_stations.txt', & + status='unknown',action='write',iostat=ier) + if (ier /= 0) & + call exit_mpi(myrank,'error opening file '//trim(OUTPUT_FILES)//'/output_list_stations.txt') + + do irec = 1,nrec + write(IOUT_SU,*) station_name(irec),network_name(irec),x_found(irec),y_found(irec),z_found(irec) + enddo + + close(IOUT_SU) + deallocate(x_found,y_found,z_found) endif - end subroutine read_stations_SU_from_previous_run + ! main process broadcasts the results to all the slices + call bcast_all_i(islice_selected_rec,nrec) + call bcast_all_i(ispec_selected_rec,nrec) + call bcast_all_dp(xi_receiver,nrec) + call bcast_all_dp(eta_receiver,nrec) + call bcast_all_dp(gamma_receiver,nrec) + call bcast_all_dp(nu_rec,NDIM*NDIM*nrec) + call synchronize_all() + + ! everything done + is_done_stations = .true. + + end subroutine read_stations_from_previous_run ! !------------------------------------------------------------------------------------------- ! 
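The caching scheme introduced above is worth restating in isolation: the located-receiver arrays are written to stations_info.bin together with a checksum of the STATIONS data, and any mismatch on re-read falls back to a fresh receiver search. A minimal Python sketch of the same guard, with hypothetical helper names and a generic pickle payload rather than the SPECFEM binary layout:

import hashlib
import os
import pickle

def station_fingerprint(stations):
    # hash name/network/lat/lon/burial of every station so that any edit invalidates the cache
    h = hashlib.md5()
    for name, net, lat, lon, burial in stations:
        h.update("{}{}{:12.4f}{:12.4f}{:12.4f}".format(name, net, lat, lon, burial).encode())
    return h.hexdigest()

def load_cached_locations(path, stations):
    # return cached locations only when the stored hash matches the current STATIONS data
    if not os.path.exists(path):
        return None
    with open(path, "rb") as f:
        cached = pickle.load(f)
    if cached.get("hash") != station_fingerprint(stations):
        return None  # STATIONS changed -> caller locates the receivers again
    return cached["locations"]

def store_locations(path, stations, locations):
    # hash first, then payload, mirroring the read order above
    with open(path, "wb") as f:
        pickle.dump({"hash": station_fingerprint(stations), "locations": locations}, f)

The checksum guard is what makes it safe to drop the old SU_FORMAT-only restriction: a stale stations_info.bin left over from a different STATIONS file is detected and ignored rather than reused.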
- subroutine write_stations_SU_for_next_run(x_found,y_found,z_found) + subroutine write_stations_for_next_run(x_found,y_found,z_found) - use constants, only: IOUT_SU,OUTPUT_FILES,MAX_STRING_LEN + use constants, only: myrank,IMAIN,IOUT_SU,OUTPUT_FILES,MAX_STRING_LEN use specfem_par, only: nrec,islice_selected_rec,ispec_selected_rec, & - xi_receiver,eta_receiver,gamma_receiver,nu_rec + xi_receiver,eta_receiver,gamma_receiver,nu_rec, & + stations_hashsum implicit none double precision, dimension(nrec),intent(in) :: x_found,y_found,z_found @@ -390,10 +430,36 @@ subroutine write_stations_SU_for_next_run(x_found,y_found,z_found) integer :: ier character(len=MAX_STRING_LEN) :: filename - filename = trim(OUTPUT_FILES)//'/SU_stations_info.bin' + ! only output file if number of stations is big enough + if (nrec < 1000) return + + ! only main process writes out all available station information + if (myrank /= 0) return + + ! user output + write(IMAIN,*) 'storing station details:' + write(IMAIN,*) ' to file : stations_info.bin' + call flush_IMAIN() + + ! get checksum from STATIONS file infos + ! could have been computed already in read_stations_from_previous run + if (len_trim(stations_hashsum) == 0) then + call create_stations_checksum(stations_hashsum) + endif + + ! user output + write(IMAIN,*) ' hash key : ',stations_hashsum + write(IMAIN,*) + call flush_IMAIN() + + ! creates file with all station location infos + filename = trim(OUTPUT_FILES)//'/stations_info.bin' open(unit=IOUT_SU,file=trim(filename),status='unknown',action='write',form='unformatted',iostat=ier) if (ier == 0) then + ! hash checksum for checking + write(IOUT_SU) stations_hashsum + ! stations informations write(IOUT_SU) islice_selected_rec,ispec_selected_rec write(IOUT_SU) xi_receiver,eta_receiver,gamma_receiver write(IOUT_SU) x_found,y_found,z_found @@ -401,4 +467,319 @@ subroutine write_stations_SU_for_next_run(x_found,y_found,z_found) close(IOUT_SU) endif - end subroutine write_stations_SU_for_next_run + end subroutine write_stations_for_next_run + +! +!------------------------------------------------------------------------------------------- +! + + subroutine create_stations_checksum(hashsum) + +! creates a hash key to check if stations_info.bin file contains a valid number of stations +! compared to STATIONS file infos +! +! the checksum algorithm follows the principles of an MD5 hash algorithm, +! but its resulting hash differs to the exact MD5 one. + + use constants, only: MAX_LENGTH_STATION_NAME,MAX_LENGTH_NETWORK_NAME + use specfem_par, only: nrec,station_name,network_name,stlat,stlon,stbur + + implicit none + + character(len=32),intent(out) :: hashsum ! 128-bit hash key + + ! md5 parameters + ! binary integer part of the sines of integers (Radians) as constants + ! note: the initialization here with boz constants won't work with gfortran + !integer(kind=4), dimension(64), parameter :: K =(/ & + ! z'0xD76AA478', z'0xE8C7B756', z'0x242070DB', z'0xC1BDCEEE', & + ! z'0xF57C0FAF', z'0x4787C62A', z'0xA8304613', z'0xFD469501', & + ! z'0x698098D8', z'0x8B44F7AF', z'0xFFFF5BB1', z'0x895CD7BE', & + ! z'0x6B901122', z'0xFD987193', z'0xA679438E', z'0x49B40821', & + ! z'0xF61E2562', z'0xC040B340', z'0x265E5A51', z'0xE9B6C7AA', & + ! z'0xD62F105D', z'0x02441453', z'0xD8A1E681', z'0xE7D3FBC8', & + ! z'0x21E1CDE6', z'0xC33707D6', z'0xF4D50D87', z'0x455A14ED', & + ! z'0xA9E3E905', z'0xFCEFA3F8', z'0x676F02D9', z'0x8D2A4C8A', & + ! z'0xFFFA3942', z'0x8771F681', z'0x6D9D6122', z'0xFDE5380C', & + ! 
z'0xA4BEEA44', z'0x4BDECFA9', z'0xF6BB4B60', z'0xBEBFBC70', & + ! z'0x289B7EC6', z'0xEAA127FA', z'0xD4EF3085', z'0x04881D05', & + ! z'0xD9D4D039', z'0xE6DB99E5', z'0x1FA27CF8', z'0xC4AC5665', & + ! z'0xF4292244', z'0x432AFF97', z'0xAB9423A7', z'0xFC93A039', & + ! z'0x655B59C3', z'0x8F0CCC92', z'0xFFEFF47D', z'0x85845DD1', & + ! z'0x6FA87E4F', z'0xFE2CE6E0', z'0xA3014314', z'0x4E0811A1', & + ! z'0xF7537E82', z'0xBD3AF235', z'0x2AD7D2BB', z'0xEB86D391' & + ! /) + ! or to be computed below + integer(kind=4), dimension(64) :: K + + ! specifies the per-round shift amounts + integer(kind=4), dimension(64), parameter :: S = (/ & + 7, 12, 17, 22, 7, 12, 17, 22, & + 7, 12, 17, 22, 7, 12, 17, 22, & + 5, 9, 14, 20, 5, 9, 14, 20, & + 5, 9, 14, 20, 5, 9, 14, 20, & + 4, 11, 16, 23, 4, 11, 16, 23, & + 4, 11, 16, 23, 4, 11, 16, 23, & + 6, 10, 15, 21, 6, 10, 15, 21, & + 6, 10, 15, 21, 6, 10, 15, 21 & + /) + + integer(kind=4) :: a0, b0, c0, d0 + integer(kind=4) :: a, b, c, d, f, g, temp + integer :: i, j, pos, str_len, bit_len, size_str4 + integer :: irec ! ier + ! buffer 64-byte (16xint) + character(len=8) :: wtmp + integer(kind=4) :: w(16),i1,i2,i3,i4 + + ! character string holding sta/net/x/y/z + character(len=MAX_LENGTH_STATION_NAME + MAX_LENGTH_NETWORK_NAME + 3*12) :: single_station_line + + ! padded array with a multiple of 512-bit chunks (16 4-byte integer size), +2 to add a bit-1 and string length + ! for the single_station_line length above of 76, the padded integer array should have size 32 + integer, parameter :: size_padded_int = 32 + integer(kind=4), dimension(size_padded_int) :: padded_int + ! or dynamically for variable lengths of single_station_line - just in case + !integer(kind=4), dimension(:), allocatable :: padded_int + + ! the original MD5 algorithm seems to use big endian initializations + ! https://datatracker.ietf.org/doc/html/rfc1321 + ! thus, to match the exact MD5, one would need to be careful here - todo if one wants to match it in future. + ! for now, we're happy with something similar :) + ! + ! for an explanation of the MD5 algorithm, see also: + ! https://github.com/Zunawe/md5-c + + ! endianness + ! little-endian writes the lowest order byte first, big-endian writes it last. + ! for example, given the integer value of 1, big endian uses 00 00 00 01 + ! and little endian uses 01 00 00 00 as bit representation + ! + ! using transfer(1,'a') takes the bit-representation of integer value 1 (multi-byte, usually 32-bit) + ! and interprets it as a character type (of 1-byte length). + ! thus, looking at the first byte, it is either 0 or 1, respectively. + ! finally, ichar(c) takes a character and returns its position number. + ! + ! generally, an intel machine would use little endian ordering + !logical, parameter :: is_big_endian = (ichar(transfer(1,'a')) == 0) + + ! string as integer + !allocate(padded_int((len(single_station_line) + 64 + 8) / 4),stat=ier) + + ! padded array with a multiple of 512-bit chunks (16 4-byte integer size), +2 to add a bit-1 and string length + !if (mod(int(len(single_station_line)/4) + 2, 16) == 0) then + ! size_padded_int = int(len(single_station_line)/4) + 2 + !else + ! size_padded_int = ((int(len(single_station_line)/4) + 2)/16 + 1) * 16 + !endif + !allocate(padded_int(size_padded_int),stat=ier) + !if (ier /= 0) stop 'Error allocating padded_int array' + !padded_int(:) = 0 + + ! initializes MD5 hash values + ! 
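For reference, the commented-out hex table above and the K(i) loop that follows both derive from the standard MD5 round constants, which RFC 1321 defines as K(i) = floor(2^32 * |sin(i)|) for i = 1..64 with i in radians. A short Python check of that formula against the first entries of the table (illustrative only; as noted above, the resulting Fortran checksum is deliberately only MD5-like):

import math

# per-round additive constants: K[i] = floor(2**32 * |sin(i+1)|), i = 0..63
K = [math.floor((2.0 ** 32) * abs(math.sin(i + 1.0))) for i in range(64)]

# the first entries reproduce the commented table: 0xD76AA478, 0xE8C7B756, 0x242070DB, ...
assert K[0] == 0xD76AA478
assert K[1] == 0xE8C7B756
assert K[2] == 0x242070DB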
binary integer part of the sines of integers (Radians) as constants + do i = 1,64 + K(i) = floor(2**32 * dabs(dsin((i-1) + 1.d0))) + enddo + + ! magic numbers: the original MD5 algorithm seems to use big endian initializations + !if (is_big_endian) then + ! a0 = int(z'67452301',kind=4) + ! b0 = int(z'EFCDAB89',kind=4) + ! c0 = int(z'98BADCFE',kind=4) + ! d0 = int(z'10325476',kind=4) + !else + ! a0 = int(z'01234567') + ! b0 = int(z'89ABCDEF') + ! c0 = int(z'FEDCBA98') + ! d0 = int(z'76543210') + !endif + ! note: using gfortran versions <= 9 will return a compilation error for these boz-initializations: + ! "Error: Arithmetic overflow converting INTEGER(16) to INTEGER(4) .." + ! thus, instead of + ! b0 = int(z'89ABCDEF') or b0 = int(z'89ABCDEF',kind=4) + ! one could split it and use + ! b0 = ior(ishft(int(z'89AB'), 16), int(z'CDEF')) + ! or + ! b0 = transfer(int(z'89ABCDEF',kind=8),b0) + ! these hexadecimals all fit into a 32-bit representation, and therefore the transfer() function + ! should return valid results. + a0 = transfer(int(z'01234567',kind=8),a0) + b0 = transfer(int(z'89ABCDEF',kind=8),b0) + c0 = transfer(int(z'FEDCBA98',kind=8),c0) + d0 = transfer(int(z'76543210',kind=8),d0) + + do irec = 1,nrec + single_station_line(:) = '' + + ! creates string station info + write(single_station_line,'(a,a,3F12.4)') station_name(irec),network_name(irec),stlat(irec),stlon(irec),stbur(irec) + + !to check: + !single_station_line = 'The quick brown fox jumps over the lazy dog.' + !single_station_line = 'Hello, World!' + !debug + !print *,'debug: station ',irec,len_trim(single_station_line),'***'//single_station_line//'***' + + ! Convert the string to padded integer array + str_len = len_trim(single_station_line) + bit_len = str_len * 8 + + ! size as a multiple of 4 characters + if (mod(str_len,4) == 0) then + size_str4 = str_len / 4 + else + size_str4 = (int(str_len / 4) + 1) * 4 + endif + + ! initializes padded integer array + padded_int(:) = 0 + + ! merges 4 characters into a hex and converts it to integer + do j = 1,size_str4 + ! string index + i = (j-1) * 4 + 1 + + ! integer conversion + ! converts 4 characters to a 4-byte integer + i1 = 0; i2 = 0; i3 = 0; i4 = 0 + + ! 4 char to int + !if (is_big_endian) then + ! ! the ordering here is big-endian + ! if (i <= str_len) i4 = ichar(single_station_line(i:i)) + ! if (i+1 <= str_len) i3 = ichar(single_station_line(i+1:i+1)) + ! if (i+2 <= str_len) i2 = ichar(single_station_line(i+2:i+2)) + ! if (i+3 <= str_len) i1 = ichar(single_station_line(i+3:i+3)) + !else + ! little endian + if (i <= str_len) i1 = ichar(single_station_line(i:i)) + if (i+1 <= str_len) i2 = ichar(single_station_line(i+1:i+1)) + if (i+2 <= str_len) i3 = ichar(single_station_line(i+2:i+2)) + if (i+3 <= str_len) i4 = ichar(single_station_line(i+3:i+3)) + !endif + + ! concats 4 char values to a hex value + write(wtmp,'(4(z2.2))') i1,i2,i3,i4 + + ! reads hexadecimal value as integer + read(wtmp,'(z8)') padded_int(j) + + !debug + !print *,'debug: i ',i,size_str4,str_len,'wtmp ***'//wtmp//'*** line ***'//single_station_line(i:i+3)//'***', & + ! 'position',j,' int = ',padded_int(j) + enddo + + ! Append the bit '1' to the message + !if (is_big_endian) then + ! ! 00000000 00000000 00000000 00000001 + ! padded_int(str_len / 4 + 1) = ibset(padded_int(str_len / 4 + 1), 8 * (4 - mod(str_len, 4))) + !else + ! 10000000 00000000 00000000 00000000 + padded_int(str_len / 4 + 1) = ibset(padded_int(str_len / 4 + 1), 8 * (mod(str_len, 4) + 1)) + !endif + + ! 
Append the original bit length to the message + padded_int(size_padded_int) = bit_len + + !debug + !print *,'debug: padded size ',size_padded_int,'int:',padded_int(:) + + ! initialize + a = a0 + b = b0 + c = c0 + d = d0 + + ! md5 loop + do i = 1, size_padded_int,16 + ! takes 512-bit chunk, i.e., 16 4-byte integers + do j = 1,16 + pos = (i-1) + j + w(j) = padded_int(pos) + enddo + + ! main loop + do j = 1,64 + if (j >= 1 .and. j <= 16) then + f = ior( iand(b,c) , iand(not(b), d) ) + g = j + else if (j >= 17 .and. j <= 32) then + f = ior( iand(d , b) , iand(not(d) , c) ) + g = mod(5*(j-1) + 1, 16) + 1 + else if (j >= 33 .and. j <= 48) then + f = ieor(b, ieor(c, d)) + g = mod(3*(j-1) + 5, 16) + 1 + else if (j >= 49 .and. j <= 64) then + f = ieor(c, ior(b , not(d))) + g = mod(7*(j-1), 16) + 1 + endif + + ! Save current hash values + !if (is_big_endian) then + ! temp = a + ! a = d + ! d = c + ! c = b + ! b = b + leftrotate((a + f + K(j) + w(g)), S(j)) + ! a = temp + !else + temp = d + d = c + c = b + b = b + leftrotate((a + f + K(j) + w(g)), S(j)) + a = temp + !endif + enddo + enddo + + ! add this station to hash + a0 = a0 + a + b0 = b0 + b + c0 = c0 + c + d0 = d0 + d + enddo + + ! Output hash + hashsum = iachar2hex(a0, b0, c0, d0) + +contains + + ! Bitwise left rotation function + function leftrotate(x, n) result(rotated) + integer(4), intent(in) :: x, n + integer(4) :: rotated + + rotated = ieor(shiftl(x, n), shiftr(x, 32 - n)) + end function leftrotate + + ! Convert array of integers to hexadecimal string + function iachar2hex(a0,b0,c0,d0) result(hex_string) + integer(4), intent(in) :: a0,b0,c0,d0 + character(len=32) :: hex_string + integer(4) :: a,b,c,d + + ! zero extend + !a = zext(a0, 8) + !b = zext(b0, 8) + !c = zext(c0, 8) + !d = zext(d0, 8) + ! w/out extend + a = a0 + b = b0 + c = c0 + d = d0 + + ! digest is a 128-bit number written in little endian + write(hex_string, '(4Z8.8)') a,b,c,d + + end function iachar2hex + + ! Zero extend function + !function zext(x, width) result(zexted) + ! integer(4), intent(in) :: x, width + ! integer(4) :: zexted + ! zexted = iand(x, ishft(1, width) - 1) + !end function zext + + end subroutine create_stations_checksum diff --git a/src/specfem3D/specfem3D_par.F90 b/src/specfem3D/specfem3D_par.F90 index 5c428c0b0..149323ba5 100644 --- a/src/specfem3D/specfem3D_par.F90 +++ b/src/specfem3D/specfem3D_par.F90 @@ -170,6 +170,9 @@ module specfem_par double precision, dimension(:,:), allocatable :: hpxir_store,hpetar_store,hpgammar_store double precision, dimension(:,:,:), allocatable :: nu_rec + ! hash key for STATIONS infos + character(len=32) :: stations_hashsum = '' + ! 
location storage for inverse problem damping double precision, dimension(:), allocatable :: x_target_station,y_target_station,z_target_station diff --git a/utils/Visualization/Blender/python_blender/convert_dxf_to_mesh.py b/utils/Visualization/Blender/python_blender/convert_dxf_to_mesh.py index 38b980d01..91b201c11 100755 --- a/utils/Visualization/Blender/python_blender/convert_dxf_to_mesh.py +++ b/utils/Visualization/Blender/python_blender/convert_dxf_to_mesh.py @@ -208,6 +208,18 @@ def read_dxf_file(filename,index): # model space info msp = doc.modelspace() + # query all entity types available in the model space + entity_types = [] + for e in msp: + if not e.dxftype() in entity_types: + entity_types.append(e.dxftype()) + + print(" model space entity types:") + print(" ",entity_types) + print("") + + # query lines + # we will need polyfaces to convert 3D objects num_lines = len(msp.query("LINE")) num_polylines = len(msp.query("POLYLINE")) num_lwpolylines = len(msp.query("LWPOLYLINE")) @@ -294,12 +306,12 @@ def convert_coordinates_LV95_to_UTM(mesh): east_lon_degree=orig_lon, north_lat_degree=orig_lat)) utm_code = utm_crs_list[0].code - utm_epsg_code = "EPSG:{}".format(utm_code) + utm_epsg = "EPSG:{}".format(utm_code) - print(" UTM code:", utm_epsg_code) + print(" UTM code:", utm_epsg) # direct transformation from LV95 to UTM zone - transformer_to_utm = Transformer.from_crs("EPSG:2056", utm_epsg_code) + transformer_to_utm = Transformer.from_crs("EPSG:2056", utm_epsg) #debug #print(transformer_to_utm) diff --git a/utils/Visualization/Blender/python_blender/convert_openstreetmap_buildings_to_mesh.py b/utils/Visualization/Blender/python_blender/convert_openstreetmap_buildings_to_mesh.py new file mode 100755 index 000000000..34c0aaa45 --- /dev/null +++ b/utils/Visualization/Blender/python_blender/convert_openstreetmap_buildings_to_mesh.py @@ -0,0 +1,1809 @@ +#!/usr/bin/env python +# +# downloads openstreetmap buildings and creates 3d mesh objects saved in .ply format +# +# required python modules: +# - numpy +# - scipy +# - pygmt +# - pyproj +# - xarray +# - osmnx +# - geopandas +# - trimesh +# - rtree +# - mapbox_earcut +# +################################################################# + +import sys +import os +import time +import datetime +import math + +try: + import numpy as np +except: + print("Failed importing numpy. Please make sure to have python with numpy working properly.") + sys.exit(1) + +# elevation +# GMT python interface +try: + import pygmt +except: + print("Error importing module `pygmt`") + print("install by: > pip install -U pygmt") + sys.exit(1) + +# Coordinate projections +try: + import pyproj +except: + print("Failed importing pyproj.") + sys.exit(1) +from pyproj import Transformer + +# for topo arrays +import xarray as xr + +# OSMnx +try: + import osmnx as ox +except: + print("Failed importing osmnx. 
Please install by: > pip install osmnx") + sys.exit(1) + +# GeoJSON file import +import geopandas as gpd + +# meshing +try: + import trimesh +except: + print("Error importing module `trimesh`") + print("install by: > pip install -U trimesh") + sys.exit(1) + +# for mesh triangulation +from scipy.spatial import Delaunay + +# for triangulation filtering checks +from matplotlib.patches import Polygon + +################################################################# +## USER PARAMETERS + +# minimum level height for buildings (when missing further informations) +building_height_min = 3.0 # m + +# level height for roofs (when roof:levels is provided) +roof_level_height = 1.5 # m + +################################################################# + +## globals +transformer_to_utm = None + + + +def download_buildings(mesh_area): + """ + downloads OpenStreetMap (OSM) building footprints and infos + """ + # user output + print("downloading OSM buildings...") + print(" mesh area: ",mesh_area) + print("") + + # mesh area min/max + lon_min = mesh_area[0] + lat_min = mesh_area[1] + lon_max = mesh_area[2] + lat_max = mesh_area[3] + + # check + if lon_min > lon_max or lat_min > lat_max: + print("Wrong mesh area format, please provide in a format (lon_min,lat_min,lon_max,lat_max)") + sys.exit(1) + + print(" lat min/max: ",lat_min,lat_max) + print(" lon min/max: ",lon_min,lon_max) + print("") + + # define a bounding box + north, south, east, west = lat_max, lat_min, lon_max, lon_min + + # configuration + ox.settings.use_cache = True + ox.settings.log_console = True + + if 1 == 0: + # get graph + G = ox.graph_from_bbox(north, south, east, west, network_type="drive") + + #print("debug: G ",G) + fig, ax = ox.plot_graph(G, node_size=0) + + # graph option to get elevations + #raster_file = "./DEM-rome-w46075_s10/w46075_s10.tif" + #G = ox.elevation.add_node_elevations_raster(G,raster_file) + + #print("debug: G ",G) + #fig, ax = ox.plot_graph(G, node_size=0) + + # get one color for each node, by elevation, then plot the network + #nc = ox.plot.get_node_colors_by_attr(G, "elevation", cmap="plasma") + #fig, ax = ox.plot_graph(G, node_color=nc, node_size=5, edge_color="#333333", bgcolor="k") + + # download only OSM buildings feature + tags = {"building": True, "building:part": True} + + # download buildings as a GeoDataFrame + gdf = ox.features_from_bbox(north, south, east, west, tags) + + print("") + print(" number of all buildings: ",gdf.shape[0]) + + if gdf.size == 0: + print("Error: no buildings were found, exiting...") + sys.exit(1) + + print(" download done.") + print("") + + return gdf + +# +#---------------------------------------------------------------------------------------- +# + +def prepare_osm_building_data(gdf): + """ + prepares and cleans up buildings data array + """ + # user output + print("preparing OSM building data...") + print("") + + # extracts only meaningful infos for buildings + + #bld_list = ["geometry", "building:height", "building:levels"] + #for key in gdf.keys(): + # if key not in bld_list: + # print("gdf: removing key: ",key) + # gdf = gdf.drop(labels=key, axis=1) + + # all possible height related infos + cols_extended = ["building:height", "building:min_level", \ + "building:levels", "building:levels:roof", "building:levels:underground", \ + "building:shape", "building:part", \ + "roof:levels", "roof:level", "roof:type", "roof:orientation", "roof:direction", "roof:shape", \ + "height", "min_height", "min_level", "max_level", "maxheight", \ + "geometry", "nodes" ] + + # main infos 
only + cols = [ "geometry", "name", "height", \ + "building:height", "building:levels", "building:part", \ + "roof:levels", "roof:shape", "roof:type", "roof:direction", "roof:orientation" ] + + # adds missing keys + # for example if downloaded gdf has no "building:height" infos + for key in cols: + if key not in gdf.keys(): + gdf[key] = np.nan + + # very slow... + #for key in gdf.keys(): + # if key not in cols: + # #print("gdf: removing key: ",key) + # gdf = gdf.drop(labels=key, axis=1) + # faster + gdf = gdf[cols] + + print(" GeoDataFrame keys: ",gdf.keys()) + print("") + + # view just the polygon buildings + #gdf[gdf["geometry"].type == "Polygon"].dropna(axis=1, how="any") + # view just the roof shapes + #gdf[gdf['roof:shape'].notnull()] + + # keep everything (Polygon,MultiPolygon,LineString,..) except single Points geometries + #gdf = gdf[gdf.geometry.type != "Point"] + # keep only MultiPolygon + #gdf = gdf[gdf.geometry.type == "MultiPolygon"] + # keep only Polygon + #gdf = gdf[gdf.geometry.type == "Polygon"] + + # combined mask to filter only rows with Polygon or MultiPolygon geometry types + mask = (gdf.geometry.type == "Polygon") | (gdf.geometry.geom_type == "MultiPolygon") + gdf = gdf[mask] + + # Drop rows with NaN values + #gdf = gdf.dropna(axis=1,how="all") + + # checks for valid geometry + indices_to_remove = [] + for i,geom in enumerate(list(gdf.geometry)): + # check flags - imported building's geometry should all be valid, but just in case + if not geom.is_valid: + print(" building {} has invalid geometry".format(i)) + indices_to_remove.append(i) + + # check centroid info + lon = geom.centroid.x + lat = geom.centroid.y + if np.isnan(lon) or np.isnan(lat): + print(" building {} has invalid centroid lon/lat {}/{} - geometry {}".format(i,lon,lat,geometry.centroid)) + indices_to_remove.append(i) + + print(" number of usable buildings : ",gdf.shape[0]) + print(" number of invalid buildings: ",len(indices_to_remove)) + print("") + + if len(indices_to_remove) > 0: + print("") + gdf.iloc[indices_to_remove] + gdf = gdf.drop(indices_to_remove) + print(" updated number of usable buildings: ",gdf.shape[0]) + print("") + + print(" checking for valid data entries...") + print("") + + # makes sure that the building information related to numbers can be read in as a float or as NaN. + # info tags that are strings won't need to be changed here. 
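    # The five per-tag blocks in the loop below (height, building:height, building:levels,
    # roof:levels and roof:direction) repeat the same string-to-float coercion: keep only the
    # first value of a ";"-separated string, strip stray quotes, and return a float or NaN
    # (roof:direction additionally wraps the angle into [0,360]). A hypothetical helper
    # _tag_to_float doing the equivalent coercion is sketched here for clarity only; the loop
    # below keeps its explicit per-tag blocks (numpy is already imported as np at module level):
    def _tag_to_float(val):
        if isinstance(val, str):
            # shorten string, for example: "1.0;2.0" -> "1.0"
            if ";" in val: val = val.split(";")[0]
            if "'" in val: val = val.split("'")[0]
            # check if it is a valid number, for example "16.2" -> yes, "q" -> no
            return float(val) if val.replace(".", "", 1).isdigit() else np.nan
        return val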
+ # + # iterates over each row in the GeoDataFrame + icount = 0 + for index, obj in gdf.iterrows(): + # counter + icount += 1 + + # check height + val = obj['height'] + if isinstance(val,str): + # shorten string, for example: "1.0;2.0" -> "1.0" + if val.find(";") >= 0: val = val[0:val.find(";")] + if val.find("'") >= 0: val = val[0:val.find("'")] + # check if its a valid number, for example "16.2" -> "162" -> yes, "q" -> "q" -> no + if val.replace(".", "", 1).isdigit(): + h = float(val) + else: + h = np.nan + else: + h = val + gdf.at[index,'height'] = h + + # check building height + val = obj['building:height'] + if isinstance(val,str): + # shorten string, for example: "1.0;2.0" -> "1.0" + if val.find(";") >= 0: val = val[0:val.find(";")] + if val.find("'") >= 0: val = val[0:val.find("'")] + # check if its a valid number, for example "16.2" -> "162" -> yes, "q" -> "q" -> no + if val.replace(".", "", 1).isdigit(): + h = float(val) + else: + h = np.nan + else: + h = val + gdf.at[index,'building:height'] = h + + # check levels + val = obj['building:levels'] + if isinstance(val,str): + # shorten string, for example: "1;2" -> "1" + if val.find(";") >= 0: val = val[0:val.find(";")] + if val.find("'") >= 0: val = val[0:val.find("'")] + # check if its a valid number, for example "16.2" -> "162" -> yes, "q" -> "q" -> no + if val.replace(".", "", 1).isdigit(): + lev = float(val) + else: + lev = np.nan + else: + lev = val + gdf.at[index,'building:levels'] = lev + + # check roof levels + val = obj['roof:levels'] + if isinstance(val,str): + # shorten string, for example: "1;2" -> "1" + if val.find(";") >= 0: val = val[0:val.find(";")] + if val.find("'") >= 0: val = val[0:val.find("'")] + # check if its a valid number, for example "16.2" -> "162" -> yes, "q" -> "q" -> no + if val.replace(".", "", 1).isdigit(): + lev = float(val) + else: + lev = np.nan + else: + lev = val + gdf.at[index,'roof:levels'] = lev + + # check roof levels + val = obj['roof:direction'] + if isinstance(val,str): + # shorten string, for example: "1;2" -> "1" + if val.find(";") >= 0: val = val[0:val.find(";")] + if val.find("'") >= 0: val = val[0:val.find("'")] + # check if its a valid number, for example "16.2" -> "162" -> yes, "q" -> "q" -> no + if val.replace(".", "", 1).isdigit(): + d = float(val) + else: + d = np.nan + else: + d = val + # directions seem to be within the range [0,360] + if not np.isnan(d): + if d < 0.0: d += 360.0 + if d > 360.0: d -= 360.0 + # re-set direction value + gdf.at[index,'roof:direction'] = d + + #debug + #name = "" if str(obj['name']) == 'nan' else obj['name'] + #print("debug: building ",icount,"H/B:H/B:L/B:P ",obj['height'],obj['building:height'],obj['building:levels'],obj['building:part'],name) + + # plot footprints + if 1 == 0: + gdf_proj = ox.project_gdf(gdf) + fig, ax = ox.plot_footprints(gdf_proj) + + # Save the GeoDataFrame to a GeoJSON file + if 1 == 0: + gdf = gdf.apply(lambda c: c.astype(str) if c.name != "geometry" else c, axis=0) + filename = "./buildings.gjson" + gdf.to_file(filename, driver="GeoJSON") + print(" written to file: ",filename) + # check other formats + #import fiona + #fiona.supported_drivers + # Save the GeoDataFrame to a DXF file + #gdf.to_file("./buildings.dxf", driver="DXF") + # direct function to GeoJSON + #gdf.to_json("./buildings.gjson", na='drop', driver="GeoJSON") + + print(" all building setup done.") + print("") + + return gdf + +# +#---------------------------------------------------------------------------------------- +# + +def get_topo_DEM(mesh_area): + """ 
+ downloads elevation data using GMT + """ + # mesh area min/max + lon_min = mesh_area[0] + lat_min = mesh_area[1] + lon_max = mesh_area[2] + lat_max = mesh_area[3] + + # check + if lon_min > lon_max or lat_min > lat_max: + print("Wrong mesh area format, please provide in a format (lon_min,lat_min,lon_max,lat_max)") + sys.exit(1) + + print("topography:") + print(" lat min/max: ",lat_min,lat_max) + print(" lon min/max: ",lon_min,lon_max) + print("") + + # GMT python interface + # https://www.pygmt.org/dev/api/generated/pygmt.datasets.load_earth_relief.html + # + # GMT region format: e.g. -R123.0/132.0/31.0/40.0 + gmt_region = [lon_min,lon_max,lat_min,lat_max] + + dim_max = max((lat_max-lat_min),(lon_max-lon_min)) + + # determines resolution + if dim_max > 5.0: + # ETOPO1 (1-arc minute) + # (topo_grid is an xarray data object) + res = '01m' + elif dim_max > 1.0: + # SRTM 3S (3-arc sec) + res = '03s' + else: + # SRTM 1S (1-arc sec) + res = '01s' # '01s' + + print(" maximum dimension extent: ",dim_max,"(deg)") + print(" using resolution : ",res) + print("") + + topo_grid = pygmt.datasets.load_earth_relief(resolution=res, + region=gmt_region, + registration="gridline") + print("") + + # checks units are in m + if not topo_grid.units == 'meters': + print("Error: topo grid has invalid unit ",topo_grid.units) + sys.exit(1) + + # debug + #lat = 40.0 + #lon = 100.0 + #elevation = get_topo_elevation(lat,lon,topo_grid) + #print("debug: lat/lon/elevation = ",lat,lon,elevation) + + # show/write figure plot + if 1 == 1: + fig = pygmt.Figure() + fig.grdimage(grid=topo_grid, + cmap="haxby", + projection="M10c", + frame=True, + ) + fig.colorbar(frame=["x+lelevation", "y+lm"]) + #fig.show() + + # save figure as jpeg image + name = "./output_topography.jpg" + fig.savefig(name, crop=True, dpi=720) + print(" figure plotted to: ",name) + print("") + + print(" topo setup done.") + print("") + + return topo_grid + +# +#---------------------------------------------------------------------------------------- +# + +def get_topo_elevation(lat,lon,topo_grid): + # gets elevation interpolated by nearest neighbor method + elevation = topo_grid.interp(lon=lon,lat=lat,method="nearest") + + # extract simple float value from returned xarray object + elevation_val = elevation.data + + return elevation_val + +# +#---------------------------------------------------------------------------------------- +# + +def convert_coordinates_to_UTM(gdf,mesh_area,lons,lats): + global transformer_to_utm + + # user output + if transformer_to_utm == None: + print("converting coordinates to UTM...") + print("") + # pyproj coordinate system info: + # WGS84 == EPSG:4326 + # spherical mercator, google maps, openstreetmap == EPSG:3857 + # + # we first need to determine the correct UTM zone to get the EPSG code. + # for this, we take the mesh origin point and convert it to lat/lon (GPS) position. + # we can then query the corresponding UTM zone for this position. 
+ + # Coordinate Reference System + ref_epsg = str(gdf.crs) + + # check reference is WGS84 + if ref_epsg != 'epsg:4326': + print("Error: reference coordinate system not recognized: ",ref_epsg) + sys.exit(1) + + # mesh area min/max + lon_min = mesh_area[0] + lat_min = mesh_area[1] + lon_max = mesh_area[2] + lat_max = mesh_area[3] + + # mesh origin position + orig_lon = 0.5 * (lon_min + lon_max) + orig_lat = 0.5 * (lat_min + lat_max) + + # user output + print(" origin: lon/lat = ",orig_lon,orig_lat) + print("") + + # gets list of UTM codes + utm_crs_list = pyproj.database.query_utm_crs_info( + datum_name="WGS 84", + area_of_interest=pyproj.aoi.AreaOfInterest(west_lon_degree=orig_lon, + south_lat_degree=orig_lat, + east_lon_degree=orig_lon, + north_lat_degree=orig_lat)) + utm_code = utm_crs_list[0].code + utm_epsg = "EPSG:{}".format(utm_code) + + print(" UTM code:", utm_code," epsg: ", utm_epsg) + + # transformer + # transformation from WGS84 to UTM zone + # WGS84: Transformer.from_crs("EPSG:4326", utm_epsg) + #transformer_to_utm = Transformer.from_crs(ref_epsg, utm_epsg) # input: lat/lon -> utm_x,utm_y + transformer_to_utm = Transformer.from_crs(ref_epsg, utm_epsg, always_xy=True) # input: lon/lat -> utm_x/utm_y + + #debug + #print(transformer_to_utm) + #print("debug: lon/lat ",transformer_to_utm.transform(orig_lon,orig_lat)) + #print("debug: lat/lon ",transformer_to_utm.transform(orig_lat,orig_lon)) + + # user info + utm_x,utm_y = transformer_to_utm.transform(orig_lon,orig_lat) + print(" -> UTM x/y = ",utm_x,utm_y) + print(" backward check: orig x/y = ",transformer_to_utm.transform(utm_x,utm_y,direction='INVERSE')) + print("") + + # converts point coordinates + x = lons + y = lats + + # converts to UTM location (x and y) + #point = np.array([x,y]) + x_utm,y_utm = transformer_to_utm.transform(x,y) + + return x_utm,y_utm + +# +#---------------------------------------------------------------------------------------- +# + +def create_building_mesh(gdf,mesh_area,elevation,obj,meshes,icount_added): + """ + creates a 3d mesh for building specified by obj + """ + geometry = obj['geometry'] + + # gets building height + height = get_building_height(obj) + + # gets roof height and type + roof_type, roof_height, roof_direction, roof_orientation = get_building_roof_type(obj) + + # creates extruded building from footprint polygon + if geometry.geom_type == 'Polygon': + # For a single Polygon + poly = geometry + + # creates building mesh + b_mesh = create_extruded_building(gdf,mesh_area,height,roof_type,roof_height,roof_direction,roof_orientation,elevation,poly) + + # add to overall mesh + if not b_mesh is None: + meshes.append(b_mesh) + # counter + icount_added += 1 + + elif geometry.geom_type == 'MultiPolygon': + # For MultiPolygon + # create "building" for each polygon + for poly in geometry.geoms: + # creates building mesh + b_mesh = create_extruded_building(gdf,mesh_area,height,roof_type,roof_height,roof_direction,roof_orientation,elevation,poly) + + # add to overall mesh + if not b_mesh is None: + meshes.append(b_mesh) + # counter + icount_added += 1 + + else: + print("Error: unsupported geometry type: ",geometry.geom_type) + print(" object location centroid : ",geometry.centroid) + raise ValueError("Unsupported geometry type") + + return meshes,icount_added + +# +#---------------------------------------------------------------------------------------- +# + +def get_building_height(obj): + """ + returns the height of the building (in m) + """ + global building_height_min + + # initialize building 
height + height = building_height_min + + #debug + #has_height_info = False + + # check building height infos + h = obj['building:height'] + if not np.isnan(h): + height = h + #debug + #has_height_info = True + #name = "" if str(obj['name']) == 'nan' else " - "+obj['name'] + #print("debug: obj found building:height ",obj['building:height'],name) + else: + # check height instead of building:height + h = obj['height'] + if not np.isnan(h): + height = h + #debug + #has_height_info = True + #name = "" if str(obj['name']) == 'nan' else " - "+obj['name'] + #print("debug: obj found height ",obj['height'],name) + else: + # check for levels + lev = obj['building:levels'] + if not np.isnan(lev): + height = lev * building_height_min + #debug + #has_height_info = True + #name = "" if str(obj['name']) == 'nan' else " - "+obj['name'] + #print("debug: obj found building:levels ",obj['building:levels'],name) + + # checks + if np.isnan(height): height = building_height_min + + # sets a minimum building height if specified height is < 1m + if height <= 1.0: height = 1.0 + + #debug + #print("debug: building height = ",height,has_height_info) + + return height + +# +#---------------------------------------------------------------------------------------- +# + +def get_building_roof_type(obj): + """ + returns the type and height of the roof (in m) + """ + global roof_level_height + + # initialize roof height + # there will be no roof minimum height, only if there is more roof infos we will add it + roof_type = 'flat' + height = 0.0 + direction = np.nan + orientation = 'along' + + # check building height infos + lev = obj['roof:levels'] + if not np.isnan(lev): + height = lev * roof_level_height + #debug + #has_height_info = True + #name = "" if str(obj['name']) == 'nan' else " - "+obj['name'] + #print("debug: obj found roof:levels ",obj['roof:levels'],name) + + # check building direction infos + val = obj['roof:direction'] + if not np.isnan(val): + direction = val + #debug + #has_height_info = True + #name = "" if str(obj['name']) == 'nan' else " - "+obj['name'] + #print("debug: obj found roof:direction ",obj['roof:direction'],obj['roof:shape'],obj['roof:type'],name) + + # checks shape info + shape = obj['roof:shape'] + if isinstance(shape, str): + # check shape + if shape == 'flat': + roof_type = 'flat' + + elif shape == 'gabled': + roof_type = 'gabled' + # adds minimum height for gabled roof + if height == 0.0: height = roof_level_height + + elif shape == 'hipped': + roof_type = 'hipped' + # adds minimum height for gabled roof + if height == 0.0: height = roof_level_height + + elif shape == 'side_hipped': + roof_type = 'side_hipped' + if height == 0.0: height = roof_level_height + + elif shape == 'skillion': + roof_type = 'skillion' + if height == 0.0: height = roof_level_height + + elif shape == 'sawtooth': + roof_type = 'sawtooth' + if height == 0.0: height = roof_level_height + + elif shape == 'quadruple_saltbox': + roof_type = 'quadruple_saltbox' + if height == 0.0: height = roof_level_height + + elif shape == 'pyramidal': + roof_type = 'pyramidal' + if height == 0.0: height = 2.0 * roof_level_height + + elif shape == 'dome': + roof_type = 'dome' + if height == 0.0: height = 2.0 * roof_level_height + + elif shape == 'cone': + roof_type = 'cone' + if height == 0.0: height = 2.0 * roof_level_height + + else: + # not implemented yet - we will set it to flat + roof_type = 'flat' + height = 0.0 + # debug + #name = "" if str(obj['name']) == 'nan' else " - "+obj['name'] + #print("building has unrecognized roof 
shape: ", shape,name) + + # checks type info + # this seems mostly unset, i.e., nan, with a few ones which are set to 'flat' (probably by mistake?) + type = obj['roof:type'] + if isinstance(type, str): + #debug + #name = "" if str(obj['name']) == 'nan' else " - "+obj['name'] + #print("debug: obj found roof type ",roof_type,height,obj['roof:type'],obj['roof:shape'],obj['roof:levels']) + + # check type + if type == 'flat': + roof_type = 'flat' + + elif type == 'gabled': + roof_type = 'gabled' + # adds minimum height for gabled roof + if height == 0.0: height = roof_level_height + + elif type == 'hipped': + roof_type = 'hipped' + # adds minimum height for gabled roof + if height == 0.0: height = roof_level_height + + elif shape == 'dome': + roof_type = 'dome' + if height == 0.0: height = 2.0 * roof_level_height + + elif shape == 'cone': + roof_type = 'cone' + if height == 0.0: height = 2.0 * roof_level_height + + else: + # not implemented yet - we will set it to flat + roof_type = 'flat' + height = 0.0 + #debug + #name = "" if str(obj['name']) == 'nan' else " - "+obj['name'] + #print("building has unrecognized roof type: ", type,name) + + # checks orientation info + orient = obj['roof:orientation'] + if isinstance(orient, str): + # check orientation + if orient == 'along': + orientation = 'along' + + elif orient == 'across': + orientation = 'across' + + else: + # invalid, will use 'along' + orientation = 'along' + #debug + #name = "" if str(obj['name']) == 'nan' else " - "+obj['name'] + #print("building has roof orientation: ",orient,name) + + return roof_type, height, direction, orientation + +# +#---------------------------------------------------------------------------------------- +# + +def create_extruded_building(gdf,mesh_area,height,roof_type,roof_height,roof_direction,roof_orientation,elevation,poly): + """ + create building mesh by extrusion + """ + # sets building height for extrusion + # checks if roof height needs to be added + # only for simple 'flat' roof, more complicated roofs will be added by an extra mesh later on + if roof_type == 'flat' and roof_height > 0.0: + # we only add roof height to building's height + height += roof_height + + # checks if something to do + if abs(height) < 1.e-5: + return None + + # polygon points + #print("debug: exteriors ",poly.exterior) + #print("debug: interiors ",poly.interiors,len(poly.interiors)) + + # exterior points + points = list(poly.exterior.coords) + + # convert to numpy array + #coords_outer = np.array(points) + + # check for interior points + points_inner = [] + for interior in poly.interiors: + for xy in interior.coords: + points_inner.append(xy) + + # array for inner points + coords_inner = np.array(points_inner) + + # combine all points for triangulation + # add inner points to point list + points.extend(points_inner) + + # overall points as numpy array + coords = np.array(points) + + # get all lon/lat positions as separate arrays + lons,lats = coords.T + + # convert lon/lat to UTM + utm_x,utm_y = convert_coordinates_to_UTM(gdf,mesh_area,lons,lats) + + coords = np.column_stack((utm_x,utm_y)) + + # inner polygon + if len(coords_inner) > 0: + # get all lon/lat positions as separate arrays + lons,lats = coords_inner.T + # convert lon/lat to UTM + utm_x,utm_y = convert_coordinates_to_UTM(gdf,mesh_area,lons,lats) + # store utm coordinates + coords_inner = np.column_stack((utm_x,utm_y)) + + # debug timing + #if icount % noutput_info == 0: + # toc2 = time.perf_counter() + # print(" coord {} - elapsed time {:0.4f} 
s".format(coords[0],(toc2-toc))) + + #debug + #print("new coords: ",coords) + + # triangulate and extrude + triangle_faces = get_triangulation(coords,coords_inner) + + # check + if len(triangle_faces) == 0: + return None + + # creates Mesh by extrusion + b_mesh = trimesh.creation.extrude_triangulation(vertices=coords, faces=triangle_faces, + height=height) + + # checks number of vertices in mesh + if len(b_mesh.vertices) == 0: + return None + + #debug + #if not np.isnan(roof_direction): print("debug: roof direction ",roof_direction,roof_type,roof_height,"location: ",points[0]) + + # adds roof + if roof_type != 'flat' and roof_height > 0.0: + b_mesh = add_roof_mesh(b_mesh,height,roof_type,roof_height,roof_direction,roof_orientation) + + ## topography + # Edit vertices to have correct z values + verts = b_mesh.vertices.view(np.ndarray) + # adds topographic elevation to z-coordinates + verts[:,2] += elevation + + # check + if np.isnan(verts).any(): + #print("error: mesh verts ",verts,"height",height,"elevation",elevation) + return None + + # updates mesh vertices + b_mesh.vertices = verts + + # updates face normals + b_mesh.fix_normals() + + # debug + #print("debug: mesh is empty {} watertight {} convex {} volume {} winding_consistent {}".format( + # b_mesh.is_empty,b_mesh.is_watertight,b_mesh.is_convex,b_mesh.is_volume,b_mesh.is_winding_consistent)) + + return b_mesh + +# +#---------------------------------------------------------------------------------------- +# + +def get_triangulation(coords, coords_inner): + + def in_polygon(point, polygon, hole): + # Check if a point is inside the polygon (excluding holes) + # check if inside + path = Polygon(polygon).get_path() + is_inside = path.contains_point(point) + #debug + #print("debug: point is in polygon: ",is_inside) + + # check if point is inside a hole area + if len(hole) > 0: + path = Polygon(hole).get_path() + is_inside_hole = path.contains_point(point) + #debug + #print("debug: point is in hole : ",is_inside_hole) + else: + is_inside_hole = False + + if is_inside and not is_inside_hole: + return True + else: + return False + + def filter_triangles(tris, polygon, hole): + """ + simple check if triangle center is inside the polygon with a possible hole in it + """ + # Filter triangles to keep only those inside the polygon (excluding holes) + filtered_triangles = [] + for face in tris.simplices: + # gets triangle centroid point + positions = tris.points[face] + triangle_center = np.mean(positions, axis=0) + + #debug + #print("triangle: ",face,positions,"center: ",triangle_center) + + # keep triangle if inside polygon + if in_polygon(triangle_center, polygon, hole): + filtered_triangles.append(face) + + return np.array(filtered_triangles) + + # get triangulation using all coordinate points + tris = Delaunay(coords) + + # check if triangles are inside outer polygon path and not inside an inner "hole" path + polygon = coords # note: need full coords and not coord_outer here, otherwise no triangles will get selected + hole = coords_inner + + # Filter triangles to keep only those inside the original polygon (excluding holes) + triangle_faces = filter_triangles(tris, polygon, hole) + + return triangle_faces + + +# +#---------------------------------------------------------------------------------------- +# + +def add_roof_mesh(b_mesh,height,roof_type,roof_height,roof_direction,roof_orientation): + """ + adds a roof mesh to the building + """ + # we will scale and move a simple roof primitive to match the building bounding box + # this assumes 
that the initial building footprint is close to a rectangle, if not the roof would not work + + def normalize_angle(angle): + # Ensure the angle is within [0, 360) + normalized_angle = angle % 360.0 + # If the angle is negative, wrap it around to the positive side + if normalized_angle < 0.0: normalized_angle += 360.0 + return normalized_angle + + def angular_difference(angle1, angle2): + # Calculate the raw angular difference + raw_difference = angle1 - angle2 + # Calculate the normalized angular difference in the range [-180, 180] + normalized_difference = (raw_difference + 180) % 360 - 180 + return normalized_difference + + # determine scale and orientation of base plane of mesh + points = b_mesh.vertices + + #debug + #print("debug: roof: ",roof_type," roof height: ", roof_height," direction: ", roof_direction," - position: ",b_mesh.centroid) + #print("debug: mesh original points: ",points) + + # selects only base points + points = points[points[:,2] < 0.1] + + # takes only x/y coordinates + base_points = points[:, :2] + + #debug + # center of base points + #center = np.mean(base_points, axis=0) + #print("debug: mesh base points: ",base_points," center: ",center) + + # gets oriented 2d bounding box + # note: the transformation T will offset and rotate to align with the origin and xyz-axis + # in order to move the roof to the mesh location and align it, we will have to invert the transformation T. + T, rectangle = trimesh.bounds.oriented_bounds_2D(base_points) + + #debug + #print("debug: 2d oriented bounds: ",T,"rectangle: ",rectangle) + + # for gabled/hipped roofs + # check if building footprint is similar to a simple rectangle + if roof_type != 'dome' and roof_type != 'cone': + # we'll use the area of the footprint to see if it matches the rectangle area, + # otherwise the shape must be quite different + area = rectangle[0] * rectangle[1] + # area is volume / height for our extrusion mesh + mesh_area = b_mesh.volume / height + #debug + #print("debug: mesh area = ",mesh_area," - rectangle area = ",area) + # check + if abs(mesh_area - area) > 0.1 * area: + #debug + #print("roof: building footprint won't match a rectangular shape by area:",mesh_area," rectangle area:",area,"roof type: ",roof_type) + # just returns the building mesh as is + return b_mesh + + # constructs a roof primitive + roof_mesh, roof_height = get_roof_primitive(roof_type,roof_height,base_points,height) + + # checks roof mesh + if roof_mesh is None: + # no roof created, returns building mesh as is + return b_mesh + + # changes rectangle size for spherical roofs + if roof_type == 'dome' or roof_type == 'cone': + # adapts rectangle size since dome/cone uses radius, not diameters + rectangle = rectangle / 2.0 + + # mask for points making up base of roof + if roof_type == 'sawtooth': + # only select corner points from the base (not base points in the middle of the edges) + mask_roof_base_points = (roof_mesh.vertices[:,2] < 0.1) & (np.abs(roof_mesh.vertices[:,0]) > 0.499) + else: + # shapes like gabled/hipped/.. 
have only 4 corners at the base + mask_roof_base_points = roof_mesh.vertices[:,2] < 0.1 + + # trimesh seems to return the larger dimension first + if rectangle[0] > rectangle[1]: + # rotates roof by an additional 90 degree to have longer direction align with y-direction of the roof + # looks nicer for gabled and hipped roofs + angle = 90.0 # in degrees + R = trimesh.transformations.rotation_matrix(np.deg2rad(angle), + direction=[0.0, 0.0, 1.0]) + roof_mesh.apply_transform(R) + + # default orientation is 'along' which aligns with the roof primitive, + # we only need to rotation for 'across' orientations + if roof_orientation == 'across': + # rotates roof by another 90 degree to have shorter direction align with y-direction of the roof + angle = 90.0 # in degrees + R = trimesh.transformations.rotation_matrix(np.deg2rad(angle), + direction=[0.0, 0.0, 1.0]) + roof_mesh.apply_transform(R) + + # determines roof direction + if roof_type == 'skillion' and not np.isnan(roof_direction): + # roof direction + # see: https://wiki.openstreetmap.org/wiki/Key:roof:direction + + # first determine building angle + # Compute the oriented bounding box (OBB) of the mesh + obb = b_mesh.bounding_box_oriented + # Extract the rotation angles from the OBB transformation matrix + angles = trimesh.transformations.euler_from_matrix(obb.transform, 'sxyz') + # angles variable now contains the rotation angles around the X, Y, and Z axes + # takes angle around the Z axis (vertical) corresponds to the North direction + north_angle = angles[2] + # Convert the angle to degrees if needed + b_direction = np.degrees(north_angle) + # directions are within the range [0,360] + b_direction = normalize_angle(b_direction) + + # angle difference between building and roof direction (in range [-180,180]) + # example: building angle = 0.0 roof direction = 80.0 -> difference = 80.0 + # building angle = 230.0 roof direction = 210.0 -> difference = -20.0 + angle_diff = angular_difference(roof_direction, b_direction) + + # direction tolerance in degrees + TOL_DIR = 45.0 + + # checks if we need to rotate roof + # todo - this is still unclear what the correct rotation should be: + # the building angle seems to be dominated by the longest length of the building, + # and since the skillion roof primitive aligns with the longest length, the rain run-off direction which + # is the roof direction for skillion roofs goes perpendicular to this longest length direction. + # + # for now, we only rotate if there is a +90 degree offset between building and roof - meaning that the rain run-off + # from the roof should be in opposite direction, assuming that the skillion roof primitive has a default run-off direction + # of -90 degree with respect to the building direction. 
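As an aside, the angle bookkeeping above is easy to sanity-check in plain Python: headings are normalized into [0, 360) and differences are wrapped into [-180, 180], which reproduces the two worked examples given in the comments. The snippet below is a minimal standalone sketch (no trimesh), with the helper bodies mirroring those defined in add_roof_mesh and the sample headings chosen purely for illustration.

```python
# minimal sketch of the roof/building angle bookkeeping (plain Python, no trimesh)
def normalize_angle(angle):
    # maps any heading into [0, 360)
    return angle % 360.0

def angular_difference(angle1, angle2):
    # wraps the raw difference into [-180, 180]
    return (angle1 - angle2 + 180.0) % 360.0 - 180.0

# worked examples from the comments above
print(angular_difference(80.0, 0.0))            # -> 80.0
b_direction = normalize_angle(-130.0)           # -> 230.0
print(angular_difference(210.0, b_direction))   # -> -20.0

# a difference within 45 degrees of +90 triggers the 180-degree roof rotation
TOL_DIR = 45.0
angle_diff = angular_difference(80.0, 0.0)
print(90.0 - TOL_DIR < angle_diff < 90.0 + TOL_DIR)  # -> True
```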
+ # + # checks angle difference + if angle_diff > 90.0 - TOL_DIR and angle_diff < 90.0 + TOL_DIR: + # 90-degree difference + #print("debug: 90-deg building angle ",b_direction,"roof direction: ",roof_direction,"difference = ",angle_diff) + angle = 180.0 # (in degrees) rotate by 180-degree to have rain run-off angle in opposite direction + + elif angle_diff > 180.0 - TOL_DIR and angle_diff < 180.0 + TOL_DIR: + # 180-degree difference + #print("debug: 180-deg building angle ",b_direction,"roof direction: ",roof_direction,"difference = ",angle_diff) + angle = 0.0 # no rotation + + elif angle_diff > -90.0 - TOL_DIR and angle_diff < -90.0 + TOL_DIR: + # minus 90-degree difference + #print("debug: -90-deg building angle ",b_direction,"roof direction: ",roof_direction,"difference = ",angle_diff) + angle = 0.0 # no rotation + + elif angle_diff > -180.0 - TOL_DIR and angle_diff < -180.0 + TOL_DIR: + # minus 180-degree difference + #print("debug: -180-deg building angle ",b_direction,"roof direction: ",roof_direction,"difference = ",angle_diff) + angle = 0.0 # no rotation + + else: + # angle difference between +/- 45 degrees + #print("debug: no rot building angle ",b_direction,"roof direction: ",roof_direction,"difference = ",angle_diff) + angle = 0.0 + + # apply rotation + if abs(angle) > 1.0: + R = trimesh.transformations.rotation_matrix(np.deg2rad(angle), + direction=[0.0, 0.0, 1.0]) + roof_mesh.apply_transform(R) + + # scale roof in x/y dimension, scales vertically later + scale_vec = [rectangle[0], rectangle[1], 1.0] + # scale + roof_mesh.apply_scale(scale_vec) + + # convert to 3D transformation (around z-axis) + T_3d = trimesh.transformations.planar_matrix_to_3D(T) + # takes inverse to have rotation inverse than to_origin, i.e., we want to move and rotate our roof from the origin location + # to the actual building mesh position + roof_mesh.apply_transform(np.linalg.inv(T_3d)) + + # scale roof vertically + scale_vec = [1.0, 1.0, roof_height] + # scale + roof_mesh.apply_scale(scale_vec) + + # move to top of mesh (roof sits on top of building mesh) + translate_vec = [0.0, 0.0, height] + roof_mesh.apply_translation(translate_vec) + + # for gabled/hipped roofs + # let's try to move the base points of the roof to match the top corners of the building + if roof_type != 'dome' and roof_type != 'cone': + # tries to match corners of roof with corners of building + roof_verts = roof_mesh.vertices[mask_roof_base_points] + + # gets closest, distance and vertex id of closest point in building mesh + closest, distance, tid = trimesh.proximity.closest_point(b_mesh,roof_verts) + + for i,point in enumerate(roof_verts): + #debug + #print("debug: roof base points: ",roof_verts[i],"closest: ",closest[i],distance[i],tid[i],"height: ",height) + # sets roof point to closest point from building + if distance[i] < height: roof_verts[i] = closest[i] + # updates vertex position of roof base + roof_mesh.vertices[mask_roof_base_points] = roof_verts + + # building position + # gets maximum height of mesh + #bbox = b_mesh.bounding_box + #mesh_height = bbox.bounds[:,2].max() + #debug + #print("debug: mesh: centroid ",b_mesh.centroid,"bounds: ",b_mesh.bounds) + #print("debug: mesh bounding box: ", bbox.bounds) + #print("debug: mesh max height: ", mesh_height," input height: ",height) + # safety check + #if abs(mesh_height - height) > 1.e-5: + # print("Error: mesh_height and height do not match: ",mesh_height,height) + # sys.exit(1) + + # combine both meshes, building and roof into a single mesh + combined_mesh = 
trimesh.util.concatenate([b_mesh,roof_mesh]) + + # remove degenerate faces + combined_mesh.update_faces(combined_mesh.nondegenerate_faces()) + + # remove vertices not needed + combined_mesh.remove_unreferenced_vertices() + + # removes duplicate vertices + combined_mesh.merge_vertices() + + return combined_mesh + +# +#---------------------------------------------------------------------------------------- +# + +def get_roof_primitive(roof_type,roof_height,base_points,height): + """ + creates a geometric primitive of different roof types which will need to be scaled and positioned + """ + # Openstreetmap roof shapes + # see: https://wiki.openstreetmap.org/wiki/Key:roof:shape + # + if roof_type == 'gabled': + # gabled roof + # based on prism + # _____________ + # z / /\ + # |__ y / / \ + # / / / \ + # x /____________/****** + # + tris = np.array([[ 0.5, -0.5, 0.0], + [ 0.0, -0.5, 1.0], + [-0.5, -0.5, 0.0]]) + truncation_plane_origin = [0.0, 0.5, 0.0] + truncation_plane_normal = [0.0, 1.0, 0.0] + + roof_mesh = trimesh.creation.truncated_prisms(tris, + origin=truncation_plane_origin, + normal=truncation_plane_normal) + + elif roof_type == 'hipped': + # hipped roof + # starts like a gabled roof top and then moves top points closer to each other + # based on prism + # _____________ + # z / /\ + # |__ y / / \ + # / / / \ + # x /____________/****** + # + tris = np.array([[ 0.5, -0.5, 0.0], + [ 0.0, -0.5, 1.0], + [-0.5, -0.5, 0.0]]) + truncation_plane_origin = [0.0, 0.5, 0.0] + truncation_plane_normal = [0.0, 1.0, 0.0] + + roof_mesh = trimesh.creation.truncated_prisms(tris, + origin=truncation_plane_origin, + normal=truncation_plane_normal) + + # moves top points closer together + # _________ + # z / |\ + # |__ y / | \ + # / / | \ + # x /____________|**** + # + # hipped + verts = roof_mesh.vertices + # moves the two top nodes closer to each other + verts[1] = [ 0.0, -0.2, 1.0] + verts[4] = [ 0.0, 0.2, 1.0] + # updates roof primitive + roof_mesh.vertices = verts + + elif roof_type == 'side_hipped': + # side-hipped roof + # https://wiki.openstreetmap.org/wiki/Tag:roof:shape=side%20hipped?uselang=en + + # constructs a roof primitive + # based on prism + # hipped on one side only + # + # ._________ + # z . /\ + # |__ y . / \ + # / . / \ + # x /____________/****** + # + tris = np.array([[ 0.5, -0.5, 0.0], + [ 0.0, -0.5, 1.0], + [-0.5, -0.5, 0.0]]) + truncation_plane_origin = [0.0, 0.5, 0.0] + truncation_plane_normal = [0.0, 1.0, 0.0] + + roof_mesh = trimesh.creation.truncated_prisms(tris, + origin=truncation_plane_origin, + normal=truncation_plane_normal) + # side-hipped + verts = roof_mesh.vertices + # moves only one node closer to center + verts[1] = [ 0.0, -0.2, 1.0] + # updates roof primitive + roof_mesh.vertices = verts + + elif roof_type == 'skillion': + # skillion roof + # based on prism + # _____________ + # z / /| + # |__ y / / | + # / / / | + # x /____________/***| + # + tris = np.array([[ 0.5, -0.5, 0.0], + [-0.5, -0.5, 1.0], + [-0.5, -0.5, 0.0]]) + truncation_plane_origin = [0.0, 0.5, 0.0] + truncation_plane_normal = [0.0, 1.0, 0.0] + + roof_mesh = trimesh.creation.truncated_prisms(tris, + origin=truncation_plane_origin, + normal=truncation_plane_normal) + + elif roof_type == 'sawtooth': + # sawtooth roof + # based on prism + # _____________ ____ ____ + # z / /| /| /| + # |__ y / / | / | / | + # / / / | / | / | + # x /____________/***|/***|/***| + # + tris = np.array([[ 0.5, -0.5, 0.0], # 1. triangle + [ 0.1666, -0.5, 1.0], + [ 0.1666, -0.5, 0.0], + [ 0.1666, -0.5, 0.0], # 2. 
triangle + [-0.1666, -0.5, 1.0], + [-0.1666, -0.5, 0.0], + [-0.1666, -0.5, 0.0], # 3.triangle + [-0.5, -0.5, 1.0], + [-0.5, -0.5, 0.0] + ]) + truncation_plane_origin = [0.0, 0.5, 0.0] + truncation_plane_normal = [0.0, 1.0, 0.0] + + roof_mesh = trimesh.creation.truncated_prisms(tris, + origin=truncation_plane_origin, + normal=truncation_plane_normal) + + + elif roof_type == 'pyramidal': + # creates a pyramid + # based on prism, merging top two points + # _____.________ + # z / . . /\ + # |__ y / . . / \ + # / / . . / \ + # x /.___________./****** + # + tris = np.array([[ 0.5, -0.5, 0.0], + [ 0.0, -0.5, 1.0], + [-0.5, -0.5, 0.0]]) + truncation_plane_origin = [0.0, 0.5, 0.0] + truncation_plane_normal = [0.0, 1.0, 0.0] + + roof_mesh = trimesh.creation.truncated_prisms(tris, + origin=truncation_plane_origin, + normal=truncation_plane_normal) + + # merges the two top nodes + verts = roof_mesh.vertices + verts[1] = [ 0.0, 0.0, 1.0] + verts[4] = [ 0.0, 0.0, 1.0] + # updates roof primitive + roof_mesh.vertices = verts + + elif roof_type == 'quadruple_saltbox': + # quadruple-saltbox roof + # based on prism, merging top two points, then cutting top tip off + # + # z *___* + # |__ y *____*/\ + # / . . \ + # x ._________./ + # + tris = np.array([[ 0.5, -0.5, 0.0], + [ 0.0, -0.5, 1.0], + [-0.5, -0.5, 0.0]]) + truncation_plane_origin = [0.0, 0.5, 0.0] + truncation_plane_normal = [0.0, 1.0, 0.0] + + roof_mesh = trimesh.creation.truncated_prisms(tris, + origin=truncation_plane_origin, + normal=truncation_plane_normal) + + # merges the two top nodes + verts = roof_mesh.vertices + verts[1] = [ 0.0, 0.0, 1.0] + verts[4] = [ 0.0, 0.0, 1.0] + + # updates roof primitive + roof_mesh.vertices = verts + + # cuts away top tip + roof_mesh = roof_mesh.slice_plane(plane_origin=[0.0,0.0,0.5], plane_normal=[0,0,-1], cap=True) + + # remove degenerate faces to make mesh watertight + roof_mesh.update_faces(roof_mesh.nondegenerate_faces()) + #debug + #print("debug: quadruple_saltbox is watertight: ",roof_mesh.is_watertight) + + elif roof_type == 'dome': + # dome roof + # creates a sphere and cuts away lower hemisphere + roof_mesh = trimesh.creation.uv_sphere(radius=1) + + # cuts away lower hemisphere (cap=True to have a watertight mesh) + roof_mesh = roof_mesh.slice_plane(plane_origin=[0.0,0.0,0.0], plane_normal=[0,0,1], cap=True) + + # determines radius to inscribe a sphere for vertical scaling of the dome + center, radius = trimesh.nsphere.minimum_nsphere(base_points) + # limit radius (taking the full inscribed radius becomes fairly high) + radius *= 0.6 + # limit radius to building height + if radius > height: radius = height + # update roof height + roof_height = radius + + elif roof_type == 'cone': + # cone roof + # creates a cone + roof_mesh = trimesh.creation.cone(radius=1,height=1) + + # determines radius to inscribe a sphere for vertical scaling of the cone + center, radius = trimesh.nsphere.minimum_nsphere(base_points) + # limit radius (taking the full inscribed radius becomes fairly high) + radius *= 0.6 + # limit radius to building height + if radius > height: radius = height + # update roof height + roof_height = radius + + else: + #debug + #print("Roof type not supported yet: ",roof_type," - will use a flat roof...") + return None, roof_height + + return roof_mesh, roof_height + +# +#---------------------------------------------------------------------------------------- +# + +def determine_building_elevations(gdf,topo_grid,mesh_area): + """ + determines for each building its topographic elevation based on the 
building's centroid position + """ + print(" determining building elevations") + print("") + + # mesh area min/max + lon_min = mesh_area[0] + lat_min = mesh_area[1] + lon_max = mesh_area[2] + lat_max = mesh_area[3] + + # buildings + num_buildings = gdf.shape[0] + + # arrays for building positions + lons = np.zeros(num_buildings) + lats = np.zeros(num_buildings) + #building_elevations = np.zeros(num_buildings) + + # timing + tic = time.perf_counter() + + # number to output in steps of 10/100/1000 depending on how large the number of polyfaces is + if num_buildings > 100000: + noutput_info = min(10000,int(10**np.floor(np.log10(num_buildings)))) + else: + noutput_info = min(1000,int(10**np.floor(np.log10(num_buildings)))) + + icount = 0 + for index, obj in gdf.iterrows(): + # counter + icount += 1 + + # user output + if icount % noutput_info == 0: + # timing + toc = time.perf_counter() + print(" building: {} out of {} - elapsed time {:0.2f} s".format(icount,num_buildings,(toc-tic))) + + # Access the geometry of the building + geometry = obj['geometry'] + + # check + if not geometry.is_valid: + print("Error: found invalid geometry ",geometry," this should have been checked before, exiting...") + sys.exit(1) + + # centroid position lat / lon + lon = geometry.centroid.x + lat = geometry.centroid.y + + # check + if np.isnan(lon) or np.isnan(lat): + print("Error: building {} has invalid centroid lon/lat {}/{} - geometry {}, exiting...".format(index,lon,lat,geometry.centroid)) + sys.exit(1) + + # keep inside mesh area + if lon < lon_min: lon = lon_min + if lon > lon_max: lon = lon_max + if lat < lat_min: lat = lat_min + if lat > lat_max: lat = lat_max + + # stores centroid position + lons[icount-1] = lon + lats[icount-1] = lat + + # single value call - too slow for many buildings... + # gets elevations (interpolated by nearest neighbor method) + #building_elevations[icount-1] = get_topo_elevation(lat,lon,topo_grid) + print("") + + # check + if icount != num_buildings: + print("Error: number of buildings {} should match all {}".format(icount,num_buildings)) + sys.exit(1) + + # gets all base elevations (interpolated by nearest neighbor method) + # returned array will be a 2d array[nlats,nlons] + # this becomes too big for many buildings... 
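The commented-out calls below show why the script switches to xarray's pointwise ("advanced") indexing: passing plain arrays to interp() interpolates every lat/lon combination and returns an nlats x nlons grid, whereas wrapping the coordinates in DataArrays that share a new dimension evaluates only the paired points and returns a 1-D array. A small self-contained illustration of that difference, using a synthetic topography grid and hypothetical centroid positions:

```python
import numpy as np
import xarray as xr

# synthetic topography grid (values = lat + lon, so interpolated results are easy to check)
lats = np.linspace(40.0, 45.0, 51)
lons = np.linspace(10.0, 15.0, 51)
topo_grid = xr.DataArray(lats[:, None] + lons[None, :],
                         coords={"lat": lats, "lon": lons},
                         dims=("lat", "lon"))

# hypothetical building centroids
b_lons = np.array([10.2, 12.5, 14.9])
b_lats = np.array([40.1, 42.0, 44.7])

# plain array indexers -> outer product over all combinations: shape (3, 3)
grid_result = topo_grid.interp(lon=b_lons, lat=b_lats)
print(grid_result.shape)        # (3, 3)

# DataArray indexers sharing a dimension -> pointwise result: shape (3,)
x = xr.DataArray(b_lons, dims="z")
y = xr.DataArray(b_lats, dims="z")
point_result = topo_grid.interp(lon=x, lat=y)
print(point_result.data)        # ~[50.3 54.5 59.6]
```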
+ #building_elevations = get_topo_elevation(lats,lons,topo_grid) + # extracts 1d array needed only (array[i,i]) + #building_elevations = np.diag(building_elevations) + + # using advanced indexing to have 1d array result + # see: https://docs.xarray.dev/en/stable/user-guide/interpolation.html#advanced-interpolation + x = xr.DataArray(lons, dims="z") + y = xr.DataArray(lats, dims="z") + + # gets elevation + # default linear interpolation + elevation = topo_grid.interp(lon=x,lat=y) + # interpolated by nearest neighbor method - faster, but less accurate + #elevation = topo_grid.interp(lon=x,lat=y,method="nearest") + + # extract simple float value from returned xarray object + building_elevations = elevation.data + + print(" building elevations:") + print(" number of elevations = {}".format(len(building_elevations))) + print(" min/max = {} / {}".format(building_elevations.min(),building_elevations.max())) + print("") + + # check elevations are valid + for i in range(len(building_elevations)): + ele = building_elevations[i] + # checks nan values + if np.isnan(ele): + print("Error: building {} has invalid elevation {} at lon/lat {}/{}".format(i,building_elevations[i],lons[i],lats[i])) + sys.exit(1) + + return building_elevations + + +# +#---------------------------------------------------------------------------------------- +# + +def create_3d_buildings(gdf,topo_grid,mesh_area): + """ + creates for each building a 3d mesh and stores all as .ply output file + """ + global building_height_min + + # mesh area min/max + lon_min = mesh_area[0] + lat_min = mesh_area[1] + lon_max = mesh_area[2] + lat_max = mesh_area[3] + + # makes an initial conversion to UTM to get transformer + center_lon = 0.5 * (lon_min + lon_max) + center_lat = 0.5 * (lat_min + lat_max) + # convert lon/lat to UTM + center_utm_x,center_utm_y = convert_coordinates_to_UTM(gdf,mesh_area,center_lon,center_lat) + + # buildings + num_buildings = gdf.shape[0] + + print("creating 3D buildings...") + print(" total number of buildings: ",num_buildings) + print("") + + # gets all elevations for the building bases + building_elevations = determine_building_elevations(gdf,topo_grid,mesh_area) + + # 3d buildings + print(" creating 3d buildings") + print("") + + # meshes for .ply output + meshes = [] + + # number to output in steps of 10/100/1000 depending on how large the number of polyfaces is + if num_buildings > 100000: + noutput_info = min(10000,int(10**np.floor(np.log10(num_buildings)))) + else: + noutput_info = min(1000,int(10**np.floor(np.log10(num_buildings)))) + + # timing + tic = time.perf_counter() + + # Iterate over each row in the GeoDataFrame + icount = 0 + icount_added = 0 + for index, obj in gdf.iterrows(): + # counter + icount += 1 + + # user output + if icount % noutput_info == 0: + # timing + toc = time.perf_counter() + print(" building: {} out of {} - elapsed time {:0.2f} s".format(icount,num_buildings,(toc-tic))) + + # gets topographic base elevation of the building + elevation = building_elevations[icount-1] + + # creates a mesh of the building + meshes, icount_added = create_building_mesh(gdf,mesh_area,elevation,obj,meshes,icount_added) + + + print("") + print(" done. 
added {} building meshes".format(icount_added)) + print("") + + # create a reference sphere for checking lat/lon positioning + if 1 == 0: + b_mesh = trimesh.creation.uv_sphere(radius=50.0) + # Define the desired position as a 3D vector (x, y, z) + lon = 12.5 # station X1 or center_lon + lat = 42.0 # center_lat + # convert lon/lat to UTM + x,y = convert_coordinates_to_UTM(gdf,mesh_area,lon,lat) + # gets base elevation (interpolated by nearest neighbor method) + xele = topo_grid.interp(lon=lon,lat=lat,method="nearest") + z = float(xele.data) # converts to single float value + if np.isnan(z): z = 0.0 + desired_position = [x,y,z] + # Create a translation matrix based on the desired position + translation_matrix = trimesh.transformations.translation_matrix(desired_position) + # Apply the translation matrix to the sphere + b_mesh.apply_transform(translation_matrix) + # adds to all meshes + meshes.append(b_mesh) + + # combine all meshes into single mesh + mesh = trimesh.util.concatenate(meshes) + + # remove degenerate faces + mesh.update_faces(mesh.nondegenerate_faces()) + + # remove vertices not needed + mesh.remove_unreferenced_vertices() + + # removes duplicate vertices + mesh.merge_vertices() + + # user output + print("mesh:") + print(" total number of vertices = ",len(mesh.vertices)) + print(" total number of faces = ",len(mesh.faces)) + print("") + + # check + if len(mesh.vertices) == 0 or len(mesh.faces) == 0: + print("") + print("Mesh is empty, there's nothing to save. Please check your inputs...") + print("") + sys.exit(1) + + # user output + print("") + print("mesh dimensions:") + xmin,ymin,zmin = mesh.bounds[:][0] + xmax,ymax,zmax = mesh.bounds[:][1] + print(" bounding box : x min/max = {} / {}".format(xmin,xmax)) + print(" y min/max = {} / {}".format(ymin,ymax)) + print(" z min/max = {} / {}".format(zmin,zmax)) + print("") + + # Save the mesh as a PLY file + filename = './output_buildings_utm.ply' + mesh.export(file_obj=filename); + + print("written: ",filename) + print("") + +# +#---------------------------------------------------------------------------------------- +# + +def convert_openstreetmap_buildings(input_file=None,mesh_origin=None,mesh_scale=None,mesh_area=None): + + # user output + print("") + print("convert OpenStreetMap buildings") + + if len(input_file) > 0: + print(" input file : ",input_file) + if not mesh_origin is None: + print(" mesh_origin : ",mesh_origin) + if not mesh_scale is None: + print(" mesh_scale : ",mesh_scale) + if not mesh_area is None: + print(" mesh_area : ",mesh_area) + + print("") + + # current directory + dir = os.getcwd() + print(" current directory: ",dir) + print("") + + # input data + if len(input_file) == 0: + # download buildings + gdf = download_buildings(mesh_area) + else: + # reads in buildings and loads GeoJSON format file into a GeoDataFrame object + gdf = gpd.read_file(input_file) + + # prepare gdf data array and validate entries + gdf = prepare_osm_building_data(gdf) + + # download topography for elevations + topo_grid = get_topo_DEM(mesh_area) + + # make 3d buildings + create_3d_buildings(gdf,topo_grid,mesh_area) + + +# +#---------------------------------------------------------------------------------------- +# + +def usage(): + print("usage: ./convert_openstreetmap_buildings_to_dxf.py [--mesh_area=(lon_min,lat_min,lon_max,lat_max)] [--mesh_origin=(x0,y0,z0)] [--mesh_scale=factor] [--GeoJSON_file=file]") + print(" with") + print(" --mesh_area - downloads/limits buildings for area specified by (lon_min,lat_min,lon_max,lat_max)") + 
print(" --mesh_origin - translate mesh by origin position") + print(" --mesh_scale - scale mesh by a factor") + print(" --GeoJSON_file - use input mesh file (.gjson) instead of download") + sys.exit(1) + + +if __name__ == '__main__': + # init + input_file = "" + mesh_origin = None + mesh_scale = None + mesh_area = None + + # reads arguments + #print("\nnumber of arguments: " + str(len(sys.argv))) + if len(sys.argv) <= 1: usage() + + i = 0 + for arg in sys.argv: + i += 1 + #print("argument "+str(i)+": " + arg) + # get arguments + if "--help" in arg: + usage() + elif "--dxf_file=" in arg: + input_file = arg.split('=')[1] + elif "--dwg_file=" in arg: + input_file = arg.split('=')[1] + elif "--mesh_origin=" in arg: + str_array = arg.split('=')[1] + mesh_origin = np.array([float(val) for val in str_array.strip('()[]').split(',')]) + elif "--mesh_scale=" in arg: + mesh_scale = float(arg.split('=')[1]) + elif "--mesh_area=" in arg: + str_array = arg.split('=')[1] + mesh_area = np.array([float(val) for val in str_array.strip('()[]').split(',')]) + elif i >= 6: + print("argument not recognized: ",arg) + + # logging + cmd = " ".join(sys.argv) + filename = './convert_openstreetmap_buildings_to_mesh.log' + with open(filename, 'a') as f: + print("command call --- " + str(datetime.datetime.now()),file=f) + print(cmd,file=f) + print("command logged to file: " + filename) + + # main routine + convert_openstreetmap_buildings(input_file,mesh_origin,mesh_scale,mesh_area) diff --git a/utils/Visualization/Blender/python_blender/plot_with_blender.py b/utils/Visualization/Blender/python_blender/plot_with_blender.py index 743beb81d..bd845513d 100755 --- a/utils/Visualization/Blender/python_blender/plot_with_blender.py +++ b/utils/Visualization/Blender/python_blender/plot_with_blender.py @@ -75,6 +75,29 @@ mesh_scale_factor = 1.0 mesh_origin = [0.0,0.0,0.0] +# baseline shift for buildings (to move above SPECFEM mesh) +shift_building_baseline = 0.0 # 0.0==no-shift default, or e.g. 
--shift-building-baseline=0.0005 + +# rendering output +suppress_renderer_output = False + +# animation +use_animation_dive_in = True +use_animation_rotation = True + +# camera positioning +camera_elevation_offset = 0.15 +camera_y_offset = -0.7 +camera_angle_depth_degree = 75.0 + +# center point to focus camera view on +x_center = 0.0 +y_center = 0.0 +z_center = 0.0 + +# elevation at center point +z_elevation = 0.0 + # class to avoid long stdout output by renderer # see: https://stackoverflow.com/questions/24277488/in-python-how-to-capture-the-stdout-from-a-c-shared-library-to-a-variable/29834357 @@ -109,6 +132,7 @@ def convert_vtk_to_obj(vtk_file,colormap=0,color_max=None): # check file if len(vtk_file) == 0: print("Error: no vtk file specified...") + usage() sys.exit(1) if not os.path.exists(vtk_file): @@ -971,6 +995,9 @@ def create_blender_setup(obj_file=""): def add_blender_buildings(buildings_file=""): + """ + adds buildings given by input .ply file + """ global mesh_origin,mesh_scale_factor global use_cycles_renderer @@ -1044,6 +1071,13 @@ def add_blender_buildings(buildings_file=""): bm.to_mesh(mesh) bm.free() + # baseline shift + if shift_building_baseline != 0.0: + print(" shifting buildings up: factor = ",shift_building_baseline) + print(" in m = ",shift_building_baseline / mesh_scale_factor) + print("") + obj.location = (0, 0, shift_building_baseline) + # Create a new material mat = bpy.data.materials.new('buildingsMaterial') @@ -1118,18 +1152,16 @@ def add_blender_buildings(buildings_file=""): print(" blender buildings mesh done") print("") -def get_mesh_elevation_at_origin(): + +def get_mesh_elevation(point_of_interest): + """ + determines elevation of object (obj) at a given point by ray intersection + """ # gets vtk mesh object obj = bpy.data.objects['output'] if obj == None: - print("Error: no mesh object in blender available, exiting...") - sys.exit(1) - - # object is a mesh - mesh = obj.data - - # Define the point of interest (X, Y, Z coordinates) - point_of_interest = Vector((0.0, 0.0, 0.0)) # origin + print("Info: no mesh object in blender available to determine elevation...") + return None # Define the origin of the ray (above the mesh) ray_origin = Vector((point_of_interest.x, point_of_interest.y, 10.0)) # Adjust the Z coordinate as needed @@ -1148,43 +1180,50 @@ def get_mesh_elevation_at_origin(): if success: # Get the elevation (Z coordinate) of the mesh at the point of interest - elevation = location.z - print(" mesh elevation at origin = ", elevation) - print("") + return location.z else: - # sets a default height - elevation = 0.1 - print(" mesh elevation at origin: no intersection found") - print(" setting default = ",elevation) - print("") + return None - return elevation +def get_mesh_elevation_at_origin(): + """ + determines elevation of mesh at origin/center point + """ + global x_center,y_center,z_center -def save_blender_scene(title="",animation=False,close_up_view=False): - global blender_img_resolution_X,blender_img_resolution_Y - global use_cycles_renderer - global use_transparent_sea_level_plane + print(" origin center = {} / {} / {}".format(x_center,y_center,z_center)) - ## blender scene setup - print("Setting up blender scene...") - print("") + # Define the point of interest (X, Y, Z coordinates) + point = Vector((x_center, y_center, z_center)) # origin - # determine elevation from mesh at origin point to position the camera - elevation = get_mesh_elevation_at_origin() + # get elevation + elevation = get_mesh_elevation(point) - # elevation offset for 
camera positioning - elevation_offset = 0.15 - camera_angle_depth_degree = 75.0 - if title == 'Lauterbrunnen' or title == 'Zermatt': - elevation_offset = 0.25 - camera_angle_depth_degree = 70.0 + # checks if valid + if not elevation is None: + # got an elevation + print(" mesh elevation at origin = ", elevation) + else: + # sets a default height + elevation = 0.1 + print(" mesh elevation at origin: no intersection found") + print(" setting default = ",elevation) + + return elevation + +def add_camera(title="",close_up_view=False): + """ + adds camera object + """ + global x_center,y_center,z_center + global camera_y_offset,camera_elevation_offset,camera_angle_depth_degree + global z_elevation + + print(" adding camera") # gets scene scene = bpy.context.scene - ## camera - print(" adding camera") # adds camera position bpy.ops.object.camera_add(enter_editmode=False, align='VIEW') # current object @@ -1192,11 +1231,96 @@ def save_blender_scene(title="",animation=False,close_up_view=False): cam.name = "Camera" #cam = bpy.data.objects["Camera"] scene.camera = cam + # clip range + # note: our objects are scaled between [-1,1]. + # a clip end plane around 10 should be sufficient to cover the whole area. + # for the clip start, it is more challenging as choosing a value below 0.1 will lead to depth Z-buffer artifacts. + # to avoid this, for an overview scene, the default value of 0.1 is good, when zooming in into close-up views, + # a clip start around 0.01 is usually better, otherwise the front of the scene will be cut-off. + scene.camera.data.clip_start = 0.1 # blender default: 0.1 + scene.camera.data.clip_end = 10.0 # blender default: 1000 + #scene.camera.data.lens = 70.0 + + # determine elevation from mesh at origin point to position the camera + z_elevation = get_mesh_elevation_at_origin() + + # elevation offset for camera positioning + camera_elevation_offset = 0.15 + camera_y_offset = -0.7 + camera_angle_depth_degree = 75.0 + + # specifics + if title == 'Lauterbrunnen' or title == 'Zermatt': + print(" camera setup for Lauterbrunnen/Zermatt") + camera_elevation_offset = 0.25 + camera_angle_depth_degree = 70.0 + + # center point to focus camera view on + x_center = 0.0 + y_center = 0.0 + z_center = 0.0 + + ## Rome colosseum + if title == "Rome": + print(" camera setup for Rome") + camera_elevation_offset = 0.0018 + camera_y_offset = -0.013 + camera_angle_depth_degree = 82.0 + # use colosseum position as center of views + if 1 == 1: + # colosseum: N 41.890258 E 12.492335 + # 33 T -> utm 291957.383 4640632.710 + x_center_utm = 291957.383; y_center_utm = 4640632.710 + # need to translate and scale the UTM position to place it within the scene + point = Vector((x_center_utm, y_center_utm, 0.0)) + translation_vector = Vector(mesh_origin) + point -= translation_vector + point *= mesh_scale_factor + # set center position + x_center = point.x + y_center = point.y + else: + # directly from using cursor position in blender + x_center = -0.01251 + y_center = -0.2263 + z_center = 0.0018 + + # get elevation at x/y position + point = Vector((x_center, y_center, 0.0)) # origin + center_elevation = get_mesh_elevation(point) + if not center_elevation is None: + z_elevation = center_elevation + else: + z_elevation = 0.001 + + #cam.location = (-0.02251, -0.24362, 0.00632) + #cam.rotation_euler = (83.401 * DEGREE_TO_RAD, 0.000133 * DEGREE_TO_RAD, -28.0 * DEGREE_TO_RAD) + + # metallic material for buildings + if not use_cycles_renderer: + if 'buildingsMaterial' in bpy.data.materials.keys(): + mat = 
bpy.data.materials['buildingsMaterial'] + nodes = mat.node_tree.nodes + mat_node = nodes['Glossy BSDF'] + mat_node.inputs['Roughness'].default_value = 0.25 # shinier, more metallic + print(" using metallic buildings") + if close_up_view: ## close-up view - print(" camera location relative to mesh elevation: ",elevation) + # adjust clip range + scene.camera.data.clip_start = 0.01 + #scene.camera.data.clip_end = 10.0 + #scene.camera.data.lens = 70.0 + # camera location + x_cam = x_center + y_cam = y_center + camera_y_offset + z_cam = z_center + z_elevation + camera_elevation_offset + print(" close-up scene") + print(" center point : x/y/z = {:.6f} / {:.6f} / {:.6f}".format(x_center,y_center,z_center)) + print(" camera location: x/y/z = {:.6f} / {:.6f} / {:.6f}".format(x_cam,y_cam,z_cam)) + print(" camera location relative to mesh elevation: ",z_elevation) # Set camera translation - scene.camera.location = (0, -0.7, elevation + elevation_offset) + scene.camera.location = (x_cam,y_cam,z_cam) # Set camera rotation in euler angles scene.camera.rotation_mode = 'XYZ' scene.camera.rotation_euler = (camera_angle_depth_degree * DEGREE_TO_RAD, 0, 0) @@ -1204,15 +1328,26 @@ def save_blender_scene(title="",animation=False,close_up_view=False): scene.camera.data.angle = float(30.0 * DEGREE_TO_RAD) else: ## overview + # camera location + x_cam = x_center + y_cam = y_center - 4.0 + z_cam = z_center + 4.0 + print(" overview scene") + print(" center point : x/y/z = {:.6f} / {:.6f} / {:.6f}".format(x_center,y_center,z_center)) + print(" camera location: x/y/z = {:.6f} / {:.6f} / {:.6f}".format(x_cam,y_cam,z_cam)) # Set camera translation - scene.camera.location = (0, -4, 4) + scene.camera.location = (x_cam,y_cam,z_cam) # Set camera rotation in euler angles scene.camera.rotation_mode = 'XYZ' scene.camera.rotation_euler = (44.0 * DEGREE_TO_RAD, 0, 0) # Set camera fov in degrees scene.camera.data.angle = float(30.0 * DEGREE_TO_RAD) - ## light + +def add_light(): + """ + adds a light to the scene + """ print(" adding light") # adds sun position bpy.ops.object.light_add(type='AREA') @@ -1234,7 +1369,13 @@ def save_blender_scene(title="",animation=False,close_up_view=False): light.data.size = 0.1 light.data.use_contact_shadow = True # Enable contact shadows - ## background plane + +def add_plane(close_up_view=False): + """ + adds a plane at sea-level + """ + global use_transparent_sea_level_plane + print(" adding sea-level plane") # Create a mesh plane (to capture shadows and indicate sea level) bpy.ops.mesh.primitive_plane_add(size=10, enter_editmode=False, location=(0, 0, 0)) @@ -1246,6 +1387,7 @@ def save_blender_scene(title="",animation=False,close_up_view=False): mat = bpy.data.materials.new(name="White") # adds transparency to the plane if use_transparent_sea_level_plane: + print(" using transparent sea-level") # Set the material to use a Principled BSDF shader mat.use_nodes = True principled_bsdf = mat.node_tree.nodes.get('Principled BSDF') @@ -1268,7 +1410,11 @@ def save_blender_scene(title="",animation=False,close_up_view=False): plane_object.data.materials.append(mat) - # text object + +def add_title(title=""): + """ + adds a title text + """ if len(title) > 0: print(" adding text object:") print(" title = ",title) @@ -1300,10 +1446,18 @@ def save_blender_scene(title="",animation=False,close_up_view=False): text_object.data.materials.append(text_material) - # save scene and render options - print("") - print(" saving blender scene...") +def set_scene(): + """ + sets scene options + """ + global 
blender_img_resolution_X,blender_img_resolution_Y + global use_cycles_renderer + print("") + print(" setting scene options") + + # gets scene + scene = bpy.context.scene # renderer options if use_cycles_renderer: @@ -1361,239 +1515,336 @@ def save_blender_scene(title="",animation=False,close_up_view=False): # sets white background color bpy.data.worlds["World"].node_tree.nodes["Background"].inputs[0].default_value = (1, 1, 1, 1) - print(" renderer: ",scene.render.engine) + print(" renderer: ",scene.render.engine) print("") - if animation: - print("animation:") - # initializes frames - number_of_keyframes = 0 - total_frames = 0 +def render_animation(): + """ + renders a movie animation + """ + global use_animation_dive_in,use_animation_rotation + global suppress_renderer_output + global camera_elevation_offset,camera_y_offset,camera_angle_depth_degree + global z_elevation - start_frame = -1 - end_frame = -1 + print("animation:") - scene.frame_start = -1 - scene.frame_end = -1 + # gets scene + scene = bpy.context.scene - ## dive-in keyframes - if 1 == 1: - # setup keyframes - number_of_keyframes += 4 - keyframe_interval = 20 + # initializes frames + number_of_keyframes = 0 + total_frames = 0 - total_frames = (number_of_keyframes-1) * keyframe_interval + start_frame = -1 + end_frame = -1 - print(" dive-in:") - print(" keyframe interval = ",keyframe_interval) - print(" number of keyframes = ",number_of_keyframes) - print("") + scene.frame_start = -1 + scene.frame_end = -1 + + ## dive-in keyframes + if use_animation_dive_in: + # setup keyframes + number_of_keyframes += 4 + keyframe_interval = 20 + + total_frames = (number_of_keyframes-1) * keyframe_interval + + print(" dive-in:") + print(" keyframe interval = ",keyframe_interval) + print(" number of keyframes = ",number_of_keyframes) + print("") + + # define a frame timeline + start_frame = 0 + inter_frame_1 = keyframe_interval + inter_frame_2 = 2*keyframe_interval + end_frame = total_frames + + scene.frame_start = start_frame + scene.frame_end = end_frame + + # moves camera + cam = bpy.data.objects["Camera"] + + # 1. start frame + cam.keyframe_insert("location", frame=start_frame) # (0, -4, 4) + cam.keyframe_insert("rotation_euler", frame=start_frame) # (44.0 * DEGREE_TO_RAD, 0, 0) + + # 2. intermediate frame + cam.location = (0, -2.5, 1) + cam.rotation_euler = (60.0 * DEGREE_TO_RAD, 0, 0) + cam.keyframe_insert("location", frame=inter_frame_1) + cam.keyframe_insert("rotation_euler", frame=inter_frame_1) + # adjust clip range + cam.data.clip_start = 0.1 + cam.data.keyframe_insert("clip_start", frame=inter_frame_1) + #cam.data.clip_end = 10.0 + #cam.data.keyframe_insert("clip_end", frame=inter_frame_1) + + # 3. intermediate frame + x_cam = 0 + y_cam = -1.2 + z_cam = z_center + z_elevation + camera_elevation_offset + cam.location = (x_cam, y_cam, z_cam) + cam.rotation_euler = (camera_angle_depth_degree * DEGREE_TO_RAD, 0, 0) + cam.keyframe_insert("location", frame=inter_frame_2) + cam.keyframe_insert("rotation_euler", frame=inter_frame_2) + # adjust clip range + cam.data.clip_start = 0.01 + cam.data.keyframe_insert("clip_start", frame=inter_frame_2) + #cam.data.clip_end = 10.0 + #cam.data.keyframe_insert("clip_end", frame=inter_frame_2) + + # 4. 
end frame + x_cam = x_center + y_cam = y_center + camera_y_offset + z_cam = z_center + z_elevation + camera_elevation_offset + cam.location = (x_cam, y_cam, z_cam) # Ending position + cam.rotation_euler = (camera_angle_depth_degree * DEGREE_TO_RAD, 0, 0) + cam.keyframe_insert("location", frame=end_frame) + cam.keyframe_insert("rotation_euler", frame=end_frame) + + ## rotation + if use_animation_rotation: + # appends frames for rotation + frames = 60 # Number of frames for the animation + keyframe_interval = 5 + number_of_keyframes += frames + + total_frames += frames * keyframe_interval + + print(" rotation:") + print(" keyframe interval = ",keyframe_interval) + print(" number of keyframes = ",frames) + print("") - # define a frame timeline + if start_frame == -1: start_frame = 0 - inter_frame_1 = keyframe_interval - inter_frame_2 = 2*keyframe_interval - end_frame = total_frames - - scene.frame_start = start_frame - scene.frame_end = end_frame - - # moves camera - cam = bpy.data.objects["Camera"] - - # start frame - cam.keyframe_insert("location", frame=start_frame) # (0, -4, 4) - cam.keyframe_insert("rotation_euler", frame=start_frame) # (44.0 * DEGREE_TO_RAD, 0, 0) - - # intermediate frame - cam.location = (0, -2.5, 1) - cam.rotation_euler = (60.0 * DEGREE_TO_RAD, 0, 0) - cam.keyframe_insert("location", frame=inter_frame_1) - cam.keyframe_insert("rotation_euler", frame=inter_frame_1) - - # intermediate frame - cam.location = (0, -1.2, elevation + elevation_offset) - cam.rotation_euler = (camera_angle_depth_degree * DEGREE_TO_RAD, 0, 0) - cam.keyframe_insert("location", frame=inter_frame_2) - cam.keyframe_insert("rotation_euler", frame=inter_frame_2) - - # end frame - cam.location = (0, -0.7, elevation + elevation_offset) # Ending position - cam.rotation_euler = (camera_angle_depth_degree * DEGREE_TO_RAD, 0, 0) - cam.keyframe_insert("location", frame=end_frame) - cam.keyframe_insert("rotation_euler", frame=end_frame) - - ## rotation - if 1 == 1: - # appends frames for rotation - frames = 60 # Number of frames for the animation - keyframe_interval = 5 - number_of_keyframes += frames + else: + start_frame = end_frame + 1 * keyframe_interval - total_frames += frames * keyframe_interval + if end_frame == -1: + end_frame = total_frames + else: + end_frame = total_frames + 1 * keyframe_interval + + # updates end frame count + scene.frame_end = end_frame + + # Set the rotation parameters + rotation_degrees = 360 # Total rotation in degrees + + # moves camera around the z-axis + cam = bpy.data.objects['Camera'] + + #if scene.frame_start == -1: + # # add start frame + # scene.frame_start = start_frame + # # start frame + # cam.keyframe_insert("location", frame=start_frame) # (0, -4, 4) + # cam.keyframe_insert("rotation_euler", frame=start_frame) # (44.0 * DEGREE_TO_RAD, 0, 0) + + # camera offset from z-axis + x_0 = cam.location[0] + y_0 = cam.location[1] + z_0 = cam.location[2] + + offset_distance = ((x_0-x_center)**2 + (y_0-y_center)**2)**0.5 + angle_0 = atan2(y_0-y_center,x_0-x_center) + + # adjust clip range since rotations will be done after dive-in in more close-up views + # to check if we zoomed in, we will use the camera's z-position which for an overview is set to +4 + if z_0 < 2.0: + cam.data.clip_start = 0.01 + #cam.data.clip_end = 10.0 + + # Set keyframes for rotation + for frame in range(0,frames+1): + angle = angle_0 + radians(frame * (rotation_degrees / frames)) + + # Calculate camera position around the Z-axis + x = offset_distance * cos(angle) + y = offset_distance * sin(angle) + + 
# rotate around center location + x += x_center + y += y_center + + # new camera position + camera_location = Vector((x, y, z_0)) # Maintain Z position + + # Set camera rotation towards the origin + look_at = Vector((x_center, y_center, z_center + z_elevation)) # Origin point on mesh + + # gets camera rotation angle + direction = look_at - camera_location + rot_quat = direction.to_track_quat('-Z', 'Y') # Point -Z axis to direction + camera_rotation = rot_quat.to_euler() + + # there seems to be a problem with frame interpolation when the angles going past 2 PI, i.e, + # having the angle set between [-pi,pi] from the to_euler() output and interpolating a step + # between +pi to -pi. to avoid we always increase the angle going from [0,2pi[ + # + # in our case here, we change & increase only the third euler angle, while the first 2 stay fixed + # when the camera rotates around the z-axis. + if camera_rotation[2] < 0.0: camera_rotation[2] += 2.0 * PI + + # checks difference in current camera rotation and new target rotation + # makes sure that z-angle always increases from one frame to another, + # otherwise the linear interpolation will between frames will lead to weird results + if cam.rotation_euler[2] > camera_rotation[2]: camera_rotation[2] += 2.0 * PI + + # info + if frame == 0: + print(" initial frame: camera angles = {:.2f} / {:.2f} / {:.2f} ".format(np.rad2deg(camera_rotation.x), + np.rad2deg(camera_rotation.y),np.rad2deg(camera_rotation.z))) + print("") + + frame_number = start_frame + frame * keyframe_interval + scene.frame_set(frame_number) + + # Set camera location and keyframe for each frame + cam.rotation_euler = camera_rotation + cam.location = camera_location + + cam.keyframe_insert("location", frame=frame_number) + cam.keyframe_insert("rotation_euler", frame=frame_number) + + #debug + #print("frame: ",frame,frame_number,"angle",angle,cam.location,"rotation",cam.rotation_euler) + + print(" total number of keyframes = ",number_of_keyframes) + print(" total number of frames = ",total_frames) + print("") - print(" rotation:") - print(" keyframe interval = ",keyframe_interval) - print(" number of keyframes = ",frames) - print("") + # smoothing move + #for fcurve in cam.animation_data.action.fcurves: + # for keyframe_point in fcurve.keyframe_points: + # keyframe_point.interpolation = 'LINEAR' + + # render settings + # see: https://docs.blender.org/api/current/bpy.types.FFmpegSettings.html#bpy.types.FFmpegSettings + # ffmpeg + scene.render.image_settings.file_format = 'FFMPEG' # 'AVI_JPEG', 'FFMPEG' + # codec + scene.render.ffmpeg.codec = 'H264' # compression: 'MPEG4', 'H264' + #scene.render.ffmpeg.constant_rate_factor = 'HIGH' + scene.render.ffmpeg.format = 'MPEG4' # output file container format + # frames per second (blender default is 24) + scene.render.fps = 20 + + # user output + print(" movie fps = ",scene.render.fps) + print(" movie format = ",scene.render.ffmpeg.format ) + print("") - if start_frame == -1: - start_frame = 0 - else: - start_frame = end_frame + 1 * keyframe_interval + # output movie + name = './out.anim' + filename = name + '.mp4' + scene.render.filepath = filename + scene.render.use_file_extension = False - if end_frame == -1: - end_frame = total_frames - else: - end_frame = total_frames + 1 * keyframe_interval + # timing + tic = time.perf_counter() - # updates end frame count - scene.frame_end = end_frame + # redirect stdout to null + print("rendering animation: {} ...".format(filename)) + print("") + # to avoid long stdout output by the renderer: + # Fra:1 
Mem:189.14M (Peak 190.26M) | Time:00:00.68 | Syncing Sun + # Fra:1 Mem:189.14M (Peak 190.26M) | Time:00:00.68 | Syncing Camera + # .. + suppress = suppress_renderer_output + with SuppressStream(sys.stdout,suppress): + # render animation + bpy.ops.render.render(animation=True) + print(" written to: ",filename) + print("") - # Set the rotation parameters - rotation_degrees = 360 # Total rotation in degrees + # timing + toc = time.perf_counter() + if toc - tic < 100.0: + print("elapsed time for animation render is {:0.4f} seconds\n".format(toc - tic)) + else: + min = int((toc-tic)/60.0) + sec = (toc - tic) - min * 60 + print("elapsed time for animation render is {} min {:0.4f} sec\n".format(min,sec)) - # moves camera around the z-axis - cam = bpy.data.objects['Camera'] - #if scene.frame_start == -1: - # # add start frame - # scene.frame_start = start_frame - # # start frame - # cam.keyframe_insert("location", frame=start_frame) # (0, -4, 4) - # cam.keyframe_insert("rotation_euler", frame=start_frame) # (44.0 * DEGREE_TO_RAD, 0, 0) +def render_image(): + """ + renders a single image + """ + global suppress_renderer_output - # camera offset from z-axis - x_0 = cam.location[0] - y_0 = cam.location[1] - z_0 = cam.location[2] + print("image:") - offset_distance = (x_0**2 + y_0**2)**0.5 - angle_0 = atan2(y_0,x_0) + # gets scene + scene = bpy.context.scene - # Set keyframes for rotation - for frame in range(0,frames+1): - angle = angle_0 + radians(frame * (rotation_degrees / frames)) + # output image + scene.render.image_settings.file_format = 'JPEG' # 'PNG' + name = './out' + filename = name + '.jpg' + scene.render.filepath = filename + scene.render.use_file_extension = False - # Calculate camera position around the Z-axis - x = offset_distance * cos(angle) - y = offset_distance * sin(angle) - camera_location = Vector((x, y, z_0)) # Maintain Z position + # timing + tic = time.perf_counter() - # Set camera rotation towards the origin - look_at = Vector((0, 0, elevation)) # Origin point on mesh - direction = look_at - camera_location - rot_quat = direction.to_track_quat('-Z', 'Y') # Point -Z axis to direction - camera_rotation = rot_quat.to_euler() + # redirect stdout to null + print(" rendering image: {} ...".format(filename)) + print("") + # to avoid long stdout output by the renderer: + # Fra:1 Mem:462.85M (Peak 462.85M) | Time:00:00.04 | .. + # .. + suppress = suppress_renderer_output + with SuppressStream(sys.stdout,suppress): + # Render Scene and store the scene + bpy.ops.render.render(write_still=True) + print(" written to: ",filename) + print("") - # there seems to be a problem with frame interpolation when the angles going past 2 PI, i.e, - # having the angle set between [-pi,pi] from the to_euler() output and interpolating a step - # between +pi to -pi. to avoid we always increase the angle going from [0,2pi[ - # - # in our case here, we change & increase only the third euler angle, while the first 2 stay fixed - # when the camera rotates around the z-axis. 
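The caveat spelled out in these comments (to_euler() returns angles in [-pi, pi], so a keyframe step from just below +pi to just above -pi makes the interpolated camera swing the wrong way) amounts to unwrapping the euler z-angle so it only increases from one keyframe to the next. Below is a minimal bpy-free sketch of that unwrapping, mirroring the two adjustments applied in render_animation(); the sample angles are made up for illustration.

```python
from math import pi

def unwrap_increasing(z_angles):
    """Lift euler z-angles so the sequence is non-decreasing, avoiding a backwards
    swing when linear keyframe interpolation crosses the +pi/-pi seam."""
    unwrapped = []
    previous = None
    for angle in z_angles:
        # to_euler() yields values in [-pi, pi]; lift negatives into [0, 2*pi)
        if angle < 0.0:
            angle += 2.0 * pi
        # keep the angle monotonically increasing relative to the previous keyframe
        if previous is not None:
            while angle < previous:
                angle += 2.0 * pi
        unwrapped.append(angle)
        previous = angle
    return unwrapped

# camera orbiting past the +pi/-pi seam (radians)
raw = [2.9, 3.1, -3.1, -2.9]
print(unwrap_increasing(raw))   # [2.9, 3.1, ~3.18, ~3.38] -- strictly increasing
```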
- if camera_rotation[2] < 0: camera_rotation[2] += 2.0 * PI + # timing + toc = time.perf_counter() + if toc - tic < 100.0: + print("elapsed time for image render is {:0.4f} seconds\n".format(toc - tic)) + else: + min = int((toc-tic)/60.0) + sec = (toc - tic) - min * 60 + print("elapsed time for image render is {} min {:0.4f} sec\n".format(min,sec)) - frame_number = start_frame + frame * keyframe_interval - scene.frame_set(frame_number) - # Set camera location and keyframe for each frame - cam.rotation_euler = camera_rotation - cam.location = camera_location +def render_blender_scene(title="",animation=False,close_up_view=False): - cam.keyframe_insert("location", frame=frame_number) - cam.keyframe_insert("rotation_euler", frame=frame_number) + ## blender scene setup + print("Setting up blender scene...") + print("") - #print("frame: ",frame,frame_number,"angle",angle,cam.location,"rotation",cam.rotation_euler) + ## camera + add_camera(title,close_up_view) - print(" total number of keyframes = ",number_of_keyframes) - print(" total number of frames = ",total_frames) - print("") + ## light + add_light() - # smoothing move - #for fcurve in cam.animation_data.action.fcurves: - # for keyframe_point in fcurve.keyframe_points: - # keyframe_point.interpolation = 'LINEAR' - - # render settings - # see: https://docs.blender.org/api/current/bpy.types.FFmpegSettings.html#bpy.types.FFmpegSettings - # ffmpeg - scene.render.image_settings.file_format = 'FFMPEG' # 'AVI_JPEG', 'FFMPEG' - # codec - scene.render.ffmpeg.codec = 'H264' # compression: 'MPEG4', 'H264' - #scene.render.ffmpeg.constant_rate_factor = 'HIGH' - scene.render.ffmpeg.format = 'MPEG4' # output file container format - # frames per second (blender default is 24) - scene.render.fps = 20 - - # user output - print(" movie fps = ",scene.render.fps) - print(" movie format = ",scene.render.ffmpeg.format ) - print("") + ## background plane + add_plane(close_up_view) - # output movie - name = './out.anim' - filename = name + '.mp4' - scene.render.filepath = filename - scene.render.use_file_extension = False - - # timing - tic = time.perf_counter() - - # redirect stdout to null - print("rendering animation: {} ...".format(filename)) - # to avoid long stdout output by the renderer: - # Fra:1 Mem:189.14M (Peak 190.26M) | Time:00:00.68 | Syncing Sun - # Fra:1 Mem:189.14M (Peak 190.26M) | Time:00:00.68 | Syncing Camera - # .. - suppress = False - with SuppressStream(sys.stdout,suppress): - # render animation - bpy.ops.render.render(animation=True) - print(" written to: ",filename) - print("") + # text object + add_title(title) - # timing - toc = time.perf_counter() - if toc - tic < 100.0: - print("elapsed time for animation render is {:0.4f} seconds\n".format(toc - tic)) - else: - min = int((toc-tic)/60.0) - sec = (toc - tic) - min * 60 - print("elapsed time for animation render is {} min {:0.4f} sec\n".format(min,sec)) + # save scene and render options + set_scene() + # rendering + if animation: + # movie rendering + render_animation() else: - # output image - scene.render.image_settings.file_format = 'JPEG' # 'PNG' - name = './out' - filename = name + '.jpg' - scene.render.filepath = filename - scene.render.use_file_extension = False - - # timing - tic = time.perf_counter() - - # redirect stdout to null - print(" rendering image: {} ...".format(filename)) - # to avoid long stdout output by the renderer: - # Fra:1 Mem:462.85M (Peak 462.85M) | Time:00:00.04 | .. - # .. 
- suppress = False - with SuppressStream(sys.stdout,suppress): - # Render Scene and store the scene - bpy.ops.render.render(write_still=True) - print(" written to: ",filename) - print("") - - # timing - toc = time.perf_counter() - if toc - tic < 100.0: - print("elapsed time for image render is {:0.4f} seconds\n".format(toc - tic)) - else: - min = int((toc-tic)/60.0) - sec = (toc - tic) - min * 60 - print("elapsed time for image render is {} min {:0.4f} sec\n".format(min,sec)) + # image rendering + render_image() # save blend file dir = os.getcwd() @@ -1627,12 +1878,12 @@ def plot_with_blender(vtk_file="",image_title="",colormap=0,color_max=None,build add_blender_buildings(buildings_file) # save blender scene - save_blender_scene(image_title,animation,close_up_view) + render_blender_scene(image_title,animation,close_up_view) def usage(): print("usage: ./plot_with_blender.py [--vtk_file=file] [--title=my_mesh_name] [--colormap=val] [--color-max=val] [--buildings=file]") - print(" [--with-cycles/--no-cycles] [--closeup] [--small] [--anim] [--help]") + print(" [--with-cycles/--no-cycles] [--closeup] [--small] [--anim] [--suppress] [--help]") print(" with") print(" --vtk_file - input mesh file (.vtk, .vtu, .inp)") print(" --title - title text (added to image rendering)") @@ -1646,7 +1897,9 @@ def usage(): print(" --closeup - sets camera view closer to center of model") print(" --transparent-sea-level - turns on transparency for sea-level plane") print(" --small - turns on small images size (400x600px) for preview") - print(" --anim - turns on movie animation (dive-in and rotation)") + print(" --shift-buildings - moves buildings up, e.g., a factor 0.0005 (default is no shift==0.0)") + print(" --anim - turns on movie animation (dive-in and rotation by default, use --no-rotation or --no-dive-in to turn off)") + print(" --suppress - suppress renderer output (default is off)") print(" --help - this help for usage...") sys.exit(1) @@ -1684,9 +1937,17 @@ def usage(): use_cycles_renderer = True elif "--no-cycles" in arg: use_cycles_renderer = False + elif "--no-dive-in" in arg: + use_animation_dive_in = False + elif "--no-rotation" in arg: + use_animation_rotation = False elif "--small" in arg: blender_img_resolution_X = 600 blender_img_resolution_Y = 400 + elif "--shift-buildings" in arg: + shift_building_baseline = float(arg.split('=')[1]) + elif "--suppress" in arg: + suppress_renderer_output = True elif "--title=" in arg: image_title = arg.split('=')[1] elif "--transparent-sea-level" in arg: diff --git a/utils/scripts/run_convert_IRIS_EMC_netCDF_2_tomo.py b/utils/scripts/run_convert_IRIS_EMC_netCDF_2_tomo.py index e339f284b..d9a742f3b 100755 --- a/utils/scripts/run_convert_IRIS_EMC_netCDF_2_tomo.py +++ b/utils/scripts/run_convert_IRIS_EMC_netCDF_2_tomo.py @@ -167,7 +167,7 @@ use_surface_elevation = False # create JPEG figure of surface elevation (if surface elevation is used) -create_surface_elevation_figure = False +create_surface_elevation_figure = True # VTK file of model for visualization create_vtk_file_output = True @@ -473,14 +473,32 @@ def read_data_array(var_name,model_data): """ reads in variable data as numpy array """ + global depth_index,lat_index,lon_index global use_replace_missing_values_with_average # gets variable data array var_array = np.array(model_data.variables[var_name]) + # determines indexing (which index for which dimension) + # needed also to get index of depth for the data array averaging + depth_index,lat_index,lon_index = 
get_array_indexing(var_name,var_array,model_data,depth_index,lat_index,lon_index,ndepths,nlats,nlons) + # replaces missing values if use_replace_missing_values_with_average: - var_array = replace_missing_values_with_average(var_name,var_array,model_data) + # gets missing value + # checks if there is a missing value specified for the array + has_missing_val = True + try: + missing_val = model_data.variables[var_name].missing_value + except: + # array has no missing value specified + has_missing_val = False + + # fill missing values + if has_missing_val: + var_array = replace_missing_values_with_average(var_name,var_array,model_data) + else: + print(" ",var_name," model has no missing values specified") return var_array @@ -497,11 +515,13 @@ def replace_missing_values_with_average(var_name,var_array,model_data): print(" replace missing values for model parameter: ",var_name) - # gets index of depth for the data array - depth_index,lat_index,lon_index = get_array_indexing(var_name,var_array,model_data,depth_index,lat_index,lon_index,ndepths,nlats,nlons) - # gets missing value - missing_val = model_data.variables[var_name].missing_value + try: + missing_val = model_data.variables[var_name].missing_value + except: + # array has no missing value specified + # we assume that there are no such and use a default missing value + missing_val = 99999.0 var_array = fill_missing_data_with_average_values(var_array,missing_val,ndepths,depth_index) @@ -840,8 +860,9 @@ def get_topo_DEM(region,res='low'): # show/write figure plot if create_surface_elevation_figure: + print(" creating surface elevation figure...") fig = pygmt.Figure() - fig.grdimage(grid=grid, + fig.grdimage(grid=topo_grid, cmap="haxby", projection="M10c", frame=True, @@ -1052,7 +1073,6 @@ def netCDF_2_tomo(input_file,UTM_zone=None,mesh_area=None,maximum_depth=None): print(" mesh_area : ",mesh_area) if not maximum_depth is None: print(" maximum depth: ",maximum_depth) - print("") # target mesh area min/max @@ -1062,6 +1082,10 @@ def netCDF_2_tomo(input_file,UTM_zone=None,mesh_area=None,maximum_depth=None): mesh_lon_max = mesh_area[2] mesh_lat_max = mesh_area[3] + print(" target mesh area: latitude min/max = {} / {}".format(mesh_lat_min,mesh_lat_max)) + print(" longitude min/max = {} / {}".format(mesh_lon_min,mesh_lon_max)) + print("") + # check if mesh_lon_min > mesh_lon_max or mesh_lat_min > mesh_lat_max: print("Wrong mesh area format, please provide in a format (lon_min,lat_min,lon_max,lat_max)") @@ -1094,8 +1118,14 @@ def netCDF_2_tomo(input_file,UTM_zone=None,mesh_area=None,maximum_depth=None): if 'geospatial_lat_min' in global_vars: model_lat_min = model_data.geospatial_lat_min model_lat_max = model_data.geospatial_lat_max + if model_data.geospatial_lat_resolution: + model_dlat = model_data.geospatial_lat_resolution + model_lon_min = model_data.geospatial_lon_min model_lon_max = model_data.geospatial_lon_max + if model_data.geospatial_lon_resolution: + model_dlon = model_data.geospatial_lon_resolution + model_depth_min = model_data.geospatial_vertical_min model_depth_max = model_data.geospatial_vertical_max @@ -1145,32 +1175,86 @@ def netCDF_2_tomo(input_file,UTM_zone=None,mesh_area=None,maximum_depth=None): # tomo file needs regular spacing print(" checking regular spacing:") + # numerical tolerance in spacing increments + TOL_SPACING = 1.e-4 # checks longitudes dx0 = x[1] - x[0] for i in range(1,nlons): dx = x[i] - x[i-1] - if dx != dx0: + if np.abs(dx - dx0) > TOL_SPACING: print("Error: irregular spacing in longitudes: 
",i,dx,dx0,"step: ",x[i],x[i-1]) + print(" using a tolerance ",TOL_SPACING) + print(" Please select an EMC model with regular spacing, exiting...") + sys.exit(1) + if model_dlon: + dlon = model_dlon + if np.abs(dlon - dx0) > TOL_SPACING: + print("Error: irregular spacing in longitudes: ",dx0," instead of model specification: ",dlon) + print(" using a tolerance ",TOL_SPACING) + print(" Please select an EMC model with regular spacing, exiting...") sys.exit(1) - dlon = dx0 + else: + dlon = dx0 + print(" has regular longitude spacing ",dlon,"(deg)") # checks latitudes dy0 = y[1] - y[0] for i in range(1,nlats): dy = y[i] - y[i-1] - if dy != dy0: + if np.abs(dy - dy0) > TOL_SPACING: print("Error: irregular spacing in latitudes: ",i,dy,dy0,"step: ",y[i],y[i-1]) + print(" using a tolerance ",TOL_SPACING) + print(" Please select an EMC model with regular spacing, exiting...") sys.exit(1) - dlat = dy0 + if model_dlat: + dlat = model_dlat + if np.abs(dlat - dy0) > TOL_SPACING: + print("Error: irregular spacing in latitudes: ",dy0," instead of model specification: ",dlat) + print(" using a tolerance ",TOL_SPACING) + print(" Please select an EMC model with regular spacing, exiting...") + sys.exit(1) + else: + dlat = dy0 + print(" has regular latitude spacing ",dlat,"(deg)") # checks depths dz0 = z[1] - z[0] + dz_increments = np.array([dz0]) + has_irregular_depths = False for i in range(1,ndepths): dz = z[i] - z[i-1] - if dz != dz0: - print("Error: irregular spacing in depths: ",i,dz,dz0,"step: ",z[i],z[i-1]) - sys.exit(1) - ddepth = dz0 + if np.abs(dz - dz0) > TOL_SPACING: + has_irregular_depths = True + # adds increment to list of increments + if dz not in dz_increments: + dz_increments = np.append(dz_increments,dz) + + if has_irregular_depths: + print(" has irregular depth spacing (m): ",dz_increments) + # chooses minimum spacing + # Convert float numbers to integers (multiply by a common factor) + common_factor = 10.0 + integer_numbers = np.round(dz_increments * common_factor).astype(int) + # greatest common divisor: numpy's gcd function calculates the GCD for all numbers (integers) + gcd = np.gcd.reduce(integer_numbers) + + # regular depth increment (float) + ddepth = float(gcd / common_factor) + + print("") + print(" tomography model will use a regular spacing: ",ddepth,"(m)") + print(" converting model depths to regular depths...") + # convert irregular depths to a regular gridding + # number of regular depth increments + ndepths_regular = np.round(z.max() / ddepth).astype(int) + 1 + z_regular = np.linspace(z.min(),z.max(),ndepths_regular) + + else: + # has regular spacing + ddepth = dz0 + print(" has regular depth spacing ",ddepth,"(km)") + + print("") print(" dlon/dlat/ddepth = {} (deg) / {} (deg) / {} (km)".format(dlon,dlat,ddepth/1000.0)) print(" spacing ok") print("") @@ -1211,6 +1295,43 @@ def netCDF_2_tomo(input_file,UTM_zone=None,mesh_area=None,maximum_depth=None): print("Error: given model is incomplete for converting to a tomographic model with (vp,vs,rho) parameterization") sys.exit(1) + + # checks if model range covers target region + if not mesh_area is None: + # latitude range + if mesh_lat_min > model_lat_min and mesh_lat_min < model_lat_max and \ + mesh_lat_max > model_lat_min and mesh_lat_max < model_lat_max: + print(" target latitude range {} / {} within model range {} / {}".format(mesh_lat_min,mesh_lat_max,model_lat_min,model_lat_max)) + else: + print("") + print("ERROR: target latitude range {} / {} outside model range {} / 
{}".format(mesh_lat_min,mesh_lat_max,model_lat_min,model_lat_max)) + print(" nothing to extract...") + print("") + print(" Please make sure that the EMC model covers your target region, exiting...") + print("") + sys.exit(1) + + # longitude range + if mesh_lon_min > model_lon_min and mesh_lon_min < model_lon_max and \ + mesh_lon_max > model_lon_min and mesh_lon_max < model_lon_max: + print(" target longitude range {} / {} within model range {} / {}".format(mesh_lon_min,mesh_lon_max,model_lon_min,model_lon_max)) + else: + print("") + print("ERROR: target longitude range {} / {} outside model range {} / {}".format(mesh_lon_min,mesh_lon_max,model_lon_min,model_lon_max)) + print(" nothing to extract...") + print("") + print(" Please make sure that the EMC model covers your target mesh area, exiting...") + print("") + sys.exit(1) + print("") + + # checks depth + if not maximum_depth is None: + if maximum_depth > model_depth_max: + print("WARNING: target maximum depth {} below model maximum depth {}".format(maximum_depth,model_depth_max)) + print(" tomographic model output will be cut-off at model depth {}".format(model_depth_max)) + print("") + # fill missing values with average value from each depth if use_replace_missing_values_with_average: print(" replacing missing values with depth average") @@ -1229,14 +1350,14 @@ def netCDF_2_tomo(input_file,UTM_zone=None,mesh_area=None,maximum_depth=None): # file output requires velocities in m/s # determines scaling factor # (based on vpv array, assuming all other velocity arrays have the same units) - if model_data.variables['vpv'].units == "km.s-1": + if model_data.variables['vpv'].units in ["km.s-1","km/s"]: # given in km/s -> m/s unit_scale = 1000 - elif model_data.variables['vpv'].units == "m.s-1": + elif model_data.variables['vpv'].units in ["m.s-1","m/s"]: # given in m/s unit_scale = 1 else: - print("Error: array has invalid unit ",model_data.variables['vpv'].units) + print("Error: vpv array has invalid unit ",model_data.variables['vpv'].units) sys.exit(1) # scales velocities to m/s if unit_scale != 1: @@ -1255,14 +1376,14 @@ def netCDF_2_tomo(input_file,UTM_zone=None,mesh_area=None,maximum_depth=None): # file output requires velocities in m/s # determines scaling factor # (based on vpv array, assuming all other velocity arrays have the same units) - if model_data.variables['vsv'].units == "km.s-1": + if model_data.variables['vsv'].units in ["km.s-1","km/s"]: # given in km/s -> m/s unit_scale = 1000 - elif model_data.variables['vsv'].units == "m.s-1": + elif model_data.variables['vsv'].units in ["m.s-1","m/s"]: # given in m/s unit_scale = 1 else: - print("Error: array has invalid unit ",model_data.variables['vpv'].units) + print("Error: vsv array has invalid unit ",model_data.variables['vsv'].units) sys.exit(1) # scales velocities to m/s if unit_scale != 1: @@ -1285,10 +1406,10 @@ def netCDF_2_tomo(input_file,UTM_zone=None,mesh_area=None,maximum_depth=None): # file output requires velocities in m/s # determines scaling factor - if model_data.variables['vp'].units == "km.s-1": + if model_data.variables['vp'].units in ["km.s-1","km/s"]: # given in km/s -> m/s unit_scale = 1000 - elif model_data.variables['vp'].units == "m.s-1": + elif model_data.variables['vp'].units in ["m.s-1","m/s"]: # given in m/s unit_scale = 1 else: @@ -1306,10 +1427,10 @@ def netCDF_2_tomo(input_file,UTM_zone=None,mesh_area=None,maximum_depth=None): # file output requires velocities in m/s # determines scaling factor - if model_data.variables['vs'].units == "km.s-1": + if 
model_data.variables['vs'].units in ["km.s-1","km/s"]: # given in km/s -> m/s unit_scale = 1000 - elif model_data.variables['vs'].units == "m.s-1": + elif model_data.variables['vs'].units in ["m.s-1","m/s"]: # given in m/s unit_scale = 1 else: @@ -1320,6 +1441,7 @@ def netCDF_2_tomo(input_file,UTM_zone=None,mesh_area=None,maximum_depth=None): vs *= unit_scale model['vs'] = vs + print("") # parameter scaling @@ -1339,19 +1461,19 @@ def netCDF_2_tomo(input_file,UTM_zone=None,mesh_area=None,maximum_depth=None): # file output requires density in kg/m^3 # determines scaling factor - if model_data.variables['rho'].units == "g.cm-3": + if model_data.variables['rho'].units in ["g.cm-3","g/cm3"]: # given in g/cm^3 -> kg/m^3 # rho [kg/m^3] = rho * 1000 [g/cm^3] unit_scale = 1000 - elif model_data.variables['vp'].units == "kg.cm-3": + elif model_data.variables['rho'].units in ["kg.cm-3","kg/cm3"]: # given in kg/cm^3 -> kg/m^3 # rho [kg/m^3] = rho * 1000000 [kg/cm^3] unit_scale = 1000000 - elif model_data.variables['vp'].units == "kg.m-3": + elif model_data.variables['rho'].units in ["kg.m-3","kg/m3"]: # given in kg/m^3 unit_scale = 1 else: - print("Error: array has invalid unit ",model_data.variables['vp'].units) + print("Error: rho array has invalid unit ",model_data.variables['rho'].units) sys.exit(1) # converts density to default kg/m^3 if unit_scale != 1: @@ -1388,11 +1510,17 @@ def netCDF_2_tomo(input_file,UTM_zone=None,mesh_area=None,maximum_depth=None): # checks if depth given with respect to earth surface (or sea level) depth_descr = model_data.variables['depth'].long_name + # user info + print("depth:") if "below earth surface" in depth_descr: - # user info - print("depth:") print(" depth given with respect to earth surface") - print("") + elif "below sea level" in depth_descr: + print(" depth given with respect to sea level") + elif "below mean Earth radius of 6371 km" in depth_descr: + print(" depth given with respect to mean Earth radius of 6371 km") + else: + print(" description given: ",depth_descr) + print("") # elevation if use_surface_elevation: @@ -1434,6 +1562,7 @@ def netCDF_2_tomo(input_file,UTM_zone=None,mesh_area=None,maximum_depth=None): # vtk file output if create_vtk_file_output: print("creating vtk file for visualization...") + print("") # Create points points = vtk.vtkPoints() @@ -1482,6 +1611,13 @@ def netCDF_2_tomo(input_file,UTM_zone=None,mesh_area=None,maximum_depth=None): ndim_lats = 0 ndim_lons = 0 + # switches depth array to loop over regular depths + if has_irregular_depths: + # original, irregular depth z-array + z_irreg = z.copy() + # switches z to regular depth z-array + z = z_regular + # loops over depth for k, depth in enumerate(z): # determines target area @@ -1556,7 +1692,17 @@ def netCDF_2_tomo(input_file,UTM_zone=None,mesh_area=None,maximum_depth=None): ijk = [0,0,0] ijk[lat_index] = i ijk[lon_index] = j - ijk[depth_index] = k + + if has_irregular_depths: + # find index in irregular depth array + # Find the matching index of the depth value in the irregular z_irreg array + k_irreg = np.max(np.where(z_irreg <= depth)) + ijk[depth_index] = k_irreg + else: + # regular depth intervals, simply use index k from loop + ijk[depth_index] = k + + # gets tuple for array indexing index = tuple(ijk) # model parameters @@ -1608,6 +1754,29 @@ def netCDF_2_tomo(input_file,UTM_zone=None,mesh_area=None,maximum_depth=None): print("") print("") + print("tomographic model statistics:") + print(" vp min/max : {:.2f} / {:.2f} (m/s)".format(header_vp_min,header_vp_max)) + print(" 
vs min/max : {:.2f} / {:.2f} (m/s)".format(header_vs_min,header_vs_max)) + print(" rho min/max : {:.2f} / {:.2f} (kg/m3)".format(header_rho_min,header_rho_max)) + print("") + + if header_vp_min <= 0.0: + print("WARNING: Vp has invalid entries with a minimum of zero!") + print(" The provided output model is likely invalid.") + print(" Please check with your inputs...") + print("") + + if header_vs_min <= 0.0: + print("WARNING: Vs has entries with a minimum of zero!") + print(" The provided output model is likely invalid.") + print(" Please check with your inputs...") + print("") + + if header_rho_min <= 0.0: + print("WARNING: Density has entries with a minimum of zero!") + print(" The provided output model is likely invalid.") + print(" Please check with your inputs...") + print("") # header infos header_dx = dlon # increment x-direction for longitudes diff --git a/utils/scripts/run_get_simulation_topography.py b/utils/scripts/run_get_simulation_topography.py index 12ee4ef45..be69e92d8 100755 --- a/utils/scripts/run_get_simulation_topography.py +++ b/utils/scripts/run_get_simulation_topography.py @@ -267,7 +267,9 @@ def get_topo(lon_min,lat_min,lon_max,lat_max): # for example: region = (12.35, 41.8, 12.65, 42.0) region = (lon_min, lat_min, lon_max, lat_max) + print("*******************************") print("get topo:") + print("*******************************") print(" region: ",region) print("") @@ -283,8 +285,8 @@ def get_topo(lon_min,lat_min,lon_max,lat_max): name = 'ptopo-DEM.tif' filename = dir + '/' + name - print("current directory:",dir) - print("topo file name :",filename) + print(" current directory:",dir) + print(" topo file name :",filename) print("") ## get topo file @@ -372,7 +374,7 @@ def get_topo(lon_min,lat_min,lon_max,lat_max): else: print("Error invalid SRTM_type " + SRTM_type) sys.exit(1) - print("> ",cmd) + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) @@ -384,7 +386,7 @@ def get_topo(lon_min,lat_min,lon_max,lat_max): else: # older version < 5.3 cmd = 'gmt grdconvert ' + gridfile + ' ' + filename + '=gd:Gtiff' - print("> ",cmd) + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) print("") @@ -393,15 +395,15 @@ def get_topo(lon_min,lat_min,lon_max,lat_max): print("Error invalid SRTM_type " + SRTM_type) sys.exit(1) - print("GMT:") + print(" GMT:") print(" region : ",gmt_region) print(" interval: ",gmt_interval) print("") # topography info cmd = 'gmt grdinfo ' + filename - print("topography file info:") - print("> ",cmd) + print(" topography file info:") + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) print("") @@ -409,8 +411,8 @@ def get_topo(lon_min,lat_min,lon_max,lat_max): # hillshade gif_file = filename + '.hillshaded.gif' cmd = 'gdaldem hillshade ' + filename + ' ' + gif_file + ' -of GTiff' - print("hillshade figure:") - print("> ",cmd) + print(" hillshade figure:") + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) print("") @@ -426,14 +428,14 @@ def get_topo(lon_min,lat_min,lon_max,lat_max): # uses region specified from grid file gridfile = 'ptopo.sampled.grd' cmd = 'gmt grdsample ' + filename + ' ' + gmt_interval + ' -G' + gridfile - print("resampling topo data") - print("> ",cmd) + print(" resampling topo data") + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) print("") cmd = 'gmt grdinfo ' + gridfile - print("> ",cmd) + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) 
print("") @@ -441,16 +443,16 @@ def get_topo(lon_min,lat_min,lon_max,lat_max): # converts to xyz format xyz_file = 'ptopo.xyz' cmd = 'gmt grd2xyz ' + gridfile + ' > ' + xyz_file - print("converting to xyz") - print("> ",cmd) + print(" converting to xyz") + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) print("") # gif cmd = 'gdaldem hillshade ' + xyz_file + ' ' + xyz_file + '.hillshaded1.gif -of GTiff' - print("creating gif-image ...") - print("> ",cmd) + print(" creating gif-image ...") + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) print("") @@ -462,20 +464,20 @@ def get_topo(lon_min,lat_min,lon_max,lat_max): gmt_interval2 = '-I' + str(incr_dx/10.0) + '/' + str(incr_dx/10.0) cmd = 'gmt blockmean ' + gmt_interval2 + ' ' + xyz_file + ' ' + gmt_region + ' > ' + mean_file - print("mesh interpolation") - print("> ",cmd) + print(" mesh interpolation") + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) cmd = 'mv -v ptopo.xyz ptopo.xyz.org; mv -v ptopo.mean.xyz ptopo.xyz' - print("> ",cmd) + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) print("") # gif cmd = 'gdaldem hillshade ' + xyz_file + ' ' + xyz_file + '.hillshaded2.gif -of GTiff' - print("creating gif-image ...") - print("> ",cmd) + print(" creating gif-image ...") + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) print("") @@ -492,11 +494,13 @@ def get_topo(lon_min,lat_min,lon_max,lat_max): def plot_map(gmt_region,gridfile="ptopo.sampled.grd"): global datadir + print("*******************************") print("plotting map ...") + print("*******************************") # current directory dir = os.getcwd() - print("current directory:",dir) + print(" current directory:",dir) print("") #cmd = 'cd ' + datadir + '/' + ';' @@ -520,14 +524,14 @@ def plot_map(gmt_region,gridfile="ptopo.sampled.grd"): cmd += 'gmt psbasemap -O -R -J -Ba1g1:"Map": -P -V >> ' + ps_file + ';' status = subprocess.call(cmd, shell=True) check_status(status) - print("map plotted in file: ",ps_file) + print(" map plotted in file: ",ps_file) # imagemagick converts ps to pdf cmd = 'convert ' + ps_file + ' ' + pdf_file - print("> ",cmd) + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) - print("map plotted in file: ",pdf_file) + print(" map plotted in file: ",pdf_file) print("") return @@ -540,7 +544,9 @@ def create_AVS_file(): global utm_zone global gmt_region + print("*******************************") print("creating AVS border file ...") + print("*******************************") # current directory dir = os.getcwd() @@ -552,7 +558,7 @@ def create_AVS_file(): cmd = 'gmt pscoast ' + gmt_region + ' -Dh -W -M > ' + name + ';' status = subprocess.call(cmd, shell=True) check_status(status) - print("GMT segment file plotted in file: ",name) + print(" GMT segment file plotted in file: ",name) # note: GMT segment file has format # > Shore Bin # .., Level .. @@ -561,13 +567,17 @@ def create_AVS_file(): # > Shore Bin # .., Level .. # .. - print("Getting boundaries from file %s ..." % name) + print(" Getting boundaries from file %s ..." 
% name) # reads gmt boundary file with open(name,'r') as f: content = f.readlines() - if len(content) == 0: sys.exit("Error no file content") + if len(content) == 0: + print("") + print(" INFO: no boundaries in file") + print("") + return # counts segments and points numsegments = 0 @@ -580,8 +590,8 @@ def create_AVS_file(): # point numpoints += 1 - print("There are %i contours" % numsegments) - print("There are %i data points" % numpoints) + print(" There are %i contours" % numsegments) + print(" There are %i data points" % numpoints) # read the GMT file to get the number of individual line segments currentelem = 0 @@ -600,7 +610,7 @@ def create_AVS_file(): previous_was_comment = 0 num_individual_lines = currentelem - print("There are %i individual line segments" % num_individual_lines) + print(" There are %i individual line segments" % num_individual_lines) avsfile = "AVS_boundaries_utm.inp" @@ -690,7 +700,7 @@ def create_AVS_file(): for currentpoint in range(1,numpoints+1): f.write("%i 255.\n" % (currentpoint)) - print("see file: %s" % avsfile) + print(" see file: %s" % avsfile) print("") return @@ -704,7 +714,9 @@ def topo_extract(filename): # ./topo_extract.sh ptopo.mean.xyz #cmd = './topo_extract.sh ptopo.mean.xyz' + print("*******************************") print("extracting interface data for xmeshfem3D ...") + print("*******************************") import numpy @@ -718,7 +730,7 @@ def topo_extract(filename): # statistics cmd = 'gmt gmtinfo ' + filename - print("> ",cmd) + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) print("") @@ -726,13 +738,13 @@ def topo_extract(filename): # cleanup cmd = 'rm -f ' + file1 + ';' cmd += 'rm -f ' + file2 + ';' - print("> ",cmd) + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) print("") # reads in lon/lat/elevation - print("reading file " + filename + " ...") + print(" reading file " + filename + " ...") data = numpy.loadtxt(filename) #debug #print(data) @@ -749,17 +761,17 @@ def topo_extract(filename): f.write("%f\n" % (elevation[i] * toposcale - toposhift) ) print("") - print("check: ",file1,file2) + print(" check: ",file1,file2) print("") cmd = 'gmt gmtinfo ' + file1 + ';' cmd += 'gmt gmtinfo ' + file2 + ';' - print("> ",cmd) + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) print("") - print("number of points along x (NXI) and y (NETA)") + print(" number of points along x (NXI) and y (NETA):") x0 = data[0,0] y0 = data[0,1] i0 = 1 @@ -779,7 +791,7 @@ def topo_extract(filename): if dx < 0.0: ii = i + 1 if nx > 0 and ii - i0 != nx: - print("non-regular nx: ",nx,ii-i0,"on line ",i+1) + print(" non-regular nx: ",nx,ii-i0,"on line ",i+1) nx = ii - i0 ny += 1 deltay = y - y0 @@ -790,17 +802,17 @@ def topo_extract(filename): ii = len(data) + 1 if nx > 0 and ii - i0 != nx: - print("non-regular nx: ",nx,ii-i0,"on line ",ii) + print(" non-regular nx: ",nx,ii-i0,"on line ",ii) nx = ii - i0 ny += 1 - print("--------------------------------------------") - print("NXI = ",nx) - print("NETA = ",ny) - print("xmin/xmax = ",xmin,xmax) - print("ymin/ymax = ",ymin,ymax) - print("deltax = ",deltax,"average = ",(xmax-xmin)/(nx-1)) - print("deltay = ",deltay,"average = ",(ymax-ymin)/(ny-1)) - print("--------------------------------------------") + print(" --------------------------------------------") + print(" NXI = ",nx) + print(" NETA = ",ny) + print(" xmin/xmax = ",xmin,xmax) + print(" ymin/ymax = ",ymin,ymax) + print(" deltax = ",deltax,"average = 
",(xmax-xmin)/(nx-1)) + print(" deltay = ",deltay,"average = ",(ymax-ymin)/(ny-1)) + print(" --------------------------------------------") print("") return nx,ny,deltax,deltay @@ -828,9 +840,12 @@ def update_Mesh_Par_file(dir,lon_min,lat_min,lon_max,lat_max,nx,ny,dx,dy,xyz_fil path = dir + '/' + 'DATA/meshfem3D_files/' os.chdir(path) + print("*******************************") print("updating Mesh_Par_file ...") + print("*******************************") print(" working directory: ",os.getcwd()) - print(" min lon/lat = ",lon_min,lat_min," max lon/lat = ",lon_max,lat_max) + print(" min lon/lat : ",lon_min,"/",lat_min) + print(" max lon/lat : ",lon_max,"/",lat_max) print("") cmd = 'echo "";' @@ -843,7 +858,7 @@ def update_Mesh_Par_file(dir,lon_min,lat_min,lon_max,lat_max,nx,ny,dx,dy,xyz_fil cmd += 'sed -i "s:^LONGITUDE_MIN .*:LONGITUDE_MIN = ' + str(lon_min) + ':" Mesh_Par_file' + ';' cmd += 'sed -i "s:^LONGITUDE_MAX .*:LONGITUDE_MAX = ' + str(lon_max) + ':" Mesh_Par_file' + ';' cmd += 'sed -i "s:^UTM_PROJECTION_ZONE .*:UTM_PROJECTION_ZONE = ' + str(utm_zone) + ':" Mesh_Par_file' + ';' - print("> ",cmd) + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) print("") @@ -879,7 +894,7 @@ def update_Mesh_Par_file(dir,lon_min,lat_min,lon_max,lat_max,nx,ny,dx,dy,xyz_fil cmd += 'echo "";' line = '.false. ' + str(nxi) + ' ' + str(neta) + ' ' + str(lon) + ' ' + str(lat) + ' ' + str(dxi) + ' ' + str(deta) cmd += 'sed -i "s:^.false. .*:' + line + ':g" interfaces.dat' + ';' - print("> ",cmd) + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) print("") @@ -891,12 +906,12 @@ def update_Mesh_Par_file(dir,lon_min,lat_min,lon_max,lat_max,nx,ny,dx,dy,xyz_fil cmd = 'rm -f ' + xyz_file1 + ' ' + xyz_file2 + ';' cmd += 'ln -s ' + topodir + '/' + xyz_file1 + ';' cmd += 'ln -s ' + topodir + '/' + xyz_file2 + ';' - print("> ",cmd) + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) print("") - print("file updated: %s/Mesh_Par_file" % path) + print(" file updated: %s/Mesh_Par_file" % path) print("") return @@ -914,18 +929,20 @@ def update_Par_file(dir): path = dir + '/' + 'DATA/' os.chdir(path) + print("*******************************") print("updating Par_file ...") + print("*******************************") print(" working directory: ",os.getcwd()) print(" utm_zone = ",utm_zone) print("") cmd = 'sed -i "s:^UTM_PROJECTION_ZONE .*:UTM_PROJECTION_ZONE = ' + str(utm_zone) + ':" Par_file' + ';' - print("> ",cmd) + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) print("") - print("file updated: %s/Par_file" % path) + print(" file updated: %s/Par_file" % path) print("") return @@ -1206,12 +1223,24 @@ def setup_simulation(lon_min,lat_min,lon_max,lat_max): global datadir global utm_zone global incr_dx,toposhift,toposcale + + print("") + print("*******************************") + print("setup simulation topography") + print("*******************************") + print("") + print(" topo : ",SRTM_type) + print(" grid sampling interval: ",incr_dx,"(deg) ",incr_dx * math.pi/180.0 * 6371.0, "(km)") + print(" topo down shift : ",toposhift) + print(" topo down scaling : ",toposcale) + print("") + # current directory dir = os.getcwd() # creates data directory cmd = 'mkdir -p ' + datadir - print("> ",cmd) + print(" > ",cmd) status = subprocess.call(cmd, shell=True) check_status(status) print("") @@ -1219,11 +1248,7 @@ def setup_simulation(lon_min,lat_min,lon_max,lat_max): # change working directory to ./topo_data/ 
path = dir + '/' + datadir os.chdir(path) - print("working directory: ",os.getcwd()) - print(" topo : ",SRTM_type) - print(" grid sampling interval: ",incr_dx,"(deg) ",incr_dx * math.pi/180.0 * 6371.0, "(km)") - print(" topo down shift : ",toposhift) - print(" topo down scaling : ",toposcale) + print(" working directory : ",os.getcwd()) print("") # check min/max is given in correct order @@ -1241,12 +1266,16 @@ def setup_simulation(lon_min,lat_min,lon_max,lat_max): xyz_file = get_topo(lon_min,lat_min,lon_max,lat_max) ## UTM zone + print("*******************************") + print("determining UTM coordinates...") + print("*******************************") + midpoint_lat = (lat_min + lat_max)/2.0 midpoint_lon = (lon_min + lon_max)/2.0 utm_zone = utm.latlon_to_zone_number(midpoint_lat,midpoint_lon) - print("region midpoint lat/lon: %f / %f " %(midpoint_lat,midpoint_lon)) - print("UTM zone: %d" % utm_zone) + print(" region midpoint lat/lon: %f / %f " %(midpoint_lat,midpoint_lon)) + print(" UTM zone: %d" % utm_zone) print("") # converting to utm @@ -1268,11 +1297,15 @@ def setup_simulation(lon_min,lat_min,lon_max,lat_max): nx,ny,dx,dy = topo_extract(xyz_file) # creates parameter files + # main Par_file update_Par_file(dir) + + # mesher Mesh_Par_file update_Mesh_Par_file(dir,lon_min,lat_min,lon_max,lat_max,nx,ny,dx,dy,xyz_file) print("") print("topo output in directory: ",datadir) + print("") print("all done") print("") return @@ -1287,7 +1320,7 @@ def usage(): # default increment in km incr_dx_km = incr_dx * math.pi/180.0 * 6371.0 - print("usage: ./run_get_simulation_topography.py lon_min lat_min lon_max lat_max [SRTM] [incr_dx] [toposhift] [toposcale]") + print("usage: ./run_get_simulation_topography.py lon_min lat_min lon_max lat_max [--SRTM=SRTM] [--dx=incr_dx] [--toposhift=toposhift] [--toposcale=toposcale]") print(" where") print(" lon_min lat_min lon_max lat_max - region given by points: left bottom right top") print(" for example: 12.35 42.0 12.65 41.8 (Rome)") @@ -1304,34 +1337,41 @@ def usage(): print(" toposhift - (optional) topography shift for 2nd interface (in m)") print(" to shift original topography downwards [default %f m]" % toposhift) print(" toposcale - (optional) scalefactor to topography for shifted 2nd interface (e.g., 0.1) [default %f]" %toposcale) - return + sys.exit(1) if __name__ == '__main__': # gets arguments if len(sys.argv) < 5: usage() - sys.exit(1) + else: lon_min = float(sys.argv[1]) lat_min = float(sys.argv[2]) lon_max = float(sys.argv[3]) lat_max = float(sys.argv[4]) - # type: 'low' == SRTM 90m / 'high' == SRTM 30m / else e.g. 'etopo' == topo30 (30-arc seconds) - if len(sys.argv) >= 6: - SRTM_type = sys.argv[5] - - # GMT grid sampling interval - if len(sys.argv) >= 7: - incr_dx = float(sys.argv[6]) - - # topography shift for 2nd interface - if len(sys.argv) == 8: - toposhift = float(sys.argv[7]) - - # topography scalefactor - if len(sys.argv) == 9: - toposcale = float(sys.argv[8]) + i = 0 + for arg in sys.argv: + #print("argument "+str(i)+": " + arg) + # get arguments + if "--help" in arg: + usage() + elif "--SRTM=" in arg: + # type: 'low' == SRTM 90m / 'high' == SRTM 30m / else e.g. 
'etopo' == topo30 (30-arc seconds) + SRTM_type = arg.split('=')[1] + elif "--dx=" in arg: + # GMT grid sampling interval + incr_dx = float(arg.split('=')[1]) + elif "--toposhift=" in arg: + # topography shift for 2nd interface + toposhift = float(arg.split('=')[1]) + elif "--toposcale=" in arg: + # topography scalefactor + toposcale = float(arg.split('=')[1]) + elif i >= 5: + print("argument not recognized: ",arg) + usage() + i += 1 # logging cmd = " ".join(sys.argv)
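
For reference, the depth-regularization step introduced above in utils/scripts/run_convert_IRIS_EMC_netCDF_2_tomo.py (take the greatest common divisor of the observed depth increments, rebuild a regular depth axis, then look each regular depth back up in the original irregular array) can be sketched in isolation as follows. This is a minimal illustration only, not part of the patch: the variable names z_irreg, ddepth and z_regular follow the patch, the numeric depth values are invented for the example, and the increment list is simplified here with np.diff instead of the patch's explicit collection loop.

    import numpy as np

    # hypothetical irregular depth samples (same units as the model depth axis)
    z_irreg = np.array([0.0, 25.0, 75.0, 175.0])
    dz_increments = np.diff(z_irreg)                    # -> [25., 50., 100.]

    # greatest common divisor of the increments, using a common scale factor
    # to turn the float increments into integers for np.gcd
    common_factor = 10.0
    integer_numbers = np.round(dz_increments * common_factor).astype(int)
    ddepth = float(np.gcd.reduce(integer_numbers) / common_factor)   # -> 25.0

    # regular depth axis spanning the original range
    # (assumes depths start at 0, as in the patch)
    ndepths_regular = int(np.round(z_irreg.max() / ddepth)) + 1
    z_regular = np.linspace(z_irreg.min(), z_irreg.max(), ndepths_regular)

    # for each regular depth, index of the deepest original sample at or above it,
    # mirroring the k_irreg lookup in the patch's loop over depths
    for depth in z_regular:
        k_irreg = np.max(np.where(z_irreg <= depth))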