Line data Source code
1 : !--------------------------------------------------------------------------------------------------!
2 : ! CP2K: A general program to perform molecular dynamics simulations !
3 : ! Copyright 2000-2026 CP2K developers group <https://cp2k.org> !
4 : ! !
5 : ! SPDX-License-Identifier: GPL-2.0-or-later !
6 : !--------------------------------------------------------------------------------------------------!
7 :
8 : ! **************************************************************************************************
9 : !> \brief Generate Gaussian cube files
10 : ! **************************************************************************************************
11 : MODULE realspace_grid_openpmd
12 :
13 : #ifdef __OPENPMD
14 : USE cp_files, ONLY: close_file, &
15 : open_file
16 : USE cp_log_handling, ONLY: cp_logger_get_default_io_unit
17 : USE cp_output_handling_openpmd, ONLY: cp_openpmd_get_value_unit_nr, &
18 : cp_openpmd_per_call_value_type
19 : USE kinds, ONLY: default_string_length, &
20 : dp
21 : USE message_passing, ONLY: &
22 : file_amode_rdonly, file_offset, mp_comm_type, mp_file_descriptor_type, mp_file_type, &
23 : mp_file_type_free, mp_file_type_hindexed_make_chv, mp_file_type_set_view_chv, &
24 : mpi_character_size
25 : USE physcon, ONLY: a_bohr, e_charge, seconds
26 : #if defined(__parallel)
27 : #if defined(__MPI_F08)
28 : USE mpi_f08, ONLY: mpi_allreduce, mpi_integer, mpi_bxor, mpi_allgather
29 : #else
30 : USE mpi, ONLY: mpi_allreduce, mpi_integer, mpi_bxor, mpi_allgather
31 : #endif
32 : #endif
33 :
34 : USE openpmd_api, ONLY: &
35 : openpmd_attributable_type, &
36 : openpmd_dynamic_memory_view_type_1d, &
37 : openpmd_dynamic_memory_view_type_3d, &
38 : openpmd_mesh_type, &
39 : openpmd_particle_species_type, &
40 : openpmd_record_component_type, &
41 : openpmd_record_type, openpmd_type_double, openpmd_type_int
42 : USE pw_grid_types, ONLY: PW_MODE_LOCAL
43 : USE pw_types, ONLY: pw_r3d_rs_type
44 : USE util, ONLY: sort_unique
45 :
46 : #else
47 :
48 : USE pw_types, ONLY: pw_r3d_rs_type
49 : USE kinds, ONLY: dp
50 :
51 : #endif
52 :
53 : #include "../base/base_uses.f90"
54 :
55 : IMPLICIT NONE
56 :
57 : PRIVATE
58 :
59 : PUBLIC ::pw_to_openpmd
60 :
61 : #ifdef __OPENPMD
62 : CHARACTER(len=*), PARAMETER, PRIVATE :: moduleN = 'realspace_grid_openpmd'
63 : LOGICAL, PARAMETER, PRIVATE :: debug_this_module = .FALSE.
64 : LOGICAL, PRIVATE :: parses_linebreaks = .FALSE., &
65 : parse_test = .TRUE.
66 :
   ! Wrapper type so that arrays of 1d REAL write-buffer pointers can be formed
   ! (Fortran does not allow arrays of bare POINTER variables).
   TYPE cp_openpmd_write_buffer_1d
      REAL(KIND=dp), POINTER :: buffer(:)
   END TYPE cp_openpmd_write_buffer_1d
70 : #endif
71 :
72 : CONTAINS
73 :
74 : #ifdef __OPENPMD
75 : ! **************************************************************************************************
76 : !> \brief ...
77 : !> \param particles_z ...
78 : !> \param res_atom_types ...
79 : !> \param res_atom_counts ...
80 : !> \param res_len ...
81 : ! **************************************************************************************************
82 : SUBROUTINE pw_get_atom_types(particles_z, res_atom_types, res_atom_counts, res_len)
83 : INTEGER, DIMENSION(:), INTENT(IN) :: particles_z
84 : INTEGER, ALLOCATABLE, DIMENSION(:), INTENT(OUT) :: res_atom_types, res_atom_counts
85 : INTEGER, INTENT(OUT) :: res_len
86 :
87 : INTEGER :: current_atom_number, i
88 : INTEGER, ALLOCATABLE, DIMENSION(:) :: particles_z_sorted
89 : LOGICAL :: unique
90 :
91 : ALLOCATE (particles_z_sorted(SIZE(particles_z)))
92 : particles_z_sorted(:) = particles_z(:)
93 : CALL sort_unique(particles_z_sorted, unique)
94 :
95 : ALLOCATE (res_atom_types(MIN(118, SIZE(particles_z))))
96 : ALLOCATE (res_atom_counts(MIN(118, SIZE(particles_z))))
97 : current_atom_number = -1
98 : res_len = 0
99 : DO i = 1, SIZE(particles_z_sorted)
100 : IF (particles_z_sorted(i) /= current_atom_number) THEN
101 : res_len = res_len + 1
102 : current_atom_number = particles_z_sorted(i)
103 : res_atom_types(res_len) = current_atom_number
104 : res_atom_counts(res_len) = 1
105 : ELSE
106 : res_atom_counts(res_len) = res_atom_counts(res_len) + 1
107 : END IF
108 : END DO
109 :
110 : END SUBROUTINE pw_get_atom_types
111 :
112 : ! **************************************************************************************************
113 : !> \brief ...
114 : !> \param particles_z ...
115 : !> \param particles_r ...
116 : !> \param particles_zeff ...
117 : !> \param atom_type ...
118 : !> \param atom_count ...
119 : !> \param openpmd_data ...
120 : !> \param do_write_data ...
121 : ! **************************************************************************************************
122 : SUBROUTINE pw_write_particle_species( &
123 : particles_z, &
124 : particles_r, &
125 : particles_zeff, &
126 : atom_type, &
127 : atom_count, &
128 : openpmd_data, &
129 : do_write_data &
130 : )
131 : INTEGER, DIMENSION(:), INTENT(IN) :: particles_z
132 : REAL(KIND=dp), DIMENSION(:, :), INTENT(IN) :: particles_r
133 : REAL(KIND=dp), DIMENSION(:), INTENT(IN), OPTIONAL :: particles_zeff
134 : INTEGER, INTENT(IN) :: atom_type, atom_count
135 : TYPE(cp_openpmd_per_call_value_type) :: openpmd_data
136 : LOGICAL :: do_write_data
137 :
138 : CHARACTER(len=1), DIMENSION(3), PARAMETER :: dims = ["x", "y", "z"]
139 :
140 : CHARACTER(len=3) :: atom_type_as_string
141 : CHARACTER(len=default_string_length) :: species_name
142 : INTEGER :: i, j, k
143 : INTEGER, DIMENSION(1) :: global_extent, global_offset, &
144 : local_extent
145 : TYPE(cp_openpmd_write_buffer_1d) :: charge_write_buffer
146 : TYPE(cp_openpmd_write_buffer_1d), DIMENSION(3) :: write_buffers
147 : TYPE(openpmd_attributable_type) :: attr
148 : TYPE(openpmd_dynamic_memory_view_type_1d) :: unresolved_charge_write_buffer
149 : TYPE(openpmd_dynamic_memory_view_type_1d), &
150 : DIMENSION(3) :: unresolved_write_buffers
151 : TYPE(openpmd_particle_species_type) :: species
152 : TYPE(openpmd_record_component_type) :: charge_component, position_component, &
153 : position_offset_component
154 : TYPE(openpmd_record_type) :: charge, position, position_offset
155 :
156 : ! TODO: The charge is probably constant per species?
157 : ! If yes, we could use a constant component and save storage space
158 :
159 : global_extent(1) = atom_count
160 : IF (do_write_data) THEN
161 : global_offset(1) = 0
162 : local_extent(1) = atom_count
163 : ELSE
164 : global_offset(1) = 0
165 : local_extent(1) = 0
166 : END IF
167 :
168 : WRITE (atom_type_as_string, '(I3)') atom_type
169 : species_name = TRIM(openpmd_data%name_prefix)//"-"//ADJUSTL(atom_type_as_string)
170 :
171 : CALL openpmd_data%iteration%open()
172 : species = openpmd_data%iteration%get_particle_species(TRIM(species_name))
173 :
174 : position_offset = species%get_record("positionOffset")
175 : position = species%get_record("position")
176 : ! length
177 : CALL position%set_unit_dimension([1.0_dp, 0.0_dp, 0.0_dp, 0.0_dp, 0.0_dp, 0.0_dp, 0.0_dp])
178 : DO k = 1, SIZE(dims)
179 : position_offset_component = position_offset%get_component(dims(k))
180 : CALL position_offset_component%make_constant_zero(openpmd_type_int, global_extent)
181 : CALL position_offset_component%set_unit_SI(a_bohr) ! doesnt really matter as it is zero
182 : position_component = position%get_component(dims(k))
183 : CALL position_component%reset_dataset(openpmd_type_double, global_extent)
184 : CALL position_component%set_unit_SI(a_bohr)
185 : unresolved_write_buffers(k) = &
186 : position_component%store_chunk_span_1d_double(global_offset, local_extent)
187 : write_buffers(k)%buffer => unresolved_write_buffers(k)%resolve_double(DEALLOCATE=.FALSE.)
188 : END DO
189 :
190 : IF (PRESENT(particles_zeff)) THEN
191 : charge = species%get_record("charge")
192 : charge_component = charge%as_record_component()
193 : ! charge ~ time x current
194 : CALL charge%set_unit_dimension([0.0_dp, 0.0_dp, 1.0_dp, 1.0_dp, 0.0_dp, 0.0_dp, 0.0_dp])
195 : CALL charge_component%reset_dataset(openpmd_type_double, global_extent)
196 : CALL charge_component%set_unit_SI(e_charge)
197 : unresolved_charge_write_buffer = charge_component%store_chunk_span_1d_double(global_offset, local_extent)
198 : charge_write_buffer%buffer => unresolved_charge_write_buffer%resolve_double(DEALLOCATE=.FALSE.)
199 : END IF
200 :
201 : ! Resolve Spans for a second time to allow for internal reallocations in BP4 engine of ADIOS2
202 : DO k = 1, SIZE(dims)
203 : write_buffers(k)%buffer = unresolved_write_buffers(k)%resolve_double(DEALLOCATE=.TRUE.)
204 : END DO
205 : IF (PRESENT(particles_zeff)) THEN
206 : charge_write_buffer%buffer = unresolved_charge_write_buffer%resolve_double(DEALLOCATE=.TRUE.)
207 : END IF
208 : IF (do_write_data) THEN
209 : j = 1
210 : DO i = 1, SIZE(particles_z)
211 : IF (particles_z(i) == atom_type) THEN
212 : DO k = 1, 3
213 : write_buffers(k)%buffer(j) = particles_r(k, i)
214 : END DO
215 : IF (PRESENT(particles_zeff)) THEN
216 : charge_write_buffer%buffer(j) = particles_zeff(i)
217 : END IF
218 : j = j + 1
219 : END IF
220 : END DO
221 : END IF
222 : attr = openpmd_data%iteration%as_attributable()
223 : CALL attr%series_flush("hdf5.independent_stores = true")
224 : END SUBROUTINE pw_write_particle_species
225 :
226 : ! **************************************************************************************************
227 : !> \brief ...
228 : !> \param particles_z ...
229 : !> \param particles_r ...
230 : !> \param particles_zeff ...
231 : !> \param atom_types ...
232 : !> \param atom_counts ...
233 : !> \param num_atom_types ...
234 : !> \param openpmd_data ...
235 : !> \param gid ...
236 : ! **************************************************************************************************
237 : SUBROUTINE pw_write_particles( &
238 : particles_z, &
239 : particles_r, &
240 : particles_zeff, &
241 : atom_types, &
242 : atom_counts, &
243 : num_atom_types, &
244 : openpmd_data, &
245 : gid &
246 : )
247 : INTEGER, DIMENSION(:), INTENT(IN) :: particles_z
248 : REAL(KIND=dp), DIMENSION(:, :), INTENT(IN) :: particles_r
249 : REAL(KIND=dp), DIMENSION(:), INTENT(IN), OPTIONAL :: particles_zeff
250 : INTEGER, DIMENSION(:), INTENT(IN) :: atom_types, atom_counts
251 : INTEGER, INTENT(IN), TARGET :: num_atom_types
252 : TYPE(cp_openpmd_per_call_value_type) :: openpmd_data
253 : TYPE(mp_comm_type), OPTIONAL :: gid
254 :
255 : INTEGER :: i, mpi_rank
256 : LOGICAL :: do_write_data
257 :
258 : IF (PRESENT(gid)) THEN
259 : CALL gid%get_rank(mpi_rank)
260 : do_write_data = mpi_rank == 0
261 : ELSE
262 : do_write_data = .TRUE.
263 : END IF
264 : DO i = 1, num_atom_types
265 : CALL pw_write_particle_species( &
266 : particles_z, &
267 : particles_r, &
268 : particles_zeff, &
269 : atom_types(i), &
270 : atom_counts(i), &
271 : openpmd_data, &
272 : do_write_data &
273 : )
274 : END DO
275 : END SUBROUTINE pw_write_particles
276 :
   ! **************************************************************************************************
   !> \brief Write a realspace grid (and, optionally, the atoms) to an openPMD series.
   !>        The grid becomes a scalar openPMD mesh record; atoms, if given, become
   !>        one particle species per distinct atomic number.
   !> \param pw realspace grid to be written
   !> \param unit_nr unit number used as key to look up the per-call openPMD state
   !> \param title optional title (currently only copied to a local; not written out here)
   !> \param particles_r atomic positions, shape (3, natom); requires particles_z
   !> \param particles_z atomic numbers; requires particles_r
   !> \param particles_zeff effective core charges, stored as the particle "charge" record
   !> \param stride 1 or 3 positive values; only every stride-th grid point is written
   !> \param zero_tails parsed into a local flag but not (visibly) applied in this
   !>        backend — TODO confirm whether tail zeroing is intended here
   !> \param silent parsed into a local flag but not (visibly) applied here
   !> \param mpi_io if .TRUE., particle data is contributed by rank 0 only
   ! **************************************************************************************************
   SUBROUTINE pw_to_openpmd( &
      pw, &
      unit_nr, &
      title, &
      particles_r, &
      particles_z, &
      particles_zeff, &
      stride, &
      zero_tails, &
      silent, &
      mpi_io &
      )
      TYPE(pw_r3d_rs_type), INTENT(IN) :: pw
      INTEGER :: unit_nr
      CHARACTER(*), INTENT(IN), OPTIONAL :: title
      REAL(KIND=dp), DIMENSION(:, :), INTENT(IN), &
         OPTIONAL :: particles_r
      INTEGER, DIMENSION(:), INTENT(IN), OPTIONAL :: particles_z
      REAL(KIND=dp), DIMENSION(:), INTENT(IN), OPTIONAL :: particles_zeff
      INTEGER, DIMENSION(:), OPTIONAL, POINTER :: stride
      LOGICAL, INTENT(IN), OPTIONAL :: zero_tails, silent, mpi_io

      CHARACTER(len=*), PARAMETER :: routineN = 'pw_to_openpmd'

      CHARACTER(LEN=default_string_length) :: my_title
      INTEGER :: count1, count2, count3, handle, i, I1, &
                 I2, I3, iat, L1, L2, L3, my_rank, &
                 my_stride(3), np, num_atom_types, &
                 num_pe, U1, U2, U3
      INTEGER, ALLOCATABLE, DIMENSION(:) :: atom_counts, atom_types
      INTEGER, DIMENSION(3) :: global_extent, local_extent, offset
      LOGICAL :: be_silent, my_zero_tails, parallel_write
      REAL(KIND=dp), DIMENSION(3) :: grid_spacing
      REAL(KIND=dp), POINTER :: write_buffer(:, :, :)
      TYPE(cp_openpmd_per_call_value_type) :: openpmd_data
      TYPE(mp_comm_type) :: gid
      TYPE(openpmd_attributable_type) :: attr
      TYPE(openpmd_dynamic_memory_view_type_3d) :: unresolved_write_buffer
      TYPE(openpmd_mesh_type) :: mesh
      TYPE(openpmd_record_component_type) :: scalar_mesh

      CALL timeset(routineN, handle)

      ! Defaults for the optional switches
      my_zero_tails = .FALSE.
      be_silent = .FALSE.
      parallel_write = .FALSE.
      gid = pw%pw_grid%para%group
      IF (PRESENT(title)) my_title = TRIM(title)
      IF (PRESENT(zero_tails)) my_zero_tails = zero_tails
      IF (PRESENT(silent)) be_silent = silent
      IF (PRESENT(mpi_io)) parallel_write = mpi_io
      ! NOTE(review): my_title, my_zero_tails and be_silent are assigned but not
      ! used below — presumably kept for interface parity with pw_to_cube; confirm.
      my_stride = 1
      IF (PRESENT(stride)) THEN
         IF (SIZE(stride) /= 1 .AND. SIZE(stride) /= 3) &
            CALL cp_abort(__LOCATION__, "STRIDE keyword can accept only 1 "// &
                          "(the same for X,Y,Z) or 3 values. Correct your input file.")
         IF (SIZE(stride) == 1) THEN
            DO i = 1, 3
               my_stride(i) = stride(1)
            END DO
         ELSE
            my_stride = stride(1:3)
         END IF
         CPASSERT(my_stride(1) > 0)
         CPASSERT(my_stride(2) > 0)
         CPASSERT(my_stride(3) > 0)
      END IF

      ! Per-unit openPMD state (series/iteration) registered elsewhere
      openpmd_data = cp_openpmd_get_value_unit_nr(unit_nr)

      ! Atomic numbers and positions must be provided together
      CPASSERT(PRESENT(particles_z) .EQV. PRESENT(particles_r))
      np = 0
      IF (PRESENT(particles_z)) THEN
         CALL pw_get_atom_types(particles_z, atom_types, atom_counts, num_atom_types)
         CPASSERT(SIZE(particles_z) == SIZE(particles_r, dim=2))
         np = SIZE(particles_z)
      END IF

      DO i = 1, 3
         ! Notes:
         ! 1. This loses information on the rotation of the mesh, the mesh is stored
         !    without reference to a global coordinate system
         ! 2. This assumes that the coordinate system is not sheared
         grid_spacing(i) = SQRT(SUM(pw%pw_grid%dh(:, i)**2))*REAL(my_stride(i), dp)
      END DO

      ! Write the particle species. All ranks take part in the collective openPMD
      ! calls; in the parallel case only rank 0 contributes actual data.
      IF (PRESENT(particles_z)) THEN
         IF (parallel_write) THEN
            CALL pw_write_particles( &
               particles_z, &
               particles_r, &
               particles_zeff, &
               atom_types, &
               atom_counts, &
               num_atom_types, &
               openpmd_data, &
               gid &
               )
         ELSE
            CALL pw_write_particles( &
               particles_z, &
               particles_r, &
               particles_zeff, &
               atom_types, &
               atom_counts, &
               num_atom_types, &
               openpmd_data &
               )
         END IF
      END IF

      ! Map the local slab of the distributed grid to a global offset/extent pair,
      ! both measured in units of written (i.e. strided) grid points.
      DO iat = 1, 3
         global_extent(iat) = (pw%pw_grid%npts(iat) + my_stride(iat) - 1)/my_stride(iat)
         ! '- 1' for upper gaussian bracket
         offset(iat) = ((pw%pw_grid%bounds_local(1, iat) - pw%pw_grid%bounds(1, iat) + my_stride(iat) - 1)/my_stride(iat))
         ! '+ 1' because upper end is inclusive, '- 1' for upper gaussian bracket
         ! refer local_extent to the global offset first in order to have consistent rounding
         local_extent(iat) = ((pw%pw_grid%bounds_local(2, iat) + 1 - pw%pw_grid%bounds(1, iat) + my_stride(iat) - 1)/my_stride(iat))
      END DO
      local_extent = local_extent - offset

      ! Declare the mesh record and its metadata (axes, spacing, SI units)
      mesh = openpmd_data%iteration%get_mesh(TRIM(openpmd_data%name_prefix))
      CALL mesh%set_axis_labels(["x", "y", "z"])
      CALL mesh%set_position([0.5_dp, 0.5_dp, 0.5_dp])
      CALL mesh%set_grid_global_offset([0._dp, 0._dp, 0._dp])
      CALL mesh%set_grid_spacing(grid_spacing)
      CALL mesh%set_grid_unit_SI(a_bohr)
      CALL mesh%set_unit_dimension(openpmd_data%unit_dimension)
      scalar_mesh = mesh%as_record_component()
      CALL scalar_mesh%set_unit_SI(openpmd_data%unit_si)
      CALL scalar_mesh%reset_dataset(openpmd_type_double, global_extent)

      ! shortcut
      ! need to adjust L1/U1 for uneven distributions across MPI ranks
      ! (when working with a stride, we might have to skip the first n values)
      ! so keep this consistent with the offset and local_extent computed above
      ! L1 = pw%pw_grid%bounds_local(1, 1)
      L1 = pw%pw_grid%bounds(1, 1) + offset(1)*my_stride(1)
      L2 = pw%pw_grid%bounds_local(1, 2)
      L3 = pw%pw_grid%bounds_local(1, 3)
      ! offset + local_extent is the start index for the next rank already
      ! since the indexes are inclusive, subtract 1 from the boundary index
      U1 = pw%pw_grid%bounds(1, 1) + (offset(1) + local_extent(1) - 1)*my_stride(1)
      U2 = pw%pw_grid%bounds_local(2, 2)
      U3 = pw%pw_grid%bounds_local(2, 3)
      ! NOTE(review): only direction 1 receives the stride-consistent L/U
      ! adjustment; directions 2/3 use bounds_local directly — presumably the
      ! grid is distributed along direction 1 here, confirm against pw_grid layout.

      my_rank = pw%pw_grid%para%group%mepos
      num_pe = pw%pw_grid%para%group%num_pe
      ! NOTE(review): my_rank/num_pe are currently unused below — confirm intent.

      IF (ALL(my_stride == 1)) THEN
         ! Fast path: the local slab is written in a single contiguous chunk
         CALL scalar_mesh%store_chunk(pw%array(L1:U1, L2:U2, L3:U3), offset)
         ! Are there some conditions under which we can skip this flush?
         attr = openpmd_data%iteration%as_attributable()
         CALL attr%series_flush("hdf5.independent_stores = false")
      ELSE
         ! Strided path: copy plane by plane into spans handed out by openPMD
         count3 = 0
         DO I3 = L3, U3, my_stride(3)
            ! maybe add an overload to provide `buf` here for HDF5, might have better performance
            ! for intermittent flushing
            ! or just call the buffer in the outer function if memory is no problem...
            unresolved_write_buffer = scalar_mesh%store_chunk_span_3d_double( &
                                      [offset(1), offset(2), offset(3) + count3], &
                                      [local_extent(1), local_extent(2), 1])
            write_buffer => unresolved_write_buffer%resolve_double(DEALLOCATE=.TRUE.)

            ! Sanity checks: ensure buffer is associated and matches expected shape
            CPASSERT(ASSOCIATED(write_buffer))
            CPASSERT(SIZE(write_buffer, 1) == local_extent(1))
            CPASSERT(SIZE(write_buffer, 2) == local_extent(2))
            CPASSERT(SIZE(write_buffer, 3) == 1)

            count2 = 0
            DO I2 = L2, U2, my_stride(2)
               ! This loop deals with ray (:, count2, count3) of the local subspace
               ! The write buffer itself has been allocated for slice (:, :, count3)
               count1 = 0
               DO I1 = L1, U1, my_stride(1)
                  write_buffer(count1 + 1, count2 + 1, 1) = pw%array(I1, I2, I3)
                  ! Debug: print the target indices in write_buffer to the command line
                  ! WRITE(*,*) 'write_buffer index:', count1 + 1, ',', count2 + 1, ',', 1
                  count1 = count1 + 1
               END DO
               count2 = count2 + 1
            END DO
            count3 = count3 + 1
         END DO
      END IF

      CALL timestop(handle)

   END SUBROUTINE pw_to_openpmd
481 :
482 : #else
483 :
   ! **************************************************************************************************
   !> \brief Stub used when CP2K is built without the openPMD-api (__OPENPMD not
   !>        defined): aborts immediately with an informative error message.
   !> \param pw unused in this stub
   !> \param unit_nr unused in this stub
   !> \param title unused in this stub
   !> \param particles_r unused in this stub
   !> \param particles_z unused in this stub
   !> \param particles_zeff unused in this stub
   !> \param stride unused in this stub
   !> \param zero_tails unused in this stub
   !> \param silent unused in this stub
   !> \param mpi_io unused in this stub
   ! **************************************************************************************************
   SUBROUTINE pw_to_openpmd( &
      pw, &
      unit_nr, &
      title, &
      particles_r, &
      particles_z, &
      particles_zeff, &
      stride, &
      zero_tails, &
      silent, &
      mpi_io &
      )
      TYPE(pw_r3d_rs_type), INTENT(IN) :: pw
      INTEGER :: unit_nr
      CHARACTER(*), INTENT(IN), OPTIONAL :: title
      REAL(KIND=dp), DIMENSION(:, :), INTENT(IN), &
         OPTIONAL :: particles_r
      INTEGER, DIMENSION(:), INTENT(IN), OPTIONAL :: particles_z
      REAL(KIND=dp), DIMENSION(:), INTENT(IN), OPTIONAL :: particles_zeff
      INTEGER, DIMENSION(:), OPTIONAL, POINTER :: stride
      LOGICAL, INTENT(IN), OPTIONAL :: zero_tails, silent, mpi_io

      ! Silence unused-dummy-argument compiler warnings for this stub
      MARK_USED(pw)
      MARK_USED(unit_nr)
      MARK_USED(title)
      MARK_USED(particles_r)
      MARK_USED(particles_z)
      MARK_USED(particles_zeff)
      MARK_USED(stride)
      MARK_USED(zero_tails)
      MARK_USED(silent)
      MARK_USED(mpi_io)
      CPABORT("CP2K compiled without the openPMD-api")

   END SUBROUTINE pw_to_openpmd
532 :
533 : #endif
534 :
535 : END MODULE realspace_grid_openpmd
|