Line data Source code
1 : !--------------------------------------------------------------------------------------------------!
2 : ! CP2K: A general program to perform molecular dynamics simulations !
3 : ! Copyright 2000-2024 CP2K developers group <https://cp2k.org> !
4 : ! !
5 : ! SPDX-License-Identifier: GPL-2.0-or-later !
6 : !--------------------------------------------------------------------------------------------------!
7 :
8 : MODULE qs_tddfpt2_subgroups
9 : USE admm_types, ONLY: admm_type,&
10 : get_admm_env
11 : USE atomic_kind_types, ONLY: atomic_kind_type
12 : USE basis_set_types, ONLY: get_gto_basis_set,&
13 : gto_basis_set_type
14 : USE cell_types, ONLY: cell_type
15 : USE cp_blacs_env, ONLY: cp_blacs_env_create,&
16 : cp_blacs_env_release,&
17 : cp_blacs_env_type
18 : USE cp_control_types, ONLY: dft_control_type,&
19 : qs_control_type,&
20 : tddfpt2_control_type
21 : USE cp_dbcsr_api, ONLY: dbcsr_create,&
22 : dbcsr_distribution_release,&
23 : dbcsr_distribution_type,&
24 : dbcsr_get_info,&
25 : dbcsr_release,&
26 : dbcsr_type
27 : USE cp_dbcsr_cp2k_link, ONLY: cp_dbcsr_alloc_block_from_nbl
28 : USE cp_dbcsr_operations, ONLY: cp_dbcsr_dist2d_to_dist
29 : USE cp_fm_struct, ONLY: cp_fm_struct_create,&
30 : cp_fm_struct_release,&
31 : cp_fm_struct_type
32 : USE cp_fm_types, ONLY: cp_fm_copy_general,&
33 : cp_fm_create,&
34 : cp_fm_get_info,&
35 : cp_fm_release,&
36 : cp_fm_type
37 : USE distribution_1d_types, ONLY: distribution_1d_type
38 : USE distribution_2d_types, ONLY: distribution_2d_release,&
39 : distribution_2d_type
40 : USE distribution_methods, ONLY: distribute_molecules_2d
41 : USE hartree_local_methods, ONLY: init_coulomb_local
42 : USE hartree_local_types, ONLY: hartree_local_create,&
43 : hartree_local_release,&
44 : hartree_local_type
45 : USE input_constants, ONLY: tddfpt_kernel_full,&
46 : tddfpt_kernel_none,&
47 : tddfpt_kernel_stda
48 : USE input_section_types, ONLY: section_vals_type,&
49 : section_vals_val_get
50 : USE kinds, ONLY: default_string_length,&
51 : dp
52 : USE message_passing, ONLY: mp_para_env_release,&
53 : mp_para_env_type
54 : USE molecule_kind_types, ONLY: molecule_kind_type
55 : USE molecule_types, ONLY: molecule_type
56 : USE particle_types, ONLY: particle_type
57 : USE pw_env_methods, ONLY: pw_env_create,&
58 : pw_env_rebuild
59 : USE pw_env_types, ONLY: pw_env_release,&
60 : pw_env_retain,&
61 : pw_env_type
62 : USE qs_environment_types, ONLY: get_qs_env,&
63 : qs_environment_type
64 : USE qs_kind_types, ONLY: get_qs_kind,&
65 : qs_kind_type
66 : USE qs_ks_types, ONLY: qs_ks_env_type
67 : USE qs_local_rho_types, ONLY: local_rho_set_create,&
68 : local_rho_set_release,&
69 : local_rho_type
70 : USE qs_neighbor_list_types, ONLY: neighbor_list_set_p_type,&
71 : release_neighbor_list_sets
72 : USE qs_neighbor_lists, ONLY: atom2d_build,&
73 : atom2d_cleanup,&
74 : build_neighbor_lists,&
75 : local_atoms_type,&
76 : pair_radius_setup
77 : USE qs_rho0_ggrid, ONLY: rho0_s_grid_create
78 : USE qs_rho0_methods, ONLY: init_rho0
79 : USE qs_rho_atom_methods, ONLY: allocate_rho_atom_internals
80 : USE task_list_methods, ONLY: generate_qs_task_list
81 : USE task_list_types, ONLY: allocate_task_list,&
82 : deallocate_task_list,&
83 : task_list_type
84 : #include "./base/base_uses.f90"
85 :
86 : IMPLICIT NONE
87 :
88 : PRIVATE
89 :
90 : CHARACTER(len=*), PARAMETER, PRIVATE :: moduleN = 'qs_tddfpt2_subgroups'
91 : LOGICAL, PARAMETER, PRIVATE :: debug_this_module = .TRUE.
92 :
93 : PUBLIC :: tddfpt_subgroup_env_type
94 : PUBLIC :: tddfpt_sub_env_init, tddfpt_sub_env_release
95 : PUBLIC :: tddfpt_dbcsr_create_by_dist, tddfpt_fm_replicate_across_subgroups
96 :
! **************************************************************************************************
!> \brief Parallel (sub)group environment.
!> \par History
!>   * 01.2017 created [Sergey Chulkov]
! **************************************************************************************************
   TYPE tddfpt_subgroup_env_type
      !> indicates that the global MPI communicator has been split into subgroups; if it is .FALSE.
      !> certain components of the structure (blacs_env, para_env, admm_A, and mos_occ)
      !> can still be accessed; in this case they simply point to the corresponding global variables
      LOGICAL :: is_split = .FALSE.
      !> number of parallel groups (set by the communicator split; only meaningful when
      !> is_split is .TRUE.)
      INTEGER :: ngroups = -1
      !> group_distribution(0:ngroups-1) : a process with rank 'i' belongs to the parallel group
      !> with index 'group_distribution(i)'
      INTEGER, DIMENSION(:), ALLOCATABLE :: group_distribution
      !> group-specific BLACS parallel environment
      TYPE(cp_blacs_env_type), POINTER :: blacs_env => NULL()
      !> group-specific MPI parallel environment
      TYPE(mp_para_env_type), POINTER :: para_env => NULL()
      !> occupied MOs stored in a matrix form [nao x nmo_occ(spin)] distributed across processes
      !> in the parallel group
      TYPE(cp_fm_type), ALLOCATABLE, DIMENSION(:) :: mos_occ
      !> group-specific copy of the ADMM A matrix 'admm_type%A'
      TYPE(cp_fm_type), POINTER :: admm_A => NULL()
      !
      !> indicates that a set of multi-grids has been allocated; if it is .FALSE. all the components
      !> below point to the corresponding global variables and can be accessed
      LOGICAL :: is_mgrid = .FALSE.
      !> group-specific DBCSR distribution
      TYPE(dbcsr_distribution_type), POINTER :: dbcsr_dist => NULL()
      !> group-specific two-dimensional distribution of pairs of particles
      TYPE(distribution_2d_type), POINTER :: dist_2d => NULL()
      !> group-specific plane wave environment
      TYPE(pw_env_type), POINTER :: pw_env => NULL()
      !> lists of neighbours in auxiliary and primary basis sets
      TYPE(neighbor_list_set_p_type), &
         DIMENSION(:), POINTER :: sab_aux_fit => NULL(), sab_orb => NULL()
      !> task lists in auxiliary and primary basis sets
      TYPE(task_list_type), POINTER :: task_list_aux_fit => NULL(), task_list_orb => NULL()
      !> soft task lists in auxiliary and primary basis sets (built for GAPW / GAPW_XC only)
      TYPE(task_list_type), POINTER :: task_list_aux_fit_soft => NULL(), task_list_orb_soft => NULL()
      !> GAPW local atomic grids
      TYPE(hartree_local_type), POINTER :: hartree_local => NULL()
      !> GAPW local rho set allocated for the primary (qs_kind_set) basis
      TYPE(local_rho_type), POINTER :: local_rho_set => NULL()
      !> GAPW local rho set allocated for the auxiliary ADMM (admm_kind_set) basis
      TYPE(local_rho_type), POINTER :: local_rho_set_admm => NULL()
   END TYPE tddfpt_subgroup_env_type
143 :
! **************************************************************************************************
!> \brief Structure to save global multi-grid related parameters.
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   TYPE mgrid_saved_parameters
      !> create commensurate grids
      LOGICAL :: commensurate_mgrids = .FALSE.
      !> create real-space grids
      LOGICAL :: realspace_mgrids = .FALSE.
      !> do not perform load balancing
      LOGICAL :: skip_load_balance = .FALSE.
      !> cutoff value at the finest grid level
      REAL(KIND=dp) :: cutoff = 0.0_dp
      !> inverse scale factor
      REAL(KIND=dp) :: progression_factor = 0.0_dp
      !> relative cutoff
      REAL(KIND=dp) :: relative_cutoff = 0.0_dp
      !> list of explicitly given cutoff values; the saved pointer target is re-attached
      !> (not copied) when the global parameters are restored by restore_qs_mgrid()
      REAL(KIND=dp), DIMENSION(:), POINTER :: e_cutoff => NULL()
   END TYPE mgrid_saved_parameters
166 :
167 : CONTAINS
168 :
! **************************************************************************************************
!> \brief Split MPI communicator to create a set of parallel (sub)groups.
!> \param sub_env parallel group environment (initialised on exit)
!> \param qs_env Quickstep environment
!> \param mos_occ ground state molecular orbitals in primary atomic basis set
!> \param kernel Type of kernel (full/sTDA/none) that will be used
!> \par History
!>   * 01.2017 (sub)group-related code has been moved here from the main subroutine tddfpt()
!>             [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_sub_env_init(sub_env, qs_env, mos_occ, kernel)
      TYPE(tddfpt_subgroup_env_type), INTENT(out)        :: sub_env
      TYPE(qs_environment_type), POINTER                 :: qs_env
      TYPE(cp_fm_type), DIMENSION(:), INTENT(in)         :: mos_occ
      INTEGER, INTENT(in)                                :: kernel

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_sub_env_init'

      INTEGER                                            :: handle, ispin, nao, nao_aux, natom, &
                                                            nmo_occ, nspins
      TYPE(admm_type), POINTER                           :: admm_env
      TYPE(atomic_kind_type), DIMENSION(:), POINTER      :: atomic_kind_set
      TYPE(cp_blacs_env_type), POINTER                   :: blacs_env_global
      TYPE(cp_fm_struct_type), POINTER                   :: fm_struct
      TYPE(dft_control_type), POINTER                    :: dft_control
      TYPE(mgrid_saved_parameters)                       :: mgrid_saved
      TYPE(mp_para_env_type), POINTER                    :: para_env_global
      TYPE(pw_env_type), POINTER                         :: pw_env_global
      TYPE(qs_control_type), POINTER                     :: qs_control
      TYPE(qs_kind_type), DIMENSION(:), POINTER          :: qs_kind_set
      TYPE(tddfpt2_control_type), POINTER                :: tddfpt_control

      CALL timeset(routineN, handle)

      nspins = SIZE(mos_occ)

      CALL get_qs_env(qs_env, blacs_env=blacs_env_global, dft_control=dft_control, &
                      para_env=para_env_global, pw_env=pw_env_global)

      tddfpt_control => dft_control%tddfpt2_control
      qs_control => dft_control%qs_control

      ! ++ split mpi communicator if
      !    a) the requested number of processors per group > 0
      !       (means that the split has been requested explicitly), and
      !    b) the number of subgroups is >= 2
      sub_env%is_split = tddfpt_control%nprocs > 0 .AND. tddfpt_control%nprocs*2 <= para_env_global%num_pe

      ALLOCATE (sub_env%mos_occ(nspins))
      NULLIFY (sub_env%admm_A)

      IF (sub_env%is_split) THEN
         ALLOCATE (sub_env%group_distribution(0:para_env_global%num_pe - 1))

         ALLOCATE (sub_env%para_env)
         CALL sub_env%para_env%from_split(comm=para_env_global, ngroups=sub_env%ngroups, &
                                          group_distribution=sub_env%group_distribution, subgroup_min_size=tddfpt_control%nprocs)

         ! ++ create a new parallel environment based on the given sub-communicator)
         NULLIFY (sub_env%blacs_env)

         ! use the default (SQUARE) BLACS grid layout and non-repeatable BLACS collective operations
         ! by omitting optional parameters 'blacs_grid_layout' and 'blacs_repeatable'.
         ! Ideally we should take these parameters from the variables globenv%blacs_grid_layout and
         ! globenv%blacs_repeatable, however the global environment is not available
         ! from the subroutine 'qs_energies_properties'.
         CALL cp_blacs_env_create(sub_env%blacs_env, sub_env%para_env)

         NULLIFY (fm_struct)

         ! create group-local copies of the occupied MOs on the subgroup's BLACS context and
         ! fill them by replicating the globally distributed matrices across the subgroups
         DO ispin = 1, nspins
            CALL cp_fm_get_info(mos_occ(ispin), nrow_global=nao, ncol_global=nmo_occ)
            CALL cp_fm_struct_create(fm_struct, nrow_global=nao, ncol_global=nmo_occ, context=sub_env%blacs_env)
            CALL cp_fm_create(sub_env%mos_occ(ispin), fm_struct)
            CALL cp_fm_struct_release(fm_struct)
            CALL tddfpt_fm_replicate_across_subgroups(fm_src=mos_occ(ispin), &
                                                      fm_dest_sub=sub_env%mos_occ(ispin), sub_env=sub_env)
         END DO

         ! same for the ADMM A matrix: allocate a group-local copy and replicate into it
         IF (dft_control%do_admm) THEN
            CALL get_qs_env(qs_env, admm_env=admm_env)
            CALL cp_fm_get_info(admm_env%A, nrow_global=nao_aux, ncol_global=nao)
            CALL cp_fm_struct_create(fm_struct, nrow_global=nao_aux, ncol_global=nao, context=sub_env%blacs_env)
            ALLOCATE (sub_env%admm_A)
            CALL cp_fm_create(sub_env%admm_A, fm_struct)
            CALL cp_fm_struct_release(fm_struct)
            CALL tddfpt_fm_replicate_across_subgroups(fm_src=admm_env%A, fm_dest_sub=sub_env%admm_A, sub_env=sub_env)
         END IF
      ELSE
         ! no split requested: reuse the global environments/matrices; retain() bumps the
         ! reference count so that tddfpt_sub_env_release() can release them unconditionally
         CALL para_env_global%retain()
         sub_env%para_env => para_env_global

         CALL blacs_env_global%retain()
         sub_env%blacs_env => blacs_env_global

         sub_env%mos_occ(:) = mos_occ(:)

         IF (dft_control%do_admm) THEN
            CALL get_qs_env(qs_env, admm_env=admm_env)
            sub_env%admm_A => admm_env%A
         END IF
      END IF

      IF (kernel == tddfpt_kernel_full) THEN
         ! ++ allocate a new plane wave environment
         ! group-specific multi-grids are needed either because the communicator was split or
         ! because an explicit TDDFPT/MGRID section was given in the input
         sub_env%is_mgrid = sub_env%is_split .OR. tddfpt_control%mgrid_is_explicit

         NULLIFY (sub_env%dbcsr_dist, sub_env%dist_2d)
         NULLIFY (sub_env%sab_orb, sub_env%sab_aux_fit)
         NULLIFY (sub_env%task_list_orb, sub_env%task_list_aux_fit)
         NULLIFY (sub_env%task_list_orb_soft, sub_env%task_list_aux_fit_soft)

         IF (sub_env%is_mgrid) THEN
            ! temporarily override the global grid parameters with the TDDFPT/MGRID values
            ! so that pw_env_rebuild() picks them up; restored via restore_qs_mgrid() below
            IF (tddfpt_control%mgrid_is_explicit) &
               CALL init_tddfpt_mgrid(qs_control, tddfpt_control, mgrid_saved)

            NULLIFY (sub_env%pw_env)

            CALL pw_env_create(sub_env%pw_env)
            CALL pw_env_rebuild(sub_env%pw_env, qs_env, sub_env%para_env)

            ! build group-specific distributions, neighbour lists, and task lists;
            ! these are owned by sub_env and freed in tddfpt_sub_env_release()
            CALL tddfpt_build_distribution_2d(distribution_2d=sub_env%dist_2d, dbcsr_dist=sub_env%dbcsr_dist, &
                                              blacs_env=sub_env%blacs_env, qs_env=qs_env)
            CALL tddfpt_build_tasklist(task_list=sub_env%task_list_orb, sab=sub_env%sab_orb, basis_type="ORB", &
                                       distribution_2d=sub_env%dist_2d, pw_env=sub_env%pw_env, qs_env=qs_env, &
                                       soft_valid=.FALSE., skip_load_balance=qs_control%skip_load_balance_distributed, &
                                       reorder_grid_ranks=.TRUE.)
            IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
               CALL tddfpt_build_tasklist(task_list=sub_env%task_list_orb_soft, sab=sub_env%sab_orb, basis_type="ORB", &
                                          distribution_2d=sub_env%dist_2d, pw_env=sub_env%pw_env, qs_env=qs_env, &
                                          soft_valid=.TRUE., skip_load_balance=qs_control%skip_load_balance_distributed, &
                                          reorder_grid_ranks=.TRUE.)
            END IF

            IF (dft_control%do_admm) THEN
               CALL tddfpt_build_tasklist(task_list=sub_env%task_list_aux_fit, sab=sub_env%sab_aux_fit, &
                                          basis_type="AUX_FIT", distribution_2d=sub_env%dist_2d, &
                                          pw_env=sub_env%pw_env, qs_env=qs_env, soft_valid=.FALSE., &
                                          skip_load_balance=qs_control%skip_load_balance_distributed, &
                                          reorder_grid_ranks=.FALSE.)
               IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
                  CALL tddfpt_build_tasklist(task_list=sub_env%task_list_aux_fit_soft, sab=sub_env%sab_aux_fit, &
                                             basis_type="AUX_FIT", distribution_2d=sub_env%dist_2d, &
                                             pw_env=sub_env%pw_env, qs_env=qs_env, soft_valid=.TRUE., &
                                             skip_load_balance=qs_control%skip_load_balance_distributed, &
                                             reorder_grid_ranks=.FALSE.)
               END IF
            END IF

            ! put the ground-state grid parameters back
            IF (tddfpt_control%mgrid_is_explicit) &
               CALL restore_qs_mgrid(qs_control, mgrid_saved)
         ELSE
            ! reuse the global plane-wave environment and ground-state lists (reference counted)
            CALL pw_env_retain(pw_env_global)
            sub_env%pw_env => pw_env_global

            CALL get_qs_env(qs_env, dbcsr_dist=sub_env%dbcsr_dist, &
                            sab_orb=sub_env%sab_orb, task_list=sub_env%task_list_orb)
            IF (dft_control%do_admm) THEN
               CALL get_admm_env(admm_env, sab_aux_fit=sub_env%sab_aux_fit, &
                                 task_list_aux_fit=sub_env%task_list_aux_fit)
               IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
                  ! borrowed from admm_env; must NOT be deallocated by the release routine
                  sub_env%task_list_aux_fit_soft => admm_env%admm_gapw_env%task_list
               END IF
            END IF
            IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
               CALL get_qs_env(qs_env, task_list_soft=sub_env%task_list_orb_soft)
            END IF
         END IF

         ! GAPW initializations
         IF (dft_control%qs_control%gapw) THEN
            CALL get_qs_env(qs_env, &
                            atomic_kind_set=atomic_kind_set, &
                            natom=natom, &
                            qs_kind_set=qs_kind_set)

            CALL local_rho_set_create(sub_env%local_rho_set)
            CALL allocate_rho_atom_internals(sub_env%local_rho_set%rho_atom_set, atomic_kind_set, &
                                             qs_kind_set, dft_control, sub_env%para_env)

            CALL init_rho0(sub_env%local_rho_set, qs_env, dft_control%qs_control%gapw_control, &
                           zcore=0.0_dp)
            CALL rho0_s_grid_create(sub_env%pw_env, sub_env%local_rho_set%rho0_mpole)
            CALL hartree_local_create(sub_env%hartree_local)
            CALL init_coulomb_local(sub_env%hartree_local, natom)
         ELSEIF (dft_control%qs_control%gapw_xc) THEN
            ! GAPW_XC needs only the local rho set (no compensation charges / local Coulomb)
            CALL get_qs_env(qs_env, &
                            atomic_kind_set=atomic_kind_set, &
                            qs_kind_set=qs_kind_set)
            CALL local_rho_set_create(sub_env%local_rho_set)
            CALL allocate_rho_atom_internals(sub_env%local_rho_set%rho_atom_set, atomic_kind_set, &
                                             qs_kind_set, dft_control, sub_env%para_env)
         END IF

         ! ADMM/GAPW: a separate local rho set using the ADMM kind set
         IF (dft_control%do_admm) THEN
            IF (dft_control%qs_control%gapw .OR. dft_control%qs_control%gapw_xc) THEN
               CALL get_qs_env(qs_env, atomic_kind_set=atomic_kind_set)
               CALL local_rho_set_create(sub_env%local_rho_set_admm)
               CALL allocate_rho_atom_internals(sub_env%local_rho_set_admm%rho_atom_set, atomic_kind_set, &
                                                admm_env%admm_gapw_env%admm_kind_set, &
                                                dft_control, sub_env%para_env)
            END IF
         END IF

      ELSE IF (kernel == tddfpt_kernel_stda) THEN
         ! sTDA kernel: no real-space grids needed; only the DBCSR distribution and the
         ! primary-basis neighbour list are borrowed from the ground state
         sub_env%is_mgrid = .FALSE.
         NULLIFY (sub_env%dbcsr_dist, sub_env%dist_2d)
         NULLIFY (sub_env%sab_orb, sub_env%sab_aux_fit)
         NULLIFY (sub_env%task_list_orb, sub_env%task_list_orb_soft, sub_env%task_list_aux_fit)
         NULLIFY (sub_env%pw_env)
         IF (sub_env%is_split) THEN
            CPABORT('Subsys option not available')
         ELSE
            CALL get_qs_env(qs_env, dbcsr_dist=sub_env%dbcsr_dist, sab_orb=sub_env%sab_orb)
         END IF
      ELSE IF (kernel == tddfpt_kernel_none) THEN
         ! no kernel: same minimal setup as sTDA
         sub_env%is_mgrid = .FALSE.
         NULLIFY (sub_env%dbcsr_dist, sub_env%dist_2d)
         NULLIFY (sub_env%sab_orb, sub_env%sab_aux_fit)
         NULLIFY (sub_env%task_list_orb, sub_env%task_list_orb_soft, sub_env%task_list_aux_fit)
         NULLIFY (sub_env%pw_env)
         IF (sub_env%is_split) THEN
            CPABORT('Subsys option not available')
         ELSE
            CALL get_qs_env(qs_env, dbcsr_dist=sub_env%dbcsr_dist, sab_orb=sub_env%sab_orb)
         END IF
      ELSE
         CPABORT("Unknown kernel type")
      END IF

      CALL timestop(handle)

   END SUBROUTINE tddfpt_sub_env_init
403 :
! **************************************************************************************************
!> \brief Release parallel group environment
!> \param sub_env parallel group environment (modified on exit)
!> \par History
!>   * 01.2017 created [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_sub_env_release(sub_env)
      TYPE(tddfpt_subgroup_env_type), INTENT(inout)      :: sub_env

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_sub_env_release'

      INTEGER                                            :: handle, i

      CALL timeset(routineN, handle)

      IF (sub_env%is_mgrid) THEN
         ! in the is_mgrid case all task lists were allocated by tddfpt_build_tasklist() and
         ! are owned by sub_env. Note that task_list_aux_fit_soft (built for ADMM with
         ! GAPW/GAPW_XC) must be released here as well, otherwise it leaks.
         IF (ASSOCIATED(sub_env%task_list_aux_fit)) &
            CALL deallocate_task_list(sub_env%task_list_aux_fit)

         IF (ASSOCIATED(sub_env%task_list_aux_fit_soft)) &
            CALL deallocate_task_list(sub_env%task_list_aux_fit_soft)

         IF (ASSOCIATED(sub_env%task_list_orb)) &
            CALL deallocate_task_list(sub_env%task_list_orb)

         IF (ASSOCIATED(sub_env%task_list_orb_soft)) &
            CALL deallocate_task_list(sub_env%task_list_orb_soft)

         CALL release_neighbor_list_sets(sub_env%sab_aux_fit)
         CALL release_neighbor_list_sets(sub_env%sab_orb)

         IF (ASSOCIATED(sub_env%dbcsr_dist)) THEN
            CALL dbcsr_distribution_release(sub_env%dbcsr_dist)
            DEALLOCATE (sub_env%dbcsr_dist)
         END IF

         IF (ASSOCIATED(sub_env%dist_2d)) &
            CALL distribution_2d_release(sub_env%dist_2d)
      END IF

      ! GAPW
      IF (ASSOCIATED(sub_env%local_rho_set)) THEN
         CALL local_rho_set_release(sub_env%local_rho_set)
      END IF
      IF (ASSOCIATED(sub_env%hartree_local)) THEN
         CALL hartree_local_release(sub_env%hartree_local)
      END IF
      IF (ASSOCIATED(sub_env%local_rho_set_admm)) THEN
         CALL local_rho_set_release(sub_env%local_rho_set_admm)
      END IF

      ! if TDDFPT-specific plane-wave environment has not been requested,
      ! the pointers sub_env%dbcsr_dist, sub_env%sab_*, and sub_env%task_list_*
      ! point to the corresponding ground-state variables from qs_env
      ! and should not be deallocated

      CALL pw_env_release(sub_env%pw_env)

      sub_env%is_mgrid = .FALSE.

      ! admm_A is a group-local copy only when the communicator was split;
      ! otherwise it points to the global admm_env%A and must be left alone
      IF (sub_env%is_split .AND. ASSOCIATED(sub_env%admm_A)) THEN
         CALL cp_fm_release(sub_env%admm_A)
         DEALLOCATE (sub_env%admm_A)
         NULLIFY (sub_env%admm_A)
      END IF

      ! likewise, mos_occ hold group-local matrices only in the split case
      IF (sub_env%is_split) THEN
         DO i = SIZE(sub_env%mos_occ), 1, -1
            CALL cp_fm_release(sub_env%mos_occ(i))
         END DO
      END IF
      DEALLOCATE (sub_env%mos_occ)

      ! blacs_env/para_env are reference counted, so this is safe in both cases
      CALL cp_blacs_env_release(sub_env%blacs_env)
      CALL mp_para_env_release(sub_env%para_env)

      IF (ALLOCATED(sub_env%group_distribution)) &
         DEALLOCATE (sub_env%group_distribution)

      sub_env%is_split = .FALSE.

      CALL timestop(handle)

   END SUBROUTINE tddfpt_sub_env_release
485 :
! **************************************************************************************************
!> \brief Replace the global multi-grid related parameters in qs_control by the ones given in the
!>        TDDFPT/MGRID subsection. The original parameters are stored into the 'mgrid_saved'
!>        variable.
!> \param qs_control     Quickstep control parameters (modified on exit)
!> \param tddfpt_control TDDFPT control parameters
!> \param mgrid_saved    structure to hold global MGRID-related parameters (initialised on exit)
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
!> \note the code to build the 'e_cutoff' list was taken from the subroutine read_mgrid_section()
! **************************************************************************************************
   SUBROUTINE init_tddfpt_mgrid(qs_control, tddfpt_control, mgrid_saved)
      TYPE(qs_control_type), POINTER                     :: qs_control
      TYPE(tddfpt2_control_type), POINTER                :: tddfpt_control
      TYPE(mgrid_saved_parameters), INTENT(out)          :: mgrid_saved

      CHARACTER(LEN=*), PARAMETER :: routineN = 'init_tddfpt_mgrid'

      INTEGER                                            :: handle, ig, ng
      LOGICAL                                            :: found

      CALL timeset(routineN, handle)

      ! remember the ground-state plane-wave grid parameters so that restore_qs_mgrid()
      ! can put them back afterwards; the e_cutoff pointer is saved, not copied
      mgrid_saved%commensurate_mgrids = qs_control%commensurate_mgrids
      mgrid_saved%realspace_mgrids = qs_control%realspace_mgrids
      mgrid_saved%skip_load_balance = qs_control%skip_load_balance_distributed
      mgrid_saved%cutoff = qs_control%cutoff
      mgrid_saved%progression_factor = qs_control%progression_factor
      mgrid_saved%relative_cutoff = qs_control%relative_cutoff
      mgrid_saved%e_cutoff => qs_control%e_cutoff

      ! make the TDDFPT/MGRID values the defaults for all newly allocated plane-wave grids
      qs_control%commensurate_mgrids = tddfpt_control%mgrid_commensurate_mgrids
      qs_control%realspace_mgrids = tddfpt_control%mgrid_realspace_mgrids
      qs_control%skip_load_balance_distributed = tddfpt_control%mgrid_skip_load_balance
      qs_control%cutoff = tddfpt_control%mgrid_cutoff
      qs_control%progression_factor = tddfpt_control%mgrid_progression_factor
      qs_control%relative_cutoff = tddfpt_control%mgrid_relative_cutoff

      ng = tddfpt_control%mgrid_ngrids
      ALLOCATE (qs_control%e_cutoff(ng))

      IF (ASSOCIATED(tddfpt_control%mgrid_e_cutoff)) THEN
         ! explicit list of cutoffs; following read_mgrid_section() there is a magic
         ! scale factor there (0.5_dp)
         qs_control%e_cutoff(1:ng) = 0.5_dp*tddfpt_control%mgrid_e_cutoff(1:ng)

         ! round 'qs_control%cutoff' upward to the nearest sub-grid's cutoff value;
         ! here we take advantage of the fact that the array 'e_cutoff' has been
         ! sorted in descending order
         found = .FALSE.
         DO ig = ng, 1, -1
            IF (qs_control%cutoff <= qs_control%e_cutoff(ig)) THEN
               qs_control%cutoff = qs_control%e_cutoff(ig)
               found = .TRUE.
               EXIT
            END IF
         END DO
         ! qs_control%cutoff exceeds the largest manually provided cutoff value;
         ! fall back to the largest actual value
         IF (.NOT. found) &
            qs_control%cutoff = qs_control%e_cutoff(1)
      ELSE
         ! no explicit list: geometric progression starting from the finest grid
         qs_control%e_cutoff(1) = qs_control%cutoff
         DO ig = 2, ng
            qs_control%e_cutoff(ig) = qs_control%e_cutoff(ig - 1)/qs_control%progression_factor
         END DO
      END IF

      CALL timestop(handle)
   END SUBROUTINE init_tddfpt_mgrid
554 :
! **************************************************************************************************
!> \brief Restore the global multi-grid related parameters stored in the 'mgrid_saved' variable.
!> \param qs_control  Quickstep control parameters (modified on exit)
!> \param mgrid_saved structure that holds global MGRID-related parameters
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE restore_qs_mgrid(qs_control, mgrid_saved)
      TYPE(qs_control_type), POINTER                     :: qs_control
      TYPE(mgrid_saved_parameters), INTENT(in)           :: mgrid_saved

      CHARACTER(LEN=*), PARAMETER :: routineN = 'restore_qs_mgrid'

      INTEGER                                            :: handle

      CALL timeset(routineN, handle)

      ! drop the TDDFPT-specific cutoff list (allocated by init_tddfpt_mgrid)
      ! before re-attaching the saved global one
      IF (ASSOCIATED(qs_control%e_cutoff)) &
         DEALLOCATE (qs_control%e_cutoff)
      qs_control%e_cutoff => mgrid_saved%e_cutoff

      ! put back every scalar parameter captured by init_tddfpt_mgrid()
      qs_control%cutoff = mgrid_saved%cutoff
      qs_control%relative_cutoff = mgrid_saved%relative_cutoff
      qs_control%progression_factor = mgrid_saved%progression_factor
      qs_control%commensurate_mgrids = mgrid_saved%commensurate_mgrids
      qs_control%realspace_mgrids = mgrid_saved%realspace_mgrids
      qs_control%skip_load_balance_distributed = mgrid_saved%skip_load_balance

      CALL timestop(handle)
   END SUBROUTINE restore_qs_mgrid
585 :
! **************************************************************************************************
!> \brief Distribute atoms across the two-dimensional grid of processors.
!> \param distribution_2d new two-dimensional distribution of pairs of particles
!>                        (allocated and initialised on exit)
!> \param dbcsr_dist      new DBCSR distribution (allocated and initialised on exit)
!> \param blacs_env       BLACS parallel environment
!> \param qs_env          Quickstep environment
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_build_distribution_2d(distribution_2d, dbcsr_dist, blacs_env, qs_env)
      TYPE(distribution_2d_type), POINTER                :: distribution_2d
      TYPE(dbcsr_distribution_type), POINTER             :: dbcsr_dist
      TYPE(cp_blacs_env_type), POINTER                   :: blacs_env
      TYPE(qs_environment_type), POINTER                 :: qs_env

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_build_distribution_2d'

      INTEGER                                            :: handle
      TYPE(atomic_kind_type), DIMENSION(:), POINTER      :: kinds
      TYPE(cell_type), POINTER                           :: unit_cell
      TYPE(molecule_kind_type), DIMENSION(:), POINTER    :: mol_kinds
      TYPE(molecule_type), DIMENSION(:), POINTER         :: molecules
      TYPE(particle_type), DIMENSION(:), POINTER         :: particles
      TYPE(qs_kind_type), DIMENSION(:), POINTER          :: qs_kinds
      TYPE(section_vals_type), POINTER                   :: force_env_section

      CALL timeset(routineN, handle)

      ! gather everything distribute_molecules_2d() needs from the Quickstep environment
      CALL get_qs_env(qs_env, atomic_kind_set=kinds, cell=unit_cell, input=force_env_section, &
                      molecule_kind_set=mol_kinds, molecule_set=molecules, &
                      particle_set=particles, qs_kind_set=qs_kinds)

      ! create a fresh two-dimensional particle-pair distribution on the given BLACS grid
      NULLIFY (distribution_2d)
      CALL distribute_molecules_2d(cell=unit_cell, &
                                   atomic_kind_set=kinds, &
                                   particle_set=particles, &
                                   qs_kind_set=qs_kinds, &
                                   molecule_kind_set=mol_kinds, &
                                   molecule_set=molecules, &
                                   distribution_2d=distribution_2d, &
                                   blacs_env=blacs_env, &
                                   force_env_section=force_env_section)

      ! derive the matching DBCSR distribution from the new 2-d distribution
      ALLOCATE (dbcsr_dist)
      CALL cp_dbcsr_dist2d_to_dist(distribution_2d, dbcsr_dist)

      CALL timestop(handle)
   END SUBROUTINE tddfpt_build_distribution_2d
636 :
! **************************************************************************************************
!> \brief Build task and neighbour lists for the given plane wave environment and basis set.
!> \param task_list          new task list (allocated and initialised on exit)
!> \param sab                new list of neighbours (allocated and initialised on exit)
!> \param basis_type         type of the basis set
!> \param distribution_2d    two-dimensional distribution of pairs of particles
!> \param pw_env             plane wave environment
!> \param qs_env             Quickstep environment
!> \param soft_valid         generate a task list for soft basis functions (GAPW, GAPW_XC)
!> \param skip_load_balance  do not perform load balancing
!> \param reorder_grid_ranks re-optimise grid ranks and re-create the real-space grid descriptor
!>                           as well as grids
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_build_tasklist(task_list, sab, basis_type, distribution_2d, pw_env, qs_env, &
                                    soft_valid, skip_load_balance, reorder_grid_ranks)
      TYPE(task_list_type), POINTER                      :: task_list
      TYPE(neighbor_list_set_p_type), DIMENSION(:), &
         POINTER                                         :: sab
      CHARACTER(len=*), INTENT(in)                       :: basis_type
      TYPE(distribution_2d_type), POINTER                :: distribution_2d
      TYPE(pw_env_type), POINTER                         :: pw_env
      TYPE(qs_environment_type), POINTER                 :: qs_env
      LOGICAL, INTENT(in)                                :: soft_valid, skip_load_balance, &
                                                            reorder_grid_ranks

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_build_tasklist'

      INTEGER                                            :: handle, ik, nk
      LOGICAL, ALLOCATABLE, DIMENSION(:)                 :: have_basis
      REAL(kind=dp)                                      :: subcells
      REAL(kind=dp), ALLOCATABLE, DIMENSION(:)           :: kind_radii
      REAL(kind=dp), ALLOCATABLE, DIMENSION(:, :)        :: pair_radii
      TYPE(atomic_kind_type), DIMENSION(:), POINTER      :: kinds
      TYPE(cell_type), POINTER                           :: unit_cell
      TYPE(distribution_1d_type), POINTER                :: local_particles
      TYPE(gto_basis_set_type), POINTER                  :: basis_set
      TYPE(local_atoms_type), ALLOCATABLE, DIMENSION(:)  :: atoms_2d
      TYPE(molecule_type), DIMENSION(:), POINTER         :: molecules
      TYPE(particle_type), DIMENSION(:), POINTER         :: particles
      TYPE(qs_kind_type), DIMENSION(:), POINTER          :: qs_kinds
      TYPE(qs_ks_env_type), POINTER                      :: ks_env
      TYPE(section_vals_type), POINTER                   :: input

      CALL timeset(routineN, handle)

      CALL get_qs_env(qs_env, atomic_kind_set=kinds, cell=unit_cell, input=input, &
                      ks_env=ks_env, local_particles=local_particles, molecule_set=molecules, &
                      particle_set=particles, qs_kind_set=qs_kinds)

      nk = SIZE(kinds)

      ! map the atoms onto the given two-dimensional distribution
      ALLOCATE (atoms_2d(nk))
      CALL atom2d_build(atoms_2d, local_particles, distribution_2d, kinds, &
                        molecules, molecule_only=.FALSE., particle_set=particles)

      ! collect, per atomic kind, whether the requested basis exists and its interaction radius
      ALLOCATE (have_basis(nk), kind_radii(nk))
      ALLOCATE (pair_radii(nk, nk))

      DO ik = 1, nk
         CALL get_qs_kind(qs_kinds(ik), basis_set=basis_set, basis_type=basis_type)
         have_basis(ik) = ASSOCIATED(basis_set)
         IF (have_basis(ik)) THEN
            CALL get_gto_basis_set(gto_basis_set=basis_set, kind_radius=kind_radii(ik))
         ELSE
            kind_radii(ik) = 0.0_dp
         END IF
      END DO

      CALL pair_radius_setup(have_basis, have_basis, kind_radii, kind_radii, pair_radii)

      ! build the neighbour list of overlapping basis-function pairs
      NULLIFY (sab)
      CALL section_vals_val_get(input, "DFT%SUBCELLS", r_val=subcells)
      CALL build_neighbor_lists(sab, particles, atoms_2d, unit_cell, pair_radii, &
                                mic=.FALSE., subcells=subcells, molecular=.FALSE., nlname="sab_orb")

      CALL atom2d_cleanup(atoms_2d)
      DEALLOCATE (atoms_2d, have_basis, kind_radii, pair_radii)

      ! finally, generate the task list on the grids of the supplied plane-wave environment
      CALL allocate_task_list(task_list)
      CALL generate_qs_task_list(ks_env, task_list, &
                                 reorder_rs_grid_ranks=reorder_grid_ranks, soft_valid=soft_valid, &
                                 basis_type=basis_type, skip_load_balance_distributed=skip_load_balance, &
                                 pw_env_external=pw_env, sab_orb_external=sab)

      CALL timestop(handle)
   END SUBROUTINE tddfpt_build_tasklist
728 :
729 : ! **************************************************************************************************
730 : !> \brief Create a DBCSR matrix based on a template matrix, distribution object, and the list of
731 : !> neighbours.
732 : !> \param matrix matrix to create
733 : !> \param template template matrix
734 : !> \param dbcsr_dist DBCSR distribution
735 : !> \param sab list of neighbours
736 : !> \par History
737 : !> * 09.2016 created [Sergey Chulkov]
738 : !> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
739 : ! **************************************************************************************************
740 2120 : SUBROUTINE tddfpt_dbcsr_create_by_dist(matrix, template, dbcsr_dist, sab)
741 : TYPE(dbcsr_type), POINTER :: matrix, template
742 : TYPE(dbcsr_distribution_type), POINTER :: dbcsr_dist
743 : TYPE(neighbor_list_set_p_type), DIMENSION(:), &
744 : POINTER :: sab
745 :
746 : CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_dbcsr_create_by_dist'
747 :
748 : CHARACTER :: matrix_type
749 : CHARACTER(len=default_string_length) :: matrix_name
750 : INTEGER :: handle
751 2120 : INTEGER, DIMENSION(:), POINTER :: col_blk_sizes, row_blk_sizes
752 :
753 2120 : CALL timeset(routineN, handle)
754 :
755 2120 : CPASSERT(ASSOCIATED(template))
756 : CALL dbcsr_get_info(template, row_blk_size=row_blk_sizes, col_blk_size=col_blk_sizes, &
757 2120 : name=matrix_name, matrix_type=matrix_type)
758 :
759 2120 : IF (ASSOCIATED(matrix)) THEN
760 1916 : CALL dbcsr_release(matrix)
761 : ELSE
762 204 : ALLOCATE (matrix)
763 : END IF
764 :
765 2120 : CALL dbcsr_create(matrix, matrix_name, dbcsr_dist, matrix_type, row_blk_sizes, col_blk_sizes, nze=0)
766 2120 : CALL cp_dbcsr_alloc_block_from_nbl(matrix, sab)
767 :
768 2120 : CALL timestop(handle)
769 :
770 2120 : END SUBROUTINE tddfpt_dbcsr_create_by_dist
771 :
772 : ! **************************************************************************************************
773 : !> \brief Replicate a globally distributed matrix across all sub-groups. At the end
774 : !> every sub-group will hold a local copy of the original globally distributed matrix.
775 : !>
776 : !> |--------------------|
777 : !> fm_src | 0 1 2 3 |
778 : !> |--------------------|
779 : !> / MPI ranks \
780 : !> |/_ _\|
781 : !> |--------------------| |--------------------|
782 : !> fm_dest_subgroup0 | 0 1 | | 2 3 | fm_dest_subgroup1
783 : !> |--------------------| |--------------------|
784 : !> subgroup 0 subgroup 1
785 : !>
786 : !> \param fm_src globally distributed matrix to replicate
787 : !> \param fm_dest_sub subgroup-specific copy of the replicated matrix
788 : !> \param sub_env subgroup environment
789 : !> \par History
790 : !> * 09.2016 created [Sergey Chulkov]
791 : !> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
792 : ! **************************************************************************************************
793 4 : SUBROUTINE tddfpt_fm_replicate_across_subgroups(fm_src, fm_dest_sub, sub_env)
794 : TYPE(cp_fm_type), INTENT(IN) :: fm_src, fm_dest_sub
795 : TYPE(tddfpt_subgroup_env_type), INTENT(in) :: sub_env
796 :
797 : CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_fm_replicate_across_subgroups'
798 :
799 : INTEGER :: handle, igroup, igroup_local, ncols_global_dest, ncols_global_src, ngroups, &
800 : nrows_global_dest, nrows_global_src
801 : TYPE(cp_blacs_env_type), POINTER :: blacs_env_global
802 : TYPE(cp_fm_type) :: fm_null
803 : TYPE(mp_para_env_type), POINTER :: para_env_global
804 :
805 16 : IF (sub_env%is_split) THEN
806 4 : CALL timeset(routineN, handle)
807 :
808 : CALL cp_fm_get_info(fm_src, nrow_global=nrows_global_src, ncol_global=ncols_global_src, &
809 4 : context=blacs_env_global, para_env=para_env_global)
810 4 : CALL cp_fm_get_info(fm_dest_sub, nrow_global=nrows_global_dest, ncol_global=ncols_global_dest)
811 :
812 : IF (debug_this_module) THEN
813 4 : CPASSERT(nrows_global_src == nrows_global_dest)
814 4 : CPASSERT(ncols_global_src == ncols_global_dest)
815 : END IF
816 :
817 4 : igroup_local = sub_env%group_distribution(para_env_global%mepos)
818 4 : ngroups = sub_env%ngroups
819 :
820 12 : DO igroup = 0, ngroups - 1
821 12 : IF (igroup == igroup_local) THEN
822 4 : CALL cp_fm_copy_general(fm_src, fm_dest_sub, para_env_global)
823 : ELSE
824 4 : CALL cp_fm_copy_general(fm_src, fm_null, para_env_global)
825 : END IF
826 : END DO
827 :
828 4 : CALL timestop(handle)
829 : END IF
830 4 : END SUBROUTINE tddfpt_fm_replicate_across_subgroups
831 0 : END MODULE qs_tddfpt2_subgroups
832 :
|