      program unsteady_ex
      use initialize
      use coordinates
      use maps
      use boundaries
      use elements
      use buildU
      use matrix_ops
      use output
      use d1ops
      use globaldata
      use dss
      use par_dss
      use forces
      use winds
      use stokes_solvers
      implicit none
!
!  Base include file for Fortran use of the PETSc package.
!
!  (C) 1993 by Argonne National Laboratory and Mississippi State University.
!      All rights reserved.  See COPYRIGHT in top-level directory.
!
!  User include file for MPI programs, with no dependencies.
!
!  It really is not possible to make a perfect include file that can
!  be used by both F77 and F90 compilers, but this is close.  We have
!  removed continuation lines (which allows free-form input in F90);
!  systems whose Fortran compilers support ! instead of just C or * for
!  comments can globally replace a C in the first column with !; the
!  resulting file should work for both Fortran 77 and Fortran 90.
!
!  If your Fortran compiler supports ! for comments, you can run this
!  through sed with
!      sed -e 's/^C/\!/g'
!
!  We have also removed the use of contractions (involving the single-quote
!  character) because some users use .F instead of .f files (to invoke the
!  cpp preprocessor), and their preprocessor is determined to find matching
!  single-quote pairs (and probably double quotes; given the different
!  rules in C and Fortran, this sounds like a disaster).  Rather than take
!  the position that the poor users should get a better system, we have
!  removed the text that caused problems.  Of course, the users SHOULD
!  get a better system...
!
!  Return codes
      INTEGER MPI_SUCCESS,MPI_ERR_BUFFER,MPI_ERR_COUNT,MPI_ERR_TYPE
      INTEGER MPI_ERR_TAG,MPI_ERR_COMM,MPI_ERR_RANK,MPI_ERR_ROOT
      INTEGER MPI_ERR_GROUP
      INTEGER MPI_ERR_OP,MPI_ERR_TOPOLOGY,MPI_ERR_DIMS,MPI_ERR_ARG
      INTEGER MPI_ERR_UNKNOWN,MPI_ERR_TRUNCATE,MPI_ERR_OTHER
      INTEGER MPI_ERR_INTERN,MPI_ERR_IN_STATUS,MPI_ERR_PENDING
      INTEGER MPI_ERR_REQUEST, MPI_ERR_LASTCODE
      PARAMETER (MPI_SUCCESS=0,MPI_ERR_BUFFER=1,MPI_ERR_COUNT=2)
      PARAMETER (MPI_ERR_TYPE=3,MPI_ERR_TAG=4,MPI_ERR_COMM=5)
      PARAMETER (MPI_ERR_RANK=6,MPI_ERR_ROOT=7,MPI_ERR_GROUP=8)
      PARAMETER (MPI_ERR_OP=9,MPI_ERR_TOPOLOGY=10,MPI_ERR_DIMS=11)
      PARAMETER (MPI_ERR_ARG=12,MPI_ERR_UNKNOWN=13)
      PARAMETER (MPI_ERR_TRUNCATE=14,MPI_ERR_OTHER=15)
      PARAMETER (MPI_ERR_INTERN=16,MPI_ERR_IN_STATUS=17)
      PARAMETER (MPI_ERR_PENDING=18,MPI_ERR_REQUEST=19)
      PARAMETER (MPI_ERR_LASTCODE=1073741823)
!
      INTEGER MPI_UNDEFINED
      parameter (MPI_UNDEFINED = (-32766))
!
      INTEGER MPI_GRAPH, MPI_CART
      PARAMETER (MPI_GRAPH = 1, MPI_CART = 2)
      INTEGER MPI_PROC_NULL
      PARAMETER ( MPI_PROC_NULL = (-1) )
!
      INTEGER MPI_BSEND_OVERHEAD
      PARAMETER ( MPI_BSEND_OVERHEAD = 512 )
      INTEGER MPI_SOURCE, MPI_TAG, MPI_ERROR
      PARAMETER (MPI_SOURCE=2, MPI_TAG=3, MPI_ERROR=4)
      INTEGER MPI_STATUS_SIZE
      PARAMETER (MPI_STATUS_SIZE=4)
      INTEGER MPI_MAX_PROCESSOR_NAME, MPI_MAX_ERROR_STRING
      PARAMETER (MPI_MAX_PROCESSOR_NAME=256)
      PARAMETER (MPI_MAX_ERROR_STRING=512)
      INTEGER MPI_MAX_NAME_STRING
      PARAMETER (MPI_MAX_NAME_STRING=63)
!
      INTEGER MPI_COMM_NULL
      PARAMETER (MPI_COMM_NULL=0)
!
      INTEGER MPI_DATATYPE_NULL
      PARAMETER (MPI_DATATYPE_NULL = 0)
      INTEGER MPI_ERRHANDLER_NULL
      PARAMETER (MPI_ERRHANDLER_NULL = 0)
      INTEGER MPI_GROUP_NULL
      PARAMETER (MPI_GROUP_NULL = 0)
      INTEGER MPI_KEYVAL_INVALID
      PARAMETER (MPI_KEYVAL_INVALID = 0)
      INTEGER MPI_REQUEST_NULL
      PARAMETER (MPI_REQUEST_NULL = 0)
!
      INTEGER MPI_IDENT, MPI_CONGRUENT, MPI_SIMILAR, MPI_UNEQUAL
      PARAMETER (MPI_IDENT=0, MPI_CONGRUENT=1, MPI_SIMILAR=2)
      PARAMETER (MPI_UNEQUAL=3)
!
!  MPI_BOTTOM needs to be a known address; here we put it at the
!  beginning of the common block.  The point-to-point and collective
!  routines know about MPI_BOTTOM, but MPI_TYPE_STRUCT as yet does not.
!
!  MPI_STATUS_IGNORE and MPI_STATUSES_IGNORE are similar objects.
!  Until the underlying MPI library implements the C version of these
!  (a null pointer), these are declared as arrays of MPI_STATUS_SIZE.
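!
!  For example, a receive whose status will not be examined can pass
!  the constant directly; a sketch (with this header the constant is an
!  ordinary status array unless the library implements the MPI-2
!  null-pointer form):
!
!      call MPI_RECV(buf, n, MPI_INTEGER, src, tag, comm, MPI_STATUS_IGNORE, ierr)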
!
!  The types MPI_INTEGER1,2,4 and MPI_REAL4,8 are OPTIONAL.
!  Their values are zero if they are not available.  Note that
!  using these reduces the portability of code (though it may enhance
!  portability between Crays and other systems).
!
      INTEGER MPI_TAG_UB, MPI_HOST, MPI_IO
      INTEGER MPI_BOTTOM
      INTEGER MPI_STATUS_IGNORE(MPI_STATUS_SIZE)
      INTEGER MPI_STATUSES_IGNORE(MPI_STATUS_SIZE)
      INTEGER MPI_INTEGER, MPI_REAL, MPI_DOUBLE_PRECISION
      INTEGER MPI_COMPLEX, MPI_DOUBLE_COMPLEX,MPI_LOGICAL
      INTEGER MPI_CHARACTER, MPI_BYTE, MPI_2INTEGER, MPI_2REAL
      INTEGER MPI_2DOUBLE_PRECISION, MPI_2COMPLEX, MPI_2DOUBLE_COMPLEX
      INTEGER MPI_UB, MPI_LB
      INTEGER MPI_PACKED, MPI_WTIME_IS_GLOBAL
      INTEGER MPI_COMM_WORLD, MPI_COMM_SELF, MPI_GROUP_EMPTY
      INTEGER MPI_SUM, MPI_MAX, MPI_MIN, MPI_PROD, MPI_LAND, MPI_BAND
      INTEGER MPI_LOR, MPI_BOR, MPI_LXOR, MPI_BXOR, MPI_MINLOC
      INTEGER MPI_MAXLOC
      INTEGER MPI_OP_NULL
      INTEGER MPI_ERRORS_ARE_FATAL, MPI_ERRORS_RETURN
!
      PARAMETER (MPI_ERRORS_ARE_FATAL=119)
      PARAMETER (MPI_ERRORS_RETURN=120)
!
      PARAMETER (MPI_COMPLEX=23,MPI_DOUBLE_COMPLEX=24,MPI_LOGICAL=25)
      PARAMETER (MPI_REAL=26,MPI_DOUBLE_PRECISION=27,MPI_INTEGER=28)
      PARAMETER (MPI_2INTEGER=29,MPI_2COMPLEX=30,MPI_2DOUBLE_COMPLEX=31)
      PARAMETER (MPI_2REAL=32,MPI_2DOUBLE_PRECISION=33,MPI_CHARACTER=1)
      PARAMETER (MPI_BYTE=3,MPI_UB=16,MPI_LB=15,MPI_PACKED=14)
      INTEGER MPI_ORDER_C, MPI_ORDER_FORTRAN
      PARAMETER (MPI_ORDER_C=56, MPI_ORDER_FORTRAN=57)
      INTEGER MPI_DISTRIBUTE_BLOCK, MPI_DISTRIBUTE_CYCLIC
      INTEGER MPI_DISTRIBUTE_NONE, MPI_DISTRIBUTE_DFLT_DARG
      PARAMETER (MPI_DISTRIBUTE_BLOCK=121, MPI_DISTRIBUTE_CYCLIC=122)
      PARAMETER (MPI_DISTRIBUTE_NONE=123)
      PARAMETER (MPI_DISTRIBUTE_DFLT_DARG=-49767)
      INTEGER MPI_MAX_INFO_KEY, MPI_MAX_INFO_VAL
      PARAMETER (MPI_MAX_INFO_KEY=255, MPI_MAX_INFO_VAL=1024)
      INTEGER MPI_INFO_NULL
      PARAMETER (MPI_INFO_NULL=0)
!
!  Optional Fortran Types.  Configure attempts to determine these.
!
      INTEGER MPI_INTEGER1, MPI_INTEGER2, MPI_INTEGER4, MPI_INTEGER8
      INTEGER MPI_INTEGER16
      INTEGER MPI_REAL4, MPI_REAL8, MPI_REAL16
      INTEGER MPI_COMPLEX8, MPI_COMPLEX16, MPI_COMPLEX32
      PARAMETER (MPI_INTEGER1=1,MPI_INTEGER2=4)
      PARAMETER (MPI_INTEGER4=6)
      PARAMETER (MPI_INTEGER8=13)
      PARAMETER (MPI_INTEGER16=0)
      PARAMETER (MPI_REAL4=10)
      PARAMETER (MPI_REAL8=11)
      PARAMETER (MPI_REAL16=0)
      PARAMETER (MPI_COMPLEX8=23)
      PARAMETER (MPI_COMPLEX16=24)
      PARAMETER (MPI_COMPLEX32=0)
!
!  This is now handled with either the "pointer" extension or this same
!  code, appended at the end.
!COMMON /MPIPRIV/ MPI_BOTTOM,MPI_STATUS_IGNORE,MPI_STATUSES_IGNORE
!C
!C Without this save, some Fortran implementations may make the common
!C dynamic!
!C
!C For a Fortran90 module, we might replace /MPIPRIV/ with a simple
!C SAVE MPI_BOTTOM
!C
!      SAVE /MPIPRIV/
!
      PARAMETER (MPI_MAX=100,MPI_MIN=101,MPI_SUM=102,MPI_PROD=103)
      PARAMETER (MPI_LAND=104,MPI_BAND=105,MPI_LOR=106,MPI_BOR=107)
      PARAMETER (MPI_LXOR=108,MPI_BXOR=109,MPI_MINLOC=110)
      PARAMETER (MPI_MAXLOC=111, MPI_OP_NULL=0)
!
      PARAMETER (MPI_GROUP_EMPTY=90,MPI_COMM_WORLD=91,MPI_COMM_SELF=92)
      PARAMETER (MPI_TAG_UB=80,MPI_HOST=82,MPI_IO=84)
      PARAMETER (MPI_WTIME_IS_GLOBAL=86)
!
      INTEGER MPI_ANY_SOURCE
      PARAMETER (MPI_ANY_SOURCE = (-2))
      INTEGER MPI_ANY_TAG
      PARAMETER (MPI_ANY_TAG = (-1))
!
      INTEGER MPI_VERSION, MPI_SUBVERSION
      PARAMETER (MPI_VERSION = 1, MPI_SUBVERSION = 2)
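!
!  Because the optional Fortran types above are zero when unavailable,
!  portable code can test them and fall back to a standard type; a
!  minimal sketch:
!
!      if (MPI_REAL8 .ne. 0) then
!         call MPI_SEND(buf, n, MPI_REAL8, dest, tag, comm, ierr)
!      else
!         call MPI_SEND(buf, n, MPI_DOUBLE_PRECISION, dest, tag, comm, ierr)
!      endif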
!
!  There are additional MPI-2 constants.
      INTEGER MPI_ADDRESS_KIND, MPI_OFFSET_KIND
      PARAMETER (MPI_ADDRESS_KIND=4)
      PARAMETER (MPI_OFFSET_KIND=8)
!
!  All other MPI routines are subroutines.
!  This may cause some Fortran compilers to complain about defined and
!  not used.  Such compilers should be improved.
!
!  Some Fortran compilers will not link programs that contain
!  external statements to routines that are not provided, even if
!  the routine is never called.  Remove PMPI_WTIME and PMPI_WTICK
!  if you have trouble with them.
!
      DOUBLE PRECISION MPI_WTIME, MPI_WTICK,PMPI_WTIME,PMPI_WTICK
      EXTERNAL MPI_WTIME, MPI_WTICK,PMPI_WTIME,PMPI_WTICK
!
!  The attribute copy/delete subroutines are symbols that can be passed
!  to MPI routines.
!
      EXTERNAL MPI_NULL_COPY_FN, MPI_NULL_DELETE_FN, MPI_DUP_FN
      COMMON /MPIPRIV/ MPI_BOTTOM,MPI_STATUS_IGNORE,MPI_STATUSES_IGNORE
!
!  Without this save, some Fortran implementations may make the common
!  dynamic!
!
!  For a Fortran90 module, we might replace /MPIPRIV/ with a simple
!  SAVE MPI_BOTTOM
!
      SAVE /MPIPRIV/
!
!  $Id: mpiof.h.in,v 1.3 1999/08/06 18:33:09 thakur Exp $
!
!  Copyright (C) 1997 University of Chicago.
!  See COPYRIGHT notice in top-level directory.
!
!  User include file for Fortran MPI-IO programs.
!
      INTEGER MPI_MODE_RDONLY, MPI_MODE_RDWR, MPI_MODE_WRONLY
      INTEGER MPI_MODE_DELETE_ON_CLOSE, MPI_MODE_UNIQUE_OPEN
      INTEGER MPI_MODE_CREATE, MPI_MODE_EXCL
      INTEGER MPI_MODE_APPEND, MPI_MODE_SEQUENTIAL
      PARAMETER (MPI_MODE_RDONLY=2, MPI_MODE_RDWR=8, MPI_MODE_WRONLY=4)
      PARAMETER (MPI_MODE_CREATE=1, MPI_MODE_DELETE_ON_CLOSE=16)
      PARAMETER (MPI_MODE_UNIQUE_OPEN=32, MPI_MODE_EXCL=64)
      PARAMETER (MPI_MODE_APPEND=128, MPI_MODE_SEQUENTIAL=256)
!
      INTEGER MPI_FILE_NULL
      PARAMETER (MPI_FILE_NULL=0)
!
      INTEGER MPI_MAX_DATAREP_STRING
      PARAMETER (MPI_MAX_DATAREP_STRING=128)
!
      INTEGER MPI_SEEK_SET, MPI_SEEK_CUR, MPI_SEEK_END
      PARAMETER (MPI_SEEK_SET=600, MPI_SEEK_CUR=602, MPI_SEEK_END=604)
!
      INTEGER MPIO_REQUEST_NULL
      PARAMETER (MPIO_REQUEST_NULL=0)
!
      integer MPI_DISPLACEMENT_CURRENT
      PARAMETER (MPI_DISPLACEMENT_CURRENT=-54278278)
!
!  Part of the base include file for Fortran use of PETSc.
!  Note: This file should contain only define statements and
!  not the declaration of variables.
!
!  No spaces for #defines, as some compilers (PGI) also add
!  those additional spaces during preprocessing - bad for fixed format.
!
!  The real*8,complex*16 notation is used so that the
!  PETSc double/complex variables are not affected by
!  compiler options like -r4,-r8, sometimes invoked
!  by the user.  The NAG compiler does not like integer*4,real*8.
!
!  Macro for templating between real and complex.
!
!  Allows the matrix Fortran kernels to work with single-precision
!  matrix data structures.
!
!  PetscLogDouble variables are used to contain double-precision numbers
!  that are not used in the numerical computations, but rather in logging,
!  timing, etc.
!
!  Macros for error checking.
!
! ------------------------------------------------------------------------
!  Non-common-block stuff declared first.
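!
!  The error-checking macros noted above do not survive into this
!  expanded source, so Fortran code tests the returned ierr directly;
!  a minimal sketch:
!
!      call PetscInitialize(PETSC_NULL_CHARACTER, ierr)
!      if (ierr .ne. 0) then
!         print *, 'PetscInitialize failed with code ', ierr
!         stop
!      endif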
!
!  Flags
!
      integer PETSC_TRUE,PETSC_FALSE
      integer PETSC_YES,PETSC_NO
      parameter (PETSC_TRUE = 1,PETSC_FALSE = 0)
      parameter (PETSC_YES=1, PETSC_NO=0)

      integer(kind=selected_int_kind(5)) PETSC_DECIDE,PETSC_DETERMINE
      parameter (PETSC_DECIDE=-1,PETSC_DETERMINE=-1)

      integer(kind=selected_int_kind(5)) PETSC_DEFAULT_INTEGER
      parameter (PETSC_DEFAULT_INTEGER = -2)

      real(kind=selected_real_kind(10)) PETSC_DEFAULT_DOUBLE_PRECISION
      parameter (PETSC_DEFAULT_DOUBLE_PRECISION=-2.0d0)

      integer PETSC_FP_TRAP_OFF,PETSC_FP_TRAP_ON
      parameter (PETSC_FP_TRAP_OFF = 0,PETSC_FP_TRAP_ON = 1)
!
!  Default PetscViewers.
!
      integer(kind=selected_int_kind(5)) PETSC_VIEWER_DRAW_WORLD
      integer(kind=selected_int_kind(5)) PETSC_VIEWER_DRAW_SELF
      integer(kind=selected_int_kind(5)) PETSC_VIEWER_SOCKET_WORLD
      integer(kind=selected_int_kind(5)) PETSC_VIEWER_SOCKET_SELF
      integer(kind=selected_int_kind(5)) PETSC_VIEWER_STDOUT_WORLD
      integer(kind=selected_int_kind(5)) PETSC_VIEWER_STDOUT_SELF
      integer(kind=selected_int_kind(5)) PETSC_VIEWER_STDERR_WORLD
      integer(kind=selected_int_kind(5)) PETSC_VIEWER_STDERR_SELF
      integer(kind=selected_int_kind(5)) PETSC_VIEWER_BINARY_WORLD
      integer(kind=selected_int_kind(5)) PETSC_VIEWER_BINARY_SELF
      integer(kind=selected_int_kind(5)) PETSC_VIEWER_MATLAB_WORLD
      integer(kind=selected_int_kind(5)) PETSC_VIEWER_MATLAB_SELF
!
!  The numbers used below should match those in
!  src/fortran/custom/zpetsc.h
!
      parameter (PETSC_VIEWER_DRAW_WORLD   = -4)
      parameter (PETSC_VIEWER_DRAW_SELF    = -5)
      parameter (PETSC_VIEWER_SOCKET_WORLD = -6)
      parameter (PETSC_VIEWER_SOCKET_SELF  = -7)
      parameter (PETSC_VIEWER_STDOUT_WORLD = -8)
      parameter (PETSC_VIEWER_STDOUT_SELF  = -9)
      parameter (PETSC_VIEWER_STDERR_WORLD = -10)
      parameter (PETSC_VIEWER_STDERR_SELF  = -11)
      parameter (PETSC_VIEWER_BINARY_WORLD = -12)
      parameter (PETSC_VIEWER_BINARY_SELF  = -13)
      parameter (PETSC_VIEWER_MATLAB_WORLD = -14)
      parameter (PETSC_VIEWER_MATLAB_SELF  = -15)
!
!  PETSc DataTypes
!
      integer PETSC_INT,PETSC_DOUBLE,PETSC_COMPLEX
      integer PETSC_LONG,PETSC_SHORT,PETSC_FLOAT
      integer PETSC_CHAR,PETSC_LOGICAL
      parameter (PETSC_INT=0,PETSC_DOUBLE=1,PETSC_COMPLEX=2)
      parameter (PETSC_LONG=3,PETSC_SHORT=4,PETSC_FLOAT=5)
      parameter (PETSC_CHAR=6,PETSC_LOGICAL=7)
!
! ------------------------------------------------------------------------
!  PETSc mathematics include file.  Defines certain basic mathematical
!  constants and functions for working with single- and double-precision
!  floating-point numbers as well as complex and integers.
!
!  Representation of complex i
!
      complex(kind=selected_real_kind(10)) PETSC_i
      parameter (PETSC_i = (0.0d0,1.0d0))
!
!  Basic constants
!
      real(kind=selected_real_kind(10)) PETSC_PI
      real(kind=selected_real_kind(10)) PETSC_DEGREES_TO_RADIANS
      real(kind=selected_real_kind(10)) PETSC_MAX
      real(kind=selected_real_kind(10)) PETSC_MIN
      parameter (PETSC_PI = 3.14159265358979323846264d0)
      parameter (PETSC_DEGREES_TO_RADIANS = 0.01745329251994d0)
      parameter (PETSC_MAX = 1.d300,PETSC_MIN = -1.d300)

      real(kind=selected_real_kind(10)) PETSC_MACHINE_EPSILON
      real(kind=selected_real_kind(10)) PETSC_SQRT_MACHINE_EPSILON
      real(kind=selected_real_kind(10)) PETSC_SMALL
      parameter (PETSC_MACHINE_EPSILON = 1.d-14)
      parameter (PETSC_SQRT_MACHINE_EPSILON = 1.d-7)
      parameter (PETSC_SMALL = 1.d-10)
!
! ----------------------------------------------------------------------------
!  BEGIN COMMON-BLOCK VARIABLES
!
!  PETSc world communicator
!
      integer PETSC_COMM_WORLD,PETSC_COMM_SELF
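!
!  PETSC_COMM_WORLD is a common-block variable rather than a parameter
!  because it is set at runtime by PetscInitialize; a sketch (the name
!  my_subcomm is hypothetical) of running PETSc on a subcommunicator:
!
!      PETSC_COMM_WORLD = my_subcomm    ! optionally, before PetscInitialize
!      call PetscInitialize(PETSC_NULL_CHARACTER, ierr)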
!
!  Fortran Null
!
      character(len = 80) :: PETSC_NULL_CHARACTER
      integer(kind=selected_int_kind(5)) PETSC_NULL_INTEGER
      real(kind=selected_real_kind(10)) PETSC_NULL_DOUBLE
      integer(kind=selected_int_kind(5)) PETSC_NULL
      integer(kind=selected_int_kind(5)) PETSC_NULL_OBJECT
!
!  A PETSC_NULL_FUNCTION pointer
!
      external PETSC_NULL_FUNCTION
      real(kind=selected_real_kind(10)) PETSC_NULL_SCALAR
      real(kind=selected_real_kind(10)) PETSC_NULL_REAL
!
!  Common blocks to store some of the PETSc constants,
!  which can be set only at runtime.
!
!  A string should be in a different common block.
!
      common /petscfortran1/ PETSC_NULL_CHARACTER
      common /petscfortran2/ PETSC_NULL_INTEGER
      common /petscfortran3/ PETSC_NULL
      common /petscfortran4/ PETSC_NULL_SCALAR
      common /petscfortran5/ PETSC_NULL_DOUBLE
      common /petscfortran6/ PETSC_NULL_REAL
      common /petscfortran7/ PETSC_COMM_WORLD,PETSC_COMM_SELF
      common /petscfortran8/ PETSC_NULL_OBJECT
!
!  Possible arguments to PetscPushErrorHandler()
!
      external PETSCTRACEBACKERRORHANDLER
      external PETSCABORTERRORHANDLER
      external PETSCEMACSCLIENTERRORHANDLER
      external PETSCATTACHDEBUGGERERRORHANDLER
      external PETSCIGNOREERRORHANDLER
!
!  END COMMON-BLOCK VARIABLES
! ----------------------------------------------------------------------------

!     Local variables
      integer n,i,j
      real(kind=8) dt,gam
      integer nsteps
      type(coord) coords
      type(coord) proc_coords
      type(solution) GUXY,LUXY
      real(kind=8),allocatable, dimension(:):: force,wind
      integer nelements
      integer nely,nelx
      integer twod
      integer nglobaldofs
      integer irc,myrank,nprocs, ierr, status(MPI_STATUS_SIZE)
      character(len=32) :: fname   ! buffer for the per-rank output filename

      call mpi_init(ierr)

!     Get myrank.
      call mpi_comm_rank(MPI_COMM_WORLD,myrank,ierr)
      call mpi_comm_size(MPI_COMM_WORLD,nprocs,ierr)

      call PetscInitialize( PETSC_NULL_CHARACTER, ierr )
      call MPI_Comm_rank( PETSC_COMM_WORLD, myrank, ierr )
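!     Note: because mpi_init has already been called, PetscInitialize
!     does not initialize MPI a second time, and PETSC_NULL_CHARACTER
!     requests no options file.  The second MPI_Comm_rank call obtains
!     the rank within PETSC_COMM_WORLD, which defaults to
!     MPI_COMM_WORLD.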
      if (myrank .eq. 0) then
         print *, 'Hello World'
      endif
!     Debug scaffolding; enabling these two lines ends the run right
!     after the greeting, before the solver executes.
!     call PetscFinalize(ierr)
!     stop

      twod=2

      call in_2d_dat(n,dt,gam,nsteps,coords)

!     Each rank redirects unit 6 to its own MATLAB-readable file,
!     foo<rank>.m, so all later write(6,*) output is per-process.
      write(fname,'(a,i0,a)') 'foo', myrank, '.m'
      open(unit=6,file=trim(fname))

      if(nprocs.gt.get_glob_nel())then
         write(6,*)'number of elements must be >= number of processors'
         stop
      end if

      call create_glob_el(n,coords)
      call create_proc_el(n,coords,nprocs,myrank)
      call create_globalmap(get_proc_el())
      call create_localmap(get_proc_el())
      call create_pressmap(get_proc_el())
      call create_outmap(get_proc_el())
      call create_outpmap(get_proc_el())

      call build_matrices(n)

!     Coordinates of the grid points on this processor.
      proc_coords=get_proc_coords(get_glob_nelx(),get_glob_nely(),nprocs,myrank,coords)

!     GUXY is the global solution.
      GUXY=initialize2dsoln(n,proc_coords,get_proc_el())

!     LUXY is the "local" solution, i.e. LUXY has duplicate values for
!     the same point in the xy plane if it is shared by two neighboring
!     elements.
      LUXY=global_to_local(GUXY,get_proc_el())

      call set_space_dimensions(twod)
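!     (On a quadrilateral mesh a node shared by k elements appears k
!     times in LUXY; an interior corner node, for instance, appears
!     four times.  The weight matrix created below averages those
!     copies after the direct stiffness summation.)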
!     Here we set the boundary conditions for the Dirichlet boundary
!     nodes.
      call build2d_locbnodes(get_proc_el())
      call build_boundary(get_soln(LUXY,'u'),get_nodes(LUXY,'x'),get_nodes(LUXY,'y'),get_proc_el(),'u')
      call build_boundary(get_soln(LUXY,'v'),get_nodes(LUXY,'x'),get_nodes(LUXY,'y'),get_proc_el(),'v')
      call build_boundary(get_solngp(LUXY,'x'),get_nodes(LUXY,'x'),get_nodes(LUXY,'y'),get_proc_el(),'x') !gpx
      call build_boundary(get_solngp(LUXY,'y'),get_nodes(LUXY,'x'),get_nodes(LUXY,'y'),get_proc_el(),'y') !gpy

!     A sloppy way of initializing the stream function, because
!     pressure is not used.
      call build_boundary(get_solngp(LUXY,'y'),get_nodes(LUXY,'x'),get_nodes(LUXY,'y'),get_proc_el(),'s') !gpy

!     Create the forcing term.
      allocate(force(2*size(get_soln(LUXY,'u'))))
      force(1:size(force)/2)=forcing(get_nodes(LUXY,'x'),get_nodes(LUXY,'y'),'u')
      force(size(force)/2+1:size(force))=forcing(get_nodes(LUXY,'x'),get_nodes(LUXY,'y'),'v')

!     Create the wind term.
      allocate(wind(2*size(get_soln(LUXY,'u'))))
      wind(1:size(wind)/2)=convect(get_nodes(LUXY,'x'),get_nodes(LUXY,'y'),'u',0.0)
      wind(size(wind)/2+1:size(wind))=convect(get_nodes(LUXY,'x'),get_nodes(LUXY,'y'),'v',0.0)

!     Output the initial data for the velocities and pressure.
      call print_init_soln(n,get_soln(GUXY,'u'),get_nodes(GUXY,'x'),get_nodes(GUXY,'y'),get_proc_el(),'u')
      call print_init_soln(n,get_soln(GUXY,'v'),get_nodes(GUXY,'x'),get_nodes(GUXY,'y'),get_proc_el(),'v')
      call print_init_soln(n,get_solnp(GUXY),get_pnodes(GUXY,'x'),get_pnodes(GUXY,'y'),get_proc_el(),'p')

!     Create auxiliary parameters.
!     The weighting matrix allows us to average the shared values
!     obtained from DSS from neighboring elements.
      call create_weight_matrix(size(get_soln(LUXY,'u')))
!     The mask matrix allows us to zero out contributions on the
!     Dirichlet boundary.
      call create_mask_matrix(size(get_soln(LUXY,'u')))

!     Initialize the par_dss parameters.
      call init_pardss(get_glob_el(get_size_globEl()),get_proc_el())

!     Alternative solvers, disabled:
!     call steady_stokes_solve((/get_soln(LUXY,'u'),get_soln(LUXY,'v')/),get_solnp(LUXY),force)
!     call unsteady_stokes_solve((/get_soln(LUXY,'u'),get_soln(LUXY,'v')/),get_solnp(LUXY),force)
!     call unsteady_stokes_solve((/get_soln(LUXY,'u'),get_soln(LUXY,'v')/),get_solnp(LUXY),force,wind)
!     call steady_conv_diff_solve((/get_soln(LUXY,'u'),get_soln(LUXY,'v')/),get_solnp(LUXY),force,wind)
!     call unsteady_ns_solve((/get_soln(LUXY,'u'),get_soln(LUXY,'v')/),get_solnp(LUXY),force)
!     call unsteady_conv_diff_solve((/get_soln(LUXY,'u'),get_soln(LUXY,'v')/), &
!          get_solnp(LUXY),get_nodes(LUXY,'x'),get_nodes(LUXY,'y'),force,wind)

!     Steady Navier-Stokes solve.
      nglobaldofs=get_nguxy(get_proc_el())
      call steady_ns_solve((/get_soln(LUXY,'u'),get_soln(LUXY,'v')/),get_solnp(LUXY),force,wind,nglobaldofs)

      call destroy_glob_el()
      call destroy_matrices()
      call soln_destroy(GUXY)
      call soln_destroy(LUXY)
      call destroy_boundary()

      write(6,*)'%exiting program'

      call PetscFinalize(ierr)
      call mpi_finalize(irc)

      end program unsteady_ex
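!
! A typical launch (assuming an MPICH-1 style environment, to match the
! inlined mpif.h), with each rank writing its own foo<rank>.m file:
!
!     mpirun -np 4 ./unsteady_ex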