[Swift-commit] r4779 - in trunk: bin docs docs/newuser-guide docs/utils etc/sites
davidk at ci.uchicago.edu
Tue Jul 5 16:46:12 CDT 2011
Author: davidk
Date: 2011-07-05 16:46:12 -0500 (Tue, 05 Jul 2011)
New Revision: 4779
Added:
trunk/docs/newuser-guide/beagle-quickstart.txt
trunk/docs/newuser-guide/coaster-quickstart.txt
trunk/docs/utils/
trunk/docs/utils/gensites.txt
Removed:
trunk/docs/newuser-guide/beagle-quickstart.txt
trunk/etc/sites/OLD/
Modified:
trunk/bin/gensites
trunk/bin/start-coaster-service
trunk/etc/sites/intrepid
trunk/etc/sites/pads
trunk/etc/sites/persistent-coasters
trunk/etc/sites/queenbee
trunk/etc/sites/sge-local
trunk/etc/sites/ssh-pbs-coasters
trunk/etc/sites/surveyor
Log:
Gensites:
- now creates tc.data from application definitions in the swift.properties file
- reads XML comments as template descriptions
- lists all token requirements when listing a template
- added filesystem_url token
Created an asciidoc utility directory
Converted the gensites HTML documentation to asciidoc and updated it with the new features
Added descriptions to various gensites templates
Gave start-coaster-service the ability to copy user-specified files at startup
- This is useful when starting VMs, since the machines may not have the needed applications
- May run into problems with dynamically linked executables
Added some documentation for the persistent coaster scripts
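
As a rough illustration of the new settings (the names are taken from the changes
below; the values shown are placeholders), an #app definition in swift.properties
and the file-copy variables in coaster-service.conf look like:

-----
# swift.properties: generate a tc.data entry for the "echo" app on this site
#app surveyor echo=/bin/echo

# coaster-service.conf: files to copy to workers when the service starts
export FILES_TO_COPY="myapp.bin mydata.dat"
export FILES_TO_COPY_REMOTE_DESTINATION=workerhost:/tmp
-----
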
Modified: trunk/bin/gensites
===================================================================
--- trunk/bin/gensites 2011-07-05 21:45:46 UTC (rev 4778)
+++ trunk/bin/gensites 2011-07-05 21:46:12 UTC (rev 4779)
@@ -115,7 +115,7 @@
# Locate template
if [ ! -f "$TEMPLATE_PATH" ]; then
if [ -f "$TEMPLATE" ]; then
- $TEMPLATE_PATH = $TEMPLATE
+ TEMPLATE_PATH=$TEMPLATE
elif [ -f "$SWIFT_HOME/$TEMPLATE" ]; then
TEMPLATE_PATH=$SWIFT_HOME/$TEMPLATE
elif [ -f "$HOME/.swift/sites/$TEMPLATE" ]; then
@@ -143,6 +143,12 @@
fi
fi
+# Setup for creating a TC file
+if [ -f "tc.data" ] && [ -n "`grep -e app $PROPERTIES_FILE`" ]; then
+ mv tc.data tc.data.old
+ HOSTS=`grep -i "pool handle" $TEMPLATE|grep -v "^[[:space:]]*#"|cut -d'"' -f2`
+fi
+
# Parse values into variables for later sed processing
#WORK=`pwd`"/work"
while read line
@@ -178,6 +184,29 @@
"#site $TEMPLATE internalhostname="*|'#site internalhostname='*)
INTERNALHOSTNAME=`get_value $line`
;;
+ '#app'*)
+ if [ `echo $line | wc -w` == 2 ]; then
+ for HOST in $HOSTS
+ do
+ NAME=`echo $line |cut -d'=' -f1|awk '{print $2}'`
+ COMMAND=`echo $line |cut -d'=' -f2`
+ COMMAND=`eval echo $COMMAND`
+ echo $HOST $NAME `readlink -f $COMMAND` null null null >> tc.data
+ done
+ fi
+
+ if [ `echo $line |wc -w` == 3 ]; then
+ for HOST in $HOSTS
+ do
+ if [ $HOST == `echo $line|awk '{print $2}'` ]; then
+ NAME=`echo $line|awk '{print $3}'|cut -d'=' -f1`
+ COMMAND=`echo $line|awk '{print $3}'|cut -d'=' -f2`
+ COMMAND=`eval echo $COMMAND`
+ eval echo $HOST $NAME `readlink -f $COMMAND` null null null >> tc.data
+ fi
+ done
+ fi
+ ;;
esac
done < $PROPERTIES_FILE
Modified: trunk/bin/start-coaster-service
===================================================================
--- trunk/bin/start-coaster-service 2011-07-05 21:45:46 UTC (rev 4778)
+++ trunk/bin/start-coaster-service 2011-07-05 21:46:12 UTC (rev 4779)
@@ -8,6 +8,15 @@
exit 1
}
+# Copy files
+copy_requested_files()
+{
+ for file in $FILES_TO_COPY
+ do
+ scp $file $WORKER_USERNAME@$FILES_TO_COPY_REMOTE_DESTINATION
+ done
+}
+
# Start futuregrid workers
start-workers-futuregrid()
{
@@ -44,7 +53,10 @@
scp $SWIFT_BIN/$WORKER $WORKER_USERNAME@$MACHINE:$WORKER_WORK > /dev/null 2>&1
echo "Starting worker on $MACHINE"
ssh $WORKER_USERNAME@$MACHINE $WORKER_WORK/$WORKER $EXECUTION_URL $MACHINE $LOG_DIR &
- echo $! >> $PID_FILE
+ echo $! >> $PID_FILE
+
+    # Copy requested files
+ copy_requested_files
done
}
@@ -58,11 +70,21 @@
fi
for MACHINE in $WORKER_HOSTS
do
+    # Enable SSH tunneling if needed
+ if [ $SSH_TUNNELING == "yes" ]; then
+ ssh -R *:$PORT:localhost:$PORT $WORKER_USERNAME@$MACHINE sleep 999 &
+ echo $! >> $PID_FILE
+ fi
+
+ # Copy and start worker script
scp $SWIFT_BIN/$WORKER $MACHINE:$WORKER_WORK > /dev/null 2>&1
echo Starting worker on $MACHINE
ssh $MACHINE $WORKER_WORK/$WORKER $EXECUTION_URL $MACHINE $LOG_DIR &
echo $! >> $PID_FILE
done
+
+ # Copy requested files
+ copy_requested_files
return 0
}
@@ -117,6 +139,13 @@
crash "Cannot find coaster-service.conf!"
fi
+# Determine Swift config file (cf - used for gensites)
+if [ -f "$RUN_DIR/cf" ]; then
+ SWIFT_CONFIG_FILE="$RUN_DIR/cf"
+else
+ SWIFT_CONFIG_FILE=$CONFIG_FILE
+fi
+
# Determine information needed about this machine
if [ -z "$IPADDR" ]; then
if [ -x "/sbin/ifconfig" ]; then
@@ -228,7 +257,7 @@
# Generate sites.xml
export EXECUTION_URL="http://$IPADDR:$SERVICE_PORT"
echo Generating sites.xml
-gensites persistent-coasters -p $CONFIG_FILE > $RUN_DIR/sites.xml
+gensites persistent-coasters -p $SWIFT_CONFIG_FILE > $RUN_DIR/sites.xml
# Generate config file
if [ $SHARED_FILESYSTEM == "no" ]; then
Deleted: trunk/docs/newuser-guide/beagle-quickstart.txt
===================================================================
--- trunk/docs/newuser-guide/beagle-quickstart.txt 2011-07-05 21:45:46 UTC (rev 4778)
+++ trunk/docs/newuser-guide/beagle-quickstart.txt 2011-07-05 21:46:12 UTC (rev 4779)
@@ -1,49 +0,0 @@
-PADS Quickstart Guide
-=====================
-
-Requesting Access
------------------
-If you do not already have a Computation Institute account, you can request
-one at https://www.ci.uchicago.edu/accounts/. This page will give you a list
-of resources you can request access to.
-You already have an existing CI account, but do not have access to Beagle,
-send an email to support at ci.uchicago.edu to request access.
-
-Connecting to a login node
---------------------------
-Once you have account, you should be able to access a Beagle login
-node with the following command:
-
------
-ssh yourusername at login.beagle.ci.uchicago.edu
------
-
-Follow the steps outlined below to get started with Swift on Beagle:
-
-*step 1.* Load the Swift module on Beagle as follows: +module load swift+
-
-*step 2.* Create and change to a directory where your Swift related work will
-stay. (say, +mkdir swift-lab+, followed by, +cd swift-lab+)
-
-*step 3.* To get started with a simple example running +/bin/cat+ to read an
-input file +data.txt+ and write to an output file +f.nnn.out+, copy the folder
-at +/home/ketan/catsn+ to the above directory. (+cp -r /home/ketan/catsn
-.+ followed by +cd catsn+).
-
-*step 4.* In the sites file: +beagle-coaster.xml+, make the following two
-changes: *1)* change the path of +workdirectory+ to your preferred location
-(say to +/lustre/beagle/$USER/swift-lab/swift.workdir+) and *2)* Change the
-project name to your project (+CI-CCR000013+) . The workdirectory will contain
-execution data related to each run, e.g. wrapper scripts, system information,
-inputs and outputs.
-
-*step 5.* Run the example using following commandline (also found in run.sh):
-+swift -config cf -tc.file tc -sites.file beagle-coaster.xml catsn.swift -n=1+
-. You can further change the value of +-n+ to any arbitrary number to run that
-many number of concurrent +cat+
-
-*step 6.* Check the output in the generated +outdir+ directory (+ls outdir+)
-
-Note: Running from sandbox node or requesting 1 hour walltime for upto 3 nodes
-will get fast prioritized execution. Good for small tests.
-
Added: trunk/docs/newuser-guide/beagle-quickstart.txt
===================================================================
--- trunk/docs/newuser-guide/beagle-quickstart.txt (rev 0)
+++ trunk/docs/newuser-guide/beagle-quickstart.txt 2011-07-05 21:46:12 UTC (rev 4779)
@@ -0,0 +1,49 @@
+Beagle Quickstart Guide
+=======================
+
+Requesting Access
+-----------------
+If you do not already have a Computation Institute account, you can request
+one at https://www.ci.uchicago.edu/accounts/. This page will give you a list
+of resources you can request access to.
+If you already have an existing CI account but do not have access to Beagle,
+send an email to support at ci.uchicago.edu to request access.
+
+Connecting to a login node
+--------------------------
+Once you have an account, you should be able to access a Beagle login
+node with the following command:
+
+-----
+ssh yourusername at login.beagle.ci.uchicago.edu
+-----
+
+Follow the steps outlined below to get started with Swift on Beagle:
+
+*step 1.* Load the Swift module on Beagle as follows: +module load swift+
+
+*step 2.* Create and change to a directory where your Swift-related work will
+stay (say, +mkdir swift-lab+ followed by +cd swift-lab+).
+
+*step 3.* To get started with a simple example running +/bin/cat+ to read an
+input file +data.txt+ and write to an output file +f.nnn.out+, copy the folder
+at +/home/ketan/catsn+ to the above directory. (+cp -r /home/ketan/catsn
+.+ followed by +cd catsn+).
+
+*step 4.* In the sites file +beagle-coaster.xml+, make the following two
+changes: *1)* change the path of +workdirectory+ to your preferred location
+(say, +/lustre/beagle/$USER/swift-lab/swift.workdir+) and *2)* change the
+project name to your project (e.g. +CI-CCR000013+). The workdirectory will
+contain execution data related to each run, e.g. wrapper scripts, system
+information, inputs and outputs.
+
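+The exact contents of +beagle-coaster.xml+ will vary, but the two elements to
+edit look roughly like this (the values shown are placeholders):
+
+-----
+<profile namespace="globus" key="project">CI-CCR000013</profile>
+<workdirectory>/lustre/beagle/yourusername/swift-lab/swift.workdir</workdirectory>
+-----
+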
+*step 5.* Run the example using the following command line (also found in
+run.sh): +swift -config cf -tc.file tc -sites.file beagle-coaster.xml
+catsn.swift -n=1+. You can change the value of +-n+ to any number to run
+that many concurrent +cat+ invocations.
+
+*step 6.* Check the output in the generated +outdir+ directory (+ls outdir+)
+
+Note: Running from the sandbox node, or requesting a one-hour walltime for up
+to 3 nodes, will get fast, prioritized execution. This is good for small tests.
+
Added: trunk/docs/newuser-guide/coaster-quickstart.txt
===================================================================
--- trunk/docs/newuser-guide/coaster-quickstart.txt (rev 0)
+++ trunk/docs/newuser-guide/coaster-quickstart.txt 2011-07-05 21:46:12 UTC (rev 4779)
@@ -0,0 +1,196 @@
+Coaster Service Quickstart
+==========================
+
+Persistent Coasters
+-------------------
+There are three main parts to the persistent coaster scripts:
+coaster-service.conf, start-coaster-service, and stop-coaster-service.
+The coaster-service.conf file is a configuration file that determines how
+coasters should run. Start-coaster-service and stop-coaster-service start
+and stop the processes that are needed to run Swift in this setup.
+
+coaster-service.conf
+~~~~~~~~~~~~~~~~~~~~
+The coaster-service.conf file is used to control the way
+the coaster service runs. It is located in the etc/ directory
+of your Swift distribution. Below is an explanation of what
+the settings in coaster-service.conf do.
+
+IP Address
+^^^^^^^^^^
+-----
+export IPADDR=123.456.7.8
+-----
+This setting is the IP address of the machine on which you are running start-coaster-service.
+When empty, start-coaster-service will attempt to automatically determine the IP address.
+On machines with multiple network interfaces, it may be better to specify the
+address explicitly here.
+
+Local Port and Service Port
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+-----
+export LOCAL_PORT=1984
+export SERVICE_PORT=2001
+-----
+The local and service ports are the port numbers used by the workers and by
+Swift to communicate with each other. When these ports are undefined, the
+coaster service will automatically find and use available ports.
+
+Log Directory
+^^^^^^^^^^^^^
+-----
+export LOG_DIR=logs
+-----
+This is the directory in which log files are stored, relative to your current
+working directory. Two log files are currently stored here: swift.log, the
+output of the Swift process, and coasters.log, the output of the coaster service.
+
+Project
+^^^^^^^
+-----
+export PROJECT=CI-CCR000013
+-----
+The name of the project to use when submitting work to a scheduler.
+
+Queue
+^^^^^
+-----
+export QUEUE=fast
+-----
+
+When working directly with a scheduler, this specifies the queue to which work
+will be submitted.
+
+Shared Filesystem
+^^^^^^^^^^^^^^^^^
+-----
+export SHARED_FILESYSTEM=yes
+-----
+Valid values for SHARED_FILESYSTEM are yes and no. This defines
+whether all the nodes involved share a common filesystem. By default,
+Swift assumes they do.
+
+When SHARED_FILESYSTEM is set to no, a Swift configuration file called
+cf is generated. It contains settings that turn on provider staging,
+a mechanism for running on machines without a shared filesystem.
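+
+The exact contents of the generated cf file depend on your Swift version; as a
+rough sketch (this property name is an assumption, not taken from this commit),
+it enables provider staging with something like:
+
+-----
+use.provider.staging=true
+-----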
+
+SSH Tunneling
+^^^^^^^^^^^^^
+-----
+export SSH_TUNNELING=yes
+-----
+SSH tunneling provides a way to bypass restrictive firewalls. Use this if there
+is a firewall blocking ports between the head node and the worker nodes.
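+
+For reference, the tunnel that start-coaster-service opens for each worker host
+(see the script change above) is a reverse port forward, roughly:
+
+-----
+# one reverse tunnel per worker host; $PORT is the coaster service port
+ssh -R *:$PORT:localhost:$PORT $WORKER_USERNAME@$MACHINE sleep 999 &
+-----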
+
+Swift path
+^^^^^^^^^^
+-----
+export SWIFT=/home/davidk/swift
+-----
+The start-coaster-service script will automatically try to
+determine the path to Swift. If you see any error messages
+about not being able to find swift, you can manually specify
+the full path to swift here.
+
+Swift VM Boot Directory
+^^^^^^^^^^^^^^^^^^^^^^^
+-----
+export SWIFTVMBOOT_DIR=$HOME/swift-vm-boot
+-----
+When configured to work with Futuregrid, this setting will determine the location of the swift-vm-boot
+utility, which is responsible for requesting virtual machines.
+
+Work Directory
+^^^^^^^^^^^^^^
+-----
+export WORK=$HOME/work
+-----
+This setting defines a directory that Swift can write to while it executes work.
+
+Worker Hosts
+^^^^^^^^^^^^^
+-----
+export WORKER_HOSTS="host1 host2 host3"
+-----
+WORKER_HOSTS is the list of hosts on which to start workers when running in SSH mode.
+
+Worker Mode
+^^^^^^^^^^^
+-----
+export WORKER_MODE=ssh
+-----
+Worker mode describes how to start the needed workers.
+Valid options include ssh, local, cobalt, and futuregrid.
+
+SSH mode requires having SSH keys on all the worker nodes
+so that you can log in without being prompted. SSH
+is used to handle starting and stopping workers.
+
+Local mode starts a single worker on the local machine.
+
+Cobalt mode starts a worker process via the Cobalt job
+management suite.
+
+Futuregrid works similarly to SSH mode, but will also start
+and stop instances of virtual machines.
+
+Worker Username
+^^^^^^^^^^^^^^^
+-----
+export WORKER_USERNAME=david
+-----
+In modes that use SSH, modify this value to log in as a different user on worker nodes.
+By default this value is set to $USER and assumes login names are the same on each
+system.
+
+Worker Work Directory
+^^^^^^^^^^^^^^^^^^^^^
+-----
+export WORKER_WORK=/home/${USER}/work
+-----
+This setting should point to a directory on the worker nodes that
+is writable by you. The default is reasonable, but you may
+need to change it if your worker nodes have a different
+directory structure.
+
+start-coaster-service
+~~~~~~~~~~~~~~~~~~~~~
+Once coaster-service.conf is configured, the start-coaster-service script
+will take care of everything else. It will start the coaster service with
+the correct arguments, start SSH tunneling, and start the worker.pl script
+on all worker nodes. It will create a sites.xml and tc.data in your
+current working directory. If SHARED_FILESYSTEM is set to no, it will also
+generate a Swift configuration file called cf. This contains the settings
+needed for provider staging. The start-coaster-service script does not take
+any arguments. It is located in Swift's bin directory.
+
+-----
+$ start-coaster-service
+-----
+
+Running Swift
+~~~~~~~~~~~~~
+Now that start-coaster-service has started all the required programs and
+generated Swift configuration files, you must tell Swift to use them via
+command line arguments.
+
+.Running Swift (with SHARED_FILESYSTEM=yes)
+-----
+$ swift -sites.file sites.xml -tc.file tc.data <myscript.swift>
+-----
+
+.Running Swift (with SHARED_FILESYSTEM=no)
+-----
+$ swift -sites.file sites.xml -tc.file tc.data -config cf <myscript.swift>
+-----
+
+stop-coaster-service
+~~~~~~~~~~~~~~~~~~~~
+The stop-coaster-service script will stop all of the services started by
+start-coaster-service. It ends the coaster service, the worker scripts, and
+SSH tunneling. The stop-coaster-service script takes no command line arguments.
+
+-----
+$ stop-coaster-service
+-----
Added: trunk/docs/utils/gensites.txt
===================================================================
--- trunk/docs/utils/gensites.txt (rev 0)
+++ trunk/docs/utils/gensites.txt 2011-07-05 21:46:12 UTC (rev 4779)
@@ -0,0 +1,235 @@
+Gensites
+==========
+:toc:
+:icons:
+:website: http://www.ci.uchicago.edu/swift
+:numbered:
+
+Overview
+--------
+To simplify the site configuration process, versions of Swift starting with
+0.92 include a utility called gensites. The gensites command is used
+to generate a sites.xml file for running a Swift workflow on a given
+site. It accomplishes this by using a series of templates. The templates
+used by gensites are the same templates used for internal testing, so
+they are likely to be up to date and known to work on a given site.
+
+
+Viewing All Available Site Templates
+------------------------------------
+To view a list of all available templates, run the following command:
+
+-----
+$ gensites -T
+-----
+
+You should see output similar to this:
+-----
+intrepid
+local
+local-pbs-coasters
+pads
+queenbee
+sge-local
+ssh
+ssh-pbs-coasters
+surveyor
+-----
+
+You will notice that the templates can be specific to a particular set
+of machines like Intrepid and Queenbee, or they may be more general and
+aim to work across a variety of machines, as in the case of
+local-pbs-coasters. Gensites will look in three directories for available
+templates: your current directory, $SWIFT_HOME/etc/sites and $HOME/.swift/sites.
+
+Listing the Template
+--------------------
+To view the contents of a template, type:
+
+-----
+$ gensites -l templatename
+-----
+
+Running this command will print some information about the template and give
+you an idea of what settings you will need to specify. You should see
+something like this:
+
+-----
+Description: Template for Intrepid. More information about this system can be found at http://www.alcf.anl.gov/support/gettingstarted/index.php
+Required Tokens: host work project queue
+-----
+
+The required tokens are placeholder values that must be defined for the
+template to work properly. You will specify them in the
+following steps.
+
+Providing Site Specific Values
+------------------------------
+
+The gensites script needs to know how to replace the placeholder values
+in the template. This is done by configuring the swift.properties file.
+Gensites will first look for a swift.properties file in the current
+directory. If it does not exist, it will next look in $HOME/.swift.
+
+To add site-specific values to swift.properties, add a line in the
+following format:
+
+-----
+#site templatename setting=value
+-----
+
+For example, here is what you could add to
+swift.properties to replace the values of project, queue, and work for
+the surveyor template:
+
+-----
+#site surveyor project=MyProject
+#site surveyor queue=MyQueue
+#site surveyor work=/path/to/workdir
+-----
+
+Now, running the command 'gensites surveyor' will produce the following
+valid configuration file:
+
+-----
+<config>
+ <pool handle="surveyor">
+ <filesystem provider="local" />
+ <execution provider="coaster" jobmanager="local:cobalt"/>
+ <profile namespace="globus" key="project">MyProject</profile>
+ <profile namespace="globus" key="queue">MyQueue</profile>
+ <profile namespace="globus" key="kernelprofile">zeptoos</profile>
+ <profile namespace="globus" key="alcfbgpnat">true</profile>
+ <profile namespace="karajan" key="jobthrottle">21</profile>
+ <profile namespace="karajan" key="initialScore">10000</profile>
+ <profile namespace="globus" key="workersPerNode">1</profile>
+ <profile namespace="globus" key="workerLoggingLevel">DEBUG</profile>
+ <profile namespace="globus" key="slots">1</profile>
+ <profile namespace="globus" key="maxTime">900</profile>
+ <profile namespace="globus" key="nodeGranularity">64</profile>
+ <profile namespace="globus" key="maxNodes">64</profile>
+ <workdirectory>/path/to/workdir</workdirectory>
+ </pool>
+</config>
+-----
+
+Generating Application Configurations with Gensites
+---------------------------------------------------
+Gensites can also be used to create a valid application catalog, commonly
+called tc.data. Here are some examples of how to specify applications within
+your swift.properties file:
+
+-----
+#app intrepid echo=/usr/bin/echo
+-----
+
+This first example shows a site-specific application. The #app definition
+tells gensites that this line describes an application rather than a #site
+setting. In the second part, echo=/usr/bin/echo, the left-hand side
+is the name of the application that will be called from within Swift, and the
+right-hand side is the path to the binary.
+
+-----
+#app intrepid echo=$HOME/bin/echo
+-----
+
+Environment variables will be interpreted and converted to full path names
+for Swift.
+
+-----
+#app intrepid echo=bin/echo
+-----
+
+Gensites can take relative paths (relative to your current directory) and
+translate them to full path names for Swift.
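+
+The resulting tc.data contains one space-separated line per site and
+application. Assuming the template defines a pool handle named intrepid, the
+first example above would produce roughly:
+
+-----
+intrepid echo /usr/bin/echo null null null
+-----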
+
+WARNING: Running gensites with #app definitions will replace any file
+called tc.data in your current directory. If a file called tc.data exists,
+it will be renamed to tc.data.old. If you run gensites twice, the original
+contents of your tc.data will be lost. Please either rename your tc.data file
+or copy it to a different location.
+
+Running Swift With the New Configuration
+----------------------------------------
+
+Now that gensites is configured and producing a valid configuration
+file, Swift needs to be told to use it. The first step is to create a
+unique config file based on the preferences you specified.
+
+-----
+$ gensites surveyor > myconfig.xml
+-----
+
+This will send the output of gensites to myconfig.xml. This example uses a
+swift.properties from one of the default locations (your current
+directory or ~/.swift/swift.properties). To specify a different
+swift.properties file, use:
+
+-----
+$ gensites surveyor -p myswift.properties > myconfig.xml
+-----
+
+Next, provide the configuration filename to swift:
+-----
+$ swift -sites.file myconfig.xml myscript.swift
+-----
+
+Alternatively, if you have specified applications, be sure to pass the
+generated tc.data to Swift as well:
+-----
+$ swift -sites.file myconfig.xml -tc.file tc.data myscript.swift
+-----
+
+Providing Default Values for All Templates
+------------------------------------------
+It is also possible to specify a default value for a setting, regardless
+of which template you use. If you want your queue to default to "fast"
+across all templates, you can do this by omitting the template name.
+Consider the following swift.properties:
+
+-----
+#site queue=fast
+#site surveyor project=MyProject
+#site surveyor work=/path/to/workdir
+-----
+By omitting the template name, the default value for queue on surveyor
+(and any other template you use) will be set to "fast". One thing to
+keep in mind when setting default values is that order matters. Be sure
+to set your default values before setting template-specific values.
+
+Just like the #site definitions, when a site name is not specified, an app
+will be created for every site that is defined in your template.
+-----
+#app echo=/bin/echo
+-----
+
+More Help
+---------
+The gensites script provides additional options not discussed here, such
+as using templates and swift.properties in non-standard directories. For
+more information, run gensites -h. Here is a full list of the available
+options:
+
+-----
+$ gensites -help
+
+ usage: gensites template [-p properties.file] [-L template_directory] [-h] [-T] [-l]
+
+ template Name of template to use
+ -p properties.file Specify a swift.properties to use
+ -L template_directory Specify a non-standard template directory
+ -T List all templates available
+ -h Help / usage information
+ -l List the contents of a specific template
+
+ Examples:
+
+ Create a site configuration file for sites.xml using default properties.file in current directory
+ $ gensites pads > sites.xml
+
+ Use a specific properties file for a site
+ $ gensites -p sites.properties pads > sites.xml
+
+ Specify a non-standard directory where templates are located
+ $ gensites -L template.dir pads > sites.xml
+-----
Modified: trunk/etc/sites/intrepid
===================================================================
--- trunk/etc/sites/intrepid 2011-07-05 21:45:46 UTC (rev 4778)
+++ trunk/etc/sites/intrepid 2011-07-05 21:46:12 UTC (rev 4779)
@@ -1,3 +1,5 @@
+<!-- Template for Intrepid. More information about this system can be found -->
+<!-- at http://www.alcf.anl.gov/support/gettingstarted/index.php -->
<config>
<pool handle="localhost" sysinfo="INTEL32::LINUX">
@@ -4,7 +6,6 @@
<gridftp url="local://localhost" />
<execution provider="local" url="none" />
<workdirectory>_WORK_</workdirectory>
- <!-- <profile namespace="karajan" key="maxSubmitRate">1</profile> -->
<profile namespace="karajan" key="jobThrottle">0.04</profile>
<profile namespace="swift" key="stagingMethod">file</profile>
</pool>
@@ -12,7 +13,6 @@
<pool handle="coasters_alcfbgp">
<filesystem provider="local" />
<execution provider="coaster" jobmanager="local:cobalt"/>
- <!-- <profile namespace="swift" key="stagingMethod">local</profile> -->
<profile namespace="globus" key="internalHostname">_HOST_</profile>
<profile namespace="globus" key="project">_PROJECT_</profile>
<profile namespace="globus" key="queue">_QUEUE_</profile>
@@ -23,7 +23,7 @@
<profile namespace="globus" key="jobsPerNode">1</profile>
<profile namespace="globus" key="workerLoggingLevel">DEBUG</profile>
<profile namespace="globus" key="slots">1</profile>
- <profile namespace="globus" key="maxTime">900</profile> <!-- seconds -->
+ <profile namespace="globus" key="maxTime">900</profile>
<profile namespace="globus" key="nodeGranularity">64</profile>
<profile namespace="globus" key="maxNodes">64</profile>
<workdirectory>_WORK_</workdirectory>
Modified: trunk/etc/sites/pads
===================================================================
--- trunk/etc/sites/pads 2011-07-05 21:45:46 UTC (rev 4778)
+++ trunk/etc/sites/pads 2011-07-05 21:46:12 UTC (rev 4779)
@@ -1,3 +1,4 @@
+<!-- Swift configuration for PADS - http://pads.ci.uchicago.edu/ -->
<config>
<pool handle="pads-pbs">
<execution jobmanager="local:pbs" provider="local" url="none" />
Modified: trunk/etc/sites/persistent-coasters
===================================================================
--- trunk/etc/sites/persistent-coasters 2011-07-05 21:45:46 UTC (rev 4778)
+++ trunk/etc/sites/persistent-coasters 2011-07-05 21:46:12 UTC (rev 4779)
@@ -1,3 +1,4 @@
+<!-- This template is used by the start-coaster-service shell script for configuring a persistent coaster setup -->
<config>
<pool handle="persistent-coasters">
<execution provider="coaster-persistent"
Modified: trunk/etc/sites/queenbee
===================================================================
--- trunk/etc/sites/queenbee 2011-07-05 21:45:46 UTC (rev 4778)
+++ trunk/etc/sites/queenbee 2011-07-05 21:46:12 UTC (rev 4779)
@@ -1,3 +1,4 @@
+<!-- Queenbee on Teragrid - http://www.loni.org/teragrid/users_guide.php -->
<config>
<pool handle="queenbee-pbs">
<execution jobmanager="local:pbs" provider="local" url="none" />
Modified: trunk/etc/sites/sge-local
===================================================================
--- trunk/etc/sites/sge-local 2011-07-05 21:45:46 UTC (rev 4778)
+++ trunk/etc/sites/sge-local 2011-07-05 21:46:12 UTC (rev 4779)
@@ -1,3 +1,4 @@
+<!-- Template for a generic SGE provider -->
<config>
<pool handle="sge-local">
<execution provider="sge" url="none" />
Modified: trunk/etc/sites/ssh-pbs-coasters
===================================================================
--- trunk/etc/sites/ssh-pbs-coasters 2011-07-05 21:45:46 UTC (rev 4778)
+++ trunk/etc/sites/ssh-pbs-coasters 2011-07-05 21:46:12 UTC (rev 4779)
@@ -1,7 +1,7 @@
<config>
<pool handle="ssh-pbs-coasters">
- <execution jobmanager="ssh:pbs" provider="coaster" url="login1.pads.ci.uchicago.edu" />
- <filesystem provider="ssh" url="login1.pads.ci.uchicago.edu" />
+ <execution jobmanager="ssh:pbs" provider="coaster" url="_EXECUTION_URL_" />
+ <filesystem provider="ssh" url="_FILESYSTEM_URL_" />
<profile key="maxtime" namespace="globus">3000</profile>
<profile key="jobsPerNode" namespace="globus">8</profile>
<profile key="slots" namespace="globus">1</profile>
Modified: trunk/etc/sites/surveyor
===================================================================
--- trunk/etc/sites/surveyor 2011-07-05 21:45:46 UTC (rev 4778)
+++ trunk/etc/sites/surveyor 2011-07-05 21:46:12 UTC (rev 4779)
@@ -1,3 +1,4 @@
+<!-- Swift template for Surveyor - http://www.alcf.anl.gov/support/gettingstarted/index.php -->
<config>
<pool handle="surveyor">
<filesystem provider="local" />