[Swift-commit] r5472 - in trunk: bin tests

davidk at ci.uchicago.edu
Sat Dec 24 11:12:38 CST 2011


Author: davidk
Date: 2011-12-24 11:12:38 -0600 (Sat, 24 Dec 2011)
New Revision: 5472

Added:
   trunk/tests/providers
Modified:
   trunk/bin/gensites
   trunk/bin/start-coaster-service
   trunk/bin/stop-coaster-service
Log:
Link tests/providers -> tests/sites (symlink)
Updated gensites and coaster-service scripts from 0.93


Modified: trunk/bin/gensites
===================================================================
--- trunk/bin/gensites	2011-12-24 17:03:09 UTC (rev 5471)
+++ trunk/bin/gensites	2011-12-24 17:12:38 UTC (rev 5472)
@@ -187,6 +187,12 @@
          "#site $TEMPLATE internalhostname="*|'#site internalhostname='*)
             INTERNALHOSTNAME=`get_value $line`
             ;;
+         "#site $TEMPLATE jobs_per_node="*|'#site jobs_per_node='*)
+            JOBS_PER_NODE=`get_value $line`
+            ;;
+         "#site $TEMPLATE job_throttle="*|'#site job_throttle='*)
+            JOB_THROTTLE=`get_value $line`
+            ;;
          '#app'*)
             if [ `echo $line | wc -w` == 2 ]; then
                for HOST in $HOSTS
@@ -215,12 +221,12 @@
 fi
 
 # Verify that the variables used by the template are defined
-for TOKEN in NODES HOST WORK PROJECT QUEUE N_GRAN N_MAX SLOTS INTERNALHOSTNAME MAXTIME EXECUTION_URL
+for TOKEN in NODES HOST WORK PROJECT QUEUE N_GRAN N_MAX SLOTS INTERNALHOSTNAME MAXTIME EXECUTION_URL JOBS_PER_NODE JOB_THROTTLE
 do
    # Test for HOST/GLOBUS_HOSTNAME - the only values which don't match
    if [ "$TOKEN" == "HOST" ]; then
       if [ -z "$GLOBUS_HOSTNAME" ]; then
-         crash "Not specified: GLOBUS_HOSTNAME"
+         GLOBUS_HOSTNAME=$( hostname -f )
       fi
    elif grep _${TOKEN}_ $TEMPLATE_PATH > /dev/null; then
       if [ -z "${!TOKEN}" ]; then
@@ -245,6 +251,8 @@
   echo "s at _EXECUTION_URL_@${EXECUTION_URL}@"
   echo "s at _SERVICE_COASTERS_@${SERVICE_COASTERS:-NO_URL_GIVEN}@"
   echo "s at _SERVICE_PORT_@${SERVICE_PORT:-NO_PORT_GIVEN}@"
+  echo "s at _JOBS_PER_NODE_@${JOBS_PER_NODE}@"
+  echo "s at _JOB_THROTTLE_@${JOB_THROTTLE}@"
 } > $SEDFILE
 
 
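The new jobs_per_node and job_throttle branches follow the same pattern as the other #site directives gensites already recognizes: a value is read from the site description and emitted into $SEDFILE as a substitution for the matching _TOKEN_ placeholder in the site template. A minimal sketch of how the new settings would be used, assuming a hypothetical template file pbs.xml that contains the _JOBS_PER_NODE_ and _JOB_THROTTLE_ placeholders:

   # Hypothetical site description lines matched by the new case branches
   #site jobs_per_node=8
   #site job_throttle=2.55

   # The generated sed script rewrites the placeholders in the template
   # (assumed invocation; the actual apply step lives elsewhere in gensites)
   sed -f $SEDFILE pbs.xml > sites.xml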

Modified: trunk/bin/start-coaster-service
===================================================================
--- trunk/bin/start-coaster-service	2011-12-24 17:03:09 UTC (rev 5471)
+++ trunk/bin/start-coaster-service	2011-12-24 17:12:38 UTC (rev 5472)
@@ -11,45 +11,77 @@
 # Start futuregrid workers
 start-workers-futuregrid()
 {
+   # Setup environment
    PORT=$1
    EXECUTION_URL=http://localhost:$PORT
+   export EC2_ACCESS_KEY=$FUTUREGRID_IAAS_ACCESS_KEY
+   export EC2_SECRET_KEY=$FUTUREGRID_IAAS_SECRET_KEY
 
-   if [ ! -d "$SWIFTVMBOOT_DIR" ] || [ ! -x "$SWIFTVMBOOT_DIR/bin/bootit.sh" ]; then
+   # Check that SWIFTVMBOOT_DIR looks ok
+   if [ ! -d "$SWIFTVMBOOT_DIR" ] || [ ! -x "$SWIFTVMBOOT_DIR/futuregrid/bin/bootit.sh" ]; then
       crash "SWIFTVMBOOT_DIR incorrectly defined in coaster-service.conf"
    fi
 
+   # Install the Python virtual environment (ve)
+   if [ ! -d "$SWIFTVMBOOT_DIR/futuregrid/ve" ]; then
+      echo Setting up environment
+      python $SWIFTVMBOOT_DIR/futuregrid/bin/virtualenv.py $SWIFTVMBOOT_DIR/futuregrid/ve
+      if [ $? -ne 0 ]; then
+         echo "Failed to created the needed python virtual environment"
+         exit 1
+      fi
+   fi
+
+   # Install cloudinitd
+   source $SWIFTVMBOOT_DIR/futuregrid/ve/bin/activate
+   easy_install cloudinitd
+   if [ $? -ne 0 ]; then
+      echo "Failed to install cloudinitd"
+      exit 1
+   fi
+
+   # Register key
+   echo "Registering the key names in all the clouds"
+   python $SWIFTVMBOOT_DIR/futuregrid/bin/register_key.py $SWIFTVMBOOT_DIR/futuregrid/hosts.txt
+   if [ $? -ne 0 ]; then
+      echo "Failed to register the key names"
+      exit 1
+   fi
+
+   # Start virtual machines
    echo Starting virtual machines.. please wait
-   $SWIFTVMBOOT_DIR/bin/bootit.sh | tee -a bootit.log
-   SWIFTVMBOOT_OUTPUT=$SWIFTVMBOOT_DIR/output.json
-
+   $SWIFTVMBOOT_DIR/futuregrid/bin/bootit.sh | tee -a bootit.log
+   SWIFTVMBOOT_OUTPUT=$SWIFTVMBOOT_DIR/futuregrid/output.json
    if [ ! -f "$SWIFTVMBOOT_OUTPUT" ]; then
       crash "Error: Swift VM output file $SWIFTVMBOOT_OUTPUT does not exist!"
    fi
 
    SWIFTVM_INSTANCE=`grep "Starting up run" bootit.log |awk '{print $4}'`
-   echo $SWIFTVM_INSTANCE >> $HOME/.swift/.swiftvm_instance
-
+   echo $SWIFTVM_INSTANCE >> $HOME/.swift/.swiftvm_instances
    WORKER_HOSTS=`grep hostname $SWIFTVMBOOT_OUTPUT |awk '{print $2}'|sed 's/\"//g;s/,//g;s/null//g'`
 
    # Start worker script
    for MACHINE in $WORKER_HOSTS
    do
-      # Enable ssh tunneling if needed
+      echo $MACHINE >> $HOME/.swift/machines
+      scp $SWIFT_BIN/$WORKER $WORKER_USERNAME@$MACHINE:$WORKER_LOCATION > /dev/null 2>&1
       if [ "$SSH_TUNNELING" == "yes" ]; then
          ssh -R *:$PORT:localhost:$PORT $WORKER_USERNAME@$MACHINE sleep 999 &
          echo $! >> $PID_FILE
+         echo "Starting worker on $MACHINE"
+         ssh $WORKER_USERNAME@$MACHINE "$WORKER_LOCATION/$WORKER http://localhost:$PORT $MACHINE $LOG_DIR" &
+         echo $! >> $PID_FILE
+      else
+         echo "Starting worker on $MACHINE"
+         ssh $WORKER_USERNAME@$MACHINE "$WORKER_LOCATION/$WORKER $EXECUTION_URL $MACHINE $LOG_DIR" &
+         echo $! >> $PID_FILE
       fi
-
-      # Copy and start worker script
-      scp $SWIFT_BIN/$WORKER $WORKER_USERNAME@$MACHINE:$WORKER_LOCATION > /dev/null 2>&1
-      echo "Starting worker on $MACHINE"
-      ssh $WORKER_USERNAME@$MACHINE "$WORKER_LOCATION/$WORKER $EXECUTION_URL $MACHINE $LOG_DIR" &
-      echo $! >> $PID_FILE
+      
    done
 }
 
-# EC2 workers
-start-workers-ec2()
+# Globus Provision workers
+start-workers-gp()
 {
    PORT=$1
    EXECUTION_URL=http://localhost:$PORT
@@ -80,10 +112,13 @@
    WORKER_HOSTS=`$SWIFTVMBOOT_DIR/bin/gp-instance-describe $SWIFTVM_INSTANCE|sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g"|awk '{print $3}'`
 
    # Start worker script
+   if [ -f "$HOME/.swift/machines" ]; then
+      rm $HOME/.swift/machines
+   fi
    for MACHINE in $WORKER_HOSTS
    do
       # Create a list of machines for other applications, if needed
-      echo $MACHINE >> $HOME/.swift/.machines
+      echo $MACHINE >> $HOME/.swift/machines
 
       # Enable ssh tunneling if needed
       if [ "$SSH_TUNNELING" == "yes" ]; then
@@ -99,6 +134,75 @@
    done
 }
 
+# EC2 workers
+start-workers-ec2()
+{
+   PORT=$1
+   EXECUTION_URL=http://localhost:$PORT
+   if [ ! -d "$SWIFTVMBOOT_DIR" ] || [ ! -x "$SWIFTVMBOOT_DIR/ec2/bin/ec2-run-instances" ]; then
+      crash "SWIFTVMBOOT_DIR incorrectly defined in coaster-service.conf"
+   fi
+
+   export EC2_HOME="$SWIFTVMBOOT_DIR/ec2" 
+   export EC2_PRIVATE_KEY="$EC2_KEYFILE"
+   export EC2_CERT="$EC2_CERTFILE"
+
+   echo Creating instances..
+   $SWIFTVMBOOT_DIR/ec2/bin/ec2-run-instances "$EC2_AMI" -t "$EC2_INSTANCE_TYPE" -n "$EC2_NODES" -K "$EC2_KEYFILE" -C "$EC2_CERT"
+   SWIFTVM_INSTANCES=$( $SWIFTVMBOOT_DIR/ec2/bin/ec2-describe-instances | grep INSTANCE | grep -v terminated |awk '{print $2}' )
+   echo $SWIFTVM_INSTANCES >> $HOME/.swift/.swiftvm_instances
+   echo Waiting for nodes to boot..
+
+   # Wait until all instances are listed as running
+   while /bin/true
+   do
+      SWIFTVM_INSTANCES_AS_STRING=$( echo $SWIFTVM_INSTANCES | tr "\\n" " ")
+      STATUS_LIST=$( $SWIFTVMBOOT_DIR/ec2/bin/ec2-describe-instances $SWIFTVM_INSTANCES_AS_STRING | grep INSTANCE | grep -v terminated | awk '{print $6}' |sort -u )
+      if [ "$STATUS_LIST" == "running" ]; then
+         break
+      fi
+      sleep 5
+   done
+
+   # There is some delay between when the machines are listed as 'running' and when system utilities like sshd are started
+   sleep 30
+
+   WORKER_HOSTS=$( $SWIFTVMBOOT_DIR/ec2/bin/ec2-describe-instances $SWIFTVM_INSTANCES_AS_STRING | grep INSTANCE | grep -v terminated | awk '{print $4}' )
+
+   if [ -f "$HOME/.swift/machines" ]; then
+      rm $HOME/.swift/machines
+   fi
+
+   # Start worker script
+   SSH_OPTS="-i $EC2_KEYFILE -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
+   for MACHINE in $WORKER_HOSTS
+   do
+
+      # Create a list of machines for other applications, if needed
+      echo $MACHINE >> $HOME/.swift/machines
+
+      # Copy and start worker script
+      scp $SSH_OPTS $SWIFT_BIN/$WORKER $WORKER_USERNAME@$MACHINE:$WORKER_LOCATION > /dev/null 2>&1
+      echo $! >> $PID_FILE
+
+      # Enable ssh tunneling if needed
+      if [ "$SSH_TUNNELING" == "yes" ]; then
+         ssh $SSH_OPTS -R *:$PORT:localhost:$PORT $WORKER_USERNAME@$MACHINE sleep 999 > /dev/null 2>&1 &
+         sleep 10
+         ssh $SSH_OPTS $WORKER_USERNAME@$MACHINE "$WORKER_LOCATION/$WORKER http://localhost:$PORT $MACHINE $LOG_DIR" 2>&1 &
+         echo $! >> $PID_FILE
+      else
+         echo "Starting worker on $MACHINE"
+         ssh $SSH_OPTS $WORKER_USERNAME@$MACHINE "$WORKER_LOCATION/$WORKER $EXECUTION_URL $MACHINE $LOG_DIR" > /dev/null 2>&1 &
+         echo $! >> $PID_FILE
+      fi
+
+      # Copy SSH key for easier access
+      cat $HOME/.ssh/*.pub | ssh $SSH_OPTS $WORKER_USERNAME@$MACHINE 'umask 077; cat >> $HOME/.ssh/authorized_keys' > /dev/null 2>&1
+   done
+}
+
+
 # Start SSH workers
 start-workers-ssh()
 {
@@ -125,7 +229,7 @@
          ssh $WORKER_USERNAME@$WORKER_RELAY_HOST ssh $MACHINE mkdir -p $WORKER_LOCATION > /dev/null 2>&1
          ssh $WORKER_USERNAME@$WORKER_RELAY_HOST "scp /tmp/$WORKER $WORKER_USERNAME@$MACHINE:$WORKER_LOCATION" > /dev/null 2>&1
          echo Starting worker on $MACHINE
-         ssh $WORKER_USERNAME@$WORKER_RELAY_HOST ssh $WORKER_USERNAME@$MACHINE "WORKER_LOGGING_LEVEL=$WORKER_LOGGING_LEVEL $WORKER_LOCATION/$WORKER $EXECUTION_URL $MACHINE $WORKER_LOG_DIR" &
+         ssh $WORKER_USERNAME@$WORKER_RELAY_HOST ssh $WORKER_USERNAME@$MACHINE "WORKER_LOGGING_LEVEL=$WORKER_LOGGING_LEVEL $WORKER_LOCATION/$WORKER http://localhost:$PORT $MACHINE $WORKER_LOG_DIR" &
          echo $! >> $PID_FILE
       # Connect directly
       else
@@ -308,6 +412,9 @@
    futuregrid)
       start-workers-futuregrid $LOCAL_PORT
       ;;
+   gp)
+      start-workers-gp $LOCAL_PORT
+      ;;
    ec2)
       start-workers-ec2 $LOCAL_PORT
       ;;
@@ -333,7 +440,7 @@
 wrapperlog.always.transfer=false
 execution.retries=0
 provider.staging.pin.swiftfiles=false
-sitedir.keep=true
+sitedir.keep=false
 EOF
 fi
 

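The new start-workers-ec2 function takes all of its inputs from coaster-service.conf. A minimal sketch of the EC2-related entries it reads, with placeholder values (only the variable names come from the code above; the values, and any other keys the scripts may need, are assumptions):

   WORKER_MODE=ec2                    # selects start-workers-ec2
   SWIFTVMBOOT_DIR=$HOME/swiftvmboot  # must contain ec2/bin/ec2-run-instances
   EC2_KEYFILE=$HOME/.ec2/pk.pem      # exported as EC2_PRIVATE_KEY
   EC2_CERTFILE=$HOME/.ec2/cert.pem   # exported as EC2_CERT
   EC2_AMI=ami-xxxxxxxx               # image passed to ec2-run-instances
   EC2_INSTANCE_TYPE=m1.small         # instance type (-t)
   EC2_NODES=2                        # number of instances (-n)
   WORKER_USERNAME=ubuntu             # account used for scp/ssh to the workers
   WORKER_LOCATION=/tmp               # where $WORKER is copied on each machine
   LOG_DIR=/tmp                       # worker log directory
   SSH_TUNNELING=no                   # "yes" reverse-tunnels the service port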
Modified: trunk/bin/stop-coaster-service
===================================================================
--- trunk/bin/stop-coaster-service	2011-12-24 17:03:09 UTC (rev 5471)
+++ trunk/bin/stop-coaster-service	2011-12-24 17:12:38 UTC (rev 5472)
@@ -10,7 +10,7 @@
 
 # Location of required files
 PID_FILE="$HOME/.swift/.coaster-service-pids"
-SWIFTVM_INSTANCES="$HOME/.swift/.swiftvm_instance"
+SWIFTVM_INSTANCES="$HOME/.swift/.swiftvm_instances"
 
 # Import settings
 CONFIG_FILE=$HOME/.swift/.config
@@ -23,7 +23,7 @@
       #echo "$pid"
       for i in `ps -ef| awk '$3 == '$pid' { print $2 }'`
       do
-         #echo "$i"
+         echo "Killing process $i"
          kill $i > /dev/null 2>&1
       done
       kill $pid > /dev/null 2>&1
@@ -34,12 +34,12 @@
 if [ "$WORKER_MODE" == "futuregrid" ] && [ -f "$SWIFTVM_INSTANCES" ]; then
    for INSTANCE in `cat $SWIFTVM_INSTANCES`
    do
-      $SWIFTVMBOOT_DIR/ve/bin/cloudinitd terminate $INSTANCE
+      $SWIFTVMBOOT_DIR/futuregrid/ve/bin/cloudinitd terminate $INSTANCE
    done
    rm $SWIFTVM_INSTANCES > /dev/null 2>&1
 fi
 
-if [ "$WORKER_MODE" == "ec2" ] && [ -f "$SWIFTVM_INSTANCES" ]; then
+if [ "$WORKER_MODE" == "gp" ] && [ -f "$SWIFTVM_INSTANCES" ]; then
    for INSTANCE in `cat $SWIFTVM_INSTANCES`
    do
       echo Stopping ec2 instance $INSTANCE...
@@ -49,9 +49,20 @@
   rm $SWIFTVM_INSTANCES > /dev/null 2>&1
 fi
 
-if [ -f "$HOME/.swift/.machines" ]; then
-   rm "$HOME/.swift/.machines"
+if [ "$WORKER_MODE" == "ec2" ] && [ -f "$SWIFTVM_INSTANCES" ]; then
+   export EC2_HOME="$SWIFTVMBOOT_DIR/ec2"
+   export EC2_PRIVATE_KEY="$EC2_KEYFILE"
+   export EC2_CERT="$EC2_CERTFILE"
+   INSTANCES_AS_STRING=$( cat $SWIFTVM_INSTANCES | tr "\\n" " ")
+   $SWIFTVMBOOT_DIR/ec2/bin/ec2-terminate-instances $INSTANCES_AS_STRING
+   if [ -f "$HOME/.swift/.swiftvm_instances" ]; then
+      rm $HOME/.swift/.swiftvm_instances
+   fi
 fi
 
+if [ -f "$HOME/.swift/machines" ]; then
+   rm "$HOME/.swift/machines"
+fi
+
 popd > /dev/null 2>&1
 echo Done

Added: trunk/tests/providers
===================================================================
--- trunk/tests/providers	                        (rev 0)
+++ trunk/tests/providers	2011-12-24 17:12:38 UTC (rev 5472)
@@ -0,0 +1 @@
+link sites
\ No newline at end of file


Property changes on: trunk/tests/providers
___________________________________________________________________
Added: svn:special
   + *
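The one-line body "link sites" together with the svn:special property is how Subversion stores a symbolic link, so trunk/tests/providers resolves to trunk/tests/sites on checkout. It was presumably created with something along the lines of:

   cd trunk/tests
   ln -s sites providers   # point tests/providers at tests/sites
   svn add providers       # recorded as svn:special with the body "link sites"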



