[Swift-commit] r6170 - in branches/faster: libexec resources src/org/globus/swift/catalog src/org/globus/swift/catalog/site src/org/globus/swift/catalog/transformation src/org/globus/swift/data src/org/griphyn/vdl/karajan src/org/griphyn/vdl/karajan/functions src/org/griphyn/vdl/karajan/lib src/org/griphyn/vdl/karajan/lib/cache src/org/griphyn/vdl/karajan/lib/swiftscript src/org/griphyn/vdl/karajan/monitor src/org/griphyn/vdl/karajan/monitor/items src/org/griphyn/vdl/karajan/monitor/monitors/ansi src/org/griphyn/vdl/karajan/monitor/processors src/org/griphyn/vdl/mapping src/org/griphyn/vdl/mapping/file src/org/griphyn/vdl/util
hategan at ci.uchicago.edu
Tue Jan 29 01:31:13 CST 2013
Author: hategan
Date: 2013-01-29 01:31:09 -0600 (Tue, 29 Jan 2013)
New Revision: 6170
Added:
branches/faster/libexec/scheduler.k
branches/faster/libexec/swift-int.k
branches/faster/libexec/swift-lib.k
branches/faster/libexec/swift-operators.k
branches/faster/libexec/swift-xs.k
branches/faster/libexec/swift.k
branches/faster/src/org/globus/swift/catalog/site/
branches/faster/src/org/globus/swift/catalog/site/Parser.java
branches/faster/src/org/griphyn/vdl/karajan/PairSet.java
branches/faster/src/org/griphyn/vdl/karajan/SwiftExecutor.java
branches/faster/src/org/griphyn/vdl/karajan/SwiftRootScope.java
branches/faster/src/org/griphyn/vdl/karajan/lib/CurrentThread.java
branches/faster/src/org/griphyn/vdl/karajan/lib/SiteCatalog.java
branches/faster/src/org/griphyn/vdl/karajan/lib/SwiftFunction.java
branches/faster/src/org/griphyn/vdl/mapping/OOBYield.java
Removed:
branches/faster/libexec/operators.xml
branches/faster/libexec/scheduler.xml
branches/faster/libexec/vdl-int.k
branches/faster/libexec/vdl-lib.xml
branches/faster/libexec/vdl-sc.k
branches/faster/libexec/vdl-xs.k
branches/faster/libexec/vdl.k
branches/faster/src/org/griphyn/vdl/karajan/PairIterator.java
branches/faster/src/org/griphyn/vdl/karajan/VDL2ExecutionContext.java
branches/faster/src/org/griphyn/vdl/karajan/VDL2FutureException.java
branches/faster/src/org/griphyn/vdl/karajan/lib/InfiniteCountingWhile.java
branches/faster/src/org/griphyn/vdl/karajan/lib/Kickstart.java
branches/faster/src/org/griphyn/vdl/karajan/lib/SequentialWithID.java
branches/faster/src/org/griphyn/vdl/karajan/lib/SwiftArg.java
branches/faster/src/org/griphyn/vdl/karajan/lib/ThreadPrefix.java
branches/faster/src/org/griphyn/vdl/karajan/lib/Typecheck.java
branches/faster/src/org/griphyn/vdl/karajan/lib/VDLFunction.java
Modified:
branches/faster/libexec/vdl-int-staging.k
branches/faster/resources/Karajan.stg
branches/faster/src/org/globus/swift/catalog/transformation/File.java
branches/faster/src/org/globus/swift/data/Action.java
branches/faster/src/org/globus/swift/data/Query.java
branches/faster/src/org/griphyn/vdl/karajan/ArrayIndexFutureList.java
branches/faster/src/org/griphyn/vdl/karajan/AssertFailedException.java
branches/faster/src/org/griphyn/vdl/karajan/DSHandleFutureWrapper.java
branches/faster/src/org/griphyn/vdl/karajan/FuturePairIterator.java
branches/faster/src/org/griphyn/vdl/karajan/FutureTracker.java
branches/faster/src/org/griphyn/vdl/karajan/FutureWrapper.java
branches/faster/src/org/griphyn/vdl/karajan/HangChecker.java
branches/faster/src/org/griphyn/vdl/karajan/Loader.java
branches/faster/src/org/griphyn/vdl/karajan/Mergeable.java
branches/faster/src/org/griphyn/vdl/karajan/Monitor.java
branches/faster/src/org/griphyn/vdl/karajan/Pair.java
branches/faster/src/org/griphyn/vdl/karajan/VDSAdaptiveScheduler.java
branches/faster/src/org/griphyn/vdl/karajan/VDSTaskTransformer.java
branches/faster/src/org/griphyn/vdl/karajan/WaitingThreadsMonitor.java
branches/faster/src/org/griphyn/vdl/karajan/functions/ConfigProperty.java
branches/faster/src/org/griphyn/vdl/karajan/functions/ProcessBulkErrors.java
branches/faster/src/org/griphyn/vdl/karajan/lib/AbsFileName.java
branches/faster/src/org/griphyn/vdl/karajan/lib/AppStageins.java
branches/faster/src/org/griphyn/vdl/karajan/lib/AppStageouts.java
branches/faster/src/org/griphyn/vdl/karajan/lib/AppendArray.java
branches/faster/src/org/griphyn/vdl/karajan/lib/CacheAddAndLockFile.java
branches/faster/src/org/griphyn/vdl/karajan/lib/CacheAddFile.java
branches/faster/src/org/griphyn/vdl/karajan/lib/CacheFileRemoved.java
branches/faster/src/org/griphyn/vdl/karajan/lib/CacheFunction.java
branches/faster/src/org/griphyn/vdl/karajan/lib/CacheUnlockFiles.java
branches/faster/src/org/griphyn/vdl/karajan/lib/CleanDataset.java
branches/faster/src/org/griphyn/vdl/karajan/lib/CloseDataset.java
branches/faster/src/org/griphyn/vdl/karajan/lib/CreateArray.java
branches/faster/src/org/griphyn/vdl/karajan/lib/DoRestartLog.java
branches/faster/src/org/griphyn/vdl/karajan/lib/Executable.java
branches/faster/src/org/griphyn/vdl/karajan/lib/Execute.java
branches/faster/src/org/griphyn/vdl/karajan/lib/ExpandArguments.java
branches/faster/src/org/griphyn/vdl/karajan/lib/FileCopier.java
branches/faster/src/org/griphyn/vdl/karajan/lib/FileName.java
branches/faster/src/org/griphyn/vdl/karajan/lib/Flatten.java
branches/faster/src/org/griphyn/vdl/karajan/lib/FringePaths.java
branches/faster/src/org/griphyn/vdl/karajan/lib/GetArrayIterator.java
branches/faster/src/org/griphyn/vdl/karajan/lib/GetDatasetProvenanceID.java
branches/faster/src/org/griphyn/vdl/karajan/lib/GetField.java
branches/faster/src/org/griphyn/vdl/karajan/lib/GetFieldSubscript.java
branches/faster/src/org/griphyn/vdl/karajan/lib/GetFieldValue.java
branches/faster/src/org/griphyn/vdl/karajan/lib/GetURLPrefix.java
branches/faster/src/org/griphyn/vdl/karajan/lib/InFileDirs.java
branches/faster/src/org/griphyn/vdl/karajan/lib/IsDone.java
branches/faster/src/org/griphyn/vdl/karajan/lib/IsFileBound.java
branches/faster/src/org/griphyn/vdl/karajan/lib/IsLogged.java
branches/faster/src/org/griphyn/vdl/karajan/lib/IsRestartable.java
branches/faster/src/org/griphyn/vdl/karajan/lib/JobConstraints.java
branches/faster/src/org/griphyn/vdl/karajan/lib/Log.java
branches/faster/src/org/griphyn/vdl/karajan/lib/LogVar.java
branches/faster/src/org/griphyn/vdl/karajan/lib/Mark.java
branches/faster/src/org/griphyn/vdl/karajan/lib/New.java
branches/faster/src/org/griphyn/vdl/karajan/lib/NiceName.java
branches/faster/src/org/griphyn/vdl/karajan/lib/Operators.java
branches/faster/src/org/griphyn/vdl/karajan/lib/OutFileDirs.java
branches/faster/src/org/griphyn/vdl/karajan/lib/OutFiles.java
branches/faster/src/org/griphyn/vdl/karajan/lib/Parameterlog.java
branches/faster/src/org/griphyn/vdl/karajan/lib/PartialCloseDataset.java
branches/faster/src/org/griphyn/vdl/karajan/lib/PathUtils.java
branches/faster/src/org/griphyn/vdl/karajan/lib/Range.java
branches/faster/src/org/griphyn/vdl/karajan/lib/RuntimeStats.java
branches/faster/src/org/griphyn/vdl/karajan/lib/SetFieldValue.java
branches/faster/src/org/griphyn/vdl/karajan/lib/SetFutureFault.java
branches/faster/src/org/griphyn/vdl/karajan/lib/SetWaitCount.java
branches/faster/src/org/griphyn/vdl/karajan/lib/SiteProfile.java
branches/faster/src/org/griphyn/vdl/karajan/lib/SliceArray.java
branches/faster/src/org/griphyn/vdl/karajan/lib/Stagein.java
branches/faster/src/org/griphyn/vdl/karajan/lib/Stageout.java
branches/faster/src/org/griphyn/vdl/karajan/lib/TCProfile.java
branches/faster/src/org/griphyn/vdl/karajan/lib/Throttled.java
branches/faster/src/org/griphyn/vdl/karajan/lib/ThrottledParallelFor.java
branches/faster/src/org/griphyn/vdl/karajan/lib/Tracer.java
branches/faster/src/org/griphyn/vdl/karajan/lib/UnitEnd.java
branches/faster/src/org/griphyn/vdl/karajan/lib/UnitStart.java
branches/faster/src/org/griphyn/vdl/karajan/lib/UnwrapClosedList.java
branches/faster/src/org/griphyn/vdl/karajan/lib/WaitFieldValue.java
branches/faster/src/org/griphyn/vdl/karajan/lib/cache/CacheReturn.java
branches/faster/src/org/griphyn/vdl/karajan/lib/cache/File.java
branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Assert.java
branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/ExtractInt.java
branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/FileName.java
branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/FileNames.java
branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/FnArg.java
branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Fprintf.java
branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Java.java
branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Misc.java
branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/ReadData.java
branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/ReadStructured.java
branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Sprintf.java
branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Tracef.java
branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/WriteData.java
branches/faster/src/org/griphyn/vdl/karajan/monitor/SystemState.java
branches/faster/src/org/griphyn/vdl/karajan/monitor/items/SummaryItem.java
branches/faster/src/org/griphyn/vdl/karajan/monitor/monitors/ansi/WorkerTerminalInputHandler.java
branches/faster/src/org/griphyn/vdl/karajan/monitor/processors/ExecutionContextProcessor.java
branches/faster/src/org/griphyn/vdl/karajan/monitor/processors/TaskProcessor.java
branches/faster/src/org/griphyn/vdl/mapping/AbstractDataNode.java
branches/faster/src/org/griphyn/vdl/mapping/AbstractMapper.java
branches/faster/src/org/griphyn/vdl/mapping/ArrayDataNode.java
branches/faster/src/org/griphyn/vdl/mapping/DSHandle.java
branches/faster/src/org/griphyn/vdl/mapping/DataDependentException.java
branches/faster/src/org/griphyn/vdl/mapping/DependentException.java
branches/faster/src/org/griphyn/vdl/mapping/HandleOpenException.java
branches/faster/src/org/griphyn/vdl/mapping/Mapper.java
branches/faster/src/org/griphyn/vdl/mapping/MappingParam.java
branches/faster/src/org/griphyn/vdl/mapping/RootArrayDataNode.java
branches/faster/src/org/griphyn/vdl/mapping/RootDataNode.java
branches/faster/src/org/griphyn/vdl/mapping/file/AbstractFileMapper.java
branches/faster/src/org/griphyn/vdl/mapping/file/CSVMapper.java
branches/faster/src/org/griphyn/vdl/mapping/file/ConcurrentMapper.java
branches/faster/src/org/griphyn/vdl/mapping/file/ExternalMapper.java
branches/faster/src/org/griphyn/vdl/mapping/file/FileSystemArrayMapper.java
branches/faster/src/org/griphyn/vdl/mapping/file/FixedArrayFileMapper.java
branches/faster/src/org/griphyn/vdl/mapping/file/RegularExpressionMapper.java
branches/faster/src/org/griphyn/vdl/mapping/file/SimpleFileMapper.java
branches/faster/src/org/griphyn/vdl/mapping/file/SingleFileMapper.java
branches/faster/src/org/griphyn/vdl/mapping/file/StructuredRegularExpressionMapper.java
branches/faster/src/org/griphyn/vdl/mapping/file/TestMapper.java
branches/faster/src/org/griphyn/vdl/util/VDL2Config.java
Log:
initial update of faster branch
Deleted: branches/faster/libexec/operators.xml
===================================================================
--- branches/faster/libexec/operators.xml 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/libexec/operators.xml 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,19 +0,0 @@
-<karajan>
- <namespace prefix="vdlop">
- <export name="sum"><elementDef classname="org.griphyn.vdl.karajan.lib.Operators"/></export>
- <export name="subtraction"><elementDef classname="org.griphyn.vdl.karajan.lib.Operators"/></export>
- <export name="product"><elementDef classname="org.griphyn.vdl.karajan.lib.Operators"/></export>
- <export name="fquotient"><elementDef classname="org.griphyn.vdl.karajan.lib.Operators"/></export>
- <export name="iquotient"><elementDef classname="org.griphyn.vdl.karajan.lib.Operators"/></export>
- <export name="remainder"><elementDef classname="org.griphyn.vdl.karajan.lib.Operators"/></export>
- <export name="le"><elementDef classname="org.griphyn.vdl.karajan.lib.Operators"/></export>
- <export name="ge"><elementDef classname="org.griphyn.vdl.karajan.lib.Operators"/></export>
- <export name="gt"><elementDef classname="org.griphyn.vdl.karajan.lib.Operators"/></export>
- <export name="lt"><elementDef classname="org.griphyn.vdl.karajan.lib.Operators"/></export>
- <export name="eq"><elementDef classname="org.griphyn.vdl.karajan.lib.Operators"/></export>
- <export name="ne"><elementDef classname="org.griphyn.vdl.karajan.lib.Operators"/></export>
- <export name="and"><elementDef classname="org.griphyn.vdl.karajan.lib.Operators"/></export>
- <export name="or"><elementDef classname="org.griphyn.vdl.karajan.lib.Operators"/></export>
- <export name="not"><elementDef classname="org.griphyn.vdl.karajan.lib.Operators"/></export>
- </namespace>
-</karajan>
Added: branches/faster/libexec/scheduler.k
===================================================================
--- branches/faster/libexec/scheduler.k (rev 0)
+++ branches/faster/libexec/scheduler.k 2013-01-29 07:31:09 UTC (rev 6170)
@@ -0,0 +1,32 @@
+import(sys)
+import(task)
+import('swift-lib')
+
+sites := swift:configProperty("sites.file")
+TCFile := swift:configProperty("tc.file")
+
+log(LOG:INFO, "Using sites file: {sites}")
+if (!file:exists(sites)) {
+ throw("Could not find sites file: {sites}")
+}
+
+log(LOG:INFO, "Using tc.data: {TCFile}")
+
+scheduler("vds-adaptive", shareID = "swift:scheduler:{sites}"
+ property("transformationCatalogFile", TCFile)
+ property("clusteringEnabled", swift:configProperty("clustering.enabled"))
+ property("clusteringQueueDelay", swift:configProperty("clustering.queue.delay"))
+ property("clusteringMinTime", swift:configProperty("clustering.min.time"))
+
+ property("hostSubmitThrottle", swift:configProperty("throttle.host.submit"))
+ property("submitThrottle", swift:configProperty("throttle.submit"))
+ property("jobsPerCpu", "off")
+ property("maxTransfers", swift:configProperty("throttle.transfers"))
+ property("maxFileOperations", swift:configProperty("throttle.file.operations"))
+ property("jobThrottle", swift:configProperty("throttle.score.job.factor"))
+
+ task:availableHandlers(type = "execution", includeAliases = true)
+ task:availableHandlers(type = "file", includeAliases = true)
+
+ resources = swift:siteCatalog(sites)
+)
Deleted: branches/faster/libexec/scheduler.xml
===================================================================
--- branches/faster/libexec/scheduler.xml 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/libexec/scheduler.xml 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,53 +0,0 @@
-<project>
- <import file="sys.xml"/>
- <import file="task.xml"/>
- <import file="vdl-lib.xml"/>
- <import file="vdl-sc.k"/>
-
- <set names="sites, tcfile, clusteringEnabled, clusteringQueueDelay, clusteringMinTime">
- <vdl:configProperty name="sites.file"/>
- <vdl:configProperty name="tc.file"/>
- <vdl:configProperty name="clustering.enabled"/>
- <vdl:configProperty name="clustering.queue.delay"/>
- <vdl:configProperty name="clustering.min.time"/>
- </set>
-
- <set names="hostSubmitThrottle, submitThrottle, maxTransfers, maxFileOperations, jobThrottle">
- <vdl:configProperty name="throttle.host.submit"/>
- <vdl:configProperty name="throttle.submit"/>
- <vdl:configProperty name="throttle.transfers"/>
- <vdl:configProperty name="throttle.file.operations"/>
- <vdl:configProperty name="throttle.score.job.factor"/>
- </set>
-
- <log level="info"><string>Using sites file: {sites}</string></log>
- <if><not><file:exists name="{sites}"/>
- </not>
- <then>
- <generateError>
- <string>
- Could not find sites file: {sites}
- </string></generateError></then>
- </if>
-
- <log level="info"><string>Using tc.data: {tcfile}</string></log>
-
- <scheduler type="vds-adaptive" shareID="vdl2:scheduler:{sites}">
- <property name="transformationCatalogFile" value="{tcfile}"/>
- <property name="clusteringEnabled" value="{clusteringEnabled}"/>
- <property name="clusteringQueueDelay" value="{clusteringQueueDelay}"/>
- <property name="clusteringMinTime" value="{clusteringMinTime}"/>
-
- <property name="hostSubmitThrottle" value="{hostSubmitThrottle}"/>
- <property name="submitThrottle" value="{submitThrottle}"/>
- <property name="jobsPerCpu" value="off"/>
- <property name="maxTransfers" value="{maxTransfers}"/>
- <property name="maxFileOperations" value="{maxFileOperations}"/>
- <property name="jobThrottle" value="{jobThrottle}"/>
-
- <task:availableHandlers type="execution" includeAliases="true"/>
- <task:availableHandlers type="file" includeAliases="true"/>
-
- <vdl:siteCatalog file="{sites}"/>
- </scheduler>
-</project>
Added: branches/faster/libexec/swift-int.k
===================================================================
--- branches/faster/libexec/swift-int.k (rev 0)
+++ branches/faster/libexec/swift-int.k 2013-01-29 07:31:09 UTC (rev 6170)
@@ -0,0 +1,609 @@
+import(sys)
+import(task)
+import('swift-lib')
+/*
+ * Things that are not exposed to the translated file
+ */
+
+SWIFT:SCRIPT_NAME := contextAttribute("SWIFT:SCRIPT_NAME")
+SWIFT:RUN_ID := contextAttribute("SWIFT:RUN_ID")
+SWIFT:HOME := contextAttribute("SWIFT:HOME")
+
+namespace(swift) {
+
+ rmdir := function(dir, host) {
+ parallelFor(entry, file:list(dir, host=host)) {
+ epath := "{dir}/{entry}"
+ if (file:isDirectory(epath, host=host)) {
+ rmdir(epath, host)
+ }
+ else {
+ file:remove(epath, host=host)
+ }
+ }
+ dir:remove(dir, host=host)
+ }
+
+ createdirs := function(path, dir, host) {
+ dc := dircat(dir, path)
+ log(LOG:INFO, "START path={path} dir={dir} - Creating directory structure")
+
+ dir:make(dc, host=host)
+ }
+
+ checkErrorFile := function(rhost, wfdir, jobid, jobdir) {
+ if (file:exists("{wfdir}/status/{jobdir}/{jobid}-error", host=rhost)) {
+ log(LOG:INFO, "FAILURE jobid={jobid} - Failure file found")
+ task:transfer(srchost=rhost, srcdir="{wfdir}/status/{jobdir}", srcfile="{jobid}-error")
+ error := parallel(
+ file:remove("{wfdir}/status/{jobdir}/{jobid}-error", host=rhost)
+ sequential(
+ str:strip(file:read("{jobid}-error"))
+ file:remove("{jobid}-error")
+ )
+ )
+ error
+ }
+ else {
+ log(LOG:INFO, "NO_STATUS_FILE jobid={jobid} - Error file missing")
+ throw("No status file was found. Check the shared filesystem on {rhost}")
+ }
+ }
+
+ checkJobStatus := function(rhost, wfdir, jobid, tr, jobdir) {
+ log(LOG:DEBUG, "START jobid={jobid}")
+ try {
+ file:remove("{wfdir}/status/{jobdir}/{jobid}-success", host=rhost)
+ log(LOG:INFO, "SUCCESS jobid={jobid} - Success file found")
+ }
+ else {
+ try {
+ msg := checkErrorFile(rhost, wfdir, jobid, jobdir)
+ }
+ else {
+ log(LOG:INFO, "NO_STATUS_FILE jobid={jobid} - Both status files are missing")
+ throw("No status file was found. Check the shared filesystem on {rhost}")
+ }
+ }
+ else {
+ throw(checkErrorFile(rhost, wfdir, jobid, jobdir))
+ }
+ }
+
+ initSharedDir := function(progress, rhost) {
+ once(list(rhost, "shared")) {
+ setProgress(progress, "Initializing site shared directory")
+ log(LOG:INFO, "START host={rhost} - Initializing shared directory")
+
+ wfdir := "{SWIFT:SCRIPT_NAME}-{SWIFT:RUN_ID}"
+ sharedDir := dircat(wfdir, "shared")
+
+ dir:make(sharedDir, host = rhost)
+ transfer(srcdir="{SWIFT:HOME}/libexec/", srcfile=siteProfile(rhost, "swift:wrapperScript"), destdir=sharedDir, desthost=rhost)
+ transfer(srcdir="{SWIFT:HOME}/libexec/", srcfile="_swiftseq", destdir=sharedDir, desthost=rhost)
+ dir:make(dircat(wfdir, "kickstart"), host=rhost)
+
+ statusMode := configProperty("status.mode",host=rhost)
+ if (statusMode == "files") {
+ dir:make(dircat(wfdir, "status"), host=rhost)
+ }
+
+ wrapperMode := configProperty("wrapper.parameter.mode",host=rhost)
+ if (wrapperMode == "files") {
+ dir:make(dircat(wfdir, "parameters"), host=rhost)
+ }
+
+ dir:make(dircat(wfdir, "info"), host=rhost)
+ wfdir, sharedDir
+ //we send the cleanup data to vdl:main()
+ to(cleanup, list(wfdir, rhost))
+ log(LOG:INFO, "END host={rhost} - Done initializing shared directory")
+ }
+ }
+
+ initDDir := function() {
+ ddir := "{SWIFT:SCRIPT_NAME}-{SWIFT:RUN_ID}"
+
+ once(ddir) {
+ if(!file:exists(ddir)) {
+ task:dir:make(ddir)
+ }
+ }
+ ddir
+ }
+
+ inFiles := function(stageins) {
+ pathnames(stageins)
+ }
+
+ fileDirs := function(stageins, stageouts) {
+ list(
+ unique(
+ inFileDirs(stageins)
+ outFileDirs(stageouts)
+ )
+ )
+ }
+
+ createDirSet := function(jobid, destdir, host, dirs) {
+ /*
+ * Ideally this would be done by creating a tree of the directories
+ * to be created and (eventually) exploiting the concurrency in that.
+ */
+ log(LOG:INFO, "START jobid={jobid} host={host} - Initializing directory structure")
+ for(u, dirs) {
+ cache(list(u, destdir, host)) {
+ createdirs(u, destdir, host)
+ }
+ }
+ log(LOG:INFO, "END jobid={jobid} - Done initializing directory structure")
+ }
+
+ cleanup := function(dir, host) {
+ log(LOG:INFO, "START dir={dir} host={host}")
+ cdmfile := cdm:file()
+ log(LOG:DEBUG, "cdmfile {cdmfile}")
+ if (cdmfile != "" & cdm:get("GATHER_DIR") != "UNSET") {
+ log(LOG:INFO, "submitting cdm_cleanup.sh to {dir}")
+ task:transfer(srcfile="cdm_cleanup.sh",
+ srcdir="{SWIFT:HOME}/libexec",
+ desthost=host, destdir=dir)
+ task:transfer(srcfile="cdm_lib.sh",
+ srcdir="{SWIFT:HOME}/libexec",
+ desthost=host, destdir=dir)
+ log(LOG:INFO, "execute: cdm_cleanup.sh")
+ task:execute(
+ executable="/bin/bash",
+ arguments=list("{dir}/cdm_cleanup.sh",
+ cdm:get("GATHER_DIR"), cdm:get("GATHER_TARGET"), UID())
+ host=host, batch=true, TCProfile(host))
+ }
+ if (swift:configProperty("sitedir.keep") == "false") {
+ task:execute(
+ siteProfile(host, "swift:cleanupCommand"),
+ arguments=list(
+ siteProfile(host, "swift:cleanupCommandOptions"),
+ dir
+ )
+ host=host, batch=true, TCProfile(host))
+ }
+ log(LOG:INFO, "END dir={dir} host={host}")
+ }
+
+ cleanupFiles := function(files, host) {
+ parallelFor(r, files) {
+ log(LOG:INFO, "Purging ", r, " on ", host)
+ file:remove(r, host=host)
+ cacheFileRemoved(r, host)
+ }
+ }
+
+ stageWrapperParams := function(jobid, jobdir, wrapfile, dir, host) {
+ log(LOG:INFO, "START jobid={jobid} - staging in wrapper params")
+ provider := provider(wrapfile)
+ srchost := hostname(wrapfile)
+ srcdir := swift:dirname(wrapfile)
+ destdir := dircat(dir, "/parameters/{jobdir}/")
+ filename := basename(wrapfile)
+
+ cache(list(destdir, host)) {
+ dir:make(destdir, host=host, provider=provider)
+ }
+
+ task:transfer(srcprovider=provider, srchost=srchost, srcfile=filename, srcdir=srcdir, desthost=host, destdir=destdir)
+ log(LOG:INFO, "END jobid={jobid}")
+ }
+
+ doStageinFile := function(provider, srchost, srcfile, srcdir, desthost, destdir, size, policy) {
+ cacheAddAndLockFile(srcfile, destdir, desthost, size) {
+ cleanupFiles(cacheFilesToRemove, desthost)
+
+ log(LOG:DEBUG, "FILE_STAGE_IN_START file={srcfile} ",
+ "srchost={srchost} srcdir={srcdir} srcname={srcfile} ",
+ "desthost={desthost} destdir={destdir} provider={provider} ",
+ "policy={policy}")
+ if (policy == "DEFAULT") {
+ restartOnError(2) {
+ task:transfer(srcprovider=provider, srchost=srchost, srcfile=srcfile,
+ srcdir=srcdir, desthost=desthost, destdir=destdir,
+ transferParams(srchost, desthost, provider))
+ }
+ }
+ else if (policy == "BROADCAST") {
+ log(LOG:DEBUG, "FILE_STAGE_IN_BROADCAST file={srcfile} policy={policy}")
+ cdm:broadcast(srcfile=srcfile, srcdir=srcdir)
+ }
+ else if (policy == "EXTERNAL") {
+ log(LOG:DEBUG, "FILE_STAGE_IN_EXTERNAL file={srcfile} policy={policy}")
+ cdm:external(srcfile=srcfile, srcdir=srcdir,
+ desthost=desthost, destdir=destdir)
+ }
+ else {
+ log(LOG:DEBUG, "FILE_STAGE_IN_SKIP file={srcfile} policy={policy}")
+ }
+ log(LOG:DEBUG, "FILE_STAGE_IN_END file={srcfile} ",
+ "srchost={srchost} srcdir={srcdir} srcname={srcfile} ",
+ "desthost={desthost} destdir={destdir} provider={provider}")
+ }
+ cdm:wait()
+ }
+
+ doStagein := function(jobid, files, dir, host) {
+ log(LOG:INFO, "START jobid={jobid} - Staging in files")
+
+ cdmfile := cdm:file()
+ libexec := "{SWIFT:HOME}/libexec"
+
+ if (cdmfile != "") {
+ doStageinFile(provider="file", srchost="localhost", srcfile=basename(cdmfile),
+ srcdir=swift:dirname(cdmfile), desthost=host, destdir=dir,
+ size=file:size(cdmfile), policy="DEFAULT")
+ doStageinFile(provider="file", srchost="localhost", srcfile="cdm.pl",
+ srcdir=libexec, desthost=host, destdir=dir,
+                       size=file:size("{libexec}/cdm.pl"), policy="DEFAULT")
+ doStageinFile(provider="file", srchost="localhost", srcfile="cdm_lib.sh",
+ srcdir=libexec, desthost=host, destdir=dir,
+                       size=file:size("{libexec}/cdm_lib.sh"), policy="DEFAULT")
+ }
+
+ parallelFor(file, files) {
+ provider := provider(file)
+ srchost := hostname(file)
+ srcdir := swift:dirname(file)
+ destdir := dircat(dir, reldirname(file))
+ filename := basename(file)
+ size := file:size("{srcdir}/{filename}", host=srchost, provider=provider)
+
+ policy := cdm:query(query=file)
+ log(LOG:DEBUG, "CDM: {file} : {policy}")
+
+ doStageinFile(provider=provider, srchost=srchost, srcfile=filename,
+ srcdir=srcdir, desthost=host, destdir=destdir, size=size, policy=policy)
+ }
+ log(LOG:INFO, "END jobid={jobid} - Staging in finished")
+ }
+
+ doStageout := function(jobid, stageouts, dir, host) {
+ log(LOG:INFO, "START jobid={jobid} - Staging out files")
+ log(LOG:DEBUG, "stageouts: {stageouts}")
+ done := list(
+ parallelFor(pv, stageouts) {
+ (path, var) := each(pv)
+ file := absFileName(getField(var, path = path))
+ provider := provider(file)
+ dhost := hostname(file)
+ rdir := dircat(dir, reldirname(file))
+ bname := basename(file)
+ ldir := swift:dirname(file)
+ fullLocal := dircat(ldir, bname)
+ fullRemote := dircat(rdir, bname)
+
+ log(LOG:DEBUG, "FILE_STAGE_OUT_START srcname={bname} srcdir={rdir} srchost={host} ",
+ "destdir={ldir} desthost={dhost} provider={provider}")
+ //make sure we do have the directory on the client side
+ dir:make(ldir, host=dhost, provider=provider)
+ policy := cdm:query(query=file)
+ log(LOG:DEBUG, "CDM: {file} : {policy}")
+ if (policy == "DEFAULT" | policy == "BROADCAST") {
+ restartOnError(2) {
+ task:transfer(srchost=host, srcfile=bname,srcdir=rdir,
+ destdir=ldir, desthost=dhost, destprovider=provider,
+ transferParams(host, dhost, provider))
+ }
+ }
+ else {
+ log(LOG:DEBUG, "FILE_STAGE_OUT_SKIP srcname={bname}")
+ }
+ log(LOG:DEBUG, "FILE_STAGE_OUT_END srcname={bname} srcdir={rdir} srchost={host} ",
+ "destdir={ldir} desthost={dhost} provider={provider}")
+
+ list(bname, rdir, host, file:size(fullLocal))
+ }
+ )
+ parallelFor(f, done) {
+ (bname, rdir, host, size) := each(f)
+ cacheAddFile(bname, rdir, host, size, cleanupFiles(cacheFilesToRemove, host))
+ }
+ log(LOG:INFO, "END jobid={jobid} - Staging out finished")
+ }
+
+ export(graphStuff,
+ function(tr, stagein, stageout, err, args = null) {
+ if (configProperty("pgraph") != "false") {
+ errprops := if(err, ",color=lightsalmon", ",color=lightsteelblue1")
+ tp := currentThread()
+ to (graph) {
+ concat(str:quote(tp), " [label=", str:quote(tr), "{errprops}]")
+ }
+ for (si, stagein) {
+ si := basename(si)
+ to(graph) {
+ concat(str:quote(si), " [shape=parallelogram]")
+ concat(str:quote(si), " -> ", str:quote(tp))
+ }
+ }
+ for (pv, stageout) {
+ (path, var) := each(pv)
+ file := fileName(getField(var, path=path))
+ file := basename(file)
+ label := niceName(var, path = path)
+ to(graph) {
+ concat(str:quote(file), " [shape=parallelogram,label=", str:quote(label), "]")
+ concat(str:quote(tp), " -> ", str:quote(file))
+ }
+ }
+ }
+ }
+ )
+
+ fileSizes := function(files) {
+ math:sum(
+ for(f, files, file:size(f))
+ )
+ }
+
+ transferStandardFiles := function(rhost, tmpdir, jobid, stdout, stderr) {
+ concat(
+ for(f, list(list("stderr.txt", stderr), list("stdout.txt", stdout))) {
+ (name, file) := each(f)
+ destfile := concat("{jobid}-", basename(file))
+ try {
+ task:transfer(srchost=rhost, srcdir=tmpdir, srcfile=file, destfile=destfile)
+ "\n {name}: "
+ strip(file:read(destfile))
+ }
+ else {
+ ""
+ }
+ maybe(file:remove(destfile))
+ }
+ )
+ }
+
+
+ transferWrapperLog := function(rhost, wfdir, jobid, jobdir) {
+ recfile := "{jobid}-info"
+ srcdir := dircat("{wfdir}/info/", jobdir)
+ try {
+ task:transfer(srchost=rhost, srcdir=srcdir, srcfile=recfile, destdir="{SWIFT:SCRIPT_NAME}-{SWIFT:RUN_ID}.d/")
+ }
+ else catch (exception) {
+ maybe(file:remove(recfile))
+ log(LOG:INFO, "Failed to transfer wrapper log for job {jobid} from {srcdir} on {rhost}", exception)
+ }
+ recfile
+ }
+
+ export(cleanups,
+ function(cleanup) {
+ log(LOG:INFO, "START cleanups={cleanup}")
+ parallelFor(i, cleanup) {
+ (dir, host) := each(i)
+ try {
+ cleanup(dir, host)
+ }
+ else catch(exception) {
+ log(LOG:DEBUG, "EXCEPTION - Exception caught while cleaning up", exception)
+ to(warnings, exception("Cleanup on {host} failed", exception))
+ }
+ }
+ log(LOG:INFO, "END cleanups={cleanup}")
+ }
+ )
+
+ export(execute2,
+ function(progress, tr, stagein, stageout, restartout
+ replicationGroup, replicationChannel
+ arguments = [], stdin = null, stdout = null, stderr = null, attributes = null) {
+
+ stagein := list(unique(each(stagein)))
+ stageout := list(unique(each(stageout)))
+
+ allocateHost(rhost, constraints = jobConstraints(tr, stagein = stagein)) {
+
+ ddir := initDDir(),
+ (wfdir, sharedDir) :=
+ try {
+ initSharedDir(progress, rhost)
+ }
+ else catch(exception) {
+ throw(exception("Could not initialize shared directory on {rhost}", exception))
+ }
+
+ uid := UID()
+ jobdir := substring(uid, from=0, to=1)
+ jobid := "{tr}-{uid}"
+
+ log(LOG:DEBUG, "THREAD_ASSOCIATION jobid={jobid} thread=", currentThread(), " host={rhost} replicationGroup={replicationGroup}")
+
+ statusMode := configProperty("status.mode",host=rhost)
+ wrapperMode := configProperty("wrapper.parameter.mode",host=rhost)
+
+ wrapfile := "{ddir}/param-{jobid}"
+
+ stdout := if (stdout == null, "stdout.txt", getFieldValue(stdout))
+ stderr := if (stderr == null, "stderr.txt", getFieldValue(stderr))
+
+ fileDirs := fileDirs(stagein, stageout)
+ os := siteProfile(rhost, "SYSINFO:OS")
+
+ if(wrapperMode == "files") {
+ sys:file:write(wrapfile) {
+ "-scratch ", try(siteProfile(rhost, "scratch"), ""),
+ "\n-e ", executable(tr, rhost),
+ "\n-out ", stdout,
+ "\n-err ", stderr,
+ "\n-i ", if (stdin != null, getFieldValue(stdin)),
+ "\n-d ", flatten(each(fileDirs)),
+ "\n-if ", flatten(inFiles(stagein)),
+ "\n-of ", flatten(outFiles(stageout)),
+ "\n-cdmfile ", cdm:file(),
+ "\n-status ", statusMode,
+ for(a, arguments) {
+ "\n-a ", a
+ }
+ }
+ }
+
+ setProgress(progress, "Stage in")
+ tmpdir := dircat("{wfdir}/jobs/{jobdir}", jobid)
+
+ try {
+ createDirSet(jobid, sharedDir, rhost, fileDirs)
+ doStagein(jobid, stagein, sharedDir, rhost)
+ if(wrapperMode == "files") {
+ stageWrapperParams(jobid, jobdir, wrapfile, wfdir, rhost)
+ }
+
+ log(LOG:DEBUG, "JOB_START jobid={jobid} tr={tr}", maybe(" arguments=", arguments), " tmpdir={tmpdir} host={rhost}")
+
+ setProgress(progress, "Submitting")
+
+ if (wrapperMode == "files") {
+ swift:execute(
+ progress,
+ siteProfile(rhost, "swift:wrapperInterpreter"),
+ list(
+ siteProfile(rhost, "swift:wrapperInterpreterOptions"),
+ dircat("shared", siteProfile(rhost, "swift:wrapperScript"), os=os),
+ jobid, "-p", jobdir
+ )
+ directory = wfdir
+ redirect = false
+ host = rhost
+ TCProfile(rhost, if (attributes != null, attributes = attributes), tr = tr)
+ replicationGroup = replicationGroup
+ replicationChannel = replicationChannel
+ jobid = jobid
+ )
+ }
+ else if (wrapperMode == "args") {
+ swift:execute(
+ progress,
+ siteProfile(rhost, "swift:wrapperInterpreter"),
+ list(
+ siteProfile(rhost, "swift:wrapperInterpreterOptions"),
+ dircat("shared", siteProfile(rhost, "swift:wrapperScript"), os=os),
+ jobid,
+ "-jobdir", jobdir,
+ "-scratch", try(siteProfile(rhost, "scratch"), "")
+ "-e", executable(tr, rhost),
+ "-out", stdout,
+ "-err", stderr,
+ "-i", if (stdin != null, getFieldValue(stdin)),
+ "-d", flatten(each(fileDirs)),
+ "-if", flatten(inFiles(stagein)),
+ "-of", flatten(outFiles(stageout)),
+ "-cdmfile", cdm:file(),
+ "-status", statusMode,
+ "-a", if (arguments != null, each(arguments))
+ )
+ directory = wfdir
+ redirect = false
+ host = rhost
+ TCProfile(rhost, if (attributes != null, attributes = attributes), tr = tr)
+ replicationGroup = replicationGroup
+ replicationChannel = replicationChannel
+ jobid = jobid
+ )
+ }
+
+ setProgress(progress, "Checking status")
+ if (statusMode == "files") {
+ checkJobStatus(rhost, wfdir, jobid, tr, jobdir)
+ }
+
+ if (wrapperMode == "files") {
+ file:remove(wrapfile)
+ }
+
+ log(LOG:DEBUG, "STAGING_OUT jobid={jobid}")
+
+
+			/* need to stage the files to the upper scratch area in case they are not transferred
+			   to another site before all the files get cleaned out */
+
+
+ setProgress(progress, "Stage out")
+ doStageout(jobid, stageout, sharedDir, rhost)
+ doRestartLog(restartout)
+
+ if (configProperty("wrapperlog.always.transfer") == "true") {
+ discard(transferWrapperLog(rhost, wfdir, jobid, jobdir))
+ }
+
+ cacheUnlockFiles(stagein, sharedDir, rhost) {
+ cleanupFiles(cacheFilesToRemove, rhost)
+ }
+
+ log(LOG:DEBUG, "JOB_END jobid={jobid}")
+ }
+ else catch(prev) {
+ if (matches(prev, "^Abort$")) {
+ log(LOG:DEBUG, "JOB_CANCELED jobid={jobid}")
+ cacheUnlockFiles(stagein, sharedDir, rhost, force=false) {
+ cleanupFiles(cacheFilesToRemove, rhost)
+ }
+ throw(prev)
+ }
+ else {
+ setProgress(progress, "Failed but can retry")
+ exception := try(exception(checkErrorFile(rhost, wfdir, jobid, jobdir)), prev)
+
+ log(LOG:DEBUG, "APPLICATION_EXCEPTION jobid={jobid} - Application exception: ", exception)
+
+ if (matches(exception,".*executable bit.*")) {
+ throw(exception)
+ }
+
+ cacheUnlockFiles(stagein, sharedDir, rhost, force=false) {
+ cleanupFiles(cacheFilesToRemove, rhost)
+ }
+
+ outs := transferStandardFiles(rhost, tmpdir, jobid, stdout, stderr)
+
+ discard(maybe(transferWrapperLog(rhost, wfdir, jobid, jobdir)))
+
+
+ throw(
+ exception(
+ concat(
+ "Exception in {tr}:",
+ maybe("\n Arguments: ", arguments),
+ "\n Host: {rhost}",
+ "\n Directory: {tmpdir}",
+ "{outs}",
+ )
+ exception
+ )
+ )
+ }
+ }
+ }
+ }
+ )
+
+ export(generateProvenanceGraph,
+ function(gdata) {
+ pgraph := configProperty("pgraph")
+ gname := if(pgraph == "true", "{SWIFT:SCRIPT_NAME}-{SWIFT:RUN_ID}.dot", pgraph)
+ file:write(gname) {
+ "digraph SwiftProvenance \{\n",
+ " graph [", configProperty("pgraph.graph.options"), "];\n",
+ " node [", configProperty("pgraph.node.options"), "];\n",
+
+ for(i, gdata) {
+ " ", i, "\n"
+ }
+ "}\n"
+ }
+ log(LOG:INFO, "Provenance graph saved in ", gname)
+ }
+ )
+}
+
+// Local variables:
+// mode: scheme
+// tab-width: 4
+// indent-tabs-mode: t
+// End:
Added: branches/faster/libexec/swift-lib.k
===================================================================
--- branches/faster/libexec/swift-lib.k (rev 0)
+++ branches/faster/libexec/swift-lib.k 2013-01-29 07:31:09 UTC (rev 6170)
@@ -0,0 +1,145 @@
+namespace(swiftscript) {
+ export(extractInt, def("org.griphyn.vdl.karajan.lib.swiftscript.ExtractInt"))
+ export(readData, def("org.griphyn.vdl.karajan.lib.swiftscript.ReadData"))
+ export(readData2, def("org.griphyn.vdl.karajan.lib.swiftscript.ReadStructured"))
+ export(readStructured, def("org.griphyn.vdl.karajan.lib.swiftscript.ReadStructured"))
+ export(writeData, def("org.griphyn.vdl.karajan.lib.swiftscript.WriteData"))
+ export(strCat, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$StrCat"))
+ export(strCut, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$StrCut"))
+ export(strStr, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$StrStr"))
+ export(strSplit, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$StrSplit"))
+ export(strJoin, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$StrJoin"))
+ export(regexp, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$Regexp"))
+ export(toInt, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$ToInt"))
+ export(toFloat, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$ToFloat"))
+ export(format, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$Format"))
+ export(pad, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$Pad"))
+ export(sprintf, def("org.griphyn.vdl.karajan.lib.swiftscript.Sprintf"))
+ export(toString, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$ToString"))
+ export(assert, def("org.griphyn.vdl.karajan.lib.swiftscript.Assert"))
+ export(trace, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$Trace"))
+ export(tracef, def("org.griphyn.vdl.karajan.lib.swiftscript.Tracef"))
+ export(fprintf, def("org.griphyn.vdl.karajan.lib.swiftscript.Fprintf"))
+
+ /* included for backwards compatibility */
+ export(readdata, def("org.griphyn.vdl.karajan.lib.swiftscript.ReadData"))
+ export(readdata2, def("org.griphyn.vdl.karajan.lib.swiftscript.ReadStructured"))
+ export(readstructured, def("org.griphyn.vdl.karajan.lib.swiftscript.ReadStructured"))
+ export(writedata, def("org.griphyn.vdl.karajan.lib.swiftscript.WriteData"))
+ export(toint, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$ToInt"))
+ export(tofloat, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$ToFloat"))
+ export(tostring, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$ToString"))
+
+ export(java, def("org.griphyn.vdl.karajan.lib.swiftscript.Java"))
+ export(filename, def("org.griphyn.vdl.karajan.lib.swiftscript.FileName"))
+ export(filenames, def("org.griphyn.vdl.karajan.lib.swiftscript.FileNames"))
+ export(dirname, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$Dirname"))
+ export(length, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$Length"))
+ export(exists, def("org.griphyn.vdl.karajan.lib.swiftscript.Misc$Exists"))
+ export(arg, def("org.griphyn.vdl.karajan.lib.swiftscript.FnArg"))
+}
+
+namespace(swift) {
+ import(sys)
+ export(SWIFT_CONFIG, contextAttribute("config"))
+ export(SWIFT_TICKER, null)
+
+ export(getDatasetProvenanceID, def("org.griphyn.vdl.karajan.lib.GetDatasetProvenanceID"))
+ export(startProgressTicker, def("org.griphyn.vdl.karajan.lib.RuntimeStats$StartProgressTicker"))
+ export(stopProgressTicker, def("org.griphyn.vdl.karajan.lib.RuntimeStats$StopProgressTicker"))
+ export(initProgressState, def("org.griphyn.vdl.karajan.lib.RuntimeStats$InitProgressState"))
+ export(setProgress, def("org.griphyn.vdl.karajan.lib.RuntimeStats$SetProgress"))
+ export(new, def("org.griphyn.vdl.karajan.lib.New"))
+ export(createArray, def("org.griphyn.vdl.karajan.lib.CreateArray"))
+ /* used from VDL2 for arguments to apps and returns relative paths */
+ export(fileName, def("org.griphyn.vdl.karajan.lib.FileName"))
+ /* for internal use and does not force relative paths */
+ export(absFileName, def("org.griphyn.vdl.karajan.lib.AbsFileName"))
+
+ export(jobConstraints, def("org.griphyn.vdl.karajan.lib.JobConstraints"))
+ export(getField, def("org.griphyn.vdl.karajan.lib.GetField"))
+ export(getFieldSubscript, def("org.griphyn.vdl.karajan.lib.GetFieldSubscript"))
+ export(setFieldValue, def("org.griphyn.vdl.karajan.lib.SetFieldValue"))
+ export(appendArray, def("org.griphyn.vdl.karajan.lib.AppendArray"))
+ export(getFieldValue, def("org.griphyn.vdl.karajan.lib.GetFieldValue"))
+ export(waitFieldValue, def("org.griphyn.vdl.karajan.lib.WaitFieldValue"))
+ export(getArrayIterator, def("org.griphyn.vdl.karajan.lib.GetArrayIterator"))
+ export(sliceArray, def("org.griphyn.vdl.karajan.lib.SliceArray"))
+ export(isFileBound, def("org.griphyn.vdl.karajan.lib.IsFileBound"))
+ export(isRestartable, def("org.griphyn.vdl.karajan.lib.IsRestartable"))
+ export(fringePaths, def("org.griphyn.vdl.karajan.lib.FringePaths"))
+ export(closeDataset, def("org.griphyn.vdl.karajan.lib.CloseDataset"))
+ export(partialCloseDataset, def("org.griphyn.vdl.karajan.lib.PartialCloseDataset"))
+ export(setWaitCount, def("org.griphyn.vdl.karajan.lib.SetWaitCount"))
+ export(cleanDataset, def("org.griphyn.vdl.karajan.lib.CleanDataset"))
+ export(range, def("org.griphyn.vdl.karajan.lib.Range"))
+
+ export(isLogged, def("org.griphyn.vdl.karajan.lib.IsLogged"))
+ export(logVar, def("org.griphyn.vdl.karajan.lib.LogVar"))
+
+ export(executable, def("org.griphyn.vdl.karajan.lib.Executable"))
+ export(TCProfile, def("org.griphyn.vdl.karajan.lib.TCProfile"))
+ export(siteProfile, def("org.griphyn.vdl.karajan.lib.SiteProfile"))
+
+ export(setFutureFault, def("org.griphyn.vdl.karajan.lib.SetFutureFault"))
+
+ export(configProperty, def("org.griphyn.vdl.karajan.functions.ConfigProperty"))
+ export(processBulkErrors, def("org.griphyn.vdl.karajan.functions.ProcessBulkErrors"))
+
+ export(niceName, def("org.griphyn.vdl.karajan.lib.NiceName"))
+
+ export(cacheAddAndLockFile, def("org.griphyn.vdl.karajan.lib.CacheAddAndLockFile"))
+ export(cacheAddFile, def("org.griphyn.vdl.karajan.lib.CacheAddFile"))
+ export(cacheFileRemoved, def("org.griphyn.vdl.karajan.lib.CacheFileRemoved"))
+ export(cacheUnlockFiles, def("org.griphyn.vdl.karajan.lib.CacheUnlockFiles"))
+
+ export(LOG:DEBUG, "debug")
+ export(LOG:INFO, "info")
+ export(LOG:WARN, "warn")
+ export(LOG:ERROR, "error")
+ export(LOG:FATAL, "fatal")
+
+
+ export(log, def("org.griphyn.vdl.karajan.lib.Log"))
+
+ export(unitStart, def("org.griphyn.vdl.karajan.lib.UnitStart"))
+ export(unitEnd, def("org.griphyn.vdl.karajan.lib.UnitEnd"))
+ export(currentThread, def("org.griphyn.vdl.karajan.lib.CurrentThread"))
+
+ export(dirname, def("org.griphyn.vdl.karajan.lib.PathUtils$DirName"))
+ export(reldirname, def("org.griphyn.vdl.karajan.lib.PathUtils$RelDirName"))
+ export(basename, def("org.griphyn.vdl.karajan.lib.PathUtils$BaseName"))
+ export(provider, def("org.griphyn.vdl.karajan.lib.PathUtils$Provider"))
+ export(hostname, def("org.griphyn.vdl.karajan.lib.PathUtils$HostName"))
+ export(dircat, def("org.griphyn.vdl.karajan.lib.PathUtils$DirCat"))
+ export(pathnames, def("org.griphyn.vdl.karajan.lib.PathUtils$PathNames"))
+
+ export(execute, def("org.griphyn.vdl.karajan.lib.Execute"))
+ export(expandArguments, def("org.griphyn.vdl.karajan.lib.ExpandArguments"))
+
+ export(tParallelFor, def("org.griphyn.vdl.karajan.lib.ThrottledParallelFor"))
+ export(throttled, def("org.griphyn.vdl.karajan.lib.Throttled"))
+
+ export(appStageins, def("org.griphyn.vdl.karajan.lib.AppStageins"))
+ export(appStageouts, def("org.griphyn.vdl.karajan.lib.AppStageouts"))
+ export(isDone, def("org.griphyn.vdl.karajan.lib.IsDone"))
+ export(mark, def("org.griphyn.vdl.karajan.lib.Mark"))
+ export(flatten, def("org.griphyn.vdl.karajan.lib.Flatten"))
+ export(parameterlog, def("org.griphyn.vdl.karajan.lib.Parameterlog"))
+ export(inFileDirs, def("org.griphyn.vdl.karajan.lib.InFileDirs"))
+ export(outFileDirs, def("org.griphyn.vdl.karajan.lib.OutFileDirs"))
+ export(outFiles, def("org.griphyn.vdl.karajan.lib.OutFiles"))
+ export(doRestartLog, def("org.griphyn.vdl.karajan.lib.DoRestartLog"))
+ export(unwrapClosedList, def("org.griphyn.vdl.karajan.lib.UnwrapClosedList"))
+
+ export(siteCatalog, def("org.griphyn.vdl.karajan.lib.SiteCatalog"))
+}
+
+namespace(cdm) {
+ export(query, def("org.globus.swift.data.Query$Q"))
+ export(get, def("org.globus.swift.data.Query$Get"))
+ export(file, def("org.globus.swift.data.Query$File"))
+ export(broadcast, def("org.globus.swift.data.Action$Broadcast"))
+ export(external, def("org.globus.swift.data.Action$External"))
+ export(wait, def("org.globus.swift.data.Action$Wait"))
+}
Added: branches/faster/libexec/swift-operators.k
===================================================================
--- branches/faster/libexec/swift-operators.k (rev 0)
+++ branches/faster/libexec/swift-operators.k 2013-01-29 07:31:09 UTC (rev 6170)
@@ -0,0 +1,17 @@
+namespace(swiftop) {
+ export(sum, def("org.griphyn.vdl.karajan.lib.Operators$Sum"))
+ export(subtraction, def("org.griphyn.vdl.karajan.lib.Operators$Difference"))
+ export(product, def("org.griphyn.vdl.karajan.lib.Operators$Product"))
+ export(fquotient, def("org.griphyn.vdl.karajan.lib.Operators$FQuotient"))
+ export(iquotient, def("org.griphyn.vdl.karajan.lib.Operators$IQuotient"))
+ export(remainder, def("org.griphyn.vdl.karajan.lib.Operators$Remainder"))
+ export(le, def("org.griphyn.vdl.karajan.lib.Operators$LE"))
+ export(ge, def("org.griphyn.vdl.karajan.lib.Operators$GE"))
+ export(gt, def("org.griphyn.vdl.karajan.lib.Operators$GT"))
+ export(lt, def("org.griphyn.vdl.karajan.lib.Operators$LT"))
+ export(eq, def("org.griphyn.vdl.karajan.lib.Operators$EQ"))
+ export(ne, def("org.griphyn.vdl.karajan.lib.Operators$NE"))
+ export(and, def("org.griphyn.vdl.karajan.lib.Operators$And"))
+ export(or, def("org.griphyn.vdl.karajan.lib.Operators$Or"))
+ export(not, def("org.griphyn.vdl.karajan.lib.Operators$Not"))
+}
\ No newline at end of file
Added: branches/faster/libexec/swift-xs.k
===================================================================
--- branches/faster/libexec/swift-xs.k (rev 0)
+++ branches/faster/libexec/swift-xs.k 2013-01-29 07:31:09 UTC (rev 6170)
@@ -0,0 +1,85 @@
+import(sys)
+
+namespace(xs) {
+ import(java)
+
+ UnresolvedType := function(name, isArray) {
+ java:new("org.griphyn.vdl.type.impl.UnresolvedType", types=["String"], name)
+ }
+
+ typesST := function() {
+ types = ["String", "org.griphyn.vdl.type.Type"]
+ }
+
+ newSimpleNode := function(name, type) {
+ x := java:new("org.griphyn.vdl.type.impl.TypeImpl", types=["String"], name)
+ invokeMethod("setBaseType", object=x, types=["org.griphyn.vdl.type.Type"]
+ invokeMethod("getType", classname="org.griphyn.vdl.type.Types", type)
+ )
+ x
+ }
+
+ newComplexNode := function(name) {
+ java:new("org.griphyn.vdl.type.impl.TypeImpl", types=["String"], name)
+ }
+
+ addNode := function(node) {
+ invokeMethod("addType", classname="org.griphyn.vdl.type.Types"
+ types=["org.griphyn.vdl.type.Type"]
+ node
+ )
+ }
+
+ addField := function(node, name, type) {
+ invokeMethod("addField", object=node, typesST(), name, type)
+ }
+
+ export(schema,
+ function() {
+ invokeMethod("resolveTypes", classname="org.griphyn.vdl.type.Types")
+ }
+ )
+
+ export(simpleType,
+ function(name, type) {
+ addNode(newSimpleNode(name, type))
+ }
+ )
+
+ export(restriction,
+ function(base) {
+ type = last(split(base, ":"))
+ }
+ )
+
+ export(complexType,
+ function(name, ...) {
+ node := newComplexNode(name)
+ for(field, ...) {
+ (name, type) := each(field)
+ addField(node, name, type)
+ }
+ addNode(node)
+ }
+ )
+
+ export(sequence,
+ function(minOccurs = 0, maxOccurs = 0, ...) {
+ (name, type) := each(first(...))
+ if (maxOccurs == "unbounded") {
+ list(name, UnresolvedType(type, true))
+ }
+ else {
+ each(...)
+ }
+ }
+ )
+
+ //should be noted that we're dealing with type names here
+ export(element,
+ function(name, type) {
+ type := last(split(type, ":"))
+ list(name, UnresolvedType(type, false))
+ }
+ )
+}
Added: branches/faster/libexec/swift.k
===================================================================
--- branches/faster/libexec/swift.k (rev 0)
+++ branches/faster/libexec/swift.k 2013-01-29 07:31:09 UTC (rev 6170)
@@ -0,0 +1,219 @@
+import(sys)
+import(task)
+import(rlog)
+import('swift-operators', export = true)
+import('swift-lib', export = true)
+import('swift-xs', export = true)
+import('swift-int')
+
+SWIFT:SCRIPT_NAME := contextAttribute("SWIFT:SCRIPT_NAME")
+SWIFT:RUN_ID := contextAttribute("SWIFT:RUN_ID")
+SWIFT:HOME := contextAttribute("SWIFT:HOME")
+
+namespace(swift) {
+
+ pstaging := configProperty("use.provider.staging")
+ wstaging := configProperty("use.wrapper.staging")
+
+ import(java)
+
+ once("vdl.k-print-version") {
+ log(LOG:INFO,sys:file:read("{SWIFT:HOME}/libexec/version.txt"))
+ echo(sys:file:read("{SWIFT:HOME}/libexec/version.txt"))
+ log(LOG:INFO,"RUNID id=run:{SWIFT:RUN_ID}")
+ echo("RunID: {SWIFT:RUN_ID}")
+ }
+
+ export(stagein, def("org.griphyn.vdl.karajan.lib.Stagein"))
+ export(stageout, def("org.griphyn.vdl.karajan.lib.Stageout"))
+
+ export(parameterLog,
+ function(direction, variable, id, thread) {
+ if (configProperty("provenance.log") == "true") {
+ log("info","PARAM thread={thread} direction={direction} variable={variable} provenanceid={id}")
+ }
+ }
+ )
+
+ export(split,
+ function(var) {
+ each(str:split(getFieldValue(var), " "))
+ }
+ )
+
+ export(quote,
+ function(var, path = null) {
+ str:quote(getFieldValue(var, if (path != null) { path = path }))
+ }
+ )
+
+ export(types, function() {})
+
+ export(arguments,
+ function(...) {
+ arguments = expandArguments(each(...))
+ }
+ )
+
+ export(mapping,
+ function(descriptor, ...) {
+ mapping=map(map:entry("swift#descriptor", descriptor), each(...))
+ }
+ )
+
+ export(parameter,
+ function(name, value) {
+ map:entry(name, value)
+ }
+ )
+
+ export(stdout,
+ function(file) {
+ stdout = file
+ }
+ )
+
+ export(stdin,
+ function(file) {
+ stdin = file
+ }
+ )
+
+ export(stderr,
+ function(file) {
+ stderr = file
+ }
+ )
+
+ export(tr,
+ function(name) {
+ tr = name
+ }
+ )
+
+ export(attributes,
+ function(attrs) {
+ attributes = attrs
+ }
+ )
+
+ export(mains,
+ function(channel(cleanup), channel(errors), channel(warnings)) {
+ anyerrors := processBulkErrors("The following errors have occurred:", errors)
+
+ //this should be reached after everything is done
+ if (!anyerrors) {
+ log(LOG:DEBUG, "Starting cleanups")
+ discard(append(warnings, from(warnings, cleanups(cleanup))))
+ log(LOG:DEBUG, "Ending cleanups")
+ }
+ else {
+ log(LOG:INFO, "Errors detected. Cleanup not done.")
+ }
+ anywarnings := processBulkErrors("The following warnings have occurred:", warnings, onStdout=true)
+ if (anyerrors) {
+ throw("Execution completed with errors")
+ }
+ }
+ )
+
+ /*
+ * This would run in parallel with the workflow so that we don't keep
+ * all the data in memory until the workflow is done
+ */
+ export(mainp,
+ CBFFunction(channel(graph), channel(cleanup)) {
+ parallel(
+ if(configProperty("pgraph") != "false") {
+ generateProvenanceGraph(graph)
+ }
+ to(cleanup, unique(for(c, cleanup, c)))
+ )
+ }
+ )
+
+ export(execute,
+ function(
+ tr, arguments = null,
+ stdin = null, stdout = null, stderr = null,
+ attributes = null,
+ deperror = false, mdeperror = false,
+ channel(stagein), channel(stageout), channel(restartout)) {
+
+ progress := initProgressState()
+
+ done := isDone(restartout)
+ derr := try(deperror, false)
+ merr := try(mdeperror, false)
+
+ if(derr == false) {
+ if(!done) {
+ try {
+ throttled {
+ setProgress(progress, "Selecting site")
+ restartOnError(number(swift:configProperty("execution.retries"))) {
+ replicationChannel := channel:new()
+ //trigger the first job
+ discard(append(replicationChannel, true))
+ replicationGroup := UID()
+ parallelFor(i, replicationChannel) {
+ try {
+ execute2(
+ progress,
+ tr, maybe(arguments=unwrapClosedList(arguments)),
+ maybe(stdin=stdin), maybe(stdout=stdout), maybe(stderr=stderr), maybe(attributes=attributes),
+ stagein, stageout, restartout, replicationGroup, replicationChannel
+ )
+ }
+ else catch(exception) {
+ if (matches(exception, "^Abort$")) {
+ // ignored
+ }
+ else {
+ throw(exception)
+ }
+ }
+ }
+ }
+ mark(restartout, err=false, mapping=false)
+ log(LOG:INFO, "END_SUCCESS thread=", currentThread(), " tr={tr}")
+ setProgress(progress, "Finished successfully")
+ }
+ }
+ else catch(exception) {
+ log(LOG:INFO, "END_FAILURE thread=", currentThread(), " tr={tr}")
+ setProgress(progress, "Failed")
+ if(swift:configProperty("lazy.errors") == "false") {
+ throw(exception)
+ }
+ else {
+ to(errors, exception)
+ log(LOG:INFO, exception)
+ echo(exception)
+ mark(restartout, err=true, mapping=false)
+ graphStuff(tr, stagein, stageout, err=true, maybe(args=arguments))
+ }
+ }
+ }
+ else {
+ setProgress(progress, "Finished in previous run")
+ }
+ }
+ else { //derr == true
+ if (merr) {
+ exception := exception(concat("Swift: Application ", str:quote(tr),
+ " not executed due to errors in mapping dependencies"))
+ to(errors, exception)
+ log(LOG:INFO, exception)
+ }
+ else if (derr) {
+ exception := exception("Swift: Application {tr} not executed due to errors in dependencies")
+ to(errors, exception)
+ log(LOG:INFO, exception)
+ }
+ mark(restartout, err=true, mapping=merr)
+ graphStuff(tr, stagein, stageout, err=true, maybe(args=arguments))
+ }
+ }
+ )
+}
Modified: branches/faster/libexec/vdl-int-staging.k
===================================================================
--- branches/faster/libexec/vdl-int-staging.k 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/libexec/vdl-int-staging.k 2013-01-29 07:31:09 UTC (rev 6170)
@@ -19,73 +19,72 @@
global(PIN, if(pinOption == "true", "pinned:", ""))
global(PROVENANCE_GRAPH_ENABLED, vdl:configProperty("pgraph") != "false")
-namespace("vdl"
+namespace("vdl") {
export(
- element(initDDir, []
+ function(initDDir) {
ddir := "{VDL:SCRIPTNAME}-{VDL:RUNID}.d"
- once(ddir
- if(sys:not(file:exists(ddir))
+ once(ddir) {
+ if(sys:not(file:exists(ddir))) {
task:dir:make(ddir)
- )
- )
+ }
+ }
ddir
- )
+ }
- element(inFiles, [stageins]
+ function(inFiles, stageins) {
pathnames(stageins)
- )
+ }
- element(graphStuff, [tr, stagein, stageout, err, optional(args)]
- if(PROVENANCE_GRAPH_ENABLED
- then(
- errprops := if(err ",color=lightsalmon" ",color=lightsteelblue1")
- tp := vdl:threadPrefix()
- to(graph,
- concat(str:quote(tp), " [label=", str:quote(tr), "{errprops}]")
- )
- for(si, stagein
- si := basename(si)
- to(graph
- concat(str:quote(si), " [shape=parallelogram]")
- concat(str:quote(si), " -> ", str:quote(tp))
- )
- )
- for(pv, stageout
- [path, var] := each(pv)
- file := vdl:fileName(vdl:getfield(var, path=path))
- file := basename(file)
- label := vdl:niceName(var, path = path)
- to(graph
- concat(str:quote(file), " [shape=parallelogram,label=",
- str:quote(label), "]")
- concat(str:quote(tp), " -> ", str:quote(file))
- )
- )
- )
- )
- )
+ function(graphStuff, tr, stagein, stageout, err, optional(args)) {
+ if(PROVENANCE_GRAPH_ENABLED) {
+ errprops := if(err ",color=lightsalmon" ",color=lightsteelblue1")
+ tp := vdl:threadPrefix()
+ to(graph) {
+ concat(str:quote(tp), " [label=", str:quote(tr), "{errprops}]")
+ }
+ for(si, stagein) {
+ si := basename(si)
+ to(graph) {
+ concat(str:quote(si), " [shape=parallelogram]")
+ concat(str:quote(si), " -> ", str:quote(tp))
+ }
+ }
+ for(pv, stageout) {
+ (path, var) := each(pv)
+ file := vdl:fileName(vdl:getfield(var, path=path))
+ file := basename(file)
+ label := vdl:niceName(var, path = path)
+ to(graph) {
+ concat(str:quote(file), " [shape=parallelogram,label=",
+ str:quote(label), "]")
+ concat(str:quote(tp), " -> ", str:quote(file))
+ }
+ }
+ }
+ }
- element(fileSizes, [files]
+ function(fileSizes, files) {
math:sum(
for(f, files, file:size(file))
)
- )
+ }
- element(cleanups, [cleanup]
+ function(cleanups, cleanup) {
log(LOG:INFO, "START cleanups={cleanup}")
- )
+ }
- element(readErrorFile, [dir, jobid]
+ function(readErrorFile, dir, jobid) {
str:strip(file:read("{dir}/{jobid}.error"))
file:remove("{dir}/{jobid}.error")
- )
+ }
- element(execute2, [tr, optional(arguments, stdin, stdout, stderr), stagein, stageout, restartout,
- replicationGroup, replicationChannel]
+ function(execute2, tr, optional(arguments, stdin, stdout, stderr), stagein, stageout, restartout,
+ replicationGroup, replicationChannel) {
+
stagein := list(unique(each(stagein)))
stageout := list(unique(each(stageout)))
- allocateHost(rhost, constraints=vdl:jobConstraints(tr, stagein=stagein)
+ allocateHost(rhost, constraints=vdl:jobConstraints(tr, stagein=stagein)) {
ddir := initDDir()
@@ -93,7 +92,7 @@
jobdir := substring(uid, from=0, to=1)
jobid := "{tr}-{uid}"
- log(LOG:DEBUG, "THREAD_ASSOCIATION jobid={jobid} thread={#thread} host={rhost} replicationGroup={replicationGroup}")
+ log(LOG:DEBUG, "THREAD_ASSOCIATION jobid={jobid} thread=", currentThread(), " host={rhost} replicationGroup={replicationGroup}")
wrapper := "_swiftwrap.staging"
wrapfile := "{ddir}/param-{jobid}"
@@ -105,110 +104,107 @@
tmpdir := dircat("{wfdir}/jobs/{jobdir}", jobid)
cdmfile := cdm:file()
- try(
- sequential(
- log(LOG:DEBUG, "JOB_START jobid={jobid} tr={tr}", maybe(" arguments=", arguments), " tmpdir={tmpdir} host={rhost}")
+ try {
+ log(LOG:DEBUG, "JOB_START jobid={jobid} tr={tr}", maybe(" arguments=", arguments), " tmpdir={tmpdir} host={rhost}")
- vdl:setprogress("Submitting")
+ vdl:setprogress("Submitting")
- vdl:execute(
- vdl:siteprofile(rhost, "swift:wrapperInterpreter"),
- list(
- vdl:siteprofile(rhost, "swift:wrapperInterpreterOptions"),
- wrapper,
- "-e", vdl:executable(tr, rhost),
- "-out", stdout,
- "-err", stderr,
- "-i", maybe(getFieldValue(stdin)),
- "-d", flatten(unique(outFileDirs(stageout))),
- "-if", flatten(infiles(stagein)),
- "-of", flatten(outfiles(stageout)),
- "-k",
- "-cdmfile", cdmfile,
- "-status", "provider"
- "-a", maybe(each(arguments))
- )
- directory = tmpdir
- redirect = false
- host = rhost
+ vdl:execute(
+ vdl:siteprofile(rhost, "swift:wrapperInterpreter"),
+ list(
+ vdl:siteprofile(rhost, "swift:wrapperInterpreterOptions"),
+ wrapper,
+ "-e", vdl:executable(tr, rhost),
+ "-out", stdout,
+ "-err", stderr,
+ "-i", maybe(getFieldValue(stdin)),
+ "-d", flatten(unique(outFileDirs(stageout))),
+ "-if", flatten(infiles(stagein)),
+ "-of", flatten(outfiles(stageout)),
+ "-k",
+ "-cdmfile", cdmfile,
+ "-status", "provider"
+ "-a", maybe(each(arguments))
+ )
+ directory = tmpdir
+ redirect = false
+ host = rhost
- vdl:tcprofile(rhost, tr = tr) //this gets various app params from the tc, such as environment, walltime, etc
- replicationGroup = replicationGroup
- replicationChannel = replicationChannel
- jobid = jobid
+ vdl:tcprofile(rhost, tr = tr) //this gets various app params from the tc, such as environment, walltime, etc
+ replicationGroup = replicationGroup
+ replicationChannel = replicationChannel
+ jobid = jobid
+
+ stagingMethod := vdl:siteProfile(rhost, "swift:stagingMethod", default="proxy")
- stagingMethod := vdl:siteProfile(rhost, "swift:stagingMethod", default="proxy")
+ stageIn("{PIN}{stagingMethod}://localhost/{swift.home}/libexec/{wrapper}", wrapper)
- stageIn("{PIN}{stagingMethod}://localhost/{swift.home}/libexec/{wrapper}", wrapper)
+ if (cdmfile != "") {
+ d := vdl:dirname(cdmfile)
+ file := basename(cdmfile)
+ dir := if (d == "", "./", str:concat(d,"/"))
+ loc := "{PIN}{stagingMethod}://localhost/"
+ stageIn("{loc}{dir}{file}", cdmfile)
+ stageIn("{loc}{swift.home}/libexec/cdm.pl", "cdm.pl")
+ stageIn("{loc}{swift.home}/libexec/cdm_lib.sh", "cdm_lib.sh")
+ }
- if ( cdmfile != "" then(
- d := vdl:dirname(cdmfile)
- file := basename(cdmfile)
- dir := if ( d == "" then("./") else(str:concat(d,"/")))
- loc := "{PIN}{stagingMethod}://localhost/"
- stageIn("{loc}{dir}{file}", cdmfile)
- stageIn("{loc}{swift.home}/libexec/cdm.pl", "cdm.pl")
- stageIn("{loc}{swift.home}/libexec/cdm_lib.sh", "cdm_lib.sh")
- ))
+ appStageins(jobid, stagein, ".", stagingMethod)
- appStageins(jobid, stagein, ".", stagingMethod)
+ stageOut("wrapper.log", "{stagingMethod}://localhost/{ddir}/{jobid}.info",
+ mode = WRAPPER_TRANSFER_MODE)
+ //stageOut("{stdout}", "{stagingMethod}://localhost/{ddir}/{stdout}")
+ //stageOut("{stderr}", "{stagingMethod}://localhost/{ddir}/{stderr}")
+ stageOut("wrapper.error", "{stagingMethod}://localhost/{ddir}/{jobid}.error",
+ mode = STAGING_MODE:IF_PRESENT)
+ appStageouts(jobid, stageout, ".", stagingMethod)
- stageOut("wrapper.log", "{stagingMethod}://localhost/{ddir}/{jobid}.info",
- mode = WRAPPER_TRANSFER_MODE)
- //stageOut("{stdout}", "{stagingMethod}://localhost/{ddir}/{stdout}")
- //stageOut("{stderr}", "{stagingMethod}://localhost/{ddir}/{stderr}")
- stageOut("wrapper.error", "{stagingMethod}://localhost/{ddir}/{jobid}.error",
- mode = STAGING_MODE:IF_PRESENT)
- appStageouts(jobid, stageout, ".", stagingMethod)
-
- task:cleanUp(".") //the whole job directory
- )
- doRestartlog(restartout)
- log(LOG:DEBUG, "JOB_END jobid={jobid}")
- )
- catch("^Abort$"
- log(LOG:DEBUG, "JOB_CANCELED jobid={jobid}")
- throw(exception)
- )
- catch("^(?!Abort$).*"
- vdl:setprogress("Failed but can retry")
- prev := exception
- exception := try(exception(readErrorFile(ddir, jobid)), prev)
- log(LOG:DEBUG, "APPLICATION_EXCEPTION jobid={jobid} - Application exception: ", exception)
-
- throw(
- exception(
- concat(
- "Exception in {tr}:", nl(),
- maybe(" Arguments: {arguments}", nl()),
- " Host: {rhost}", nl(),
- " Directory: {tmpdir}", nl()
- )
- exception
+ task:cleanUp(".") //the whole job directory
+ ) // execute
+ doRestartlog(restartout)
+ log(LOG:DEBUG, "JOB_END jobid={jobid}")
+ }
+ else catch(exception, "^Abort$") {
+ log(LOG:DEBUG, "JOB_CANCELED jobid={jobid}")
+ throw(exception)
+ }
+ else catch(prev, "^(?!Abort$).*") {
+ vdl:setprogress("Failed but can retry")
+ exception := try(exception(readErrorFile(ddir, jobid)), prev)
+ log(LOG:DEBUG, "APPLICATION_EXCEPTION jobid={jobid} - Application exception: ", exception)
+
+ throw(
+ exception(
+ concat(
+ "Exception in {tr}:", nl(),
+ maybe(" Arguments: {arguments}", nl()),
+ " Host: {rhost}", nl(),
+ " Directory: {tmpdir}", nl()
)
+ exception
)
)
- )
- )
- )
+ }
+ }
+ }
- element(generateProvenanceGraph, [gdata]
+ function(generateProvenanceGraph, gdata) {
pgraph := vdl:configProperty("pgraph")
gname := if(pgraph == "true" "{VDL:SCRIPTNAME}-{VDL:RUNID}.dot" pgraph)
- file:write(gname
+ file:write(gname) {
"digraph SwiftProvenance {{", nl()
" graph [", vdl:configProperty("pgraph.graph.options"), "];", nl()
" node [", vdl:configProperty("pgraph.node.options"), "];", nl()
- for(i, gdata
+ for(i, gdata) {
" ", i, nl()
- )
+ }
"}", nl()
- )
+ }
log(LOG:INFO, "Provenance graph saved in ", gname)
- )
+ }
)
-)
+}
// Local variables:
// tab-width: 4
Deleted: branches/faster/libexec/vdl-int.k
===================================================================
--- branches/faster/libexec/vdl-int.k 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/libexec/vdl-int.k 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,637 +0,0 @@
-import("sys.k")
-import("task.k")
-import("vdl-lib.xml")
-/*
- * Things that are not exposed to the translated file
- */
-
-global(LOG:DEBUG, "debug")
-global(LOG:INFO, "info")
-global(LOG:WARN, "warn")
-global(LOG:ERROR, "error")
-global(LOG:FATAL, "fatal")
-
-namespace("vdl"
- export(
- element(rmdir, [dir, host]
- parallelFor(entry, file:list(dir, host=host)
- epath := "{dir}/{entry}"
- if(
- file:isDirectory(epath, host=host) rmdir(epath, host)
- file:remove(epath, host=host)
- )
- )
- dir:remove(dir, host=host)
- )
-
- element(createdirs, [path, dir, host]
- dc := dircat(dir, path)
- log(LOG:INFO, "START path={path} dir={dir} - Creating directory structure")
-
- dir:make(dc, host=host)
- )
-
- element(checkJobStatus, [rhost, wfdir, jobid, tr, jobdir]
- log(LOG:DEBUG, "START jobid={jobid}")
- try(
- sequential(
- /*
- * This is a bit of optimization, but I'm not completely
- * sure of its correctness. The goal is to both detect
- * the presence of the success file and remove it, all
- * in one operation. It relies on file:remove() throwing
- * an exception if the file is not there.
- */
- file:remove("{wfdir}/status/{jobdir}/{jobid}-success", host=rhost)
- log(LOG:INFO, "SUCCESS jobid={jobid} - Success file found")
- )
- sequential(
- try (
- msg = checkErrorFile(rhost, wfdir, jobid, jobdir)
- sequential (
- log(LOG:INFO, "NO_STATUS_FILE jobid={jobid} - Both status files are missing")
- throw("No status file was found. Check the shared filesystem on {rhost}")
- )
- )
- )
- throw(checkErrorFile(rhost, wfdir, jobid, jobdir))
- )
- )
-
- element(checkErrorFile, [rhost, wfdir, jobid, jobdir]
- if (
- file:exists("{wfdir}/status/{jobdir}/{jobid}-error", host=rhost) then(
- log(LOG:INFO, "FAILURE jobid={jobid} - Failure file found")
- task:transfer(srchost=rhost, srcdir="{wfdir}/status/{jobdir}", srcfile="{jobid}-error")
- error := parallel(
- file:remove("{wfdir}/status/{jobdir}/{jobid}-error", host=rhost)
- sequential(
- str:strip(file:read("{jobid}-error"))
- file:remove("{jobid}-error")
- )
- )
- error
- )
- else (
- log(LOG:INFO, "NO_STATUS_FILE jobid={jobid} - Error file missing")
- throw("No status file was found. Check the shared filesystem on {rhost}")
- )
- )
- )
-
- element(initSharedDir, [rhost]
- once(list(rhost, "shared")
- vdl:setprogress("Initializing site shared directory")
-
- log(LOG:INFO, "START host={rhost} - Initializing shared directory")
-
- wfdir := "{VDL:SCRIPTNAME}-{VDL:RUNID}"
- sharedDir := dircat(wfdir, "shared")
- dir:make(sharedDir, host=rhost)
- transfer(srcdir="{swift.home}/libexec/", srcfile=vdl:siteprofile(rhost, "swift:wrapperScript"), destdir=sharedDir, desthost=rhost)
- transfer(srcdir="{swift.home}/libexec/", srcfile="_swiftseq", destdir=sharedDir, desthost=rhost)
- dir:make(dircat(wfdir, "kickstart"), host=rhost)
-
- statusMode := configProperty("status.mode",host=rhost)
- if(statusMode == "files"
- dir:make(dircat(wfdir, "status"), host=rhost)
- )
-
- wrapperMode := configProperty("wrapper.parameter.mode",host=rhost)
- if(wrapperMode == "files"
- dir:make(dircat(wfdir, "parameters"), host=rhost)
- )
-
- dir:make(dircat(wfdir, "info"), host=rhost)
- wfdir, sharedDir
- //we send the cleanup data to vdl:main()
- to(cleanup, list(wfdir, rhost))
- log(LOG:INFO, "END host={rhost} - Done initializing shared directory")
- )
- )
-
- element(initDDir, []
- ddir := "{VDL:SCRIPTNAME}-{VDL:RUNID}.d"
- once(ddir
- if(sys:not(file:exists(ddir))
- task:dir:make(ddir)
- )
- )
- ddir
- )
-
- element(inFiles, [stageins]
- pathnames(stageins)
- )
-
- element(fileDirs, [stageins, stageouts]
- list(
- unique(
- inFileDirs(stageins)
- outFileDirs(stageouts)
- )
- )
- )
-
- element(createDirSet, [jobid, destdir, host, dirs]
- /*
- * Ideally this would be done by creating a tree of the directories
- * to be created and (eventually) exploiting the concurrency in that.
- */
- log(LOG:INFO, "START jobid={jobid} host={host} - Initializing directory structure")
- for(u, dirs
- cacheOn(list(u, destdir, host)
- createdirs(u, destdir, host)
- )
- )
- log(LOG:INFO, "END jobid={jobid} - Done initializing directory structure")
- )
-
- element(cleanup, [dir, host]
- log(LOG:INFO, "START dir={dir} host={host}")
- cdmfile := cdm:file()
- log(LOG:DEBUG, "cdmfile {cdmfile}")
- if(cdmfile != "" &
- cdm:get("GATHER_DIR") != "UNSET" then(
- log(LOG:INFO, "submitting cdm_cleanup.sh to {dir}")
- task:transfer(srcfile="cdm_cleanup.sh",
- srcdir="{swift.home}/libexec",
- desthost=host, destdir=dir)
- task:transfer(srcfile="cdm_lib.sh",
- srcdir="{swift.home}/libexec",
- desthost=host, destdir=dir)
- log(LOG:INFO, "execute: cdm_cleanup.sh")
- task:execute(
- executable="/bin/bash",
- arguments=list("{dir}/cdm_cleanup.sh",
- cdm:get("GATHER_DIR"), cdm:get("GATHER_TARGET")
- sys:uid() )
- host=host, batch=true, tcprofile(host))
- )
- )
- if(vdl:configProperty("sitedir.keep") == "false"
- task:execute(
- vdl:siteprofile(host, "swift:cleanupCommand"),
- arguments=list(
- vdl:siteprofile(host, "swift:cleanupCommandOptions"),
- dir
- )
- host=host, batch=true, tcprofile(host))
- )
- log(LOG:INFO, "END dir={dir} host={host}")
- )
-
- element(cleanups, [cleanup]
- log(LOG:INFO, "START cleanups={cleanup}")
- parallelFor(i, cleanup
- [dir, host] := each(i)
- try(
- vdl:cleanup(dir, host)
- catch(".*",
- log(LOG:DEBUG, "EXCEPTION - Exception caught while cleaning up", exception)
- to(warnings, exception("Cleanup on {host} failed", exception))
- )
- )
- )
- log(LOG:INFO, "END cleanups={cleanup}")
- )
-
- element(cleanupFiles, [files, host]
- uParallelFor(r, files
- log(LOG:INFO, "Purging ", r, " on ", host)
- file:remove(r, host=host)
- vdl:cacheFileRemoved(r, host)
- )
- )
-
- element(stageWrapperParams, [jobid, jobdir, wrapfile, dir, host]
- log(LOG:INFO, "START jobid={jobid} - staging in wrapper params")
- provider := provider(wrapfile)
- srchost := hostname(wrapfile)
- srcdir := vdl:dirname(wrapfile)
- destdir := dircat(dir, "/parameters/{jobdir}/")
- filename := basename(wrapfile)
-
- cacheOn(list(destdir, host)
- dir:make(destdir, host=host, provider=provider)
- )
-
- task:transfer(srcprovider=provider, srchost=srchost, srcfile=filename, srcdir=srcdir, desthost=host, destdir=destdir)
- log(LOG:INFO, "END jobid={jobid}")
- )
-
- element(doStagein, [jobid, files, dir, host]
- log(LOG:INFO, "START jobid={jobid} - Staging in files")
-
- cdmfile := cdm:file()
- libexec := "{swift.home}/libexec"
-
- if (cdmfile != "" then(
- doStageinFile(provider="file", srchost="localhost", srcfile=basename(cdmfile),
- srcdir=vdl:dirname(cdmfile), desthost=host, destdir=dir,
- size=file:size(cdmfile), policy="DEFAULT")
- doStageinFile(provider="file", srchost="localhost", srcfile="cdm.pl",
- srcdir=libexec, desthost=host, destdir=dir,
- size=file:size("{libexec}/cdm.pl}"), policy="DEFAULT")
- doStageinFile(provider="file", srchost="localhost", srcfile="cdm_lib.sh",
- srcdir=libexec, desthost=host, destdir=dir,
- size=file:size("{libexec}/cdm_lib.sh}"), policy="DEFAULT")
- ))
-
- uParallelFor(file, files
- provider := provider(file)
- srchost := hostname(file)
- srcdir := vdl:dirname(file)
- destdir := dircat(dir, reldirname(file))
- filename := basename(file)
- size := file:size("{srcdir}/{filename}", host=srchost, provider=provider)
-
- policy := cdm:query(query=file)
- log(LOG:DEBUG, "CDM: {file} : {policy}")
-
- doStageinFile(provider=provider, srchost=srchost, srcfile=filename,
- srcdir=srcdir, desthost=host, destdir=destdir, size=size, policy=policy)
- )
- log(LOG:INFO, "END jobid={jobid} - Staging in finished")
- )
-
- element(doStageinFile, [provider, srchost, srcfile, srcdir, desthost, destdir, size, policy]
- vdl:cacheAddAndLockFile(srcfile, destdir, desthost, size
- cleanupFiles(cacheFilesToRemove, desthost)
-
- log(LOG:DEBUG, "FILE_STAGE_IN_START file={srcfile} ",
- "srchost={srchost} srcdir={srcdir} srcname={srcfile} ",
- "desthost={desthost} destdir={destdir} provider={provider} ",
- "policy={policy}")
- if (policy == "DEFAULT" then(
- restartOnError(".*", 2
- task:transfer(srcprovider=provider, srchost=srchost, srcfile=srcfile,
- srcdir=srcdir, desthost=desthost, destdir=destdir,
- transferParams(srchost, desthost, provider))))
- policy == "BROADCAST" then(
- log(LOG:DEBUG, "FILE_STAGE_IN_BROADCAST file={srcfile} policy={policy}")
- cdm:broadcast(srcfile=srcfile, srcdir=srcdir))
- policy == "EXTERNAL" then(
- log(LOG:DEBUG, "FILE_STAGE_IN_EXTERNAL file={srcfile} policy={policy}")
- cdm:external(srcfile=srcfile, srcdir=srcdir,
- desthost=desthost, destdir=destdir))
- else(log(LOG:DEBUG, "FILE_STAGE_IN_SKIP file={srcfile} policy={policy}")))
- log(LOG:DEBUG, "FILE_STAGE_IN_END file={srcfile} ",
- "srchost={srchost} srcdir={srcdir} srcname={srcfile} ",
- "desthost={desthost} destdir={destdir} provider={provider}")
- )
- cdm:wait()
- )
-
- element(doStageout, [jobid, stageouts, dir, host]
- log(LOG:INFO, "START jobid={jobid} - Staging out files")
- log(LOG:DEBUG, "stageouts: {stageouts}")
- done := list(
- uParallelFor(pv, stageouts
- [path, var] := each(pv)
- file := vdl:absfilename(vdl:getfield(var, path = path))
- provider := vdl:provider(file)
- dhost := vdl:hostname(file)
- rdir := dircat(dir, reldirname(file))
- bname := basename(file)
- ldir := vdl:dirname(file)
- fullLocal := dircat(ldir, bname)
- fullRemote := dircat(rdir, bname)
-
- log(LOG:DEBUG, "FILE_STAGE_OUT_START srcname={bname} srcdir={rdir} srchost={host} ",
- "destdir={ldir} desthost={dhost} provider={provider}")
- //make sure we do have the directory on the client side
- dir:make(ldir, host=dhost, provider=provider)
- policy := cdm:query(query=file)
- log(LOG:DEBUG, "CDM: {file} : {policy}")
- if (sys:or(policy == "DEFAULT", policy == "BROADCAST")
- then(
- restartOnError(".*", 2
- task:transfer(srchost=host, srcfile=bname,srcdir=rdir,
- destdir=ldir, desthost=dhost, destprovider=provider,
- transferParams(host, dhost, provider))))
- else(log(LOG:DEBUG, "FILE_STAGE_OUT_SKIP srcname={bname}"))
- )
- log(LOG:DEBUG, "FILE_STAGE_OUT_END srcname={bname} srcdir={rdir} srchost={host} ",
- "destdir={ldir} desthost={dhost} provider={provider}")
-
- list(bname, rdir, host, file:size(fullLocal))
- )
- )
- uParallelFor(f, done
- [bname, rdir, host, size] := each(f)
- vdl:cacheAddFile(bname, rdir, host, size
- cleanupFiles(cacheFilesToRemove, host)
- )
- )
- log(LOG:INFO, "END jobid={jobid} - Staging out finished")
- )
-
- element(graphStuff, [tr, stagein, stageout, err, optional(args)]
- if(
- vdl:configProperty("pgraph") != "false" then(
- errprops := if(err ",color=lightsalmon" ",color=lightsteelblue1")
- tp := vdl:threadPrefix()
- to(graph,
- concat(str:quote(tp), " [label=", str:quote(tr), "{errprops}]")
- )
- for(si, stagein
- si := basename(si)
- to(graph
- concat(str:quote(si), " [shape=parallelogram]")
- concat(str:quote(si), " -> ", str:quote(tp))
- )
- )
- for(pv, stageout
- [path, var] := each(pv)
- file := vdl:fileName(vdl:getfield(var, path=path))
- file := basename(file)
- label := vdl:niceName(var, path = path)
- to(graph
- concat(str:quote(file), " [shape=parallelogram,label=",
- str:quote(label), "]")
- concat(str:quote(tp), " -> ", str:quote(file))
- )
- )
- )
- )
- )
-
- element(fileSizes, [files]
- math:sum(
- for(f, files, file:size(file))
- )
- )
-
- element(transferStandardFiles, [rhost, tmpdir, jobid, stdout, stderr]
- concat(
- for(f, list(list("stderr.txt", stderr), list("stdout.txt", stdout))
- [name, file] := each(f)
- destfile := concat("{jobid}-", basename(file))
- try(
- sequential(
- task:transfer(srchost=rhost, srcdir=tmpdir, srcfile=file,
- destfile=destfile)
- nl()
- " {name}: "
- strip(file:read(destfile))
- )
- ""
- )
- maybe(file:remove(destfile))
- )
- )
- )
-
- element(transferKickstartRec, [rhost, wfdir, jobid, jobdir]
- recfile := "{jobid}-kickstart.xml"
- srcdir := dircat("{wfdir}/kickstart/", jobdir)
- try(
- task:transfer(srchost=rhost, srcdir=srcdir, srcfile=recfile, destdir="{VDL:SCRIPTNAME}-{VDL:RUNID}.d/")
- (
- maybe(file:remove(recfile))
- log(LOG:WARN, "Failed to transfer kickstart records from {srcdir} on {rhost}", exception)
- )
- )
- recfile
- )
-
- element(transferWrapperLog, [rhost, wfdir, jobid, jobdir]
- recfile := "{jobid}-info"
- srcdir := dircat("{wfdir}/info/", jobdir)
- try(
- task:transfer(srchost=rhost, srcdir=srcdir, srcfile=recfile, destdir="{VDL:SCRIPTNAME}-{VDL:RUNID}.d/")
- (
- maybe(file:remove(recfile))
- log(LOG:INFO, "Failed to transfer wrapper log for job {jobid} from {srcdir} on {rhost}", exception)
- )
- )
- recfile
- )
-
- element(execute2, [tr, optional(arguments, stdin, stdout, stderr, attributes), stagein, stageout, restartout,
- replicationGroup, replicationChannel]
- stagein := list(unique(each(stagein)))
- stageout := list(unique(each(stageout)))
-
- allocateHost(rhost, constraints=vdl:jobConstraints(tr, stagein=stagein)
-
- ddir := initDDir()
- [wfdir, sharedDir] := try(
- initSharedDir(rhost)
- throw(exception("Could not initialize shared directory on {rhost}", exception))
- )
-
- uid := uid()
- jobdir := substring(uid, from=0, to=1)
- jobid := "{tr}-{uid}"
-
- log(LOG:DEBUG, "THREAD_ASSOCIATION jobid={jobid} thread={#thread} host={rhost} replicationGroup={replicationGroup}")
-
- statusMode := configProperty("status.mode",host=rhost)
- wrapperMode := configProperty("wrapper.parameter.mode",host=rhost)
-
- wrapfile := "{ddir}/param-{jobid}"
-
- stdout := try(getFieldValue(stdout), "stdout.txt")
- stderr := try(getFieldValue(stderr), "stderr.txt")
- kickstart := vdl:kickstart(rhost)
- fileDirs := fileDirs(stagein, stageout)
- os := vdl:siteprofile(rhost, "SYSINFO:OS")
-
- if(wrapperMode == "files"
- sequential(
- sys:file:write(wrapfile,
- "-scratch ", try(vdl:siteprofile(rhost, "scratch"), ""), nl(),
- "-e ",vdl:executable(tr, rhost), nl(),
- "-out ", stdout, nl(),
- "-err ", stderr, nl(),
- "-i ", maybe(getFieldValue(stdin)), nl(),
- "-d ", flatten(each(fileDirs)), nl(),
- "-if ", flatten(infiles(stagein)), nl(),
- "-of ", flatten(outfiles(stageout)), nl(),
- "-k ", kickstart, nl(),
- "-cdmfile ", cdm:file(), nl(),
- "-status ", statusMode, nl(),
- for(a, arguments, "-a ", a, nl())
- )
- )
- )
-
- vdl:setprogress("Stage in")
- tmpdir := dircat("{wfdir}/jobs/{jobdir}", jobid)
-
-
-
- try(
- sequential(
-
- createDirSet(jobid, sharedDir, rhost, fileDirs)
- doStagein(jobid, stagein, sharedDir, rhost)
- if(wrapperMode == "files"
- stageWrapperParams(jobid, jobdir, wrapfile, wfdir, rhost)
- )
-
-
- log(LOG:DEBUG, "JOB_START jobid={jobid} tr={tr}", maybe(" arguments=", arguments), " tmpdir={tmpdir} host={rhost}")
-
- vdl:setprogress("Submitting")
-
- if(wrapperMode == "files"
- vdl:execute(
- vdl:siteprofile(rhost, "swift:wrapperInterpreter"),
- list(
- vdl:siteprofile(rhost, "swift:wrapperInterpreterOptions"),
- dircat("shared", vdl:siteprofile(rhost, "swift:wrapperScript"), os=os),
- jobid, "-p", jobdir
- )
- directory=wfdir
- redirect=false
- host=rhost
- vdl:tcprofile(rhost, maybe(attributes=attributes), tr=tr) //this gets various app params from the tc, such as environment, walltime, etc
- replicationGroup=replicationGroup
- replicationChannel=replicationChannel
- jobid=jobid
- )
- )
- if(wrapperMode == "args"
- vdl:execute(
- vdl:siteprofile(rhost, "swift:wrapperInterpreter"),
- list(
- vdl:siteprofile(rhost, "swift:wrapperInterpreterOptions"),
- dircat("shared", vdl:siteprofile(rhost, "swift:wrapperScript"), os=os),
- jobid,
- "-jobdir", jobdir,
- "-scratch", try(vdl:siteprofile(rhost, "scratch"), "")
- "-e", vdl:executable(tr, rhost),
- "-out", stdout,
- "-err", stderr,
- "-i", maybe(getFieldValue(stdin)),
- "-d", flatten(each(fileDirs)),
- "-if", flatten(infiles(stagein)),
- "-of", flatten(outfiles(stageout)),
- "-k", kickstart,
- "-cdmfile", cdm:file(),
- "-status", statusMode,
- "-a", maybe(each(arguments))
- )
- directory=wfdir
- redirect=false
- host=rhost
- vdl:tcprofile(rhost, maybe(attributes=attributes), tr=tr)
- replicationGroup=replicationGroup
- replicationChannel=replicationChannel
- jobid=jobid
- )
- )
-
- vdl:setprogress("Checking status")
- if(statusMode == "files"
- checkJobStatus(rhost, wfdir, jobid, tr, jobdir)
- )
-
- if(wrapperMode == "files"
- file:remove(wrapfile)
- )
-
- log(LOG:DEBUG, "STAGING_OUT jobid={jobid}")
-
-
-            /* need to stage the files to the upper scratch area in case they are not transferred to another site
- before all the files get cleaned out */
-
-
- vdl:setprogress("Stage out")
- doStageout(jobid, stageout, sharedDir, rhost)
- doRestartlog(restartout)
- if(
- kickstart != "" & vdl:configProperty("kickstart.always.transfer") == "true"
- discard(transferKickstartRec(rhost, wfdir, jobid, jobdir))
- )
- if(
- vdl:configProperty("wrapperlog.always.transfer") == "true"
- discard(transferWrapperLog(rhost, wfdir, jobid, jobdir))
- )
- vdl:cacheUnlockFiles(stagein, sharedDir, rhost, cleanupFiles(cacheFilesToRemove, rhost))
- log(LOG:DEBUG, "JOB_END jobid={jobid}")
- )
- catch("^Abort$"
- log(LOG:DEBUG, "JOB_CANCELED jobid={jobid}")
- vdl:cacheUnlockFiles(stagein, sharedDir, rhost, force=false
- cleanupFiles(cacheFilesToRemove, rhost)
- )
- throw(exception)
- )
- catch("^(?!Abort$).*"
- vdl:setprogress("Failed but can retry")
- prev := exception
- exception := try(exception(checkErrorFile(rhost, wfdir, jobid, jobdir)), prev)
-
- log(LOG:DEBUG, "APPLICATION_EXCEPTION jobid={jobid} - Application exception: ", exception)
-
- if(matches(exception,".*executable bit.*")
- generateError(exception)
- )
-
- vdl:cacheUnlockFiles(stagein, sharedDir, rhost, force=false
- cleanupFiles(cacheFilesToRemove, rhost)
- )
-
- outs := transferStandardFiles(rhost, tmpdir, jobid, stdout, stderr)
-
- discard(maybe(transferWrapperLog(rhost, wfdir, jobid, jobdir)))
-
- kickstartRec := if(
- kickstart == "" ""
- else(
- try(
- (
- recfile := transferKickstartRec(rhost, wfdir, jobid, jobdir)
- "KickstartRecord: {recfile}"
- )
- ""
- )
- )
- )
-
- throw(
- exception(
- concat(
- "Exception in {tr}:", nl(),
- maybe(" Arguments: ", arguments, nl()),
- " Host: {rhost}", nl(),
- " Directory: {tmpdir}",
- "{outs}",
- kickstartRec
- )
- exception
- )
- )
- )
- )
- )
- )
-
- element(generateProvenanceGraph, [gdata]
- pgraph := vdl:configProperty("pgraph")
- gname := if(pgraph == "true" "{VDL:SCRIPTNAME}-{VDL:RUNID}.dot" pgraph)
- file:write(gname
- "digraph SwiftProvenance {{", nl()
- " graph [", vdl:configProperty("pgraph.graph.options"), "];", nl()
- " node [", vdl:configProperty("pgraph.node.options"), "];", nl()
-
- for(i, gdata
- " ", i, nl()
- )
- "}", nl()
- )
- log(LOG:INFO, "Provenance graph saved in ", gname)
- )
- )
-)
-
-// Local variables:
-// mode: scheme
-// tab-width: 4
-// indent-tabs-mode: t
-// End:
Deleted: branches/faster/libexec/vdl-lib.xml
===================================================================
--- branches/faster/libexec/vdl-lib.xml 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/libexec/vdl-lib.xml 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,153 +0,0 @@
-<karajan>
-
- <namespace prefix="swiftscript">
- <!-- string functions library -->
- <export name="extract_int"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.ExtractInt"/></export>
- <export name="read_data"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.ReadData"/></export>
- <export name="read_data2"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.ReadStructured"/></export>
- <export name="read_structured"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.ReadStructured"/></export>
- <export name="write_data"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.WriteData"/></export>
- <export name="strcat"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="strcut"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="strstr"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="strsplit"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="strjoin"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="regexp"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="to_int"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="to_float"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="format"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="pad"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="sprintf"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Sprintf"/></export>
- <export name="to_string"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="assert"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Assert"/></export>
- <export name="trace"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="tracef"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Tracef"/></export>
- <export name="fprintf"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Fprintf"/></export>
-
- <!-- included for backwards compatibility -->
- <export name="readdata"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.ReadData"/></export>
- <export name="readdata2"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.ReadStructured"/></export>
- <export name="readstructured"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.ReadStructured"/></export>
- <export name="writedata"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.WriteData"/></export>
- <export name="toint"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="tofloat"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="tostring"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
-
- <export name="java"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Java"/></export>
- <export name="filename"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.FileName"/></export>
- <export name="filenames"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.FileNames"/></export>
- <export name="dirname"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="length"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="exists"><elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.Misc"/></export>
- <export name="arg">
- <import file="sys.xml"/>
- <if>
- <sys:or>
- <equals value1="${vdl:operation}" value2="typecheck"/>
- <equals value1="${vdl:operation}" value2="graph"/>
- </sys:or>
- <element arguments="name"><string></string></element>
- <elementDef classname="org.griphyn.vdl.karajan.lib.swiftscript.FnArg"/>
- </if>
- </export>
- </namespace>
-
- <namespace prefix="vdl">
- <export name="vdl:getdatasetprovenanceid"><elementDef classname="org.griphyn.vdl.karajan.lib.GetDatasetProvenanceID"/></export>
- <export name="infinitecountingwhile"><elementDef classname="org.griphyn.vdl.karajan.lib.InfiniteCountingWhile"/></export>
- <export name="sequentialwithid"><elementDef classname="org.griphyn.vdl.karajan.lib.SequentialWithID"/></export>
- <export name="startprogressticker"><elementDef classname="org.griphyn.vdl.karajan.lib.RuntimeStats"/></export>
- <export name="stopprogressticker"><elementDef classname="org.griphyn.vdl.karajan.lib.RuntimeStats"/></export>
- <export name="initprogressstate"><elementDef classname="org.griphyn.vdl.karajan.lib.RuntimeStats"/></export>
- <export name="setprogress"><elementDef classname="org.griphyn.vdl.karajan.lib.RuntimeStats"/></export>
- <export name="new"><elementDef classname="org.griphyn.vdl.karajan.lib.New"/></export>
- <export name="createarray"><elementDef classname="org.griphyn.vdl.karajan.lib.CreateArray"/></export>
- <!-- used from VDL2 for arguments to apps and returns relative paths -->
- <export name="fileName"><elementDef classname="org.griphyn.vdl.karajan.lib.FileName"/></export>
- <!-- for internal use and does not force relative paths -->
- <export name="absFileName"><elementDef classname="org.griphyn.vdl.karajan.lib.AbsFileName"/></export>
-
- <export name="jobConstraints"><elementDef classname="org.griphyn.vdl.karajan.lib.JobConstraints"/></export>
- <export name="getField"><elementDef classname="org.griphyn.vdl.karajan.lib.GetField"/></export>
- <export name="getFieldSubscript"><elementDef classname="org.griphyn.vdl.karajan.lib.GetFieldSubscript"/></export>
- <export name="setFieldValue"><elementDef classname="org.griphyn.vdl.karajan.lib.SetFieldValue"/></export>
- <export name="appendArray"><elementDef classname="org.griphyn.vdl.karajan.lib.AppendArray"/></export>
- <export name="getFieldValue"><elementDef classname="org.griphyn.vdl.karajan.lib.GetFieldValue"/></export>
- <export name="waitFieldValue"><elementDef classname="org.griphyn.vdl.karajan.lib.WaitFieldValue"/></export>
- <export name="getArrayIterator"><elementDef classname="org.griphyn.vdl.karajan.lib.GetArrayIterator"/></export>
- <export name="slicearray"><elementDef classname="org.griphyn.vdl.karajan.lib.SliceArray"/></export>
- <export name="isFileBound"><elementDef classname="org.griphyn.vdl.karajan.lib.IsFileBound"/></export>
- <export name="isRestartable"><elementDef classname="org.griphyn.vdl.karajan.lib.IsRestartable"/></export>
- <export name="fringePaths"><elementDef classname="org.griphyn.vdl.karajan.lib.FringePaths"/></export>
- <export name="closeDataset"><elementDef classname="org.griphyn.vdl.karajan.lib.CloseDataset"/></export>
- <export name="partialCloseDataset"><elementDef classname="org.griphyn.vdl.karajan.lib.PartialCloseDataset"/></export>
- <export name="setWaitCount"><elementDef classname="org.griphyn.vdl.karajan.lib.SetWaitCount"/></export>
- <export name="cleanDataset"><elementDef classname="org.griphyn.vdl.karajan.lib.CleanDataset"/></export>
- <export name="range"><elementDef classname="org.griphyn.vdl.karajan.lib.Range"/></export>
-
- <export name="isLogged"><elementDef classname="org.griphyn.vdl.karajan.lib.IsLogged"/></export>
- <export name="logVar"><elementDef classname="org.griphyn.vdl.karajan.lib.LogVar"/></export>
- <export name="typecheck"><elementDef classname="org.griphyn.vdl.karajan.lib.Typecheck"/></export>
-
- <export name="threadPrefix"><elementDef classname="org.griphyn.vdl.karajan.lib.ThreadPrefix"/></export>
-
- <export name="executable"><elementDef classname="org.griphyn.vdl.karajan.lib.Executable"/></export>
- <export name="TCProfile"><elementDef classname="org.griphyn.vdl.karajan.lib.TCProfile"/></export>
- <export name="SiteProfile"><elementDef classname="org.griphyn.vdl.karajan.lib.SiteProfile"/></export>
-
- <export name="setFutureFault"><elementDef classname="org.griphyn.vdl.karajan.lib.SetFutureFault"/></export>
-
- <export name="configProperty"><elementDef classname="org.griphyn.vdl.karajan.functions.ConfigProperty"/></export>
- <export name="processBulkErrors"><elementDef classname="org.griphyn.vdl.karajan.functions.ProcessBulkErrors"/></export>
-
- <export name="niceName"><elementDef classname="org.griphyn.vdl.karajan.lib.NiceName"/></export>
-
- <export name="cacheAddAndLockFile"><elementDef classname="org.griphyn.vdl.karajan.lib.CacheAddAndLockFile"/></export>
- <export name="cacheAddFile"><elementDef classname="org.griphyn.vdl.karajan.lib.CacheAddFile"/></export>
- <export name="cacheFileRemoved"><elementDef classname="org.griphyn.vdl.karajan.lib.CacheFileRemoved"/></export>
- <export name="cacheUnlockFiles"><elementDef classname="org.griphyn.vdl.karajan.lib.CacheUnlockFiles"/></export>
-
- <export name="log"><elementDef classname="org.griphyn.vdl.karajan.lib.Log"/></export>
-
- <export name="unitStart"><elementDef classname="org.griphyn.vdl.karajan.lib.UnitStart"/></export>
- <export name="unitEnd"><elementDef classname="org.griphyn.vdl.karajan.lib.UnitEnd"/></export>
-
- <export name="kickstart"><elementDef classname="org.griphyn.vdl.karajan.lib.Kickstart"/></export>
-
- <export name="dirname"><elementDef classname="org.griphyn.vdl.karajan.lib.PathUtils"/></export>
- <export name="reldirname"><elementDef classname="org.griphyn.vdl.karajan.lib.PathUtils"/></export>
- <export name="basename"><elementDef classname="org.griphyn.vdl.karajan.lib.PathUtils"/></export>
- <export name="provider"><elementDef classname="org.griphyn.vdl.karajan.lib.PathUtils"/></export>
- <export name="hostname"><elementDef classname="org.griphyn.vdl.karajan.lib.PathUtils"/></export>
- <export name="dircat"><elementDef classname="org.griphyn.vdl.karajan.lib.PathUtils"/></export>
- <export name="pathnames"><elementDef classname="org.griphyn.vdl.karajan.lib.PathUtils"/></export>
-
- <export name="execute"><elementDef classname="org.griphyn.vdl.karajan.lib.Execute"/></export>
- <export name="expandArguments"><elementDef classname="org.griphyn.vdl.karajan.lib.ExpandArguments"/></export>
-
- <export name="tparallelFor"><elementDef classname="org.griphyn.vdl.karajan.lib.ThrottledParallelFor"/></export>
- <export name="throttled"><elementDef classname="org.griphyn.vdl.karajan.lib.Throttled"/></export>
-
- <export name="appStageins"><elementDef classname="org.griphyn.vdl.karajan.lib.AppStageins"/></export>
- <export name="appStageouts"><elementDef classname="org.griphyn.vdl.karajan.lib.AppStageouts"/></export>
- <export name="isDone"><elementDef classname="org.griphyn.vdl.karajan.lib.IsDone"/></export>
- <export name="mark"><elementDef classname="org.griphyn.vdl.karajan.lib.Mark"/></export>
- <export name="flatten"><elementDef classname="org.griphyn.vdl.karajan.lib.Flatten"/></export>
- <export name="parameterlog"><elementDef classname="org.griphyn.vdl.karajan.lib.Parameterlog"/></export>
- <export name="inFileDirs"><elementDef classname="org.griphyn.vdl.karajan.lib.InFileDirs"/></export>
- <export name="outFileDirs"><elementDef classname="org.griphyn.vdl.karajan.lib.OutFileDirs"/></export>
- <export name="outFiles"><elementDef classname="org.griphyn.vdl.karajan.lib.OutFiles"/></export>
- <export name="doRestartLog"><elementDef classname="org.griphyn.vdl.karajan.lib.DoRestartLog"/></export>
- <export name="unwrapClosedList"><elementDef classname="org.griphyn.vdl.karajan.lib.UnwrapClosedList"/></export>
- </namespace>
-
- <namespace prefix="cdm">
- <export name="query"><elementDef classname="org.globus.swift.data.Query"/></export>
- <export name="get"><elementDef classname="org.globus.swift.data.Query"/></export>
- <export name="file"><elementDef classname="org.globus.swift.data.Query"/></export>
- <export name="broadcast"><elementDef classname="org.globus.swift.data.Action"/></export>
- <export name="external"><elementDef classname="org.globus.swift.data.Action"/></export>
- <export name="wait"><elementDef classname="org.globus.swift.data.Action"/></export>
- </namespace>
-
-</karajan>
Deleted: branches/faster/libexec/vdl-sc.k
===================================================================
--- branches/faster/libexec/vdl-sc.k 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/libexec/vdl-sc.k 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,115 +0,0 @@
-import("sys.k")
-
-namespace("vdl"
- export(siteCatalog
- element([file]
-
- import("task.k")
-
- element(pool, [handle, optional(gridlaunch), optional(sysinfo), optional(lrc), ..., channel(properties)]
- host(name = handle
- each(...)
- to(properties
- each(properties)
- maybe(property("gridlaunch", gridlaunch))
- maybe(property("sysinfo", sysinfo))
- )
- )
- )
-
- element(gridftp, [url, optional(storage), optional(major), optional(minor), optional(patch)]
- if(
- url == "local://localhost"
- service(type="file", provider="local")
- service(type="file", provider="gsiftp", url=url)
- )
- )
-
- element(jobmanager, [url, major, optional(universe), optional(minor), optional(patch)]
- provider := if(
- url == "local://localhost" "local"
- url == "pbs://localhost" "pbs"
- major == "4" "GT4"
- major == "2" "GT2"
- throw("Unknown job manager version: major = {major}, minor = {minor}, patch = {patch}, url = {url}")
- )
- service(type="execution", provider=provider, url=url)
- )
-
- element(execution, [provider, url, optional(jobManager)]
- service(type="execution", provider=provider, maybe(url=url), maybe(jobManager=jobManager))
- )
-
- element(filesystem, [provider, url, optional(storage)]
- service(type="file", provider=provider, url=url)
- )
-
- element(profile, [namespace, key, value]
- if(
- namespace == "karajan"
- property("{key}", value)
- property("{namespace}:{key}", value)
- )
- )
-
- element(env, [name, value]
- property("env:{name}", value)
- )
-
- element(workdirectory, [dir]
- property("workdir", dir)
- )
-
- element(scratch, [dir]
- property("scratch", dir)
- )
-
- element(p, [name, value]
- print(" {name} = {value}")
- property(name, value)
- )
-
- element(intrepidCoasterParams, [optional(averageJobTime, blockSize)]
- print("Setting Intrepid coaster params:")
- p("scratch", "/scratch")
- p("jobsPerNode", "4")
- p("kernelprofile", "zeptoos")
- p("alcfbgpnat", "true")
- maybe(
- discard(averageJobTime)
- blockSize := try(blockSize, 512)
- parallelism := averageJobTime * 100
- p("slots", math:round(parallelism / (blockSize * 4)))
- p("nodeGranularity", blockSize)
- p("maxNodes", blockSize)
- jt := parallelism * 2
- p("jobThrottle", jt / 100)
- print("For best results, please set the foreach.max.threads property in swift.properties to {jt}")
- )
- )
-
- element(surveyorCoasterParams, [optional(averageJobTime, blockSize)]
- print("Setting Surveyor coaster params:")
- p("scratch", "/scratch")
- p("jobsPerNode", "4")
- p("kernelprofile", "zeptoos")
- p("alcfbgpnat", "true")
- maybe(
- discard(averageJobTime)
- blockSize := try(blockSize, 64)
- parallelism := averageJobTime * 100
- p("slots", 12)
- p("nodeGranularity", blockSize)
- p("maxTime", 3500)
- jt := parallelism * 2
- p("jobThrottle", jt / 100)
- print("For best results, please set the foreach.max.threads property in swift.properties to {jt}")
- )
- )
-
- resources(
- executeFile(file)
- )
- )
- )
-)
Deleted: branches/faster/libexec/vdl-xs.k
===================================================================
--- branches/faster/libexec/vdl-xs.k 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/libexec/vdl-xs.k 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,72 +0,0 @@
-import("sys.k")
-
-namespace("xs"
- import("java.k")
-
- element(UnresolvedType, [name, isArray]
- new("org.griphyn.vdl.type.impl.UnresolvedType", types=["String"], name)
- )
-
- element(typesST, [], types=["String", "org.griphyn.vdl.type.Type"])
-
- element(newSimpleNode, [name, type]
- x := new("org.griphyn.vdl.type.impl.TypeImpl", types=["String"], name)
- invokeMethod("setBaseType", object=x, types=["org.griphyn.vdl.type.Type"]
- invokeMethod("getType", classname="org.griphyn.vdl.type.Types", type)
- )
- x
- )
-
- element(newComplexNode, [name]
- new("org.griphyn.vdl.type.impl.TypeImpl", types=["String"], name)
- )
-
- element(addNode, [node]
- invokeMethod("addType", classname="org.griphyn.vdl.type.Types"
- types=["org.griphyn.vdl.type.Type"]
- node
- )
- )
-
- element(addField, [node, name, type]
- invokeMethod("addField", object=node, typesST(), name, type)
- )
-
- export(
- element(schema, []
- invokeMethod("resolveTypes", classname="org.griphyn.vdl.type.Types")
- )
-
- element(simpleType, [name, type]
- addNode(newSimpleNode(name, type))
- )
-
- element(restriction, [base]
- type=last(split(base, ":"))
- )
-
- element(complexType, [name, ...]
- node := newComplexNode(name)
- for(field, ...
- [name, type] := each(field)
- addField(node, name, type)
- )
- addNode(node)
- )
-
- element(sequence, [optional(minOccurs, maxOccurs), ...]
- maxOccurs := choice(maxOccurs, 0)
- [name, type] := each(first(...))
- if (
- maxOccurs == "unbounded" list(name, UnresolvedType(type, true))
- else(each(...))
- )
- )
-
- //should be noted that we're dealing with type names here
- element(xs:element, [name, type]
- type := last(split(type, ":"))
- list(name, UnresolvedType(type, false))
- )
- )
-)
Deleted: branches/faster/libexec/vdl.k
===================================================================
--- branches/faster/libexec/vdl.k 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/libexec/vdl.k 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,128 +0,0 @@
-import("sys.k")
-import("task.k")
-import("rlog.k")
-import("operators.xml", export = true)
-
-import("vdl-xs.k", export = true)
-
-namespace("vdl"
-
- import("vdl-sc.k", export = true)
- import("vdl-lib.xml", export = true)
-
- pstaging := configProperty("use.provider.staging")
- wstaging := configProperty("use.wrapper.staging")
- int := if (
- pstaging == "true", "vdl-int-staging.k",
- wstaging == "true", "vdl-int-wrapper-staging.k",
- "vdl-int.k"
- )
-
- import(int)
- import("java.k")
-
- once("vdl.k-print-version"
- log("info",sys:file:read("{swift.home}/libexec/version.txt"))
- echo(sys:file:read("{swift.home}/libexec/version.txt"))
- log("info","RUNID id=run:{VDL:RUNID}")
- echo("RunID: {VDL:RUNID}")
- )
-
- export("stagein", elementDef(classname="org.griphyn.vdl.karajan.lib.Stagein"))
- export("stageout", elementDef(classname="org.griphyn.vdl.karajan.lib.Stageout"))
-
- export(
-
- element(parameterlog, [direction, variable, id, thread],
- if(
- vdl:configProperty("provenance.log") == "true"
- log("info","PARAM thread={thread} direction={direction} variable={variable} provenanceid={id}")
- )
- )
- element(split, [var], each(str:split(vdl:getFieldValue(var), " ")))
-
- element(quote, [var, optional(path)],
- str:quote(vdl:getFieldValue(var, maybe(path = path)))
- )
-
- element(types, [])
-
- element(arguments, [...]
- arguments=expandArguments(each(...))
- )
-
- export(execute
- executeFile(
- if(
- vdl:operation == "dryrun"
- "execute-dryrun.k"
- vdl:operation == "typecheck"
- "execute-typecheck.k"
- vdl:operation == "run"
- "execute-default.k"
- )
- )
- )
-
- element(mapping, [descriptor, ...]
- mapping=map(map:entry("swift#descriptor", descriptor), each(...))
- )
-
- element(parameter, [name, value]
- map:entry(name, value)
- )
-
- element(stdout, [file]
- stdout = file
- )
-
- element(stdin, [file]
- stdin = file
- )
-
- element(stderr, [file]
- stderr = file
- )
-
- element(tr, [name]
- tr = name
- )
-
- element(attributes, [attrs]
- attributes = attrs
- )
-
- element(mains, [channel(cleanup), channel(errors), channel(warnings)]
- anyerrors := vdl:processBulkErrors("The following errors have occurred:", errors)
-
- //this should be reached after everything is done
- if(
- sys:not(anyerrors) then(
- //hmm, you can append to channels!
- log(LOG:DEBUG, "Starting cleanups")
- append(warnings, from(warnings, cleanups(cleanup)))
- log(LOG:DEBUG, "Ending cleanups")
- )
- else(
- log(LOG:INFO, "Errors detected. Cleanup not done.")
- )
- )
- anywarnings := vdl:processBulkErrors("The following warnings have occurred:", warnings, onStdout=true)
- if (anyerrors throw("Execution completed with errors"))
- )
-
- /*
- * This would run in parallel with the workflow so that we don't keep
- * all the data in memory until the workflow is done
- */
- parallelElement(mainp, [channel(graph), channel(cleanup)]
- parallel(
- if(
- vdl:configProperty("pgraph") != "false"
- generateProvenanceGraph(graph)
- )
- to(cleanup, unique(for(c, cleanup, c)))
- )
- )
- )
-)
Modified: branches/faster/resources/Karajan.stg
===================================================================
--- branches/faster/resources/Karajan.stg 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/resources/Karajan.stg 2013-01-29 07:31:09 UTC (rev 6170)
@@ -4,75 +4,74 @@
// are smaller
program(types,procedures,declarations,statements,constants,buildversion,cleanups) ::= <<
-<project><!-- CACHE ID $buildversion$ -->
- <import file="sys.xml"/>
- <import file="scheduler.xml"/>
- <import file="rlog.xml"/>
- <import file="vdl.k"/>
- $if(types)$
- <types>
- <xs:schema targetNamespace="http://ci.uchicago.edu/swift/2009/02/swiftscript" xmlns="http://ci.uchicago.edu/swift/2009/02/swiftscript" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xs="http://www.w3.org/2001/XMLSchema">
- $types;separator="\n"$
- </xs:schema>
- </types>
- $endif$
- $constants;separator="\n"$
- $procedures;separator="\n"$
- $declarations;separator="\n"$
- $if(statements)$
- <restartLog>
- <vdl:mains>
- <vdl:startprogressticker />
- <vdl:mainp>
- $parallel(statements=statements)$
- </vdl:mainp>
- <vdl:stopprogressticker />
- </vdl:mains>
- </restartLog>
- $endif$
- $cleanups:vdl_cleandataset();separator="\n"$
- <vdl:cleandataset shutdown="true"/>
-</project>
+// CACHE ID $buildversion$
+import(sys)
+import(scheduler)
+import(rlog)
+import(swift)
+
+$if(types)$
+types(
+ xs:schema(
+ $types;separator="\n"$
+ )
+)
+
+$endif$
+$constants;separator="\n"$
+$procedures;separator="\n"$
+$declarations;separator="\n"$
+$if(statements)$
+
+restartLog() {
+ swift:mains(
+ swift:startProgressTicker()
+ swift:mainp(
+ $parallel(statements=statements)$
+ )
+ swift:stopProgressTicker()
+ )
+}
+$endif$
+$cleanups:swift_cleandataset();separator="\n"$
+swift:cleanDataset(shutdown=true)
>>
typeDef(name,type,members,sourcelocation) ::= <<
- $if(type)$
- <xs:element name="$name$" type="$type$"/>
- $else$
- $if(!members)$
- <xs:simpleType name="$name$">
- <xs:restriction base="string"/>
- </xs:simpleType>
- $else$
- <xs:complexType name="$name$">
- <xs:sequence>
- $members;separator="\n"$
- </xs:sequence>
- </xs:complexType>
- $endif$
- $endif$
+$if(type)$
+ xs:element("$name$", "$type$")
+$else$
+ $if(!members)$
+ xs:simpleType("$name$", xs:restriction("string"))
+ $else$
+ xs:complexType("$name$",
+ xs:sequence(
+ $members;separator="\n"$
+ )
+ )
+ $endif$
+$endif$
>>
memberdefinition(type,name,sourcelocation) ::= <<
- <xs:element name="$name$" type="$type$"/>
+ xs:element("$name$", "$type$")
>>
procedure(name, outputs, inputs, arguments, optargs, binding, declarations, statements, config, line, initWaitCounts, cleanups, closes) ::= <<
-<element name="$name$"$if(arguments)$ arguments="$proc_args(args=arguments)$"$endif$$if(optargs)$ optargs="$proc_args(args=optargs)$"$endif$ _defline="$line$">
- $optargs:default_arg();separator="\n"$
- $inputs:vdl_log_input();separator="\n"$
- $outputs:vdl_log_output();separator="\n"$
+
+$name$ := function($if(optargs)$ "$proc_oargs(args=optargs)$"$endif$$if(arguments)$ "$proc_args(args=arguments)$"$endif$) {
+ $inputs:swift_log_input();separator="\n"$
+ $outputs:swift_log_output();separator="\n"$
$if(binding)$
- $vdl_execute(outputs=outputs,inputs=inputs,application=binding.application, name=name,line=line)$
+ $swift_execute(outputs=outputs,inputs=inputs,application=binding.application, name=name,line=line)$
$else$
- $compound(outputs=outputs, inputs=inputs, declarations=declarations, statements=statements, name=name, initWaitCounts=initWaitCounts, cleanups=cleanups)$
+ $compound(outputs=outputs, inputs=inputs, declarations=declarations, statements=statements, name=name, initWaitCounts=initWaitCounts, cleanups=cleanups)$
$endif$
-</element>
-
+}
>>
compound(outputs, inputs, declarations, statements, config, name, initWaitCounts, cleanups) ::= <<
-<unitStart name="$name$" type="COMPOUND" outputs="$outputs:list();separator=","$"/>
+unitStart("COMPOUND", name="$name$", outputs="$outputs:list();separator=","$")
$if(initWaitCounts)$
$initWaitCounts;separator="\n"$
$endif$
@@ -81,11 +80,10 @@
$parallel(statements=statements)$
$endif$
-$outputs:vdl_closedataset();separator="\n"$
+$outputs:swift_closedataset();separator="\n"$
-$cleanups:vdl_cleandataset();separator="\n"$
-<unitEnd name="$name$" type="COMPOUND"/>
-
+$cleanups:swift_cleandataset();separator="\n"$
+unitEnd("COMPOUND", name="$name$")
>>
proc_args(args) ::= <<
@@ -96,116 +94,90 @@
$it.name$
>>
-default_arg() ::= <<
-$if(it.default)$
-<default name="$it.name$">
-$it.default$
-</default>
-$endif$
+proc_oargs(args) ::= <<
+$args:proc_optarg()$
>>
-vdl_execute(outputs,inputs,attributes,application,name,line) ::= <<
-<unitStart name="$name$" line="$line$" type="PROCEDURE" outputs="$outputs:list();separator=","$"/>
-<vdl:execute>
- $attributes$
- <vdl:tr>$application.exec$</vdl:tr>
- $inputs:vdl_stagein();separator="\n"$
- $outputs:vdl_stageout();separator="\n"$
- $vdl_arguments(attributes=application.attributes,arguments=application.arguments, stdin=application.stdin,stdout=application.stdout,stderr=application.stderr)$
-</vdl:execute>
-$outputs:vdl_closedataset();separator="\n"$
-<unitEnd name="$name$" line="$line$" type="PROCEDURE"/>
+proc_optarg() ::= <<
+$it.name$=$if(it.default)$$it.default$$else$null$endif$
>>
-vdl_log_input() ::= <<
-<parameterlog>
-<string>input</string>
-<string>$it.name$</string>
-<vdl:getdatasetprovenanceid var="{$it.name$}" />
-<string>{#thread}</string>
-</parameterlog>
+swift_execute(outputs,inputs,attributes,application,name,line) ::= <<
+unitStart("PROCEDURE", name="$name$", line="$line$", outputs="$outputs:list();separator=","$")
+swift:execute(
+ $attributes$
+ swift:tr("$application.exec$")
+ $inputs:swift_stagein();separator="\n"$
+ $outputs:swift_stageout();separator="\n"$
+ $swift_arguments(attributes=application.attributes,arguments=application.arguments, stdin=application.stdin,stdout=application.stdout,stderr=application.stderr)$
+)
+$outputs:swift_closedataset();separator="\n"$
+unitEnd("PROCEDURE", name="$name$", line="$line$")
>>
-vdl_log_output() ::= <<
-<parameterlog>
-<string>output</string>
-<string>$it.name$</string>
-<vdl:getdatasetprovenanceid var="{$it.name$}" />
-<string>{#thread}</string>
-</parameterlog>
+swift_log_input() ::= <<
+parameterLog("input", "$it.name$", swift:getDatasetProvenanceID($it.name$), currentThread())
>>
-vdl_stagein() ::= <<
-<vdl:stagein var="{$it.name$}"/>
+swift_log_output() ::= <<
+parameterLog("output", "$it.name$", swift:getDatasetProvenanceID($it.name$), currentThread())
>>
-vdl_stageout(outputs) ::= <<
-<vdl:stageout var="{$it.name$}"/>
+swift_stagein() ::= <<
+swift:stageIn($it.name$)
>>
-vdl_closedataset() ::= <<
-<vdl:closedataset var="{$it.name$}"/>
+swift_stageout(outputs) ::= <<
+swift:stageOut($it.name$)
>>
-vdl_cleandataset() ::= <<
-<vdl:cleandataset var="{$it$}"/>
+swift_closedataset() ::= <<
+swift:closeDataset($it.name$)
>>
+swift_cleandataset() ::= <<
+swift:cleanDataset($it$)
+>>
+
list() ::= <<$it.name$>>
-vdl_arguments(attributes,arguments,stdin,stdout,stderr) ::= <<
- $attributes$
-<vdl:arguments>
- $arguments;separator="\n"$
-</vdl:arguments>
+swift_arguments(attributes,arguments,stdin,stdout,stderr) ::= <<
+$attributes$
+swift:arguments(
+ $arguments;separator="\n"$
+)
$if (stdin)$
-<vdl:stdin>
- $stdin$
-</vdl:stdin>
+swift:stdin($stdin$)
$endif$
$if (stdout)$
-<vdl:stdout>
- $stdout$
-</vdl:stdout>
+swift:stdout($stdout$)
$endif$
$if (stderr)$
-<vdl:stderr>
- $stderr$
-</vdl:stderr>
+swift:stderr($stderr$)
$endif$
>>
-vdl_attributes(entries) ::= <<
-<vdl:attributes>
- <map>
- $entries;separator="\n"$
- </map>
-</vdl:attributes>
-
+swift_attributes(entries) ::= <<
+swift:attributes(
+ map(
+ $entries;separator="\n"$
+ )
+)
>>
map_entry(key,value) ::= <<
- <entry>
- <vdl:getfieldvalue>
- $key$
- </vdl:getfieldvalue>
- <vdl:getfieldvalue>
- $value$
- </vdl:getfieldvalue>
- </entry>
+ map:entry(swift:getFieldValue($key$), swift:getFieldValue($value$))
>>
-// use unbuffered parallel here
-// since calls don't use this macro
parallel(statements) ::= <<
$if(rest(statements))$
-<uparallel>
- $statements;separator="\n"$
-</uparallel>
+parallel(
+ $statements;separator="\n"$
+)
$else$
-<sequentialWithID>
- $statements$
-</sequentialWithID>
+sequential(
+ $statements$
+)
$endif$
>>
@@ -214,104 +186,76 @@
// that calls a function in the 'swiftscript' namespace.
function(name, args, datatype, line) ::= <<
-<swiftscript:$name$ _traceline="$line$">
- $if(args)$ $args$ $endif$
-</swiftscript:$name$>
+swiftscript:$name$(
+ $if(args)$ $args$ $endif$
+)
>>
iterate(declarations,statements,cond,var,cleanups,trace,line) ::= <<
-
-<vdl:infinitecountingwhile var="$var$" $if(trace)$ _traceline="$line$"$endif$>
- $sub_comp(declarations=declarations, statements=statements, cleanups=cleanups)$
- <sys:if>
- <vdl:getfieldvalue>$cond$</vdl:getfieldvalue>
- <sys:break/>
- </sys:if>
-
-</vdl:infinitecountingwhile>
+sys:while($var$, 0) {
+ if (vdl:getFieldValue($cond$)) {
+ $sub_comp(declarations=declarations, statements=statements, cleanups=cleanups)$
+ next($var$ + 1)
+ }
+}
>>
foreach(var, in, indexVar, indexVarType, declarations, statements, line, refs, selfClose, cleanups, trace) ::= <<
-<vdl:tparallelFor name="\$"$if(trace)$ _traceline="$line$"$endif$$if(indexVar)$_kvar="$indexVar$"$endif$ _vvar="$var$"$if(selfClose)$ selfClose="true"$endif$$if(refs)$ refs="$refs;separator=" "$"$endif$>
+swift:tParallelFor(\$, _traceline="$line$"$if(indexVar)$, _kvar="$indexVar$"$endif$, _vvar="$var$"$if(selfClose)$, selfClose="true"$endif$$if(refs)$, refs="$refs;separator=" "$"$endif$,
$! The iterator !$
- <getarrayiterator>
- $in$
- </getarrayiterator>
-$! Body !$
- <set names="\$\$, $var$">
- <each items="{\$}"/>
- </set>
+ getArrayIterator($in$)) {
+ (\$\$, $var$) := each(\$)
$if(indexVar)$
- <set name="$indexVar$">
- <vdl:new type="$indexVarType$" value="{\$\$}"/>
- </set>
-
+ $indexVar$ := swift:new($indexVarType$, \$\$)
$endif$
-<unitStart line="$line$" type="FOREACH_IT"/>
+ unitStart("FOREACH_IT", line="$line$")
$declarations;separator="\n"$
$if(statements)$
$parallel(statements=statements)$
- $cleanups:vdl_cleandataset();separator="\n"$
+ $cleanups:swift_cleandataset();separator="\n"$
$endif$
- <unitEnd line="$line$" type="FOREACH_IT"/>
-</vdl:tparallelFor>
+ unitEnd("FOREACH_IT", line="$line$")
+}
>>
-cs() ::= <<
- <set name="swift#cs"><variable>#thread</variable></set>
->>
-
// need to log inputs and outputs at the calling stage here because
// they are not
// $outputs:vdl_log_output();separator="\n"$
callInternal(func, outputs, inputs, line, serialize) ::= <<
-<sequential>
-<unitStart name="$func$" type="INTERNALPROC" outputs="$outputs:list();separator=","$"/>
-$cs()$
-<$func$ _traceline="$line$">
- $if(!serialize)$<parallel>$endif$
- $outputs:callInternal_log_output();separator="\n"$
- $inputs:callInternal_log_input();separator="\n"$
- $if(!serialize)$</parallel>$endif$
-</$func$>
-<unitEnd name="$func$" type="INTERNALPROC"/>
-</sequential>
+sequential(
+ unitStart("INTERNALPROC", name="$func$", outputs="$outputs:list();separator=","$")
+ $func$(
+ $if(!serialize)$parallel($endif$
+ $outputs:callInternal_log_output();separator="\n"$
+ $inputs:callInternal_log_input();separator="\n"$
+ $if(!serialize)$)$endif$
+ )
+ unitEnd("INTERNALPROC", name="$func$")
+)
>>
callInternal_log_input() ::= <<
-<sequential>
- <set name="swift#callInternalValue">$it$</set>
- <parameterlog>
- <string>input</string>
- <string>TODO_name_or_pos</string>
- <vdl:getdatasetprovenanceid var="{swift#callInternalValue}" />
- <string>{swift#cs}</string>
- </parameterlog>
- <variable>swift#callInternalValue</variable>
-</sequential>
+sequential(
+ parameterLog("input", "TODO_name_or_pos", vdl:getDatasetProvenanceID($it$), currentThread())
+ $it$
+)
>>
callInternal_log_output() ::= <<
-<sequential>
- <set name="swift#callInternalValue">$it$</set>
- <parameterlog>
- <string>output</string>
- <string>TODO_name_or_pos</string>
- <vdl:getdatasetprovenanceid var="{swift#callInternalValue}" />
- <string>{swift#cs}</string>
- </parameterlog>
- <variable>swift#callInternalValue</variable>
-</sequential>
+sequential(
+ parameterLog("output", "TODO_name_or_pos", vdl:getDatasetProvenanceID($it$), currentThread())
+ $it$
+)
>>
callUserDefined(func, outputs, inputs, line, serialize, partialClose) ::= <<
-<$func$ _traceline="$line$">
- $if(!serialize)$<parallel>$endif$
+$func$(
+ $if(!serialize)$parallel($endif$
$outputs;separator="\n"$
$inputs;separator="\n"$
- $if(!serialize)$</parallel>$endif$
-</$func$>
+ $if(!serialize)$)$endif$
+)
$if(partialClose)$
$partialClose$
$endif$
@@ -319,159 +263,117 @@
call_arg(bind, expr, datatype) ::= <<
$if(bind)$
-<argument name="$bind$">
- $expr$
-</argument>
+$bind$ = $expr$
$else$
$expr$
$endif$
>>
globalConstant(name, expr, datatype) ::= <<
-<global name="$name$">
- $expr$
-</global>
+$name$ := $expr$
>>
variable(name, type, expr, mapping, nil, file, waitCount, input, datatype, isGlobal, line) ::= <<
-$if(isGlobal)$<global name="$name$">$else$<set name="$name$">$endif$
- $if(mapping)$
- <vdl:new type="$type$" dbgname="$name$"$if(waitCount)$ waitCount="$waitCount$"$endif$ _defline="$line$"$if(input)$ input="true"$endif$>
- $vdl_mapping(mapping=mapping, file=file)$
- </vdl:new>
- $else$
- $if(file)$
- <vdl:new type="$type$" dbgname="$name$"$if(waitCount)$ waitCount="$waitCount$"$endif$ _defline="$line$"$if(input)$ input="true"$endif$>
- $vdl_mapping(mapping=mapping, file=file)$
- </vdl:new>
- $else$
- <vdl:new type="$type$" dbgname="$name$"$if(waitCount)$ waitCount="$waitCount$"$endif$ _defline="$line$"$if(input)$ input="true"$endif$/>
- $endif$
- $endif$
-$if(isGlobal)$</global>$else$</set>$endif$
+$name$ :=
+ $if(mapping)$
+ swift:new("$type$", dbgname="$name$"$if(waitCount)$, waitCount=$waitCount$$endif$, _defline="$line$"$if(input)$, input=true$endif$
+ $swift_mapping(mapping=mapping, file=file)$
+ )
+ $else$
+ $if(file)$
+ swift:new("$type$", dbgname="$name$"$if(waitCount)$, waitCount=$waitCount$$endif$, _defline="$line$"$if(input)$, input=true$endif$
+ $swift_mapping(mapping=mapping, file=file)$
+ )
+ $else$
+ swift:new("$type$", dbgname="$name$"$if(waitCount)$, waitCount=$waitCount$$endif$, _defline="$line$"$if(input)$, input=true$endif$)
+ $endif$
+ $endif$
$variable_log()$
>>
variable_log() ::= <<
- <parameterlog>
- <string>intermediate</string>
- <string>$name$</string>
- <vdl:getdatasetprovenanceid var="{$name$}" />
- <string>{#thread}</string>
- </parameterlog>
+ parameterLog("intermediate", "$name$", swift:getDatasetProvenanceID($name$), currentThread())
>>
-vdl_mapping(mapping, file) ::= <<
+swift_mapping(mapping, file) ::= <<
$if(file)$
-<vdl:mapping descriptor="single_file_mapper">
- <vdl:parameter name="file" value="$file.name$"/>
+swift:mapping("single_file_mapper",
+ swift:parameter("file", "$file.name$")
$if(file.params)$$file.params;separator="\n"$$endif$
-</vdl:mapping>
+)
$else$
-<vdl:mapping descriptor="$mapping.descriptor$">
+swift:mapping("$mapping.descriptor$",
$mapping.params;separator="\n"$
-</vdl:mapping>
+)
$endif$
>>
-vdl_parameter(name,expr) ::= <<
-<vdl:parameter name="$name$">$expr$</vdl:parameter>
+swift_parameter(name,expr) ::= <<
+swift:parameter("$name$", $expr$)
>>
assign(var, value, line, partialClose) ::= <<
- <vdl:setfieldvalue $if(line)$_traceline="$line$"$else$_traceline="-1"$endif$>
+ swift:setFieldValue($if(line)$_traceline="$line$"$else$_traceline="-1"$endif$
$var$
$value$
- </vdl:setfieldvalue>
+ )
$if(partialClose)$
$partialClose$
$endif$
>>
append(array, value, partialClose) ::= <<
- <vdl:appendArray>
+ swift:appendArray(
$array$
$value$
- </vdl:appendArray>
+ )
$if(partialClose)$
$partialClose$
$endif$
>>
callexpr(call, datatype, prefix) ::= <<
-<sequential>
- <set name="swift#callintermediate">
- <vdl:new type="$datatype$" dbgname="swift#callintermediate">
- <vdl:mapping descriptor="concurrent_mapper">
- <vdl:parameter name="prefix">_callintermediate-$prefix$</vdl:parameter>
- </vdl:mapping>
- </vdl:new>
- </set>
+sequential(
+ tmp := swift:new("$datatype$", dbgname="swift#callintermediate",
+ swift:mapping("concurrent_mapper",
+ swift:parameter("prefix", "_callintermediate-$prefix$")
+ )
+ )
$call$
- <variable>swift#callintermediate</variable>
-</sequential>
+ tmp
+)
>>
array(elements, datatype) ::= <<
-<vdl:createarray>
- <list>
- $elements;separator="\n"$
- </list>
-</vdl:createarray>
+swift:createarray(
+ list(
+ $elements;separator="\n"$
+ )
+)
>>
range(from, to, step, datatype) ::= <<
- <sequential>
- <set name="swift#rangeout">
- <vdl:range>
- <argument name="from">$from$</argument>
- <argument name="to">$to$</argument>
-$if(step)$
- <argument name="step">$step$</argument>
-$endif$
- </vdl:range>
- </set>
- $range_log(from=from, to=to, step=step)$
- <variable>swift#rangeout</variable>
- </sequential>
+ sequential(
+ tmp := swift:range($from$, $to$$if(step)$, step = $step$$endif$)
+ tmp
+ $range_log()$
+ )
>>
-range_log(from, to, step) ::= <<
- <if>
- <equals><vdl:configProperty><string>provenance.log</string></vdl:configProperty><string>true</string></equals>
- <then>
- <log level="info">
- <concat>
- <string>ARRAYRANGE thread={#thread} array=</string>
- <vdl:getdatasetprovenanceid var="{swift#rangeout}" />
- <string> from=</string> <vdl:getdatasetprovenanceid>$from$</vdl:getdatasetprovenanceid>
- <string> to=</string> <vdl:getdatasetprovenanceid>$to$</vdl:getdatasetprovenanceid>
- $if(step)$
- <string> step=</string> <vdl:getdatasetprovenanceid>$step$</vdl:getdatasetprovenanceid>
- $else$
- <string> step=none</string>
- $endif$
- </concat>
- </log>
- </then>
- </if>
+range_log() ::= <<
>>
if(condition,vthen,velse,line,trace) ::= <<
-<if $if(trace)$ _traceline="$line$"$endif$>
- <vdl:getfieldvalue>$condition$</vdl:getfieldvalue>
- <then>
- <unitStart type="CONDITION_BLOCK"/>
- $vthen$
- </then>
+if (swift:getFieldValue($condition$)) {
+ unitStart("CONDITION_BLOCK")
+ $vthen$
+}
$if(velse)$
- <else>
- <unitStart type="CONDITION_BLOCK"/>
- $velse$
- </else>
+else {
+ unitStart("CONDITION_BLOCK")
+ $velse$
+}
$endif$
-
-</if>
>>
sub_comp(declarations, statements, cleanups, preClose) ::= <<
@@ -481,158 +383,123 @@
$declarations;separator="\n"$
$if(statements)$
$parallel(statements=statements)$
-$cleanups:vdl_cleandataset();separator="\n"$
+$cleanups:swift_cleandataset();separator="\n"$
$endif$
>>
switch(condition,cases,sdefault) ::= <<
-<sequential>
- <set name="\$_sw">
- $condition$
- </set>
- <if>
+sequential(
+ swc := $condition$
$cases:{case |
- <vdl:getfieldvalue>
- <vdlop:eq>
- <variable>\$_sw</variable>
- $case.value$
- </vdlop:eq>
- </vdl:getfieldvalue>
- <then>
- $sub_comp(declarations=case.declarations, statements=case.statements)$
- </then>
+ if (swift:getFieldValue(swiftop:eq(swc, $case.value$))) {
+ $sub_comp(declarations=case.declarations, statements=case.statements)$
+ }
+ else
}$
$if(sdefault)$
- <else>
+ {
$sdefault$
- </else>
-
+ }
$endif$
- </if>
-</sequential>
+)
>>
// TODO can optimise this like we do with parallel statements so that
// the wrapping layer disappears in the (perhaps common?) case of a
// single layer.
sequential(statements) ::= <<
- <sequential>
- $statements;separator="\n"$
- </sequential>
+ sequential(
+ $statements;separator="\n"$
+ )
>>
partialclose(var, count) ::= <<
-<partialCloseDataset var="{$var$}"$if(count)$ count="$count$"$endif$/>
+partialCloseDataset($var$$if(count)$, count=$count$$endif$)
>>
setWaitCount(name, waitCount) ::= <<
-<setWaitCount var="{$name$}"$if(waitCount)$ count="$waitCount$"$endif$/>
+setWaitCount($name$$if(waitCount)$, count=$waitCount$$endif$)
>>
unitStart(type, outputs) ::= <<
- <unitStart type="$type$" outputs="$outputs$"/>
+ unitStart("$type$", outputs="$outputs$")
>>
unitEnd(type) ::= <<
- <unitEnd type="$type$"/>
+ unitEnd("$type$")
>>
operator ::= [
- "+":"vdlop:sum",
- "-":"vdlop:subtraction",
- "*":"vdlop:product",
- "/":"vdlop:fquotient",
- "%/":"vdlop:iquotient",
- "%%":"vdlop:remainder",
- "<=":"vdlop:le",
- ">=":"vdlop:ge",
- ">":"vdlop:gt",
- "<":"vdlop:lt",
- "<=":"vdlop:le",
- ">=":"vdlop:ge",
- ">":"vdlop:gt",
- "<":"vdlop:lt",
- "==":"vdlop:eq",
- "!=":"vdlop:ne",
- "&&":"vdlop:and",
- "||":"vdlop:or"
+ "+":"swiftop:sum",
+ "-":"swiftop:subtraction",
+ "*":"swiftop:product",
+ "/":"swiftop:fquotient",
+ "%/":"swiftop:iquotient",
+ "%%":"swiftop:remainder",
+ "<=":"swiftop:le",
+ ">=":"swiftop:ge",
+ ">":"swiftop:gt",
+ "<":"swiftop:lt",
+ "<=":"swiftop:le",
+ ">=":"swiftop:ge",
+ ">":"swiftop:gt",
+ "<":"swiftop:lt",
+ "==":"swiftop:eq",
+ "!=":"swiftop:ne",
+ "&&":"swiftop:and",
+ "||":"swiftop:or"
]
unaryNegation(exp, datatype) ::= <<
-<vdlop:product>
- <vdl:new type="int" value="-1" />
- $exp$
-</vdlop:product>
+swiftop:product(swift:new("int", value = -1), $exp$)
>>
binaryop(op,left,right,datatype) ::= <<
-<$operator.(op)$>
- $left$
- $right$
-</$operator.(op)$>
+$operator.(op)$($left$, $right$)
>>
not(exp, datatype) ::= <<
-<vdlop:not>
- $exp$
-</vdlop:not>
+swiftop:not($exp$)
>>
id(var, datatype) ::= <<
-<variable>$var$</variable>
+$var$
>>
extractarrayelement(parent, arraychild, datatype) ::= <<
-<vdl:getfieldsubscript>
- <argument name="var">$parent$</argument>
- <argument name="subscript">$arraychild$</argument>
-</vdl:getfieldsubscript>
+swift:getfieldsubscript($parent$, $arraychild$)
>>
extractstructelement(parent, memberchild, datatype) ::= <<
-<getfield>
- <argument name="var">$parent$</argument>
- <argument name="path">$memberchild$</argument>
-</getfield>
+getfield($parent$, $memberchild$)
>>
slicearray(parent, memberchild, datatype) ::= <<
-<sequential>
- <set name="swift#array">$parent$</set>
- <set name="swift#slice">
- <vdl:slicearray>
- <argument name="var"><variable>swift#array</variable></argument>
- <argument name="path">$memberchild$</argument>
- <argument name="type">$datatype$</argument>
- </vdl:slicearray>
- </set>
+sequential(
+ slice := swift:sliceArray($parent$, $memberchild$, $datatype$)
+ logSliceArray($memberchild$, $parent$)
+ slice
+)
+>>
- <log level="debug"><concat>
- <string>SLICEARRAY thread={#thread} slice=</string>
- <vdl:getdatasetprovenanceid var="{swift#slice}" />
- <string> member=$memberchild$ array=</string>
- <vdl:getdatasetprovenanceid var="{swift#array}" />
- </concat></log>
-
- <variable>swift#slice</variable>
-</sequential>
+logSliceArray(memberchild, array) ::= <<
>>
iConst(value, datatype) ::= <<
-<vdl:new type="int" value="$value$" />
+swift:new("int", value=$value$)
>>
fConst(value, datatype) ::= <<
-<vdl:new type="float" value="$value$" />
+swift:new("float", value=$value$)
>>
bConst(value, datatype) ::= <<
-<vdl:new type="boolean" value="$value$" />
+swift:new("boolean", value=$value$)
>>
sConst(value,innervalue,datatype) ::= <<
-<vdl:new type="string" value="$innervalue$" />
+swift:new("string", value="$value$")
>>
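
The Karajan.stg hunks above change what the SwiftScript compiler emits: instead of nested XML elements (<vdlop:sum>, <vdl:new>, ...), every template now renders the new functional Karajan syntax (swiftop:sum(a, b), swift:new("int", value=-1), brace-delimited foreach/if blocks). The templates can be exercised on their own; the sketch below is not part of this commit and assumes the StringTemplate 3 runtime plus a locally readable copy of Karajan.stg, with attribute values chosen purely for illustration.

    import java.io.FileReader;

    import org.antlr.stringtemplate.StringTemplate;
    import org.antlr.stringtemplate.StringTemplateGroup;
    import org.antlr.stringtemplate.language.DefaultTemplateLexer;

    public class RenderBinaryOp {
        public static void main(String[] args) throws Exception {
            // DefaultTemplateLexer handles the $...$ delimiters used in Karajan.stg.
            StringTemplateGroup group = new StringTemplateGroup(
                    new FileReader("Karajan.stg"), DefaultTemplateLexer.class);
            StringTemplate t = group.getInstanceOf("binaryop");
            t.setAttribute("op", "+");
            t.setAttribute("left", "a");      // in the real compiler these are
            t.setAttribute("right", "b");     // themselves rendered sub-templates
            t.setAttribute("datatype", "int");
            // With the new template this prints: swiftop:sum(a, b)
            System.out.println(t.toString());
        }
    }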
Added: branches/faster/src/org/globus/swift/catalog/site/Parser.java
===================================================================
--- branches/faster/src/org/globus/swift/catalog/site/Parser.java (rev 0)
+++ branches/faster/src/org/globus/swift/catalog/site/Parser.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -0,0 +1,55 @@
+//----------------------------------------------------------------------
+//This code is developed as part of the Java CoG Kit project
+//The terms of the license can be found at http://www.cogkit.org/license
+//This message may not be removed or altered.
+//----------------------------------------------------------------------
+
+/*
+ * Created on Jan 7, 2013
+ */
+package org.globus.swift.catalog.site;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URL;
+
+import javax.xml.XMLConstants;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.dom.DOMSource;
+import javax.xml.validation.Schema;
+import javax.xml.validation.SchemaFactory;
+import javax.xml.validation.Validator;
+
+import org.w3c.dom.Document;
+import org.xml.sax.SAXException;
+
+public class Parser {
+ public static final String SCHEMA_RESOURCE = "swift-sites-1.0.xsd";
+
+ private File src;
+
+ public Parser(String fileName) {
+ this.src = new File(fileName);
+ }
+
+ public Document parse() throws ParserConfigurationException, SAXException, IOException {
+ URL schemaURL = Parser.class.getClassLoader().getResource(SCHEMA_RESOURCE);
+
+ if (schemaURL == null) {
+ throw new IllegalStateException("Sites schema not found in resources: " + SCHEMA_RESOURCE);
+ }
+
+ SchemaFactory sfactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
+ Schema schema = sfactory.newSchema(schemaURL);
+
+ DocumentBuilderFactory dfactory = DocumentBuilderFactory.newInstance();
+ dfactory.setSchema(schema);
+ DocumentBuilder dbuilder = dfactory.newDocumentBuilder();
+ Document doc = dbuilder.parse(src);
+
+ return doc;
+ }
+
+}
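
Parser.java above wires schema validation into site catalog parsing: the swift-sites-1.0.xsd resource is located on the classpath, attached to the DocumentBuilderFactory, and the sites file is parsed into a DOM Document. A hypothetical caller (not in this commit; the file path is illustrative) would look like:

    import org.globus.swift.catalog.site.Parser;
    import org.w3c.dom.Document;
    import org.w3c.dom.Element;

    public class ParseSites {
        public static void main(String[] args) throws Exception {
            Parser parser = new Parser("etc/sites.xml");
            // Parses the file with swift-sites-1.0.xsd attached for validation.
            Document doc = parser.parse();
            Element root = doc.getDocumentElement();
            System.out.println("root element: " + root.getTagName());
        }
    }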
Modified: branches/faster/src/org/globus/swift/catalog/transformation/File.java
===================================================================
--- branches/faster/src/org/globus/swift/catalog/transformation/File.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/globus/swift/catalog/transformation/File.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1155,6 +1155,8 @@
* @param msg the message to be logged.
*/
protected void logMessage( String msg ) {
- logger.debug(msg);
+ if (logger.isDebugEnabled()) {
+ logger.debug(msg);
+ }
}
}
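
The File.java change is the usual log4j guard: the message string is only built when DEBUG is actually enabled for the logger, which matters on hot paths that log large or expensive-to-format values. A minimal illustration of the pattern (log4j 1.2; class and method names here are made up):

    import org.apache.log4j.Logger;

    class GuardExample {
        private static final Logger logger = Logger.getLogger(GuardExample.class);

        void record(String id, Object payload) {
            // Concatenation and payload.toString() are skipped entirely
            // when the DEBUG level is disabled.
            if (logger.isDebugEnabled()) {
                logger.debug("entry " + id + " -> " + payload);
            }
        }
    }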
Modified: branches/faster/src/org/globus/swift/data/Action.java
===================================================================
--- branches/faster/src/org/globus/swift/data/Action.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/globus/swift/data/Action.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -17,92 +17,103 @@
package org.globus.swift.data;
+import k.rt.Stack;
+import k.thr.LWThread;
+
import org.apache.log4j.Logger;
-
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.compiled.nodes.InternalFunction;
import org.globus.cog.karajan.util.BoundContact;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.functions.FunctionsCollection;
-import org.globus.swift.data.policy.Broadcast;
-import org.globus.swift.data.policy.External;
import org.globus.swift.data.policy.Policy;
/**
* Karajan-accessible CDM functions that change something.
* */
-public class Action extends FunctionsCollection {
- private static final Logger logger =
- Logger.getLogger(Action.class);
-
- public static final Arg PA_SRCFILE =
- new Arg.Positional("srcfile");
- public static final Arg PA_SRCDIR =
- new Arg.Positional("srcdir");
- public static final Arg PA_DESTHOST =
- new Arg.Positional("desthost");
- public static final Arg PA_DESTDIR =
- new Arg.Positional("destdir");
+public class Action {
+ private static final Logger logger = Logger.getLogger(Action.class);
- static {
- setArguments("cdm_broadcast", new Arg[]{ PA_SRCFILE,
- PA_SRCDIR });
- setArguments("cdm_external", new Arg[]{ PA_SRCFILE,
- PA_SRCDIR,
- PA_DESTHOST,
- PA_DESTDIR });
- setArguments("cdm_wait", new Arg[]{});
- }
-
/**
Register a file for broadcast by CDM.
The actual broadcast is triggered by {@link cdm_wait}.
*/
- public void cdm_broadcast(VariableStack stack)
- throws ExecutionException {
- String srcfile = (String) PA_SRCFILE.getValue(stack);
- String srcdir = (String) PA_SRCDIR.getValue(stack);
+ public static class Broadcast extends InternalFunction {
+ private ArgRef<String> srcfile;
+ private ArgRef<String> srcdir;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("srcfile", "srcdir"));
+ }
- logger.debug("cdm_broadcast()");
+ @Override
+ protected void runBody(LWThread thr) {
+ Stack stack = thr.getStack();
+ String srcfile = this.srcfile.getValue(stack);
+ String srcdir = this.srcdir.getValue(stack);
+
+ logger.debug("cdm_broadcast()");
- Policy policy = Director.lookup(srcfile);
+ Policy policy = Director.lookup(srcfile);
- if (!(policy instanceof Broadcast)) {
- throw new RuntimeException
- ("Attempting to BROADCAST the wrong file: " +
- srcdir + " " + srcfile + " -> " + policy);
+ if (!(policy instanceof org.globus.swift.data.policy.Broadcast)) {
+ throw new RuntimeException("Attempting to BROADCAST the wrong file: " +
+ srcdir + " " + srcfile + " -> " + policy);
+ }
+
+ if (srcdir == "") {
+ srcdir = ".";
+ }
+
+ Director.addBroadcast(srcdir, srcfile);
}
+ }
+
+ public static class External extends InternalFunction {
+ private ArgRef<String> srcfile;
+ private ArgRef<String> srcdir;
+ private ArgRef<BoundContact> desthost;
+ private ArgRef<String> destdir;
- if (srcdir == "") {
- srcdir = ".";
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("srcfile", "srcdir", "desthost", "destdir"));
}
- Director.addBroadcast(srcdir, srcfile);
- }
-
- public void cdm_external(VariableStack stack)
- throws ExecutionException
- {
- String srcfile = (String) PA_SRCFILE.getValue(stack);
- String srcdir = (String) PA_SRCDIR.getValue(stack);
- BoundContact bc = (BoundContact) PA_DESTHOST.getValue(stack);
- String destdir = (String) PA_DESTDIR.getValue(stack);
+ @Override
+ protected void runBody(LWThread thr) {
+ Stack stack = thr.getStack();
+ String srcfile = this.srcfile.getValue(stack);
+ String srcdir = this.srcdir.getValue(stack);
+ BoundContact bc = this.desthost.getValue(stack);
+ String destdir = this.destdir.getValue(stack);
- if (srcdir.length() == 0)
- srcdir = ".";
- String desthost = bc.getHost();
- String workdir = (String) bc.getProperty("workdir");
-
- External.doExternal(srcfile, srcdir,
- desthost, workdir+"/"+destdir);
+ if (srcdir.length() == 0) {
+ srcdir = ".";
+ }
+ String desthost = bc.getHost();
+ String workdir = (String) bc.getProperty("workdir");
+
+ org.globus.swift.data.policy.External.doExternal(srcfile, srcdir,
+ desthost, workdir + "/" + destdir);
+ }
}
/**
Wait until CDM has ensured that all data has been propagated.
*/
- public void cdm_wait(VariableStack stack)
- throws ExecutionException {
- logger.debug("cdm_wait()");
- Director.doBroadcast();
+ public static class Wait extends InternalFunction {
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params());
+ }
+
+ @Override
+ protected void runBody(LWThread thr) {
+ // TODO busy waiting is not good
+ logger.debug("cdm_wait()");
+ Director.doBroadcast();
+ }
}
}
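
Action.java shows the general shape of the conversion away from FunctionsCollection: each Karajan-visible function becomes its own InternalFunction subclass, declares its parameters once in getSignature(), and reads them in runBody() through ArgRef fields bound against the lightweight thread's stack. A hypothetical function in the same style (name and behaviour are illustrative, not part of the commit):

    import k.rt.Stack;
    import k.thr.LWThread;

    import org.apache.log4j.Logger;
    import org.globus.cog.karajan.analyzer.ArgRef;
    import org.globus.cog.karajan.analyzer.Signature;
    import org.globus.cog.karajan.compiled.nodes.InternalFunction;

    public class Touch extends InternalFunction {
        private static final Logger logger = Logger.getLogger(Touch.class);

        // Bound by name to the parameter declared in getSignature().
        private ArgRef<String> path;

        @Override
        protected Signature getSignature() {
            return new Signature(params("path"));
        }

        @Override
        protected void runBody(LWThread thr) {
            Stack stack = thr.getStack();
            String path = this.path.getValue(stack);
            if (logger.isDebugEnabled()) {
                logger.debug("touch(" + path + ")");
            }
        }
    }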
Modified: branches/faster/src/org/globus/swift/data/Query.java
===================================================================
--- branches/faster/src/org/globus/swift/data/Query.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/globus/swift/data/Query.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -17,57 +17,75 @@
package org.globus.swift.data;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.functions.FunctionsCollection;
-
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Param;
+import org.globus.cog.karajan.compiled.nodes.functions.AbstractSingleValuedFunction;
+import org.globus.cog.karajan.compiled.nodes.functions.NullaryOp;
import org.globus.swift.data.policy.Policy;
/**
Karajan-accessible read-queries to CDM functionality.
*/
-public class Query extends FunctionsCollection {
+public class Query {
private static final Logger logger = Logger.getLogger(Query.class);
- public static final Arg PA_QUERY = new Arg.Positional("query");
- public static final Arg PA_NAME = new Arg.Positional("name");
-
- static {
- setArguments("cdm_query", new Arg[]{ PA_QUERY });
- setArguments("cdm_get", new Arg[]{ PA_NAME });
- setArguments("cdm_file", new Arg[]{});
- }
-
/**
Do CDM policy lookup based on the CDM file.
*/
- public String cdm_query(VariableStack stack) throws ExecutionException {
- String file = (String) PA_QUERY.getValue(stack);
- Policy policy = Director.lookup(file);
- logger.debug("Director.lookup(): " + file + " -> " + policy);
- return policy.toString();
+ public static class Q extends AbstractSingleValuedFunction {
+ private ArgRef<String> query;
+
+ @Override
+ protected Param[] getParams() {
+ return params("query");
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ String file = query.getValue(stack);
+ Policy policy = Director.lookup(file);
+ if (logger.isDebugEnabled()) {
+ logger.debug("Director.lookup(): " + file + " -> " + policy);
+ }
+ return policy.toString();
+ }
}
/**
Get a CDM property
*/
- public String cdm_get(VariableStack stack) throws ExecutionException {
- String name = (String) PA_NAME.getValue(stack);
- String value = Director.property(name);
- return value;
+ public static class Get extends AbstractSingleValuedFunction {
+ private ArgRef<String> name;
+
+ @Override
+ protected Param[] getParams() {
+ return params("name");
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ String name = this.name.getValue(stack);
+ String value = Director.property(name);
+ return value;
+ }
}
+
/**
Obtain the CDM policy file given on the command-line,
conventionally "fs.data". If not set, returns an empty String.
*/
- public String cdm_file(VariableStack stack) throws ExecutionException {
- String file = "";
- if (Director.policyFile != null)
- file = Director.policyFile.toString();
- return file;
+ public static class File extends NullaryOp<String> {
+ @Override
+ protected String value() {
+ String file = "";
+ if (Director.policyFile != null) {
+ file = Director.policyFile.toString();
+ }
+ return file;
+ }
}
}
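
Query.java follows the same migration, but these functions return values, so they extend AbstractSingleValuedFunction (parameters come from getParams(), the result from function(Stack)) or NullaryOp when they take no arguments at all. A hypothetical read-only query in that style (illustrative only):

    import k.rt.Stack;

    import org.globus.cog.karajan.analyzer.ArgRef;
    import org.globus.cog.karajan.analyzer.Param;
    import org.globus.cog.karajan.compiled.nodes.functions.AbstractSingleValuedFunction;

    public class Describe extends AbstractSingleValuedFunction {
        private ArgRef<String> name;

        @Override
        protected Param[] getParams() {
            return params("name");
        }

        @Override
        public Object function(Stack stack) {
            // The returned object becomes the Karajan-level value of the call.
            return "cdm:" + name.getValue(stack);
        }
    }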
Modified: branches/faster/src/org/griphyn/vdl/karajan/ArrayIndexFutureList.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/ArrayIndexFutureList.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/ArrayIndexFutureList.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -27,22 +27,19 @@
import java.util.Map;
import java.util.Set;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.events.EventBus;
-import org.globus.cog.karajan.workflow.events.EventTargetPair;
-import org.globus.cog.karajan.workflow.futures.FutureEvaluationException;
-import org.globus.cog.karajan.workflow.futures.FutureIterator;
-import org.globus.cog.karajan.workflow.futures.FutureList;
-import org.globus.cog.karajan.workflow.futures.FutureListener;
-import org.globus.cog.karajan.workflow.futures.FutureNotYetAvailable;
-import org.globus.cog.karajan.workflow.futures.ListenerStackPair;
+import k.rt.FutureListener;
+
+import org.globus.cog.karajan.futures.FutureEvaluationException;
+import org.globus.cog.karajan.futures.FutureIterator;
+import org.globus.cog.karajan.futures.FutureList;
+import org.globus.cog.karajan.futures.FutureNotYetAvailable;
import org.griphyn.vdl.mapping.ArrayDataNode;
import org.griphyn.vdl.mapping.DSHandle;
public class ArrayIndexFutureList implements FutureList, FutureWrapper {
private ArrayList<Object> keys;
private Map<?, ?> values;
- private List<ListenerStackPair> listeners;
+ private LinkedList<FutureListener> listeners;
private ArrayDataNode node;
private boolean purged;
@@ -86,10 +83,6 @@
return new FuturePairIterator(this);
}
- public FutureIterator futureIterator(VariableStack stack) {
- return new FuturePairIterator(this, stack);
- }
-
public void close() {
throw new UnsupportedOperationException("Not used here");
}
@@ -122,52 +115,38 @@
return node;
}
- public void addModificationAction(FutureListener target, VariableStack stack) {
- synchronized(node) {
+ @Override
+ public void addListener(FutureListener l) {
+ boolean closed;
+ synchronized(this) {
if (listeners == null) {
- listeners = new LinkedList<ListenerStackPair>();
+ listeners = new LinkedList<FutureListener>();
}
- listeners.add(new ListenerStackPair(target, stack));
- WaitingThreadsMonitor.addThread(stack, node);
- if (!node.isClosed()) {
- return;
- }
+ listeners.add(l);
+ closed = isClosed();
}
- // closed == true;
- notifyListeners();
+ if (closed) {
+ notifyListeners();
+ }
}
-
+
public void notifyListeners() {
- List<ListenerStackPair> l;
- synchronized(node) {
+ List<FutureListener> ls;
+ synchronized(this) {
if (listeners == null) {
return;
}
-
- l = listeners;
+ ls = listeners;
listeners = null;
}
-
- for (final ListenerStackPair lsp : l) {
- WaitingThreadsMonitor.removeThread(lsp.stack);
- EventBus.post(new Runnable() {
- @Override
- public void run() {
- lsp.listener.futureModified(ArrayIndexFutureList.this, lsp.stack);
- }
- });
+ for (FutureListener l : ls) {
+ l.futureUpdated(this);
}
}
-
- public EventTargetPair[] getListenerEvents() {
- synchronized(node) {
- if (listeners != null) {
- return listeners.toArray(new EventTargetPair[0]);
- }
- else {
- return null;
- }
- }
+
+ @Override
+ public synchronized List<FutureListener> getListeners() {
+ return new LinkedList<FutureListener>(listeners);
}
public int size() {
Modified: branches/faster/src/org/griphyn/vdl/karajan/AssertFailedException.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/AssertFailedException.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/AssertFailedException.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -18,7 +18,7 @@
package org.griphyn.vdl.karajan;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.ExecutionException;
/**
* Generated only by SwiftScript @assert().
Modified: branches/faster/src/org/griphyn/vdl/karajan/DSHandleFutureWrapper.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/DSHandleFutureWrapper.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/DSHandleFutureWrapper.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -23,71 +23,20 @@
import java.util.LinkedList;
import java.util.List;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.events.EventBus;
-import org.globus.cog.karajan.workflow.events.EventTargetPair;
-import org.globus.cog.karajan.workflow.futures.FutureEvaluationException;
-import org.globus.cog.karajan.workflow.futures.FutureListener;
-import org.globus.cog.karajan.workflow.futures.ListenerStackPair;
+import k.rt.FutureListener;
+
+import org.globus.cog.karajan.futures.FutureEvaluationException;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
public class DSHandleFutureWrapper implements FutureWrapper {
- private LinkedList<ListenerStackPair> listeners;
- private final AbstractDataNode node;
+ private LinkedList<FutureListener> listeners;
+ private AbstractDataNode node;
public DSHandleFutureWrapper(AbstractDataNode node) {
this.node = node;
}
- public void addModificationAction(FutureListener target, VariableStack stack) {
- /**
- * TODO So, the strategy is the following: getValue() or something else
- * throws a future exception; then some entity catches that and calls
- * this method. There is no way to ensure that the future was not closed
- * in the mean time. What has to be done is that this method should
- * check if the future was closed or modified at the time of the call of
- * this method and call notifyListeners().
- */
- synchronized(node) {
- if (listeners == null) {
- listeners = new LinkedList<ListenerStackPair>();
- }
- listeners.add(new ListenerStackPair(target, stack));
- WaitingThreadsMonitor.addThread(stack, node);
- if (!node.isClosed()) {
- return;
- }
- }
- // closed == true;
- notifyListeners();
- }
-
- public DSHandle getHandle() {
- return node;
- }
-
- public void notifyListeners() {
- List<ListenerStackPair> l;
- synchronized(node) {
- if (listeners == null) {
- return;
- }
-
- l = listeners;
- listeners = null;
- }
-
- for (final ListenerStackPair lsp : l) {
- WaitingThreadsMonitor.removeThread(lsp.stack);
- EventBus.post(new Runnable() {
- public void run() {
- lsp.listener.futureModified(DSHandleFutureWrapper.this, lsp.stack);
- }
- });
- }
- }
-
public void close() {
node.closeShallow();
}
@@ -108,6 +57,37 @@
node.setValue(e);
}
+ @Override
+ public void addListener(FutureListener l) {
+ boolean closed;
+ synchronized(this) {
+ if (listeners == null) {
+ listeners = new LinkedList<FutureListener>();
+ }
+ WaitingThreadsMonitor.addThread(l, node);
+ listeners.add(l);
+ closed = isClosed();
+ }
+ if (closed) {
+ notifyListeners();
+ }
+ }
+
+ public void notifyListeners() {
+ List<FutureListener> ls;
+ synchronized(this) {
+ if (listeners == null) {
+ return;
+ }
+ ls = listeners;
+ listeners = null;
+ }
+ for (FutureListener l : ls) {
+ WaitingThreadsMonitor.removeThread(l);
+ l.futureUpdated(this);
+ }
+ }
+
public int listenerCount() {
synchronized(node) {
if (listeners == null) {
@@ -119,20 +99,17 @@
}
}
- private static final EventTargetPair[] EVENT_ARRAY = new EventTargetPair[0];
+ @Override
+ public synchronized List<FutureListener> getListeners() {
+ return new LinkedList<FutureListener>(listeners);
+ }
- public EventTargetPair[] getListenerEvents() {
- synchronized(node) {
- if (listeners != null) {
- return listeners.toArray(EVENT_ARRAY);
- }
- else {
- return null;
- }
- }
- }
-
- public String toString() {
+ public String toString() {
return "F/" + node;
}
+
+ @Override
+ public DSHandle getHandle() {
+ return node;
+ }
}
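
ArrayIndexFutureList and DSHandleFutureWrapper now share the same notification discipline: addListener() registers under the lock and notifies immediately if the future is already closed, while notifyListeners() swaps the listener list out under the lock and runs the callbacks outside it, so a listener that re-enters the future cannot deadlock and nobody is notified twice. Distilled into a standalone sketch (the Listener interface here is illustrative, standing in for k.rt.FutureListener):

    import java.util.LinkedList;
    import java.util.List;

    class NotifyOutsideLock {
        interface Listener { void updated(NotifyOutsideLock source); }

        private LinkedList<Listener> listeners;
        private boolean closed;

        public void addListener(Listener l) {
            boolean notifyNow;
            synchronized (this) {
                if (listeners == null) {
                    listeners = new LinkedList<Listener>();
                }
                listeners.add(l);
                notifyNow = closed;      // already closed: never miss the event
            }
            if (notifyNow) {
                notifyListeners();
            }
        }

        public void close() {
            synchronized (this) {
                closed = true;
            }
            notifyListeners();
        }

        public void notifyListeners() {
            List<Listener> ls;
            synchronized (this) {
                if (listeners == null) {
                    return;              // nothing registered, or already drained
                }
                ls = listeners;
                listeners = null;        // swap out under the lock ...
            }
            for (Listener l : ls) {
                l.updated(this);         // ... call back outside it
            }
        }
    }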
Modified: branches/faster/src/org/griphyn/vdl/karajan/FuturePairIterator.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/FuturePairIterator.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/FuturePairIterator.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,13 +20,14 @@
*/
package org.griphyn.vdl.karajan;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.futures.FutureEvaluationException;
-import org.globus.cog.karajan.workflow.futures.FutureIterator;
-import org.globus.cog.karajan.workflow.futures.FutureIteratorIncomplete;
-import org.globus.cog.karajan.workflow.futures.FutureListener;
-import org.globus.cog.karajan.workflow.futures.FutureNotYetAvailable;
+import k.rt.FutureListener;
+import k.thr.LWThread;
+import org.globus.cog.karajan.futures.FutureEvaluationException;
+import org.globus.cog.karajan.futures.FutureIterator;
+import org.globus.cog.karajan.futures.FutureIteratorIncomplete;
+import org.globus.cog.karajan.futures.FutureNotYetAvailable;
+
public class FuturePairIterator implements FutureIterator {
private ArrayIndexFutureList array;
private int crt;
@@ -35,10 +36,6 @@
this.array = array;
}
- public FuturePairIterator(ArrayIndexFutureList array, VariableStack stack) {
- this.array = array;
- }
-
public synchronized boolean hasAvailable() {
return crt < array.available();
}
@@ -108,11 +105,15 @@
public Object getValue() {
return this;
}
+
+
- public synchronized void addModificationAction(FutureListener target, VariableStack stack) {
- WaitingThreadsMonitor.addThread(stack, array.getHandle());
- array.addModificationAction(target, stack);
- }
+ @Override
+ public void addListener(FutureListener l) {
+ WaitingThreadsMonitor.addThread(LWThread.currentThread(), array.getHandle());
+ array.addListener(l);
+
+ }
private static volatile int cnt = 0;
Modified: branches/faster/src/org/griphyn/vdl/karajan/FutureTracker.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/FutureTracker.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/FutureTracker.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,16 +20,12 @@
import java.util.HashMap;
import java.util.Map;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.futures.Future;
+import k.rt.Future;
+
import org.griphyn.vdl.mapping.DSHandle;
public class FutureTracker {
public static final String VAR_NAME = "#swift:futureTracker";
-
- public static FutureTracker get(VariableStack stack) {
- return (FutureTracker) stack.firstFrame().getVar(VAR_NAME);
- }
private static final FutureTracker ft = new FutureTracker();
Modified: branches/faster/src/org/griphyn/vdl/karajan/FutureWrapper.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/FutureWrapper.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/FutureWrapper.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -17,16 +17,19 @@
package org.griphyn.vdl.karajan;
-import org.globus.cog.karajan.workflow.events.EventTargetPair;
-import org.globus.cog.karajan.workflow.futures.Future;
+import java.util.List;
+
+import k.rt.Future;
+import k.rt.FutureListener;
+
import org.griphyn.vdl.mapping.DSHandle;
public interface FutureWrapper extends Future {
- void notifyListeners();
-
int listenerCount();
-
- EventTargetPair[] getListenerEvents();
+ List<FutureListener> getListeners();
+
+ void notifyListeners();
+
DSHandle getHandle();
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/HangChecker.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/HangChecker.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/HangChecker.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -31,27 +31,29 @@
import java.util.Timer;
import java.util.TimerTask;
+import k.rt.Context;
+import k.rt.ExecutionException;
+import k.thr.LWThread;
+import k.thr.Scheduler;
+
import org.apache.log4j.Logger;
+import org.globus.cog.karajan.analyzer.VariableNotFoundException;
+import org.globus.cog.karajan.compiled.nodes.grid.SchedulerNode;
import org.globus.cog.karajan.scheduler.WeightedHostScoreScheduler;
-import org.globus.cog.karajan.stack.VariableNotFoundException;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.ThreadingContext;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.events.EventBus;
-import org.globus.cog.karajan.workflow.nodes.grid.SchedulerNode;
import org.griphyn.vdl.mapping.DSHandle;
+import com.sun.org.apache.xpath.internal.VariableStack;
+
public class HangChecker extends TimerTask {
public static final Logger logger = Logger.getLogger(HangChecker.class);
- public static final int CHECK_INTERVAL = 10000;
+ public static final int CHECK_INTERVAL = 1000;
public static final int MAX_CYCLES = 10;
private Timer timer;
- private long lastEventCount;
- private VariableStack stack;
+ private Context context;
- public HangChecker(VariableStack stack) throws ExecutionException {
- this.stack = stack;
+ public HangChecker(Context context) throws ExecutionException {
+ this.context = context;
}
public void start() {
@@ -61,11 +63,11 @@
public void run() {
try {
- WeightedHostScoreScheduler s = (WeightedHostScoreScheduler) stack.getGlobal(SchedulerNode.SCHEDULER);
+ WeightedHostScoreScheduler s = (WeightedHostScoreScheduler) context.getAttribute(SchedulerNode.CONTEXT_ATTR_NAME);
if (s != null) {
int running = s.getRunning();
boolean allOverloaded = s.allOverloaded();
- if (running == 0 && EventBus.eventCount == lastEventCount && !allOverloaded) {
+ if (running == 0 && !Scheduler.getScheduler().isAnythingRunning() && !allOverloaded) {
logger.warn("No events in " + (CHECK_INTERVAL / 1000) + "s.");
ByteArrayOutputStream os = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(os);
@@ -84,7 +86,6 @@
ps.close();
}
}
- lastEventCount = EventBus.eventCount;
}
catch (Exception e) {
logger.warn("Exception caught during hang check", e);
@@ -92,10 +93,10 @@
}
private void findThreadsToBlame(PrintStream ps, Graph g) {
- Map<VariableStack, DSHandle> wt = WaitingThreadsMonitor.getAllThreads();
- Set<VariableStack> sl = g.nodeSet();
- Set<VariableStack> loners = new HashSet<VariableStack>(wt.keySet());
- for (VariableStack s : sl) {
+ Map<LWThread, DSHandle> wt = WaitingThreadsMonitor.getAllThreads();
+ Set<LWThread> sl = g.nodeSet();
+ Set<LWThread> loners = new HashSet<LWThread>(wt.keySet());
+ for (LWThread s : sl) {
for (Graph.Edge e : g.getEdgesFrom(s)) {
loners.remove(e.to);
}
@@ -103,7 +104,7 @@
if (!loners.isEmpty()) {
ps.println();
ps.println("The following threads are independently hung:");
- for (VariableStack s : loners) {
+ for (LWThread s : loners) {
Monitor.dumpThread(ps, s, wt.get(s));
ps.println();
}
@@ -112,14 +113,14 @@
}
private Graph buildGraph() throws VariableNotFoundException {
- Map<VariableStack, List<DSHandle>> ot = WaitingThreadsMonitor.getOutputs();
- Map<VariableStack, DSHandle> wt = WaitingThreadsMonitor.getAllThreads();
- Map<DSHandle, List<VariableStack>> rwt = new HashMap<DSHandle, List<VariableStack>>();
+ Map<LWThread, List<DSHandle>> ot = WaitingThreadsMonitor.getOutputs();
+ Map<LWThread, DSHandle> wt = WaitingThreadsMonitor.getAllThreads();
+ Map<DSHandle, List<LWThread>> rwt = new HashMap<DSHandle, List<LWThread>>();
- for (Map.Entry<VariableStack, DSHandle> e : wt.entrySet()) {
- List<VariableStack> l = rwt.get(e.getValue());
+ for (Map.Entry<LWThread, DSHandle> e : wt.entrySet()) {
+ List<LWThread> l = rwt.get(e.getValue());
if (l == null) {
- l = new LinkedList<VariableStack>();
+ l = new LinkedList<LWThread>();
rwt.put(e.getValue(), l);
}
l.add(e.getKey());
@@ -128,21 +129,20 @@
Graph g = new Graph();
// if n1 -> n2, then n1 produces an output that is used by n2
- for (Map.Entry<VariableStack, List<DSHandle>> e : ot.entrySet()) {
+ for (Map.Entry<LWThread, List<DSHandle>> e : ot.entrySet()) {
for (DSHandle h : e.getValue()) {
- List<VariableStack> sl = rwt.get(h);
+ List<LWThread> sl = rwt.get(h);
if (sl != null) {
- for (VariableStack s : sl) {
+ for (LWThread s : sl) {
g.addEdge(e.getKey(), s, h);
}
}
}
- ThreadingContext tc;
- tc = ThreadingContext.get(e.getKey());
- for (VariableStack stk : ot.keySet()) {
- if (tc.isStrictlySubContext(ThreadingContext.get(stk))) {
+ LWThread tc = e.getKey();
+ for (LWThread stk : ot.keySet()) {
+ if (isStrictlyChildOf(tc, stk)) {
g.addEdge(e.getKey(), stk, null);
}
}
@@ -151,14 +151,28 @@
return g;
}
+ private boolean isStrictlyChildOf(LWThread child, LWThread parent) {
+ if (child == parent) {
+ return false;
+ }
+ child = child.getParent();
+ while (child != null) {
+ if (child == parent) {
+ return true;
+ }
+ child = child.getParent();
+ }
+ return false;
+ }
+
private static boolean findCycles(PrintStream ps, Graph g) {
System.out.print("Finding dependency loops...");
System.out.flush();
- Set<VariableStack> seen = new HashSet<VariableStack>();
+ Set<LWThread> seen = new HashSet<LWThread>();
LinkedList<Object> cycle = new LinkedList<Object>();
List<LinkedList<Object>> cycles = new ArrayList<LinkedList<Object>>();
- for (VariableStack t : g.nodeSet()) {
+ for (LWThread t : g.nodeSet()) {
seen.clear();
cycle.clear();
findLoop(t, g, seen, cycle, cycles);
@@ -195,7 +209,7 @@
else {
ps.println("\tthe above must complete before the block below can complete:");
}
- for (String t : Monitor.getSwiftTrace((VariableStack) o)) {
+ for (String t : Monitor.getSwiftTrace((LWThread) o)) {
ps.println("\t\t" + t);
}
}
@@ -258,8 +272,8 @@
if (a == null || b == null) {
return a == b;
}
- VariableStack sa = (VariableStack) a;
- VariableStack sb = (VariableStack) b;
+ LWThread sa = (LWThread) a;
+ LWThread sb = (LWThread) b;
List<Object> ta = Monitor.getSwiftTraceElements(sa);
List<Object> tb = Monitor.getSwiftTraceElements(sb);
@@ -276,7 +290,7 @@
return true;
}
- private static void findLoop(VariableStack t, Graph g, Set<VariableStack> seen, LinkedList<Object> cycle, List<LinkedList<Object>> cycles) {
+ private static void findLoop(LWThread t, Graph g, Set<LWThread> seen, LinkedList<Object> cycle, List<LinkedList<Object>> cycles) {
if (cycles.size() > MAX_CYCLES) {
return;
}
@@ -310,18 +324,18 @@
public static class Graph {
public static class Edge {
- public final VariableStack to;
+ public final LWThread to;
public final DSHandle contents;
- public Edge(VariableStack to, DSHandle contents) {
+ public Edge(LWThread to, DSHandle contents) {
this.to = to;
this.contents = contents;
}
}
- private Map<VariableStack, List<Edge>> outEdges = new HashMap<VariableStack, List<Edge>>();
+ private Map<LWThread, List<Edge>> outEdges = new HashMap<LWThread, List<Edge>>();
- public void addEdge(VariableStack from, VariableStack to, DSHandle contents) {
+ public void addEdge(LWThread from, LWThread to, DSHandle contents) {
List<Edge> l = outEdges.get(from);
if (l == null) {
l = new ArrayList<Edge>();
@@ -331,7 +345,7 @@
}
public void dump(PrintStream ps) {
- for (Map.Entry<VariableStack, List<Edge>> e : outEdges.entrySet()) {
+ for (Map.Entry<LWThread, List<Edge>> e : outEdges.entrySet()) {
for (Edge edge : e.getValue()) {
String tcf = getThreadingContext(e.getKey());
String tct = getThreadingContext(edge.to);
@@ -340,16 +354,16 @@
}
}
- private String getThreadingContext(VariableStack s) {
+ private String getThreadingContext(LWThread s) {
try {
- return String.valueOf(ThreadingContext.get(s));
+ return String.valueOf(s);
}
catch (VariableNotFoundException e) {
return "?";
}
}
- public List<Edge> getEdgesFrom(VariableStack t) {
+ public List<Edge> getEdgesFrom(LWThread t) {
List<Edge> l = outEdges.get(t);
if (l == null) {
return Collections.emptyList();
@@ -359,7 +373,7 @@
}
}
- public Set<VariableStack> nodeSet() {
+ public Set<LWThread> nodeSet() {
return outEdges.keySet();
}
}
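
HangChecker now builds its waits-for graph over LWThread instead of VariableStack: an edge t1 -> t2 means t1 produces a DSHandle that t2 is blocked on, and findCycles()/findLoop() walk that graph to report dependency loops. The class's own traversal carries extra bookkeeping (edge contents, the MAX_CYCLES cap, Swift trace formatting); the sketch below is only the underlying depth-first cycle search on a generic adjacency map, not the actual implementation:

    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    class CycleFinder<N> {
        // node -> nodes it is waiting on
        private final Map<N, List<N>> edges;

        CycleFinder(Map<N, List<N>> edges) {
            this.edges = edges;
        }

        /** Returns a path whose last node closes a cycle, or an empty list if none. */
        List<N> findCycle() {
            Set<N> visited = new HashSet<N>();
            for (N start : edges.keySet()) {
                List<N> path = new ArrayList<N>();
                if (dfs(start, visited, new HashSet<N>(), path)) {
                    return path;
                }
            }
            return new ArrayList<N>();
        }

        private boolean dfs(N node, Set<N> visited, Set<N> onPath, List<N> path) {
            if (onPath.contains(node)) {
                path.add(node);              // hit a node already on the path: cycle closed
                return true;
            }
            if (visited.contains(node)) {
                return false;                // fully explored earlier, no new cycle here
            }
            visited.add(node);
            onPath.add(node);
            path.add(node);
            List<N> out = edges.get(node);
            if (out != null) {
                for (N next : out) {
                    if (dfs(next, visited, onPath, path)) {
                        return true;
                    }
                }
            }
            onPath.remove(node);
            path.remove(path.size() - 1);    // backtrack
            return false;
        }
    }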
Modified: branches/faster/src/org/griphyn/vdl/karajan/Loader.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/Loader.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/Loader.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -39,25 +39,25 @@
import java.util.List;
import java.util.Map;
+import k.rt.Context;
+
import org.apache.log4j.Appender;
+import org.apache.log4j.AsyncAppender;
import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.FileAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
+import org.globus.cog.abstraction.impl.execution.fake.JobSubmissionTaskHandler;
+import org.globus.cog.karajan.compiled.nodes.Main;
+import org.globus.cog.karajan.compiled.nodes.grid.AbstractGridNode;
+import org.globus.cog.karajan.parser.WrapperNode;
import org.globus.cog.karajan.scheduler.WeightedHostScoreScheduler;
-import org.globus.cog.karajan.stack.LinkedStack;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.Monitor;
-import org.globus.cog.karajan.workflow.ElementTree;
-import org.globus.cog.karajan.workflow.PrintStreamChannel;
-import org.globus.cog.karajan.workflow.nodes.FlowElement;
-import org.globus.cog.karajan.workflow.nodes.grid.AbstractGridNode;
+import org.globus.cog.karajan.util.KarajanProperties;
import org.globus.cog.util.ArgumentParser;
import org.globus.cog.util.ArgumentParserException;
import org.globus.cog.util.TextFileLoader;
import org.globus.swift.data.Director;
import org.griphyn.vdl.engine.Karajan;
-import org.griphyn.vdl.karajan.functions.ConfigProperty;
import org.griphyn.vdl.karajan.lib.Execute;
import org.griphyn.vdl.karajan.lib.Log;
import org.griphyn.vdl.karajan.lib.New;
@@ -75,7 +75,6 @@
public static final String ARG_HELP = "help";
public static final String ARG_VERSION = "version";
- public static final String ARG_MONITOR = "monitor";
public static final String ARG_RESUME = "resume";
public static final String ARG_INSTANCE_CONFIG = "config";
public static final String ARG_TYPECHECK = "typecheck";
@@ -114,9 +113,6 @@
version();
System.exit(0);
}
- if (ap.isPresent(ARG_MONITOR)) {
- new Monitor().start();
- }
if (!ap.hasValue(ArgumentParser.DEFAULT)) {
error("No SwiftScript program specified");
}
@@ -177,7 +173,7 @@
System.exit(3);
}
}
- ElementTree tree = null;
+ WrapperNode tree = null;
if (project != null) {
tree = load(project);
}
@@ -186,50 +182,48 @@
shortUsage();
System.exit(1);
}
+
+ tree.setProperty("name", projectName + "-" + runID);
+ tree.setProperty(WrapperNode.FILENAME, project);
- tree.setName(projectName + "-" + runID);
- tree.getRoot().setProperty(FlowElement.FILENAME, project);
+ VDL2Config config = loadConfig(ap);
+ Context context = new Context();
+ context.setArguments(ap.getArguments());
+ context.setAttribute("SWIFT:CONFIG", config);
+ context.setAttribute("projectName", projectName);
+ context.setAttribute("SWIFT:SCRIPT_NAME", projectName);
+ context.setAttribute("SWIFT:RUN_ID", runID);
+ context.setAttribute("SWIFT:DRY_RUN", ap.isPresent(ARG_DRYRUN));
+ context.setAttribute("SWIFT:HOME", System.getProperty("swift.home"));
+
+ Main root = compileKarajan(tree, context);
+ root.setFileName(projectName);
- VDL2ExecutionContext ec = new VDL2ExecutionContext(tree,
- projectName);
- ec.setRunID(runID);
- // no order
- ec.setStdout(new PrintStreamChannel(System.out, true));
-
- VariableStack stack = new LinkedStack(ec);
- VDL2Config config = loadConfig(ap, stack);
+ SwiftExecutor ec = new SwiftExecutor(root);
+
+ List<String> arguments = ap.getArguments();
+ if (ap.hasValue(ARG_RESUME)) {
+ arguments.add("-rlog:resume=" + ap.getStringValue(ARG_RESUME));
+ }
+
addCommandLineProperties(config, ap);
if (logger.isDebugEnabled()) {
logger.debug(config);
}
debugSitesText(config);
-
- if (ap.isPresent(ARG_DRYRUN)) {
- stack.setGlobal(CONST_VDL_OPERATION, VDL_OPERATION_DRYRUN);
- }
- else if (ap.isPresent(ARG_TYPECHECK)) {
- stack.setGlobal(CONST_VDL_OPERATION, VDL_OPERATION_TYPECHECK);
- }
- else {
- stack.setGlobal(CONST_VDL_OPERATION, VDL_OPERATION_RUN);
- }
- stack.setGlobal("swift.home", System.getProperty("swift.home"));
- stack.setGlobal("PATH_SEPARATOR", File.separator);
- List<String> arguments = ap.getArguments();
- if (ap.hasValue(ARG_RESUME)) {
- arguments.add("-rlog:resume=" + ap.getStringValue(ARG_RESUME));
- }
- ec.setArguments(arguments);
- // long start = System.currentTimeMillis();
- new HangChecker(stack).start();
- ec.start(stack);
+ new HangChecker(context).start();
+ long start = System.currentTimeMillis();
+ ec.start(context);
ec.waitFor();
+ long end = System.currentTimeMillis();
+ System.out.println(JobSubmissionTaskHandler.jobsRun + " jobs, " + JobSubmissionTaskHandler.jobsRun * 1000 / (end - start) + " j/s");
if (ec.isFailed()) {
runerror = true;
}
}
catch (Exception e) {
+ e.printStackTrace();
logger.debug("Detailed exception:", e);
error("Could not start execution" + getMessages(e));
}
@@ -248,17 +242,9 @@
private static String getMessages(Throwable e) {
StringBuilder sb = new StringBuilder();
- String lastMessage = null;
while (e != null) {
- String msg = e.getMessage();
- if (msg == null) {
- msg = e.toString();
- }
- sb.append("\n\t");
- if (lastMessage == null || !lastMessage.contains(msg)) {
- sb.append(msg);
- lastMessage = msg;
- }
+ sb.append(":\n\t");
+ sb.append(e.toString());
e = e.getCause();
}
return sb.toString();
@@ -274,7 +260,7 @@
cdmString = ap.getStringValue(ARG_CDMFILE);
File cdmFile = new File(cdmString);
debugText("CDM FILE", cdmFile);
- Director.load(cdmFile);
+ Director.load(cdmFile);
}
catch (IOException e) {
logger.debug("Detailed exception:", e);
@@ -283,6 +269,12 @@
}
}
+ private static Main compileKarajan(WrapperNode n, Context context)
+ throws org.globus.cog.karajan.analyzer.CompilationException {
+ return (Main) n.compile(null, new SwiftRootScope(KarajanProperties.getDefault(),
+ (String) n.getProperty(WrapperNode.FILENAME), context));
+ }
+
public static String compile(String project) throws FileNotFoundException,
ParsingException, IncorrectInvocationException,
CompilationException, IOException {
@@ -295,7 +287,7 @@
File swiftscript = new File(project);
debugText("SWIFTSCRIPT", swiftscript);
String projectBase = project.substring(0, project.lastIndexOf('.'));
- File xml = new File(projectBase + ".swiftx");
+ File xml = new File(projectBase + ".xml");
File kml = new File(projectBase + ".kml");
loadBuildVersion(provenanceEnabled);
@@ -344,6 +336,7 @@
if (recompile) {
VDLt2VDLx.compile(new FileInputStream(swiftscript),
new PrintStream(new FileOutputStream(xml)));
+
try {
FileOutputStream f = new FileOutputStream(kml);
Karajan.compile(xml.getAbsolutePath(), new PrintStream(f), provenanceEnabled);
@@ -364,12 +357,9 @@
// if we leave a kml file around, then a subsequent
// re-run will skip recompiling and cause a different
// error message for the user
- if (e instanceof NullPointerException) {
- e.printStackTrace();
- }
kml.delete();
throw new CompilationException(
- "Failed to convert .swiftx to .kml for " + project, e);
+ "Failed to convert .xml to .kml for " + project, e);
}
}
else {
@@ -425,18 +415,15 @@
}
}
- private static VDL2Config loadConfig(ArgumentParser ap, VariableStack stack)
- throws IOException {
+ private static VDL2Config loadConfig(ArgumentParser ap) throws IOException {
VDL2Config conf;
if (ap.hasValue(ARG_INSTANCE_CONFIG)) {
String configFile = ap.getStringValue(ARG_INSTANCE_CONFIG);
- stack.setGlobal(ConfigProperty.INSTANCE_CONFIG_FILE, configFile);
conf = VDL2Config.getConfig(configFile);
}
else {
conf = (VDL2Config) VDL2Config.getConfig().clone();
}
- stack.setGlobal(ConfigProperty.INSTANCE_CONFIG, conf);
return conf;
}
@@ -554,6 +541,10 @@
File f = new File(logfile);
FileAppender fa = (FileAppender) getAppender(FileAppender.class);
+ AsyncAppender aa = new AsyncAppender();
+ aa.addAppender(fa);
+
+ replaceAppender(fa, aa);
if (fa == null) {
logger.warn("Failed to configure log file name");
}
@@ -583,12 +574,13 @@
Logger.getLogger(Log.class).setLevel(Level.DEBUG);
Logger.getLogger(AbstractGridNode.class).setLevel(Level.DEBUG);
Logger.getLogger(Execute.class).setLevel(Level.DEBUG);
- Logger.getLogger(VDL2ExecutionContext.class).setLevel(Level.INFO);
+ Logger.getLogger(SwiftExecutor.class).setLevel(Level.INFO);
Logger.getLogger(WeightedHostScoreScheduler.class).setLevel(
Level.INFO);
ca.setThreshold(Level.FATAL);
}
else if (ap.isPresent(ARG_MINIMAL_LOGGING)) {
+ Logger.getLogger("swift").setLevel(Level.WARN);
Logger.getRootLogger().setLevel(Level.WARN);
}
else if (ap.isPresent(ARG_REDUCED_LOGGING)) {
@@ -599,6 +591,12 @@
}
+ private static void replaceAppender(FileAppender fa, AsyncAppender aa) {
+ Logger root = Logger.getRootLogger();
+ root.removeAppender(fa);
+ root.addAppender(aa);
+ }
+
@SuppressWarnings({ "rawtypes", "unchecked" })
protected static Appender getAppender(Class cls) {
Logger root = Logger.getRootLogger();
Modified: branches/faster/src/org/griphyn/vdl/karajan/Mergeable.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/Mergeable.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/Mergeable.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,7 +20,7 @@
*/
package org.griphyn.vdl.karajan;
-import org.globus.cog.karajan.workflow.futures.Future;
+import k.rt.Future;
public interface Mergeable {
void mergeListeners(Future f);
Modified: branches/faster/src/org/griphyn/vdl/karajan/Monitor.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/Monitor.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/Monitor.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -50,14 +50,14 @@
import javax.swing.JTable;
import javax.swing.table.AbstractTableModel;
-import org.globus.cog.karajan.stack.Trace;
-import org.globus.cog.karajan.stack.VariableNotFoundException;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.ThreadingContext;
-import org.globus.cog.karajan.workflow.events.EventTargetPair;
-import org.globus.cog.karajan.workflow.futures.Future;
-import org.globus.cog.karajan.workflow.nodes.FlowElement;
-import org.globus.cog.karajan.workflow.nodes.FlowNode;
+import k.rt.ConditionalYield;
+import k.rt.Future;
+import k.rt.FutureListener;
+import k.rt.FutureValue;
+import k.thr.LWThread;
+
+import org.globus.cog.karajan.analyzer.VariableNotFoundException;
+import org.globus.cog.karajan.compiled.nodes.Node;
import org.griphyn.vdl.engine.Karajan;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.ArrayDataNode;
@@ -66,6 +66,8 @@
import org.griphyn.vdl.mapping.MappingParam;
import org.griphyn.vdl.mapping.Path;
+import com.sun.org.apache.xpath.internal.VariableStack;
+
public class Monitor implements ActionListener, MouseListener {
public static final int VARS = 0;
public static final int THREADS = 1;
@@ -74,7 +76,7 @@
private JTable t;
private JButton futures, waiting, tasks;
private List<Future> wr;
- private List<VariableStack> wt;
+ private List<LWThread> wt;
private int crtdisp;
public Monitor() {
@@ -151,7 +153,18 @@
entry.add(handle.getType());
entry.add(h);
entry.add(value);
- entry.add(f.isClosed() ? "Closed" : "Open");
+ if (f instanceof FutureValue) {
+ try {
+ ((FutureValue) f).getValue();
+ entry.add("Closed");
+ }
+ catch (ConditionalYield y) {
+ entry.add("Open");
+ }
+ }
+ else {
+ entry.add("-");
+ }
entry.add(sz);
String fs;
if (f instanceof FutureWrapper) {
@@ -188,11 +201,11 @@
}
crtdisp = THREADS;
ArrayList<String> al = new ArrayList<String>();
- wt = new ArrayList<VariableStack>();
- Map<VariableStack, DSHandle> c = WaitingThreadsMonitor.getAllThreads();
- for (Map.Entry<VariableStack, DSHandle> entry : c.entrySet()) {
+ wt = new ArrayList<LWThread>();
+ Map<LWThread, DSHandle> c = WaitingThreadsMonitor.getAllThreads();
+ for (Map.Entry<LWThread, DSHandle> entry : c.entrySet()) {
try {
- al.add(String.valueOf(ThreadingContext.get(entry.getKey())));
+ al.add(entry.getKey().getName());
}
catch (VariableNotFoundException e1) {
al.add("unknown thread");
@@ -259,20 +272,20 @@
public static void dumpThreads(PrintStream pw) {
pw.println("\nWaiting threads:");
- Map<VariableStack, DSHandle> c = WaitingThreadsMonitor.getAllThreads();
- for (Map.Entry<VariableStack, DSHandle> e : c.entrySet()) {
+ Map<LWThread, DSHandle> c = WaitingThreadsMonitor.getAllThreads();
+ for (Map.Entry<LWThread, DSHandle> e : c.entrySet()) {
dumpThread(pw, e.getKey(), e.getValue());
pw.println();
}
pw.println("----");
}
- public static void dumpThread(PrintStream pw, VariableStack stack, DSHandle handle) {
+ public static void dumpThread(PrintStream pw, LWThread thr, DSHandle handle) {
try {
- pw.println("Thread: " + String.valueOf(ThreadingContext.get(stack))
+ pw.println("Thread: " + thr.getName()
+ (handle == null ? "" : ", waiting on " + varWithLine(handle)));
- for (String t : getSwiftTrace(stack)) {
+ for (String t : getSwiftTrace(thr)) {
pw.println("\t" + t);
}
}
@@ -289,54 +302,54 @@
(line == null ? "" : " (declared on line " + line + ")");
}
- public static String getLastCall(VariableStack stack) {
- List<Object> trace = Trace.getAsList(stack);
- for (Object o : trace) {
- if (o instanceof FlowNode) {
- FlowNode n = (FlowNode) o;
- String traceLine = (String) n.getProperty("_traceline");
- if (traceLine != null) {
+ public static String getLastCall(LWThread thr) {
+ List<Object> trace = thr.getTrace();
+ if (trace != null) {
+ for (Object o : trace) {
+ if (o instanceof Node) {
+ Node n = (Node) o;
+ int line = n.getLine();
return(Karajan.demangle(n.getTextualName()) + ", " +
- fileName(n) + ", line " + traceLine);
+ fileName(n) + ", line " + line);
}
}
}
return "?";
}
- public static List<String> getSwiftTrace(VariableStack stack) {
+ public static List<String> getSwiftTrace(LWThread thr) {
List<String> ret = new ArrayList<String>();
- List<Object> trace = Trace.getAsList(stack);
- for (Object o : trace) {
- if (o instanceof FlowNode) {
- FlowNode n = (FlowNode) o;
- String traceLine = (String) n.getProperty("_traceline");
- if (traceLine != null) {
+ List<Object> trace = thr.getTrace();
+ if (trace != null) {
+ for (Object o : trace) {
+ if (o instanceof Node) {
+ Node n = (Node) o;
+ int line = n.getLine();
ret.add(Karajan.demangle(n.getTextualName()) + ", " +
- fileName(n) + ", line " + traceLine);
+ fileName(n) + ", line " + line);
+
}
}
- }
+ }
return ret;
}
- public static List<Object> getSwiftTraceElements(VariableStack stack) {
+ public static List<Object> getSwiftTraceElements(LWThread thr) {
List<Object> ret = new ArrayList<Object>();
- List<Object> trace = Trace.getAsList(stack);
- for (Object o : trace) {
- if (o instanceof FlowNode) {
- FlowNode n = (FlowNode) o;
- String traceLine = (String) n.getProperty("_traceline");
- if (traceLine != null) {
- ret.add(o);
+ List<Object> trace = thr.getTrace();
+ if (trace != null) {
+ for (Object o : trace) {
+ if (o instanceof Node) {
+ Node n = (Node) o;
+ ret.add(n.getLine());
}
}
}
return ret;
}
- private static String fileName(FlowNode n) {
- return new File((String) FlowNode.getTreeProperty(FlowElement.FILENAME, n)).getName().replace(".kml", ".swift");
+ private static String fileName(Node n) {
+ return new File(n.getFileName()).getName().replace(".kml", ".swift");
}
public class VariableModel extends AbstractTableModel {
@@ -365,14 +378,15 @@
return l.get(rowIndex)[columnIndex];
}
else {
- EventTargetPair[] l = Monitor.this.getListeners(rowIndex);
+ List<FutureListener> l = Monitor.this.getListeners(rowIndex);
if (l != null) {
ArrayList<Object> a = new ArrayList<Object>();
- for (int i = 0; i < l.length; i++) {
- try {
- a.add(ThreadingContext.get(l[i].getEvent()));
+ for (int i = 0; i < l.size(); i++) {
+ FutureListener o = l.get(i);
+ if (o instanceof LWThread.Listener) {
+ a.add(((LWThread.Listener) o).getThread().getName());
}
- catch (VariableNotFoundException e) {
+ else {
a.add("unknown");
}
}
@@ -439,12 +453,13 @@
if (e.getClickCount() == 2) {
int row = t.rowAtPoint(e.getPoint());
if (crtdisp == VARS) {
- EventTargetPair[] l = getListeners(row);
- if (l != null) {
+ List<FutureListener> ls = getListeners(row);
+ if (ls != null) {
try {
- for (int i = 0; i < l.length; i++) {
+ for (FutureListener l : ls) {
+
displayPopup("Stack trace for " + t.getValueAt(row, 1),
- Trace.get(l[i].getEvent()));
+ getTrace(l));
}
}
catch (NullPointerException ex) {
@@ -455,21 +470,30 @@
else if (crtdisp == THREADS) {
Object o = wt.get(row);
if (o instanceof VariableStack) {
- displayPopup("Stack trace for " + t.getValueAt(row, 0),
- Trace.get((VariableStack) o));
+ displayPopup("Stack trace for " + t.getValueAt(row, 0), " N/A");
}
}
}
}
- private void displayPopup(String title, String s) {
+ private String getTrace(FutureListener l) {
+ if (l instanceof LWThread.Listener) {
+ LWThread.Listener lt = (LWThread.Listener) l;
+ return String.valueOf(lt.getThread().getTrace());
+ }
+ else {
+ return "unknown";
+ }
+ }
+
+ private void displayPopup(String title, String s) {
JOptionPane.showMessageDialog(frame, s, title, JOptionPane.INFORMATION_MESSAGE);
}
- public EventTargetPair[] getListeners(int wrindex) {
+ public List<FutureListener> getListeners(int wrindex) {
Object o = wr.get(wrindex);
if (o instanceof FutureWrapper) {
- return ((FutureWrapper) o).getListenerEvents();
+ return ((FutureWrapper) o).getListeners();
}
else {
return null;
Modified: branches/faster/src/org/griphyn/vdl/karajan/Pair.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/Pair.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/Pair.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -37,5 +37,4 @@
public int size() {
return 2;
}
-
}
Deleted: branches/faster/src/org/griphyn/vdl/karajan/PairIterator.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/PairIterator.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/PairIterator.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,78 +0,0 @@
-/*
- * Copyright 2012 University of Chicago
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-/*
- * Created on Jun 9, 2006
- */
-package org.griphyn.vdl.karajan;
-
-import java.util.Iterator;
-import java.util.Map;
-
-import org.globus.cog.karajan.util.KarajanIterator;
-
-public class PairIterator implements KarajanIterator {
- private Iterator<?> it;
- private int crt, count;
- private Pair crto;
-
- public PairIterator(Map<?, ?> map) {
- this.it = map.entrySet().iterator();
- this.count = map.size();
- }
-
- public int current() {
- return crt;
- }
-
- public int count() {
- return count;
- }
-
- public Object peek() {
- if (crto == null) {
- crto = convert(it.next());
- }
- return crto;
- }
-
- public void remove() {
- throw new UnsupportedOperationException("remove");
- }
-
- public boolean hasNext() {
- return it.hasNext() || crto != null;
- }
-
- public Object next() {
- crt++;
- if (crto != null) {
- Object o = crto;
- crto = null;
- return o;
- }
- else {
- return convert(it.next());
- }
- }
-
- private Pair convert(Object o) {
- Map.Entry<?, ?> e = (Map.Entry<?, ?>) o;
- return new Pair(e.getKey(), e.getValue());
- }
-
-}
Added: branches/faster/src/org/griphyn/vdl/karajan/PairSet.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/PairSet.java (rev 0)
+++ branches/faster/src/org/griphyn/vdl/karajan/PairSet.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2012 University of Chicago
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+/*
+ * Created on Jun 9, 2006
+ */
+package org.griphyn.vdl.karajan;
+
+import java.util.AbstractCollection;
+import java.util.AbstractList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.griphyn.vdl.mapping.DSHandle;
+
+public class PairSet extends AbstractCollection<List<?>> {
+ private Set<Map.Entry<Comparable<?>, DSHandle>> set;
+
+ public PairSet(Map<Comparable<?>, DSHandle> map) {
+ this.set = map.entrySet();
+ }
+
+ @Override
+ public Iterator<List<?>> iterator() {
+ final Iterator<Map.Entry<Comparable<?>, DSHandle>> it = set.iterator();
+ return new Iterator<List<?>>() {
+ @Override
+ public boolean hasNext() {
+ return it.hasNext();
+ }
+
+ @Override
+ public List<?> next() {
+ final Map.Entry<Comparable<?>, DSHandle> e = it.next();
+ return new AbstractList<Object>() {
+ @Override
+ public Object get(int index) {
+ switch (index) {
+ case 0:
+ return e.getKey();
+ case 1:
+ return e.getValue();
+ default:
+ throw new IndexOutOfBoundsException(String.valueOf(index));
+ }
+ }
+
+ @Override
+ public int size() {
+ return 2;
+ }
+
+ };
+ }
+
+ @Override
+ public void remove() {
+ throw new UnsupportedOperationException();
+ }
+ };
+ }
+
+ @Override
+ public int size() {
+ return set.size();
+ }
+}
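
Aside (not part of the commit): the class above replaces the deleted PairIterator. Instead of a KarajanIterator producing Pair objects, callers now get a plain Collection of two-element lists, so iteration is an ordinary for-each. A minimal usage sketch compiled against this branch; DumpPairs and dump() are illustrative names only:

    import java.util.List;
    import java.util.Map;

    import org.griphyn.vdl.karajan.PairSet;
    import org.griphyn.vdl.mapping.DSHandle;

    // Illustrative caller: walks the (key, handle) pairs of an array's member map.
    public class DumpPairs {
        public static void dump(Map<Comparable<?>, DSHandle> members) {
            for (List<?> pair : new PairSet(members)) {
                Object key = pair.get(0);                  // element 0: the array key
                DSHandle value = (DSHandle) pair.get(1);   // element 1: the mapped handle
                System.out.println(key + " -> " + value);
            }
        }
    }
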
Added: branches/faster/src/org/griphyn/vdl/karajan/SwiftExecutor.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/SwiftExecutor.java (rev 0)
+++ branches/faster/src/org/griphyn/vdl/karajan/SwiftExecutor.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2012 University of Chicago
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+/*
+ * Created on Dec 23, 2006
+ */
+package org.griphyn.vdl.karajan;
+
+import k.rt.Context;
+import k.rt.ExecutionException;
+import k.rt.Executor;
+import k.rt.Stack;
+import k.thr.Scheduler.RootThread;
+
+import org.apache.log4j.Logger;
+import org.globus.cog.karajan.compiled.nodes.Main;
+import org.griphyn.vdl.karajan.functions.ProcessBulkErrors;
+
+public class SwiftExecutor extends Executor {
+ public static final Logger logger = Logger.getLogger(SwiftExecutor.class);
+
+ public SwiftExecutor(Main root) {
+ super(root);
+ }
+
+ protected void printFailure(ExecutionException e) {
+ if (logger.isDebugEnabled()) {
+ logger.debug(e.getMessage(), e);
+ }
+ String msg = e.getMessage();
+ if (!"Execution completed with errors".equals(msg)) {
+ if (msg == null) {
+ msg = getMeaningfulMessage(e);
+ }
+ System.err.print("Execution failed:\n\t");
+ String translation = VDL2ErrorTranslator.getDefault().translate(msg);
+ if (translation != null) {
+ System.err.print(translation);
+ }
+ else {
+ System.err.print(ProcessBulkErrors.getMessageChain(e));
+ }
+ System.err.print("\n");
+ }
+ else {
+ // lazy errors are on and they have already been printed
+ }
+ }
+
+ public void start(Context context) {
+ if (logger.isDebugEnabled()) {
+ logger.debug(context);
+ }
+ logger.info("swift.home = " +
+ System.getProperty("swift.home"));
+ start(new RootThread(getRoot(), new Stack()), context);
+ }
+}
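
Aside (not part of the commit): SwiftExecutor is the replacement for the deleted VDL2ExecutionContext; error reporting moves from getStderr() to System.err and startup now takes a k.rt.Context. A sketch of how it would be driven; how the compiled Main root and the Context are produced is outside this hunk, so they appear here as parameters, and RunSketch is a made-up name:

    import k.rt.Context;

    import org.globus.cog.karajan.compiled.nodes.Main;
    import org.griphyn.vdl.karajan.SwiftExecutor;

    // Illustrative driver only.
    public class RunSketch {
        static void run(Main root, Context context) {
            SwiftExecutor exec = new SwiftExecutor(root);
            exec.start(context);   // failures surface through printFailure(...)
        }
    }
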
Added: branches/faster/src/org/griphyn/vdl/karajan/SwiftRootScope.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/SwiftRootScope.java (rev 0)
+++ branches/faster/src/org/griphyn/vdl/karajan/SwiftRootScope.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -0,0 +1,33 @@
+//----------------------------------------------------------------------
+//This code is developed as part of the Java CoG Kit project
+//The terms of the license can be found at http://www.cogkit.org/license
+//This message may not be removed or altered.
+//----------------------------------------------------------------------
+
+/*
+ * Created on Jan 6, 2013
+ */
+package org.griphyn.vdl.karajan;
+
+import java.io.File;
+
+import k.rt.Context;
+
+import org.globus.cog.karajan.analyzer.RootScope;
+import org.globus.cog.karajan.util.KarajanProperties;
+import org.griphyn.vdl.karajan.lib.swiftscript.FnArg;
+import org.griphyn.vdl.mapping.DuplicateMappingChecker;
+
+public class SwiftRootScope extends RootScope {
+
+ public SwiftRootScope(KarajanProperties props, String file, Context context) {
+ super(props, file, context);
+ context.setAttribute("SWIFT:DM_CHECKER", new DuplicateMappingChecker());
+
+ addVar("PATH_SEPARATOR", File.separator);
+ addVar("SWIFT:DRY_RUN", context.getAttribute("SWIFT:DRY_RUN"));
+ addVar("SWIFT:RUN_ID", context.getAttribute("SWIFT:RUN_ID"));
+ addVar("SWIFT:SCRIPT_NAME", context.getAttribute("SWIFT:SCRIPT_NAME"));
+ addVar("SWIFT:PARSED_ARGS", FnArg.parseArgs(context.getArguments()));
+ }
+}
Deleted: branches/faster/src/org/griphyn/vdl/karajan/VDL2ExecutionContext.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/VDL2ExecutionContext.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/VDL2ExecutionContext.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,122 +0,0 @@
-/*
- * Copyright 2012 University of Chicago
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-/*
- * Created on Dec 23, 2006
- */
-package org.griphyn.vdl.karajan;
-
-import java.util.List;
-
-import org.apache.log4j.Logger;
-import org.globus.cog.karajan.stack.Trace;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ElementTree;
-import org.globus.cog.karajan.workflow.ExecutionContext;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.karajan.functions.ProcessBulkErrors;
-import org.griphyn.vdl.mapping.DuplicateMappingChecker;
-
-public class VDL2ExecutionContext extends ExecutionContext {
- public static final Logger logger = Logger.getLogger(VDL2ExecutionContext.class);
-
- public static final String RUN_ID = "vdl:runid";
- public static final String SCRIPT_NAME = "vdl:scriptname";
- public static final String DM_CHECKER = "vdl:dpmchecker";
-
- private String runID;
- private final String scriptName;
-
- public VDL2ExecutionContext(ElementTree tree, String scriptName) {
- super(tree);
- this.scriptName = scriptName;
- }
-
- protected void printFailure(ExecutionException e) {
- if (logger.isDebugEnabled()) {
- logger.debug("Karajan level error: " + getKarajanTrace(e));
- }
- String msg = e.getMessage();
- if (!"Execution completed with errors".equals(msg)) {
- if (msg == null) {
- msg = getMeaningfulMessage(e);
- }
- getStderr().append("Execution failed:\n\t");
- String translation = VDL2ErrorTranslator.getDefault().translate(msg);
- if (translation != null) {
- getStderr().append(translation);
- }
- else {
- getStderr().append(ProcessBulkErrors.getMessageChain(e));
- }
- if (e.getStack() != null) {
- List<String> l = Monitor.getSwiftTrace(e.getStack());
- for (String s : l) {
- getStderr().append("\n\t");
- getStderr().append(s);
- }
- }
- getStderr().append("\n");
- }
- else {
- // lazy errors are on and they have already been printed
- }
- }
-
- private String getKarajanTrace(ExecutionException e) {
- StringBuilder sb = new StringBuilder();
- while (e != null) {
- sb.append(e.getMessage());
- if (e.getStack() != null) {
- sb.append(" at\n");
- sb.append(Trace.get(e.getStack()));
- }
- if (e.getCause() instanceof ExecutionException) {
- e = (ExecutionException) e.getCause();
- sb.append("\ncaused by: ");
- }
- else {
- e = null;
- }
- }
- return sb.toString();
- }
-
- protected void setGlobals(VariableStack stack) {
- super.setGlobals(stack);
- stack.setGlobal(RUN_ID, runID);
- stack.setGlobal(SCRIPT_NAME, scriptName);
- stack.setGlobal(DM_CHECKER, new DuplicateMappingChecker());
- }
-
- public String getRunID() {
- return runID;
- }
-
- public void setRunID(String runID) {
- this.runID = runID;
- }
-
- public void start(VariableStack stack) {
- if (logger.isDebugEnabled()) {
- logger.debug(stack);
- }
- logger.info("swift.home = " +
- System.getProperty("swift.home"));
- super.start(stack);
- }
-}
Deleted: branches/faster/src/org/griphyn/vdl/karajan/VDL2FutureException.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/VDL2FutureException.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/VDL2FutureException.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,61 +0,0 @@
-/*
- * Copyright 2012 University of Chicago
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-/*
- * Created on Jan 4, 2007
- */
-package org.griphyn.vdl.karajan;
-
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.nodes.FlowElement;
-import org.griphyn.vdl.mapping.DSHandle;
-
-public class VDL2FutureException extends RuntimeException {
- private final DSHandle handle;
- private FlowElement listener;
- private VariableStack stack;
-
- public VDL2FutureException(DSHandle handle, FlowElement listener, VariableStack stack) {
- this.handle = handle;
- this.listener = listener;
- this.stack = stack;
- }
-
- public VDL2FutureException(DSHandle handle) {
- this(handle, null, null);
- }
-
- public DSHandle getHandle() {
- return handle;
- }
-
- public VariableStack getStack() {
- return stack;
- }
-
- public FlowElement getListener() {
- return listener;
- }
-
- public void setStack(VariableStack stack) {
- this.stack = stack;
- }
-
- public void setListener(FlowElement listener) {
- this.listener = listener;
- }
-}
Modified: branches/faster/src/org/griphyn/vdl/karajan/VDSAdaptiveScheduler.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/VDSAdaptiveScheduler.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/VDSAdaptiveScheduler.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -38,9 +38,9 @@
import org.globus.cog.abstraction.interfaces.JobSpecification;
import org.globus.cog.abstraction.interfaces.Service;
import org.globus.cog.abstraction.interfaces.Status;
+import org.globus.cog.abstraction.interfaces.StatusListener;
import org.globus.cog.abstraction.interfaces.Task;
import org.globus.cog.karajan.scheduler.AbstractScheduler;
-import org.globus.cog.karajan.scheduler.NoSuchResourceException;
import org.globus.cog.karajan.scheduler.ResourceConstraintChecker;
import org.globus.cog.karajan.scheduler.TaskConstraints;
import org.globus.cog.karajan.scheduler.WeightedHostScoreScheduler;
@@ -61,10 +61,10 @@
private static Timer timer;
private TCCache tc;
- private LinkedList<Object[]> dq;
+ private LinkedList<Entry> dq;
private int clusteringQueueDelay = 1;
private int minClusterTime = 60;
- private Map<Task, List<Object[]>> tasks;
+ private Map<Task, List<Entry>> tasks;
private boolean clusteringEnabled;
private int clusterId;
@@ -75,8 +75,8 @@
private Map<Service, BoundContact> serviceContactMapping;
public VDSAdaptiveScheduler() {
- dq = new LinkedList<Object[]>();
- tasks = new HashMap<Task, List<Object[]>>();
+ dq = new LinkedList<Entry>();
+ tasks = new HashMap<Task, List<Entry>>();
serviceContactMapping = new HashMap<Service, BoundContact>();
}
@@ -115,18 +115,19 @@
}
}
- public void enqueue(Task task, Object constraints) {
+ @Override
+ public void enqueue(Task task, Object constraints, StatusListener l) {
if (shouldBeClustered(task, constraints)) {
startTimer();
if (logger.isDebugEnabled()) {
logger.debug("Adding task to clustering queue: " + task.getIdentity());
}
synchronized (dq) {
- dq.addLast(new Object[] { task, constraints });
+ dq.addLast(new Entry(task, constraints, l));
}
}
else {
- super.enqueue(task, constraints);
+ super.enqueue(task, constraints, l);
}
}
@@ -192,23 +193,23 @@
synchronized (dq) {
while (!dq.isEmpty()) {
int clusterTime = 0;
- LinkedList<Object[]> cluster = new LinkedList<Object[]>();
+ LinkedList<Entry> cluster = new LinkedList<Entry>();
Map<String, String> env = new HashMap<String, String>();
Map<String, Object> attrs = new HashMap<String, Object>();
Object constraints = null;
String dir = null;
- Iterator<Object[]> dqi = dq.iterator();
+ Iterator<Entry> dqi = dq.iterator();
while (clusterTime < minClusterTime && dqi.hasNext()) {
- Object[] h = dqi.next();
- Task task = (Task) h[0];
+ Entry e = dqi.next();
+ Task task = e.task;
JobSpecification js = (JobSpecification) task.getSpecification();
if (constraints == null) {
- constraints = ((Object[]) h[1])[0];
+ constraints = ((Object[]) e.constraints)[0];
}
- else if (!constraints.equals(((Object[]) h[1])[0])) {
+ else if (!constraints.equals(((Object[]) e.constraints)[0])) {
continue;
}
@@ -230,7 +231,7 @@
merge(js, env, attrs);
clusterTime += getMaxWallTime(task);
- cluster.addLast(h);
+ cluster.addLast(e);
}
if (logger.isDebugEnabled()) {
@@ -241,8 +242,8 @@
continue;
}
else if (cluster.size() == 1) {
- Object[] h = cluster.removeFirst();
- super.enqueue((Task) h[0], h[1]);
+ Entry e = cluster.removeFirst();
+ super.enqueue(e.task, e.constraints, e.listener);
}
else if (cluster.size() > 1) {
Task t = new TaskImpl();
@@ -264,8 +265,8 @@
logger.info("Creating cluster " + t.getIdentity() + " with size " + cluster.size());
}
- for (Object[] h : cluster) {
- Task st = (Task) h[0];
+ for (Entry e : cluster) {
+ Task st = e.task;
if (logger.isInfoEnabled()) {
logger.info("Task " + st.getIdentity() + " clustered in " + t.getIdentity());
}
@@ -292,7 +293,7 @@
synchronized (tasks) {
tasks.put(t, cluster);
}
- super.enqueue(t, new Contact[] { (Contact) constraints });
+ super.enqueue(t, new Contact[] { (Contact) constraints }, null);
}
}
}
@@ -388,59 +389,58 @@
sb.append(String.valueOf(value));
}
- protected void failTask(Task t, String message, Exception e) {
- if (e instanceof NoSuchResourceException) {
- message = "The application \"" + getTaskConstraints(t).getConstraint("tr")
- + "\" is not available for any site/pool in your tc.data catalog ";
- }
+ @Override
+ protected void failTask(Entry e, String message, Exception ex) {
if (logger.isDebugEnabled()) {
- logger.debug("Failing task " + t.getIdentity());
+ logger.debug("Failing task " + e.task.getIdentity());
}
- List<Object[]> cluster = null;
+ Task t = e.task;
+ List<Entry> cluster = null;
synchronized (tasks) {
cluster = tasks.get(t);
}
if (cluster != null) {
- for (Object[] h : cluster) {
- super.failTask((Task) h[0], message, e);
+ for (Entry e1 : cluster) {
+ super.failTask(e1, message, ex);
}
}
else {
- super.failTask(t, message, e);
+ super.failTask(e, message, ex);
}
}
- public void statusChanged(StatusEvent e) {
- Task t = (Task) e.getSource();
+ @Override
+ public void statusChanged(StatusEvent se, Entry e) {
+ Task t = e.task;
try {
if (logger.isDebugEnabled()) {
logger.debug("Got task status change for " + t.getIdentity());
}
- List<Object[]> cluster = null;
+ List<Entry> cluster = null;
synchronized (tasks) {
cluster = tasks.get(t);
}
if (cluster == null) {
- super.statusChanged(e);
+ super.statusChanged(se, e);
}
else {
if (logger.isDebugEnabled()) {
logger.debug("Got cluster status change for " + t.getIdentity());
}
- Status clusterMemberStatus = e.getStatus();
+ Status clusterMemberStatus = se.getStatus();
if(clusterMemberStatus.getStatusCode() == Status.FAILED) {
clusterMemberStatus = new StatusImpl(Status.COMPLETED);
}
- for (Object[] h : cluster) {
- Task ct = (Task) h[0];
+ for (Entry e1 : cluster) {
+ Task ct = e1.task;
StatusEvent nse = new StatusEvent(ct, clusterMemberStatus);
ct.setStatus(clusterMemberStatus);
- fireJobStatusChangeEvent(nse);
+ fireJobStatusChangeEvent(nse, e1);
}
- if (e.getStatus().isTerminal()) {
+ if (se.getStatus().isTerminal()) {
if (logger.isInfoEnabled()) {
logger.info("Removing cluster " + t.getIdentity());
}
@@ -451,7 +451,7 @@
}
}
catch (Exception ex) {
- failTask(t, ex.getMessage(), ex);
+ failTask(e, ex.getMessage(), ex);
}
}
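
Aside (not part of the diff): the clustering queue now carries the scheduler base class's typed Entry objects instead of Object[] triples. Entry itself is defined in the updated AbstractScheduler, which is not part of this commit; the fields used above (task, constraints, listener) imply roughly the following shape, shown only as an illustration, with EntrySketch as a made-up name:

    import org.globus.cog.abstraction.interfaces.StatusListener;
    import org.globus.cog.abstraction.interfaces.Task;

    // Hypothetical stand-in for the scheduler's Entry, inferred from its usage above.
    class EntrySketch {
        final Task task;                // previously h[0]
        final Object constraints;       // previously h[1]
        final StatusListener listener;  // new: carried explicitly per enqueued task

        EntrySketch(Task task, Object constraints, StatusListener listener) {
            this.task = task;
            this.constraints = constraints;
            this.listener = listener;
        }
    }
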
Modified: branches/faster/src/org/griphyn/vdl/karajan/VDSTaskTransformer.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/VDSTaskTransformer.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/VDSTaskTransformer.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -35,7 +35,6 @@
import org.globus.cog.karajan.scheduler.TaskTransformer;
import org.globus.cog.karajan.util.BoundContact;
import org.globus.cog.karajan.util.Contact;
-import org.globus.cog.karajan.workflow.KarajanRuntimeException;
import org.globus.swift.catalog.TCEntry;
import org.globus.swift.catalog.types.TCType;
import org.griphyn.vdl.util.FQN;
@@ -151,7 +150,7 @@
}
}
catch(IOException ioe) {
- throw new KarajanRuntimeException("Could not determine wrapper invocation mode", ioe);
+ throw new RuntimeException("Could not determine wrapper invocation mode", ioe);
}
}
@@ -176,7 +175,7 @@
l = tc.getTCEntries(fqn, bc.getHost(), TCType.INSTALLED);
}
catch (Exception e) {
- throw new KarajanRuntimeException(e);
+ throw new RuntimeException(e);
}
if (l == null || l.isEmpty()) {
return;
Modified: branches/faster/src/org/griphyn/vdl/karajan/WaitingThreadsMonitor.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/WaitingThreadsMonitor.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/WaitingThreadsMonitor.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -24,48 +24,62 @@
import java.util.List;
import java.util.Map;
-import org.globus.cog.karajan.stack.VariableStack;
+import k.rt.FutureListener;
+import k.thr.LWThread;
+
import org.griphyn.vdl.mapping.DSHandle;
public class WaitingThreadsMonitor {
- private static Map<VariableStack, DSHandle> threads = new HashMap<VariableStack, DSHandle>();
- private static Map<VariableStack, List<DSHandle>> outputs = new HashMap<VariableStack, List<DSHandle>>();;
+ private static Map<LWThread, DSHandle> threads = new HashMap<LWThread, DSHandle>();
+ private static Map<LWThread, List<DSHandle>> outputs = new HashMap<LWThread, List<DSHandle>>();;
- public static void addThread(VariableStack stack, DSHandle waitingOn) {
- if (stack != null) {
+ public static void addThread(FutureListener fl, DSHandle waitingOn) {
+ if (fl instanceof LWThread.Listener) {
+ addThread(((LWThread.Listener) fl).getThread(), waitingOn);
+ }
+ }
+
+ public static void removeThread(FutureListener fl) {
+ if (fl instanceof LWThread.Listener) {
+ removeThread(((LWThread.Listener) fl).getThread());
+ }
+ }
+
+ public static void addThread(LWThread thr, DSHandle waitingOn) {
+ if (thr != null) {
synchronized(threads) {
- threads.put(stack, waitingOn);
+ threads.put(thr, waitingOn);
}
}
}
- public static void removeThread(VariableStack stack) {
+ public static void removeThread(LWThread thr) {
synchronized(threads) {
- threads.remove(stack);
+ threads.remove(thr);
}
}
- public static Map<VariableStack, DSHandle> getAllThreads() {
+ public static Map<LWThread, DSHandle> getAllThreads() {
synchronized(threads) {
- return new HashMap<VariableStack, DSHandle>(threads);
+ return new HashMap<LWThread, DSHandle>(threads);
}
}
- public static void addOutput(VariableStack stack, List<DSHandle> outputs) {
+ public static void addOutput(LWThread thr, List<DSHandle> outputs) {
synchronized(WaitingThreadsMonitor.outputs) {
- WaitingThreadsMonitor.outputs.put(stack, outputs);
+ WaitingThreadsMonitor.outputs.put(thr, outputs);
}
}
- public static void removeOutput(VariableStack stack) {
+ public static void removeOutput(LWThread thr) {
synchronized(outputs) {
- outputs.remove(stack);
+ outputs.remove(thr);
}
}
- public static Map<VariableStack, List<DSHandle>> getOutputs() {
+ public static Map<LWThread, List<DSHandle>> getOutputs() {
synchronized(outputs) {
- return new HashMap<VariableStack, List<DSHandle>>(outputs);
+ return new HashMap<LWThread, List<DSHandle>>(outputs);
}
}
}
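
Aside (usage sketch, not in the commit): the monitor is now keyed by LWThread rather than VariableStack, and the FutureListener overloads let a wait site register itself without unwrapping the thread by hand; listeners that are not LWThread.Listener instances are silently ignored. Roughly, with WaitSite and its method names being illustrative:

    import k.rt.FutureListener;

    import org.griphyn.vdl.karajan.WaitingThreadsMonitor;
    import org.griphyn.vdl.mapping.DSHandle;

    // Illustrative wait-site bookkeeping around a future listener registration.
    public class WaitSite {
        static void markWaiting(FutureListener listener, DSHandle waitingOn) {
            WaitingThreadsMonitor.addThread(listener, waitingOn);
        }

        static void markResumed(FutureListener listener) {
            WaitingThreadsMonitor.removeThread(listener);
        }
    }
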
Modified: branches/faster/src/org/griphyn/vdl/karajan/functions/ConfigProperty.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/functions/ConfigProperty.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/functions/ConfigProperty.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -22,55 +22,86 @@
import java.io.IOException;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.functions.AbstractFunction;
+import k.rt.Context;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.apache.log4j.Logger;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.CompilationException;
+import org.globus.cog.karajan.analyzer.Param;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.globus.cog.karajan.compiled.nodes.Node;
+import org.globus.cog.karajan.compiled.nodes.functions.AbstractSingleValuedFunction;
+import org.globus.cog.karajan.parser.WrapperNode;
+import org.globus.cog.karajan.util.BoundContact;
import org.griphyn.vdl.util.VDL2Config;
-import org.globus.cog.karajan.util.BoundContact;
-import org.apache.log4j.Logger;
-public class ConfigProperty extends AbstractFunction {
- public static final Arg NAME = new Arg.Positional("name");
- public static final Arg INSTANCE = new Arg.Optional("instance", Boolean.TRUE);
- public static final Arg HOST = new Arg.Optional("host",null);
-
- static {
- setArguments(ConfigProperty.class, new Arg[] { NAME, INSTANCE, HOST });
+public class ConfigProperty extends AbstractSingleValuedFunction {
+ private ArgRef<String> name;
+ private ArgRef<Boolean> instance;
+ private ArgRef<BoundContact> host;
+
+ private VDL2Config instanceConfig;
+ private VarRef<Context> context;
+
+
+ @Override
+ protected Param[] getParams() {
+ return params("name", optional("instance", Boolean.TRUE), optional("host", null));
}
- public static final String INSTANCE_CONFIG_FILE = "vdl:instanceconfigfile";
- public static final String INSTANCE_CONFIG = "vdl:instanceconfig";
+ public static final String INSTANCE_CONFIG = "SWIFT_CONFIG";
public static final Logger logger = Logger.getLogger(ConfigProperty.class);
+
+
+ @Override
+ protected Node compileBody(WrapperNode w, Scope argScope, Scope scope)
+ throws CompilationException {
+ context = scope.getVarRef("#context");
+ return super.compileBody(w, argScope, scope);
+ }
- public Object function(VariableStack stack) throws ExecutionException {
- String name = TypeUtil.toString(NAME.getValue(stack));
- boolean instance = TypeUtil.toBoolean(INSTANCE.getValue(stack));
- Object host = HOST.getValue(stack);
- if(logger.isDebugEnabled()) {
- logger.debug("Getting property "+name+" with host "+host);
+ @Override
+ public Object function(Stack stack) {
+ String name = this.name.getValue(stack);
+ boolean instance = this.instance.getValue(stack);
+ BoundContact host = this.host.getValue(stack);
+ if (logger.isDebugEnabled()) {
+ logger.debug("Getting property " + name + " with host " + host);
}
- if(host!= null) {
+ if (host != null) {
// see if the host has this property defined, and if so
// get its value
- BoundContact h = (BoundContact)host;
- String prop = (String) h.getProperty(name);
- if(prop != null) {
- logger.debug("Found property "+name+" in BoundContact");
+ String prop = (String) host.getProperty(name);
+ if (prop != null) {
+ if (logger.isDebugEnabled()) {
+ logger.debug("Found property " + name + " in BoundContact");
+ }
return prop;
}
- logger.debug("Could not find property "+name+" in BoundContact");
+ if (logger.isDebugEnabled()) {
+ logger.debug("Could not find property " + name + " in BoundContact");
+ }
}
- return getProperty(name, instance, stack);
+ return getProperty(name, instance, getInstanceConfig(stack));
}
- public static String getProperty(String name, VariableStack stack) throws ExecutionException {
- return getProperty(name, true, stack);
+ private synchronized VDL2Config getInstanceConfig(Stack stack) {
+ if (instanceConfig == null) {
+ Context ctx = this.context.getValue(stack);
+ instanceConfig = (VDL2Config) ctx.getAttribute("SWIFT:CONFIG");
+ }
+ return instanceConfig;
}
- public static String getProperty(String name, boolean instance, VariableStack stack) throws ExecutionException {
+ public static String getProperty(String name, VDL2Config instanceConfig) {
+ return getProperty(name, true, instanceConfig);
+ }
+
+ public static String getProperty(String name, boolean instance, VDL2Config instanceConfig) {
try {
VDL2Config conf;
String prop;
@@ -79,20 +110,8 @@
prop = conf.getProperty(name);
}
else {
- synchronized (stack.firstFrame()) {
- conf = (VDL2Config) stack.getGlobal(INSTANCE_CONFIG);
- if (conf == null) {
- String confFile = (String) stack.getGlobal(INSTANCE_CONFIG_FILE);
- if (confFile == null) {
- conf = VDL2Config.getConfig();
- }
- else {
- conf = VDL2Config.getConfig(confFile);
- }
- stack.setGlobal(INSTANCE_CONFIG, conf);
- }
- prop = conf.getProperty(name);
- }
+ conf = instanceConfig;
+ prop = conf.getProperty(name);
}
if (prop == null) {
throw new ExecutionException("Swift config property \"" + name + "\" not found in "
Modified: branches/faster/src/org/griphyn/vdl/karajan/functions/ProcessBulkErrors.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/functions/ProcessBulkErrors.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/functions/ProcessBulkErrors.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -22,42 +22,61 @@
import java.io.CharArrayWriter;
import java.io.PrintWriter;
+import java.util.Collection;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
-import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import k.rt.Channel;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.functions.AbstractFunction;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.CompilationException;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.compiled.nodes.Node;
+import org.globus.cog.karajan.compiled.nodes.functions.AbstractFunction;
+import org.globus.cog.karajan.parser.WrapperNode;
import org.griphyn.vdl.karajan.VDL2ErrorTranslator;
public class ProcessBulkErrors extends AbstractFunction {
public static final Logger logger = Logger.getLogger(ProcessBulkErrors.class);
+
+ private ArgRef<String> message;
+ private ArgRef<List<ExecutionException>> errors;
+ private ArgRef<Boolean> onStdout;
+
+ private ChannelRef<Object> cr_stdout, cr_stderr;
- public static final Arg MESSAGE = new Arg.Positional("message");
- public static final Arg ERRORS = new Arg.Positional("errors");
- public static final Arg ONSTDOUT = new Arg.Optional("onstdout", Boolean.FALSE);
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("message", "errors", optional("onStdout", false)),
+ returns(channel("stdout", DYNAMIC), channel("stderr", DYNAMIC)));
+ }
- static {
- setArguments(ProcessBulkErrors.class, new Arg[] { MESSAGE, ERRORS, ONSTDOUT });
- }
+ @Override
+ protected Node compileBody(WrapperNode w, Scope argScope, Scope scope)
+ throws CompilationException {
+ returnDynamic(scope);
+ return super.compileBody(w, argScope, scope);
+ }
- public Object function(VariableStack stack) throws ExecutionException {
- String message = TypeUtil.toString(MESSAGE.getValue(stack));
- boolean onStdout = TypeUtil.toBoolean(ONSTDOUT.getValue(stack));
- List l = TypeUtil.toList(ERRORS.getValue(stack));
+
+ @Override
+ public Object function(Stack stack) {
+ String message = this.message.getValue(stack);
+ boolean onStdout = this.onStdout.getValue(stack);
+ Collection<ExecutionException> l = this.errors.getValue(stack);
+
VDL2ErrorTranslator translator = VDL2ErrorTranslator.getDefault();
- Map count = new HashMap();
- Iterator i = l.iterator();
- while (i.hasNext()) {
- ExecutionException ex = (ExecutionException) i.next();
+ Map<String, Integer> count = new HashMap<String, Integer>();
+ for (ExecutionException ex : l) {
if (ex.getCause() instanceof ConcurrentModificationException) {
ex.printStackTrace();
}
@@ -76,26 +95,25 @@
}
tmsg = tmsg.trim();
if (count.containsKey(tmsg)) {
- Integer j = (Integer) count.get(tmsg);
+ Integer j = count.get(tmsg);
count.put(tmsg, new Integer(j.intValue() + 1));
}
else {
count.put(tmsg, new Integer(1));
}
}
- Arg.Channel channel = onStdout ? STDOUT : STDERR;
+ ChannelRef<Object> channel = onStdout ? cr_stdout : cr_stderr;
+ Channel<Object> r = channel.get(stack);
if (count.size() != 0) {
- channel.ret(stack, message + "\n");
- i = count.entrySet().iterator();
+ r.add(message + "\n");
int k = 1;
- while (i.hasNext()) {
- Map.Entry e = (Map.Entry) i.next();
- Integer j = (Integer) e.getValue();
+ for (Map.Entry<String, Integer> e : count.entrySet()) {
+ Integer j = e.getValue();
if (j.intValue() == 1) {
- channel.ret(stack, k + ". " + e.getKey() + "\n");
+ r.add(k + ". " + e.getKey() + "\n");
}
else {
- channel.ret(stack, k + ". " + e.getKey() + " (" + j.intValue() + " times)\n");
+ r.add(k + ". " + e.getKey() + " (" + j.intValue() + " times)\n");
}
k++;
}
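
Aside (not part of the diff): the loop above only tallies identical translated messages so that repeats are reported once with a count. The same bookkeeping in isolation, using only java.util; MessageTally is a made-up name:

    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class MessageTally {
        // Counts identical messages; LinkedHashMap keeps first-seen order for stable output.
        static Map<String, Integer> tally(List<String> messages) {
            Map<String, Integer> count = new LinkedHashMap<String, Integer>();
            for (String m : messages) {
                Integer j = count.get(m);
                count.put(m, j == null ? Integer.valueOf(1) : Integer.valueOf(j.intValue() + 1));
            }
            return count;
        }
    }
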
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/AbsFileName.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/AbsFileName.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/AbsFileName.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,16 +20,22 @@
*/
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.Stack;
-public class AbsFileName extends VDLFunction {
- static {
- setArguments(AbsFileName.class, new Arg[] { PA_VAR });
- }
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.griphyn.vdl.mapping.DSHandle;
- public Object function(VariableStack stack) throws ExecutionException {
- return filename(stack);
- }
+public class AbsFileName extends SwiftFunction {
+ private ArgRef<DSHandle> var;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ return filename(var.getValue(stack));
+ }
}
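
Aside (not part of the diff): AbsFileName is the smallest example of the migration applied across this commit: static Arg.Positional declarations plus setArguments() become typed ArgRef fields bound from getSignature(), and function(VariableStack) becomes function(Stack) with no TypeUtil conversions. A skeleton of a new-style function under the same assumptions about the analyzer API; EchoName and its "name" parameter are illustrative:

    import k.rt.Stack;

    import org.globus.cog.karajan.analyzer.ArgRef;
    import org.globus.cog.karajan.analyzer.Signature;
    import org.griphyn.vdl.karajan.lib.SwiftFunction;

    // Skeleton only: returns its single argument unchanged.
    public class EchoName extends SwiftFunction {
        private ArgRef<String> name;   // bound by the compiler from the signature below

        @Override
        protected Signature getSignature() {
            return new Signature(params("name"));
        }

        @Override
        public Object function(Stack stack) {
            return name.getValue(stack);
        }
    }
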
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/AppStageins.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/AppStageins.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/AppStageins.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -23,34 +23,41 @@
import java.util.LinkedList;
import java.util.List;
+import k.rt.Stack;
+import k.thr.LWThread;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.arguments.ArgUtil;
-import org.globus.cog.karajan.stack.VariableStack;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.compiled.nodes.InternalFunction;
import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.AbstractSequentialWithArguments;
import org.globus.swift.data.Director;
import org.globus.swift.data.policy.Policy;
import org.griphyn.vdl.mapping.AbsFile;
-public class AppStageins extends AbstractSequentialWithArguments {
+public class AppStageins extends InternalFunction {
+ private ArgRef<String> jobid;
+ private ArgRef<List<String>> files;
+ private ArgRef<String> dir;
+ private ArgRef<String> stagingMethod;
+
+ private ChannelRef<List<String>> cr_stagein;
static Logger logger = Logger.getLogger(AppStageins.class);
- public static final Arg JOBID = new Arg.Positional("jobid");
- public static final Arg FILES = new Arg.Positional("files");
- public static final Arg DIR = new Arg.Positional("dir");
- public static final Arg STAGING_METHOD = new Arg.Positional("stagingMethod");
- public static final Arg.Channel STAGEIN = new Arg.Channel("stagein");
-
- static {
- setArguments(AppStageins.class, new Arg[] { JOBID, FILES, DIR,
- STAGING_METHOD });
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("jobid", "files", "dir", "stagingMethod"), returns(channel("stagein")));
}
- protected void post(VariableStack stack) throws ExecutionException {
- List files = TypeUtil.toList(FILES.getValue(stack));
+
+ protected void runBody(LWThread thr) {
+ Stack stack = thr.getStack();
+ List<String> files = this.files.getValue(stack);
+ String stagingMethod = this.stagingMethod.getValue(stack);
+ String dir = this.dir.getValue(stack);
for (Object f : files) {
AbsFile file = new AbsFile(TypeUtil.toString(f));
Policy policy = Director.lookup(file.toString());
@@ -61,7 +68,7 @@
String protocol = file.getProtocol();
if (protocol.equals("file")) {
- protocol = TypeUtil.toString(STAGING_METHOD.getValue(stack));
+ protocol = stagingMethod;
}
String path = file.getDir().equals("") ? file.getName() : file
.getDir()
@@ -70,11 +77,10 @@
if (logger.isDebugEnabled()) {
logger.debug("will stage in: " + relpath + " via: " + protocol);
}
- ArgUtil.getChannelReturn(stack, STAGEIN).append(
+ cr_stagein.append(stack,
makeList(protocol + "://" + file.getHost() + "/" + path,
- TypeUtil.toString(DIR.getValue(stack)) + "/" + relpath));
+ dir + "/" + relpath));
}
- super.post(stack);
}
private List<String> makeList(String s1, String s2) {
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/AppStageouts.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/AppStageouts.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/AppStageouts.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -23,54 +23,55 @@
import java.util.LinkedList;
import java.util.List;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.arguments.ArgUtil;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.AbstractSequentialWithArguments;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+import k.thr.LWThread;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.compiled.nodes.InternalFunction;
import org.griphyn.vdl.mapping.AbsFile;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.Path;
-public class AppStageouts extends AbstractSequentialWithArguments {
-
- public static final Arg JOBID = new Arg.Positional("jobid");
- public static final Arg FILES = new Arg.Positional("files");
- public static final Arg DIR = new Arg.Positional("dir");
- public static final Arg STAGING_METHOD = new Arg.Positional("stagingMethod");
- public static final Arg VAR = new Arg.Optional("var", null);
- public static final Arg.Channel STAGEOUT = new Arg.Channel("stageout");
-
- static {
- setArguments(AppStageouts.class, new Arg[] { JOBID, FILES, DIR,
- STAGING_METHOD, VAR });
+public class AppStageouts extends InternalFunction {
+ private ArgRef<String> jobid;
+ private ArgRef<List<List<Object>>> files;
+ private ArgRef<String> dir;
+ private ArgRef<String> stagingMethod;
+
+ private ChannelRef<List<String>> cr_stageout;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("jobid", "files", "dir", "stagingMethod"), returns(channel("stageout")));
}
- protected void post(VariableStack stack) throws ExecutionException {
+ protected void runBody(LWThread thr) {
try {
- List files = TypeUtil.toList(FILES.getValue(stack));
- for (Object f : files) {
- List pv = TypeUtil.toList(f);
+ Stack stack = thr.getStack();
+ List<List<Object>> files = this.files.getValue(stack);
+ String stagingMethod = this.stagingMethod.getValue(stack);
+ String dir = this.dir.getValue(stack);
+ for (List<Object> pv : files) {
Path p = (Path) pv.get(0);
DSHandle handle = (DSHandle) pv.get(1);
- ArgUtil.getNamedArguments(stack).add("var", handle.getField(p));
- AbsFile file = new AbsFile(VDLFunction.filename(stack)[0]);
+ AbsFile file = new AbsFile(SwiftFunction.filename(handle.getField(p))[0]);
String protocol = file.getProtocol();
if (protocol.equals("file")) {
- protocol = TypeUtil.toString(STAGING_METHOD.getValue(stack));
+ protocol = stagingMethod;
}
String path = file.getDir().equals("") ? file.getName() : file.getDir()
+ "/" + file.getName();
String relpath = path.startsWith("/") ? path.substring(1) : path;
- ArgUtil.getChannelReturn(stack, STAGEOUT).append(
- makeList(TypeUtil.toString(DIR.getValue(stack)) + "/" + relpath,
+ cr_stageout.append(stack,
+ makeList(dir + "/" + relpath,
protocol + "://" + file.getHost() + "/" + path));
}
- super.post(stack);
}
catch (Exception e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/AppendArray.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/AppendArray.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/AppendArray.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -17,10 +17,10 @@
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.InvalidPathException;
@@ -28,25 +28,24 @@
public class AppendArray extends SetFieldValue {
- public static final Arg PA_ID = new Arg.Positional("id");
-
- static {
- setArguments(AppendArray.class, new Arg[] { PA_VAR, PA_VALUE });
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", "value"));
}
-
+
@Override
- public Object function(VariableStack stack) throws ExecutionException {
- DSHandle var = (DSHandle) PA_VAR.getValue(stack);
- AbstractDataNode value = (AbstractDataNode) PA_VALUE.getValue(stack);
+ public Object function(Stack stack) {
+ DSHandle var = this.var.getValue(stack);
+ AbstractDataNode value = this.value.getValue(stack);
// while there isn't a way to avoid conflicts between auto generated indices
// and a user manually using the same index, adding a "#" may reduce
// the incidence of problems
- Path path = Path.EMPTY_PATH.addFirst(getThreadPrefix(stack), true);
+ Path path = Path.EMPTY_PATH.addFirst(getThreadPrefix(), true);
try {
- deepCopy(var.getField(path), value, stack, 0);
+ deepCopy(var.getField(path), value, stack);
}
catch (InvalidPathException e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
return null;
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/CacheAddAndLockFile.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/CacheAddAndLockFile.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/CacheAddAndLockFile.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,35 +20,54 @@
*/
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.futures.FutureNotYetAvailable;
+import java.util.List;
+
+import k.rt.ExecutionException;
+import k.rt.Stack;
+import k.thr.LWThread;
+import k.thr.Yield;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.globus.cog.karajan.compiled.nodes.Node;
+import org.globus.cog.karajan.futures.FutureNotYetAvailable;
+import org.globus.cog.karajan.util.BoundContact;
import org.griphyn.vdl.karajan.lib.cache.CacheReturn;
import org.griphyn.vdl.karajan.lib.cache.File;
import org.griphyn.vdl.karajan.lib.cache.VDLFileCache;
public class CacheAddAndLockFile extends CacheFunction {
- public static final String PFILE = "#pfile";
+ private ArgRef<String> file;
+ private ArgRef<String> dir;
+ private ArgRef<BoundContact> host;
+ private ArgRef<Number> size;
+ private Node body;
+
+ private VarRef<File> pfile;
+ private VarRef<List<?>> cacheFilesToRemove;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("file", "dir", "host", "size", block("body")));
+ }
+
+ @Override
+ protected void addLocals(Scope scope) {
+ super.addLocals(scope);
+ pfile = scope.getVarRef(scope.addVar("#pfile"));
+ cacheFilesToRemove = scope.getVarRef(scope.addVar(CACHE_FILES_TO_REMOVE));
+ }
- public static final Arg FILE = new Arg.Positional("file");
- public static final Arg DIR = new Arg.Positional("dir");
- public static final Arg HOST = new Arg.Positional("host");
- public static final Arg SIZE = new Arg.Positional("size");
-
- static {
- setArguments(CacheAddAndLockFile.class, new Arg[] { FILE, DIR, HOST, SIZE });
- }
-
- protected void partialArgumentsEvaluated(VariableStack stack) throws ExecutionException {
- String file = TypeUtil.toString(FILE.getValue(stack));
- String dir = TypeUtil.toString(DIR.getValue(stack));
- Object host = HOST.getValue(stack);
- long size = TypeUtil.toLong(SIZE.getValue(stack));
- VDLFileCache cache = CacheFunction.getCache(stack);
+ protected boolean lock(Stack stack) {
+ String file = this.file.getValue(stack);
+ String dir = this.dir.getValue(stack);
+ BoundContact host = this.host.getValue(stack);
+ long size = this.size.getValue(stack).longValue();
+ VDLFileCache cache = getCache(stack);
File f = new File(file, dir, host, size);
- stack.setVar(PFILE, f);
+ pfile.setValue(stack, f);
CacheReturn cr = cache.addAndLockEntry(f);
if (cr.alreadyCached) {
if (cr.cached.isLockedForProcessing()) {
@@ -56,32 +75,60 @@
throw new FutureNotYetAvailable(cr.cached);
}
else {
- complete(stack);
+ return false;
}
}
else {
- super.partialArgumentsEvaluated(stack);
- stack.setVar(CACHE_FILES_TO_REMOVE, cr.remove);
- startRest(stack);
+ cacheFilesToRemove.setValue(stack, cr.remove);
+ return true;
}
}
+
+ @Override
+ protected void runBody(LWThread thr) {
+ int i = thr.checkSliceAndPopState();
+ int fc = thr.popIntState();
+ Stack stack = thr.getStack();
+ try {
+ switch (i) {
+ case 0:
+ fc = stack.frameCount();
+ i++;
+ case 1:
+ if (!lock(stack)) {
+ break;
+ }
+ i++;
+ case 2:
+ body.run(thr);
+ unlock(stack);
+ }
+ }
+ catch (ExecutionException e) {
+ stack.dropToFrame(fc);
+ removeEntry(stack);
+ throw e;
+ }
+ catch (Yield y) {
+ y.getState().push(fc);
+ y.getState().push(i);
+ throw y;
+ }
+ }
- protected void post(VariableStack stack) throws ExecutionException {
- File f = (File) stack.currentFrame().getVar(PFILE);
+ protected void unlock(Stack stack) {
+ File f = pfile.getValue(stack);
if (f == null) {
throw new ExecutionException("Weird inconsistency in " + this
+ ". The file was not found on the current frame.");
}
- VDLFileCache cache = CacheFunction.getCache(stack);
+ VDLFileCache cache = getCache(stack);
cache.unlockFromProcessing(f);
- super.post(stack);
}
- public void failed(VariableStack stack, ExecutionException e)
- throws ExecutionException {
- VDLFileCache cache = CacheFunction.getCache(stack);
- cache.entryRemoved((File) stack.currentFrame().getVar(PFILE));
- super.failed(stack, e);
+ public void removeEntry(Stack stack) {
+ VDLFileCache cache = getCache(stack);
+ cache.entryRemoved(pfile.getValue(stack));
}
}
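
Aside (not part of the diff): the runBody() rewrite above is the general re-entry pattern for k.thr lightweight threads used throughout this branch: the method is re-executed after every Yield, so it pops its saved position, falls through a switch to resume where it stopped, and pushes the position back whenever a Yield escapes. Reduced to its bones, using only the k.thr calls that appear above; ResumableSteps and the step methods are illustrative:

    import k.thr.LWThread;
    import k.thr.Yield;

    // Two sequential steps that may suspend; mirrors the switch structure of runBody() above.
    public abstract class ResumableSteps {
        protected void runBody(LWThread thr) {
            int i = thr.checkSliceAndPopState();   // 0 on first entry, saved position on re-entry
            try {
                switch (i) {
                    case 0:
                        stepOne(thr);
                        i++;
                        // fall through
                    case 1:
                        stepTwo(thr);              // may throw Yield; re-entered later at case 1
                }
            }
            catch (Yield y) {
                y.getState().push(i);              // record where to resume
                throw y;
            }
        }

        protected abstract void stepOne(LWThread thr);
        protected abstract void stepTwo(LWThread thr);
    }
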
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/CacheAddFile.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/CacheAddFile.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/CacheAddFile.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,37 +20,72 @@
*/
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import java.util.List;
+
+import k.rt.ExecutionException;
+import k.rt.Stack;
+import k.thr.LWThread;
+import k.thr.Yield;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.globus.cog.karajan.compiled.nodes.Node;
+import org.globus.cog.karajan.util.BoundContact;
import org.griphyn.vdl.karajan.lib.cache.CacheReturn;
import org.griphyn.vdl.karajan.lib.cache.File;
import org.griphyn.vdl.karajan.lib.cache.VDLFileCache;
public class CacheAddFile extends CacheFunction {
- public static final Arg FILE = new Arg.Positional("file");
- public static final Arg DIR = new Arg.Positional("dir");
- public static final Arg HOST = new Arg.Positional("host");
- public static final Arg SIZE = new Arg.Positional("size");
+ private ArgRef<String> file;
+ private ArgRef<String> dir;
+ private ArgRef<BoundContact> host;
+ private ArgRef<Number> size;
+ private Node body;
+
+ private VarRef<List<?>> cacheFilesToRemove;
- static {
- setArguments(CacheAddFile.class, new Arg[] { FILE, DIR, HOST, SIZE });
- }
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("file", "dir", "host", "size", block("body")));
+ }
+
+ @Override
+ protected void addLocals(Scope scope) {
+ super.addLocals(scope);
+ cacheFilesToRemove = scope.getVarRef(scope.addVar(CACHE_FILES_TO_REMOVE));
+ }
- public void partialArgumentsEvaluated(VariableStack stack) throws ExecutionException {
- String file = TypeUtil.toString(FILE.getValue(stack));
- String dir = TypeUtil.toString(DIR.getValue(stack));
- Object host = HOST.getValue(stack);
- long size = TypeUtil.toLong(SIZE.getValue(stack));
- VDLFileCache cache = getCache(stack);
+ @Override
+ protected void runBody(LWThread thr) {
+ int i = thr.checkSliceAndPopState();
+ try {
+ switch (i) {
+ case 0:
+ add(thr.getStack());
+ i++;
+ case 1:
+ body.run(thr);
+ }
+ }
+ catch (Yield y) {
+ y.getState().push(i);
+ throw y;
+ }
+ }
+
+ public void add(Stack stack) {
+ String file = this.file.getValue(stack);
+ String dir = this.dir.getValue(stack);
+ BoundContact host = this.host.getValue(stack);
+ long size = this.size.getValue(stack).longValue();
+ VDLFileCache cache = getCache(stack);
File f = new File(file, dir, host, size);
CacheReturn cr = cache.addEntry(f);
if (cr.alreadyCached) {
- throw new ExecutionException("The cache already contains " + f + ".");
+ throw new ExecutionException(this, "The cache already contains " + f + ".");
}
- super.partialArgumentsEvaluated(stack);
- stack.setVar(CacheFunction.CACHE_FILES_TO_REMOVE, cr.remove);
- startNext(stack);
+ cacheFilesToRemove.setValue(stack, cr.remove);
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/CacheFileRemoved.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/CacheFileRemoved.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/CacheFileRemoved.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,28 +20,31 @@
*/
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.Stack;
+import k.thr.LWThread;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.karajan.lib.cache.File;
import org.griphyn.vdl.karajan.lib.cache.VDLFileCache;
public class CacheFileRemoved extends CacheFunction {
- public static final Arg PATH = new Arg.Positional("path");
- public static final Arg HOST = new Arg.Positional("host");
+ private ArgRef<String> path;
+ private ArgRef<Object> host;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("path", "host"));
+ }
- static {
- setArguments(CacheFileRemoved.class, new Arg[] { PATH, HOST });
- }
- public void partialArgumentsEvaluated(VariableStack stack) throws ExecutionException {
- String path = TypeUtil.toString(PATH.getValue(stack));
- Object host = HOST.getValue(stack);
- VDLFileCache cache = getCache(stack);
- File f = new File(path, host, 0);
- cache.entryRemoved(f);
- complete(stack);
- }
-
+ @Override
+ protected void runBody(LWThread thr) {
+ Stack stack = thr.getStack();
+ String path = this.path.getValue(stack);
+ Object host = this.host.getValue(stack);
+ VDLFileCache cache = getCache(stack);
+ File f = new File(path, host, 0);
+ cache.entryRemoved(f);
+ }
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/CacheFunction.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/CacheFunction.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/CacheFunction.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,29 +20,53 @@
*/
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.PartialArgumentsContainer;
+import k.rt.Context;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.CompilationException;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.globus.cog.karajan.compiled.nodes.Node;
+import org.globus.cog.karajan.compiled.nodes.InternalFunction;
+import org.globus.cog.karajan.parser.WrapperNode;
import org.griphyn.vdl.karajan.functions.ConfigProperty;
import org.griphyn.vdl.karajan.lib.cache.VDLFileCache;
import org.griphyn.vdl.karajan.lib.cache.VDLFileCacheFactory;
+import org.griphyn.vdl.util.VDL2Config;
import org.griphyn.vdl.util.VDL2ConfigProperties;
-public abstract class CacheFunction extends PartialArgumentsContainer {
- public static final String CACHE_FILES_TO_REMOVE = "cachefilestoremove";
+public abstract class CacheFunction extends InternalFunction {
+ public static final String CACHE_FILES_TO_REMOVE = "cacheFilesToRemove";
- public static final String VDL_FILE_CACHE = "vdl:filecache";
+ private VarRef<Context> context;
+ private VDLFileCache cache;
+
+ @Override
+ protected Node compileBody(WrapperNode w, Scope argScope, Scope scope)
+ throws CompilationException {
+ context = scope.getVarRef("#context");
+ return super.compileBody(w, argScope, scope);
+ }
- protected static VDLFileCache getCache(VariableStack stack) throws ExecutionException {
- VDLFileCache cache;
- synchronized (stack.getExecutionContext()) {
- cache = (VDLFileCache) stack.getGlobal(VDL_FILE_CACHE);
- if (cache == null) {
- cache = VDLFileCacheFactory.newInstance(ConfigProperty.getProperty(
- VDL2ConfigProperties.CACHING_ALGORITHM, stack));
- stack.setGlobal(VDL_FILE_CACHE, cache);
- }
- }
- return cache;
+ protected VDLFileCache getCache(Stack stack) {
+ synchronized(this) {
+ if (cache == null) {
+ cache = getOrCreateCache(stack);
+ }
+ return cache;
+ }
}
+
+ private VDLFileCache getOrCreateCache(Stack stack) {
+ Context ctx = context.getValue(stack);
+ synchronized(ctx) {
+ VDLFileCache cache = (VDLFileCache) ctx.getAttribute("SWIFT:FILE_CACHE");
+ if (cache == null) {
+ cache = VDLFileCacheFactory.newInstance(ConfigProperty.getProperty(
+ VDL2ConfigProperties.CACHING_ALGORITHM, (VDL2Config) ctx.getAttribute("SWIFT:CONFIG")));
+ ctx.setAttribute("SWIFT:FILE_CACHE", cache);
+ }
+ return cache;
+ }
+ }
}
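
The rewritten getCache() above uses two levels of caching: a per-node field guarded by the node's own lock, and a per-run attribute stored on the k.rt.Context under "SWIFT:FILE_CACHE", resolved through the "#context" VarRef bound in compileBody(). A condensed sketch of that lookup order, assuming only the Context getAttribute/setAttribute calls shown in this diff (makeCache() stands in for the VDLFileCacheFactory call above):

    protected VDLFileCache getCache(Stack stack) {
        synchronized (this) {                        // fast path: reuse the per-node reference
            if (cache == null) {
                Context ctx = context.getValue(stack);
                synchronized (ctx) {                 // slow path: one shared cache per run
                    cache = (VDLFileCache) ctx.getAttribute("SWIFT:FILE_CACHE");
                    if (cache == null) {
                        cache = makeCache();         // illustrative; see getOrCreateCache() above
                        ctx.setAttribute("SWIFT:FILE_CACHE", cache);
                    }
                }
            }
            return cache;
        }
    }
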
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/CacheUnlockFiles.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/CacheUnlockFiles.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/CacheUnlockFiles.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -21,44 +21,77 @@
package org.griphyn.vdl.karajan.lib;
import java.util.ArrayList;
-import java.util.Iterator;
import java.util.List;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.Stack;
+import k.thr.LWThread;
+import k.thr.Yield;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.globus.cog.karajan.compiled.nodes.Node;
+import org.globus.cog.karajan.util.BoundContact;
import org.griphyn.vdl.karajan.lib.cache.CacheReturn;
import org.griphyn.vdl.karajan.lib.cache.File;
import org.griphyn.vdl.karajan.lib.cache.VDLFileCache;
import org.griphyn.vdl.mapping.AbsFile;
public class CacheUnlockFiles extends CacheFunction {
- public static final Arg FILE = new Arg.Positional("files");
- public static final Arg DIR = new Arg.Positional("dir");
- public static final Arg HOST = new Arg.Positional("host");
- public static final Arg FORCE = new Arg.Optional("force", Boolean.TRUE);
+ private ArgRef<List<?>> file;
+ private ArgRef<String> dir;
+ private ArgRef<BoundContact> host;
+ private ArgRef<Boolean> force;
+ private Node body;
+
+ private VarRef<List<?>> cacheFilesToRemove;
- static {
- setArguments(CacheUnlockFiles.class, new Arg[] { FILE, DIR, HOST, FORCE });
- }
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("file", "dir", "host", optional("force", Boolean.TRUE), block("body")));
+ }
+
+ @Override
+ protected void addLocals(Scope scope) {
+ super.addLocals(scope);
+ cacheFilesToRemove = scope.getVarRef(scope.addVar(CACHE_FILES_TO_REMOVE));
+ }
- public void partialArgumentsEvaluated(VariableStack stack) throws ExecutionException {
- List pairs = TypeUtil.toList(FILE.getValue(stack));
- String dir = TypeUtil.toString(DIR.getValue(stack));
- Object host = HOST.getValue(stack);
- VDLFileCache cache = getCache(stack);
- List rem = new ArrayList();
-
- Iterator i = pairs.iterator();
- while (i.hasNext()) {
- String file = (String) i.next();
- File f = new File(new AbsFile(file).getPath(), dir, host, 0);
- CacheReturn cr = cache.unlockEntry(f, TypeUtil.toBoolean(FORCE.getValue(stack)));
- rem.addAll(cr.remove);
- }
- super.partialArgumentsEvaluated(stack);
- stack.setVar(CACHE_FILES_TO_REMOVE, rem);
- startRest(stack);
- }
+ @Override
+ protected void runBody(LWThread thr) {
+ int i = thr.checkSliceAndPopState();
+ try {
+ switch (i) {
+ case 0:
+ remove(thr.getStack());
+ i++;
+ case 1:
+ body.run(thr);
+ }
+ }
+ catch (Yield y) {
+ y.getState().push(i);
+ throw y;
+ }
+ }
+
+ public void remove(Stack stack) {
+ List<?> pairs = this.file.getValue(stack);
+ String dir = this.dir.getValue(stack);
+ Object host = this.host.getValue(stack);
+ VDLFileCache cache = getCache(stack);
+ List<Object> rem = new ArrayList<Object>();
+
+ boolean force = this.force.getValue(stack);
+
+ for (Object o : pairs) {
+ String file = (String) o;
+ File f = new File(new AbsFile(file).getPath(), dir, host, 0);
+ CacheReturn cr = cache.unlockEntry(f, force);
+ rem.addAll(cr.remove);
+ }
+
+ cacheFilesToRemove.setValue(stack, rem);
+ }
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/CleanDataset.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/CleanDataset.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/CleanDataset.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,24 +20,25 @@
*/
package org.griphyn.vdl.karajan.lib;
-import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.file.FileGarbageCollector;
-public class CleanDataset extends VDLFunction {
- public static final Logger logger = Logger.getLogger(CleanDataset.class);
-
- public static final Arg.Optional OA_SHUTDOWN = new Arg.Optional("shutdown");
-
- static {
- setArguments(CleanDataset.class, new Arg[] { PA_VAR, OA_SHUTDOWN });
- }
+public class CleanDataset extends SwiftFunction {
+ private ArgRef<AbstractDataNode> var;
+ private ArgRef<Boolean> shutdown;
- public Object function(VariableStack stack) throws ExecutionException {
- if (OA_SHUTDOWN.isPresent(stack)) {
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", optional("shutdown", Boolean.FALSE)));
+ }
+
+ public Object function(Stack stack) {
+ boolean shutdown = this.shutdown.getValue(stack);
+ if (shutdown) {
// signals that everything is done and the main program should wait for the
// garbage collector to finish everything
try {
@@ -48,7 +49,7 @@
}
}
else {
- AbstractDataNode var = (AbstractDataNode) PA_VAR.getValue(stack);
+ AbstractDataNode var = this.var.getValue(stack);
if (logger.isInfoEnabled()) {
logger.info("Cleaning " + var);
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/CloseDataset.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/CloseDataset.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/CloseDataset.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,43 +20,47 @@
*/
package org.griphyn.vdl.karajan.lib;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.InvalidPathException;
import org.griphyn.vdl.mapping.Path;
-public class CloseDataset extends VDLFunction {
+public class CloseDataset extends SwiftFunction {
public static final Logger logger = Logger.getLogger(CloseDataset.class);
- public static final Arg OA_CHILDREN_ONLY = new Arg.Optional("childrenOnly", Boolean.FALSE);
+ private ArgRef<DSHandle> var;
+ private ArgRef<Object> path;
+ private ArgRef<Boolean> childrenOnly;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", optional("path", Path.EMPTY_PATH), optional("childrenOnly", Boolean.FALSE)));
+ }
- static {
- setArguments(CloseDataset.class, new Arg[] { PA_VAR, OA_PATH, OA_CHILDREN_ONLY });
- }
-
- public Object function(VariableStack stack) throws ExecutionException {
- Path path = parsePath(OA_PATH.getValue(stack), stack);
- DSHandle var = (DSHandle) PA_VAR.getValue(stack);
+ public Object function(Stack stack) {
+ Path path = parsePath(this.path.getValue(stack));
+ DSHandle var = this.var.getValue(stack);
try {
if (logger.isDebugEnabled()) {
logger.debug("Closing " + var);
}
var = var.getField(path);
- if (TypeUtil.toBoolean(OA_CHILDREN_ONLY.getValue(stack))) {
- closeChildren(stack, (AbstractDataNode) var);
+ if (childrenOnly.getValue(stack)) {
+ closeChildren((AbstractDataNode) var);
}
else {
var.closeDeep();
}
}
catch (InvalidPathException e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
return null;
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/CreateArray.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/CreateArray.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/CreateArray.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -19,37 +19,39 @@
import java.util.List;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.futures.FutureFault;
-import org.griphyn.vdl.mapping.AbstractDataNode;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.futures.FutureFault;
import org.griphyn.vdl.mapping.DSHandle;
+import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.MappingParam;
import org.griphyn.vdl.mapping.MappingParamSet;
+import org.griphyn.vdl.mapping.OOBYield;
import org.griphyn.vdl.mapping.Path;
import org.griphyn.vdl.mapping.RootArrayDataNode;
import org.griphyn.vdl.type.Field;
import org.griphyn.vdl.type.Type;
-public class CreateArray extends VDLFunction {
-
+public class CreateArray extends SetFieldValue {
public static final Logger logger = Logger.getLogger(CreateArray.class);
-
- public static final Arg PA_VALUE = new Arg.Positional("value");
- static {
- setArguments(CreateArray.class, new Arg[] { PA_VALUE });
- }
+ private ArgRef<Object> value;
- public Object function(VariableStack stack) throws ExecutionException {
- Object value = PA_VALUE.getValue(stack);
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("value"));
+ }
+
+ public Object function(Stack stack) {
+ Object value = this.value.getValue(stack);
try {
if (!(value instanceof List)) {
- throw new RuntimeException(
- "An array variable can only be initialized with a list of values");
+ throw new RuntimeException("An array variable can only be initialized with a list of values");
}
Type type = checkTypes((List<?>) value);
@@ -59,12 +61,7 @@
setMapper(handle);
}
- /*
- * The reason this is disabled without provenance is that the identifier
- * is essentially a random number plus a counter. It does not help
- * in debugging problems.
- */
- if (AbstractDataNode.provenance && logger.isInfoEnabled()) {
+ if (logger.isInfoEnabled()) {
logger.info("CREATEARRAY START array=" + handle.getIdentifier());
}
@@ -79,9 +76,9 @@
DSHandle dst = handle.getField(p);
- SetFieldValue.deepCopy(dst, n, stack, 1);
+ deepCopy(dst, n, stack);
- if (AbstractDataNode.provenance && logger.isInfoEnabled()) {
+ if (logger.isInfoEnabled()) {
logger.info("CREATEARRAY MEMBER array=" + handle.getIdentifier()
+ " index=" + index + " member=" + n.getIdentifier());
}
@@ -90,7 +87,7 @@
handle.closeShallow();
- if (AbstractDataNode.provenance && logger.isInfoEnabled()) {
+ if (logger.isInfoEnabled()) {
logger.info("CREATEARRAY COMPLETED array=" + handle.getIdentifier());
}
@@ -100,7 +97,7 @@
throw e;
}
catch (Exception e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
@@ -109,7 +106,15 @@
MappingParamSet params = new MappingParamSet();
params.set(MappingParam.SWIFT_DESCRIPTOR, "concurrent_mapper");
params.set(MappingParam.SWIFT_DBGNAME, "arrayexpr");
- handle.init(params);
+ try {
+ handle.init(params);
+ }
+ catch (OOBYield y) {
+ throw y.wrapped(this);
+ }
+ catch (HandleOpenException e) {
+ throw new ExecutionException(this, "Plain HandleOpenException caught", e);
+ }
}
private boolean hasMappableFields(Type type) {
Added: branches/faster/src/org/griphyn/vdl/karajan/lib/CurrentThread.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/CurrentThread.java (rev 0)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/CurrentThread.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -0,0 +1,30 @@
+//----------------------------------------------------------------------
+//This code is developed as part of the Java CoG Kit project
+//The terms of the license can be found at http://www.cogkit.org/license
+//This message may not be removed or altered.
+//----------------------------------------------------------------------
+
+/*
+ * Created on Jul 13, 2012
+ */
+package org.griphyn.vdl.karajan.lib;
+
+import k.thr.LWThread;
+
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.compiled.nodes.InternalFunction;
+
+public class CurrentThread extends InternalFunction {
+ private ChannelRef<String> cr_vargs;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params(), returns(channel("...", 1)));
+ }
+
+ @Override
+ public void run(LWThread thr) {
+ cr_vargs.append(thr.getStack(), thr.getName());
+ }
+}
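
CurrentThread shows the smallest form of the new function API: the signature declares no parameters and a single value returned on the default ("...") channel, and run() appends that value through the ChannelRef. A sketch of the same shape for a hypothetical function that emits a constant (SketchEcho and the literal string are illustrative; every call mirrors CurrentThread above):

    // imports as in CurrentThread above
    public class SketchEcho extends InternalFunction {
        private ChannelRef<String> cr_vargs;          // bound to the "..." return channel

        @Override
        protected Signature getSignature() {
            // params(): no arguments; channel("...", 1): exactly one value on the default channel
            return new Signature(params(), returns(channel("...", 1)));
        }

        @Override
        public void run(LWThread thr) {
            cr_vargs.append(thr.getStack(), "hello"); // illustrative constant value
        }
    }
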
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/DoRestartLog.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/DoRestartLog.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/DoRestartLog.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,40 +20,52 @@
*/
package org.griphyn.vdl.karajan.lib;
+import java.util.Collection;
import java.util.List;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.arguments.ArgUtil;
-import org.globus.cog.karajan.arguments.VariableArguments;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.AbstractSequentialWithArguments;
+import k.rt.Channel;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+import k.thr.LWThread;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.compiled.nodes.InternalFunction;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.Path;
-public class DoRestartLog extends AbstractSequentialWithArguments {
- public static final Arg RESTARTOUTS = new Arg.Positional("restartouts");
-
- static {
- setArguments(DoRestartLog.class, new Arg[] { RESTARTOUTS });
+public class DoRestartLog extends InternalFunction {
+
+ private ArgRef<List<List<Object>>> restartouts;
+ private ChannelRef<Object> cr_vargs;
+ private ChannelRef<Object> cr_restartLog;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("restartouts"), returns(channel("...", DYNAMIC), channel("restartLog", DYNAMIC)));
}
@Override
- protected void post(VariableStack stack) throws ExecutionException {
- List files = TypeUtil.toList(RESTARTOUTS.getValue(stack));
- VariableArguments ret = ArgUtil.getVariableReturn(stack);
+ protected void runBody(LWThread thr) {
+ Stack stack = thr.getStack();
+ Collection<List<Object>> files = restartouts.getValue(stack);
+ Channel<Object> ret = cr_vargs.get(stack);
+ Channel<Object> log = cr_restartLog.get(stack);
try {
- for (Object f : files) {
- List pv = TypeUtil.toList(f);
+ for (List<Object> pv : files) {
Path p = (Path) pv.get(0);
DSHandle handle = (DSHandle) pv.get(1);
- LogVar.logVar(stack, handle, p);
+ LogVar.logVar(log, handle, p);
}
}
catch (Exception e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
- super.post(stack);
}
+
+ @Override
+ public String toString() {
+ return super.toString();
+ }
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/Executable.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/Executable.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/Executable.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,27 +20,31 @@
*/
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.globus.cog.karajan.util.BoundContact;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
import org.globus.swift.catalog.TCEntry;
import org.griphyn.vdl.karajan.TCCache;
import org.griphyn.vdl.util.FQN;
-public class Executable extends VDLFunction {
- public static final Arg PA_TR = new Arg.Positional("tr");
- public static final Arg PA_HOST = new Arg.Positional("host");
+public class Executable extends SwiftFunction {
+
+ private ArgRef<String> tr;
+ private ArgRef<BoundContact> host;
+
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("tr", "host"));
+ }
- static {
- setArguments(Executable.class, new Arg[] { PA_TR, PA_HOST });
- }
- public Object function(VariableStack stack) throws ExecutionException {
+ public Object function(Stack stack) {
TCCache tc = getTC(stack);
- String tr = TypeUtil.toString(PA_TR.getValue(stack));
- BoundContact bc = (BoundContact) PA_HOST.getValue(stack);
+ String tr = this.tr.getValue(stack);
+ BoundContact bc = this.host.getValue(stack);
TCEntry tce = getTCE(tc, new FQN(tr), bc);
if (tce == null) {
return tr;
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/Execute.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/Execute.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/Execute.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,5 +1,5 @@
/*
- * Copyright 2012 University of Chicago
+ * Copyright 2012 University of Chicago
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,129 +20,145 @@
*/
package org.griphyn.vdl.karajan.lib;
+import k.rt.Abort;
+import k.rt.Channel;
+import k.rt.ConditionalYield;
+import k.rt.Context;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+import k.thr.LWThread;
+
import org.apache.log4j.Logger;
import org.globus.cog.abstraction.impl.common.StatusEvent;
import org.globus.cog.abstraction.interfaces.Status;
import org.globus.cog.abstraction.interfaces.Task;
-import org.globus.cog.karajan.arguments.Arg;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Param;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.globus.cog.karajan.compiled.nodes.grid.GridExec;
+import org.globus.cog.karajan.compiled.nodes.grid.TaskStateFuture;
import org.globus.cog.karajan.scheduler.Scheduler;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.KarajanRuntimeException;
-import org.globus.cog.karajan.workflow.futures.FutureVariableArguments;
-import org.globus.cog.karajan.workflow.nodes.grid.GridExec;
+import org.griphyn.vdl.karajan.lib.RuntimeStats.ProgressState;
import org.griphyn.vdl.karajan.lib.replication.CanceledReplicaException;
import org.griphyn.vdl.karajan.lib.replication.ReplicationManager;
public class Execute extends GridExec {
public static final Logger logger = Logger.getLogger(Execute.class);
+
+ private ArgRef<String> replicationGroup;
+ private ArgRef<Channel<Object>> replicationChannel;
+ private ArgRef<String> jobid;
+ private ArgRef<ProgressState> progress;
+
+ private VarRef<Context> context;
+
+ @Override
+ protected Signature getSignature() {
+ Signature sig = super.getSignature();
+ sig.getParams().add(0, new Param("progress", Param.Type.POSITIONAL));
+ sig.getParams().add(optional("replicationGroup", null));
+ sig.getParams().add(optional("replicationChannel", null));
+ sig.getParams().add(optional("jobid", null));
+ return sig;
+ }
+
+ @Override
+ protected void addLocals(Scope scope) {
+ super.addLocals(scope);
+ context = scope.getVarRef("#context");
+ }
- public static final String REPLICATION_MANAGER = "execute:replication-manager";
-
- public static final Arg A_REPLICATION_GROUP = new Arg.Optional("replicationGroup");
- public static final Arg A_REPLICATION_CHANNEL = new Arg.Optional("replicationChannel");
- public static final Arg A_JOBID = new Arg.Optional("jobid");
-
- static {
- setArguments(Execute.class, new Arg[] { A_EXECUTABLE, A_ARGS, A_ARGUMENTS, A_HOST,
- A_STDOUT, A_STDERR, A_STDOUTLOCATION, A_STDERRLOCATION, A_STDIN, A_PROVIDER,
- A_COUNT, A_HOST_COUNT, A_JOBTYPE, A_MAXTIME, A_MAXWALLTIME, A_MAXCPUTIME,
- A_ENVIRONMENT, A_QUEUE, A_PROJECT, A_MINMEMORY, A_MAXMEMORY, A_REDIRECT,
- A_SECURITY_CONTEXT, A_DIRECTORY, A_NATIVESPEC, A_DELEGATION, A_ATTRIBUTES,
- C_ENVIRONMENT, A_FAIL_ON_JOB_ERROR, A_BATCH, A_REPLICATION_GROUP,
- A_REPLICATION_CHANNEL, A_JOBID, C_STAGEIN, C_STAGEOUT, C_CLEANUP });
- }
-
- public Execute() {
- }
-
- public void submitScheduled(Scheduler scheduler, Task task, VariableStack stack,
- Object constraints) throws ExecutionException {
+ @Override
+ public void submitScheduled(Scheduler scheduler, Task task, Stack stack, Object constraints) {
try {
registerReplica(stack, task);
log(task, stack);
- scheduler.addJobStatusListener(this, task);
- setStack(task, stack);
- scheduler.enqueue(task, constraints);
+
+ TaskStateFuture tsf = new SwiftTaskStateFuture(stack, task, false);
+ scheduler.enqueue(task, constraints, tsf);
+ throw new ConditionalYield(1, tsf);
}
catch (CanceledReplicaException e) {
if (logger.isDebugEnabled()) {
logger.debug("Early abort on replicated task " + task);
}
- abort(stack);
+ throw new Abort();
}
}
- void log(Task task, VariableStack stack)
- throws ExecutionException
- {
+ void log(Task task, Stack stack) throws ExecutionException {
if (logger.isDebugEnabled()) {
logger.debug(task);
logger.debug("Submitting task " + task);
}
- String jobid = (String) A_JOBID.getValue(stack,null);
+ String jobid = this.jobid.getValue(stack);
if (logger.isDebugEnabled()) {
- logger.debug("jobid="+jobid+" task=" + task);
+ logger.debug("jobid=" + jobid + " task=" + task);
}
}
- protected void registerReplica(VariableStack stack, Task task) throws CanceledReplicaException {
+ protected void registerReplica(Stack stack, Task task) throws CanceledReplicaException {
setTaskIdentity(stack, task);
- try {
- String rg = TypeUtil.toString(A_REPLICATION_GROUP.getValue(stack, null));
- if (rg != null) {
- getReplicationManager(stack).register(rg, task);
- }
+
+ String rg = this.replicationGroup.getValue(stack);
+ if (rg != null) {
+ getReplicationManager(stack).register(rg, task);
}
- catch (ExecutionException e) {
- throw new KarajanRuntimeException(e);
- }
}
+
+ protected class SwiftTaskStateFuture extends CustomTaskStateFuture {
- public void statusChanged(StatusEvent e) {
- Task task = (Task) e.getSource();
- VariableStack stack = getStack(task);
- try {
- if (stack != null) {
- int c = e.getStatus().getStatusCode();
- if (c == Status.SUBMITTED) {
- RuntimeStats.setProgress(stack, "Submitted");
- getReplicationManager(stack).submitted(task, e.getStatus().getTime());
- }
- else if (c == Status.STAGE_IN) {
- RuntimeStats.setProgress(stack, "Stage in");
- }
- else if (c == Status.STAGE_OUT) {
- RuntimeStats.setProgress(stack, "Stage out");
- }
- else if (c == Status.ACTIVE) {
- RuntimeStats.setProgress(stack, "Active");
- getReplicationManager(stack).active(task, e.getStatus().getTime());
- ((FutureVariableArguments) A_REPLICATION_CHANNEL.getValue(stack)).close();
- }
- else if (e.getStatus().isTerminal()) {
- getReplicationManager(stack).terminated(task);
- }
- else if (c == ReplicationManager.STATUS_NEEDS_REPLICATION) {
- RuntimeStats.setProgress(stack, "Replicating");
- ((FutureVariableArguments) A_REPLICATION_CHANNEL.getValue(stack)).append(Boolean.TRUE);
- }
- }
- }
- catch (ExecutionException ex) {
- logger.warn(ex);
- }
- super.statusChanged(e);
+ public SwiftTaskStateFuture(Stack stack, Task task, boolean taskHasListener) {
+ super(stack, task, taskHasListener);
+ }
+
+ public void statusChanged(StatusEvent e) {
+ Task task = (Task) e.getSource();
+ Stack stack = getStack();
+ try {
+ if (stack != null) {
+ int c = e.getStatus().getStatusCode();
+ ProgressState ps = progress.getValue(stack);
+ if (c == Status.SUBMITTED) {
+ ps.setState("Submitted");
+ getReplicationManager(stack).submitted(task, e.getStatus().getTime());
+ }
+ else if (c == Status.STAGE_IN) {
+ ps.setState("Stage in");
+ }
+ else if (c == Status.STAGE_OUT) {
+ ps.setState("Stage out");
+ }
+ else if (c == Status.ACTIVE) {
+ ps.setState("Active");
+ getReplicationManager(stack).active(task, e.getStatus().getTime());
+ Execute.this.replicationChannel.getValue(stack).close();
+ }
+ else if (e.getStatus().isTerminal()) {
+ getReplicationManager(stack).terminated(task);
+ }
+ else if (c == ReplicationManager.STATUS_NEEDS_REPLICATION) {
+ ps.setState("Replicating");
+ Execute.this.replicationChannel.getValue(stack).add(Boolean.TRUE);
+ }
+ }
+ }
+ catch (ExecutionException ex) {
+ logger.warn(ex);
+ }
+ super.statusChanged(e);
+ }
}
- protected ReplicationManager getReplicationManager(VariableStack stack) throws ExecutionException {
- synchronized (stack.firstFrame()) {
- ReplicationManager rm = (ReplicationManager) stack.firstFrame().getVar(
- REPLICATION_MANAGER);
+ protected ReplicationManager getReplicationManager(Stack stack) throws ExecutionException {
+ Context ctx = this.context.getValue(stack);
+ synchronized (ctx) {
+ ReplicationManager rm = (ReplicationManager) ctx.getAttribute("#replicationManager");
if (rm == null) {
rm = new ReplicationManager(getScheduler(stack));
- stack.firstFrame().setVar(REPLICATION_MANAGER, rm);
+ ctx.setAttribute("#replicationManager", rm);
}
return rm;
}
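
The submitScheduled() change above is the heart of the new execution model: rather than registering the node as a status listener and stashing a VariableStack on the task, the task is wrapped in a TaskStateFuture, handed to the scheduler, and the lightweight thread parks by throwing ConditionalYield until that future is updated; SwiftTaskStateFuture.statusChanged() then drives the progress state and the replication channel as shown above. A condensed sketch of the submit path (replica registration, logging and the CanceledReplicaException handling are omitted; see the full method above):

    @Override
    public void submitScheduled(Scheduler scheduler, Task task, Stack stack, Object constraints) {
        TaskStateFuture tsf = new SwiftTaskStateFuture(stack, task, false);
        scheduler.enqueue(task, constraints, tsf);  // scheduler updates tsf on status changes
        throw new ConditionalYield(1, tsf);         // park this LWThread until tsf is updated
    }
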
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/ExpandArguments.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/ExpandArguments.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/ExpandArguments.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -19,48 +19,47 @@
import java.util.ArrayList;
import java.util.Comparator;
-import java.util.Iterator;
import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import k.rt.Channel;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.ArrayDataNode;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.PathElementComparator;
-public class ExpandArguments extends VDLFunction {
+public class ExpandArguments extends SwiftFunction {
public static final Logger logger = Logger.getLogger(ExpandArguments.class);
+
+ private ChannelRef<Object> c_vargs;
- static {
- setArguments(ExpandArguments.class, new Arg[] { Arg.VARGS });
- }
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("..."));
+ }
- public Object function(VariableStack stack) throws ExecutionException {
- ArrayList l = new ArrayList();
- Object[] items = Arg.VARGS.asArray(stack);
- for (int i = 0; i < items.length; i++) {
- Object item = items[i];
+ public Object function(Stack stack) {
+ ArrayList<DSHandle> l = new ArrayList<DSHandle>();
+ Channel<Object> items = c_vargs.get(stack);
+ for (Object item : items) {
if(!(item instanceof DSHandle)) {
- throw new RuntimeException("Cannot handle argument implemented by "+item.getClass());
+ throw new RuntimeException("Cannot handle argument implemented by " + item.getClass());
}
- if(item instanceof ArrayDataNode) {
+ if (item instanceof ArrayDataNode) {
ArrayDataNode array = (ArrayDataNode) item;
- Map m=array.getArrayValue();
- Set keySet = m.keySet();
- TreeSet<Comparable<?>> sortedKeySet = new TreeSet<Comparable<?>>(new PathElementComparator());
- sortedKeySet.addAll(keySet);
- Iterator it = sortedKeySet.iterator();
- while(it.hasNext()) {
- Object key = it.next();
- l.add(m.get(key));
- }
- } else {
- l.add(item);
+ Map<Comparable<?>, DSHandle> m = array.getArrayValue();
+ SortedMap<Comparable<?>, DSHandle> sorted = new TreeMap<Comparable<?>, DSHandle>(new PathElementComparator());
+ sorted.putAll(m);
+ l.addAll(sorted.values());
+ }
+ else {
+ l.add((DSHandle) item);
}
// TODO this does not correctly handle structs or
// externals - at the moment, probably neither of
@@ -68,11 +67,11 @@
// does not handle nested arrays. However, none of
// those should get here in normal operation due
// to static type-checking
- }
+ }
return l;
}
- class StringsAsIntegersComparator implements Comparator {
+ class StringsAsIntegersComparator implements Comparator<Object> {
public int compare(Object l, Object r) {
Integer lnum = new Integer((String)l);
Integer rnum = new Integer((String)r);
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/FileCopier.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/FileCopier.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/FileCopier.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -17,8 +17,8 @@
package org.griphyn.vdl.karajan.lib;
-import java.util.LinkedList;
-import java.util.List;
+import k.rt.AbstractFuture;
+import k.rt.Future;
import org.globus.cog.abstraction.impl.common.StatusEvent;
import org.globus.cog.abstraction.impl.common.task.FileTransferSpecificationImpl;
@@ -35,21 +35,14 @@
import org.globus.cog.abstraction.interfaces.Status;
import org.globus.cog.abstraction.interfaces.StatusListener;
import org.globus.cog.abstraction.interfaces.TaskHandler;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.futures.Future;
-import org.globus.cog.karajan.workflow.futures.FutureEvaluationException;
-import org.globus.cog.karajan.workflow.futures.FutureListener;
-import org.globus.cog.karajan.workflow.futures.FuturesMonitor;
-import org.globus.cog.karajan.workflow.futures.ListenerStackPair;
+import org.globus.cog.karajan.futures.FutureEvaluationException;
import org.griphyn.vdl.mapping.AbsFile;
import org.griphyn.vdl.mapping.PhysicalFormat;
-public class FileCopier implements Future, StatusListener {
+public class FileCopier extends AbstractFuture implements Future, StatusListener {
private static final TaskHandler fth = new FileTransferTaskHandler();
private FileTransferTask task;
- private List<ListenerStackPair> actions;
private Exception exception;
private boolean closed;
@@ -72,46 +65,9 @@
task.addStatusListener(this);
}
- public synchronized void addModificationAction(FutureListener target,
- VariableStack stack) {
- if (actions == null) {
- actions = new LinkedList<ListenerStackPair>();
- }
- ListenerStackPair etp = new ListenerStackPair(target, stack);
- if (FuturesMonitor.debug) {
- FuturesMonitor.monitor.add(etp, this);
- }
- synchronized (actions) {
- actions.add(etp);
- }
- if (closed) {
- actions();
- }
- }
-
- public List<ListenerStackPair> getModificationActions() {
- return actions;
- }
-
- private void actions() {
- if (actions != null) {
- synchronized (actions) {
- java.util.Iterator<ListenerStackPair> i = actions.iterator();
- while (i.hasNext()) {
- ListenerStackPair etp = i.next();
- if (FuturesMonitor.debug) {
- FuturesMonitor.monitor.remove(etp);
- }
- i.remove();
- etp.listener.futureModified(this, etp.stack);
- }
- }
- }
- }
-
public void fail(FutureEvaluationException e) {
this.exception = e;
- actions();
+ notifyListeners();
}
public Object getValue() {
@@ -130,7 +86,7 @@
public void close() {
closed = true;
- actions();
+ notifyListeners();
}
public void statusChanged(StatusEvent event) {
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/FileName.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/FileName.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/FileName.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,16 +20,24 @@
*/
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.Stack;
-public class FileName extends VDLFunction {
- static {
- setArguments(FileName.class, new Arg[] { PA_VAR });
- }
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.griphyn.vdl.mapping.DSHandle;
- public Object function(VariableStack stack) throws ExecutionException {
- return argList(filename(stack), true);
+public class FileName extends SwiftFunction {
+ private ArgRef<DSHandle> var;
+ private ChannelRef<String> cr_vargs;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ return argList(filename(var.getValue(stack)), true);
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/Flatten.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/Flatten.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/Flatten.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -22,21 +22,24 @@
import java.util.List;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.arguments.VariableArguments;
-import org.globus.cog.karajan.stack.VariableStack;
+import k.rt.Channel;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-public class Flatten extends VDLFunction {
+public class Flatten extends SwiftFunction {
+ private ChannelRef<?> c_vargs;
- static {
- setArguments(Flatten.class, new Arg[] { Arg.VARGS });
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("..."));
}
@Override
- protected Object function(VariableStack stack) throws ExecutionException {
- VariableArguments v = Arg.VARGS.get(stack);
+ public Object function(Stack stack) {
+ Channel<?> v = c_vargs.get(stack);
if (v.isEmpty()) {
return "";
}
@@ -48,10 +51,10 @@
}
}
- private void flatten(StringBuilder sb, List l) {
+ private void flatten(StringBuilder sb, List<?> l) {
for (Object o : l) {
if (o instanceof List) {
- flatten(sb, (List) o);
+ flatten(sb, (List<?>) o);
}
else {
sb.append(TypeUtil.toString(o));
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/FringePaths.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/FringePaths.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/FringePaths.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -22,34 +22,41 @@
import java.util.Collection;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.futures.Future;
-import org.globus.cog.karajan.workflow.futures.FutureNotYetAvailable;
+import k.rt.ExecutionException;
+import k.rt.Future;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.futures.FutureNotYetAvailable;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.InvalidPathException;
+import org.griphyn.vdl.mapping.Path;
-public class FringePaths extends VDLFunction {
+public class FringePaths extends SwiftFunction {
+ private ArgRef<DSHandle> var;
+ private ArgRef<Object> path;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", optional("path", Path.EMPTY_PATH)));
+ }
- static {
- setArguments(FringePaths.class, new Arg[] { PA_VAR, OA_PATH });
- }
-
- public Object function(VariableStack stack) throws ExecutionException {
- DSHandle var = (DSHandle) PA_VAR.getValue(stack);
+ @Override
+ public Object function(Stack stack) {
+ DSHandle var = this.var.getValue(stack);
DSHandle root = var.getRoot();
try {
- var = var.getField(parsePath(OA_PATH.getValue(stack), stack));
- Collection c;
+ var = var.getField(parsePath(path.getValue(stack)));
+ Collection<Path> c;
synchronized(root) {
c = var.getFringePaths();
}
return c;
}
catch (InvalidPathException e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
catch (HandleOpenException e) {
throw new FutureNotYetAvailable((Future) e.getSource());
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/GetArrayIterator.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/GetArrayIterator.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/GetArrayIterator.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -17,34 +17,37 @@
package org.griphyn.vdl.karajan.lib;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.karajan.PairIterator;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.griphyn.vdl.karajan.PairSet;
import org.griphyn.vdl.mapping.ArrayDataNode;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.InvalidPathException;
import org.griphyn.vdl.mapping.Path;
-public class GetArrayIterator extends VDLFunction {
+public class GetArrayIterator extends SwiftFunction {
public static final Logger logger = Logger.getLogger(GetArrayIterator.class);
- static {
- setArguments(GetArrayIterator.class, new Arg[] { PA_VAR, OA_PATH });
- }
+ private ArgRef<DSHandle> var;
+ private ArgRef<Object> path;
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", optional("path", Path.EMPTY_PATH)));
+ }
+
/**
* Takes a supplied variable and path, and returns an array iterator.
*/
- public Object function(VariableStack stack) throws ExecutionException {
- Object var1 = PA_VAR.getValue(stack);
- if (!(var1 instanceof DSHandle)) {
- return var1;
- }
- DSHandle var = (DSHandle) var1;
+ @Override
+ public Object function(Stack stack) {
+ DSHandle var = this.var.getValue(stack);
try {
- Path path = parsePath(OA_PATH.getValue(stack), stack);
+ Path path = parsePath(this.path.getValue(stack));
if (path.hasWildcards()) {
throw new RuntimeException("Wildcards not supported");
}
@@ -58,7 +61,7 @@
if (logger.isDebugEnabled()) {
logger.debug("Using closed iterator for " + var);
}
- return new PairIterator(var.getArrayValue());
+ return new PairSet(var.getArrayValue());
}
else {
if (logger.isDebugEnabled()) {
@@ -70,8 +73,7 @@
}
}
catch (InvalidPathException e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
-
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/GetDatasetProvenanceID.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/GetDatasetProvenanceID.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/GetDatasetProvenanceID.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -17,19 +17,23 @@
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.DSHandle;
-public class GetDatasetProvenanceID extends VDLFunction {
+public class GetDatasetProvenanceID extends SwiftFunction {
+ private ArgRef<DSHandle> var;
- static {
- setArguments(GetDatasetProvenanceID.class, new Arg[] { PA_VAR });
- }
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var"));
+ }
- public Object function(VariableStack stack) throws ExecutionException {
- DSHandle var = (DSHandle) PA_VAR.getValue(stack);
+ @Override
+ public Object function(Stack stack) {
+ DSHandle var = this.var.getValue(stack);
return var.getIdentifier();
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/GetField.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/GetField.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/GetField.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,9 +20,11 @@
*/
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.InvalidPathException;
import org.griphyn.vdl.mapping.Path;
@@ -30,28 +32,26 @@
/**
* Obtain the DSHandle from within another DSHandle via the given PATH
* */
-public class GetField extends VDLFunction {
- static {
- setArguments(GetField.class, new Arg[] { OA_PATH, PA_VAR });
- }
+public class GetField extends SwiftFunction {
+ private ArgRef<DSHandle> var;
+ private ArgRef<Object> path;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", optional("path", Path.EMPTY_PATH)));
+ }
- public Object function(VariableStack stack) throws ExecutionException {
- Object var1 = PA_VAR.getValue(stack);
+ @Override
+ public Object function(Stack stack) {
+ DSHandle var = this.var.getValue(stack);
- if(var1 instanceof DSHandle) {
-
- try {
- DSHandle var = (DSHandle) var1;
-
- Path path = parsePath(OA_PATH.getValue(stack), stack);
- DSHandle field = var.getField(path);
- return field;
- }
- catch (InvalidPathException e) {
- throw new ExecutionException(e);
- }
- } else {
- throw new ExecutionException("was expecting a DSHandle, got: "+var1.getClass());
+ try {
+ Path path = parsePath(this.path.getValue(stack));
+ DSHandle field = var.getField(path);
+ return field;
}
+ catch (InvalidPathException e) {
+ throw new ExecutionException(this, e);
+ }
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/GetFieldSubscript.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/GetFieldSubscript.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/GetFieldSubscript.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -19,31 +19,31 @@
import java.util.Collection;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.InvalidPathException;
import org.griphyn.vdl.mapping.Path;
-public class GetFieldSubscript extends VDLFunction {
+public class GetFieldSubscript extends SwiftFunction {
+ private ArgRef<DSHandle> var;
+ private ArgRef<Object> subscript;
- public static final SwiftArg PA_SUBSCRIPT = new SwiftArg.Positional("subscript");
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", "subscript"));
+ }
- static {
- setArguments(GetFieldSubscript.class, new Arg[] { PA_VAR, PA_SUBSCRIPT });
- }
+ @Override
+ public Object function(Stack stack) {
+ DSHandle var = this.var.getValue(stack);
- public Object function(VariableStack stack) throws ExecutionException {
- Object var1 = PA_VAR.getValue(stack);
- if(!(var1 instanceof DSHandle)) {
- throw new ExecutionException("was expecting a dshandle, got: "+var1.getClass());
- }
- DSHandle var = (DSHandle) var1;
+ Object index = this.subscript.getValue(stack);
- Object index = PA_SUBSCRIPT.getValue(stack);
-
try {
Path path;
if ("*".equals(index)) {
@@ -61,10 +61,10 @@
}
}
catch (InvalidPathException e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
catch (HandleOpenException e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/GetFieldValue.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/GetFieldValue.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/GetFieldValue.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,35 +20,37 @@
*/
package org.griphyn.vdl.karajan.lib;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.futures.FutureFault;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.futures.FutureFault;
import org.griphyn.vdl.mapping.AbstractDataNode;
-import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.Path;
-public class GetFieldValue extends VDLFunction {
+public class GetFieldValue extends SwiftFunction {
public static final Logger logger = Logger.getLogger(GetFieldValue.class);
+
+ private ArgRef<AbstractDataNode> var;
+ private ArgRef<Object> path;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", optional("path", Path.EMPTY_PATH)));
+ }
- static {
- setArguments(GetFieldValue.class, new Arg[] { PA_VAR, OA_PATH });
- }
-
/**
* Takes a supplied variable and path, and returns the unique value at that
* path. Path can contain wildcards, in which case an array is returned.
*/
- public Object function(VariableStack stack) throws ExecutionException {
- Object var1 = PA_VAR.getValue(stack);
- if (!(var1 instanceof DSHandle)) {
- return var1;
- }
- AbstractDataNode var = (AbstractDataNode) var1;
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode var = this.var.getValue(stack);
try {
- Path path = parsePath(OA_PATH.getValue(stack), stack);
+ Path path = parsePath(this.path.getValue(stack));
if (path.hasWildcards()) {
return var.getFields(path).toArray();
}
@@ -57,7 +59,7 @@
if (var.getType().isArray()) {
throw new RuntimeException("Getting value for array " + var + " which is not permitted.");
}
- var.waitFor();
+ var.waitFor(this);
return var.getValue();
}
}
@@ -65,7 +67,7 @@
throw f;
}
catch (Exception e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/GetURLPrefix.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/GetURLPrefix.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/GetURLPrefix.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,30 +20,43 @@
*/
package org.griphyn.vdl.karajan.lib;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.List;
+import k.rt.Context;
+import k.rt.Stack;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.AbstractSequentialWithArguments;
+import org.globus.cog.karajan.analyzer.Param;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.globus.cog.karajan.compiled.nodes.functions.AbstractSingleValuedFunction;
import org.griphyn.vdl.karajan.functions.ConfigProperty;
+import org.griphyn.vdl.util.VDL2Config;
-public class GetURLPrefix extends AbstractSequentialWithArguments {
+public class GetURLPrefix extends AbstractSingleValuedFunction {
+ private VarRef<Context> context;
+ private VarRef<String> cwd;
@Override
- protected void post(VariableStack stack) throws ExecutionException {
+ protected Param[] getParams() {
+ return params();
+ }
+
+ @Override
+ protected void addLocals(Scope scope) {
+ super.addLocals(scope);
+ context = scope.getVarRef("#context");
+ cwd = scope.getVarRef("CWD");
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ Context ctx = this.context.getValue(stack);
+ String localServerBase = ConfigProperty.getProperty("wrapper.staging.local.server",
+ (VDL2Config) ctx.getAttribute("SWIFT:CONFIG"));
+ String cwd = this.cwd.getValue(stack);
- String localServerBase = ConfigProperty.getProperty("wrapper.staging.local.server", stack);
-
- String cwd = stack.getExecutionContext().getCwd();
if (cwd.endsWith("/.")) {
cwd = cwd.substring(0, cwd.length() - 2);
}
- ret(stack, localServerBase + cwd);
-
- super.post(stack);
+ return localServerBase + cwd;
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/InFileDirs.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/InFileDirs.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/InFileDirs.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -22,28 +22,33 @@
import java.util.List;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.arguments.ArgUtil;
-import org.globus.cog.karajan.arguments.VariableArguments;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.AbstractSequentialWithArguments;
+import k.rt.Channel;
+import k.rt.Stack;
+import k.thr.LWThread;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.compiled.nodes.InternalFunction;
import org.griphyn.vdl.mapping.AbsFile;
-public class InFileDirs extends AbstractSequentialWithArguments {
- public static final Arg STAGEINS = new Arg.Positional("stageins");
+public class InFileDirs extends InternalFunction {
- static {
- setArguments(InFileDirs.class, new Arg[] { STAGEINS });
+ private ArgRef<List<String>> stageins;
+ private ChannelRef<Object> cr_vargs;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("stageins"), returns(channel("...", DYNAMIC)));
}
+
@Override
- protected void post(VariableStack stack) throws ExecutionException {
- List files = TypeUtil.toList(STAGEINS.getValue(stack));
- VariableArguments ret = ArgUtil.getVariableReturn(stack);
- for (Object f : files) {
- String path = (String) f;
+ protected void runBody(LWThread thr) {
+ Stack stack = thr.getStack();
+ List<String> files = stageins.getValue(stack);
+ Channel<Object> ret = cr_vargs.get(stack);
+ for (String path : files) {
AbsFile af = new AbsFile(path);
if ("file".equals(af.getProtocol())) {
String dir = af.getDir();
@@ -52,17 +57,16 @@
// as "a/b/c.txt". Perhaps absolute paths
// should have a unique prefix.
if (dir.startsWith("/") && dir.length() != 1) {
- ret.append(dir.substring(1));
+ ret.add(dir.substring(1));
}
else if (dir.length() != 0) {
- ret.append(dir);
+ ret.add(dir);
}
}
else {
// also prepend host name to the path
- ret.append(af.getHost() + "/" + af.getDir());
+ ret.add(af.getHost() + "/" + af.getDir());
}
}
- super.post(stack);
}
}
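
InFileDirs illustrates the other channel-return variant: where CurrentThread declares channel("...", 1) for exactly one value, here the count is DYNAMIC because each input file may contribute zero or one directory entries. A sketch of the same shape for a hypothetical pass-through (SketchSplit and the items parameter are illustrative; the calls mirror InFileDirs above):

    // imports as in InFileDirs above
    public class SketchSplit extends InternalFunction {
        private ArgRef<List<String>> items;
        private ChannelRef<Object> cr_vargs;

        @Override
        protected Signature getSignature() {
            // DYNAMIC: the number of values pushed to "..." is not known at compile time
            return new Signature(params("items"), returns(channel("...", DYNAMIC)));
        }

        @Override
        protected void runBody(LWThread thr) {
            Stack stack = thr.getStack();
            Channel<Object> ret = cr_vargs.get(stack);
            for (String s : items.getValue(stack)) {
                ret.add(s);                           // zero or more values per invocation
            }
        }
    }
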
Deleted: branches/faster/src/org/griphyn/vdl/karajan/lib/InfiniteCountingWhile.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/InfiniteCountingWhile.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/InfiniteCountingWhile.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,125 +0,0 @@
-/*
- * Copyright 2012 University of Chicago
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.griphyn.vdl.karajan.lib;
-
-import java.util.Collections;
-import java.util.List;
-
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.ThreadingContext;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.FlowElement;
-import org.globus.cog.karajan.workflow.nodes.Sequential;
-import org.globus.cog.karajan.workflow.nodes.While;
-import org.griphyn.vdl.mapping.RootDataNode;
-import org.griphyn.vdl.type.Types;
-
-public class InfiniteCountingWhile extends Sequential {
-
- public static final String COUNTER_NAME = "$";
- public static final Arg.Positional VAR = new Arg.Positional("var");
-
- private Tracer tracer;
-
- public InfiniteCountingWhile() {
- setOptimize(false);
- }
-
- @Override
- protected void initializeStatic() {
- super.initializeStatic();
- tracer = Tracer.getTracer(this);
- }
-
- public void pre(VariableStack stack) throws ExecutionException {
- ThreadingContext tc = (ThreadingContext)stack.getVar("#thread");
- stack.setVar("#iteratethread", tc);
- stack.setVar("#thread", tc.split(0));
- stack.setVar(COUNTER_NAME, Collections.singletonList(0));
- String var = (String) VAR.getStatic(this);
- if (tracer.isEnabled()) {
- tracer.trace(tc.toString(), var + " = 0");
- }
- stack.setVar(var, new RootDataNode(Types.INT, 0));
- super.pre(stack);
- }
-
- protected void startNext(VariableStack stack) throws ExecutionException {
- if (stack.isDefined("#abort")) {
- abort(stack);
- return;
- }
- int index = getIndex(stack);
- if (elementCount() == 0) {
- post(stack);
- return;
- }
- FlowElement fn = null;
-
- if (index == elementCount() - 1) {
- // the condition is always compiled as the last thing in the loop
- // but the increment needs to happen before the condition is
- // evaluated
- @SuppressWarnings("unchecked")
- List<Integer> c = (List<Integer>) stack.getVar(COUNTER_NAME);
- int i = c.get(0).intValue();
- i++;
- ThreadingContext tc = (ThreadingContext)stack.getVar("#iteratethread");
- ThreadingContext ntc = tc.split(i);
- stack.setVar("#thread", ntc);
- stack.setVar(COUNTER_NAME, Collections.singletonList(i));
- String var = (String) VAR.getStatic(this);
- if (tracer.isEnabled()) {
- tracer.trace(ntc.toString(), var + " = " + i);
- }
- stack.setVar(var, new RootDataNode(Types.INT, i));
- }
- if (index >= elementCount()) {
- // starting new iteration
- setIndex(stack, 1);
- fn = getElement(0);
- }
- else {
- fn = getElement(index++);
- setIndex(stack, index);
- }
- startElement(fn, stack);
- }
-
- public void failed(VariableStack stack, ExecutionException e)
- throws ExecutionException {
- if (e instanceof While.Break) {
- complete(stack);
- return;
- }
- if (e instanceof While.Continue) {
- setIndex(e.getStack(), 0);
- startNext(e.getStack());
- return;
- }
- super.failed(stack, e);
- }
-
- @Override
- public String getTextualName() {
- return "iterate";
- }
-
-
-}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/IsDone.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/IsDone.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/IsDone.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -22,33 +22,36 @@
import java.util.List;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.compiled.nodes.restartLog.LogChannelOperator;
import org.griphyn.vdl.mapping.DSHandle;
-import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.Path;
-public class IsDone extends VDLFunction {
- public static final Arg STAGEOUT = new Arg.Positional("stageout");
+public class IsDone extends SwiftFunction {
+ private ArgRef<Iterable<List<Object>>> stageout;
- static {
- setArguments(IsDone.class, new Arg[] { STAGEOUT });
- }
+ private ChannelRef<String> cr_restartLog;
@Override
- protected Object function(VariableStack stack) throws ExecutionException {
- List files = TypeUtil.toList(STAGEOUT.getValue(stack));
- for (Object f : files) {
- List pv = TypeUtil.toList(f);
+ protected Signature getSignature() {
+ return new Signature(params("stageout"), returns(channel("...", 1), channel("restartLog")));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ Iterable<List<Object>> files = stageout.getValue(stack);
+ for (List<Object> pv : files) {
Path p = (Path) pv.get(0);
DSHandle handle = (DSHandle) pv.get(1);
- if (!IsLogged.isLogged(stack, handle, p)) {
+ if (!IsLogged.isLogged((LogChannelOperator) cr_restartLog.get(stack), handle, p)) {
return Boolean.FALSE;
}
}
- if (files.isEmpty()) {
+ if (!files.iterator().hasNext()) {
return Boolean.FALSE;
}
return Boolean.TRUE;
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/IsFileBound.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/IsFileBound.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/IsFileBound.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,19 +20,24 @@
*/
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
-public class IsFileBound extends VDLFunction {
- static {
- setArguments(IsFileBound.class, new Arg[] { PA_VAR });
- }
+public class IsFileBound extends SwiftFunction {
+ private ArgRef<DSHandle> var;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var"));
+ }
- public Object function(VariableStack stack) throws ExecutionException {
- DSHandle var = (DSHandle) PA_VAR.getValue(stack);
+ @Override
+ public Object function(Stack stack) {
+ DSHandle var = this.var.getValue(stack);
if (var instanceof AbstractDataNode) {
return Boolean.valueOf(!((AbstractDataNode) var).isPrimitive());
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/IsLogged.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/IsLogged.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/IsLogged.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -23,40 +23,51 @@
import java.util.List;
import java.util.Map;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.compiled.nodes.restartLog.LogChannelOperator;
+import org.globus.cog.karajan.compiled.nodes.restartLog.LogEntry;
import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.restartLog.LogEntry;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.MappingParam;
import org.griphyn.vdl.mapping.Path;
-public class IsLogged extends VDLFunction {
- static {
- setArguments(IsLogged.class, new Arg[] { PA_VAR, PA_PATH });
- }
+public class IsLogged extends SwiftFunction {
+ private ArgRef<DSHandle> var;
+ private ArgRef<Object> path;
+
+ private ChannelRef<String> cr_restartLog;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", "path"), returns(channel("restartLog")));
+ }
- public Object function(VariableStack stack) throws ExecutionException {
- DSHandle var = (DSHandle) PA_VAR.getValue(stack);
+ @Override
+ public Object function(Stack stack) {
+ DSHandle var = this.var.getValue(stack);
Path path;
- Object p = PA_PATH.getValue(stack);
+ Object p = this.path.getValue(stack);
if (p instanceof Path) {
path = (Path) p;
}
else {
path = Path.parse(TypeUtil.toString(p));
}
- return Boolean.valueOf(isLogged(stack, var, path));
+ return Boolean.valueOf(isLogged((LogChannelOperator) cr_restartLog.get(stack), var, path));
}
- public static boolean isLogged(VariableStack stack, DSHandle var, Path path) throws ExecutionException {
+ public static boolean isLogged(LogChannelOperator log, DSHandle var, Path path) throws ExecutionException {
+ Map<LogEntry, Object> logData = log.getLogData();
path = var.getPathFromRoot().append(path);
LogEntry entry = LogEntry.build(var.getRoot().getParam(MappingParam.SWIFT_RESTARTID) + "." + path.stringForm());
- Map map = getLogData(stack);
boolean found = false;
- synchronized (map) {
- List files = (List) map.get(entry);
+ synchronized (logData) {
+ List<?> files = (List<?>) logData.get(entry);
if (files != null && !files.isEmpty()) {
found = true;
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/IsRestartable.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/IsRestartable.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/IsRestartable.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -17,9 +17,10 @@
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.DSHandle;
/** Determines if a variable is 'restartable'; that is, if we restart the
@@ -28,14 +29,18 @@
-public class IsRestartable extends VDLFunction {
- static {
- setArguments(IsRestartable.class, new Arg[] { PA_VAR });
- }
+public class IsRestartable extends SwiftFunction {
+ private ArgRef<DSHandle> var;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var"));
+ }
- public Object function(VariableStack stack) throws ExecutionException {
- DSHandle var = (DSHandle) PA_VAR.getValue(stack);
- return Boolean.valueOf(var.isRestartable());
- }
+ @Override
+ public Object function(Stack stack) {
+ DSHandle var = this.var.getValue(stack);
+ return Boolean.valueOf(var.isRestartable());
+ }
}
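
For reference, the simplest shape of a value-returning SwiftFunction after this change is sketched below. This is an illustration only — HasMapper is hypothetical and not added by this commit — following the IsFileBound/IsRestartable conversions above; getMapper() is the DSHandle accessor already used by LogVar later in this diff.

package org.griphyn.vdl.karajan.lib;

import k.rt.Stack;

import org.globus.cog.karajan.analyzer.ArgRef;
import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.DSHandle;

// Hypothetical example: reports whether a variable has a mapper attached.
public class HasMapper extends SwiftFunction {
    private ArgRef<DSHandle> var;

    @Override
    protected Signature getSignature() {
        return new Signature(params("var"));
    }

    @Override
    public Object function(Stack stack) {
        DSHandle var = this.var.getValue(stack);
        return Boolean.valueOf(var.getMapper() != null);
    }
}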
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/JobConstraints.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/JobConstraints.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/JobConstraints.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,38 +20,46 @@
*/
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.arguments.Arg;
+import java.util.Collection;
+
+import k.rt.Stack;
+import k.thr.LWThread;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.globus.cog.karajan.scheduler.TaskConstraints;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import org.griphyn.vdl.karajan.lib.cache.CacheMapAdapter;
import org.griphyn.vdl.util.FQN;
-import java.util.Collection;
-import org.griphyn.vdl.karajan.lib.cache.CacheMapAdapter;
-public class JobConstraints extends VDLFunction {
- public static final Arg A_TR = new Arg.Positional("tr");
- public static final Arg STAGE_IN = new Arg.Optional("stagein");
+public class JobConstraints extends CacheFunction {
+ private ArgRef<String> tr;
+ private ArgRef<Collection<String>> stagein;
+ private ChannelRef<Object> cr_vargs;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("tr", optional("stagein", null)), returns(channel("...", 1)));
+ }
- static {
- setArguments(JobConstraints.class, new Arg[] { A_TR, STAGE_IN });
- }
-
private static final String[] STRING_ARRAY = new String[0];
- public Object function(VariableStack stack) throws ExecutionException {
- String tr = TypeUtil.toString(A_TR.getValue(stack));
+ @Override
+ public void runBody(LWThread thr) {
+ Stack stack = thr.getStack();
+ String tr = this.tr.getValue(stack);
String[] filenames = null;
- if (STAGE_IN.isPresent(stack)) {
- filenames = (String[]) ((Collection) STAGE_IN.getValue(stack)).toArray(STRING_ARRAY);
+ Collection<String> c = this.stagein.getValue(stack);
+ if (c != null) {
+ filenames = c.toArray(STRING_ARRAY);
}
TaskConstraints tc = new TaskConstraints();
tc.addConstraint("tr", tr);
tc.addConstraint("trfqn", new FQN(tr));
if (filenames != null) {
tc.addConstraint("filenames", filenames);
- tc.addConstraint("filecache", new CacheMapAdapter(CacheFunction.getCache(stack)));
+ tc.addConstraint("filecache", new CacheMapAdapter(getCache(stack)));
}
- return tc;
+ cr_vargs.append(stack, tc);
}
}
Deleted: branches/faster/src/org/griphyn/vdl/karajan/lib/Kickstart.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/Kickstart.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/Kickstart.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,66 +0,0 @@
-/*
- * Copyright 2012 University of Chicago
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-/*
- * Created on Dec 26, 2006
- */
-package org.griphyn.vdl.karajan.lib;
-
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.BoundContact;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.karajan.functions.ConfigProperty;
-import org.griphyn.vdl.util.TriStateBoolean;
-import org.griphyn.vdl.util.VDL2ConfigProperties;
-
-public class Kickstart extends VDLFunction {
- public static final String PROPERTY_GRIDLAUNCH = "gridlaunch";
-
- public static final Arg A_HOST = new Arg.Positional("host");
-
- static {
- setArguments(Kickstart.class, new Arg[] { A_HOST });
- }
-
- public static final String NOTHING = "";
-
- public Object function(VariableStack stack) throws ExecutionException {
- String enabled = ConfigProperty.getProperty(VDL2ConfigProperties.KICKSTART_ENABLED, stack);
- TriStateBoolean tbs = TriStateBoolean.valueOf(enabled);
- if (tbs.equals(TriStateBoolean.FALSE)) {
- return NOTHING;
- }
- else {
- BoundContact host = (BoundContact) A_HOST.getValue(stack);
- String kickstart = (String) host.getProperty(PROPERTY_GRIDLAUNCH);
- if (kickstart == null) {
- if (tbs.equals(TriStateBoolean.MAYBE)) {
- return NOTHING;
- }
- else {
- throw new ExecutionException(
- "The \"kickstart.enable\" option is set to \"true\" but Kickstart is not installed on "
- + host);
- }
- }
- else {
- return kickstart;
- }
- }
- }
-}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/Log.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/Log.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/Log.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -23,22 +23,26 @@
import java.util.HashMap;
import java.util.Map;
+import k.rt.Channel;
+import k.rt.Stack;
+import k.thr.LWThread;
+
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.AbstractSequentialWithArguments;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.compiled.nodes.InternalFunction;
-public class Log extends AbstractSequentialWithArguments {
- public static final Arg LEVEL = new Arg.Positional("level");
- public static final Arg MESSAGE = new Arg.Optional("message", null);
+public class Log extends InternalFunction {
+ private ArgRef<String> level;
+ private ChannelRef<Object> c_vargs;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("level", "..."));
+ }
- static {
- setArguments(Log.class, new Arg[] { LEVEL, MESSAGE, Arg.VARGS });
- }
-
public static final Logger logger = Logger.getLogger("swift");
private static final Map<String, Level> priorities = new HashMap<String, Level>();
@@ -49,27 +53,21 @@
priorities.put("error", Level.ERROR);
priorities.put("fatal", Level.FATAL);
}
-
- public static Level getLevel(String lvl) {
+
+ public static Level getLevel(String lvl) {
return priorities.get(lvl);
}
- protected void post(VariableStack stack) throws ExecutionException {
- Level lvl = getLevel((String) LEVEL.getValue(stack));
+ protected void runBody(LWThread thr) {
+ Stack stack = thr.getStack();
+ Level lvl = getLevel(this.level.getValue(stack));
if (logger.isEnabledFor(lvl)) {
- Object smsg = MESSAGE.getValue(stack);
- if (smsg != null) {
- logger.log(lvl, smsg);
+ Channel<Object> l = c_vargs.get(stack);
+ StringBuilder sb = new StringBuilder();
+ for (Object o : l) {
+ sb.append(o);
}
- else {
- Object[] msg = Arg.VARGS.asArray(stack);
- StringBuilder sb = new StringBuilder();
- for (int i = 0; i < msg.length; i++) {
- sb.append(TypeUtil.toString(msg[i]));
- }
- logger.log(lvl, sb.toString());
- }
+ logger.log(lvl, sb.toString());
}
- super.post(stack);
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/LogVar.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/LogVar.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/LogVar.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,37 +20,44 @@
*/
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableNotFoundException;
-import org.globus.cog.karajan.stack.VariableStack;
+import k.rt.Channel;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.analyzer.VariableNotFoundException;
import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.restartLog.RestartLog;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.MappingParam;
import org.griphyn.vdl.mapping.Path;
-public class LogVar extends VDLFunction {
+public class LogVar extends SwiftFunction {
+ private ArgRef<DSHandle> var;
+ private ArgRef<Object> path;
+ private ChannelRef<Object> cr_restartlog;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", "path"), returns(channel("restartlog", 1)));
+ }
- static {
- setArguments(LogVar.class, new Arg[] { PA_VAR, PA_PATH });
- }
-
- public Object function(VariableStack stack) throws ExecutionException {
- DSHandle var = (DSHandle) PA_VAR.getValue(stack);
+ @Override
+ public Object function(Stack stack) {
+ DSHandle var = this.var.getValue(stack);
Path path;
- Object p = PA_PATH.getValue(stack);
+ Object p = this.path.getValue(stack);
if (p instanceof Path) {
path = (Path) p;
}
else {
path = Path.parse(TypeUtil.toString(p));
}
- logVar(stack, var, path);
+ logVar(cr_restartlog.get(stack), var, path);
return null;
}
- public static void logVar(VariableStack stack, DSHandle var, Path path) throws VariableNotFoundException {
+ public static void logVar(Channel<Object> log, DSHandle var, Path path) throws VariableNotFoundException {
path = var.getPathFromRoot().append(path);
String annotation;
if(var.getMapper() != null) {
@@ -58,7 +65,7 @@
} else {
annotation = "unmapped";
}
- RestartLog.LOG_CHANNEL.ret(stack, var.getRoot().getParam(MappingParam.SWIFT_RESTARTID)
+ log.add(var.getRoot().getParam(MappingParam.SWIFT_RESTARTID)
+ "." + path.stringForm() + "!" + annotation);
}
}
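
The LogVar change above also shows how restart-log access now works: instead of calling RestartLog.LOG_CHANNEL.ret(stack, ...), a function declares the restart-log channel in getSignature() and appends to it with Channel.add(). A minimal sketch of that pattern follows; the class and its argument are hypothetical and not part of this commit.

package org.griphyn.vdl.karajan.lib;

import k.rt.Channel;
import k.rt.Stack;

import org.globus.cog.karajan.analyzer.ArgRef;
import org.globus.cog.karajan.analyzer.ChannelRef;
import org.globus.cog.karajan.analyzer.Signature;

// Hypothetical example: appends a single marker string to the restart log.
public class LogMarker extends SwiftFunction {
    private ArgRef<String> marker;
    private ChannelRef<Object> cr_restartlog;  // bound to the "restartlog" channel

    @Override
    protected Signature getSignature() {
        return new Signature(params("marker"), returns(channel("restartlog", 1)));
    }

    @Override
    public Object function(Stack stack) {
        Channel<Object> log = cr_restartlog.get(stack);
        log.add(marker.getValue(stack));  // replaces RestartLog.LOG_CHANNEL.ret(stack, ...)
        return null;
    }
}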
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/Mark.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/Mark.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/Mark.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -22,34 +22,34 @@
import java.util.List;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.DataDependentException;
-import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.MappingDependentException;
import org.griphyn.vdl.mapping.Path;
-public class Mark extends VDLFunction {
- public static final Arg RESTARTS = new Arg.Positional("restarts");
- public static final Arg ERR = new Arg.Positional("err");
- public static final Arg MAPPING = new Arg.Optional("mapping", Boolean.FALSE);
-
- static {
- setArguments(Mark.class, new Arg[] { RESTARTS, ERR, MAPPING });
+public class Mark extends SwiftFunction {
+ private ArgRef<List<List<Object>>> restarts;
+ private ArgRef<Boolean> err;
+ private ArgRef<Boolean> mapping;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("restarts", "err", optional("mapping", Boolean.FALSE)));
}
@Override
- protected Object function(VariableStack stack) throws ExecutionException {
+ public Object function(Stack stack) {
try {
- if (TypeUtil.toBoolean(ERR.getValue(stack))) {
- boolean mapping = TypeUtil.toBoolean(MAPPING.getValue(stack));
- List files = TypeUtil.toList(RESTARTS.getValue(stack));
- for (Object f : files) {
- List pv = TypeUtil.toList(f);
- Path p = parsePath(pv.get(0), stack);
+ if (err.getValue(stack)) {
+ boolean mapping = this.mapping.getValue(stack);
+ List<List<Object>> files = this.restarts.getValue(stack);
+ for (List<Object> pv : files) {
+ Path p = parsePath(pv.get(0));
DSHandle handle = (DSHandle) pv.get(1);
DSHandle leaf = handle.getField(p);
synchronized (leaf) {
@@ -65,7 +65,7 @@
}
}
catch (Exception e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
return null;
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/New.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/New.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/New.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -24,18 +24,25 @@
import java.util.List;
import java.util.Map;
+import k.rt.Context;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.karajan.VDL2ExecutionContext;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.CompilationException;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.globus.cog.karajan.compiled.nodes.Node;
+import org.globus.cog.karajan.parser.WrapperNode;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.DuplicateMappingChecker;
import org.griphyn.vdl.mapping.ExternalDataNode;
import org.griphyn.vdl.mapping.MappingParam;
import org.griphyn.vdl.mapping.MappingParamSet;
+import org.griphyn.vdl.mapping.OOBYield;
import org.griphyn.vdl.mapping.Path;
import org.griphyn.vdl.mapping.RootArrayDataNode;
import org.griphyn.vdl.mapping.RootDataNode;
@@ -43,40 +50,55 @@
import org.griphyn.vdl.type.Type;
import org.griphyn.vdl.type.Types;
-public class New extends VDLFunction {
-
+public class New extends SwiftFunction {
public static final Logger logger = Logger.getLogger(New.class);
-
- public static final Arg OA_TYPE = new Arg.Optional("type", null);
- public static final Arg OA_MAPPING = new Arg.Optional("mapping", null);
- public static final Arg OA_VALUE = new Arg.Optional("value", null);
- public static final Arg OA_DBGNAME = new Arg.Optional("dbgname", null);
- public static final Arg OA_WAITCOUNT = new Arg.Optional("waitcount", null);
- public static final Arg OA_INPUT = new Arg.Optional("input", Boolean.FALSE);
-
- static {
- setArguments(New.class,
- new Arg[] { OA_TYPE, OA_MAPPING, OA_VALUE, OA_DBGNAME, OA_WAITCOUNT, OA_INPUT});
+
+ private ArgRef<String> type;
+ private ArgRef<Map<String, Object>> mapping;
+ private ArgRef<Object> value;
+ private ArgRef<String> dbgname;
+ private ArgRef<Number> waitCount;
+ private ArgRef<Boolean> input;
+ private ArgRef<String> _defline;
+
+ private VarRef<Context> context;
+ private VarRef<String> cwd;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("type", optional("mapping", null), optional("value", null),
+ optional("dbgname", null), optional("waitCount", null), optional("input", Boolean.FALSE), optional("_defline", null)));
}
private Tracer tracer;
- @Override
- protected void initializeStatic() {
- super.initializeStatic();
+ @Override
+ protected void addLocals(Scope scope) {
+ super.addLocals(scope);
+ context = scope.getVarRef("#context");
+ cwd = scope.getVarRef("CWD");
+ }
+
+ @Override
+ public Node compile(WrapperNode w, Scope scope)
+ throws CompilationException {
+ Node fn = super.compile(w, scope);
+ if (_defline.getValue() != null) {
+ setLine(Integer.parseInt(_defline.getValue()));
+ }
tracer = Tracer.getTracer(this);
+ return fn;
}
- public Object function(VariableStack stack) throws ExecutionException {
- String typename = TypeUtil.toString(OA_TYPE.getValue(stack));
- Object value = OA_VALUE.getValue(stack);
- @SuppressWarnings("unchecked")
- Map<String,Object> mapping =
- (Map<String,Object>) OA_MAPPING.getValue(stack);
- String dbgname = TypeUtil.toString(OA_DBGNAME.getValue(stack));
- String waitCount = (String) OA_WAITCOUNT.getValue(stack);
- boolean input = TypeUtil.toBoolean(OA_INPUT.getValue(stack));
- String line = (String) getProperty("_defline");
+ @Override
+ public Object function(Stack stack) {
+ String typename = this.type.getValue(stack);
+ Object value = this.value.getValue(stack);
+ Map<String,Object> mapping = this.mapping.getValue(stack);
+ String dbgname = this.dbgname.getValue(stack);
+ Number waitCount = this.waitCount.getValue(stack);
+ boolean input = this.input.getValue(stack);
+ String line = this._defline.getValue(stack);
MappingParamSet mps = new MappingParamSet();
mps.setAll(mapping);
@@ -93,7 +115,7 @@
mps.set(MappingParam.SWIFT_LINE, line);
}
- String threadPrefix = getThreadPrefix(stack);
+ String threadPrefix = getThreadPrefix();
mps.set(MappingParam.SWIFT_RESTARTID, threadPrefix + ":" + dbgname);
@@ -101,7 +123,7 @@
int initialWriteRefCount;
boolean noWriters = input;
if (waitCount != null) {
- initialWriteRefCount = Integer.parseInt(waitCount);
+ initialWriteRefCount = waitCount.intValue();
}
else {
initialWriteRefCount = 0;
@@ -115,7 +137,7 @@
if ("concurrent_mapper".equals(mapper)) {
mps.set(ConcurrentMapper.PARAM_THREAD_PREFIX, threadPrefix);
}
- mps.set(MappingParam.SWIFT_BASEDIR, stack.getExecutionContext().getBasedir());
+ mps.set(MappingParam.SWIFT_BASEDIR, cwd.getValue(stack));
try {
Type type;
@@ -134,8 +156,7 @@
}
else if (type.isArray()) {
// dealing with array variable
- handle = new RootArrayDataNode(type,
- (DuplicateMappingChecker) stack.getGlobal(VDL2ExecutionContext.DM_CHECKER));
+ handle = new RootArrayDataNode(type, getDMChecker(stack));
if (value != null) {
if (value instanceof RootArrayDataNode) {
if (tracer.isEnabled()) {
@@ -184,8 +205,7 @@
handle = (DSHandle) value;
}
else {
- handle = new RootDataNode(type,
- (DuplicateMappingChecker) stack.getGlobal(VDL2ExecutionContext.DM_CHECKER));
+ handle = new RootDataNode(type, getDMChecker(stack));
handle.init(mps);
if (value != null) {
if (tracer.isEnabled()) {
@@ -206,11 +226,19 @@
handle.setWriteRefCount(initialWriteRefCount);
return handle;
}
+ catch (OOBYield y) {
+ throw y.wrapped(this);
+ }
catch (Exception e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
+ private DuplicateMappingChecker getDMChecker(Stack stack) {
+ Context ctx = this.context.getValue(stack);
+ return (DuplicateMappingChecker) ctx.getAttribute("SWIFT:DM_CHECKER");
+ }
+
private String formatList(List<?> value) {
StringBuilder sb = new StringBuilder();
sb.append('[');
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/NiceName.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/NiceName.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/NiceName.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,23 +20,31 @@
*/
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.InvalidPathException;
import org.griphyn.vdl.mapping.MappingParam;
import org.griphyn.vdl.mapping.Path;
-public class NiceName extends VDLFunction {
- static {
- setArguments(NiceName.class, new Arg[] { OA_PATH, PA_VAR });
- }
+public class NiceName extends SwiftFunction {
+ private ArgRef<DSHandle> var;
+ private ArgRef<Object> path;
- public Object function(VariableStack stack) throws ExecutionException {
- DSHandle var = (DSHandle) PA_VAR.getValue(stack);
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", optional("path", Path.EMPTY_PATH)));
+ }
+
+
+ @Override
+ public Object function(Stack stack) {
+ DSHandle var = this.var.getValue(stack);
try {
- Path path = parsePath(OA_PATH.getValue(stack), stack);
+ Path path = parsePath(this.path.getValue(stack));
DSHandle field = var.getField(path);
Path p = field.getPathFromRoot();
if (p.equals(Path.EMPTY_PATH)) {
@@ -53,7 +61,7 @@
}
}
catch (InvalidPathException e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/Operators.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/Operators.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/Operators.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -17,82 +17,44 @@
package org.griphyn.vdl.karajan.lib;
-import java.io.IOException;
+import k.rt.ExecutionException;
+import k.thr.LWThread;
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.functions.FunctionsCollection;
+import org.globus.cog.karajan.compiled.nodes.Node;
+import org.globus.cog.karajan.compiled.nodes.functions.BinaryOp;
+import org.globus.cog.karajan.compiled.nodes.functions.UnaryOp;
+import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.RootDataNode;
import org.griphyn.vdl.type.Type;
import org.griphyn.vdl.type.Types;
import org.griphyn.vdl.util.VDL2Config;
-public class Operators extends FunctionsCollection {
-
- public static final SwiftArg L = new SwiftArg.Positional("left");
- public static final SwiftArg R = new SwiftArg.Positional("right");
- public static final SwiftArg U = new SwiftArg.Positional("unaryArg");
-
- public static final Logger provenanceLogger = Logger.getLogger("org.globus.swift.provenance.operators");
-
- private DSHandle newNum(Type type, double value) throws ExecutionException {
- try {
- DSHandle handle = new RootDataNode(type);
- if (type == Types.INT) {
- handle.setValue(Integer.valueOf((int) value));
- }
- else {
- handle.setValue(new Double(value));
- }
- handle.closeShallow();
- return handle;
- }
- catch (Exception e) {
- throw new ExecutionException("Internal error", e);
- }
- }
+public class Operators {
- private DSHandle newNum(Type type, int value) throws ExecutionException {
- try {
- DSHandle handle = new RootDataNode(type);
- handle.setValue(new Integer(value));
- handle.closeShallow();
- return handle;
- }
- catch (Exception e) {
- throw new ExecutionException("Internal error", e);
- }
- }
+ public static final boolean PROVENANCE_ENABLED;
- private DSHandle newString(String value) throws ExecutionException {
+ static {
+ boolean v;
try {
- DSHandle handle = new RootDataNode(Types.STRING);
- handle.setValue(value);
- handle.closeShallow();
- return handle;
+ v = VDL2Config.getConfig().getProvenanceLog();
}
catch (Exception e) {
- throw new ExecutionException("Internal error", e);
+ v = false;
}
+ PROVENANCE_ENABLED = v;
}
- private DSHandle newBool(boolean value) throws ExecutionException {
- try {
- DSHandle handle = new RootDataNode(Types.BOOLEAN);
- handle.setValue(new Boolean(value));
- handle.closeShallow();
- return handle;
- }
- catch (Exception e) {
- throw new ExecutionException("Internal error", e);
- }
- }
+ public static final Logger provenanceLogger = Logger.getLogger("org.globus.swift.provenance.operators");
- private Type type(VariableStack stack) throws ExecutionException {
- if (Types.FLOAT.equals(L.getType(stack)) || Types.FLOAT.equals(R.getType(stack))) {
+ private static Type type(DSHandle l, DSHandle r) throws ExecutionException {
+ Type tl = l.getType();
+ Type tr = r.getType();
+ if (Types.STRING.equals(tl) || Types.STRING.equals(tr)) {
+ return Types.STRING;
+ }
+ else if (Types.FLOAT.equals(tl) || Types.FLOAT.equals(tr)) {
return Types.FLOAT;
}
else {
@@ -100,194 +62,231 @@
}
}
- private static final String[] BINARY_OPERATORS = new String[] { "vdlop_sum", "vdlop_subtraction",
- "vdlop_product", "vdlop_quotient", "vdlop_fquotient", "vdlop_iquotient",
- "vdlop_remainder", "vdlop_le", "vdlop_ge", "vdlop_lt", "vdlop_gt", "vdlop_eq", "vdlop_ne", "vdlop_and", "vdlop_or" };
- private static final String[] UNARY_OPERATORS = new String[] { "vdlop_not" };
-
- private static final Arg[] BINARY_ARGS = new Arg[] { L, R };
- private static final Arg[] UNARY_ARGS = new Arg[] { U };
-
- static {
- for (int i = 0; i < BINARY_OPERATORS.length; i++) {
- setArguments(BINARY_OPERATORS[i], BINARY_ARGS);
+ private static int getInt(Node n, DSHandle h) {
+ waitFor(n, h);
+ Object v = h.getValue();
+ if (v instanceof Integer) {
+ return ((Integer) v).intValue();
}
- for (int i = 0; i < UNARY_OPERATORS.length; i++) {
- setArguments(UNARY_OPERATORS[i], UNARY_ARGS);
- }
- }
-
- public Object vdlop_sum(VariableStack stack) throws ExecutionException {
- Object l = L.getValue(stack);
- Object r = R.getValue(stack);
- Type t = type(stack);
- DSHandle ret;
- if (l instanceof String || r instanceof String) {
- ret = newString(((String) l) + ((String) r));
- }
- else if (t == Types.INT) {
- ret = newNum(t, SwiftArg.checkInt(l) + SwiftArg.checkInt(r));
- }
else {
- ret = newNum(t, SwiftArg.checkDouble(l) + SwiftArg.checkDouble(r));
+ throw new ExecutionException(n, "Internal error. Expected an int: " + h);
}
- logBinaryProvenance(stack, "sum", ret);
- return ret;
}
+
+ private static void waitFor(Node n, DSHandle h) {
+ ((AbstractDataNode) h).waitFor(n);
+ }
- public Object vdlop_subtraction(VariableStack stack) throws ExecutionException {
- double l = L.getDoubleValue(stack);
- double r = R.getDoubleValue(stack);
- DSHandle ret = newNum(type(stack), l - r);
- logBinaryProvenance(stack, "subtraction", ret);
- return ret;
+ private static double getFloat(Node n, DSHandle h) {
+ waitFor(n, h);
+ Object v = h.getValue();
+ if (v instanceof Number) {
+ return ((Number) v).doubleValue();
+ }
+ else {
+ throw new ExecutionException(n, "Internal error. Expected float: " + h);
+ }
+ }
+
+ private static boolean getBool(Node n, DSHandle h) {
+ waitFor(n, h);
+ Object v = h.getValue();
+ if (v instanceof Boolean) {
+ return ((Boolean) v).booleanValue();
+ }
+ else {
+ throw new ExecutionException(n, "Internal error. Expected a boolean: " + h);
+ }
+ }
+
+ public static class Sum extends BinaryOp<DSHandle, DSHandle> {
+ @Override
+ protected DSHandle value(DSHandle v1, DSHandle v2) {
+ Type t = type(v1, v2);
+ DSHandle r;
+ if (t == Types.STRING) {
+ r = new RootDataNode(Types.STRING, (String.valueOf(v1.getValue()) + String.valueOf(v2.getValue())));
+ }
+ else if (t == Types.INT) {
+ r = new RootDataNode(Types.INT, getInt(this, v1) + getInt(this, v2));
+ }
+ else {
+ r = new RootDataNode(Types.FLOAT, getFloat(this, v1) + getFloat(this, v2));
+ }
+ logBinaryProvenance("sum", v1, v2, r);
+ return r;
+ }
}
-
- public Object vdlop_product(VariableStack stack) throws ExecutionException {
- double l = L.getDoubleValue(stack);
- double r = R.getDoubleValue(stack);
- DSHandle ret = newNum(type(stack), l * r);
- logBinaryProvenance(stack, "product", ret);
- return ret;
+
+ public static class Difference extends BinaryOp<DSHandle, DSHandle> {
+ @Override
+ protected DSHandle value(DSHandle v1, DSHandle v2) {
+ Type t = type(v1, v2);
+ DSHandle r;
+ if (t == Types.INT) {
+ r = new RootDataNode(Types.INT, getInt(this, v1) - getInt(this, v2));
+ }
+ else {
+ r = new RootDataNode(Types.FLOAT, getFloat(this, v1) - getFloat(this, v2));
+ }
+ logBinaryProvenance("difference", v1, v2, r);
+ return r;
+ }
+ }
+
+ public static class Product extends BinaryOp<DSHandle, DSHandle> {
+ @Override
+ protected DSHandle value(DSHandle v1, DSHandle v2) {
+ Type t = type(v1, v2);
+ DSHandle r;
+ if (t == Types.INT) {
+ r = new RootDataNode(Types.INT, getInt(this, v1) * getInt(this, v2));
+ }
+ else {
+ r = new RootDataNode(Types.FLOAT, getFloat(this, v1) * getFloat(this, v2));
+ }
+ logBinaryProvenance("product", v1, v2, r);
+ return r;
+ }
+ }
+
+ public static class FQuotient extends BinaryOp<DSHandle, DSHandle> {
+ @Override
+ protected DSHandle value(DSHandle v1, DSHandle v2) {
+ DSHandle r = new RootDataNode(Types.FLOAT, getFloat(this, v1) / getFloat(this, v2));
+ logBinaryProvenance("fquotient", v1, v2, r);
+ return r;
+ }
+ }
+
+ public static class Quotient extends FQuotient {
}
- public Object vdlop_quotient(VariableStack stack) throws ExecutionException {
- // for now we map to this one
- return vdlop_fquotient(stack);
- }
+ public static class IQuotient extends BinaryOp<DSHandle, DSHandle> {
+ @Override
+ protected DSHandle value(DSHandle v1, DSHandle v2) {
+ DSHandle r = new RootDataNode(Types.INT, getInt(this, v1) / getInt(this, v2));
+ logBinaryProvenance("iquotient", v1, v2, r);
+ return r;
+ }
+ }
- public Object vdlop_fquotient(VariableStack stack) throws ExecutionException {
- double l = L.getDoubleValue(stack);
- double r = R.getDoubleValue(stack);
- DSHandle ret = newNum(Types.FLOAT, l / r);
- logBinaryProvenance(stack, "fquotient", ret);
- return ret;
- }
+ public static class Remainder extends BinaryOp<DSHandle, DSHandle> {
+ @Override
+ protected DSHandle value(DSHandle v1, DSHandle v2) {
+ Type t = type(v1, v2);
+ DSHandle r;
+ if (t == Types.INT) {
+ r = new RootDataNode(Types.INT, getInt(this, v1) % getInt(this, v2));
+ }
+ else {
+ r = new RootDataNode(Types.FLOAT, getFloat(this, v1) % getFloat(this, v2));
+ }
+ logBinaryProvenance("remainder", v1, v2, r);
+ return r;
+ }
+ }
+
+ public static class LE extends BinaryOp<DSHandle, DSHandle> {
+ @Override
+ protected DSHandle value(DSHandle v1, DSHandle v2) {
+ DSHandle r = new RootDataNode(Types.BOOLEAN, getFloat(this, v1) <= getFloat(this, v2));
+ logBinaryProvenance("le", v1, v2, r);
+ return r;
+ }
+ }
+
+ public static class GE extends BinaryOp<DSHandle, DSHandle> {
+ @Override
+ protected DSHandle value(DSHandle v1, DSHandle v2) {
+ DSHandle r = new RootDataNode(Types.BOOLEAN, getFloat(this, v1) >= getFloat(this, v2));
+ logBinaryProvenance("ge", v1, v2, r);
+ return r;
+ }
+ }
+
+ public static class LT extends BinaryOp<DSHandle, DSHandle> {
+ @Override
+ protected DSHandle value(DSHandle v1, DSHandle v2) {
+ DSHandle r = new RootDataNode(Types.BOOLEAN, getFloat(this, v1) < getFloat(this, v2));
+ logBinaryProvenance("lt", v1, v2, r);
+ return r;
+ }
+ }
+
+ public static class GT extends BinaryOp<DSHandle, DSHandle> {
+ @Override
+ protected DSHandle value(DSHandle v1, DSHandle v2) {
+ DSHandle r = new RootDataNode(Types.BOOLEAN, getFloat(this, v1) > getFloat(this, v2));
+ logBinaryProvenance("gt", v1, v2, r);
+ return r;
+ }
+ }
+
+ public static class EQ extends BinaryOp<DSHandle, DSHandle> {
+ @Override
+ protected DSHandle value(DSHandle v1, DSHandle v2) {
+ DSHandle r = new RootDataNode(Types.BOOLEAN, v1.getValue().equals(v2.getValue()));
+ logBinaryProvenance("eq", v1, v2, r);
+ return r;
+ }
+ }
+
+ public static class NE extends BinaryOp<DSHandle, DSHandle> {
+ @Override
+ protected DSHandle value(DSHandle v1, DSHandle v2) {
+ DSHandle r = new RootDataNode(Types.BOOLEAN, !v1.getValue().equals(v2.getValue()));
+ logBinaryProvenance("ne", v1, v2, r);
+ return r;
+ }
+ }
+
+ public static class And extends BinaryOp<DSHandle, DSHandle> {
+ @Override
+ protected DSHandle value(DSHandle v1, DSHandle v2) {
+ DSHandle r = new RootDataNode(Types.BOOLEAN, getBool(this, v1) && getBool(this, v2));
+ logBinaryProvenance("and", v1, v2, r);
+ return r;
+ }
+ }
- public Object vdlop_iquotient(VariableStack stack) throws ExecutionException {
- double l = L.getDoubleValue(stack);
- double r = R.getDoubleValue(stack);
- DSHandle ret = newNum(Types.INT, l / r);
- logBinaryProvenance(stack, "iquotient", ret);
- return ret;
- }
+ public static class Or extends BinaryOp<DSHandle, DSHandle> {
+ @Override
+ protected DSHandle value(DSHandle v1, DSHandle v2) {
+ DSHandle r = new RootDataNode(Types.BOOLEAN, getBool(this, v1) || getBool(this, v2));
+ logBinaryProvenance("or", v1, v2, r);
+ return r;
+ }
+ }
+
+ public static class Not extends UnaryOp<DSHandle, DSHandle> {
+ @Override
+ protected DSHandle value(DSHandle v) {
+ DSHandle r = new RootDataNode(Types.BOOLEAN, !getBool(this, v));
+ logUnaryProvenance("not", v, r);
+ return r;
+ }
+ }
- public Object vdlop_remainder(VariableStack stack) throws ExecutionException {
- double l = L.getDoubleValue(stack);
- double r = R.getDoubleValue(stack);
- DSHandle ret = newNum(type(stack), l % r);
- logBinaryProvenance(stack, "remainder", ret);
- return ret;
- }
-
- public Object vdlop_le(VariableStack stack) throws ExecutionException {
- double l = L.getDoubleValue(stack);
- double r = R.getDoubleValue(stack);
- DSHandle ret = newBool(l <= r);
- logBinaryProvenance(stack, "le", ret);
- return ret;
- }
-
- public Object vdlop_ge(VariableStack stack) throws ExecutionException {
- double l = L.getDoubleValue(stack);
- double r = R.getDoubleValue(stack);
- DSHandle ret = newBool(l >= r);
- logBinaryProvenance(stack, "ge", ret);
- return ret;
- }
-
- public Object vdlop_gt(VariableStack stack) throws ExecutionException {
- double l = L.getDoubleValue(stack);
- double r = R.getDoubleValue(stack);
- DSHandle ret = newBool(l > r);
- logBinaryProvenance(stack, "gt", ret);
- return ret;
- }
-
- public Object vdlop_lt(VariableStack stack) throws ExecutionException {
- double l = L.getDoubleValue(stack);
- double r = R.getDoubleValue(stack);
- DSHandle ret = newBool(l < r);
- logBinaryProvenance(stack, "lt", ret);
- return ret;
- }
-
- public Object vdlop_eq(VariableStack stack) throws ExecutionException {
- Object l = L.getValue(stack);
- Object r = R.getValue(stack);
- if (l == null) {
- throw new ExecutionException("First operand is null");
+ private static void logBinaryProvenance(String name, DSHandle v1, DSHandle v2, DSHandle result) throws ExecutionException {
+ if (PROVENANCE_ENABLED) {
+ String thread = LWThread.currentThread().getName();
+ String lhsid = v1.getIdentifier();
+ String rhsid = v2.getIdentifier();
+ String rid = result.getIdentifier();
+ provenanceLogger.info("OPERATOR thread=" + thread + " operator=" + name +
+ " lhs=" + lhsid + " rhs=" + rhsid + " result=" + rid);
}
- if (r == null) {
- throw new ExecutionException("Second operand is null");
- }
- DSHandle ret = newBool(l.equals(r));
- logBinaryProvenance(stack, "eq", ret);
- return ret;
}
- public Object vdlop_ne(VariableStack stack) throws ExecutionException {
- Object l = L.getValue(stack);
- Object r = R.getValue(stack);
- if (l == null) {
- throw new ExecutionException("First operand is null");
+ private static void logUnaryProvenance(String name, DSHandle v, DSHandle r) throws ExecutionException {
+ if (PROVENANCE_ENABLED) {
+ String thread = LWThread.currentThread().getName();
+ String vid = v.getIdentifier();
+ String rid = r.getIdentifier();
+ provenanceLogger.info("UNARYOPERATOR thread=" + thread + " operator=" + name +
+ " operand=" + vid + " result=" + rid);
}
- if (r == null) {
- throw new ExecutionException("Second operand is null");
- }
- DSHandle ret = newBool(!(l.equals(r)));
- logBinaryProvenance(stack, "ne", ret);
- return ret;
}
-
- public Object vdlop_and(VariableStack stack) throws ExecutionException {
- boolean l = ((Boolean)L.getValue(stack)).booleanValue();
- boolean r = ((Boolean)R.getValue(stack)).booleanValue();
- DSHandle ret = newBool(l && r);
- logBinaryProvenance(stack, "and", ret);
- return ret;
- }
-
- public Object vdlop_or(VariableStack stack) throws ExecutionException {
- boolean l = ((Boolean)L.getValue(stack)).booleanValue();
- boolean r = ((Boolean)R.getValue(stack)).booleanValue();
- DSHandle ret = newBool(l || r);
- logBinaryProvenance(stack, "or", ret);
- return ret;
- }
-
- public Object vdlop_not(VariableStack stack) throws ExecutionException {
- boolean u = ((Boolean)U.getValue(stack)).booleanValue();
- DSHandle ret = newBool(!u);
- logUnaryProvenance(stack, "not", ret);
- return ret;
- }
-
- private void logBinaryProvenance(VariableStack stack, String name, DSHandle resultDataset) throws ExecutionException {
- try {
- if(VDL2Config.getConfig().getProvenanceLog()) {
- String thread = stack.getVar("#thread").toString();
- String lhs = L.getRawValue(stack).getIdentifier();
- String rhs = R.getRawValue(stack).getIdentifier();
- String result = resultDataset.getIdentifier();
- provenanceLogger.info("OPERATOR thread="+thread+" operator="+name+" lhs="+lhs+" rhs="+rhs+" result="+result);
- }
- } catch(IOException ioe) {
- throw new ExecutionException("Exception when logging provenance for binary operator", ioe);
- }
- }
-
- private void logUnaryProvenance(VariableStack stack, String name, DSHandle resultDataset) throws ExecutionException {
- try {
- if(VDL2Config.getConfig().getProvenanceLog()) {
- String thread = stack.getVar("#thread").toString();
- String lhs = U.getRawValue(stack).getIdentifier();
- String result = resultDataset.getIdentifier();
- provenanceLogger.info("UNARYOPERATOR thread="+thread+" operator="+name+" operand="+lhs+" result="+result);
- }
- } catch(IOException ioe) {
- throw new ExecutionException("Exception when logging provenance for unary operator", ioe);
- }
- }
}
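
Operators.java is no longer a FunctionsCollection of vdlop_* methods; each operator is a small nested BinaryOp/UnaryOp class. For illustration only, a further operator written in the same style would look like the sketch below. It is hypothetical (not part of r6170) and would have to sit inside Operators so it can reuse the private type/getInt/getFloat helpers and logBinaryProvenance; numeric operands are assumed, whereas a real operator would handle STRING the way Sum does.

// Hypothetical nested class for Operators.java, shown for illustration only.
public static class Max extends BinaryOp<DSHandle, DSHandle> {
    @Override
    protected DSHandle value(DSHandle v1, DSHandle v2) {
        Type t = type(v1, v2);
        DSHandle r;
        if (t == Types.INT) {
            r = new RootDataNode(Types.INT, Math.max(getInt(this, v1), getInt(this, v2)));
        }
        else {
            r = new RootDataNode(Types.FLOAT, Math.max(getFloat(this, v1), getFloat(this, v2)));
        }
        logBinaryProvenance("max", v1, v2, r);
        return r;
    }
}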
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/OutFileDirs.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/OutFileDirs.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/OutFileDirs.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -22,51 +22,53 @@
import java.util.List;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.arguments.ArgUtil;
-import org.globus.cog.karajan.arguments.VariableArguments;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.AbsFile;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.Path;
-public class OutFileDirs extends VDLFunction {
- public static final Arg STAGEOUTS = new Arg.Positional("stageouts");
-
- static {
- setArguments(OutFileDirs.class, new Arg[] { STAGEOUTS });
+public class OutFileDirs extends SwiftFunction {
+
+ private ArgRef<List<List<Object>>> stageouts;
+ private ChannelRef<Object> cr_vargs;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("stageouts"), returns(channel("...", DYNAMIC)));
}
+
@Override
- public Object function(VariableStack stack) throws ExecutionException {
- List files = TypeUtil.toList(STAGEOUTS.getValue(stack));
- VariableArguments ret = ArgUtil.getVariableReturn(stack);
+ public Object function(Stack stack) {
+ List<List<Object>> files = stageouts.getValue(stack);
try {
- for (Object f : files) {
- List pv = TypeUtil.toList(f);
- Path p = parsePath(pv.get(0), stack);
+ for (List<Object> pv : files) {
+ Path p = parsePath(pv.get(0));
DSHandle handle = (DSHandle) pv.get(1);
DSHandle leaf = handle.getField(p);
- String fname = VDLFunction.filename(leaf)[0];
+ String fname = SwiftFunction.filename(leaf)[0];
AbsFile af = new AbsFile(fname);
if ("file".equals(af.getProtocol())) {
String dir = af.getDir();
if (dir.startsWith("/") && dir.length() != 1) {
- ret.append(dir.substring(1));
+ cr_vargs.append(stack, dir.substring(1));
}
else if (dir.length() != 0) {
- ret.append(dir);
+ cr_vargs.append(stack, dir);
}
}
else {
- ret.append(af.getHost() + "/" + af.getDir());
+ cr_vargs.append(stack, af.getHost() + "/" + af.getDir());
}
}
}
catch (Exception e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
return null;
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/OutFiles.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/OutFiles.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/OutFiles.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -22,40 +22,40 @@
import java.util.List;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.arguments.ArgUtil;
-import org.globus.cog.karajan.arguments.VariableArguments;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.AbstractSequentialWithArguments;
-import org.griphyn.vdl.mapping.AbsFile;
+import k.rt.Channel;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.Path;
-public class OutFiles extends VDLFunction {
- public static final Arg STAGEOUTS = new Arg.Positional("stageouts");
-
- static {
- setArguments(OutFiles.class, new Arg[] { STAGEOUTS });
+public class OutFiles extends SwiftFunction {
+ private ArgRef<List<List<Object>>> stageouts;
+ private ChannelRef<Object> cr_vargs;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("stageouts"), returns(channel("...", DYNAMIC)));
}
@Override
- protected Object function(VariableStack stack) throws ExecutionException {
- List files = TypeUtil.toList(STAGEOUTS.getValue(stack));
- VariableArguments ret = ArgUtil.getVariableReturn(stack);
+ public Object function(Stack stack) {
+ List<List<Object>> files = this.stageouts.getValue(stack);
+ Channel<Object> ret = cr_vargs.get(stack);
try {
- for (Object f : files) {
- List pv = TypeUtil.toList(f);
- Path p = parsePath(pv.get(0), stack);
+ for (List<Object> pv : files) {
+ Path p = parsePath(pv.get(0));
DSHandle handle = (DSHandle) pv.get(1);
DSHandle leaf = handle.getField(p);
- String fname = argList(VDLFunction.filename(leaf), true);
- ret.append(fname);
+ String fname = argList(SwiftFunction.filename(leaf), true);
+ ret.add(fname);
}
}
catch (Exception e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
return null;
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/Parameterlog.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/Parameterlog.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/Parameterlog.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,52 +20,60 @@
*/
package org.griphyn.vdl.karajan.lib;
+import k.rt.Stack;
+import k.thr.LWThread;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.AbstractSequentialWithArguments;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.CompilationException;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.globus.cog.karajan.compiled.nodes.Node;
+import org.globus.cog.karajan.compiled.nodes.InternalFunction;
+import org.globus.cog.karajan.parser.WrapperNode;
import org.griphyn.vdl.karajan.functions.ConfigProperty;
+import org.griphyn.vdl.util.VDL2Config;
-public class Parameterlog extends AbstractSequentialWithArguments {
+public class Parameterlog extends InternalFunction {
public static final Logger logger = Logger.getLogger(Parameterlog.class);
-
- public static final Arg DIRECTION = new Arg.Positional("direction");
- public static final Arg VAR = new Arg.Positional("variable");
- public static final Arg ID = new Arg.Positional("id");
- public static final Arg THREAD = new Arg.Positional("thread");
-
- static {
- setArguments(Parameterlog.class, new Arg[] { DIRECTION, VAR, ID, THREAD });
+
+ private ArgRef<String> direction;
+ private ArgRef<Object> variable;
+ private ArgRef<String> id;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("direction", "variable", "id"));
}
private Boolean enabled;
+ private VarRef<VDL2Config> config;
@Override
- public void pre(VariableStack stack) throws ExecutionException {
- if (enabled == null) {
- enabled = "true".equals(ConfigProperty.getProperty("provenance.log", true, stack));
- }
+ protected Node compileBody(WrapperNode w, Scope argScope, Scope scope)
+ throws CompilationException {
+ config = scope.getVarRef("SWIFT_CONFIG");
+ return super.compileBody(w, argScope, scope);
}
-
-
- @Override
- protected void executeChildren(VariableStack stack) throws ExecutionException {
- if (enabled) {
- super.executeChildren(stack);
- }
- else {
- complete(stack);
- }
- }
+
@Override
- protected void post(VariableStack stack) throws ExecutionException {
- if (enabled) {
- logger.info("PARAM thread=" + THREAD.getValue(stack) + " direction="
- + DIRECTION.getValue(stack) + " variable=" + VAR.getValue(stack)
- + " provenanceid=" + ID.getValue(stack));
+ protected void runBody(LWThread thr) {
+ Stack stack = thr.getStack();
+ boolean run;
+ synchronized(this) {
+ if (enabled == null) {
+ enabled = "true".equals(ConfigProperty.getProperty("provenance.log", true, config.getValue(stack)));
+ }
+ run = enabled;
}
+ if (run) {
+ super.run(thr);
+ logger.info("PARAM thread=" + thr.getName() + " direction="
+ + direction.getValue(stack) + " variable=" + variable.getValue(stack)
+ + " provenanceid=" + id.getValue(stack));
+ }
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/PartialCloseDataset.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/PartialCloseDataset.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/PartialCloseDataset.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -17,24 +17,27 @@
package org.griphyn.vdl.karajan.lib;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.DSHandle;
-public class PartialCloseDataset extends VDLFunction {
+public class PartialCloseDataset extends SwiftFunction {
public static final Logger logger = Logger.getLogger(CloseDataset.class);
+
+ private ArgRef<DSHandle> var;
+ private ArgRef<Number> count;
- public static final Arg OA_COUNT = new Arg.Optional("count", 1);
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", optional("count", 1)));
+ }
- static {
- setArguments(PartialCloseDataset.class, new Arg[] { PA_VAR, OA_COUNT });
- }
-
- public Object function(VariableStack stack) throws ExecutionException {
- DSHandle var = (DSHandle) PA_VAR.getValue(stack);
+ @Override
+ public Object function(Stack stack) {
+ DSHandle var = this.var.getValue(stack);
if (logger.isDebugEnabled()) {
logger.debug("Partially closing " + var);
}
@@ -44,7 +47,7 @@
return null;
}
- int count = TypeUtil.toInt(OA_COUNT.getValue(stack));
+ int count = this.count.getValue(stack).intValue();
var.updateWriteRefCount(-count);
return null;
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/PathUtils.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/PathUtils.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/PathUtils.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -18,99 +18,122 @@
package org.griphyn.vdl.karajan.lib;
import java.util.ArrayList;
-import java.util.Iterator;
import java.util.List;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.functions.FunctionsCollection;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Param;
+import org.globus.cog.karajan.compiled.nodes.functions.AbstractSingleValuedFunction;
import org.globus.swift.catalog.types.Os;
import org.griphyn.vdl.mapping.AbsFile;
-public class PathUtils extends FunctionsCollection {
+public class PathUtils {
- public static final Arg PATH = new Arg.Positional("path");
+ public static class DirName extends AbstractSingleValuedFunction {
+ private ArgRef<String> path;
- static {
- setArguments("vdl_dirname", new Arg[] { PATH });
- }
+ @Override
+ protected Param[] getParams() {
+ return params("path");
+ }
- public String vdl_dirname(VariableStack stack) throws ExecutionException {
- String path = TypeUtil.toString(PATH.getValue(stack));
- return new AbsFile(path).getDir();
- }
+ @Override
+ public Object function(Stack stack) {
+ return new AbsFile(path.getValue(stack)).getDir();
+ }
+ }
- static {
- setArguments("vdl_reldirname", new Arg[] { PATH });
- }
+ public static class RelDirName extends AbstractSingleValuedFunction {
+ private ArgRef<String> path;
- public String vdl_reldirname(VariableStack stack) throws ExecutionException {
- String path = TypeUtil.toString(PATH.getValue(stack));
- String dir = new AbsFile(path).getDir();
- if (dir.startsWith("/")) {
- return dir.substring(1);
+ @Override
+ protected Param[] getParams() {
+ return params("path");
}
- else {
- return dir;
+
+ @Override
+ public Object function(Stack stack) {
+ String dir = new AbsFile(path.getValue(stack)).getDir();
+ if (dir.startsWith("/")) {
+ return dir.substring(1);
+ }
+ else {
+ return dir;
+ }
}
}
-
- static {
- setArguments("vdl_basename", new Arg[] { PATH });
- }
- public String vdl_basename(VariableStack stack) throws ExecutionException {
- String path = TypeUtil.toString(PATH.getValue(stack));
- return new AbsFile(path).getName();
+ public static class BaseName extends AbstractSingleValuedFunction {
+ private ArgRef<String> path;
+
+ @Override
+ protected Param[] getParams() {
+ return params("path");
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ return new AbsFile(path.getValue(stack)).getName();
+ }
}
- static {
- setArguments("vdl_provider", new Arg[] { PATH });
- }
+ public static class Provider extends AbstractSingleValuedFunction {
+ private ArgRef<String> path;
- public String vdl_provider(VariableStack stack) throws ExecutionException {
- String path = TypeUtil.toString(PATH.getValue(stack));
- return new AbsFile(path).getProtocol();
+ @Override
+ protected Param[] getParams() {
+ return params("path");
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ return new AbsFile(path.getValue(stack)).getProtocol();
+ }
}
-
- static {
- setArguments("vdl_hostname", new Arg[] { PATH });
- }
- public String vdl_hostname(VariableStack stack) throws ExecutionException {
- String path = TypeUtil.toString(PATH.getValue(stack));
- return new AbsFile(path).getHost();
+ public static class HostName extends AbstractSingleValuedFunction {
+ private ArgRef<String> path;
+
+ @Override
+ protected Param[] getParams() {
+ return params("path");
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ return new AbsFile(path.getValue(stack)).getHost();
+ }
}
- public static final Arg DIR = new Arg.Positional("dir");
- public static final Arg OS = new Arg.Optional("os");
-
- static {
- setArguments("vdl_dircat", new Arg[] { DIR, PATH, OS });
- }
+ public static class DirCat extends AbstractSingleValuedFunction {
+ private ArgRef<String> dir;
+ private ArgRef<String> path;
+ private ArgRef<Os> os;
- public String vdl_dircat(VariableStack stack) throws ExecutionException {
- String dir = TypeUtil.toString(DIR.getValue(stack));
- String path = TypeUtil.toString(PATH.getValue(stack));
- boolean windows = false;
- if (OS.isPresent(stack)) {
- Os os = (Os) OS.getValue(stack);
- windows = Os.WINDOWS.equals(os);
- }
- if (dir.equals("")) {
- return windowsify(path, windows);
+ @Override
+ protected Param[] getParams() {
+ return params("dir", "path", optional("os", Os.LINUX));
}
- else if (dir.endsWith("/")) {
- return windowsify(dir + path, windows);
+
+ @Override
+ public Object function(Stack stack) {
+ String dir = this.dir.getValue(stack);
+ String path = this.path.getValue(stack);
+ boolean windows = this.os.getValue(stack).equals(Os.WINDOWS);
+ if (dir.equals("")) {
+ return windowsify(path, windows);
+ }
+ else if (dir.endsWith("/")) {
+ return windowsify(dir + path, windows);
+ }
+ else {
+ return windowsify(dir + '/' + path, windows);
+ }
}
- else {
- return windowsify(dir + '/' + path, windows);
- }
}
- private String windowsify(String path, boolean windows) {
+ private static String windowsify(String path, boolean windows) {
if (windows) {
return path.replace('/', '\\');
}
@@ -118,18 +141,22 @@
return path;
}
}
+
+ public static class PathNames extends AbstractSingleValuedFunction {
+ private ArgRef<List<String>> files;
- public static final Arg FILES = new Arg.Positional("files");
- static {
- setArguments("vdl_pathnames", new Arg[] { FILES });
- }
+ @Override
+ protected Param[] getParams() {
+ return params("files");
+ }
- public Object[] vdl_pathnames(VariableStack stack) throws ExecutionException {
- List l = new ArrayList();
- Iterator i = TypeUtil.toIterator(FILES.getValue(stack));
- while (i.hasNext()) {
- l.add(new AbsFile((String) i.next()).getPath());
+ @Override
+ public String[] function(Stack stack) {
+ List<String> l = new ArrayList<String>();
+ for (String f : files.getValue(stack)) {
+ l.add(new AbsFile(f).getPath());
+ }
+ return l.toArray(new String[0]);
}
- return l.toArray(new String[0]);
}
}
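
The new PathUtils replaces one FunctionsCollection with many vdl_* entry points by a holder class containing one small nested class per function. A standalone sketch of that layout, using a plain interface in place of AbstractSingleValuedFunction (all names below are illustrative, not the Karajan API):

    import java.net.URI;

    // Sketch: one small nested class per path function instead of one big dispatcher.
    public class PathFunctions {

        interface SingleValuedFunction {
            Object apply(String path);
        }

        public static class DirName implements SingleValuedFunction {
            public Object apply(String path) {
                int i = path.lastIndexOf('/');
                return i < 0 ? "" : path.substring(0, i);
            }
        }

        public static class BaseName implements SingleValuedFunction {
            public Object apply(String path) {
                return path.substring(path.lastIndexOf('/') + 1);
            }
        }

        public static class Provider implements SingleValuedFunction {
            public Object apply(String path) {
                // "gsiftp://host/x" -> "gsiftp"; paths without a scheme fall back to "file"
                String scheme = URI.create(path).getScheme();
                return scheme == null ? "file" : scheme;
            }
        }

        public static void main(String[] args) {
            String p = "gsiftp://host/data/run01/out.txt";
            System.out.println(new DirName().apply(p));
            System.out.println(new BaseName().apply(p));
            System.out.println(new Provider().apply(p));
        }
    }
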
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/Range.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/Range.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/Range.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -28,12 +28,13 @@
import java.util.Map;
import java.util.Set;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
-import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.InvalidPathException;
import org.griphyn.vdl.mapping.Path;
import org.griphyn.vdl.mapping.RootArrayDataNode;
@@ -41,29 +42,33 @@
import org.griphyn.vdl.type.Type;
import org.griphyn.vdl.type.Types;
-public class Range extends VDLFunction {
- public static final SwiftArg PA_FROM = new SwiftArg.Positional("from");
- public static final SwiftArg PA_TO = new SwiftArg.Positional("to");
- public static final SwiftArg OA_STEP = new SwiftArg.Optional("step", new Double(1), Types.FLOAT);
+public class Range extends SwiftFunction {
+ private ArgRef<DSHandle> from;
+ private ArgRef<DSHandle> to;
+ private ArgRef<DSHandle> step;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("from", "to", optional("step", new RootDataNode(Types.FLOAT, 1))));
+ }
- static {
- setArguments(Range.class, new Arg[] { PA_FROM, PA_TO, OA_STEP });
- }
-
- public Object function(final VariableStack stack) throws ExecutionException {
+ @Override
+ public Object function(Stack stack) {
// TODO: deal with expression
- final Type type = PA_FROM.getType(stack);
- final double start = PA_FROM.getDoubleValue(stack);
- final double stop = PA_TO.getDoubleValue(stack);
- final double incr = OA_STEP.getDoubleValue(stack);
+ DSHandle from = this.from.getValue(stack);
+ DSHandle to = this.to.getValue(stack);
+ DSHandle step = this.step.getValue(stack);
+ final Type type = from.getType();
+ final double start = ((Number) from.getValue()).doubleValue();
+ final double stop = ((Number) to.getValue()).doubleValue();
+ final double incr = ((Number) step.getValue()).doubleValue();
// only deal with int and float
try {
final AbstractDataNode handle;
handle = new RootArrayDataNode(type.arrayType()) {
- final DSHandle h = this;
-
+ final DSHandle h = this;
{
closeShallow();
}
@@ -78,10 +83,7 @@
}
else {
int index = (Integer) path.getFirst();
- DSHandle value = new RootDataNode(type);
- value.init(null);
- value.setValue(new Double(start + incr * index));
- value.closeShallow();
+ DSHandle value = new RootDataNode(type, new Double(start + incr * index));
return Collections.singletonList(value);
}
}
@@ -106,15 +108,12 @@
private int key;
{
- value = new RootDataNode(type);
- value.init(null);
if (type == Types.INT) {
- value.setValue(new Integer((int) crt));
+ value = new RootDataNode(Types.INT, new Integer((int) crt));
}
else {
- value.setValue(new Double(crt));
+ value = new RootDataNode(Types.FLOAT, new Double(crt));
}
- value.closeShallow();
key = index;
}
@@ -159,7 +158,7 @@
return handle;
}
catch (Exception e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
}
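
The Range change above keeps the array node lazy: element values are computed as start + step * index when a field is requested, instead of being materialized up front. A simplified, list-based sketch of that idea (purely illustrative; the real Range produces DSHandle nodes, not java.util.List values):

    import java.util.AbstractList;
    import java.util.List;

    // Sketch: a range whose elements are computed on access rather than stored.
    public class LazyRange extends AbstractList<Double> {
        private final double start, stop, step;

        public LazyRange(double start, double stop, double step) {
            this.start = start; this.stop = stop; this.step = step;
        }

        @Override
        public Double get(int index) {
            if (index < 0 || index >= size()) {
                throw new IndexOutOfBoundsException(String.valueOf(index));
            }
            return start + step * index;
        }

        @Override
        public int size() {
            // number of values start, start+step, ... not exceeding stop
            return (int) Math.floor((stop - start) / step) + 1;
        }

        public static void main(String[] args) {
            List<Double> r = new LazyRange(1, 10, 1);
            System.out.println(r.size() + " elements, first=" + r.get(0)
                + " last=" + r.get(r.size() - 1));
        }
    }
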
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/RuntimeStats.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/RuntimeStats.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/RuntimeStats.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -19,32 +19,56 @@
import java.io.IOException;
import java.text.SimpleDateFormat;
+import java.util.ArrayList;
import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
import java.util.Map;
+import java.util.Set;
+import k.rt.Context;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+import k.thr.LWThread;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableNotFoundException;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.functions.FunctionsCollection;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.CompilationException;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.analyzer.Var;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.globus.cog.karajan.compiled.nodes.InternalFunction;
+import org.globus.cog.karajan.compiled.nodes.Node;
+import org.globus.cog.karajan.parser.WrapperNode;
import org.griphyn.vdl.util.VDL2Config;
/** this is an icky class that does too much with globals, but is for
proof of concept. */
-public class RuntimeStats extends FunctionsCollection {
+public class RuntimeStats {
- public static final String TICKER = "#swift-runtime-progress-ticker";
- public static final String PROGRESS = "#swift-runtime-progress";
+ public static final boolean TICKER_DISABLED;
+
+ static{
+ boolean disabled;
+ try{
+ disabled = "true".equalsIgnoreCase(VDL2Config.getConfig().getProperty("ticker.disable"));
+ }
+ catch (Exception e) {
+ disabled = false;
+ }
+ TICKER_DISABLED = disabled;
+ }
+
+ public static final String TICKER = "SWIFT_TICKER";
- public static final Arg PA_STATE = new Arg.Positional("state");
//formatter for timestamp against std.err lines
public static SimpleDateFormat formatter =
new SimpleDateFormat("E, dd MMM yyyy HH:mm:ss Z");
public static final int MIN_PERIOD_MS=1000;
- public static final int MAX_PERIOD_MS=30000;
+ public static final int MAX_PERIOD_MS=1000;
public static final String[] preferredOutputOrder = {
"uninitialized",
@@ -62,83 +86,110 @@
"Finished in previous run",
"Finished successfully"
};
+
+ public static class StartProgressTicker extends Node {
+ private VarRef<Context> context;
- static {
- setArguments("vdl_startprogressticker", new Arg[0]);
- setArguments("vdl_stopprogressticker", new Arg[0]);
- setArguments("vdl_setprogress", new Arg[] { PA_STATE } );
- setArguments("vdl_initprogressstate", new Arg[] { PA_STATE });
+ @Override
+ public Node compile(WrapperNode w, Scope scope)
+ throws CompilationException {
+ super.compile(w, scope);
+ context = scope.getVarRef("#context");
+ return TICKER_DISABLED ? null : this;
+ }
+
+ @Override
+ public void run(LWThread thr) {
+ ProgressTicker t = new ProgressTicker();
+ t.setDaemon(true);
+ t.start();
+ context.getValue(thr.getStack()).setAttribute(TICKER, t);
+ // Allow user to reformat output date
+ String format;
+ try {
+ format = VDL2Config.getDefaultConfig().getTickerDateFormat();
+ }
+ catch (IOException e) {
+ throw new ExecutionException(this, e);
+ }
+ if (format != null && format.length() > 0) {
+ formatter = new SimpleDateFormat(format);
+ }
+ }
}
- public static void setTicker(VariableStack stack, ProgressTicker ticker) {
- stack.setGlobal(TICKER, ticker);
+ public static class InitProgressState extends Node {
+ private VarRef<Context> context;
+ private ChannelRef<Object> cr_vargs;
+
+ @Override
+ public Node compile(WrapperNode w, Scope scope)
+ throws CompilationException {
+ super.compile(w, scope);
+ context = scope.getVarRef("#context");
+ Var.Channel r = scope.lookupChannel("...");
+ r.appendDynamic();
+ cr_vargs = scope.getChannelRef(r);
+ return TICKER_DISABLED ? null : this;
+ }
+
+ @Override
+ public void run(LWThread thr) {
+ ProgressState ps = new ProgressState();
+ ps.crt = "Initializing";
+ Stack stack = thr.getStack();
+ ((ProgressTicker) context.getValue(stack).getAttribute(TICKER)).addState(ps);
+ cr_vargs.get(stack).add(ps);
+ }
}
- public static ProgressTicker getTicker(VariableStack stack) {
- return (ProgressTicker) stack.getGlobal(TICKER);
- }
-
- public static void setProgress(VariableStack stack, RuntimeProgress p) {
- stack.parentFrame().setVar(PROGRESS, p);
- }
+ public static class SetProgress extends InternalFunction {
+ private ArgRef<ProgressState> ps;
+ private ArgRef<String> state;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("ps", "state"));
+ }
+
+ @Override
+ protected void runBody(LWThread thr) {
+ Stack stack = thr.getStack();
+ ps.getValue(stack).crt = state.getValue(stack);
+ }
+ }
- public static RuntimeProgress getProgress(VariableStack stack) throws VariableNotFoundException {
- return (RuntimeProgress) stack.getDeepVar(PROGRESS);
- }
- public Object vdl_startprogressticker(VariableStack stack) throws ExecutionException {
- ProgressTicker t = new ProgressTicker();
- t.setDaemon(true);
- t.start();
- setTicker(stack, t);
-
- // Allow user to reformat output date
- String format;
- try {
- format = VDL2Config.getDefaultConfig().getTickerDateFormat();
- }
- catch (IOException e) {
- throw new ExecutionException(e);
- }
- if (format != null && format.length() > 0)
- formatter =
- new SimpleDateFormat(format);
- return null;
- }
+ public static class StopProgressTicker extends Node {
+ private VarRef<Context> context;
- public Object vdl_setprogress(VariableStack stack) throws ExecutionException {
- setProgress(stack, TypeUtil.toString(PA_STATE.getValue(stack)));
- return null;
- }
+ @Override
+ public Node compile(WrapperNode w, Scope scope)
+ throws CompilationException {
+ super.compile(w, scope);
+ context = scope.getVarRef("#context");
+ return TICKER_DISABLED ? null : this;
+ }
- static public void setProgress(VariableStack stack, String newState) throws ExecutionException {
- RuntimeProgress p = getProgress(stack);
- ProgressTicker t = getTicker(stack);
- synchronized(t) {
- t.dec(p.status);
- t.inc(newState);
- }
- p.status = newState;
- t.dumpState();
- }
+ @Override
+ public void run(LWThread thr) {
+ ProgressTicker t = (ProgressTicker) context.getValue(thr.getStack()).getAttribute(TICKER);
+ t.finalDumpState();
+ t.shutdown();
+ }
+ }
- public Object vdl_initprogressstate(VariableStack stack) throws ExecutionException {
- RuntimeProgress rp = new RuntimeProgress();
- ProgressTicker p = getTicker(stack);
- setProgress(stack, rp);
- rp.status = "Initializing";
- synchronized(p) {
- p.inc(rp.status);
- }
- p.dumpState();
- return null;
- }
+
+ public static class ProgressState {
+ private String crt;
- public synchronized Object vdl_stopprogressticker(VariableStack stack) throws ExecutionException {
- ProgressTicker p = getTicker(stack);
- p.finalDumpState();
- p.shutdown();
- return null;
+ public void setState(String state) {
+ this.crt = state;
+ }
+
+ public String toString() {
+ return "ProgressState: " + crt;
+ }
}
@@ -146,7 +197,7 @@
public static final Logger logger = Logger.getLogger(ProgressTicker.class);
- private Map<String, Integer> counts;
+ private Set<ProgressState> states;
long start;
long lastDumpTime = 0;
@@ -157,7 +208,7 @@
public ProgressTicker() {
super("Progress ticker");
- counts = new HashMap<String, Integer>();
+ states = new HashSet<ProgressState>();
try {
if ("true".equalsIgnoreCase(VDL2Config.getConfig().getProperty("ticker.disable"))) {
logger.info("Ticker disabled in configuration file");
@@ -171,6 +222,12 @@
}
start = System.currentTimeMillis();
}
+
+ public void addState(ProgressState ps) {
+ synchronized(states) {
+ states.add(ps);
+ }
+ }
public void run() {
if (disabled) {
@@ -186,29 +243,7 @@
}
}
}
-
- public void inc(String state) {
- Integer crt = counts.get(state);
- if (crt == null) {
- counts.put(state, 1);
- }
- else {
- counts.put(state, crt + 1);
- }
- }
-
- public void dec(String state) {
- Integer crt = counts.get(state);
- if (crt != null) {
- if (crt == 1) {
- counts.remove(state);
- }
- else {
- counts.put(state, crt - 1);
- }
- }
- }
-
+
void shutdown() {
shutdown = true;
}
@@ -230,11 +265,33 @@
printStates("Final status:");
}
- public synchronized Map<String, Integer> getSummary() {
- return new HashMap<String, Integer>(counts);
+ private Map<String, Integer> getSummary() {
+ List<ProgressState> states;
+
+ synchronized(this.states) {
+ states = new ArrayList<ProgressState>(this.states);
+ }
+ Map<String, Integer> m = new HashMap<String, Integer>();
+
+ for (ProgressState s : states) {
+ inc(m, s);
+ }
+ return m;
+ }
+
+ private void inc(Map<String, Integer> m, ProgressState s) {
+ String v = s.crt;
+ Integer i = m.get(v);
+ if (i == null) {
+ i = 1;
+ }
+ else {
+ i = i + 1;
+ }
+ m.put(v, i);
}
-
- void printStates(String prefix) {
+
+ synchronized void printStates(String prefix) {
Map<String, Integer> summary = getSummary();
StringBuilder sb = new StringBuilder();
@@ -274,8 +331,4 @@
}
}
-
- class RuntimeProgress {
- String status = "uninitialized";
- }
}
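
The RuntimeStats rewrite above drops the shared inc/dec counter map in favor of per-task ProgressState objects registered with the ticker; the summary map is rebuilt from a snapshot of those states on demand. A standalone sketch of that design (names are illustrative only):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    // Sketch: each task owns a mutable state; the ticker summarizes on demand.
    public class TickerSketch {

        static class ProgressState {
            volatile String crt = "Initializing";
            void setState(String s) { crt = s; }
        }

        private final Set<ProgressState> states = new HashSet<ProgressState>();

        public void addState(ProgressState ps) {
            synchronized (states) {
                states.add(ps);
            }
        }

        public Map<String, Integer> getSummary() {
            List<ProgressState> snapshot;
            synchronized (states) {
                snapshot = new ArrayList<ProgressState>(states);
            }
            Map<String, Integer> m = new HashMap<String, Integer>();
            for (ProgressState s : snapshot) {
                Integer i = m.get(s.crt);
                m.put(s.crt, i == null ? 1 : i + 1);
            }
            return m;
        }

        public static void main(String[] args) {
            TickerSketch t = new TickerSketch();
            ProgressState a = new ProgressState(), b = new ProgressState();
            t.addState(a); t.addState(b);
            a.setState("Active");
            System.out.println(t.getSummary()); // e.g. {Active=1, Initializing=1}
        }
    }
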
Deleted: branches/faster/src/org/griphyn/vdl/karajan/lib/SequentialWithID.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/SequentialWithID.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/SequentialWithID.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,39 +0,0 @@
-/*
- * Copyright 2012 University of Chicago
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.griphyn.vdl.karajan.lib;
-
-import org.globus.cog.karajan.workflow.nodes.Sequential;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.ThreadingContext;
-import org.globus.cog.karajan.workflow.ExecutionException;
-
-
-/** launch a single child, giving it a new thread ID, but without starting
- an entire karajan thread
-*/
-
-public class SequentialWithID extends Sequential {
-
- protected void executeChildren(VariableStack stack) throws ExecutionException {
- ThreadingContext tc = (ThreadingContext) stack.getVar("#thread");
- stack.setVar("#thread", tc.split(1));
- super.executeChildren(stack);
- }
-
-}
-
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/SetFieldValue.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/SetFieldValue.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/SetFieldValue.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,19 +20,28 @@
*/
package org.griphyn.vdl.karajan.lib;
+import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
+import java.util.List;
import java.util.Map;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+import k.thr.LWThread;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableNotFoundException;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.futures.FutureFault;
-import org.globus.cog.karajan.workflow.futures.FutureNotYetAvailable;
-import org.griphyn.vdl.karajan.Pair;
-import org.griphyn.vdl.karajan.PairIterator;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.CompilationException;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.globus.cog.karajan.analyzer.VariableNotFoundException;
+import org.globus.cog.karajan.compiled.nodes.Node;
+import org.globus.cog.karajan.futures.FutureFault;
+import org.globus.cog.karajan.futures.FutureNotYetAvailable;
+import org.globus.cog.karajan.parser.WrapperNode;
+import org.griphyn.vdl.karajan.PairSet;
import org.griphyn.vdl.karajan.WaitingThreadsMonitor;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
@@ -41,30 +50,76 @@
import org.griphyn.vdl.mapping.Path;
import org.griphyn.vdl.type.Type;
-public class SetFieldValue extends VDLFunction {
+public class SetFieldValue extends SwiftFunction {
public static final Logger logger = Logger.getLogger(SetFieldValue.class);
- public static final Arg PA_VALUE = new Arg.Positional("value");
+ protected ArgRef<DSHandle> var;
+ protected ArgRef<Object> path;
+ protected ArgRef<AbstractDataNode> value;
+
+ protected ArgRef<String> _traceline;
- static {
- setArguments(SetFieldValue.class, new Arg[] { OA_PATH, PA_VAR, PA_VALUE });
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", "value", optional("path", Path.EMPTY_PATH), optional("_traceline", null)));
+ }
+
+ private String src, dest;
+ private Tracer tracer;
+
+ protected VarRef<State> state;
+
+ private static class State {
+ public final List<StateEntry> l = new ArrayList<StateEntry>();
}
- private String src, dest;
- private Tracer tracer;
+ private static class StateEntry {
+ private Object value;
+ private Object it;
+
+ @SuppressWarnings("unchecked")
+ public <T> T it() {
+ return (T) it;
+ }
+
+ public void it(Object it) {
+ this.it= it;
+ }
+
+ @SuppressWarnings("unchecked")
+ public <T> T value() {
+ return (T) value;
+ }
+
+ public void value(Object value) {
+ this.value= value;
+ }
+ }
+
+ @Override
+ protected void addLocals(Scope scope) {
+ super.addLocals(scope);
+ state = scope.getVarRef(scope.addVar("#state"));
+ }
- @Override
- protected void initializeStatic() {
- super.initializeStatic();
+ @Override
+ public Node compile(WrapperNode w, Scope scope)
+ throws CompilationException {
+ Node fn = super.compile(w, scope);
+ if (_traceline.getValue() != null) {
+ setLine(Integer.parseInt(_traceline.getValue()));
+ }
tracer = Tracer.getTracer(this);
+ return fn;
}
- public Object function(VariableStack stack) throws ExecutionException {
- DSHandle var = (DSHandle) PA_VAR.getValue(stack);
+ @Override
+ public Object function(Stack stack) {
+ DSHandle var = this.var.getValue(stack);
try {
- Path path = parsePath(OA_PATH.getValue(stack), stack);
+ Path path = parsePath(this.path.getValue(stack));
DSHandle leaf = var.getField(path);
- AbstractDataNode value = (AbstractDataNode) PA_VALUE.getValue(stack);
+ AbstractDataNode value = this.value.getValue(stack);
if (src == null) {
dest = Tracer.getVarName(var);
@@ -72,7 +127,7 @@
}
if (tracer.isEnabled()) {
- log(leaf, value, stack);
+ log(leaf, value, LWThread.currentThread());
}
// TODO want to do a type check here, for runtime type checking
@@ -81,23 +136,25 @@
// for type conversion here; but would be useful to have
// type checking.
- deepCopy(leaf, value, stack, 0);
+ deepCopy(leaf, value, stack);
+
return null;
}
catch (FutureFault f) {
+ LWThread thr = LWThread.currentThread();
if (tracer.isEnabled()) {
- tracer.trace(stack, var + " waiting for " + Tracer.getFutureName(f.getFuture()));
+ tracer.trace(thr, var + " waiting for " + Tracer.getFutureName(f.getFuture()));
}
- WaitingThreadsMonitor.addOutput(stack, Collections.singletonList(var));
+ WaitingThreadsMonitor.addOutput(thr, Collections.singletonList(var));
throw f;
}
catch (Exception e) { // TODO tighten this
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
- private void log(DSHandle leaf, DSHandle value, VariableStack stack) throws VariableNotFoundException {
- tracer.trace(stack, dest + " = " + Tracer.unwrapHandle(value));
+ private void log(DSHandle leaf, DSHandle value, LWThread thr) throws VariableNotFoundException {
+ tracer.trace(thr, dest + " = " + Tracer.unwrapHandle(value));
}
String unpackHandles(DSHandle handle, Map<Comparable<?>, DSHandle> handles) {
@@ -119,21 +176,33 @@
return sb.toString();
}
+ protected void deepCopy(DSHandle dest, DSHandle source, Stack stack) {
+ State state = this.state.getValue(stack);
+ if (state == null) {
+ state = new State();
+ this.state.setValue(stack, state);
+ }
+
+ deepCopy(dest, source, state, 0);
+
+ this.state.setValue(stack, null);
+ }
+
/** make dest look like source - if its a simple value, copy that
and if its an array then recursively copy */
- public static void deepCopy(DSHandle dest, DSHandle source, VariableStack stack, int level) throws ExecutionException {
- ((AbstractDataNode) source).waitFor();
+ public void deepCopy(DSHandle dest, DSHandle source, State state, int level) {
+ ((AbstractDataNode) source).waitFor(this);
if (source.getType().isPrimitive()) {
dest.setValue(source.getValue());
}
else if (source.getType().isArray()) {
- copyArray(dest, source, stack, level);
+ copyArray(dest, source, state, level);
}
else if (source.getType().isComposite()) {
- copyStructure(dest, source, stack, level);
+ copyStructure(dest, source, state, level);
}
else {
- copyNonComposite(dest, source, stack, level);
+ copyNonComposite(dest, source, state, level);
}
}
@@ -147,27 +216,26 @@
}
}
- @SuppressWarnings("unchecked")
- private static void copyStructure(DSHandle dest, DSHandle source,
- VariableStack stack, int level) throws ExecutionException {
+ private void copyStructure(DSHandle dest, DSHandle source, State state, int level) {
Type type = dest.getType();
- Iterator<String> fni = (Iterator<String>) stack.currentFrame().getVar("it" + level);
+ StateEntry se = getStateEntry(state, level);
+ Iterator<String> fni = se.it();
if (fni == null) {
fni = type.getFieldNames().iterator();
- stack.currentFrame().setVar("it" + level, fni);
+ se.it = fni;
}
- String fname = (String) stack.currentFrame().getVar("f" + level);
+ String fname = se.value();
while (fni.hasNext() || fname != null) {
if (fname == null) {
fname = fni.next();
- stack.currentFrame().setVar("f" + level, fname);
+ se.value(fname);
}
Path fpath = Path.EMPTY_PATH.addFirst(fname);
try {
DSHandle dstf = dest.getField(fpath);
try {
DSHandle srcf = source.getField(fpath);
- deepCopy(dstf, srcf, stack, level + 1);
+ deepCopy(dstf, srcf, state, level + 1);
}
catch (InvalidPathException e) {
// do nothing. It's an unused field in the source.
@@ -177,15 +245,29 @@
throw new ExecutionException("Internal type inconsistency detected. " +
dest + " claims not to have a " + fname + " field");
}
- stack.currentFrame().deleteVar("f" + level);
+ se.value(null);
fname = null;
}
- stack.currentFrame().deleteVar("it" + level);
+ popStateEntry(state);
dest.closeShallow();
}
- private static void copyNonComposite(DSHandle dest, DSHandle source,
- VariableStack stack, int level) throws ExecutionException {
+ private static StateEntry getStateEntry(State state, int level) {
+ if (state.l.size() == level) {
+ StateEntry e = new StateEntry();
+ state.l.add(e);
+ return e;
+ }
+ else {
+ return state.l.get(level);
+ }
+ }
+
+ private static void popStateEntry(State state) {
+ state.l.remove(state.l.size() - 1);
+ }
+
+ private static void copyNonComposite(DSHandle dest, DSHandle source, State state, int level) {
Path dpath = dest.getPathFromRoot();
Mapper dmapper = dest.getRoot().getMapper();
if (dmapper.canBeRemapped(dpath)) {
@@ -196,8 +278,9 @@
dest.closeShallow();
}
else {
- if (stack.currentFrame().isDefined("fc")) {
- FileCopier fc = (FileCopier) stack.currentFrame().getVarAndDelete("fc");
+ StateEntry se = getStateEntry(state, level);
+ FileCopier fc = se.value();
+ if (fc != null) {
if (!fc.isClosed()) {
throw new FutureNotYetAvailable(fc);
}
@@ -209,9 +292,9 @@
dest.closeShallow();
}
else {
- FileCopier fc = new FileCopier(source.getMapper().map(source.getPathFromRoot()),
+ fc = new FileCopier(source.getMapper().map(source.getPathFromRoot()),
dest.getMapper().map(dpath));
- stack.setVar("fc", fc);
+ se.value(fc);
try {
fc.start();
}
@@ -220,24 +303,22 @@
}
throw new FutureNotYetAvailable(fc);
}
+ popStateEntry(state);
}
}
- private static void copyArray(DSHandle dest, DSHandle source,
- VariableStack stack, int level) throws ExecutionException {
- PairIterator it;
- if (stack.isDefined("it" + level)) {
- it = (PairIterator) stack.getVar("it" + level);
+ private void copyArray(DSHandle dest, DSHandle source, State state, int level) {
+ StateEntry se = getStateEntry(state, level);
+ Iterator<List<?>> it = se.it();
+ if (it == null) {
+ it = new PairSet(source.getArrayValue()).iterator();
+ se.it(it);
}
- else {
- it = new PairIterator(source.getArrayValue());
- stack.setVar("it" + level, it);
- }
- Pair pair = (Pair) stack.currentFrame().getVar("p" + level);
+ List<?> pair = se.value();
while (it.hasNext() || pair != null) {
if (pair == null) {
- pair = (Pair) it.next();
- stack.currentFrame().setVar("p" + level, pair);
+ pair = it.next();
+ se.value(pair);
}
Comparable<?> lhs = (Comparable<?>) pair.get(0);
DSHandle rhs = (DSHandle) pair.get(1);
@@ -250,11 +331,11 @@
catch (InvalidPathException ipe) {
throw new ExecutionException("Could not get destination field",ipe);
}
- deepCopy(field, rhs, stack, level + 1);
- stack.currentFrame().deleteVar("p" + level);
+ deepCopy(field, rhs, state, level + 1);
+ se.value(null);
pair = null;
}
- stack.currentFrame().deleteVar("it" + level);
+ popStateEntry(state);
dest.closeShallow();
}
}
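
The key move in SetFieldValue above is relocating the deep-copy iteration state from per-frame stack variables ("it" + level, "p" + level, "fc") into an explicit per-level State/StateEntry list, so the copy can be suspended on a FutureFault and resumed without redoing completed work. A self-contained sketch of that technique on nested maps ("NotReady" stands in for FutureNotYetAvailable; all names are illustrative):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.List;
    import java.util.Map;

    // Sketch: recursion state kept in an explicit per-level list so a deep copy
    // can be suspended on an unready value and later resumed where it left off.
    public class ResumableCopy {

        static class NotReady extends RuntimeException {}

        static class Entry {          // one level of suspended iteration state
            Iterator<String> it;
            String currentKey;
        }

        private final List<Entry> state = new ArrayList<Entry>();

        private Entry entry(int level) {
            if (state.size() == level) {
                state.add(new Entry());
            }
            return state.get(level);
        }

        @SuppressWarnings("unchecked")
        void copy(Map<String, Object> dest, Map<String, Object> src, int level) {
            Entry e = entry(level);
            if (e.it == null) {
                e.it = new ArrayList<String>(src.keySet()).iterator();
            }
            while (e.it.hasNext() || e.currentKey != null) {
                if (e.currentKey == null) {
                    e.currentKey = e.it.next();
                }
                Object v = src.get(e.currentKey);
                if (v instanceof Map) {
                    Map<String, Object> child = (Map<String, Object>) dest.get(e.currentKey);
                    if (child == null) {
                        child = new HashMap<String, Object>();
                        dest.put(e.currentKey, child);
                    }
                    copy(child, (Map<String, Object>) v, level + 1);
                }
                else if (v == null) {
                    throw new NotReady();   // suspend; currentKey survives for the retry
                }
                else {
                    dest.put(e.currentKey, v);
                }
                e.currentKey = null;
            }
            state.remove(state.size() - 1); // pop this level on completion
        }

        public static void main(String[] args) {
            Map<String, Object> src = new HashMap<String, Object>();
            src.put("a", 1);
            src.put("b", null);             // not ready yet
            Map<String, Object> dest = new HashMap<String, Object>();
            ResumableCopy rc = new ResumableCopy();
            try {
                rc.copy(dest, src, 0);
            }
            catch (NotReady nr) {
                src.put("b", 2);            // value arrives; retry from saved state
                rc.copy(dest, src, 0);
            }
            System.out.println(dest);       // {a=1, b=2} (order may vary)
        }
    }
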
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/SetFutureFault.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/SetFutureFault.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/SetFutureFault.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,47 +20,53 @@
*/
package org.griphyn.vdl.karajan.lib;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.DataDependentException;
import org.griphyn.vdl.mapping.MappingDependentException;
import org.griphyn.vdl.mapping.Path;
-public class SetFutureFault extends VDLFunction {
+public class SetFutureFault extends SwiftFunction {
public static final Logger logger = Logger.getLogger(SetFutureFault.class);
+
+ private ArgRef<DSHandle> var;
+ private ArgRef<Object> path;
+ private ArgRef<Exception> exception;
+ private ArgRef<Boolean> mapping;
- public static final Arg OA_EXCEPTION = new Arg.Optional("exception", null);
- public static final Arg OA_MAPPING = new Arg.Optional("mapping", Boolean.FALSE);
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", optional("path", Path.EMPTY_PATH),
+ optional("exception", null), optional("mapping", false)));
+ }
- static {
- setArguments(SetFutureFault.class, new Arg[] { OA_PATH, PA_VAR, OA_EXCEPTION, OA_MAPPING });
- }
-
- public Object function(VariableStack stack) throws ExecutionException {
- DSHandle var = (DSHandle) PA_VAR.getValue(stack);
- boolean mapping = TypeUtil.toBoolean(OA_MAPPING.getValue(stack));
+ @Override
+ public Object function(Stack stack) {
+ DSHandle var = this.var.getValue(stack);
+ boolean mapping = this.mapping.getValue(stack);
try {
- Path path = parsePath(OA_PATH.getValue(stack), stack);
+ Path path = parsePath(this.path.getValue(stack));
DSHandle leaf = var.getField(path);
if (logger.isInfoEnabled()) {
logger.info("Failing " + leaf + " (mapping=" + mapping + ")");
}
synchronized (leaf) {
- Object value = OA_EXCEPTION.getValue(stack);
+ Exception e = this.exception.getValue(stack);
if (mapping) {
- leaf.setValue(new MappingDependentException(leaf, (Exception) value));
+ leaf.setValue(new MappingDependentException(leaf, e));
}
else {
- leaf.setValue(new DataDependentException(leaf, (Exception) value));
+ leaf.setValue(new DataDependentException(leaf, e));
}
}
}
catch (Exception e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
return null;
}
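
SetFutureFault above fails a dataset by storing a wrapped exception as the value of the future-like leaf, so any reader of that value sees the dependent failure. A tiny standalone sketch of that "value or fault through the same slot" idea (ValueOrFault is illustrative, not the DSHandle API):

    // Sketch: a future-like holder whose value slot can carry either a result or a fault.
    public class ValueOrFault<T> {
        private Object value;       // either a T or a RuntimeException
        private boolean set;

        public synchronized void setValue(T v) {
            value = v;
            set = true;
            notifyAll();
        }

        public synchronized void fail(RuntimeException cause) {
            value = cause;          // the fault travels through the same slot
            set = true;
            notifyAll();
        }

        @SuppressWarnings("unchecked")
        public synchronized T get() throws InterruptedException {
            while (!set) {
                wait();
            }
            if (value instanceof RuntimeException) {
                throw (RuntimeException) value;   // consumers see the dependent failure
            }
            return (T) value;
        }

        public static void main(String[] args) throws InterruptedException {
            ValueOrFault<Integer> f = new ValueOrFault<Integer>();
            f.fail(new IllegalStateException("upstream task failed"));
            try {
                f.get();
            }
            catch (IllegalStateException e) {
                System.out.println("fault propagated: " + e.getMessage());
            }
        }
    }
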
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/SetWaitCount.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/SetWaitCount.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/SetWaitCount.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -17,30 +17,34 @@
package org.griphyn.vdl.karajan.lib;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.DSHandle;
-public class SetWaitCount extends VDLFunction {
+public class SetWaitCount extends SwiftFunction {
public static final Logger logger = Logger.getLogger(CloseDataset.class);
- public static final Arg OA_COUNT = new Arg.Optional("count", 1);
+ private ArgRef<DSHandle> var;
+ private ArgRef<Number> count;
- static {
- setArguments(SetWaitCount.class, new Arg[] { PA_VAR, OA_COUNT });
- }
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", optional("count", 1)));
+ }
- public Object function(VariableStack stack) throws ExecutionException {
- DSHandle var = (DSHandle) PA_VAR.getValue(stack);
+ @Override
+ public Object function(Stack stack) {
+ DSHandle var = this.var.getValue(stack);
if (var.isClosed()) {
throw new ExecutionException("Attempted to set a wait count for a closed variable " + var);
}
- int count = TypeUtil.toInt(OA_COUNT.getValue(stack));
+ int count = this.count.getValue(stack).intValue();
var.setWriteRefCount(count);
return null;
}
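
SetWaitCount and PartialCloseDataset (earlier in this commit) together implement a write-reference-count protocol: the count is set to the number of expected writers, each writer releases its share, and the variable closes when the count reaches zero. A minimal sketch of that protocol (illustrative only; the real logic lives in the DSHandle implementations):

    import java.util.concurrent.atomic.AtomicInteger;

    // Sketch: a variable that closes itself when its last expected writer finishes.
    public class WriteCountedVar {
        private final AtomicInteger writeRefCount = new AtomicInteger();
        private volatile boolean closed;

        public void setWriteRefCount(int count) {
            if (closed) {
                throw new IllegalStateException("Attempted to set a wait count for a closed variable");
            }
            writeRefCount.set(count);
        }

        public void updateWriteRefCount(int delta) {
            int remaining = writeRefCount.addAndGet(delta);
            if (remaining <= 0) {
                closed = true;      // last writer done: close the variable
            }
        }

        public boolean isClosed() {
            return closed;
        }

        public static void main(String[] args) {
            WriteCountedVar v = new WriteCountedVar();
            v.setWriteRefCount(2);  // two statements will write to this variable
            v.updateWriteRefCount(-1);
            System.out.println("closed after first writer? " + v.isClosed());
            v.updateWriteRefCount(-1);
            System.out.println("closed after second writer? " + v.isClosed());
        }
    }
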
Added: branches/faster/src/org/griphyn/vdl/karajan/lib/SiteCatalog.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/SiteCatalog.java (rev 0)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/SiteCatalog.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -0,0 +1,223 @@
+//----------------------------------------------------------------------
+//This code is developed as part of the Java CoG Kit project
+//The terms of the license can be found at http://www.cogkit.org/license
+//This message may not be removed or altered.
+//----------------------------------------------------------------------
+
+/*
+ * Created on Jan 7, 2013
+ */
+package org.griphyn.vdl.karajan.lib;
+
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.abstraction.impl.common.AbstractionFactory;
+import org.globus.cog.abstraction.impl.common.ProviderMethodException;
+import org.globus.cog.abstraction.impl.common.task.ExecutionServiceImpl;
+import org.globus.cog.abstraction.impl.common.task.InvalidProviderException;
+import org.globus.cog.abstraction.impl.common.task.ServiceContactImpl;
+import org.globus.cog.abstraction.impl.common.task.ServiceImpl;
+import org.globus.cog.abstraction.interfaces.Service;
+import org.globus.cog.abstraction.interfaces.ServiceContact;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Param;
+import org.globus.cog.karajan.compiled.nodes.functions.AbstractSingleValuedFunction;
+import org.globus.cog.karajan.util.BoundContact;
+import org.globus.cog.karajan.util.ContactSet;
+import org.globus.swift.catalog.site.Parser;
+import org.w3c.dom.Document;
+import org.w3c.dom.NamedNodeMap;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+
+public class SiteCatalog extends AbstractSingleValuedFunction {
+ private ArgRef<String> fileName;
+
+ @Override
+ protected Param[] getParams() {
+ return params("fileName");
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ String fn = fileName.getValue(stack);
+ Parser p = new Parser(fn);
+ try {
+ Document doc = p.parse();
+ return buildResources(doc);
+ }
+ catch (Exception e) {
+ throw new ExecutionException(this, "Failed to parse site catalog", e);
+ }
+ }
+
+ private Object buildResources(Document doc) {
+ ContactSet cs = new ContactSet();
+ NodeList pools = doc.getElementsByTagName("config").item(0).getChildNodes();
+ for (int i = 0; i < pools.getLength(); i++) {
+ try {
+ BoundContact bc = pool(pools.item(i));
+ if (bc != null) {
+ cs.addContact(bc);
+ }
+ }
+ catch (Exception e) {
+ System.err.println("Invalid pool entry '" + attr(pools.item(i), "name") + "': " + e.getMessage());
+ }
+ }
+ return cs;
+ }
+
+ private BoundContact pool(Node n) throws InvalidProviderException, ProviderMethodException {
+ if (n.getNodeType() != Node.ELEMENT_NODE) {
+ return null;
+ }
+ String name = attr(n, "handle");
+ BoundContact bc = new BoundContact(name);
+
+ String sysinfo = attr(n, "sysinfo");
+ if (sysinfo != null) {
+ bc.addProperty("sysinfo", sysinfo);
+ }
+
+ NodeList cs = n.getChildNodes();
+
+ for (int i = 0; i < cs.getLength(); i++) {
+ Node c = cs.item(i);
+ if (c.getNodeType() != Node.ELEMENT_NODE) {
+ continue;
+ }
+ String ctype = c.getNodeName();
+
+ if (ctype.equals("gridftp")) {
+ bc.addService(gridftp(c));
+ }
+ else if (ctype.equals("jobmanager")) {
+ bc.addService(jobmanager(c));
+ }
+ else if (ctype.equals("execution")) {
+ bc.addService(execution(c));
+ }
+ else if (ctype.equals("filesystem")) {
+ bc.addService(filesystem(c));
+ }
+ else if (ctype.equals("workdirectory")) {
+ bc.addProperty("workdir", text(c));
+ }
+ else if (ctype.equals("scratch")) {
+ bc.addProperty("scratch", text(c));
+ }
+ else if (ctype.equals("env")) {
+ env(bc, c);
+ }
+ else if (ctype.equals("profile")) {
+ profile(bc, c);
+ }
+ else {
+ System.err.println("Unknown node type: " + ctype);
+ }
+ }
+ return bc;
+ }
+
+ private Service jobmanager(Node n) throws InvalidProviderException, ProviderMethodException {
+ String provider;
+ String url = attr(n, "url");
+ String major = attr(n, "major");
+ if (url.equals("local://localhost")) {
+ provider = "local";
+ }
+ else if (url.equals("pbs://localhost")) {
+ provider = "pbs";
+ }
+ else if ("2".equals(major)) {
+ provider = "gt2";
+ }
+ else if ("4".equals(major)) {
+ provider = "gt4";
+ }
+ else {
+ throw new IllegalArgumentException("Unknown job manager version: " + major + ", url = '" + url + "'");
+ }
+
+ ServiceContact contact = new ServiceContactImpl(url);
+ return new ServiceImpl(provider, Service.EXECUTION,
+ contact, AbstractionFactory.newSecurityContext(provider, contact));
+ }
+
+ private Service gridftp(Node n) throws InvalidProviderException, ProviderMethodException {
+ String url = attr(n, "url");
+ if (url.equals("local://localhost")) {
+ return new ServiceImpl("local", Service.FILE_OPERATION, new ServiceContactImpl("localhost"), null);
+ }
+ else {
+ ServiceContact contact = new ServiceContactImpl(url);
+ return new ServiceImpl("gsiftp", Service.FILE_OPERATION,
+ contact, AbstractionFactory.newSecurityContext("gsiftp", contact));
+ }
+ }
+
+ private Service execution(Node n) throws InvalidProviderException, ProviderMethodException {
+ String provider = attr(n, "provider");
+ String url = attr(n, "url");
+ String jobManager = attr(n, "jobManager");
+ ServiceContact contact = null;
+ if (url != null) {
+ contact = new ServiceContactImpl(url);
+ }
+ return new ExecutionServiceImpl(provider, contact,
+ AbstractionFactory.newSecurityContext(provider, contact), jobManager);
+ }
+
+ private Service filesystem(Node n) throws InvalidProviderException, ProviderMethodException {
+ String provider = attr(n, "provider");
+ String url = attr(n, "url");
+ ServiceContact contact = null;
+ if (url != null) {
+ contact = new ServiceContactImpl(url);
+ }
+ return new ServiceImpl(provider, Service.FILE_OPERATION,
+ contact, AbstractionFactory.newSecurityContext(provider, contact));
+ }
+
+ private void env(BoundContact bc, Node n) {
+ String key = attr(n, "key");
+ String value = text(n);
+
+ bc.addProperty("env:" + key, value);
+ }
+
+ private void profile(BoundContact bc, Node n) {
+ String ns = attr(n, "namespace");
+ String key = attr(n, "key");
+ String value = text(n);
+
+ if (ns.equals("karajan")) {
+ bc.addProperty(key, value);
+ }
+ else {
+ bc.addProperty(ns + ":" + key, value);
+ }
+ }
+
+ private String text(Node n) {
+ return n.getFirstChild().getNodeValue();
+ }
+
+ private String attr(Node n, String name) {
+ NamedNodeMap attrs = n.getAttributes();
+ if (attrs != null) {
+ Node attr = attrs.getNamedItem(name);
+ if (attr == null) {
+ return null;
+ }
+ else {
+ return attr.getNodeValue();
+ }
+ }
+ else {
+ return null;
+ }
+ }
+}
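
The new SiteCatalog walks the DOM children of the <config> element and dispatches on each pool's child element names (execution, gridftp, workdirectory, profile, ...). A standalone sketch of that walk using only the JDK DOM API; the embedded XML is a made-up example, so consult the actual site catalog schema for the authoritative element set:

    import java.io.ByteArrayInputStream;
    import javax.xml.parsers.DocumentBuilderFactory;
    import org.w3c.dom.Document;
    import org.w3c.dom.Node;
    import org.w3c.dom.NodeList;

    // Sketch: iterate pools under <config>, then dispatch on each child element name.
    public class SiteCatalogWalk {
        public static void main(String[] args) throws Exception {
            String xml =
                "<config>" +
                "  <pool handle='localhost'>" +
                "    <execution provider='local'/>" +
                "    <workdirectory>/tmp/swiftwork</workdirectory>" +
                "    <profile namespace='karajan' key='jobThrottle'>0.07</profile>" +
                "  </pool>" +
                "</config>";
            Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
                .parse(new ByteArrayInputStream(xml.getBytes("UTF-8")));
            NodeList pools = doc.getElementsByTagName("config").item(0).getChildNodes();
            for (int i = 0; i < pools.getLength(); i++) {
                Node pool = pools.item(i);
                if (pool.getNodeType() != Node.ELEMENT_NODE) {
                    continue;
                }
                System.out.println("pool: "
                    + pool.getAttributes().getNamedItem("handle").getNodeValue());
                NodeList cs = pool.getChildNodes();
                for (int j = 0; j < cs.getLength(); j++) {
                    Node c = cs.item(j);
                    if (c.getNodeType() == Node.ELEMENT_NODE) {
                        System.out.println("  " + c.getNodeName() + " -> "
                            + c.getTextContent().trim());
                    }
                }
            }
        }
    }
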
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/SiteProfile.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/SiteProfile.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/SiteProfile.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -25,30 +25,34 @@
import java.util.Map;
import java.util.Set;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.globus.cog.karajan.util.BoundContact;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
import org.globus.swift.catalog.types.Os;
import org.griphyn.vdl.util.FQN;
-public class SiteProfile extends VDLFunction {
+public class SiteProfile extends SwiftFunction {
public static final Logger logger = Logger.getLogger(SiteProfile.class);
- public static final Arg PA_HOST = new Arg.Positional("host");
- public static final Arg PA_FQN = new Arg.Positional("fqn");
- public static final Arg OA_DEFAULT = new Arg.Optional("default", null);
+ private ArgRef<BoundContact> host;
+ private ArgRef<String> fqn;
+ private ArgRef<Object> _default;
+ private ChannelRef<Object> cr_vargs;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("host", "fqn", optional("default", null)), returns(channel("...", 1)));
+ }
- static {
- setArguments(SiteProfile.class, new Arg[] { PA_HOST, PA_FQN, OA_DEFAULT });
+ public Object function(Stack stack) throws ExecutionException {
+ BoundContact bc = host.getValue(stack);
+ return getSingle(bc, new FQN(fqn.getValue(stack)), _default.getValue(stack));
}
-
- public Object function(VariableStack stack) throws ExecutionException {
- BoundContact bc = (BoundContact) PA_HOST.getValue(stack);
- return getSingle(stack, bc, new FQN(TypeUtil.toString(PA_FQN.getValue(stack))), OA_DEFAULT.getValue(stack));
- }
public static final FQN SWIFT_WRAPPER_INTERPRETER = new FQN("swift:wrapperInterpreter");
public static final FQN SWIFT_WRAPPER_INTERPRETER_OPTIONS = new FQN("swift:wrapperInterpreterOptions");
@@ -70,7 +74,6 @@
osm.put(fqn, value);
}
- @SuppressWarnings("unused")
private static boolean hasDefault(Os os, FQN fqn) {
Map<FQN,Object> osm = DEFAULTS.get(os);
if (osm == null) {
@@ -104,7 +107,7 @@
addDefault(null, SWIFT_CLEANUP_COMMAND_OPTIONS, new String[] {"-rf"});
}
- private Object getSingle(VariableStack stack, BoundContact bc, FQN fqn, Object defval)
+ private Object getSingle(BoundContact bc, FQN fqn, Object defval)
throws ExecutionException {
String value = getProfile(bc, fqn);
if (value == null) {
@@ -119,7 +122,7 @@
return defval;
}
else {
- throw new ExecutionException(stack, "Missing profile: " + fqn);
+ throw new ExecutionException(this, "Missing profile: " + fqn);
}
}
else {
@@ -145,7 +148,7 @@
else {
String[] p = o.toString().split("::");
if (p.length < 2) {
- throw new IllegalArgumentException("Invalid sysinfo for " + bc + ": " + o);
+ throw new ExecutionException("Invalid sysinfo for " + bc + ": " + o);
}
return Os.fromString(p[1]);
}
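
SiteProfile above resolves a profile value by checking the site entry first, then OS-keyed defaults, then the caller-supplied default, and only then failing. A sketch of that lookup order with plain maps; the keys and default values below are illustrative, not the real Swift defaults:

    import java.util.HashMap;
    import java.util.Map;

    // Sketch: site profile value, then OS default, then caller default, else error.
    public class ProfileLookup {
        // defaults keyed by OS name; the null key holds OS-independent defaults
        private static final Map<String, Map<String, Object>> DEFAULTS =
            new HashMap<String, Map<String, Object>>();

        static {
            addDefault(null, "swift:cleanupCommand", "/bin/rm");
            addDefault("windows", "swift:cleanupCommand", "del");
        }

        private static void addDefault(String os, String key, Object value) {
            Map<String, Object> m = DEFAULTS.get(os);
            if (m == null) {
                DEFAULTS.put(os, m = new HashMap<String, Object>());
            }
            m.put(key, value);
        }

        public static Object lookup(Map<String, Object> siteProfile, String os,
                String key, Object defval) {
            Object v = siteProfile.get(key);               // 1. explicit site profile
            if (v != null) {
                return v;
            }
            Map<String, Object> osm = DEFAULTS.get(os);    // 2. OS-specific default
            if (osm != null && osm.containsKey(key)) {
                return osm.get(key);
            }
            osm = DEFAULTS.get(null);                      // 3. OS-independent default
            if (osm != null && osm.containsKey(key)) {
                return osm.get(key);
            }
            if (defval != null) {                          // 4. caller-supplied default
                return defval;
            }
            throw new RuntimeException("Missing profile: " + key);
        }

        public static void main(String[] args) {
            System.out.println(lookup(new HashMap<String, Object>(), "linux",
                "swift:cleanupCommand", null));
        }
    }
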
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/SliceArray.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/SliceArray.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/SliceArray.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,11 +20,14 @@
*/
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.karajan.Pair;
-import org.griphyn.vdl.karajan.PairIterator;
+import java.util.List;
+
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.griphyn.vdl.karajan.PairSet;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.InvalidPathException;
@@ -36,99 +39,93 @@
-public class SliceArray extends VDLFunction {
+public class SliceArray extends SwiftFunction {
+ private ArgRef<AbstractDataNode> var;
+ private ArgRef<String> path;
+ private ArgRef<String> type;
- public static final Arg PA_TYPE = new Arg.Positional("type");
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", "path", "type"));
+ }
- static {
- setArguments(SliceArray.class, new Arg[] { PA_VAR, PA_PATH, PA_TYPE });
- }
- public Object function(VariableStack stack) throws ExecutionException {
- Object var1 = PA_VAR.getValue(stack);
-
-// TODO for now, this insists the the array be closed entirely before we
-// execute. This may cause overserialisation; and worse, will break when
-// we are trying to use the cut as an output parameter, not an input
-// parameter (likely resulting in a hang).
-// Need to think hard about how to handle this. Static assignment
-// analysis is going to fail, I think - its like pointer aliasing in C,
-// a bit. If I get a ref to an array element using this, then I can
-// assign to it, but the compiler isn't going to be aware that I am
-// assigning to it so can't construct partialCloseDatasets correctly...
-// perhaps thats another argument for map? (as in, moving away from
-// [] based assignments...
-
-
- if(var1 instanceof DSHandle) {
-
- try {
- AbstractDataNode sourceArray = (AbstractDataNode) var1;
- sourceArray.waitFor();
-
- Type sourceType = sourceArray.getType();
-
- if(!sourceType.isArray()) {
- throw new RuntimeException("SliceArray can only slice arrays.");
- }
-
- String destinationTypeName = (String) PA_TYPE.getValue(stack);
- Type destinationType = Types.getType(destinationTypeName);
- RootArrayDataNode destinationArray = new RootArrayDataNode(destinationType);
-
-
- Path cutPath = Path.EMPTY_PATH.addLast((String) PA_PATH.getValue(stack), false);
-
- PairIterator it = new PairIterator(sourceArray.getArrayValue());
-
- while(it.hasNext()) {
- Pair pair = (Pair) it.next();
- Object index = pair.get(0);
- DSHandle sourceElement = (DSHandle) pair.get(1);
-
- Path p = Path.EMPTY_PATH.addLast((Comparable<?>) index, true);
-
- DSHandle n = sourceElement.getField(cutPath);
-
- destinationArray.getField(p).set(n);
- }
-
- // all of the inputs should be closed, so
- // we only need shallow close
- destinationArray.closeShallow();
-
- return destinationArray;
-
-/* code from setfieldvalue to look at:
- } else if(source.getType().isArray()) {
- PairIterator it = new PairIterator(source.getArrayValue());
- while(it.hasNext()) {
- Pair pair = (Pair) it.next();
- Object lhs = pair.get(0);
- DSHandle rhs = (DSHandle) pair.get(1);
- Path memberPath = Path.EMPTY_PATH.addLast(String.valueOf(lhs),true);
- DSHandle field;
- try {
- field = dest.getField(memberPath);
- } catch(InvalidPathException ipe) {
- throw new ExecutionException("Could not get destination field",ipe);
- }
- deepCopy(field,rhs,stack);
- }
- closeShallow(stack, dest);
-
-*/
- }
- catch(NoSuchTypeException nste) {
- throw new ExecutionException("No such type",nste);
- }
- catch (InvalidPathException e) {
- throw new ExecutionException(e);
- }
- } else {
- throw new ExecutionException("was expecting a DSHandle or collection of DSHandles, got: "+var1.getClass());
+ @Override
+ public Object function(Stack stack) {
+ // TODO for now, this insists that the array be closed entirely before we
+ // execute. This may cause overserialisation; and worse, it will break when
+ // we are trying to use the cut as an output parameter, not an input
+ // parameter (likely resulting in a hang).
+ // Need to think hard about how to handle this. Static assignment
+ // analysis is going to fail, I think - it's like pointer aliasing in C,
+ // a bit. If I get a ref to an array element using this, then I can
+ // assign to it, but the compiler isn't going to be aware that I am
+ // assigning to it, so it can't construct partialCloseDatasets correctly...
+ // perhaps that's another argument for map? (as in, moving away from
+ // [] based assignments...)
+
+
+ try {
+ AbstractDataNode sourceArray = this.var.getValue(stack);
+ sourceArray.waitFor(this);
+
+ Type sourceType = sourceArray.getType();
+
+ if(!sourceType.isArray()) {
+ throw new RuntimeException("SliceArray can only slice arrays.");
+ }
+
+ String destinationTypeName = this.type.getValue(stack);
+ Type destinationType = Types.getType(destinationTypeName);
+ RootArrayDataNode destinationArray = new RootArrayDataNode(destinationType);
+
+
+ Path cutPath = Path.EMPTY_PATH.addLast(this.path.getValue(stack), false);
+
+ PairSet s = new PairSet(sourceArray.getArrayValue());
+
+ for (List<?> pair : s) {
+ Object index = pair.get(0);
+ DSHandle sourceElement = (DSHandle) pair.get(1);
+
+ Path p = Path.EMPTY_PATH.addLast((Comparable<?>) index, true);
+
+ DSHandle n = sourceElement.getField(cutPath);
+
+ destinationArray.getField(p).set(n);
+ }
+
+ // all of the inputs should be closed, so
+ // we only need shallow close
+ destinationArray.closeShallow();
+
+ return destinationArray;
+
+ /* code from setfieldvalue to look at:
+ } else if(source.getType().isArray()) {
+ PairIterator it = new PairIterator(source.getArrayValue());
+ while(it.hasNext()) {
+ Pair pair = (Pair) it.next();
+ Object lhs = pair.get(0);
+ DSHandle rhs = (DSHandle) pair.get(1);
+ Path memberPath = Path.EMPTY_PATH.addLast(String.valueOf(lhs),true);
+ DSHandle field;
+ try {
+ field = dest.getField(memberPath);
+ } catch(InvalidPathException ipe) {
+ throw new ExecutionException("Could not get destination field",ipe);
+ }
+ deepCopy(field,rhs,stack);
+ }
+ closeShallow(stack, dest);
+
+ */
}
+ catch(NoSuchTypeException nste) {
+ throw new ExecutionException("No such type",nste);
+ }
+ catch (InvalidPathException e) {
+ throw new ExecutionException(this, e);
+ }
}
-
-
}
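
SliceArray above takes an array of structures and builds a new array containing one named field from each element. A simplified, list-based sketch of that operation, with plain maps standing in for DSHandles (purely illustrative):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Sketch: project one field out of every element of an array of structures.
    public class SliceSketch {
        public static List<Object> slice(List<Map<String, Object>> source, String field) {
            List<Object> result = new ArrayList<Object>();
            for (Map<String, Object> element : source) {
                if (!element.containsKey(field)) {
                    throw new IllegalArgumentException("No field '" + field + "' in " + element);
                }
                result.add(element.get(field));
            }
            return result;
        }

        public static void main(String[] args) {
            Map<String, Object> a = new HashMap<String, Object>();
            a.put("name", "run01"); a.put("size", 10);
            Map<String, Object> b = new HashMap<String, Object>();
            b.put("name", "run02"); b.put("size", 20);
            System.out.println(slice(Arrays.asList(a, b), "name")); // [run01, run02]
        }
    }
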
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/Stagein.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/Stagein.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/Stagein.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -22,52 +22,55 @@
import java.util.Collection;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+import k.thr.LWThread;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.arguments.Arg.Channel;
-import org.globus.cog.karajan.arguments.ArgUtil;
-import org.globus.cog.karajan.arguments.NamedArguments;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.futures.FutureFault;
-import org.globus.cog.karajan.workflow.nodes.FlowNode;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.CompilationException;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.globus.cog.karajan.compiled.nodes.Node;
+import org.globus.cog.karajan.futures.FutureFault;
+import org.globus.cog.karajan.parser.WrapperNode;
import org.griphyn.vdl.engine.Karajan;
import org.griphyn.vdl.mapping.AbstractDataNode;
-import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.DependentException;
import org.griphyn.vdl.mapping.MappingDependentException;
import org.griphyn.vdl.mapping.Path;
-public class Stagein extends VDLFunction {
+public class Stagein extends SwiftFunction {
public static final Logger logger = Logger.getLogger(Stagein.class);
- public static final Arg VAR = new Arg.Positional("var");
+ private ArgRef<AbstractDataNode> var;
+ private ChannelRef<Object> cr_stagein;
+ private VarRef<Boolean> r_deperror;
+ private VarRef<Boolean> r_mdeperror;
- public static final Channel STAGEIN = new Channel("stagein");
-
private Tracer tracer;
private String procName;
-
- static {
- setArguments(Stagein.class, new Arg[] { VAR });
- }
@Override
- protected void initializeStatic() {
- super.initializeStatic();
- FlowNode def = (FlowNode) getParent().getParent();
+ protected Signature getSignature() {
+ return new Signature(params("var"), returns("deperror", "mdeperror", channel("stagein", DYNAMIC)));
+ }
+
+ @Override
+ public Node compile(WrapperNode w, Scope scope)
+ throws CompilationException {
+ Node def = getParent().getParent();
procName = Karajan.demangle(def.getTextualName());
tracer = Tracer.getTracer(def, "APPCALL");
+ return super.compile(w, scope);
}
- private boolean isPrimitive(DSHandle var) {
- return (var instanceof AbstractDataNode && ((AbstractDataNode) var)
- .isPrimitive());
- }
-
- protected Object function(VariableStack stack) throws ExecutionException {
- AbstractDataNode var = (AbstractDataNode) VAR.getValue(stack);
- if (!isPrimitive(var)) {
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode var = this.var.getValue(stack);
+ if (!var.isPrimitive()) {
boolean deperr = false;
boolean mdeperr = false;
try {
@@ -75,19 +78,21 @@
try {
for (Path p : fp) {
AbstractDataNode n = (AbstractDataNode) var.getField(p);
- n.waitFor();
+ n.waitFor(this);
}
}
catch (DependentException e) {
deperr = true;
}
+
+ k.rt.Channel<Object> stagein = cr_stagein.get(stack);
for (Path p : fp) {
- STAGEIN.ret(stack, filename(stack, var.getField(p))[0]);
+ stagein.add(filename(var.getField(p))[0]);
}
}
catch (FutureFault f) {
if (tracer.isEnabled()) {
- tracer.trace(stack, procName + " WAIT " + Tracer.getFutureName(f.getFuture()));
+ tracer.trace(LWThread.currentThread(), procName + " WAIT " + Tracer.getFutureName(f.getFuture()));
}
throw f;
}
@@ -97,27 +102,28 @@
mdeperr = true;
}
catch (Exception e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
- if (deperr || mdeperr) {
- NamedArguments named = ArgUtil.getNamedReturn(stack);
- named.add("deperror", deperr);
- named.add("mdeperror", mdeperr);
+ if (deperr) {
+ this.r_deperror.setValue(stack, true);
}
+ if (mdeperr) {
+ this.r_mdeperror.setValue(stack, true);
+ }
}
else {
// we still wait until the primitive value is there
if (tracer.isEnabled()) {
try {
- var.waitFor();
+ var.waitFor(this);
}
catch (FutureFault f) {
- tracer.trace(stack, procName + " WAIT " + Tracer.getFutureName(f.getFuture()));
+ tracer.trace(LWThread.currentThread(), procName + " WAIT " + Tracer.getFutureName(f.getFuture()));
throw f;
}
}
else {
- var.waitFor();
+ var.waitFor(this);
}
}
return null;
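
Note: the pattern above recurs throughout this revision. Arguments and returns are declared once in getSignature(), and the compiler binds them to fields of the node by name: a plain field for each parameter, an r_-prefixed VarRef for each named return, and a cr_-prefixed ChannelRef for each channel, as far as this commit shows. A minimal sketch of the convention with illustrative names, not taken from the commit:

    public class ExampleFunction extends SwiftFunction {
        private ArgRef<AbstractDataNode> var;     // parameter "var"
        private VarRef<Boolean> r_deperror;       // named return "deperror"
        private ChannelRef<Object> cr_stagein;    // channel return "stagein"

        @Override
        protected Signature getSignature() {
            return new Signature(params("var"),
                returns("deperror", channel("stagein", DYNAMIC)));
        }

        @Override
        public Object function(Stack stack) {
            AbstractDataNode v = this.var.getValue(stack);  // read the argument
            cr_stagein.get(stack).add(v);                   // emit on the channel
            r_deperror.setValue(stack, false);              // set a named return
            return null;
        }
    }
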
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/Stageout.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/Stageout.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/Stageout.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -24,35 +24,33 @@
import java.util.Collection;
import java.util.List;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.arguments.Arg.Channel;
-import org.globus.cog.karajan.arguments.ArgUtil;
-import org.globus.cog.karajan.arguments.NamedArguments;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.analyzer.VarRef;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
-import org.griphyn.vdl.mapping.DataNode;
import org.griphyn.vdl.mapping.MappingDependentException;
import org.griphyn.vdl.mapping.Path;
-public class Stageout extends VDLFunction {
+public class Stageout extends SwiftFunction {
public static final Logger logger = Logger.getLogger(Stageout.class);
- public static final Arg VAR = new Arg.Positional("var");
-
- public static final Channel STAGEOUT = new Channel("stageout");
- public static final Channel RESTARTOUT = new Channel("restartout");
-
- static {
- setArguments(Stageout.class, new Arg[] { VAR });
+ private ArgRef<AbstractDataNode> var;
+ private ChannelRef<Object> cr_stageout;
+ private ChannelRef<Object> cr_restartout;
+ private VarRef<Boolean> r_deperror;
+ private VarRef<Boolean> r_mdeperror;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var"), returns("deperror", "mdeperror",
+ channel("stageout", DYNAMIC), channel("restartout", DYNAMIC)));
}
-
- private boolean isPrimitive(DSHandle var) {
- return (var instanceof AbstractDataNode && ((AbstractDataNode) var)
- .isPrimitive());
- }
private List<?> list(Path p, DSHandle var) {
ArrayList<Object> l = new ArrayList<Object>(2);
@@ -61,8 +59,9 @@
return l;
}
- protected Object function(VariableStack stack) throws ExecutionException {
- AbstractDataNode var = (AbstractDataNode) VAR.getValue(stack);
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode var = this.var.getValue(stack);
boolean deperr = false;
boolean mdeperr = false;
// currently only static arrays are supported as app returns
@@ -71,14 +70,14 @@
// race conditions (e.g. if this array's mapper had some parameter
// dependencies that weren't closed at the time the app was started).
if (var.getType().isArray()) {
- var.waitFor();
+ var.waitFor(this);
}
try {
- if (!isPrimitive(var)) {
- retPaths(STAGEOUT, stack, var);
+ if (!var.isPrimitive()) {
+ retPaths(cr_stageout.get(stack), var);
}
if (var.isRestartable()) {
- retPaths(RESTARTOUT, stack, var);
+ retPaths(cr_restartout.get(stack), var);
}
}
catch (MappingDependentException e) {
@@ -86,23 +85,24 @@
deperr = true;
mdeperr = true;
}
- if (deperr || mdeperr) {
- NamedArguments named = ArgUtil.getNamedReturn(stack);
- named.add("deperror", deperr);
- named.add("mdeperror", mdeperr);
+ if (deperr) {
+ this.r_deperror.setValue(stack, true);
}
+ if (mdeperr) {
+ this.r_mdeperror.setValue(stack, true);
+ }
return null;
}
- private void retPaths(Channel channel, VariableStack stack, DSHandle var) throws ExecutionException {
+ private void retPaths(k.rt.Channel<Object> channel, DSHandle var) throws ExecutionException {
try {
Collection<Path> fp = var.getFringePaths();
for (Path p : fp) {
- channel.ret(stack, list(p, var));
+ channel.add(list(p, var));
}
}
catch (Exception e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
}
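
Note: two smaller shifts visible here recur in the rest of the revision. Channel returns are resolved to plain k.rt.Channel objects and written with add() instead of Arg.Channel.ret(stack, ...), and ExecutionException now takes the failing node as its first argument so errors carry a source location. Sketch only, reusing names from the class above:

    k.rt.Channel<Object> out = cr_stageout.get(stack);  // resolve the channel once per call
    try {
        out.add(list(p, var));                           // was: STAGEOUT.ret(stack, list(p, var))
    }
    catch (Exception e) {
        throw new ExecutionException(this, e);           // node-aware exception wrapping
    }
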
Deleted: branches/faster/src/org/griphyn/vdl/karajan/lib/SwiftArg.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/SwiftArg.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/SwiftArg.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,244 +0,0 @@
-/*
- * Copyright 2012 University of Chicago
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-/*
- * Created on Jul 31, 2007
- */
-package org.griphyn.vdl.karajan.lib;
-
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.arguments.ArgUtil;
-import org.globus.cog.karajan.arguments.VariableArguments;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.mapping.AbstractDataNode;
-import org.griphyn.vdl.mapping.ArrayDataNode;
-import org.griphyn.vdl.mapping.DSHandle;
-import org.griphyn.vdl.mapping.DataNode;
-import org.griphyn.vdl.type.Type;
-import org.griphyn.vdl.type.Types;
-
-public abstract class SwiftArg extends Arg {
- public static final Logger logger = Logger.getLogger(SwiftArg.class);
-
- public SwiftArg(String name, int index) {
- super(name, index);
- }
-
- public SwiftArg(String name) {
- super(name, IMPLICIT);
- }
-
- protected Object unwrap(VariableStack stack, Object val) throws ExecutionException {
- if (val instanceof DSHandle) {
- AbstractDataNode handle = (AbstractDataNode) val;
- if (logger.isDebugEnabled()) {
- logger.debug("SwiftArg.getValue(" + handle + ")");
- }
- if (handle.getType().isArray()) {
- return handle;
- }
- else {
- handle.waitFor();
- return handle.getValue();
- }
- }
- else {
- throw new ExecutionException("Expected Swift data, but got some primitive type (" + val
- + ")");
- }
- }
-
- public Object getValue(VariableStack stack) throws ExecutionException {
- Object v = super.getValue(stack);
- if (v == null) {
- return v;
- }
- else {
- return unwrap(stack, v);
- }
- }
-
- public double getDoubleValue(VariableStack stack) throws ExecutionException {
- return checkDouble(getValue(stack));
- }
-
- public static double checkDouble(Object dbl) throws ExecutionException {
- if (dbl instanceof Number) {
- return ((Number) dbl).doubleValue();
- }
- else {
- throw new ExecutionException("Internal type error. Expected a Double. Got " + classOf(dbl));
- }
- }
-
- public static int checkInt(Object i) throws ExecutionException {
- if (i instanceof Integer) {
- return ((Integer) i).intValue();
- }
- else {
- throw new ExecutionException("Internal type error. Expected a Integer. Got " + classOf(i));
- }
- }
-
- public DSHandle getRawValue(VariableStack stack) throws ExecutionException {
- Object v = super.getValue(stack);
- if(v instanceof DSHandle) {
- return (DSHandle)v;
- } else if(v == null) {
- return null;
- } else {
- throw new ExecutionException("Expected Swift data, but got some primitive type (" + v + ")");
- }
- }
-
- private static Class classOf(Object object) {
- if (object == null) {
- return null;
- }
- else {
- return object.getClass();
- }
- }
-
- protected Type getType0(Object o) throws ExecutionException {
- if (o instanceof DSHandle) {
- DSHandle handle = (DSHandle) o;
- return handle.getType();
- }
- else {
- throw new ExecutionException("Expected Swift data, but got some primitive type (" + o
- + ")");
- }
- }
-
- public Type getType(VariableStack stack) throws ExecutionException {
- return getType0(super.getValue(stack));
- }
-
- public static class Positional extends SwiftArg {
- public Positional(String name, int index) {
- super(name, index);
- }
-
- public Positional(String name) {
- super(name);
- }
- }
-
- public static class Optional extends SwiftArg {
- private final Object defaultValue;
- private final Type defaultType;
-
- public Optional(String name, Object defaultValue, Type defaultType) {
- super(name, NOINDEX);
- this.defaultValue = defaultValue;
- this.defaultType = defaultType;
- }
-
- public Optional(String name) {
- this(name, null, Types.ANY);
- }
-
- public Object getValue(VariableStack stack) throws ExecutionException {
- Object o = super.getValue(stack);
- if (o == null) {
- return defaultValue;
- }
- else {
- return o;
- }
- }
-
- public Type getType(VariableStack stack) throws ExecutionException {
- Object o = super.getValue(stack, defaultValue);
- if (o == null) {
- return defaultType;
- }
- else {
- return getType0(o);
- }
- }
- }
-
- public static final class Vargs extends SwiftArg {
- public Vargs() {
- super("...");
- }
-
- public Object[] asArray(VariableStack stack) throws ExecutionException {
- VariableArguments args = get(stack);
- Object[] ret = new Object[args.size()];
- for (int i = 0; i < ret.length; i++) {
- ret[i] = unwrap(stack, args.get(i));
- }
- return ret;
- }
-
- public DSHandle[] asDSHandleArray(VariableStack stack) throws ExecutionException {
- VariableArguments args = get(stack);
- DSHandle[] ret = new DSHandle[args.size()];
- for (int i = 0; i < ret.length; i++) {
- ret[i] = (DSHandle) args.get(i);
- }
- return ret;
- }
-
- public AbstractDataNode[] asDataNodeArray(VariableStack stack) throws ExecutionException {
- VariableArguments args = get(stack);
- AbstractDataNode[] ret = new AbstractDataNode[args.size()];
- for (int i = 0; i < ret.length; i++) {
- ret[i] = (AbstractDataNode) args.get(i);
- }
- return ret;
- }
-
- public List asList(VariableStack stack) throws ExecutionException {
- VariableArguments args = get(stack);
- List ret = new ArrayList();
- Iterator i = args.iterator();
- while (i.hasNext()) {
- ret.add(unwrap(stack, i.next()));
- }
- return get(stack).getAll();
- }
-
- public VariableArguments get(VariableStack stack) throws ExecutionException {
- VariableArguments args = ArgUtil.getVariableArguments(stack);
- if (args == null) {
- throw new ExecutionException("No default channel found on stack");
- }
- return args;
- }
-
- public boolean isPresent(VariableStack stack) throws ExecutionException {
- return ArgUtil.getVariableArguments(stack) != null;
- }
-
- public String getVariableName() {
- return "#vargs";
- }
-
- }
-
- public static final Vargs VARGS = new Vargs();
-}
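
Note: SwiftArg (including SwiftArg.VARGS and its asArray/asList helpers) has no direct equivalent. Variadic arguments now arrive on a k.rt.Channel of data nodes and are drained with the static helpers added to SwiftFunction below. A sketch of the replacement idiom; the channel field name c_vargs is an assumption, not from this commit:

    private ChannelRef<AbstractDataNode> c_vargs;        // bound to the function's vararg channel

    @Override
    public Object function(Stack stack) {
        Channel<AbstractDataNode> args = c_vargs.get(stack);
        Channel<Object> values = unwrapAll(this, args);  // waits for all nodes, then unwraps values
        for (Object v : values) {
            // ... use each argument value ...
        }
        return null;
    }
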
Added: branches/faster/src/org/griphyn/vdl/karajan/lib/SwiftFunction.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/SwiftFunction.java (rev 0)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/SwiftFunction.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -0,0 +1,442 @@
+/*
+ * Copyright 2012 University of Chicago
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.griphyn.vdl.karajan.lib;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import k.rt.Channel;
+import k.rt.Context;
+import k.rt.ExecutionException;
+import k.rt.MemoryChannel;
+import k.rt.Stack;
+import k.thr.LWThread;
+
+import org.apache.log4j.Logger;
+import org.globus.cog.karajan.analyzer.CompilationException;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.globus.cog.karajan.compiled.nodes.Node;
+import org.globus.cog.karajan.compiled.nodes.functions.AbstractFunction;
+import org.globus.cog.karajan.parser.WrapperNode;
+import org.globus.cog.karajan.util.BoundContact;
+import org.globus.cog.karajan.util.TypeUtil;
+import org.globus.swift.catalog.TCEntry;
+import org.globus.swift.catalog.transformation.File;
+import org.globus.swift.catalog.types.TCType;
+import org.griphyn.vdl.karajan.AssertFailedException;
+import org.griphyn.vdl.karajan.Loader;
+import org.griphyn.vdl.karajan.TCCache;
+import org.griphyn.vdl.karajan.functions.ConfigProperty;
+import org.griphyn.vdl.mapping.AbsFile;
+import org.griphyn.vdl.mapping.AbstractDataNode;
+import org.griphyn.vdl.mapping.DSHandle;
+import org.griphyn.vdl.mapping.DependentException;
+import org.griphyn.vdl.mapping.GeneralizedFileFormat;
+import org.griphyn.vdl.mapping.HandleOpenException;
+import org.griphyn.vdl.mapping.InvalidPathException;
+import org.griphyn.vdl.mapping.Mapper;
+import org.griphyn.vdl.mapping.Path;
+import org.griphyn.vdl.mapping.PathComparator;
+import org.griphyn.vdl.mapping.PhysicalFormat;
+import org.griphyn.vdl.type.Type;
+import org.griphyn.vdl.type.Types;
+import org.griphyn.vdl.util.FQN;
+import org.griphyn.vdl.util.VDL2Config;
+import org.griphyn.vdl.util.VDL2ConfigProperties;
+
+public abstract class SwiftFunction extends AbstractFunction {
+ public static final Logger logger = Logger.getLogger(SwiftFunction.class);
+
+ public static final boolean PROVENANCE_ENABLED;
+
+ static {
+ boolean v;
+ try {
+ v = VDL2Config.getConfig().getProvenanceLog();
+ }
+ catch (IOException e) {
+ v = false;
+ }
+ PROVENANCE_ENABLED = v;
+ }
+
+
+ private VarRef<Context> context;
+
+ @Override
+ protected void addLocals(Scope scope) {
+ super.addLocals(scope);
+ context = scope.getVarRef("#context");
+ }
+
+ @Override
+ protected Node compileBody(WrapperNode w, Scope argScope, Scope scope)
+ throws CompilationException {
+ returnDynamic(scope);
+ return super.compileBody(w, argScope, scope);
+ }
+
+
+
+ @Override
+ public void runBody(LWThread thr) {
+ try {
+ Stack stack = thr.getStack();
+ ret(stack, function(stack));
+ }
+ catch (AssertFailedException e) {
+ logger.fatal("swift: assert failed: " + e.getMessage());
+ throw e;
+ }
+ catch (DependentException e) {
+ // This would not be the primal fault, so in non-lazy errors mode it
+ // should not matter
+ throw new ExecutionException("Wrapping a dependent exception in SwiftFunction.runBody() - errors in data dependencies", e);
+ }
+ }
+
+ /*
+ * This will likely break if the engine changes in fundamental ways. It also
+ * depends on the fact that the iteration variable is named '$' in this
+ * particular implementation.
+ */
+ public static String getThreadPrefix() throws ExecutionException {
+ return LWThread.currentThread().getName();
+ }
+
+ // TODO - is this needed any more? It does some type inference, object
+ // creation and dequoting of strings, but the necessary behaviour here has
+ // possibly moved elsewhere, into a more strongly typed intermediate XML
+ // form that removes the need for this inference.
+
+ // We might need to do some casting here for the numerical cases - e.g. when
+ // asking for a float but given an int? Not sure; the value may already be
+ // in Double form, in which case dereference the internal value?
+
+ // This is only used by VDL new (and really should only be used by
+ // VDL new, and should perhaps move to the VDL new source?)
+
+ protected Object internalValue(Type type, Object value) {
+ if (Types.FLOAT.equals(type)) {
+ return new Double(TypeUtil.toDouble(value));
+ }
+ else if (Types.INT.equals(type)) {
+ return new Integer(TypeUtil.toInt(value));
+ }
+ else if (Types.BOOLEAN.equals(type)) {
+ return new Boolean(TypeUtil.toBoolean(value));
+ }
+ else {
+ return value;
+ }
+ }
+
+ public static final String[] EMPTY_STRING_ARRAY = new String[0];
+
+
+ public static String[] filename(DSHandle var) throws ExecutionException {
+ try {
+ if (var.getType().isArray()) {
+ return leavesFileNames(var);
+ }
+ else if(var.getType().getFields().size() > 0) {
+ return leavesFileNames(var);
+ }
+ else {
+ return new String[] { leafFileName(var) };
+ }
+ }
+ catch (DependentException e) {
+ return new String[0];
+ }
+ catch (HandleOpenException e) {
+ throw new ExecutionException("The current implementation should not throw this exception", e);
+ }
+ }
+
+ private static String[] leavesFileNames(DSHandle var) throws ExecutionException, HandleOpenException {
+ Mapper mapper;
+
+ synchronized (var.getRoot()) {
+ mapper = var.getMapper();
+ }
+
+ if (mapper == null) {
+ throw new ExecutionException(var.getType() + " is not a mapped type");
+ }
+
+ List<String> l = new ArrayList<String>();
+ try {
+ Collection<Path> fp = var.getFringePaths();
+ List<Path> src;
+ if (fp instanceof List) {
+ src = (List<Path>) fp;
+ }
+ else {
+ src = new ArrayList<Path>(fp);
+ }
+ Collections.sort(src, new PathComparator());
+
+ for (Path p : src) {
+ l.add(leafFileName(var.getField(p), mapper));
+ }
+ }
+ catch (InvalidPathException e) {
+ throw new ExecutionException("DSHandle is lying about its fringe paths");
+ }
+ return l.toArray(EMPTY_STRING_ARRAY);
+ }
+
+ private static String leafFileName(DSHandle var) throws ExecutionException {
+ return leafFileName(var, var.getMapper());
+ }
+
+ private static String leafFileName(DSHandle var, Mapper mapper) throws ExecutionException {
+ if (Types.STRING.equals(var.getType())) {
+ return relativize(String.valueOf(var.getValue()));
+ }
+ else {
+ if (var.getMapper() == null) {
+ throw new ExecutionException("Cannot invoke filename() on data without a mapper: " + var);
+ }
+ PhysicalFormat f = var.getMapper().map(var.getPathFromRoot());
+ if (f instanceof GeneralizedFileFormat) {
+ String filename = ((GeneralizedFileFormat) f).getURIAsString();
+ if (filename == null) {
+ throw new ExecutionException("Mapper did not provide a file name");
+ }
+ else {
+ return filename;
+ }
+ }
+ else if (f == null) {
+ throw new ExecutionException("Mapper failed to map " + var);
+ }
+ else {
+ throw new ExecutionException("Only file formats are supported for now");
+ }
+ }
+ }
+
+ protected Object pathOnly(Object f) {
+ if (f instanceof String[]) {
+ return pathOnly((String[]) f);
+ }
+ else {
+ return pathOnly((String) f);
+ }
+ }
+
+ protected static String pathOnly(String file) {
+ AbsFile af = new AbsFile(file);
+ if ("file".equals(af.getProtocol())) {
+ return af.getPath();
+ }
+ else {
+ return af.getHost() + "/" + af.getPath();
+ }
+ }
+
+ protected String[] pathOnly(String[] files) {
+ String[] p = new String[files.length];
+ for (int i = 0; i < files.length; i++) {
+ p[i] = pathOnly(files[i]);
+ }
+ return p;
+ }
+
+ /**
+ * Given an input of an array of strings, returns a single string with the
+ * input strings separated by a space. If the 'relative' flag is set to
+ * true, then each input string will be passed through the relativize
+ * function.
+ */
+ public String argList(String[] s, boolean relative) {
+ StringBuffer sb = new StringBuffer();
+ for (int i = 0; i < s.length; i++) {
+ if (relative) {
+ s[i] = relativize(s[i]);
+ }
+ sb.append(s[i]);
+ if (i < s.length - 1) {
+ sb.append(' ');
+ }
+ }
+ return sb.toString();
+ }
+
+ /**
+ * removes leading / character from a supplied filename if present, so that
+ * the path can be used as a relative path.
+ */
+ public static String relativize(String name) {
+ name = pathOnly(name);
+ if (name != null && name.length() > 0 && name.charAt(0) == '/') {
+ return name.substring(1);
+ }
+ else {
+ return name;
+ }
+ }
+
+ protected boolean compatible(Type expectedType, Type actualType) {
+ if (expectedType.equals(Types.FLOAT)) {
+ if (actualType.equals(Types.FLOAT) || actualType.equals(Types.INT)) {
+ return true;
+ }
+ else {
+ return false;
+ }
+ }
+ else if (expectedType.equals(Types.FLOAT.arrayType())) {
+ if (actualType.equals(Types.FLOAT.arrayType())
+ || actualType.equals(Types.INT.arrayType())) {
+ return true;
+ }
+ else {
+ return false;
+ }
+ }
+ else if (expectedType.equals(Types.ANY)) {
+ return true;
+ }
+ else {
+ return actualType.equals(expectedType);
+ }
+ }
+
+ protected void closeChildren(AbstractDataNode handle) throws InvalidPathException {
+ // Close the future
+ handle.closeShallow();
+ // Mark all leaves
+ for (DSHandle child : handle.getFields(Path.CHILDREN)) {
+ child.closeShallow();
+ }
+ }
+
+ public static void waitForAll(Node who, Channel<AbstractDataNode> vargs) throws ExecutionException {
+ for (AbstractDataNode n : vargs) {
+ n.waitFor(who);
+ }
+ }
+
+ public static Map<Comparable<?>, DSHandle> waitForArray(Node who, AbstractDataNode n) throws ExecutionException {
+ n.waitFor(who);
+ Map<Comparable<?>, DSHandle> v = n.getArrayValue();
+ for (DSHandle h : v.values()) {
+ ((AbstractDataNode) h).waitFor(who);
+ }
+ return v;
+ }
+
+ public static Channel<Object> unwrapAll(Node who, Channel<AbstractDataNode> vargs) throws ExecutionException {
+ waitForAll(who, vargs);
+ MemoryChannel<Object> mc = new MemoryChannel<Object>();
+ for (AbstractDataNode n : vargs) {
+ mc.add(n.getValue());
+ }
+ return mc;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static <T> T unwrap(Node who, AbstractDataNode n) throws ExecutionException {
+ n.waitFor(who);
+ return (T) n.getValue();
+ }
+
+ public static Path parsePath(Object o) {
+ if (o instanceof Path) {
+ return (Path) o;
+ }
+ else {
+ return Path.parse((String) o);
+ }
+ }
+
+ private static Set<List<Object>> warnset = new HashSet<List<Object>>();
+
+ protected TCEntry getTCE(TCCache tc, FQN fqn, BoundContact bc) {
+ List<TCEntry> l;
+ try {
+ l = tc.getTCEntries(fqn, bc.getHost(), TCType.INSTALLED);
+ }
+ catch (Exception e) {
+ throw new ExecutionException(this, e);
+ }
+ if (l == null || l.isEmpty()) {
+ return null;
+ }
+ if (l.size() > 1) {
+ synchronized (warnset) {
+ LinkedList<Object> wl = new LinkedList<Object>();
+ wl.add(fqn);
+ wl.add(bc);
+ if (!warnset.contains(wl)) {
+ logger.warn("Multiple entries found for " + fqn + " on " + bc
+ + ". Using the first one");
+ warnset.add(wl);
+ }
+ }
+ }
+ return l.get(0);
+ }
+
+ public static final String TC = "vdl:TC";
+
+ public TCCache getTC(Stack stack) throws ExecutionException {
+ Context c = this.context.getValue(stack);
+ synchronized (c) {
+ TCCache tc = (TCCache) c.getAttribute(TC);
+ if (tc == null) {
+ String prop = ConfigProperty.getProperty(VDL2ConfigProperties.TC_FILE, (VDL2Config) c.getAttribute("SWIFT:CONFIG"));
+ Loader.debugText("TC", new java.io.File(prop));
+ tc = new TCCache(File.getNonSingletonInstance(prop));
+ c.setAttribute(TC, tc);
+ }
+ return tc;
+ }
+ }
+
+ private static int provenanceIDCount = 451000;
+
+ public static synchronized int nextProvenanceID() {
+ return provenanceIDCount++;
+ }
+
+ public static void logProvenanceResult(int id, DSHandle result, String name) {
+ if (logger.isDebugEnabled())
+ logger.debug("FUNCTION id="+id+" name="+name+" result="+result.getIdentifier());
+ else if (logger.isInfoEnabled())
+ logger.info("FUNCTION: " + name + "()");
+ }
+
+ public static void logProvenanceParameter(int id, DSHandle parameter, String paramName) {
+ if (logger.isDebugEnabled())
+ logger.debug("FUNCTIONPARAMETER id="+id+" input="+parameter.getIdentifier()+" name="+paramName);
+ }
+}
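
Note: for orientation, the statics above are what the staging and filename() builtins call into. A typical use from a function body looks like the following (the field name var is illustrative); waitFor(this) suspends the lightweight thread until the handle is closed, after which the mapper can be consulted safely:

    AbstractDataNode data = this.var.getValue(stack);  // argument bound by getSignature()
    data.waitFor(this);                                // yield until the handle is closed
    String[] names = filename(data);                   // physical file name(s) from its mapper
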
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/TCProfile.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/TCProfile.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/TCProfile.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -27,85 +27,84 @@
import java.util.Map;
import java.util.Set;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
import org.globus.cog.abstraction.impl.common.execution.WallTime;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.arguments.ArgUtil;
-import org.globus.cog.karajan.arguments.NamedArguments;
-import org.globus.cog.karajan.stack.VariableStack;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.globus.cog.karajan.compiled.nodes.functions.Map.Entry;
import org.globus.cog.karajan.util.BoundContact;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.grid.GridExec;
import org.globus.swift.catalog.TCEntry;
import org.globus.swift.catalog.util.Profile;
import org.griphyn.vdl.karajan.TCCache;
import org.griphyn.vdl.util.FQN;
-public class TCProfile extends VDLFunction {
+public class TCProfile extends SwiftFunction {
public static final Logger logger = Logger.getLogger(TCProfile.class);
- public static final Arg OA_TR = new Arg.Optional("tr");
-
- /**
- Allows for dynamic attributes from the SwiftScript
- profile statements.
- These override any other attributes.
- */
- public static final Arg OA_ATTRS = new Arg.Positional("attributes");
-
- public static final Arg PA_HOST = new Arg.Positional("host");
-
- static {
- setArguments(TCProfile.class, new Arg[] { PA_HOST, OA_ATTRS, OA_TR });
- }
+ private ArgRef<BoundContact> host;
+ /**
+ Allows for dynamic attributes from the SwiftScript
+ profile statements.
+ These override any other attributes.
+ */
+ private ArgRef<Map<String, Object>> attributes;
+ private ArgRef<String> tr;
+
+ private VarRef<Object> r_count;
+ private VarRef<Object> r_jobType;
+ private VarRef<Object> r_attributes;
+ private ChannelRef<Map.Entry<Object, Object>> cr_environment;
+
+ private enum Attr {
+ COUNT, JOB_TYPE;
+ }
+
+ private static final Map<String, Attr> ATTR_TYPES;
+
+ static {
+ ATTR_TYPES = new HashMap<String, Attr>();
+ ATTR_TYPES.put("count", Attr.COUNT);
+ ATTR_TYPES.put("jobType", Attr.JOB_TYPE);
+ }
- private static Map<String, Arg> PROFILE_T;
+ @Override
+ protected Signature getSignature() {
+ return new Signature(
+ params("host", optional("attributes", null), optional("tr", null)),
+ returns("count", "jobType",
+ "attributes", channel("environment", DYNAMIC))
+ );
+ }
- static {
- PROFILE_T = new HashMap<String, Arg>();
- PROFILE_T.put("count", GridExec.A_COUNT);
- PROFILE_T.put("jobtype", GridExec.A_JOBTYPE);
- PROFILE_T.put("maxcputime", GridExec.A_MAXCPUTIME);
- PROFILE_T.put("maxmemory", GridExec.A_MAXMEMORY);
- PROFILE_T.put("maxtime", GridExec.A_MAXTIME);
- PROFILE_T.put("maxwalltime", GridExec.A_MAXWALLTIME);
- PROFILE_T.put("minmemory", GridExec.A_MINMEMORY);
- PROFILE_T.put("project", GridExec.A_PROJECT);
- PROFILE_T.put("queue", GridExec.A_QUEUE);
- }
-
- public Object function(VariableStack stack) throws ExecutionException {
+ public Object function(Stack stack) {
TCCache tc = getTC(stack);
- String tr = null;
+ String tr = this.tr.getValue(stack);
- Map<String,Object> dynamicAttributes =
- readDynamicAttributes(stack);
+ Map<String, Object> dynamicAttributes = readDynamicAttributes(stack);
- if (OA_TR.isPresent(stack)) {
- tr = TypeUtil.toString(OA_TR.getValue(stack));
- }
- BoundContact bc = (BoundContact) PA_HOST.getValue(stack);
+ BoundContact bc = this.host.getValue(stack);
- NamedArguments named = ArgUtil.getNamedReturn(stack);
Map<String,Object> attrs = null;
- attrs = attributesFromHost(bc, attrs, named);
+ attrs = attributesFromHost(bc, attrs, stack);
TCEntry tce = null;
if (tr != null) {
tce = getTCE(tc, new FQN(tr), bc);
}
- Map<String,String> env = new HashMap<String,String>();
if (tce != null) {
- addEnvironment(env, tce);
- addEnvironment(env, bc);
- attrs = attributesFromTC(tce, attrs, named);
+ addEnvironment(stack, tce);
+ addEnvironment(stack, bc);
+ attrs = attributesFromTC(tce, attrs, stack);
}
- named.add(GridExec.A_ENVIRONMENT, env);
- checkWalltime(tr, named);
attrs = addDynamicAttributes(attrs, dynamicAttributes);
- addAttributes(named, attrs);
+ checkWalltime(attrs, tr, stack);
+ addAttributes(attrs, stack);
return null;
}
@@ -113,14 +112,8 @@
Bring in the dynamic attributes from the Karajan stack
@return Map, may be null
*/
- @SuppressWarnings("unchecked")
- private Map<String, Object>
- readDynamicAttributes(VariableStack stack)
- throws ExecutionException {
- Map<String, Object> result = null;
- if (OA_ATTRS.isPresent(stack))
- result = (Map<String,Object>) OA_ATTRS.getValue(stack);
- return result;
+ private Map<String, Object> readDynamicAttributes(Stack stack) {
+ return this.attributes.getValue(stack);
}
/**
@@ -143,13 +136,11 @@
return result;
}
- private void checkWalltime(String tr, NamedArguments attrs) {
- Object walltime = null;
- if (attrs != null) {
- if (attrs.hasArgument("maxwalltime")) {
- walltime = attrs.getArgument("maxwalltime");
- }
- }
+ private void checkWalltime(Map<String, Object> attrs, String tr, Stack stack) {
+ if (attrs == null) {
+ return;
+ }
+ Object walltime = attrs.get("maxwalltime");
if (walltime == null) {
return;
}
@@ -157,7 +148,7 @@
//validate walltime
WallTime.timeToSeconds(walltime.toString());
}
- catch (IllegalArgumentException e) {
+ catch (ExecutionException e) {
warn(tr, "Warning: invalid walltime specification for \"" + tr
+ "\" (" + walltime + ").");
}
@@ -174,33 +165,30 @@
}
}
- private void addEnvironment(Map<String,String> m,
- TCEntry tce) {
+ private void addEnvironment(Stack stack, TCEntry tce) {
List<Profile> list = tce.getProfiles(Profile.ENV);
if (list != null) {
for (Profile p : list) {
- m.put(p.getProfileKey(), p.getProfileValue());
+ cr_environment.append(stack, new Entry(p.getProfileKey(), p.getProfileValue()));
}
}
}
- public static final String PROFILE_GLOBUS_PREFIX =
- (Profile.GLOBUS + "::").toLowerCase();
+ public static final String PROFILE_GLOBUS_PREFIX = (Profile.GLOBUS + "::").toLowerCase();
- private void addEnvironment(Map<String,String> m,
- BoundContact bc) {
+ private void addEnvironment(Stack stack, BoundContact bc) {
Map<String,Object> props = bc.getProperties();
for (Map.Entry<String,Object> e : props.entrySet()) {
String name = e.getKey();
FQN fqn = new FQN(name);
String value = (String) e.getValue();
if (Profile.ENV.equalsIgnoreCase(fqn.getNamespace())) {
- m.put(fqn.getName(), value);
+ cr_environment.append(stack, new Entry(fqn.getName(), value));
}
}
}
- private void addAttributes(NamedArguments named, Map<String,Object> attrs) {
+ private void addAttributes(Map<String,Object> attrs, Stack stack) {
if (logger.isDebugEnabled()) {
logger.debug("Attributes: " + attrs);
}
@@ -210,26 +198,23 @@
Iterator<Map.Entry<String, Object>> i = attrs.entrySet().iterator();
while (i.hasNext()) {
Map.Entry<String, Object> e = i.next();
- Arg a = PROFILE_T.get(e.getKey());
+ Attr a = ATTR_TYPES.get(e.getKey());
if (a != null) {
- named.add(a, e.getValue());
+ setAttr(a, stack, e.getValue());
i.remove();
}
}
if (attrs.size() == 0) {
return;
}
- named.add(GridExec.A_ATTRIBUTES, attrs);
+ this.r_attributes.setValue(stack, attrs);
}
- private Map<String,Object>
- attributesFromTC(TCEntry tce,
- Map<String,Object> attrs,
- NamedArguments named) {
+ private Map<String,Object> attributesFromTC(TCEntry tce, Map<String,Object> attrs, Stack stack) {
List<Profile> list = tce.getProfiles(Profile.GLOBUS);
if (list != null) {
for (Profile p : list) {
- Arg a = PROFILE_T.get(p.getProfileKey());
+ Attr a = ATTR_TYPES.get(p.getProfileKey());
if (a == null) {
if (attrs == null) {
attrs = new HashMap<String,Object>();
@@ -237,7 +222,7 @@
attrs.put(p.getProfileKey(), p.getProfileValue());
}
else {
- named.add(a, p.getProfileValue());
+ setAttr(a, stack, p.getProfileValue());
}
}
}
@@ -248,16 +233,13 @@
Inserts namespace=globus attributes from BoundContact bc
into given attrs
*/
- private Map<String,Object>
- attributesFromHost(BoundContact bc,
- Map<String,Object> attrs,
- NamedArguments named) {
+ private Map<String,Object> attributesFromHost(BoundContact bc, Map<String, Object> attrs, Stack stack) {
Map<String,Object> props = bc.getProperties();
if (props != null) {
for (Map.Entry<String,Object> e : props.entrySet()) {
FQN fqn = new FQN(e.getKey());
if (Profile.GLOBUS.equalsIgnoreCase(fqn.getNamespace())) {
- Arg a = PROFILE_T.get(fqn.getName());
+ Attr a = ATTR_TYPES.get(fqn.getName());
if (a == null) {
if (attrs == null) {
attrs = new HashMap<String,Object>();
@@ -265,11 +247,22 @@
attrs.put(fqn.getName(), e.getValue());
}
else {
- named.add(a, e.getValue());
+ setAttr(a, stack, e.getValue());
}
}
}
}
return attrs;
}
+
+ private void setAttr(Attr a, Stack stack, Object value) {
+ switch (a) {
+ case COUNT:
+ r_count.setValue(stack, value);
+ break;
+ case JOB_TYPE:
+ r_jobType.setValue(stack, value);
+ break;
+ }
+ }
}
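
Note: of the nine profile keys the old PROFILE_T table mapped onto GridExec arguments, only count and jobType keep dedicated return variables; everything else now travels in the generic attributes map. Routing a single entry therefore reduces to the following restatement of the logic above (assuming the map has already been created; this is clarification, not new commit code):

    private void route(String key, Object value, Map<String, Object> attrs, Stack stack) {
        Attr a = ATTR_TYPES.get(key);
        if (a == null) {
            attrs.put(key, value);        // generic attribute, returned via r_attributes
        }
        else {
            setAttr(a, stack, value);     // dedicated return: r_count or r_jobType
        }
    }
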
Deleted: branches/faster/src/org/griphyn/vdl/karajan/lib/ThreadPrefix.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/ThreadPrefix.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/ThreadPrefix.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,32 +0,0 @@
-/*
- * Copyright 2012 University of Chicago
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-/*
- * Created on Dec 26, 2006
- */
-package org.griphyn.vdl.karajan.lib;
-
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-
-public class ThreadPrefix extends VDLFunction {
-
- public Object function(VariableStack stack) throws ExecutionException {
- return getThreadPrefix(stack);
- }
-
-}
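
Note: the removed node has a one-line equivalent in the new library; SwiftFunction.getThreadPrefix() simply returns the current lightweight thread's name:

    String prefix = SwiftFunction.getThreadPrefix();   // i.e. LWThread.currentThread().getName()
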
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/Throttled.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/Throttled.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/Throttled.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -23,55 +23,69 @@
import java.io.IOException;
import java.util.LinkedList;
-import org.globus.cog.karajan.stack.VariableStack;
+import k.rt.ConditionalYield;
+import k.rt.FutureObject;
+import k.thr.LWThread;
+import k.thr.Yield;
+
+import org.globus.cog.karajan.compiled.nodes.Sequential;
import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.Sequential;
import org.griphyn.vdl.util.VDL2Config;
public class Throttled extends Sequential {
public static final int DEFAULT_MAX_THREADS = 1000000;
- private LinkedList<VariableStack> waiting;
+ private LinkedList<FutureObject> waiting;
private int maxThreadCount, current;
public Throttled() {
try {
maxThreadCount = TypeUtil.toInt(VDL2Config.getConfig()
- .getProperty("exec.throttle", String.valueOf(DEFAULT_MAX_THREADS)));
+ .getProperty("max.threads", String.valueOf(DEFAULT_MAX_THREADS)));
}
catch (IOException e) {
maxThreadCount = DEFAULT_MAX_THREADS;
}
current = 0;
- waiting = new LinkedList<VariableStack>();
+ waiting = new LinkedList<FutureObject>();
}
-
+
@Override
- protected void executeChildren(VariableStack stack)
- throws ExecutionException {
- synchronized(this) {
- if (current == maxThreadCount) {
- waiting.addLast(stack);
- return;
+ public void run(LWThread thr) {
+ int i = thr.checkSliceAndPopState();
+ try {
+ switch (i) {
+ case 0:
+ tryIncCurrent();
+ i++;
+ default:
+ super.run(thr);
+ decCurrent();
}
- else {
- current++;
- }
}
- super.executeChildren(stack);
+ catch (Yield y) {
+ y.getState().push(i);
+ throw y;
+ }
+ catch (RuntimeException e) {
+ decCurrent();
+ throw e;
+ }
}
-
- @Override
- protected void post(VariableStack stack) throws ExecutionException {
- synchronized(this) {
- if (!waiting.isEmpty()) {
- super.executeChildren(waiting.removeFirst());
- }
- else {
- current--;
- }
+
+ private synchronized void decCurrent() {
+ current--;
+ if (!waiting.isEmpty()) {
+ waiting.removeFirst().setValue(Boolean.TRUE);
}
- super.post(stack);
- }
+ }
+
+ private synchronized void tryIncCurrent() {
+ if (current == maxThreadCount) {
+ FutureObject fo = new FutureObject();
+ waiting.addLast(fo);
+ throw new ConditionalYield(fo);
+ }
+ current++;
+ }
}
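
Note: run() above (and runBody() in ThrottledParallelFor below) follows the resume protocol of the new lightweight-thread runtime: progress is kept as an integer popped from the thread's state on entry and pushed back whenever a Yield passes through, so the node re-enters the switch at the right point when the thread resumes. A stripped-down sketch of the protocol; acquireSlot() and releaseSlot() stand in for tryIncCurrent()/decCurrent() and are not commit code:

    @Override
    public void run(LWThread thr) {
        int i = thr.checkSliceAndPopState();   // 0 on first entry, saved value on resume
        try {
            switch (i) {
                case 0:
                    acquireSlot();             // may throw ConditionalYield and park the thread
                    i++;
                default:
                    super.run(thr);            // the body may yield too; resume re-enters here
                    releaseSlot();
            }
        }
        catch (Yield y) {
            y.getState().push(i);              // record progress for the next invocation
            throw y;
        }
    }
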
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/ThrottledParallelFor.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/ThrottledParallelFor.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/ThrottledParallelFor.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -23,87 +23,99 @@
import java.util.List;
import java.util.StringTokenizer;
+import k.rt.ExecutionException;
+import k.rt.KRunnable;
+import k.rt.Stack;
+import k.thr.LWThread;
+import k.thr.ThreadSet;
+import k.thr.ThrottledThreadSet;
+import k.thr.Yield;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableNotFoundException;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.Identifier;
-import org.globus.cog.karajan.util.KarajanIterator;
-import org.globus.cog.karajan.util.ThreadingContext;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.CompilationException;
+import org.globus.cog.karajan.analyzer.CompilerSettings;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.globus.cog.karajan.compiled.nodes.Node;
+import org.globus.cog.karajan.compiled.nodes.UParallelFor;
+import org.globus.cog.karajan.parser.WrapperNode;
import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.futures.FutureEvaluationException;
-import org.globus.cog.karajan.workflow.futures.FutureFault;
-import org.globus.cog.karajan.workflow.futures.FutureIterator;
-import org.globus.cog.karajan.workflow.futures.FutureIteratorIncomplete;
-import org.globus.cog.karajan.workflow.futures.FutureListener;
-import org.globus.cog.karajan.workflow.futures.ListenerStackPair;
-import org.globus.cog.karajan.workflow.nodes.AbstractParallelIterator;
import org.griphyn.vdl.karajan.Pair;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.util.VDL2Config;
-public class ThrottledParallelFor extends AbstractParallelIterator {
- public static final Logger logger = Logger.getLogger(ThrottledParallelFor.class);
+public class ThrottledParallelFor extends UParallelFor {
+ public static final Logger logger = Logger
+ .getLogger(ThrottledParallelFor.class);
- public static final int DEFAULT_MAX_THREADS = 10000000;
+ public static final int DEFAULT_MAX_THREADS = 1024;
- public static final Arg A_NAME = new Arg.Positional("name");
- public static final Arg A_IN = new Arg.Positional("in");
- public static final Arg O_SELFCLOSE = new Arg.Optional("selfclose", Boolean.FALSE);
- public static final Arg O_REFS = new Arg.Optional("refs", null);
+ private ArgRef<Boolean> selfClose;
+ private ArgRef<String> refs;
+ private ArgRef<String> _kvar;
+ private ArgRef<String> _vvar;
+ private ArgRef<String> _traceline;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(
+ params(
+ identifier("name"), "in",
+ optional("selfClose", Boolean.FALSE), optional("refs", null),
+ optional("_kvar", null), optional("_vvar", null), optional("_traceline", null),
+ block("body")
+ )
+ );
+ }
- static {
- setArguments(ThrottledParallelFor.class, new Arg[] { A_NAME, A_IN, O_SELFCLOSE, O_REFS });
- }
-
- public static final String THREAD_COUNT = "#threadcount";
-
private int maxThreadCount = -1;
private Tracer forTracer, iterationTracer;
- private String kvar, vvar;
- private List<StaticRefCount> srefs;
-
- private static class StaticRefCount {
- public final String name;
+ private List<StaticRefCount> srefs;
+
+ private static class StaticRefCount {
+ public final VarRef<?> ref;
public final int count;
-
- public StaticRefCount(String name, int count) {
- this.name = name;
+
+ public StaticRefCount(VarRef<?> ref, int count) {
+ this.ref = ref;
this.count = count;
}
- }
-
- private static class RefCount {
- public final DSHandle var;
- public final int count;
-
- public RefCount(DSHandle var, int count) {
- this.var = var;
- this.count = count;
- }
-
- public void inc() {
-
- }
-
- public void dec() {
-
- }
- }
-
+ }
+
+ private static class RefCount {
+ public final DSHandle var;
+ public final int count;
+
+ public RefCount(DSHandle var, int count) {
+ this.var = var;
+ this.count = count;
+ }
+
+ public void inc() {
+
+ }
+
+ public void dec() {
+
+ }
+ }
+
@Override
- protected void initializeStatic() {
- super.initializeStatic();
+ protected Node compileBody(WrapperNode w, Scope argScope, Scope scope)
+ throws CompilationException {
+ srefs = buildStaticRefs(scope);
+ if (_traceline.getValue() != null) {
+ setLine(Integer.parseInt(_traceline.getValue()));
+ }
forTracer = Tracer.getTracer(this, "FOREACH");
iterationTracer = Tracer.getTracer(this, "ITERATION");
- kvar = (String) getProperty("_kvar");
- vvar = (String) getProperty("_vvar");
- srefs = buildStaticRefs();
+ return super.compileBody(w, argScope, scope);
}
-
- private List<StaticRefCount> buildStaticRefs() {
- String refs = (String) O_REFS.getStatic(this);
+
+ private List<StaticRefCount> buildStaticRefs(Scope scope) {
+ String refs = this.refs.getValue();
if (refs == null) {
return null;
}
@@ -117,115 +129,141 @@
}
else {
int count = Integer.parseInt(st.nextToken());
- l.add(new StaticRefCount(name.toLowerCase(), count));
+ l.add(new StaticRefCount(scope.getVarRef(name), count));
}
flip = !flip;
}
return l;
}
- private List<RefCount> buildRefs(VariableStack stack) throws VariableNotFoundException {
+ private List<RefCount> buildRefs(Stack stack) {
if (srefs == null) {
return null;
}
List<RefCount> l = new ArrayList<RefCount>(srefs.size());
for (StaticRefCount s : srefs) {
- l.add(new RefCount((DSHandle) stack.getVar(s.name), s.count));
+ l.add(new RefCount((DSHandle) s.ref.getValue(stack), s.count));
}
return l;
}
- protected void partialArgumentsEvaluated(VariableStack stack)
- throws ExecutionException {
- if (forTracer.isEnabled()) {
- forTracer.trace(ThreadingContext.get(stack).toString());
+ @SuppressWarnings("unchecked")
+ @Override
+ protected void runBody(final LWThread thr) {
+ int i = thr.checkSliceAndPopState();
+ Iterator<Object> it = (Iterator<Object>) thr.popState();
+ ThrottledThreadSet ts = (ThrottledThreadSet) thr.popState();
+ int fc = thr.popIntState();
+ List<RefCount> drefs = (List<RefCount>) thr.popState();
+ Stack stack = thr.getStack();
+ try {
+ switch(i) {
+ case 0:
+ it = in.getValue(stack).iterator();
+ ts = new ThrottledThreadSet(getMaxThreads());
+ drefs = buildRefs(stack);
+ ts.lock();
+ fc = stack.frameCount() + 1;
+
+ if (forTracer.isEnabled()) {
+ forTracer.trace(thr);
+ }
+
+ i++;
+ case 1:
+ final ThreadSet tsf = ts;
+
+ ts.checkFailed();
+
+ startBulk(thr, ts, it, fc, drefs);
+ startRest(thr, ts, it, fc, drefs);
+
+ ts.unlock();
+ decRefs(drefs);
+ ts.waitFor();
+ }
}
- super.partialArgumentsEvaluated(stack);
+ catch (Yield y) {
+ y.getState().push(drefs);
+ y.getState().push(fc);
+ y.getState().push(ts);
+ y.getState().push(it);
+ y.getState().push(i);
+ throw y;
+ }
}
- public void iterate(VariableStack stack, Identifier var, KarajanIterator i)
- throws ExecutionException {
- if (elementCount() > 0) {
- if (logger.isDebugEnabled()) {
- logger.debug("iterateParallel: " + stack.parentFrame());
- }
- stack.setVar(VAR, var);
- setChildFailed(stack, false);
- stack.setCaller(this);
- initThreadCount(stack, TypeUtil.toBoolean(O_SELFCLOSE.getStatic(this)), i);
- citerate(stack, var, i);
- }
- else {
- complete(stack);
- }
- }
-
- protected void citerate(VariableStack stack, Identifier var,
- KarajanIterator i) throws ExecutionException {
- ThreadCount tc = getThreadCount(stack);
-
- // we can bulk operations at the start to avoid contention
- // on the counter since at least as many
- // threads as reported by available() are available
- int available = tc.available();
- try {
- int j = 0;
- try {
- for (; j < available && i.hasNext(); j++) {
- startIteration(tc, var, i.current(), i.next(), stack);
- }
- }
- finally {
- tc.add(j);
- }
- while (i.hasNext()) {
- startIteration(tc, var, i.current(), tc.tryIncrement(), stack);
- }
-
- decRefs(tc.rc);
-
- int left;
- synchronized(tc) {
- // can only have closed and running = 0 in one place
- tc.close();
- left = tc.current();
- }
- if (left == 0) {
- complete(stack);
- }
- }
- catch (FutureIteratorIncomplete fii) {
- synchronized (stack.currentFrame()) {
- stack.setVar(ITERATOR, i);
+ private boolean startBulk(LWThread thr, ThrottledThreadSet ts, Iterator<Object> it, int fcf, List<RefCount> refs) {
+ int available = ts.freeSlots();
+ int j = 0;
+ Stack stack = thr.getStack();
+ for (; j < available && it.hasNext(); j++) {
+ if (startOne(thr, ts, it.next(), fcf, refs)) {
+ // aborted
+ return true;
}
- fii.getFutureIterator().addModificationAction(this, stack);
- }
- }
+ }
+ return false;
+ }
- private void startIteration(ThreadCount tc, Identifier var, int id, Object value,
- VariableStack stack) throws ExecutionException {
- incRefs(tc.rc);
- VariableStack copy = stack.copy();
- copy.enter();
- ThreadingContext ntc = ThreadingContext.get(copy).split(id);
- ThreadingContext.set(copy, ntc);
- setIndex(copy, 2);
- if (iterationTracer.isEnabled()) {
- iterationTracer.trace(ntc.toString(), unwrap(value));
+ private boolean startRest(LWThread thr, ThrottledThreadSet ts, Iterator<Object> it, int fcf, List<RefCount> refs) {
+ Stack stack = thr.getStack();
+ while (it.hasNext()) {
+ ts.waitForSlot();
+ if (startOne(thr, ts, it.next(), fcf, refs)) {
+ return true;
+ }
}
- copy.setVar(var.getName(), value);
- startElement(1, copy);
+ return false;
}
+ private boolean startOne(final LWThread thr, final ThreadSet ts, final Object value, final int fcf, List<RefCount> refs) {
+ incRefs(refs);
+ LWThread ct = thr.fork(new KRunnable() {
+ @Override
+ public void run(LWThread thr2) {
+ try {
+ if (iterationTracer.isEnabled()) {
+ iterationTracer.trace(thr2, unwrap(value));
+ }
+
+ if (CompilerSettings.PERFORMANCE_COUNTERS) {
+ startCount++;
+ }
+ body.run(thr2);
+ ts.threadDone(thr2, null);
+ }
+ catch (ExecutionException e) {
+ throw e;
+ }
+ catch (Exception e) {
+ thr2.getStack().dropToFrame(fcf);
+ ts.threadDone(thr2, new ExecutionException(ThrottledParallelFor.this, e));
+ ts.abortAll();
+ thr.awake();
+ }
+ }
+ });
+ if(ts.add(ct)) {
+ return true;
+ }
+
+ Stack cs = ct.getStack();
+ cs.enter(this, frameSize);
+ this.var.setValue(cs, value);
+ ct.start();
+ return false;
+ }
+
private void decRefs(List<RefCount> rcs) throws ExecutionException {
- if (rcs != null) {
- for (RefCount rc : rcs) {
- rc.var.updateWriteRefCount(-rc.count);
- }
- }
- }
-
- private void incRefs(List<RefCount> rcs) throws ExecutionException {
+ if (rcs != null) {
+ for (RefCount rc : rcs) {
+ rc.var.updateWriteRefCount(-rc.count);
+ }
+ }
+ }
+
+ private void incRefs(List<RefCount> rcs) throws ExecutionException {
if (rcs != null) {
for (RefCount rc : rcs) {
rc.var.updateWriteRefCount(rc.count);
@@ -233,218 +271,38 @@
}
}
- private Object unwrap(Object value) {
+
+ private int getMaxThreads() {
+ if (maxThreadCount < 0) {
+ try {
+ maxThreadCount = TypeUtil.toInt(VDL2Config.getConfig()
+ .getProperty("foreach.max.threads", String.valueOf(DEFAULT_MAX_THREADS)));
+ }
+ catch (IOException e) {
+ maxThreadCount = DEFAULT_MAX_THREADS;
+ }
+ }
+ return maxThreadCount;
+ }
+
+ protected Object unwrap(Object value) {
if (value instanceof Pair) {
Pair p = (Pair) value;
- if (kvar != null) {
- return kvar + "=" + p.get(0) + ", " + vvar + "=" + Tracer.unwrapHandle(p.get(1));
+ if (_kvar.getValue() != null) {
+ return _kvar.getValue() + "=" + p.get(0) + ", " + _vvar.getValue() + "=" + Tracer.unwrapHandle(p.get(1));
}
else {
- return vvar + "=" + Tracer.unwrapHandle(p.get(1));
+ return _vvar.getValue() + "=" + Tracer.unwrapHandle(p.get(1));
}
}
else {
return "!";
}
}
-
- @Override
- public void completed(VariableStack stack) throws ExecutionException {
- int index = preIncIndex(stack) - 1;
- if (index == 1) {
- // iterator
- stack.currentFrame().deleteVar(QUOTED);
- processArguments(stack);
- try {
- partialArgumentsEvaluated(stack);
- }
- catch (FutureFault e) {
- e.getFuture().addModificationAction(new PartialResume(), stack);
- }
- }
- else if (index == elementCount()) {
- iterationCompleted(stack);
- }
- else {
- startElement(index, stack);
- }
- }
- public void failed(VariableStack stack, ExecutionException e) throws ExecutionException {
- if (!testAndSetChildFailed(stack)) {
- if (stack.parentFrame().isDefined(VAR)) {
- stack.leave();
- }
- failImmediately(stack, e);
- }
- }
- protected void iterationCompleted(VariableStack stack)
- throws ExecutionException {
- stack.leave();
- ThreadCount tc = getThreadCount(stack);
- int running;
- boolean closed;
- boolean iteratorHasValues;
- synchronized(tc) {
- closed = tc.isClosed();
- running = tc.decrement();
- iteratorHasValues = tc.iteratorHasValues();
- }
- boolean done = false;
- if (running == 0) {
- if (closed) {
- complete(stack);
- }
- if (tc.selfClose && !iteratorHasValues) {
- decRefs(tc.rc);
- complete(stack);
- }
- }
- }
-
- private void initThreadCount(VariableStack stack, boolean selfClose, KarajanIterator i) throws VariableNotFoundException {
- if (maxThreadCount < 0) {
- try {
- maxThreadCount = TypeUtil.toInt(VDL2Config.getConfig()
- .getProperty("foreach.max.threads", String.valueOf(DEFAULT_MAX_THREADS)));
- }
- catch (IOException e) {
- maxThreadCount = DEFAULT_MAX_THREADS;
- }
- }
- stack.setVar(THREAD_COUNT, new ThreadCount(maxThreadCount, selfClose, i, buildRefs(stack)));
- }
-
- private ThreadCount getThreadCount(VariableStack stack)
- throws VariableNotFoundException {
- return (ThreadCount) stack.getVar(THREAD_COUNT);
- }
-
@Override
public String getTextualName() {
return "foreach";
}
-
- private static class ThreadCount implements FutureIterator {
- public boolean selfClose;
- private int maxThreadCount;
- private int crt;
- private boolean closed;
- private List<ListenerStackPair> listeners;
- private KarajanIterator i;
- private final List<RefCount> rc;
-
- public ThreadCount(int maxThreadCount, boolean selfClose, KarajanIterator i, List<RefCount> rc) {
- this.maxThreadCount = maxThreadCount;
- this.i = i;
- crt = 0;
- this.selfClose = selfClose;
- this.rc = rc;
- }
-
- public boolean raiseWaiting() {
- return false;
- }
-
- public boolean iteratorHasValues() {
- try {
- return i.hasNext();
- }
- catch (FutureFault e) {
- return false;
- }
- }
-
- public synchronized int available() {
- return maxThreadCount - crt;
- }
-
- public synchronized void add(int count) {
- crt += count;
- }
-
- public synchronized Object tryIncrement() {
- // there is no way that both crt == 0 and i has no values outside this critical section
- if (crt < maxThreadCount) {
- Object o = i.next();
- crt++;
- return o;
- }
- else {
- throw new FutureIteratorIncomplete(this, this);
- }
- }
-
- public synchronized int decrement() {
- crt--;
- notifyListeners();
- return crt;
- }
-
- private void notifyListeners() {
- if (listeners != null) {
- Iterator<ListenerStackPair> i = listeners.iterator();
- listeners = null;
- while (i.hasNext()) {
- ListenerStackPair etp = i.next();
- i.remove();
- etp.listener.futureModified(this, etp.stack);
- }
- }
- }
-
- public boolean hasAvailable() {
- return false;
- }
-
- public int count() {
- return 0;
- }
-
- public synchronized int current() {
- return crt;
- }
-
- public Object peek() {
- return null;
- }
-
- public boolean hasNext() {
- return false;
- }
-
- public Object next() {
- return null;
- }
-
- public void remove() {
- }
-
- public synchronized void addModificationAction(FutureListener target,
- VariableStack stack) {
- if (listeners == null) {
- listeners = new ArrayList<ListenerStackPair>();
- }
- listeners.add(new ListenerStackPair(target, stack));
- if (crt < maxThreadCount) {
- notifyListeners();
- }
- }
-
- public synchronized void close() {
- this.closed = true;
- }
-
- public void fail(FutureEvaluationException e) {
- }
-
- public Object getValue() {
- return null;
- }
-
- public synchronized boolean isClosed() {
- return closed;
- }
- }
}
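
Note: the foreach body is now driven by a ThrottledThreadSet: iterations are forked as child LWThreads, the set caps how many run at once, and waitFor() replaces the old ThreadCount bookkeeping. A condensed sketch of the protocol used by startBulk()/startRest()/startOne() above (loop variables are illustrative):

    final ThrottledThreadSet ts = new ThrottledThreadSet(getMaxThreads());
    ts.lock();                              // keep the set open while iterations are added
    while (it.hasNext()) {
        final Object value = it.next();
        ts.waitForSlot();                   // yields until a slot frees up
        LWThread child = thr.fork(new KRunnable() {
            public void run(LWThread t) {
                // ... run the loop body for 'value', then report completion:
                ts.threadDone(t, null);     // or pass an ExecutionException on failure
            }
        });
        if (!ts.add(child)) {               // add() returns true if the set was aborted
            // (the real code also sets up the child's stack frame before start())
            child.start();
        }
    }
    ts.unlock();
    ts.waitFor();                           // yield until all started iterations finish
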
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/Tracer.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/Tracer.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/Tracer.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -13,13 +13,12 @@
import java.util.HashMap;
import java.util.Map;
+import k.rt.Future;
+import k.thr.LWThread;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.stack.VariableNotFoundException;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.ThreadingContext;
-import org.globus.cog.karajan.workflow.futures.Future;
-import org.globus.cog.karajan.workflow.nodes.FlowElement;
-import org.globus.cog.karajan.workflow.nodes.FlowNode;
+import org.globus.cog.karajan.analyzer.VariableNotFoundException;
+import org.globus.cog.karajan.compiled.nodes.Node;
import org.griphyn.vdl.engine.Karajan;
import org.griphyn.vdl.karajan.FutureWrapper;
import org.griphyn.vdl.mapping.AbstractDataNode;
@@ -56,7 +55,7 @@
this.enabled = enabled;
}
- private Tracer(FlowNode fe, String name) {
+ private Tracer(Node fe, String name) {
source = buildSource(fe, name);
if (source == null) {
enabled = false;
@@ -76,11 +75,11 @@
enabled = true;
}
- private Tracer(FlowNode fe) {
+ private Tracer(Node fe) {
this(fe, null);
}
- private String buildSource(FlowNode fe, String name) {
+ private String buildSource(Node fe, String name) {
String line = findLine(fe);
if (line == null) {
return null;
@@ -99,7 +98,7 @@
return sb.toString();
}
- private String getType(FlowNode fe) {
+ private String getType(Node fe) {
String t = Karajan.demangle(fe.getTextualName());
String nt = NAME_MAPPINGS.get(t);
if (nt == null) {
@@ -110,31 +109,16 @@
}
}
- private String findLine(FlowElement fe) {
- String line;
- if (fe.hasProperty("_traceline")) {
- line = (String) fe.getProperty("_traceline");
- }
- else if (fe.hasProperty("_defline")) {
- line = (String) fe.getProperty("_defline");
- }
- else {
- line = null;
- }
- if (line == null || line.equals("-1") || line.equals("")) {
- return null;
- }
- else {
- return line;
- }
+ private String findLine(Node fe) {
+ return String.valueOf(fe.getLine());
}
public boolean isEnabled() {
return enabled;
}
- public void trace(VariableStack stack, Object msg) throws VariableNotFoundException {
- trace(threadName(stack), msg);
+ public void trace(LWThread thr, Object msg) throws VariableNotFoundException {
+ trace(threadName(thr), msg);
}
public void trace(String thread, Object msg) {
@@ -158,7 +142,7 @@
logger.info(str);
}
- public void trace(String thread) {
+ public void trace(LWThread thread) {
logger.info(source + ", thread " + threadName(thread));
}
@@ -171,17 +155,17 @@
}
}
- private String threadName(VariableStack stack) throws VariableNotFoundException {
- return threadName(ThreadingContext.get(stack).toString());
+ private String threadName(LWThread thr) throws VariableNotFoundException {
+ return thr.getName();
}
private static Tracer disabledTracer, enabledTracer;
- public static Tracer getTracer(FlowNode fe) {
+ public static Tracer getTracer(Node fe) {
return getTracer(fe, null);
}
- public static Tracer getTracer(FlowNode fe, String name) {
+ public static Tracer getTracer(Node fe, String name) {
if (globalTracingEnabled) {
return new Tracer(fe, name);
}
Deleted: branches/faster/src/org/griphyn/vdl/karajan/lib/Typecheck.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/Typecheck.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/Typecheck.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,69 +0,0 @@
-/*
- * Copyright 2012 University of Chicago
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-/*
- * Created on Dec 26, 2006
- */
-package org.griphyn.vdl.karajan.lib;
-
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.mapping.DSHandle;
-import org.griphyn.vdl.type.NoSuchTypeException;
-import org.griphyn.vdl.type.Type;
-import org.griphyn.vdl.type.Types;
-
-public class Typecheck extends VDLFunction {
- public static final Arg PA_TYPE = new Arg.Positional("type");
- public static final Arg OA_ARGNAME = new Arg.Optional("argname");
-
- static {
- setArguments(Typecheck.class, new Arg[] { PA_VAR, PA_TYPE, OA_ARGNAME });
- }
-
- public Object function(VariableStack stack) throws ExecutionException {
- String type = TypeUtil.toString(PA_TYPE.getValue(stack));
- Object ovar = PA_VAR.getValue(stack);
- if (!(ovar instanceof DSHandle)) {
- throw new ExecutionException("Wrong java type for argument. "
- + "Expected DSHandle containing " + type
- + "; got java object of class " + ovar.getClass() + " with value " + ovar);
- }
- DSHandle var = (DSHandle) ovar;
- String argname = TypeUtil.toString(OA_ARGNAME.getValue(stack, null));
-
- try {
- Type t = Types.getType(type);
- if (!compatible(t, var.getType())) {
- if (argname != null) {
- throw new ExecutionException("Wrong type for argument '" + argname + "'. Expected "
- + type + "; got " + var.getType() + ". Actual argument: " + var);
- }
- else {
- throw new ExecutionException("Wrong type for argument. Expected " + type + "; got "
- + var.getType() + ". Actual argument: " + var);
- }
- }
- }
- catch (NoSuchTypeException e) {
- throw new ExecutionException(e);
- }
- return null;
- }
-}
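
The deleted Typecheck relied on compatible() from the (also deleted) VDLFunction, which accepts an exact type match, lets int stand in where float or float[] is expected, and lets "any" match everything. A minimal standalone sketch of that rule, with a hypothetical enum standing in for org.griphyn.vdl.type.Types (the array case is elided):

// Hypothetical stand-in for org.griphyn.vdl.type.Types; only the rule matters here.
enum SwiftType { INT, FLOAT, BOOLEAN, STRING, ANY }

final class TypeCompat {
    private TypeCompat() {}

    // Mirrors the removed compatible(): exact match, int widening to float,
    // and ANY accepting anything.
    static boolean compatible(SwiftType expected, SwiftType actual) {
        if (expected == SwiftType.ANY) {
            return true;
        }
        if (expected == SwiftType.FLOAT) {
            return actual == SwiftType.FLOAT || actual == SwiftType.INT;
        }
        return expected == actual;
    }
}
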
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/UnitEnd.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/UnitEnd.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/UnitEnd.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -9,37 +9,32 @@
*/
package org.griphyn.vdl.karajan.lib;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.ThreadingContext;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.FlowNode;
+import k.rt.ExecutionException;
+import k.thr.LWThread;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.compiled.nodes.InternalFunction;
import org.griphyn.vdl.karajan.WaitingThreadsMonitor;
-public class UnitEnd extends FlowNode {
- public static final Arg.Positional TYPE = new Arg.Positional("type");
- public static final Arg.Optional NAME = new Arg.Optional("name", null);
- public static final Arg.Optional LINE = new Arg.Optional("line", null);
+public class UnitEnd extends InternalFunction {
- @Override
- public void execute(VariableStack stack) throws ExecutionException {
- executeSimple(stack);
- complete(stack);
- }
+ private ArgRef<String> type;
+ private ArgRef<String> name;
+ private ArgRef<String> line;
@Override
- public boolean isSimple() {
- return super.isSimple();
+ protected Signature getSignature() {
+ return new Signature(params("type", optional("name", null), optional("line", null)));
}
@Override
- public void executeSimple(VariableStack stack) throws ExecutionException {
- String type = (String) TYPE.getStatic(this);
- ThreadingContext thread = ThreadingContext.get(stack);
- String name = (String) NAME.getStatic(this);
- String line = (String) LINE.getStatic(this);
+ public void run(LWThread thr) throws ExecutionException {
+ String type = this.type.getValue();
+ String name = this.name.getValue();
+ String line = this.line.getValue();
- UnitStart.log(false, type, thread, name, line);
- WaitingThreadsMonitor.removeOutput(stack);
+ UnitStart.log(false, type, thr, name, line);
+ WaitingThreadsMonitor.removeOutput(thr);
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/UnitStart.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/UnitStart.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/UnitStart.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -10,163 +10,182 @@
package org.griphyn.vdl.karajan.lib;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collections;
+import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
+import java.util.Set;
+import k.rt.Stack;
+import k.thr.LWThread;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableNotFoundException;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.ThreadingContext;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.FlowNode;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.CompilationException;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.globus.cog.karajan.compiled.nodes.Node;
+import org.globus.cog.karajan.compiled.nodes.InternalFunction;
+import org.globus.cog.karajan.parser.WrapperNode;
import org.griphyn.vdl.engine.Karajan;
import org.griphyn.vdl.karajan.WaitingThreadsMonitor;
import org.griphyn.vdl.mapping.DSHandle;
-public class UnitStart extends FlowNode {
+public class UnitStart extends InternalFunction {
public static final Logger uslogger = Logger.getLogger(UnitStart.class);
// keep compatibility with log()
public static final Logger logger = Logger.getLogger("swift");
- public static final Arg.Positional TYPE = new Arg.Positional("type");
- public static final Arg.Optional NAME = new Arg.Optional("name", null);
- public static final Arg.Optional LINE = new Arg.Optional("line", null);
- public static final Arg.Optional OUTPUTS = new Arg.Optional("outputs", null);
+ private ArgRef<String> type;
+ private ArgRef<String> name;
+ private ArgRef<String> line;
+ private ArgRef<String> arguments;
+ private ArgRef<String> outputs;
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("type", optional("name", null), optional("line", null),
+ optional("outputs", null), optional("arguments", null)));
+ }
+
+ private static class NamedRef {
+ public final String name;
+ public final VarRef<DSHandle> ref;
+
+ public NamedRef(String name, VarRef<DSHandle> ref) {
+ this.name = name;
+ this.ref = ref;
+ }
+ }
+
private Tracer tracer;
- private List<String> inputArgs, outputArgs;
-
+ private List<NamedRef> inputArgs, outputArgs;
+
@Override
- protected void initializeStatic() {
- super.initializeStatic();
- String type = (String) TYPE.getStatic(this);
+ public Node compile(WrapperNode w, Scope scope) throws CompilationException {
+ Node fn = super.compile(w, scope);
+ String type = this.type.getValue();
if (type.equals("PROCEDURE")) {
- tracer = Tracer.getTracer((FlowNode) getParent(), "APPCALL");
+ tracer = Tracer.getTracer(line.getValue(), "APPCALL");
}
else if (type.equals("COMPOUND")) {
- tracer = Tracer.getTracer((FlowNode) getParent(), "CALL");
+ tracer = Tracer.getTracer(line.getValue(), "CALL");
}
if (tracer != null && tracer.isEnabled()) {
- populateArgNames();
+ populateArgNames(scope);
}
+ return fn;
}
- private void populateArgNames() {
- String outs = (String) getStaticArguments().get("outputs");
+ private void populateArgNames(Scope scope) {
+ String outs = this.outputs.getValue();
+ Set<String> outNames = new HashSet<String>();
if (outs != null && outs.length() > 0) {
- outputArgs = Arrays.asList(outs.split(","));
+ outputArgs = new ArrayList<NamedRef>();
+ for (String name : outs.split(",")) {
+ VarRef<DSHandle> ref = scope.getVarRef(name);
+ outputArgs.add(new NamedRef(name, ref));
+ outNames.add(name);
+ }
}
else {
- outputArgs = Collections.emptyList();
+ outputArgs = null;
}
- String args = (String) getParent().getStaticArguments().get("arguments");
+ String args = this.arguments.getValue();
if (args != null && args.length() > 0) {
- inputArgs = new ArrayList<String>(Arrays.asList(args.split(",")));
- inputArgs.removeAll(outputArgs);
+ inputArgs = new ArrayList<NamedRef>();
+ for (String name : args.split(",")) {
+ if (outNames.contains(name)) {
+ continue;
+ }
+ VarRef<DSHandle> ref = scope.getVarRef(name);
+ inputArgs.add(new NamedRef(name, ref));
+ }
}
else {
- inputArgs = Collections.emptyList();
+ inputArgs = null;
}
- }
-
- @Override
- public void execute(VariableStack stack) throws ExecutionException {
- executeSimple(stack);
- complete(stack);
- }
+ }
- @Override
- public boolean isSimple() {
- return super.isSimple();
- }
@Override
- public void executeSimple(VariableStack stack) throws ExecutionException {
- String type = (String) TYPE.getStatic(this);
- ThreadingContext thread = ThreadingContext.get(stack);
- String name = (String) NAME.getStatic(this);
- String line = (String) LINE.getStatic(this);
+ protected void runBody(LWThread thr) {
+ String type = this.type.getValue();
+ String name = this.name.getValue();
+ String line = this.line.getValue();
if (tracer != null && tracer.isEnabled()) {
- tracer.trace(thread.toString(), Karajan.demangle(name) + "(" + formatArguments(stack) + ")");
+ tracer.trace(thr, Karajan.demangle(name) + "(" + formatArguments(thr.getStack()) + ")");
}
- log(true, type, thread, name, line);
+ log(true, type, thr, name, line);
- String outputs = (String) OUTPUTS.getStatic(this);
- if (outputs != null) {
- trackOutputs(stack, outputs, "SCOPE".equals(type));
+ if (outputArgs != null) {
+ trackOutputs(thr);
}
}
- private String formatArguments(VariableStack stack) {
- StringBuilder sb = new StringBuilder();
- boolean first = true;
- for (String name : inputArgs) {
- if (first) {
- first = false;
+ private String formatArguments(Stack stack) {
+ if (inputArgs != null) {
+ StringBuilder sb = new StringBuilder();
+ boolean first = true;
+ for (NamedRef nr : inputArgs) {
+ if (first) {
+ first = false;
+ }
+ else {
+ sb.append(", ");
+ }
+ sb.append(Karajan.demangle(nr.name));
+ sb.append(" = ");
+ sb.append(Tracer.unwrapHandle(nr.ref.getValue(stack)));
}
- else {
- sb.append(", ");
- }
- sb.append(Karajan.demangle(name));
- sb.append(" = ");
- sb.append(Tracer.unwrapHandle(stack.parentFrame().getVar(name)));
+ return sb.toString();
}
- return sb.toString();
+ else {
+ return "";
+ }
}
private static final List<DSHandle> EMPTY_OUTPUTS = Collections.emptyList();
- private void trackOutputs(VariableStack stack, String outputs, boolean deep) {
- if (outputs.length() != 0) {
- String[] names = outputs.split(",");
+ private void trackOutputs(LWThread thr) {
+ Stack stack = thr.getStack();
+ if (!outputArgs.isEmpty()) {
List<DSHandle> l = new LinkedList<DSHandle>();
- for (String name : names) {
- if (deep) {
- try {
- l.add((DSHandle) stack.getVar(name.toLowerCase()));
- }
- catch (VariableNotFoundException e) {
- logger.info("Could not find variable " + name, e);
- }
- }
- else {
- l.add((DSHandle) stack.parentFrame().getVar(name));
- }
+ for (NamedRef nr : outputArgs) {
+ l.add(nr.ref.getValue(stack));
}
- WaitingThreadsMonitor.addOutput(stack, l);
+ WaitingThreadsMonitor.addOutput(thr, l);
}
}
- protected static void log(boolean start, String type, ThreadingContext thread, String name, String line) {
+ protected static void log(boolean start, String type, LWThread thread, String name, String line) {
if (type.equals("COMPOUND")) {
- logger.info((start ? "START" : "END") + type + " thread=" + thread + " name=" + name);
+ logger.info((start ? "START" : "END") + type + " thread=" + thread.getName() + " name=" + name);
}
else if (type.equals("PROCEDURE")) {
if (start) {
- logger.debug("PROCEDURE line=" + line + " thread=" + thread + " name=" + name);
+ logger.debug("PROCEDURE line=" + line + " thread=" + thread.getName() + " name=" + name);
}
else {
- logger.debug("PROCEDURE_END line=" + line + " thread=" + thread + " name=" + name);
+ logger.debug("PROCEDURE_END line=" + line + " thread=" + thread.getName() + " name=" + name);
}
}
else if (type.equals("FOREACH_IT")) {
- logger.debug("FOREACH_IT_" + (start ? "START" : "END") + " line=" + line + " thread=" + thread);
+ logger.debug("FOREACH_IT_" + (start ? "START" : "END") + " line=" + line + " thread=" + thread.getName());
if (start) {
- logger.debug("SCOPE thread=" + thread);
+ logger.debug("SCOPE thread=" + thread.getName());
}
}
else if (type.equals("INTERNALPROC")) {
- logger.debug("INTERNALPROC_" + (start ? "START" : "END") + " thread=" + thread + " name=" + name);
+ logger.debug("INTERNALPROC_" + (start ? "START" : "END") + " thread=" + thread.getName() + " name=" + name);
}
else if (type.equals("CONDITION_BLOCK")) {
if (start) {
- logger.debug("SCOPE thread=" + thread);
+ logger.debug("SCOPE thread=" + thread.getName());
}
}
}
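
In the UnitStart rewrite above, the argument names are resolved to VarRefs once at compile time and formatArguments() then just walks the pre-resolved NamedRef list to build the "name = value" trace string. A small standalone sketch of that formatting step, assuming the bindings have already been resolved into a map (plain Java, no Karajan types):

import java.util.LinkedHashMap;
import java.util.Map;

final class ArgFormatter {
    private ArgFormatter() {}

    // Builds "a = 1, b = two" the way formatArguments() does, assuming the
    // name -> value bindings were resolved ahead of time.
    static String format(Map<String, Object> inputs) {
        StringBuilder sb = new StringBuilder();
        boolean first = true;
        for (Map.Entry<String, Object> e : inputs.entrySet()) {
            if (first) {
                first = false;
            }
            else {
                sb.append(", ");
            }
            sb.append(e.getKey()).append(" = ").append(e.getValue());
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        Map<String, Object> inputs = new LinkedHashMap<String, Object>();
        inputs.put("x", 1);
        inputs.put("name", "two");
        System.out.println(format(inputs)); // prints: x = 1, name = two
    }
}
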
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/UnwrapClosedList.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/UnwrapClosedList.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/UnwrapClosedList.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -24,24 +24,26 @@
import java.util.List;
import java.util.Map;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.DSHandle;
-public class UnwrapClosedList extends VDLFunction {
+public class UnwrapClosedList extends SwiftFunction {
public static final Logger logger = Logger.getLogger(UnwrapClosedList.class);
- public static final Arg.Positional PA_LIST = new Arg.Positional("list");
+ private ArgRef<List<DSHandle>> list;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("list"));
+ }
- static {
- setArguments(UnwrapClosedList.class, new Arg[] { PA_LIST });
- }
-
- public Object function(VariableStack stack) throws ExecutionException {
- @SuppressWarnings("unchecked")
- List<DSHandle> l = (List<DSHandle>) PA_LIST.getValue(stack);
+ @Override
+ public Object function(Stack stack) {
+ List<DSHandle> l = this.list.getValue(stack);
List<Object> r = new ArrayList<Object>(l.size());
Deleted: branches/faster/src/org/griphyn/vdl/karajan/lib/VDLFunction.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/VDLFunction.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/VDLFunction.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -1,475 +0,0 @@
-/*
- * Copyright 2012 University of Chicago
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.griphyn.vdl.karajan.lib;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.Stack;
-
-import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.arguments.ArgUtil;
-import org.globus.cog.karajan.arguments.VariableArguments;
-import org.globus.cog.karajan.stack.StackFrame;
-import org.globus.cog.karajan.stack.VariableNotFoundException;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.BoundContact;
-import org.globus.cog.karajan.util.ThreadingContext;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.KarajanRuntimeException;
-import org.globus.cog.karajan.workflow.futures.Future;
-import org.globus.cog.karajan.workflow.nodes.SequentialWithArguments;
-import org.globus.cog.karajan.workflow.nodes.restartLog.RestartLog;
-import org.globus.swift.catalog.TCEntry;
-import org.globus.swift.catalog.transformation.File;
-import org.globus.swift.catalog.types.TCType;
-import org.griphyn.vdl.karajan.AssertFailedException;
-import org.griphyn.vdl.karajan.FutureWrapper;
-import org.griphyn.vdl.karajan.Loader;
-import org.griphyn.vdl.karajan.TCCache;
-import org.griphyn.vdl.karajan.functions.ConfigProperty;
-import org.griphyn.vdl.mapping.AbsFile;
-import org.griphyn.vdl.mapping.AbstractDataNode;
-import org.griphyn.vdl.mapping.DSHandle;
-import org.griphyn.vdl.mapping.DependentException;
-import org.griphyn.vdl.mapping.GeneralizedFileFormat;
-import org.griphyn.vdl.mapping.HandleOpenException;
-import org.griphyn.vdl.mapping.InvalidPathException;
-import org.griphyn.vdl.mapping.Mapper;
-import org.griphyn.vdl.mapping.Path;
-import org.griphyn.vdl.mapping.PathComparator;
-import org.griphyn.vdl.mapping.PhysicalFormat;
-import org.griphyn.vdl.type.Type;
-import org.griphyn.vdl.type.Types;
-import org.griphyn.vdl.util.FQN;
-import org.griphyn.vdl.util.VDL2ConfigProperties;
-
-public abstract class VDLFunction extends SequentialWithArguments {
- public static final Logger logger = Logger.getLogger(VDLFunction.class);
-
- public static final Arg.Channel ERRORS = new Arg.Channel("errors");
-
- public static final Arg OA_PATH = new Arg.Optional("path", "");
- public static final Arg PA_PATH = new Arg.Positional("path");
- public static final Arg PA_VAR = new Arg.Positional("var");
- public static final Arg OA_ISARRAY = new Arg.Optional("isArray", Boolean.FALSE);
-
- public final void post(VariableStack stack) throws ExecutionException {
- try {
- Object o = function(stack);
- if (o != null) {
- ret(stack, o);
- }
- super.post(stack);
- }
- catch (AssertFailedException e) {
- logger.fatal("swift: assert failed: " + e.getMessage());
- stack.getExecutionContext().failedQuietly(stack, e);
- }
- catch (ExecutionException e) {
- if (e.getStack() == null) {
- e.setStack(stack);
- }
- throw e;
- }
- catch (DependentException e) {
- // This would not be the primal fault so in non-lazy errors mode it
- // should not matter
- throw new ExecutionException(stack, e);
- }
- }
-
- protected void ret(VariableStack stack, final Object value) throws ExecutionException {
- if (value != null) {
- final VariableArguments vret = ArgUtil.getVariableReturn(stack);
- if (value.getClass().isArray()) {
- if (value.getClass().getComponentType().isPrimitive()) {
- vret.append(value);
- }
- else {
- Object[] array = (Object[]) value;
- for (int i = 0; i < array.length; i++) {
- vret.append(array[i]);
- }
- }
- }
- else {
- vret.append(value);
- }
- }
- }
-
- protected abstract Object function(VariableStack stack) throws ExecutionException;
-
- /*
- * This will likely break if the engine changes in fundamental ways. It also
- * depends on the fact that iteration variable is named '$' in this
- * particular implementation.
- */
- public static String getThreadPrefix(VariableStack stack) throws ExecutionException {
- stack = stack.copy();
- ThreadingContext last = ThreadingContext.get(stack);
- Stack<Object> s = new Stack<Object>();
- while (stack.frameCount() > 1) {
- StackFrame frame = stack.currentFrame();
- if (frame.isDefined("$")) {
- List<?> itv = (List<?>) frame.getVar("$");
- s.push(itv.get(0));
- stack.leave();
- last = ThreadingContext.get(stack);
- }
- else {
- ThreadingContext tc = ThreadingContext.get(stack);
- if (!last.equals(tc)) {
- s.push(String.valueOf(last.getLastID()));
- last = tc;
- }
- stack.leave();
- }
- }
-
- StringBuffer sb = new StringBuffer();
- while (!s.isEmpty()) {
- sb.append(s.pop());
- if (!s.isEmpty()) {
- sb.append('-');
- }
- }
- return sb.toString();
- }
-
- // TODO - is this needed any more? its doing some type inferencing and
- // object creation and dequoting of strings, but the necessary behaviour
- // here has possibly moved elsewhere, into a more strongly typed
- // intermediate
- // XML form that removes the need for this inference.
-
- // we might need to do some casting here for the numerical stuff - eg when
- // asking for a float but we're given an int? not sure? might be the case
- // that we already have value in the Double form already, in which case
- // deference the internal value?
-
- // this is only used by VDL new (and really should only be used by
- // VDL new, and should perhaps move to the VDL new source?)
-
- protected Object internalValue(Type type, Object value) {
- if (Types.FLOAT.equals(type)) {
- return new Double(TypeUtil.toDouble(value));
- }
- else if (Types.INT.equals(type)) {
- return new Integer(TypeUtil.toInt(value));
- }
- else if (Types.BOOLEAN.equals(type)) {
- return new Boolean(TypeUtil.toBoolean(value));
- }
- else {
- return value;
- }
- }
-
- public static final String[] EMPTY_STRING_ARRAY = new String[0];
-
- public static String[] filename(VariableStack stack) throws ExecutionException {
- DSHandle handle = (DSHandle)PA_VAR.getValue(stack);
- return filename(stack, handle);
- }
-
- public static String[] filename(VariableStack stack, DSHandle handle) throws ExecutionException {
- return filename(handle);
- }
-
- public static String[] filename(DSHandle var) throws ExecutionException {
- try {
- if (var.getType().isArray()) {
- return leavesFileNames(var);
- }
- else if(var.getType().getFields().size() > 0) {
- return leavesFileNames(var);
- }
- else {
- return new String[] { leafFileName(var) };
- }
- }
- catch (DependentException e) {
- return new String[0];
- }
- catch (HandleOpenException e) {
- throw new ExecutionException("The current implementation should not throw this exception", e);
- }
- }
-
- private static String[] leavesFileNames(DSHandle var) throws ExecutionException, HandleOpenException {
- Mapper mapper;
-
- synchronized (var.getRoot()) {
- mapper = var.getMapper();
- }
-
- if (mapper == null) {
- throw new ExecutionException(var.getType() + " is not a mapped type");
- }
-
- List<String> l = new ArrayList<String>();
- try {
- Collection<Path> fp = var.getFringePaths();
- List<Path> src;
- if (fp instanceof List) {
- src = (List<Path>) fp;
- }
- else {
- src = new ArrayList<Path>(fp);
- }
- Collections.sort(src, new PathComparator());
-
- for (Path p : src) {
- l.add(leafFileName(var.getField(p), mapper));
- }
- }
- catch (InvalidPathException e) {
- throw new ExecutionException("DSHandle is lying about its fringe paths");
- }
- return l.toArray(EMPTY_STRING_ARRAY);
- }
-
- private static String leafFileName(DSHandle var) throws ExecutionException {
- return leafFileName(var, var.getMapper());
- }
-
- private static String leafFileName(DSHandle var, Mapper mapper) throws ExecutionException {
- if (Types.STRING.equals(var.getType())) {
- return relativize(String.valueOf(var.getValue()));
- }
- else {
- if (var.getMapper() == null) {
- throw new ExecutionException("Cannot invoke filename() on data without a mapper: " + var);
- }
- PhysicalFormat f = var.getMapper().map(var.getPathFromRoot());
- if (f instanceof GeneralizedFileFormat) {
- String filename = ((GeneralizedFileFormat) f).getURIAsString();
- if (filename == null) {
- throw new ExecutionException("Mapper did not provide a file name");
- }
- else {
- return filename;
- }
- }
- else if (f == null) {
- throw new ExecutionException("Mapper failed to map " + var);
- }
- else {
- throw new ExecutionException("Only file formats are supported for now");
- }
- }
- }
-
- protected Object pathOnly(Object f) {
- if (f instanceof String[]) {
- return pathOnly((String[]) f);
- }
- else {
- return pathOnly((String) f);
- }
- }
-
- protected static String pathOnly(String file) {
- AbsFile af = new AbsFile(file);
- if ("file".equals(af.getProtocol())) {
- return af.getPath();
- }
- else {
- return af.getHost() + "/" + af.getPath();
- }
- }
-
- protected String[] pathOnly(String[] files) {
- String[] p = new String[files.length];
- for (int i = 0; i < files.length; i++) {
- p[i] = pathOnly(files[i]);
- }
- return p;
- }
-
- /**
- * Given an input of an array of strings, returns a single string with the
- * input strings separated by a space. If the 'relative' flag is set to
- * true, then each input string will be passed through the relativize
- * function.
- */
- public String argList(String[] s, boolean relative) {
- StringBuffer sb = new StringBuffer();
- for (int i = 0; i < s.length; i++) {
- if (relative) {
- s[i] = relativize(s[i]);
- }
- sb.append(s[i]);
- if (i < s.length - 1) {
- sb.append(' ');
- }
- }
- return sb.toString();
- }
-
- /**
- * removes leading / character from a supplied filename if present, so that
- * the path can be used as a relative path.
- */
- public static String relativize(String name) {
- name = pathOnly(name);
- if (name != null && name.length() > 0 && name.charAt(0) == '/') {
- return name.substring(1);
- }
- else {
- return name;
- }
- }
-
- protected static Map getLogData(VariableStack stack) throws ExecutionException {
- try {
- return (Map) stack.getDeepVar(RestartLog.LOG_DATA);
- }
- catch (VariableNotFoundException e) {
- throw new ExecutionException("No log data found. Missing restartLog()?");
- }
- }
-
- protected boolean compatible(Type expectedType, Type actualType) {
- if (expectedType.equals(Types.FLOAT)) {
- if (actualType.equals(Types.FLOAT) || actualType.equals(Types.INT)) {
- return true;
- }
- else {
- return false;
- }
- }
- else if (expectedType.equals(Types.FLOAT.arrayType())) {
- if (actualType.equals(Types.FLOAT.arrayType())
- || actualType.equals(Types.INT.arrayType())) {
- return true;
- }
- else {
- return false;
- }
- }
- else if (expectedType.equals(Types.ANY)) {
- return true;
- }
- else {
- return actualType.equals(expectedType);
- }
- }
-
- protected void closeChildren(VariableStack stack, AbstractDataNode handle) throws ExecutionException,
- InvalidPathException {
- // Close the future
- handle.closeShallow();
- // Mark all leaves
- for (DSHandle child : handle.getFields(Path.CHILDREN)) {
- child.closeShallow();
- }
- }
-
- public static AbstractDataNode[] waitForAllVargs(VariableStack stack) throws ExecutionException {
- AbstractDataNode[] args = SwiftArg.VARGS.asDataNodeArray(stack);
-
- for (int i = 0; i < args.length; i++) {
- args[i].waitFor();
- }
-
- return args;
- }
-
- public static Path parsePath(Object o, VariableStack stack) throws ExecutionException {
- if (o instanceof Path) {
- return (Path) o;
- }
- else {
- return Path.parse(TypeUtil.toString(o));
- }
- }
-
- private static Set warnset = new HashSet();
-
- protected TCEntry getTCE(TCCache tc, FQN fqn, BoundContact bc) {
- List l;
- try {
- l = tc.getTCEntries(fqn, bc.getHost(), TCType.INSTALLED);
- }
- catch (Exception e) {
- throw new KarajanRuntimeException(e);
- }
- if (l == null || l.isEmpty()) {
- return null;
- }
- if (l.size() > 1) {
- synchronized (warnset) {
- LinkedList wl = new LinkedList();
- wl.add(fqn);
- wl.add(bc);
- if (!warnset.contains(wl)) {
- logger.warn("Multiple entries found for " + fqn + " on " + bc
- + ". Using the first one");
- warnset.add(wl);
- }
- }
- }
- return (TCEntry) l.get(0);
- }
-
- public static final String TC = "vdl:TC";
-
- public static TCCache getTC(VariableStack stack) throws ExecutionException {
- synchronized (stack.firstFrame()) {
- TCCache tc = (TCCache) stack.firstFrame().getVar(TC);
- if (tc == null) {
- String prop = ConfigProperty.getProperty(VDL2ConfigProperties.TC_FILE, stack);
- Loader.debugText("TC", new java.io.File(prop));
- tc = new TCCache(File.getNonSingletonInstance(prop));
- stack.firstFrame().setVar(TC, tc);
- }
- return tc;
- }
- }
-
-
- private static int provenanceIDCount = 451000;
-
- public static synchronized int nextProvenanceID() {
- return provenanceIDCount++;
- }
-
- public static void logProvenanceResult(int id, DSHandle result,
- String name)
- throws ExecutionException {
- if (logger.isDebugEnabled())
- logger.debug("FUNCTION id="+id+" name="+name+" result="+result.getIdentifier());
- else if (logger.isInfoEnabled())
- logger.info("FUNCTION: " + name + "()");
- }
-
- public static void logProvenanceParameter(int id, DSHandle parameter, String paramName) throws ExecutionException {
- if (logger.isDebugEnabled())
- logger.debug("FUNCTIONPARAMETER id="+id+" input="+parameter.getIdentifier()+" name="+paramName);
- }
-}
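
Most of the deleted VDLFunction is superseded by the new SwiftFunction base class, but two of its small string helpers are worth restating: relativize() strips a leading '/' so a mapped file name can be used relative to the job work directory, and argList() joins file names with single spaces. A standalone sketch of just those two helpers (the pathOnly() protocol handling is elided):

final class PathUtil {
    private PathUtil() {}

    // Mirrors the deleted relativize(): drop a leading '/' so the path can be
    // used relative to a job work directory.
    static String relativize(String name) {
        if (name != null && name.length() > 0 && name.charAt(0) == '/') {
            return name.substring(1);
        }
        return name;
    }

    // Mirrors the deleted argList(): join file names with single spaces,
    // optionally relativizing each one first.
    static String argList(String[] names, boolean relative) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < names.length; i++) {
            sb.append(relative ? relativize(names[i]) : names[i]);
            if (i < names.length - 1) {
                sb.append(' ');
            }
        }
        return sb.toString();
    }
}
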
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/WaitFieldValue.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/WaitFieldValue.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/WaitFieldValue.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -17,35 +17,40 @@
package org.griphyn.vdl.karajan.lib;
-import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.InvalidPathException;
import org.griphyn.vdl.mapping.Path;
-public class WaitFieldValue extends VDLFunction {
- public static final Logger logger = Logger.getLogger(WaitFieldValue.class);
+public class WaitFieldValue extends SwiftFunction {
+ private ArgRef<AbstractDataNode> var;
+ private ArgRef<Object> path;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var", optional("path", Path.EMPTY_PATH)));
+ }
- static {
- setArguments(WaitFieldValue.class, new Arg[] { PA_VAR, OA_PATH });
- }
/**
* Takes a supplied variable and path, and returns the unique value at that
* path. Path can contain wildcards, in which case an array is returned.
*/
- public Object function(VariableStack stack) throws ExecutionException {
- AbstractDataNode var = (AbstractDataNode) PA_VAR.getValue(stack);
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode var = this.var.getValue(stack);
try {
- Path path = parsePath(OA_PATH.getValue(stack), stack);
+ Path path = parsePath(this.path.getValue(stack));
var = (AbstractDataNode) var.getField(path);
- var.waitFor();
+ var.waitFor(this);
return null;
}
catch (InvalidPathException e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/cache/CacheReturn.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/cache/CacheReturn.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/cache/CacheReturn.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -24,10 +24,10 @@
public class CacheReturn {
public final boolean alreadyCached;
- public final List remove;
+ public final List<?> remove;
public final File cached;
- public CacheReturn(boolean alreadyCached, List remove, File cached) {
+ public CacheReturn(boolean alreadyCached, List<?> remove, File cached) {
this.alreadyCached = alreadyCached;
this.remove = remove;
this.cached = cached;
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/cache/File.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/cache/File.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/cache/File.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,25 +20,17 @@
*/
package org.griphyn.vdl.karajan.lib.cache;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
+import k.rt.AbstractFuture;
+import k.rt.Future;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.events.EventBus;
-import org.globus.cog.karajan.workflow.futures.Future;
-import org.globus.cog.karajan.workflow.futures.FutureEvaluationException;
-import org.globus.cog.karajan.workflow.futures.FutureListener;
-import org.globus.cog.karajan.workflow.futures.ListenerStackPair;
+import org.globus.cog.karajan.futures.FutureEvaluationException;
-public class File implements Future {
+public class File extends AbstractFuture implements Future {
private String path;
private Object host;
private long size, lastAccess;
private int locked;
private boolean processingLock;
- private List<ListenerStackPair> listeners;
public File(String file, String dir, Object host, long size) {
if (dir.endsWith("/")) {
@@ -166,31 +158,6 @@
return processingLock;
}
- public void notifyListeners() {
- if (listeners != null) {
- Iterator<ListenerStackPair> i = listeners.iterator();
- while (i.hasNext()) {
- final ListenerStackPair etp = i.next();
- i.remove();
- EventBus.post(new Runnable() {
- public void run() {
- etp.listener.futureModified(File.this, etp.stack);
- }
- });
- }
- }
- }
-
- public synchronized void addModificationAction(FutureListener target, VariableStack stack) {
- if (listeners == null) {
- listeners = new LinkedList<ListenerStackPair>();
- }
- listeners.add(new ListenerStackPair(target, stack));
- if (isClosed()) {
- notifyListeners();
- }
- }
-
public void close() {
}
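
The cache File class above stops keeping its own ListenerStackPair list and EventBus posting and instead extends k.rt.AbstractFuture, which owns the listener bookkeeping. As a loose standalone analogue only (k.rt.AbstractFuture's API is not shown in this diff), the same simplification expressed with java.util.concurrent.CompletableFuture:

import java.util.concurrent.CompletableFuture;

// Illustrative only: delegating "closed" notification to a ready-made future
// instead of keeping a private listener list, in the spirit of extending
// k.rt.AbstractFuture.
class CachedFile {
    private final String path;
    private final CompletableFuture<Void> closed = new CompletableFuture<Void>();

    CachedFile(String path) {
        this.path = path;
    }

    String getPath() {
        return path;
    }

    void close() {
        closed.complete(null); // wakes every registered listener
    }

    boolean isClosed() {
        return closed.isDone();
    }

    void whenClosed(Runnable listener) {
        closed.thenRun(listener); // replaces addModificationAction + notifyListeners
    }
}
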
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Assert.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Assert.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Assert.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -18,11 +18,13 @@
package org.griphyn.vdl.karajan.lib.swiftscript;
// import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
import org.griphyn.vdl.karajan.AssertFailedException;
-import org.griphyn.vdl.karajan.lib.VDLFunction;
+import org.griphyn.vdl.karajan.lib.SwiftFunction;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.type.Types;
@@ -31,36 +33,29 @@
Throw AssertionException if input is false or 0.
Optional second argument is string message printed on failure.
*/
-public class Assert extends VDLFunction {
-
- // private static final Logger logger =
- // Logger.getLogger(Assert.class);
+public class Assert extends SwiftFunction {
+ private ArgRef<AbstractDataNode> value;
+ private ArgRef<AbstractDataNode> message;
- static {
- setArguments(Assert.class, new Arg[] { Arg.VARGS });
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("value", "message"));
}
-
+
@Override
- protected Object function(VariableStack stack)
- throws ExecutionException {
- AbstractDataNode[] args = waitForAllVargs(stack);
- String message = "";
+ public Object function(Stack stack) {
+ AbstractDataNode hmessage = this.message.getValue(stack);
+ hmessage.waitFor(this);
+ String message = (String) hmessage.getValue();
+ AbstractDataNode hvalue = this.value.getValue(stack);
+ hvalue.waitFor(this);
+
+ checkAssert(hvalue, message);
- if (args.length == 2)
- if (args[1].getType() == Types.STRING)
- message = (String) args[1].getValue();
- else
- throw new ExecutionException
- ("Second argument to assert() must be a String!");
-
- checkAssert(args[0], message);
-
return null;
}
- private void checkAssert(DSHandle value, String message)
- throws ExecutionException
- {
+ private void checkAssert(DSHandle value, String message) {
boolean success = true;
if (value.getType() == Types.BOOLEAN) {
if (! (Boolean) value.getValue())
@@ -71,10 +66,11 @@
if (d == 0)
success = false;
}
- else
- throw new ExecutionException
- ("First argument to assert() must be boolean or int!");
- if (! success)
+ else {
+ throw new ExecutionException(this, "First argument to assert() must be boolean or int!");
+ }
+ if (!success) {
throw new AssertFailedException(message);
+ }
}
}
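
Assert now declares value and message as named parameters instead of inspecting varargs at runtime, but the core rule in checkAssert() is unchanged: a boolean must be true and an int must be non-zero; anything else is a usage error. A standalone sketch of that rule in plain Java:

final class AssertCheck {
    private AssertCheck() {}

    // Mirrors checkAssert(): booleans must be true, integral values non-zero;
    // any other type is rejected outright.
    static void check(Object value, String message) {
        boolean success;
        if (value instanceof Boolean) {
            success = (Boolean) value;
        }
        else if (value instanceof Number) {
            success = ((Number) value).doubleValue() != 0;
        }
        else {
            throw new IllegalArgumentException(
                "First argument to assert() must be boolean or int!");
        }
        if (!success) {
            throw new AssertionError(message);
        }
    }
}
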
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/ExtractInt.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/ExtractInt.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/ExtractInt.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -22,26 +22,31 @@
import java.io.IOException;
import java.io.Reader;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.karajan.lib.VDLFunction;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.griphyn.vdl.karajan.lib.SwiftFunction;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.RootDataNode;
import org.griphyn.vdl.type.Types;
-public class ExtractInt extends VDLFunction {
- static {
- setArguments(ExtractInt.class, new Arg[] { PA_VAR });
- }
+public class ExtractInt extends SwiftFunction {
+ private ArgRef<AbstractDataNode> var;
- public Object function(VariableStack stack) throws ExecutionException {
- AbstractDataNode handle = null;
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode handle = this.var.getValue(stack);
try {
- handle = (AbstractDataNode) PA_VAR.getValue(stack);
- handle.waitFor();
+ handle.waitFor(this);
String fn = argList(filename(handle), true);
Reader freader = new FileReader(fn);
@@ -49,9 +54,9 @@
String str = breader.readLine();
freader.close();
DSHandle result = new RootDataNode(Types.INT, Integer.parseInt(str));
- int provid = VDLFunction.nextProvenanceID();
- VDLFunction.logProvenanceResult(provid, result, "extractint");
- VDLFunction.logProvenanceParameter(provid, handle, "filename");
+ int provid = SwiftFunction.nextProvenanceID();
+ SwiftFunction.logProvenanceResult(provid, result, "extractint");
+ SwiftFunction.logProvenanceParameter(provid, handle, "filename");
return result;
}
catch (IOException ioe) {
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/FileName.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/FileName.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/FileName.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -17,25 +17,35 @@
package org.griphyn.vdl.karajan.lib.swiftscript;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.karajan.lib.VDLFunction;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.griphyn.vdl.karajan.lib.SwiftFunction;
+import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.RootDataNode;
import org.griphyn.vdl.type.Types;
-public class FileName extends VDLFunction {
- static {
- setArguments(FileName.class, new Arg[] { PA_VAR });
- }
+public class FileName extends SwiftFunction {
+ private ArgRef<AbstractDataNode> var;
- public Object function(VariableStack stack) throws ExecutionException {
- String s = argList(filename(stack), true);
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode var = this.var.getValue(stack);
+ String s = argList(filename(var), true);
DSHandle result = new RootDataNode(Types.STRING, s);
- int provid = VDLFunction.nextProvenanceID();
- //VDLFunction.logProvenanceParameter(provid, (DSHandle) PA_VAR.getValue(stack), "input");
- //VDLFunction.logProvenanceResult(provid, result, "filename");
+ if (PROVENANCE_ENABLED) {
+ int provid = SwiftFunction.nextProvenanceID();
+ SwiftFunction.logProvenanceParameter(provid, var, "input");
+ SwiftFunction.logProvenanceResult(provid, result, "filename");
+ }
+
return result;
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/FileNames.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/FileNames.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/FileNames.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,28 +20,35 @@
*/
package org.griphyn.vdl.karajan.lib.swiftscript;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.karajan.lib.VDLFunction;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.griphyn.vdl.karajan.lib.SwiftFunction;
+import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.InvalidPathException;
import org.griphyn.vdl.mapping.Path;
-import org.griphyn.vdl.mapping.RootDataNode;
import org.griphyn.vdl.mapping.RootArrayDataNode;
import org.griphyn.vdl.type.Types;
-public class FileNames extends VDLFunction {
- static {
- setArguments(FileNames.class, new Arg[] { PA_VAR });
- }
+public class FileNames extends SwiftFunction {
+ private ArgRef<AbstractDataNode> var;
- public Object function(VariableStack stack) throws ExecutionException {
- String[] f = filename(stack);
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("var"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode var = this.var.getValue(stack);
+ String[] f = filename(var);
DSHandle returnArray = new RootArrayDataNode(Types.STRING.arrayType());
try {
for (int i = 0; i < f.length; i++) {
- Path p = parsePath("["+i+"]", stack);
+ Path p = parsePath("["+i+"]");
DSHandle h = returnArray.getField(p);
h.setValue(relativize(f[i]));
}
@@ -50,9 +57,11 @@
}
returnArray.closeShallow();
- int provid = VDLFunction.nextProvenanceID();
- logProvenanceParameter(provid, (DSHandle) PA_VAR.getValue(stack), "input");
- logProvenanceResult(provid, returnArray, "filenames");
+ if (PROVENANCE_ENABLED) {
+ int provid = SwiftFunction.nextProvenanceID();
+ logProvenanceParameter(provid, var, "input");
+ logProvenanceResult(provid, returnArray, "filenames");
+ }
return returnArray;
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/FnArg.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/FnArg.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/FnArg.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -21,78 +21,90 @@
package org.griphyn.vdl.karajan.lib.swiftscript;
import java.util.HashMap;
-import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionContext;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.functions.AbstractFunction;
-import org.griphyn.vdl.karajan.lib.SwiftArg;
-import org.griphyn.vdl.karajan.lib.VDLFunction;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Scope;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.analyzer.VarRef;
+import org.griphyn.vdl.karajan.lib.SwiftFunction;
+import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.RootDataNode;
import org.griphyn.vdl.type.Types;
-public class FnArg extends AbstractFunction {
- public static final String PARSED_ARGS = "cmdline:named";
+public class FnArg extends SwiftFunction {
+ private ArgRef<AbstractDataNode> name;
+ private ArgRef<AbstractDataNode> value;
+
+ private VarRef<Map<String, String>> parsedArgs;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("name", optional("value", null)));
+ }
- public static final SwiftArg P_NAME = new SwiftArg.Positional("name");
- public static final SwiftArg P_VALUE = new SwiftArg.Positional("value");
+ @Override
+ protected void addLocals(Scope scope) {
+ super.addLocals(scope);
+ parsedArgs = scope.getVarRef("SWIFT:PARSED_ARGS");
+ }
- static {
- setArguments(FnArg.class, new Arg[] { P_NAME, P_VALUE });
- }
-
- public Object function(VariableStack stack) throws ExecutionException {
- synchronized (stack.firstFrame()) {
- if (!stack.firstFrame().isDefined(PARSED_ARGS)) {
- List argv = (List) stack.firstFrame().getVar(ExecutionContext.CMDLINE_ARGS);
- Map named = new HashMap();
- Iterator i = argv.iterator();
- while (i.hasNext()) {
- String arg = (String) i.next();
- if (!arg.startsWith("-")) {
- continue;
- }
- int index = arg.indexOf('=');
- if (index == -1 || (arg.charAt(0) != '-')) {
- throw new ExecutionException("Invalid command line argument: " + arg);
- }
- else {
- String name = arg.substring(1, index);
- named.put(name, arg.substring(index + 1));
- }
- }
- stack.firstFrame().setVar(PARSED_ARGS, named);
- }
+ @Override
+ public Object function(Stack stack) {
+ Map<String, String> args = this.parsedArgs.getValue(stack);
+ AbstractDataNode hname = this.name.getValue(stack);
+ hname.waitFor(this);
+ AbstractDataNode hvalue = this.value.getValue(stack);
+ if (hvalue != null) {
+ hvalue.waitFor(this);
}
- Map args = (Map) stack.firstFrame().getVar(PARSED_ARGS);
- String name = TypeUtil.toString(P_NAME.getValue(stack));
+ String name = (String) hname.getValue();
name = name.trim();
if (name.startsWith("\"") && name.endsWith("\"")) {
name = name.substring(1, name.length() - 1);
- }
+ }
Object value = args.get(name);
- if (value == null && P_VALUE.isPresent(stack)) {
- value = P_VALUE.getValue(stack);
+ if (value == null && hvalue != null) {
+ value = hvalue.getValue();
}
if (value == null) {
- throw new ExecutionException(stack, "Missing command line argument: " + name);
+ throw new ExecutionException("Missing command line argument: " + name);
}
else {
- DSHandle result = new RootDataNode(Types.STRING, value);
- int provid=VDLFunction.nextProvenanceID();
- VDLFunction.logProvenanceResult(provid, result, "arg");
- VDLFunction.logProvenanceParameter(provid, P_NAME.getRawValue(stack), "name");
- if(P_VALUE.getRawValue(stack) != null) {
- VDLFunction.logProvenanceParameter(provid, P_VALUE.getRawValue(stack), "value");
+ DSHandle result = new RootDataNode(Types.STRING, value);
+ if (PROVENANCE_ENABLED) {
+ int provid = nextProvenanceID();
+ logProvenanceResult(provid, result, "arg");
+ logProvenanceParameter(provid, hname, "name");
+ if (hvalue != null) {
+ logProvenanceParameter(provid, hvalue, "value");
+ }
}
return result;
}
}
+
+ public static Map<String, String> parseArgs(List<String> argv) {
+ Map<String, String> named = new HashMap<String, String>();
+ for (String arg : argv) {
+ if (!arg.startsWith("-")) {
+ continue;
+ }
+ int index = arg.indexOf('=');
+ if (index == -1 || (arg.charAt(0) != '-')) {
+ throw new ExecutionException("Invalid command line argument: " + arg);
+ }
+ else {
+ String name = arg.substring(1, index);
+ named.put(name, arg.substring(index + 1));
+ }
+ }
+ return named;
+ }
}
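
FnArg now reads the already-parsed command line from the shared "SWIFT:PARSED_ARGS" variable, with the parsing factored into the static parseArgs() above: keep -name=value tokens, skip positionals, and reject a dash-prefixed token without '='. A short standalone usage sketch of the same rule (plain Java, illustrative class name):

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class ArgParseDemo {
    // Same rule as FnArg.parseArgs(): keep "-name=value" tokens, skip
    // positionals, reject a dash-prefixed token that has no '='.
    static Map<String, String> parse(List<String> argv) {
        Map<String, String> named = new HashMap<String, String>();
        for (String arg : argv) {
            if (!arg.startsWith("-")) {
                continue;
            }
            int index = arg.indexOf('=');
            if (index == -1) {
                throw new IllegalArgumentException("Invalid command line argument: " + arg);
            }
            named.put(arg.substring(1, index), arg.substring(index + 1));
        }
        return named;
    }

    public static void main(String[] args) {
        System.out.println(parse(Arrays.asList("-n=10", "input.txt", "-mode=fast")));
        // prints the two named entries, e.g. {mode=fast, n=10}
    }
}
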
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Fprintf.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Fprintf.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Fprintf.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -21,14 +21,16 @@
import java.io.IOException;
import java.util.concurrent.ConcurrentHashMap;
+import k.rt.Channel;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.karajan.lib.VDLFunction;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.griphyn.vdl.karajan.lib.SwiftFunction;
import org.griphyn.vdl.mapping.AbstractDataNode;
-import org.griphyn.vdl.mapping.DSHandle;
-import org.griphyn.vdl.type.Types;
/**
Formatted file output. <br>
@@ -37,31 +39,38 @@
@see Tracef, Sprintf
@author wozniak
*/
-public class Fprintf extends VDLFunction {
-
- private static final Logger logger =
- Logger.getLogger(Fprintf.class);
+public class Fprintf extends SwiftFunction {
+ private static final Logger logger = Logger.getLogger(Fprintf.class);
- static {
- setArguments(Fprintf.class, new Arg[] { Arg.VARGS });
+ private ArgRef<AbstractDataNode> filename;
+ private ArgRef<AbstractDataNode> spec;
+ private ChannelRef<AbstractDataNode> c_vargs;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("filename", "spec", "..."));
}
- static ConcurrentHashMap<String, Object> openFiles =
- new ConcurrentHashMap<String, Object>();
+ static ConcurrentHashMap<String, Object> openFiles = new ConcurrentHashMap<String, Object>();
@Override
- protected Object function(VariableStack stack)
- throws ExecutionException {
- AbstractDataNode[] args = waitForAllVargs(stack);
+ public Object function(Stack stack) {
+ AbstractDataNode hfilename = this.filename.getValue(stack);
+ AbstractDataNode hspec = this.spec.getValue(stack);
+ hfilename.waitFor(this);
+ hspec.waitFor(this);
+ Channel<AbstractDataNode> args = c_vargs.get(stack);
+ waitForAll(this, args);
+ String filename = (String) hfilename.getValue();
+ String spec = (String) hspec.getValue();
- check(args);
-
- String filename = (String) args[0].getValue();
- String spec = (String) args[1].getValue();
- DSHandle[] vars = Sprintf.copyArray(args, 2, args.length-2);
-
StringBuilder output = new StringBuilder();
- Sprintf.format(spec, vars, output);
+ try {
+ Sprintf.format(spec, args, output);
+ }
+ catch (RuntimeException e) {
+ throw new ExecutionException(this, e.getMessage());
+ }
String msg = output.toString();
logger.debug("file: " + filename + " msg: " + msg);
@@ -69,19 +78,6 @@
return null;
}
- private static void check(DSHandle[] args)
- throws ExecutionException {
- if (args.length < 2)
- throw new ExecutionException
- ("fprintf(): requires at least 2 arguments!");
- if (! args[0].getType().equals(Types.STRING))
- throw new ExecutionException
- ("fprintf(): first argument must be a string filename!");
- if (! args[0].getType().equals(Types.STRING))
- throw new ExecutionException
- ("fprintf(): second argument must be a string specifier!");
- }
-
private static void write(String filename, String msg)
throws ExecutionException {
acquire(filename);
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Java.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Java.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Java.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,29 +20,40 @@
import java.lang.reflect.Method;
import java.util.Arrays;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.karajan.lib.VDLFunction;
+import k.rt.Channel;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.griphyn.vdl.karajan.lib.SwiftFunction;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.RootDataNode;
import org.griphyn.vdl.type.Type;
import org.griphyn.vdl.type.Types;
-public class Java extends VDLFunction
-{
-
- static
- {
- setArguments(Java.class, new Arg[] { Arg.VARGS });
+public class Java extends SwiftFunction {
+ private ArgRef<AbstractDataNode> lib;
+ private ArgRef<AbstractDataNode> name;
+ private ChannelRef<AbstractDataNode> c_vargs;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("lib", "name", "..."));
}
- protected Object function(VariableStack stack) throws ExecutionException
- {
- AbstractDataNode[] args = waitForAllVargs(stack);
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode hlib = this.lib.getValue(stack);
+ AbstractDataNode hname = this.name.getValue(stack);
+ Channel<AbstractDataNode> args = this.c_vargs.get(stack);
+ hlib.waitFor(this);
+ hname.waitFor(this);
+ waitForAll(this, args);
- Method method = getMethod(args);
+ Method method = getMethod((String) hlib.getValue(), (String) hname.getValue(), args);
Object[] p = convertInputs(method, args);
Type type = returnType(method);
Object value = invoke(method, p);
@@ -55,30 +66,18 @@
Given the user args, locate the Java Method.
*/
- Method getMethod(DSHandle[] args)
- {
+ private Method getMethod(String lib, String name, Channel<AbstractDataNode> args) {
Method result;
Class<?> clazz;
- String lib = "unset";
- String name = "unset";
+ Class<?>[] parameterTypes = new Class[args.size()];
- Class[] parameterTypes = new Class[args.length-2];
-
- if (args.length < 2)
- throw new RuntimeException
- ("@java() requires at least two arguments");
-
- try
- {
- lib = (String) args[0].getValue();
- name = (String) args[1].getValue();
+ try {
clazz = Class.forName(lib);
- for (int i = 2; i < args.length; i++)
- {
- Class p = null;
- Type t = args[i].getType();
+ for (int i = 0; i < args.size(); i++) {
+ Class<?> p = null;
+ Type t = args.get(i).getType();
if (t.equals(Types.FLOAT)) p = double.class;
else if (t.equals(Types.INT)) p = int.class;
@@ -86,20 +85,19 @@
else if (t.equals(Types.STRING)) p = String.class;
else throw new RuntimeException("Cannot use @java with non-primitive types");
- parameterTypes[i-2] = p;
+ parameterTypes[i] = p;
}
result = clazz.getMethod(name, parameterTypes);
}
- catch (Exception e)
- {
+ catch (Exception e) {
e.printStackTrace();
- throw new RuntimeException
- ("@java(): Error attempting to use: " + args[0].getValue());
+ throw new ExecutionException(this, "@java(): Error attempting to use: " + lib);
}
if (result == null)
- throw new RuntimeException
- ("No method: " + name + " in " + lib + "with parameter types" + Arrays.toString(parameterTypes));
+ throw new ExecutionException(this, "No method: "
+                + name + " in " + lib + " with parameter types "
+ + Arrays.toString(parameterTypes));
return result;
}
@@ -107,34 +105,12 @@
/**
Convert the user args to a Java Object array.
*/
- Object[] convertInputs(Method method, DSHandle[] args)
- {
- Object[] result = new Object[args.length-2];
+ private Object[] convertInputs(Method method, Channel<AbstractDataNode> args) {
+ Object[] result = new Object[args.size()];
Object a = null;
- try
- {
- for (int i = 2; i < args.length; i++)
- {
- Type t = args[i].getType();
- Object v = args[i].getValue();
- if (t.equals(Types.FLOAT))
- a = (Double) v;
- else if (t.equals(Types.INT))
- a = (Integer) v;
- else if (t.equals(Types.BOOLEAN))
- a = (Boolean) v;
- else if (t.equals(Types.STRING))
- a = (String) v;
- result[i-2] = a;
- }
+ for (int i = 0; i < args.size(); i++) {
+ result[i] = args.get(i).getValue();
}
- catch (Exception e) {
- e.printStackTrace();
- throw new RuntimeException
- ("Error converting input arguments: \n" +
- " to: " + method.getDeclaringClass() +
- "." + method + " \n argument: " + a);
- }
return result;
}
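
The heavy lifting in the new Java builtin is still plain java.lang.reflect: build a Class<?>[] of primitive parameter types, look the method up by name, and invoke it with boxed values. A self-contained example of the same pattern, using java.lang.Math purely for illustration:

    import java.lang.reflect.Method;

    // Illustration of the lookup in getMethod() and the call in invoke():
    // primitive parameter types (int.class, not Integer.class) select the
    // overload, while the actual arguments are passed boxed.
    public class ReflectCallDemo {
        public static void main(String[] args) throws Exception {
            Class<?> clazz = Class.forName("java.lang.Math");
            Method max = clazz.getMethod("max", new Class<?>[] { int.class, int.class });
            Object result = max.invoke(null, new Object[] { Integer.valueOf(3), Integer.valueOf(7) });
            System.out.println(result); // prints 7
        }
    }
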
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Misc.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Misc.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Misc.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -22,14 +22,19 @@
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import k.rt.Channel;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+import k.thr.LWThread;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.globus.cog.karajan.compiled.nodes.InternalFunction;
+import org.globus.cog.karajan.compiled.nodes.functions.AbstractFunction;
import org.globus.cog.karajan.util.TypeUtil;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.nodes.functions.FunctionsCollection;
-import org.griphyn.vdl.karajan.lib.SwiftArg;
-import org.griphyn.vdl.karajan.lib.VDLFunction;
+import org.griphyn.vdl.karajan.lib.SwiftFunction;
import org.griphyn.vdl.mapping.AbsFile;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
@@ -40,424 +45,571 @@
import org.griphyn.vdl.type.Types;
import org.griphyn.vdl.util.VDL2Config;
-public class Misc extends FunctionsCollection {
+public class Misc {
private static final Logger logger = Logger.getLogger(Misc.class);
-
- public static final SwiftArg PA_INPUT = new SwiftArg.Positional("input");
- public static final SwiftArg PA_PATTERN = new SwiftArg.Positional("regexp");
- public static final SwiftArg PA_TRANSFORM = new SwiftArg.Positional("transform");
- public static final SwiftArg PA_FILE = new SwiftArg.Positional("file");
- public static final SwiftArg PA_ARRAY = new SwiftArg.Positional("array");
-
+
+ public static final boolean PROVENANCE_ENABLED;
+
static {
- setArguments("swiftscript_dirname", new Arg[] { PA_FILE });
- setArguments("swiftscript_exists", new Arg[] { Arg.VARGS });
- setArguments("swiftscript_existsfile", new Arg[] { PA_FILE });
- setArguments("swiftscript_format", new Arg[] { PA_INPUT, PA_TRANSFORM });
- setArguments("swiftscript_length", new Arg[] { PA_ARRAY });
- setArguments("swiftscript_pad", new Arg[] { PA_INPUT, PA_TRANSFORM });
- setArguments("swiftscript_regexp", new Arg[] { PA_INPUT, PA_PATTERN, PA_TRANSFORM });
- setArguments("swiftscript_strcat", new Arg[] { Arg.VARGS });
- setArguments("swiftscript_strcut", new Arg[] { PA_INPUT, PA_PATTERN });
- setArguments("swiftscript_strsplit", new Arg[] { PA_INPUT, PA_PATTERN });
- setArguments("swiftscript_strjoin", new Arg[] { PA_ARRAY, PA_INPUT });
- setArguments("swiftscript_strstr", new Arg[] { PA_INPUT, PA_PATTERN });
- setArguments("swiftscript_trace", new Arg[] { Arg.VARGS });
- setArguments("swiftscript_to_int", new Arg[] { PA_INPUT });
- setArguments("swiftscript_toint", new Arg[] { PA_INPUT });
- setArguments("swiftscript_to_float", new Arg[] { PA_INPUT });
- setArguments("swiftscript_tofloat", new Arg[] { PA_INPUT });
- setArguments("swiftscript_to_string", new Arg[] { PA_INPUT });
- setArguments("swiftscript_tostring", new Arg[] { PA_INPUT });
+ boolean v;
+ try {
+ v = VDL2Config.getConfig().getProvenanceLog();
+ }
+ catch (IOException e) {
+ v = false;
+ }
+ PROVENANCE_ENABLED = v;
}
- private static final Logger traceLogger =
- Logger.getLogger("org.globus.swift.trace");
- public DSHandle swiftscript_trace(VariableStack stack)
- throws ExecutionException {
+ private static final Logger traceLogger = Logger.getLogger("org.globus.swift.trace");
+
+ public static class Trace extends InternalFunction {
+ private ChannelRef<AbstractDataNode> c_vargs;
- AbstractDataNode[] args = VDLFunction.waitForAllVargs(stack);
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("..."));
+ }
- StringBuilder buf = new StringBuilder();
- buf.append("SwiftScript trace: ");
- for (int i = 0; i < args.length; i++) {
- DSHandle handle = args[i];
- if (i != 0) {
- buf.append(", ");
- }
- Object v = args[i].getValue();
- //buf.append(v == null ? args[i] : v);
- prettyPrint(buf, args[i]);
- }
- traceLogger.warn(buf);
- return null;
+ @Override
+ protected void runBody(LWThread thr) {
+ Channel<AbstractDataNode> vargs = c_vargs.get(thr.getStack());
+ SwiftFunction.waitForAll(this, vargs);
+
+ StringBuilder buf = new StringBuilder();
+ buf.append("SwiftScript trace: ");
+ boolean first = true;
+ for (AbstractDataNode n : vargs) {
+ if (!first) {
+ buf.append(", ");
+ }
+ else {
+ first = false;
+ }
+ //buf.append(v == null ? args[i] : v);
+ prettyPrint(buf, n);
+ }
+ traceLogger.warn(buf);
+ }
}
-
- private void prettyPrint(StringBuilder buf, DSHandle h) {
- Object o = h.getValue();
- if (o == null) {
- buf.append(h);
- }
- else {
- if (h.getType().isPrimitive()) {
- if (h.getType().equals(Types.INT)) {
- buf.append(((Number) o).intValue());
- }
- else {
- buf.append(o);
- }
- }
- else if (h.getType().isArray()) {
- buf.append('{');
- boolean first = true;
- for (Map.Entry<Comparable<?>, DSHandle> e : h.getArrayValue().entrySet()) {
- if (first) {
- first = false;
- }
- else {
- buf.append(", ");
- }
- buf.append(e.getKey());
- buf.append(" = ");
- prettyPrint(buf, e.getValue());
- }
- buf.append('}');
- }
- else {
- buf.append(h);
- }
- }
+
+ private static void prettyPrint(StringBuilder buf, DSHandle h) {
+ Object o = h.getValue();
+ if (o == null) {
+ buf.append(h);
+ }
+ else {
+ if (h.getType().isPrimitive()) {
+ if (h.getType().equals(Types.INT)) {
+ buf.append(((Number) o).intValue());
+ }
+ else {
+ buf.append(o);
+ }
+ }
+ else if (h.getType().isArray()) {
+ buf.append('{');
+ boolean first = true;
+ for (Map.Entry<Comparable<?>, DSHandle> e : h.getArrayValue().entrySet()) {
+ if (first) {
+ first = false;
+ }
+ else {
+ buf.append(", ");
+ }
+ buf.append(e.getKey());
+ buf.append(" = ");
+ prettyPrint(buf, e.getValue());
+ }
+ buf.append('}');
+ }
+ else {
+ buf.append(h);
+ }
+ }
}
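+
+    // (See the standalone analogue below for the output format produced by
+    // prettyPrint over nested arrays.)

prettyPrint() renders primitive handles via their values and arrays as {key = value, ...}, recursing into nested elements. A standalone analogue over plain java.util.Map values (no DSHandles involved) shows the resulting format:

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Simplified analogue of prettyPrint(): maps stand in for array nodes,
    // everything else is appended via toString().
    public class PrettyPrintDemo {
        static void prettyPrint(StringBuilder buf, Object o) {
            if (o instanceof Map) {
                buf.append('{');
                boolean first = true;
                for (Map.Entry<?, ?> e : ((Map<?, ?>) o).entrySet()) {
                    if (first) {
                        first = false;
                    }
                    else {
                        buf.append(", ");
                    }
                    buf.append(e.getKey()).append(" = ");
                    prettyPrint(buf, e.getValue());
                }
                buf.append('}');
            }
            else {
                buf.append(o);
            }
        }

        public static void main(String[] args) {
            Map<Object, Object> inner = new LinkedHashMap<Object, Object>();
            inner.put(0, "a");
            inner.put(1, "b");
            Map<Object, Object> outer = new LinkedHashMap<Object, Object>();
            outer.put("letters", inner);
            outer.put("n", 42);
            StringBuilder buf = new StringBuilder();
            prettyPrint(buf, outer);
            System.out.println(buf); // {letters = {0 = a, 1 = b}, n = 42}
        }
    }
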
+
+ public static class StrCat extends AbstractFunction {
+ private ChannelRef<AbstractDataNode> c_vargs;
- public DSHandle swiftscript_strcat(VariableStack stack) throws ExecutionException {
- Object[] args = SwiftArg.VARGS.asArray(stack);
- int provid = VDLFunction.nextProvenanceID();
- StringBuffer buf = new StringBuffer();
-
- for (int i = 0; i < args.length; i++) {
- buf.append(TypeUtil.toString(args[i]));
- }
-
- DSHandle handle = new RootDataNode(Types.STRING, buf.toString());
-
- try {
- if(VDL2Config.getConfig().getProvenanceLog()) {
- DSHandle[] provArgs = SwiftArg.VARGS.asDSHandleArray(stack);
- for (int i = 0; i < provArgs.length; i++) {
- VDLFunction.logProvenanceParameter(provid, provArgs[i], ""+i);
- }
- VDLFunction.logProvenanceResult(provid, handle, "strcat");
- }
- } catch(IOException ioe) {
- throw new ExecutionException("When logging provenance for strcat", ioe);
- }
- return handle;
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("..."));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ Channel<AbstractDataNode> vargs = c_vargs.get(stack);
+ Channel<Object> args = SwiftFunction.unwrapAll(this, vargs);
+
+ StringBuffer buf = new StringBuffer();
+
+ for (Object o : args) {
+ buf.append(TypeUtil.toString(o));
+ }
+
+ DSHandle handle = new RootDataNode(Types.STRING, buf.toString());
+
+ if (PROVENANCE_ENABLED) {
+ int provid = SwiftFunction.nextProvenanceID();
+ int index = 0;
+ for (AbstractDataNode dn : vargs) {
+ SwiftFunction.logProvenanceParameter(provid, dn, String.valueOf(index++));
+ }
+ SwiftFunction.logProvenanceResult(provid, handle, "strcat");
+ }
+ return handle;
+ }
}
+
+ public static class Exists extends AbstractFunction {
+ private ArgRef<AbstractDataNode> file;
- public DSHandle swiftscript_exists(VariableStack stack)
- throws ExecutionException {
- logger.debug(stack);
- Object[] args = SwiftArg.VARGS.asArray(stack);
- int provid = VDLFunction.nextProvenanceID();
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("file"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode dn = file.getValue(stack);
+ String filename = SwiftFunction.unwrap(this, dn);
- if (args.length != 1)
- throw new ExecutionException
- ("Wrong number of arguments to @exists()");
+ AbsFile file = new AbsFile(filename);
+ if (logger.isDebugEnabled()) {
+ logger.debug("exists: " + file);
+ }
+ DSHandle handle = new RootDataNode(Types.BOOLEAN, file.exists());
+
+ if (PROVENANCE_ENABLED) {
+ int provid = SwiftFunction.nextProvenanceID();
+ SwiftFunction.logProvenanceParameter(provid, dn, "file");
+ SwiftFunction.logProvenanceResult(provid, handle, "exists");
+ }
+
+ return handle;
+ }
+ }
+
+ public static class StrCut extends AbstractFunction {
+ private ArgRef<AbstractDataNode> input;
+ private ArgRef<AbstractDataNode> pattern;
- String filename = TypeUtil.toString(args[0]);
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("input", "pattern"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode hinput = this.input.getValue(stack);
+ String input = SwiftFunction.unwrap(this, hinput);
+ AbstractDataNode hpattern = this.pattern.getValue(stack);
+ String pattern = SwiftFunction.unwrap(this, hpattern);
- AbsFile file = new AbsFile(filename);
- logger.debug("exists: " + file);
- DSHandle handle = new RootDataNode(Types.BOOLEAN, file.exists());
+ if (logger.isDebugEnabled()) {
+ logger.debug("strcut will match '" + input + "' with pattern '" + pattern + "'");
+ }
- try {
- if(VDL2Config.getConfig().getProvenanceLog()) {
- DSHandle[] provArgs =
- SwiftArg.VARGS.asDSHandleArray(stack);
- for (int i = 0; i < provArgs.length; i++) {
- VDLFunction.logProvenanceParameter
- (provid, provArgs[i], ""+i);
- }
- VDLFunction.logProvenanceResult
- (provid, handle, "exists");
- }
- } catch (IOException ioe) {
- throw new ExecutionException
- ("When logging provenance for exists",
- ioe);
- }
-
- return handle;
+ String group;
+ try {
+ Pattern p = Pattern.compile(pattern);
+ // TODO probably should memoize this?
+
+ Matcher m = p.matcher(input);
+ m.find();
+ group = m.group(1);
+ }
+ catch (IllegalStateException e) {
+ throw new ExecutionException("@strcut could not match pattern " + pattern
+ + " against string " + input, e);
+ }
+ if (logger.isDebugEnabled()) {
+ logger.debug("strcut matched '" + group + "'");
+ }
+ DSHandle handle = new RootDataNode(Types.STRING, group);
+
+ if (PROVENANCE_ENABLED) {
+ int provid = SwiftFunction.nextProvenanceID();
+ SwiftFunction.logProvenanceResult(provid, handle, "strcut");
+ SwiftFunction.logProvenanceParameter(provid, hinput, "input");
+ SwiftFunction.logProvenanceParameter(provid, hpattern, "pattern");
+ }
+ return handle;
+ }
}
+
+ public static class StrStr extends AbstractFunction {
+ private ArgRef<AbstractDataNode> input;
+ private ArgRef<AbstractDataNode> pattern;
- public DSHandle swiftscript_strcut(VariableStack stack)
- throws ExecutionException {
- int provid = VDLFunction.nextProvenanceID();
- String inputString = TypeUtil.toString(PA_INPUT.getValue(stack));
- String pattern = TypeUtil.toString(PA_PATTERN.getValue(stack));
- if (logger.isDebugEnabled()) {
- logger.debug("strcut will match '" + inputString + "' with pattern '" + pattern + "'");
- }
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("input", "pattern"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode hinput = this.input.getValue(stack);
+ String input = SwiftFunction.unwrap(this, hinput);
+ AbstractDataNode hpattern = this.pattern.getValue(stack);
+ String pattern = SwiftFunction.unwrap(this, hpattern);
- String group;
- try {
- Pattern p = Pattern.compile(pattern);
- // TODO probably should memoize this?
+ if (logger.isDebugEnabled()) {
+ logger.debug("strstr will search '" + input + "' for pattern '" + pattern + "'");
+ }
+
+ DSHandle result = new RootDataNode(Types.INT, input.indexOf(pattern));
- Matcher m = p.matcher(inputString);
- m.find();
- group = m.group(1);
- }
- catch (IllegalStateException e) {
- throw new ExecutionException("@strcut could not match pattern " + pattern
- + " against string " + inputString, e);
- }
- if (logger.isDebugEnabled()) {
- logger.debug("strcut matched '" + group + "'");
- }
- DSHandle handle = new RootDataNode(Types.STRING, group);
-
- VDLFunction.logProvenanceResult(provid, handle, "strcut");
- VDLFunction.logProvenanceParameter(provid, PA_INPUT.getRawValue(stack), "input");
- VDLFunction.logProvenanceParameter(provid, PA_PATTERN.getRawValue(stack), "pattern");
- return handle;
- }
-
- public DSHandle swiftscript_strstr(VariableStack stack)
- throws ExecutionException {
- String inputString = TypeUtil.toString(PA_INPUT.getValue(stack));
- String pattern = TypeUtil.toString(PA_PATTERN.getValue(stack));
- if (logger.isDebugEnabled()) {
- logger.debug("strstr will search '" + inputString +
- "' for pattern '" + pattern + "'");
+
+ if (PROVENANCE_ENABLED) {
+ int provid = SwiftFunction.nextProvenanceID();
+ SwiftFunction.logProvenanceResult(provid, result, "strstr");
+ SwiftFunction.logProvenanceParameter(provid, hinput, "input");
+ SwiftFunction.logProvenanceParameter(provid, hpattern, "pattern");
+ }
+ return result;
}
- int result = inputString.indexOf(pattern);
- return new RootDataNode(Types.INT, result);
}
+
+ public static class StrSplit extends AbstractFunction {
+ private ArgRef<AbstractDataNode> input;
+ private ArgRef<AbstractDataNode> pattern;
- public DSHandle swiftscript_strsplit(VariableStack stack)
- throws ExecutionException, InvalidPathException {
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("input", "pattern"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode hinput = this.input.getValue(stack);
+ String input = SwiftFunction.unwrap(this, hinput);
+ AbstractDataNode hpattern = this.pattern.getValue(stack);
+ String pattern = SwiftFunction.unwrap(this, hpattern);
- String str = TypeUtil.toString(PA_INPUT.getValue(stack));
- String pattern = TypeUtil.toString(PA_PATTERN.getValue(stack));
+ String[] split = input.split(pattern);
- String[] split = str.split(pattern);
-
- DSHandle handle = new RootArrayDataNode(Types.STRING.arrayType());
- for (int i = 0; i < split.length; i++) {
- DSHandle el = handle.getField(Path.EMPTY_PATH.addFirst(i, true));
- el.setValue(split[i]);
- }
- handle.closeDeep();
- int provid=VDLFunction.nextProvenanceID();
- VDLFunction.logProvenanceResult(provid, handle, "strsplit");
- VDLFunction.logProvenanceParameter(provid, PA_INPUT.getRawValue(stack), "input");
- VDLFunction.logProvenanceParameter(provid, PA_PATTERN.getRawValue(stack), "pattern");
- return handle;
- }
+ DSHandle handle = new RootArrayDataNode(Types.STRING.arrayType());
+ for (int i = 0; i < split.length; i++) {
+ DSHandle el;
+ try {
+ el = handle.getField(Path.EMPTY_PATH.addFirst(i, true));
+ el.setValue(split[i]);
+ }
+ catch (InvalidPathException e) {
+ throw new ExecutionException(this, e);
+ }
+ }
+ handle.closeDeep();
+
+ if (PROVENANCE_ENABLED) {
+ int provid = SwiftFunction.nextProvenanceID();
+ SwiftFunction.logProvenanceResult(provid, handle, "strsplit");
+ SwiftFunction.logProvenanceParameter(provid, hinput, "input");
+ SwiftFunction.logProvenanceParameter(provid, hpattern, "pattern");
+ }
+ return handle;
+ }
+ }
/**
- * swiftscript_strjoin (@strjoin) - Combine elements of an array into a single string with a specified delimiter
+ * StrJoin (@strjoin) - Combine elements of an array into a single string with a specified delimiter
* @param stack
* @return DSHandle representing the resulting string
* @throws ExecutionException
*/
- public DSHandle swiftscript_strjoin(VariableStack stack) throws ExecutionException
- {
- AbstractDataNode array = (AbstractDataNode) PA_ARRAY.getRawValue(stack);
- String delim = TypeUtil.toString(PA_INPUT.getValue(stack));
- String result = "";
+ public static class StrJoin extends AbstractFunction {
+ private ArgRef<AbstractDataNode> array;
+ private ArgRef<AbstractDataNode> delim;
- array.waitFor();
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("array", optional("delim", new RootDataNode(Types.STRING, ", "))));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode harray = this.array.getValue(stack);
+ Map<Comparable<?>, DSHandle> arrayValues = SwiftFunction.waitForArray(this, harray);
+ AbstractDataNode hdelim = this.delim.getValue(stack);
+ String delim = SwiftFunction.unwrap(this, hdelim);
- Map<?, ?> arrayValues = array.getArrayValue();
- for (Object value : arrayValues.values()) {
- if (result == "") { result += ((DSHandle) value).getValue(); }
- else { result += delim + ((DSHandle) value).getValue(); }
+ StringBuilder result = new StringBuilder();
+
+ boolean first = true;
+ for (DSHandle h : arrayValues.values()) {
+ if (first) {
+ first = false;
+ }
+ else {
+ result.append(delim);
+ }
+ result.append(h.getValue());
+ }
+
+ DSHandle handle = new RootDataNode(Types.STRING, result.toString());
+
+ if (PROVENANCE_ENABLED) {
+ int provid = SwiftFunction.nextProvenanceID();
+ SwiftFunction.logProvenanceResult(provid, handle, "strjoin");
+ SwiftFunction.logProvenanceParameter(provid, harray, "array");
+ SwiftFunction.logProvenanceParameter(provid, hdelim, "delim");
+ }
+ return handle;
}
-
- DSHandle handle = new RootDataNode(Types.STRING, result);
- return handle;
}
+
+ public static class Regexp extends AbstractFunction {
+ private ArgRef<AbstractDataNode> input;
+ private ArgRef<AbstractDataNode> pattern;
+ private ArgRef<AbstractDataNode> transform;
- public DSHandle swiftscript_regexp(VariableStack stack)
- throws ExecutionException {
- String inputString = TypeUtil.toString(PA_INPUT.getValue(stack));
- String pattern = TypeUtil.toString(PA_PATTERN.getValue(stack));
- String transform = TypeUtil.toString(PA_TRANSFORM.getValue(stack));
- if (logger.isDebugEnabled()) {
- logger.debug("regexp will match '" + inputString + "' with pattern '" + pattern + "'");
- }
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("input", "pattern", "transform"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode hinput = this.input.getValue(stack);
+ String input = SwiftFunction.unwrap(this, hinput);
+ AbstractDataNode hpattern = this.pattern.getValue(stack);
+ String pattern = SwiftFunction.unwrap(this, hpattern);
+ AbstractDataNode htransform = this.transform.getValue(stack);
+ String transform = SwiftFunction.unwrap(this, htransform);
- String group;
- try {
- Pattern p = Pattern.compile(pattern);
- // TODO probably should memoize this?
+ if (logger.isDebugEnabled()) {
+ logger.debug("regexp will match '" + input + "' with pattern '" + pattern + "'");
+ }
+
+ String group;
+ try {
+ Pattern p = Pattern.compile(pattern);
+
+ Matcher m = p.matcher(input);
+ m.find();
+ group = m.replaceFirst(transform);
+ }
+ catch (IllegalStateException e) {
+ throw new ExecutionException("@regexp could not match pattern " + pattern
+ + " against string " + input, e);
+ }
+ if (logger.isDebugEnabled()) {
+ logger.debug("regexp replacement produced '" + group + "'");
+ }
+ DSHandle handle = new RootDataNode(Types.STRING);
+ handle.setValue(group);
+ handle.closeShallow();
- Matcher m = p.matcher(inputString);
- m.find();
- group = m.replaceFirst(transform);
- }
- catch (IllegalStateException e) {
- throw new ExecutionException("@regexp could not match pattern " + pattern
- + " against string " + inputString, e);
- }
- if (logger.isDebugEnabled()) {
- logger.debug("regexp replacement produced '" + group + "'");
- }
- DSHandle handle = new RootDataNode(Types.STRING);
- handle.setValue(group);
- handle.closeShallow();
+ if (PROVENANCE_ENABLED) {
+ int provid = SwiftFunction.nextProvenanceID();
+ SwiftFunction.logProvenanceResult(provid, handle, "regexp");
+ SwiftFunction.logProvenanceParameter(provid, hinput, "input");
+ SwiftFunction.logProvenanceParameter(provid, hpattern, "pattern");
+ SwiftFunction.logProvenanceParameter(provid, htransform, "transform");
+ }
+ return handle;
+ }
+ }
+
+ public static class ToInt extends AbstractFunction {
+ private ArgRef<AbstractDataNode> str;
- int provid=VDLFunction.nextProvenanceID();
- VDLFunction.logProvenanceResult(provid, handle, "regexp");
- VDLFunction.logProvenanceParameter(provid, PA_INPUT.getRawValue(stack), "input");
- VDLFunction.logProvenanceParameter(provid, PA_PATTERN.getRawValue(stack), "pattern");
- VDLFunction.logProvenanceParameter(provid, PA_TRANSFORM.getRawValue(stack), "transform");
- return handle;
- }
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("str"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode hstr = str.getValue(stack);
+ String str = SwiftFunction.unwrap(this, hstr);
+
+ DSHandle handle = new RootDataNode(Types.INT, Integer.valueOf(str));
- public DSHandle swiftscript_toint(VariableStack stack)
- throws ExecutionException {
- return swiftscript_to_int(stack);
- }
+ if (PROVENANCE_ENABLED) {
+ int provid = SwiftFunction.nextProvenanceID();
+ SwiftFunction.logProvenanceParameter(provid, hstr, "str");
+ SwiftFunction.logProvenanceResult(provid, handle, "toint");
+ }
+
+ return handle;
+ }
+ }
- public DSHandle swiftscript_to_int(VariableStack stack)
- throws ExecutionException {
- String inputString = TypeUtil.toString(PA_INPUT.getValue(stack));
-
- DSHandle handle = new RootDataNode(Types.INT, new Double(inputString).intValue());
+ public static class ToFloat extends AbstractFunction {
+ private ArgRef<AbstractDataNode> str;
- int provid=VDLFunction.nextProvenanceID();
- VDLFunction.logProvenanceResult(provid, handle, "toint");
- VDLFunction.logProvenanceParameter(provid, PA_INPUT.getRawValue(stack), "string");
- return handle;
- }
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("str"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode hstr = str.getValue(stack);
+ String str = SwiftFunction.unwrap(this, hstr);
+
+ DSHandle handle = new RootDataNode(Types.FLOAT, Double.valueOf(str));
- public DSHandle swiftscript_tofloat(VariableStack stack)
- throws ExecutionException {
- return swiftscript_to_float(stack);
- }
+ if (PROVENANCE_ENABLED) {
+ int provid = SwiftFunction.nextProvenanceID();
+ SwiftFunction.logProvenanceParameter(provid, hstr, "str");
+ SwiftFunction.logProvenanceResult(provid, handle, "tofloat");
+ }
+
+ return handle;
+ }
+ }
- public DSHandle swiftscript_to_float(VariableStack stack)
- throws ExecutionException {
- String inputString = TypeUtil.toString(PA_INPUT.getValue(stack));
- DSHandle handle = new RootDataNode(Types.FLOAT);
-
- try
- {
- handle.setValue(new Double(inputString));
- }
- catch(NumberFormatException e)
- {
- throw new ExecutionException(stack, "Could not convert value \""+inputString+"\" to type float");
- }
- handle.closeShallow();
- int provid=VDLFunction.nextProvenanceID();
- VDLFunction.logProvenanceResult(provid, handle, "tofloat");
- VDLFunction.logProvenanceParameter(provid, PA_INPUT.getRawValue(stack), "string");
- return handle;
- }
-
/*
* Takes in a float and formats to desired precision and returns a string
*/
- public DSHandle swiftscript_format(VariableStack stack)
- throws ExecutionException {
- String inputString = TypeUtil.toString(PA_INPUT.getValue(stack));
- String inputFormat = TypeUtil.toString(PA_TRANSFORM.getValue(stack));
- DSHandle handle = new RootDataNode(Types.STRING);
+ public static class Format extends AbstractFunction {
+ private ArgRef<AbstractDataNode> format;
+ private ArgRef<AbstractDataNode> value;
- String output = String.format("%."+inputFormat+"f", Double.parseDouble(inputString));
- handle.setValue(output);
- handle.closeShallow();
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("format", "value"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode hformat = this.format.getValue(stack);
+ String format = SwiftFunction.unwrap(this, hformat);
+ AbstractDataNode hvalue = this.value.getValue(stack);
+ Double value = SwiftFunction.unwrap(this, hvalue);
+
+ DSHandle handle = new RootDataNode(Types.STRING,
+ String.format("%." + format + "f", value));
- int provid=VDLFunction.nextProvenanceID();
- VDLFunction.logProvenanceResult(provid, handle, "format");
- VDLFunction.logProvenanceParameter(provid, PA_INPUT.getRawValue(stack), "float");
- VDLFunction.logProvenanceParameter(provid, PA_TRANSFORM.getRawValue(stack), "float");
- return handle;
- }
-
+ if (PROVENANCE_ENABLED) {
+ int provid = SwiftFunction.nextProvenanceID();
+ SwiftFunction.logProvenanceResult(provid, handle, "format");
+ SwiftFunction.logProvenanceParameter(provid, hformat, "format");
+ SwiftFunction.logProvenanceParameter(provid, hvalue, "value");
+ }
+ return handle;
+ }
+ }
+
/*
* Takes in an int and pads zeros to the left and returns a string
*/
- public DSHandle swiftscript_pad(VariableStack stack)
- throws ExecutionException {
- String inputString = TypeUtil.toString(PA_INPUT.getValue(stack));
- String inputFormat = TypeUtil.toString(PA_TRANSFORM.getValue(stack));
- DSHandle handle = new RootDataNode(Types.STRING);
+ public static class Pad extends AbstractFunction {
+ private ArgRef<AbstractDataNode> size;
+ private ArgRef<AbstractDataNode> value;
- int num_length = inputString.length();
- int zeros_to_pad = Integer.parseInt(inputFormat);
- zeros_to_pad += num_length;
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("size", "value"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode hsize = this.size.getValue(stack);
+ Integer size = SwiftFunction.unwrap(this, hsize);
+ AbstractDataNode hvalue = this.value.getValue(stack);
+ Integer value = SwiftFunction.unwrap(this, hvalue);
+
+ DSHandle handle = new RootDataNode(Types.STRING,
+ String.format("%0" + size + "d", value));
- String output = String.format("%0"+zeros_to_pad+"d",
- Integer.parseInt(inputString));
- handle.setValue(output);
- handle.closeShallow();
+ if (PROVENANCE_ENABLED) {
+ int provid = SwiftFunction.nextProvenanceID();
+ SwiftFunction.logProvenanceResult(provid, handle, "pad");
+ SwiftFunction.logProvenanceParameter(provid, hsize, "size");
+ SwiftFunction.logProvenanceParameter(provid, hvalue, "value");
+ }
+ return handle;
+ }
+ }
+
+ public static class ToString extends AbstractFunction {
+ private ArgRef<AbstractDataNode> value;
- int provid=VDLFunction.nextProvenanceID();
- VDLFunction.logProvenanceResult(provid, handle, "pad");
- VDLFunction.logProvenanceParameter(provid, PA_INPUT.getRawValue(stack), "int");
- VDLFunction.logProvenanceParameter(provid, PA_TRANSFORM.getRawValue(stack), "int");
- return handle;
- }
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("value"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode hvalue = this.value.getValue(stack);
+
+ StringBuilder sb = new StringBuilder();
+ prettyPrint(sb, hvalue);
+ DSHandle handle = new RootDataNode(Types.STRING, sb.toString());
- public DSHandle swiftscript_tostring(VariableStack stack)
- throws ExecutionException {
- return swiftscript_to_string(stack);
- }
+ if (PROVENANCE_ENABLED) {
+ int provid = SwiftFunction.nextProvenanceID();
+ SwiftFunction.logProvenanceParameter(provid, hvalue, "value");
+ SwiftFunction.logProvenanceResult(provid, handle, "tostring");
+ }
+
+ return handle;
+ }
+ }
- public DSHandle swiftscript_to_string(VariableStack stack)
- throws ExecutionException {
- Object input = PA_INPUT.getValue(stack);
- DSHandle handle = new RootDataNode(Types.STRING);
- if (input instanceof DSHandle) {
- StringBuilder sb = new StringBuilder();
- prettyPrint(sb, (DSHandle) input);
- handle.setValue(sb.toString());
- }
- else {
- handle.setValue(String.valueOf(input));
- }
- handle.closeShallow();
- return handle;
- }
+    /*
+     * Note: this comment describes the Length class further below, which
+     * shares its structure with Dirname; both could be made more readable.
+     * Length returns the length of an array, which is useful for debugging
+     * because the array needs to be closed before its length can be
+     * determined.
+     */
+ public static class Dirname extends AbstractFunction {
+ private ArgRef<AbstractDataNode> file;
- public DSHandle swiftscript_dirname(VariableStack stack)
- throws ExecutionException {
- AbstractDataNode n = (AbstractDataNode) PA_FILE.getRawValue(stack);
- n.waitFor();
- String name = VDLFunction.filename(n)[0];
- String result = new AbsFile(name).getDir();
- return new RootDataNode(Types.STRING, result);
- }
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("file"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode dn = file.getValue(stack);
+ String name = SwiftFunction.filename(dn)[0];
- /*
- * This is copied from swiftscript_dirname.
- * Both the functions could be changed to be more readable.
- * Returns length of an array.
- * Good for debugging because array needs to be closed
- * before the length is determined
- */
- public DSHandle swiftscript_length(VariableStack stack)
- throws ExecutionException {
- AbstractDataNode array = (AbstractDataNode) PA_ARRAY.getRawValue(stack);
- array.waitFor();
- Map<?, ?> n = array.getArrayValue();
- return new RootDataNode(Types.INT, Integer.valueOf(n.size()));
- }
+ String result = new AbsFile(name).getDir();
+            DSHandle handle = new RootDataNode(Types.STRING, result);
+
+ if (PROVENANCE_ENABLED) {
+ int provid = SwiftFunction.nextProvenanceID();
+ SwiftFunction.logProvenanceParameter(provid, dn, "file");
+ SwiftFunction.logProvenanceResult(provid, handle, "dirname");
+ }
+
+ return handle;
+ }
+ }
+
+ public static class Length extends AbstractFunction {
+ private ArgRef<AbstractDataNode> array;
- public DSHandle swiftscript_existsfile(VariableStack stack)
- throws ExecutionException {
- DSHandle result = null;
- Object[] args = SwiftArg.VARGS.asArray(stack);
- String arg = (String) args[0];
- AbsFile file = new AbsFile(arg);
- return new RootDataNode(Types.BOOLEAN, file.exists());
- }
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("array"));
+ }
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode harray = this.array.getValue(stack);
+ harray.waitFor(this);
+
+ DSHandle handle = new RootDataNode(Types.INT, Integer.valueOf(harray.getArrayValue().size()));
+
+ if (PROVENANCE_ENABLED) {
+ int provid = SwiftFunction.nextProvenanceID();
+ SwiftFunction.logProvenanceResult(provid, handle, "length");
+ SwiftFunction.logProvenanceParameter(provid, harray, "array");
+ }
+ return handle;
+ }
+ }
}
/*
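
Once their handles are unwrapped, most of the string builtins above come down to single JDK calls. A self-contained demo of those calls, with illustrative inputs only:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Plain-JDK equivalents of several builtins defined above.
    public class MiscBuiltinsDemo {
        public static void main(String[] args) {
            // StrCut: first capture group of the first match
            Matcher m = Pattern.compile("id=(\\d+)").matcher("job id=42 done");
            m.find();
            System.out.println(m.group(1)); // 42

            // Regexp: replace the first match, reusing the capture group
            System.out.println("job id=42 done".replaceFirst("id=(\\d+)", "id=[$1]")); // job id=[42] done

            // StrStr and StrSplit
            System.out.println("a,b,c".indexOf("b"));      // 2
            System.out.println("a,b,c".split(",").length); // 3

            // Format("3", 3.14159) and Pad(5, 42)
            System.out.println(String.format("%.3f", 3.14159)); // 3.142
            System.out.println(String.format("%05d", 42));      // 00042
        }
    }
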
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/ReadData.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/ReadData.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/ReadData.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -28,12 +28,13 @@
import java.util.HashSet;
import java.util.Set;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.karajan.lib.Tracer;
-import org.griphyn.vdl.karajan.lib.VDLFunction;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.griphyn.vdl.karajan.lib.SwiftFunction;
import org.griphyn.vdl.mapping.AbsFile;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
@@ -43,32 +44,24 @@
import org.griphyn.vdl.type.Type;
import org.griphyn.vdl.type.Types;
-public class ReadData extends VDLFunction {
+public class ReadData extends SwiftFunction {
public static final Logger logger = Logger.getLogger(ReadData.class);
-
- public static final Arg DEST = new Arg.Positional("dest");
- public static final Arg SRC = new Arg.Positional("src");
- public static boolean warning;
- public Tracer tracer;
+ private ArgRef<AbstractDataNode> dest;
+ private ArgRef<AbstractDataNode> src;
- static {
- setArguments(ReadData.class, new Arg[] { DEST, SRC });
- }
-
@Override
- protected void initializeStatic() {
- super.initializeStatic();
- tracer = Tracer.getTracer(this, "SWIFTCALL");
+ protected Signature getSignature() {
+ return new Signature(params("dest", "src"));
}
- protected Object function(VariableStack stack) throws ExecutionException {
- DSHandle dest = (DSHandle) DEST.getValue(stack);
- AbstractDataNode src = (AbstractDataNode) SRC.getValue(stack);
- if (tracer.isEnabled()) {
- tracer.trace(stack, "readData(" + Tracer.unwrapHandle(src) + ")");
- }
- src.waitFor();
+ public static boolean warning;
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode dest = this.dest.getValue(stack);
+ AbstractDataNode src = this.src.getValue(stack);
+ src.waitFor(this);
if (src.getType().equals(Types.STRING)) {
readData(dest, (String) src.getValue());
}
@@ -116,7 +109,7 @@
}
}
catch (IOException e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
@@ -154,7 +147,7 @@
}
}
catch (InvalidPathException e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
@@ -175,7 +168,7 @@
}
}
catch (InvalidPathException e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
@@ -218,7 +211,7 @@
}
}
catch (InvalidPathException e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/ReadStructured.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/ReadStructured.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/ReadStructured.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -25,12 +25,13 @@
import java.io.FileReader;
import java.io.IOException;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.karajan.lib.Tracer;
-import org.griphyn.vdl.karajan.lib.VDLFunction;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.griphyn.vdl.karajan.lib.SwiftFunction;
import org.griphyn.vdl.mapping.AbsFile;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
@@ -38,32 +39,25 @@
import org.griphyn.vdl.mapping.PhysicalFormat;
import org.griphyn.vdl.type.Types;
-public class ReadStructured extends VDLFunction {
+public class ReadStructured extends SwiftFunction {
public static final Logger logger = Logger.getLogger(ReadStructured.class);
- public static final Arg DEST = new Arg.Positional("dest");
- public static final Arg SRC = new Arg.Positional("src");
- public static boolean warning;
+ private ArgRef<AbstractDataNode> dest;
+ private ArgRef<AbstractDataNode> src;
- static {
- setArguments(ReadStructured.class, new Arg[] { DEST, SRC });
- }
-
- public Tracer tracer;
-
- @Override
- protected void initializeStatic() {
- super.initializeStatic();
- tracer = Tracer.getTracer(this, "SWIFTCALL");
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("dest", "src"));
}
- protected Object function(VariableStack stack) throws ExecutionException {
- DSHandle dest = (DSHandle) DEST.getValue(stack);
- AbstractDataNode src = (AbstractDataNode) SRC.getValue(stack);
- if (tracer.isEnabled()) {
- tracer.trace(stack, "readData2(" + Tracer.unwrapHandle(src) + ")");
- }
- src.waitFor();
+ public static boolean warning;
+
+
+ @Override
+ public Object function(Stack stack) {
+ AbstractDataNode dest = this.dest.getValue(stack);
+ AbstractDataNode src = this.src.getValue(stack);
+ src.waitFor(this);
if (src.getType().equals(Types.STRING)) {
readData(dest, (String) src.getValue());
}
@@ -100,7 +94,7 @@
}
}
catch (Exception e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Sprintf.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Sprintf.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Sprintf.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -17,11 +17,15 @@
package org.griphyn.vdl.karajan.lib.swiftscript;
+import k.rt.Channel;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.karajan.lib.VDLFunction;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.griphyn.vdl.karajan.lib.SwiftFunction;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.ArrayDataNode;
import org.griphyn.vdl.mapping.DSHandle;
@@ -47,63 +51,49 @@
%k: Variable sKipped, no output. <br>
%q: Array output
*/
-public class Sprintf extends VDLFunction {
+public class Sprintf extends SwiftFunction {
- private static final Logger logger =
- Logger.getLogger(Sprintf.class);
+ private static final Logger logger = Logger.getLogger(Sprintf.class);
- static {
- setArguments(Sprintf.class, new Arg[] { Arg.VARGS });
+ private ArgRef<AbstractDataNode> spec;
+ private ChannelRef<AbstractDataNode> c_vargs;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("spec", "..."));
}
+
@Override
- protected Object function(VariableStack stack)
- throws ExecutionException {
- AbstractDataNode[] args = waitForAllVargs(stack);
+ public Object function(Stack stack) {
+ AbstractDataNode hspec = this.spec.getValue(stack);
+ hspec.waitFor(this);
+ Channel<AbstractDataNode> args = c_vargs.get(stack);
+ waitForAll(this, args);
+ String spec = (String) hspec.getValue();
- String msg = format(args);
- logger.debug("generated: " + msg);
+ String msg = format(spec, args);
+ if (logger.isDebugEnabled()) {
+ logger.debug("generated: " + msg);
+ }
DSHandle result = new RootDataNode(Types.STRING);
result.setValue(msg);
return result;
}
- public static String format(DSHandle[] args)
- throws ExecutionException {
- if (! (args[0].getType() == Types.STRING))
- throw new ExecutionException
- ("First argument to sprintf() must be a string!");
-
- String spec = (String) args[0].getValue();
+ public static String format(String spec, Channel<AbstractDataNode> args) {
logger.debug("spec: " + spec);
- DSHandle[] vars = copyArray(args, 1, args.length-1);
-
StringBuilder output = new StringBuilder();
- format(spec, vars, output);
-
+ format(spec, args, output);
return output.toString();
}
-
- public static DSHandle[] copyArray(DSHandle[] src,
- int offset, int length)
- {
- DSHandle[] result = new DSHandle[length];
-
- for (int i = 0; i < length; i++)
- result[i] = src[i+offset];
-
- return result;
- }
/**
This method can be targeted as a helper function
(by @sprintf(), etc.)
*/
- public static void format(String spec, DSHandle[] vars,
- StringBuilder output)
- throws ExecutionException
- {
+ public static void format(String spec, Channel<AbstractDataNode> vars, StringBuilder output) {
int i = 0;
int arg = 0;
while (i < spec.length()) {
@@ -123,53 +113,48 @@
}
}
- private static int append(char c, int arg, DSHandle[] vars,
- StringBuilder output)
- throws ExecutionException {
+ private static int append(char c, int arg, Channel<AbstractDataNode> vars, StringBuilder output) {
if (c == '%') {
output.append('%');
return arg;
}
- if (arg >= vars.length) {
- throw new ExecutionException
- ("tracef(): too many specifiers!");
+ if (arg >= vars.size()) {
+ throw new IllegalArgumentException("tracef(): too many specifiers!");
}
if (c == 'M') {
- append_M(vars[arg], output);
+ append_M(vars.get(arg), output);
}
else if (c == 'b') {
- append_b(vars[arg], output);
+ append_b(vars.get(arg), output);
}
else if (c == 'f') {
- append_f(vars[arg], output);
+ append_f(vars.get(arg), output);
}
else if (c == 'i') {
- append_i(vars[arg], output);
+ append_i(vars.get(arg), output);
}
else if (c == 'p') {
- output.append(vars[arg].toString());
+ output.append(vars.get(arg).toString());
}
else if (c == 's') {
- append_s(vars[arg], output);
+ append_s(vars.get(arg), output);
}
else if (c == 'q') {
- append_q(vars[arg], output);
+ append_q(vars.get(arg), output);
}
else if (c == 'k') {
;
}
else {
- throw new ExecutionException
- ("tracef(): Unknown format: %" + c);
+ throw new IllegalArgumentException("tracef(): Unknown format: %" + c);
}
return arg+1;
}
- private static void append_M(DSHandle arg, StringBuilder output)
- throws ExecutionException {
+ private static void append_M(DSHandle arg, StringBuilder output) {
try {
synchronized (arg.getRoot()) {
- String[] names = VDLFunction.filename(arg);
+ String[] names = SwiftFunction.filename(arg);
if (names.length > 1)
output.append(names);
else
@@ -177,8 +162,7 @@
}
}
catch (Exception e) {
- throw new ExecutionException
- ("tracef(%M): Could not lookup: " + arg);
+ throw new IllegalArgumentException("tracef(%M): Could not lookup: " + arg);
}
}
@@ -188,36 +172,33 @@
output.append(arg.getValue());
}
else {
- throw new ExecutionException
- ("tracef(): %b requires a boolean! " + dshandleDescription(arg));
+ throw new IllegalArgumentException("tracef(): %b requires a boolean! "
+ + dshandleDescription(arg));
}
}
- private static void append_f(DSHandle arg, StringBuilder output)
- throws ExecutionException {
+ private static void append_f(DSHandle arg, StringBuilder output) {
if (arg.getType() == Types.FLOAT) {
output.append(arg.getValue());
}
else {
- throw new ExecutionException
- ("tracef(): %f requires a float! " + dshandleDescription(arg));
+ throw new IllegalArgumentException("tracef(): %f requires a float! "
+ + dshandleDescription(arg));
}
}
- private static void append_i(DSHandle arg, StringBuilder output)
- throws ExecutionException {
+ private static void append_i(DSHandle arg, StringBuilder output) {
if (arg.getType() == Types.INT) {
Integer d = (Integer) arg.getValue();
output.append(d);
}
else {
- throw new ExecutionException
- ("tracef(): %i requires an int! " + dshandleDescription(arg));
+ throw new IllegalArgumentException("tracef(): %i requires an int! "
+ + dshandleDescription(arg));
}
}
- private static void append_q(DSHandle arg, StringBuilder output)
- throws ExecutionException {
+ private static void append_q(DSHandle arg, StringBuilder output) {
if (arg instanceof ArrayDataNode) {
ArrayDataNode node = (ArrayDataNode) arg;
output.append("[");
@@ -234,25 +215,24 @@
}
catch (Exception e) {
e.printStackTrace();
- throw new ExecutionException
- ("trace(%q): Could not get children of: " + arg);
+ throw new IllegalArgumentException("trace(%q): Could not get children of: "
+ + arg);
}
output.append("]");
}
else {
- throw new ExecutionException
- ("tracef(): %q requires an array! " + dshandleDescription(arg));
+ throw new IllegalArgumentException("tracef(): %q requires an array! "
+ + dshandleDescription(arg));
}
}
- private static void append_s(DSHandle arg, StringBuilder output)
- throws ExecutionException {
+ private static void append_s(DSHandle arg, StringBuilder output) {
if (arg.getType() == Types.STRING) {
output.append(arg.getValue());
}
else {
- throw new ExecutionException
- ("tracef(): %s requires a string! " + dshandleDescription(arg));
+ throw new IllegalArgumentException("tracef(): %s requires a string! "
+ + dshandleDescription(arg));
}
}
@@ -273,8 +253,7 @@
output.append('\t');
}
else {
- throw new ExecutionException
- ("tracef(): unknown backslash escape sequence! " +
+ throw new IllegalArgumentException("tracef(): unknown backslash escape sequence! " +
"(\\" + c + ")\n" +
"\t in " + spec + " character: " + i);
}
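
format() and append() above walk the spec one character at a time and consume one vararg per specifier, except for %% (literal percent) and %k (argument consumed, nothing printed). A reduced standalone sketch of that loop, restricted to plain Objects and the %s, %i, %% and %k cases, and leaving out the backslash-escape handling:

    // Reduced sketch of the Sprintf.format()/append() loop; not the Swift
    // class, just the specifier-dispatch idea over plain Objects.
    public class MiniSprintf {
        static String format(String spec, Object... vars) {
            StringBuilder out = new StringBuilder();
            int arg = 0;
            int i = 0;
            while (i < spec.length()) {
                char c = spec.charAt(i);
                if (c == '%' && i + 1 < spec.length()) {
                    arg = append(spec.charAt(++i), arg, vars, out);
                }
                else {
                    out.append(c);
                }
                i++;
            }
            return out.toString();
        }

        static int append(char c, int arg, Object[] vars, StringBuilder out) {
            if (c == '%') {        // literal percent, consumes no argument
                out.append('%');
                return arg;
            }
            if (arg >= vars.length) {
                throw new IllegalArgumentException("too many specifiers!");
            }
            if (c == 's' || c == 'i') {
                out.append(vars[arg]);
            }
            else if (c == 'k') {
                ;                  // skipped: argument consumed, nothing printed
            }
            else {
                throw new IllegalArgumentException("Unknown format: %" + c);
            }
            return arg + 1;
        }

        public static void main(String[] args) {
            System.out.println(format("%s has %i tasks (100%%)", "run-1", 7));
            // prints: run-1 has 7 tasks (100%)
        }
    }
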
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Tracef.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Tracef.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/Tracef.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -17,11 +17,14 @@
package org.griphyn.vdl.karajan.lib.swiftscript;
+import k.rt.Channel;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.karajan.lib.VDLFunction;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.ChannelRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.griphyn.vdl.karajan.lib.SwiftFunction;
import org.griphyn.vdl.mapping.AbstractDataNode;
/**
@@ -34,21 +37,27 @@
3) allows for consumption of variables without display (%k);
4) does not impose any formatting (commas, etc.). <br><br>
*/
-public class Tracef extends VDLFunction {
+public class Tracef extends SwiftFunction {
+ private static final Logger logger = Logger.getLogger(Tracef.class);
+
+ private ArgRef<AbstractDataNode> spec;
+ private ChannelRef<AbstractDataNode> c_vargs;
- private static final Logger logger =
- Logger.getLogger(Tracef.class);
-
- static {
- setArguments(Tracef.class, new Arg[] { Arg.VARGS });
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("spec", "..."));
}
+
@Override
- protected Object function(VariableStack stack)
- throws ExecutionException {
- AbstractDataNode[] args = waitForAllVargs(stack);
-
- String msg = Sprintf.format(args);
+ public Object function(Stack stack) {
+ AbstractDataNode hspec = this.spec.getValue(stack);
+ hspec.waitFor(this);
+ Channel<AbstractDataNode> args = c_vargs.get(stack);
+ waitForAll(this, args);
+ String spec = (String) hspec.getValue();
+
+ String msg = Sprintf.format(spec, args);
logger.info(msg);
System.out.print(msg);
return null;
Modified: branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/WriteData.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/WriteData.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/lib/swiftscript/WriteData.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -25,15 +25,16 @@
import java.util.Map;
import java.util.TreeMap;
+import k.rt.ExecutionException;
+import k.rt.Stack;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.arguments.Arg;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.griphyn.vdl.karajan.lib.VDLFunction;
+import org.globus.cog.karajan.analyzer.ArgRef;
+import org.globus.cog.karajan.analyzer.Signature;
+import org.griphyn.vdl.karajan.lib.SwiftFunction;
import org.griphyn.vdl.mapping.AbsFile;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.DSHandle;
-import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.InvalidPathException;
import org.griphyn.vdl.mapping.Path;
import org.griphyn.vdl.mapping.PhysicalFormat;
@@ -41,25 +42,28 @@
import org.griphyn.vdl.type.Types;
-public class WriteData extends VDLFunction {
+public class WriteData extends SwiftFunction {
public static final Logger logger = Logger.getLogger(WriteData.class);
- public static final Arg DEST = new Arg.Positional("dest");
- public static final Arg SRC = new Arg.Positional("src");
+ private ArgRef<AbstractDataNode> dest;
+ private ArgRef<AbstractDataNode> src;
+
+ @Override
+ protected Signature getSignature() {
+ return new Signature(params("dest", "src"));
+ }
+
public static boolean warning;
- static {
- setArguments(WriteData.class, new Arg[] { DEST, SRC });
- }
-
- protected Object function(VariableStack stack) throws ExecutionException {
+ @Override
+ public Object function(Stack stack) {
// dest needs to be mapped to a file, or a string
- DSHandle dest = (DSHandle) DEST.getValue(stack);
+ AbstractDataNode dest = this.dest.getValue(stack);
// src can be any of several forms of value
- AbstractDataNode src = (AbstractDataNode) SRC.getValue(stack);
+ AbstractDataNode src = this.src.getValue(stack);
- src.waitFor();
+ src.waitFor(this);
if (dest.getType().equals(Types.STRING)) {
writeData((String)dest.getValue(), src);
@@ -109,7 +113,7 @@
}
}
catch (IOException e) {
- throw new ExecutionException(e);
+ throw new ExecutionException(this, e);
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/monitor/SystemState.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/monitor/SystemState.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/monitor/SystemState.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -25,17 +25,20 @@
import java.util.Map;
import java.util.Set;
-import org.globus.cog.karajan.stack.VariableStack;
+import k.rt.Stack;
+
import org.griphyn.vdl.karajan.monitor.items.StatefulItem;
import org.griphyn.vdl.karajan.monitor.items.StatefulItemClass;
+import com.sun.org.apache.xpath.internal.VariableStack;
+
public class SystemState {
private Map<StatefulItemClass, StatefulItemClassSet<? extends StatefulItem>> classes;
private Set<SystemStateListener> listeners;
private Map<String, Stats> stats;
private int total, completed;
private long start;
- private VariableStack stack;
+ private Stack stack;
private String projectName;
public SystemState(String projectName) {
@@ -132,11 +135,11 @@
return start;
}
- public VariableStack getStack() {
+ public Stack getStack() {
return stack;
}
- public void setStack(VariableStack stack) {
+ public void setStack(Stack stack) {
this.stack = stack;
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/monitor/items/SummaryItem.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/monitor/items/SummaryItem.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/monitor/items/SummaryItem.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,6 +20,7 @@
*/
package org.griphyn.vdl.karajan.monitor.items;
+import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@@ -58,14 +59,8 @@
public int getCount(String key, SystemState state) {
if (state.getStack() != null) {
- ProgressTicker t = RuntimeStats.getTicker(state.getStack());
- Integer value = t.getSummary().get(key);
- if (value != null) {
- return value;
- }
- else {
- return 0;
- }
+ // TODO Must get these from log
+ return -1;
}
else {
return getCount(key);
@@ -74,7 +69,8 @@
public synchronized Map<String, Integer> getCounts(SystemState state) {
if (state.getStack() != null) {
- return RuntimeStats.getTicker(state.getStack()).getSummary();
+ // TODO Must get these from log
+ return Collections.emptyMap();
}
else {
return new HashMap<String, Integer>(counts);
Modified: branches/faster/src/org/griphyn/vdl/karajan/monitor/monitors/ansi/WorkerTerminalInputHandler.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/monitor/monitors/ansi/WorkerTerminalInputHandler.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/monitor/monitors/ansi/WorkerTerminalInputHandler.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -25,9 +25,9 @@
import org.globus.cog.abstraction.impl.execution.coaster.WorkerShellCommand;
import org.globus.cog.abstraction.interfaces.Service;
import org.globus.cog.abstraction.interfaces.Task;
-import org.globus.cog.karajan.workflow.service.ProtocolException;
-import org.globus.cog.karajan.workflow.service.channels.ChannelManager;
-import org.globus.cog.karajan.workflow.service.channels.KarajanChannel;
+import org.globus.cog.coaster.ProtocolException;
+import org.globus.cog.coaster.channels.ChannelManager;
+import org.globus.cog.coaster.channels.CoasterChannel;
import org.griphyn.vdl.karajan.monitor.monitors.ansi.tui.Dialog;
import org.griphyn.vdl.karajan.monitor.monitors.ansi.tui.Terminal;
import org.griphyn.vdl.karajan.monitor.monitors.ansi.tui.Terminal.InputHandler;
@@ -72,7 +72,7 @@
private String runcmd(String cmd) {
try {
- KarajanChannel channel = ChannelManager.getManager()
+ CoasterChannel channel = ChannelManager.getManager()
.reserveChannel(contact, cred, LocalRequestManager.INSTANCE);
WorkerShellCommand wsc = new WorkerShellCommand(workerId, cmd);
wsc.execute(channel);
Modified: branches/faster/src/org/griphyn/vdl/karajan/monitor/processors/ExecutionContextProcessor.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/monitor/processors/ExecutionContextProcessor.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/monitor/processors/ExecutionContextProcessor.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -21,8 +21,7 @@
package org.griphyn.vdl.karajan.monitor.processors;
import org.apache.log4j.Level;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.griphyn.vdl.karajan.VDL2ExecutionContext;
+import org.griphyn.vdl.karajan.SwiftExecutor;
import org.griphyn.vdl.karajan.monitor.SystemState;
public class ExecutionContextProcessor extends AbstractMessageProcessor {
@@ -32,12 +31,9 @@
}
public Class<?> getSupportedSource() {
- return VDL2ExecutionContext.class;
+ return SwiftExecutor.class;
}
public void processMessage(SystemState state, Object message, Object details) {
- if (message instanceof VariableStack) {
- state.setStack((VariableStack) message);
- }
}
}
Modified: branches/faster/src/org/griphyn/vdl/karajan/monitor/processors/TaskProcessor.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/karajan/monitor/processors/TaskProcessor.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/karajan/monitor/processors/TaskProcessor.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -35,7 +35,7 @@
}
public Class<?> getSupportedSource() {
- return org.globus.cog.karajan.workflow.nodes.grid.AbstractGridNode.class;
+ return org.globus.cog.karajan.compiled.nodes.grid.AbstractGridNode.class;
}
public void processMessage(SystemState state, Object message, Object details) {
Modified: branches/faster/src/org/griphyn/vdl/mapping/AbstractDataNode.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/AbstractDataNode.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/AbstractDataNode.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -28,10 +28,12 @@
import java.util.List;
import java.util.Map;
+import k.rt.Future;
+import k.thr.Yield;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.workflow.ExecutionException;
-import org.globus.cog.karajan.workflow.futures.Future;
-import org.globus.cog.karajan.workflow.futures.FutureNotYetAvailable;
+import org.globus.cog.karajan.compiled.nodes.Node;
+import org.globus.cog.karajan.futures.FutureNotYetAvailable;
import org.griphyn.vdl.karajan.DSHandleFutureWrapper;
import org.griphyn.vdl.karajan.FutureTracker;
import org.griphyn.vdl.karajan.FutureWrapper;
@@ -109,7 +111,7 @@
}
}
- public void init(MappingParamSet params) {
+ public void init(MappingParamSet params) throws HandleOpenException {
throw new UnsupportedOperationException();
}
@@ -623,12 +625,15 @@
return DATASET_URI_PREFIX + datasetIDPartialID + ":" + datasetIDCounter;
}
- public synchronized void waitFor() {
+ public synchronized void waitFor(Node who) {
if (!closed) {
if (logger.isDebugEnabled()) {
logger.debug("Waiting for " + this);
}
- throw new FutureNotYetAvailable(getFutureWrapper());
+
+ Yield y = new FutureNotYetAvailable(getFutureWrapper());
+ y.getState().addTraceElement(who);
+ throw y;
}
else {
if (logger.isDebugEnabled()) {
@@ -640,6 +645,24 @@
}
}
+ public synchronized void waitFor() throws OOBYield {
+ if (!closed) {
+ if (logger.isDebugEnabled()) {
+ logger.debug("Waiting for " + this);
+ }
+
+ throw new OOBYield(new FutureNotYetAvailable(getFutureWrapper()), this);
+ }
+ else {
+ if (logger.isDebugEnabled()) {
+ logger.debug("Do not need to wait for " + this);
+ }
+ if (value instanceof RuntimeException) {
+ throw (RuntimeException) value;
+ }
+ }
+ }
+
public void addListener(DSHandleListener listener) {
throw new UnsupportedOperationException();
}
Modified: branches/faster/src/org/griphyn/vdl/mapping/AbstractMapper.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/AbstractMapper.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/AbstractMapper.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -48,7 +48,7 @@
}
}
- public void setParams(MappingParamSet params) {
+ public void setParams(MappingParamSet params) throws HandleOpenException {
this.params = params;
}
Modified: branches/faster/src/org/griphyn/vdl/mapping/ArrayDataNode.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/ArrayDataNode.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/ArrayDataNode.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -23,9 +23,10 @@
import java.util.List;
import java.util.Map;
-import org.globus.cog.karajan.workflow.futures.Future;
-import org.globus.cog.karajan.workflow.futures.FutureList;
-import org.globus.cog.karajan.workflow.futures.FutureNotYetAvailable;
+import k.rt.Future;
+
+import org.globus.cog.karajan.futures.FutureList;
+import org.globus.cog.karajan.futures.FutureNotYetAvailable;
import org.griphyn.vdl.karajan.ArrayIndexFutureList;
import org.griphyn.vdl.karajan.FutureTracker;
import org.griphyn.vdl.type.Field;
Modified: branches/faster/src/org/griphyn/vdl/mapping/DSHandle.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/DSHandle.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/DSHandle.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -47,7 +47,7 @@
*/
public Type getType();
- public void init(MappingParamSet params);
+ public void init(MappingParamSet params) throws HandleOpenException;
public DSHandle getRoot();
Modified: branches/faster/src/org/griphyn/vdl/mapping/DataDependentException.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/DataDependentException.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/DataDependentException.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -35,7 +35,7 @@
}
public String getMessage() {
- return getVariableName() + " not derived due to errors in data dependencies";
+ return getHandle() + " not derived due to errors in data dependencies";
}
}
Modified: branches/faster/src/org/griphyn/vdl/mapping/DependentException.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/DependentException.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/DependentException.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -41,15 +41,6 @@
public DSHandle getHandle() {
return handle;
}
-
- public String getVariableName() {
- if (handle instanceof AbstractDataNode) {
- return ((AbstractDataNode) handle).getDisplayableName();
- }
- else {
- return handle.toString();
- }
- }
public String toString() {
return getMessage();
Modified: branches/faster/src/org/griphyn/vdl/mapping/HandleOpenException.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/HandleOpenException.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/HandleOpenException.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -24,11 +24,17 @@
private DSHandle source;
public HandleOpenException(DSHandle source) {
- super("Handle open: " + source.getType() + " " + source.toString());
this.source = source;
}
public DSHandle getSource() {
return source;
}
+
+ @Override
+ public String getMessage() {
+ return "Handle open: " + source.getType() + " " + source.toString();
+ }
+
+
}
Modified: branches/faster/src/org/griphyn/vdl/mapping/Mapper.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/Mapper.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/Mapper.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -54,10 +54,8 @@
*/
boolean isStatic();
- void setParams(MappingParamSet params);
+ void setParams(MappingParamSet params) throws HandleOpenException;
- void setParam(MappingParam p, Object value);
-
Object getParam(MappingParam name);
/**
Modified: branches/faster/src/org/griphyn/vdl/mapping/MappingParam.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/MappingParam.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/MappingParam.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -61,9 +61,9 @@
/** Returns the value of this parameter. If the value is a dataset,
* then the value will be converted to a string. If no value is
* specified in the SwiftScript program, then the default value
- * will be returned.
+ * will be returned.
*/
- public Object getValue(Mapper mapper) {
+ public Object getValue(Mapper mapper) throws HandleOpenException {
Object value = mapper.getParam(this);
if (value instanceof AbstractDataNode) {
AbstractDataNode handle = (AbstractDataNode) value;
@@ -89,7 +89,7 @@
}
}
- public Object getValue(MappingParamSet params) {
+ public Object getValue(MappingParamSet params) throws HandleOpenException {
Object value = params.get(this);
if (value instanceof AbstractDataNode) {
AbstractDataNode handle = (AbstractDataNode) value;
@@ -120,7 +120,7 @@
/** Returns the mapper parameter as a String. Other data types will be
converted to a String as appropriate. */
- public String getStringValue(Mapper mapper) {
+ public String getStringValue(Mapper mapper) throws HandleOpenException {
Object value = getValue(mapper);
if (value == null) {
return null;
@@ -128,7 +128,7 @@
return String.valueOf(value);
}
- public String getStringValue(MappingParamSet params) {
+ public String getStringValue(MappingParamSet params) throws HandleOpenException {
Object value = getValue(params);
if (value == null) {
return null;
@@ -136,10 +136,6 @@
return String.valueOf(value);
}
- public void setValue(Mapper mapper, Object value) {
- mapper.setParam(this, value);
- }
-
public boolean isPresent(Mapper mapper) {
return mapper.getParam(this) != null;
}
@@ -155,7 +151,7 @@
* match, then 'false' is returned (note an exception is not thrown,
* unlike other getValue methods).
*/
- public boolean getBooleanValue(Mapper mapper) {
+ public boolean getBooleanValue(Mapper mapper) throws HandleOpenException {
Object value = getValue(mapper);
if (value instanceof String) {
return Boolean.valueOf((String) value).booleanValue();
@@ -172,7 +168,7 @@
* is a string, then the string will be parsed to an int. If the
* value is an int, then this will be passed through. Otherwise,
* a NumberFormatException will be thrown. */
- public int getIntValue(Mapper mapper) {
+ public int getIntValue(Mapper mapper) throws HandleOpenException {
Object value = getValue(mapper);
if (value instanceof String) {
return Integer.parseInt((String) value);
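A note on the pattern introduced by the throws clauses above: a parameter value may throw HandleOpenException while its handle is still open, so the mappers changed below read each parameter exactly once, inside setParams(), and cache the result in a plain field. A minimal sketch of that pattern for a hypothetical mapper (the class name CachedParamMapper and the "dir" parameter are illustrative only, not part of this commit):

import org.griphyn.vdl.mapping.AbstractMapper;
import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.MappingParam;
import org.griphyn.vdl.mapping.MappingParamSet;

// Hypothetical mapper fragment: read parameters once in setParams() and cache them.
public abstract class CachedParamMapper extends AbstractMapper {
    public static final MappingParam PARAM_DIR = new MappingParam("dir", ".");

    private String dir;   // cached at setParams() time; safe to use afterwards

    @Override
    public void setParams(MappingParamSet params) throws HandleOpenException {
        super.setParams(params);
        // may throw HandleOpenException if the parameter's handle is still open
        dir = PARAM_DIR.getStringValue(this);
    }

    protected String getDir() {
        return dir;
    }
}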
Added: branches/faster/src/org/griphyn/vdl/mapping/OOBYield.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/OOBYield.java (rev 0)
+++ branches/faster/src/org/griphyn/vdl/mapping/OOBYield.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -0,0 +1,30 @@
+//----------------------------------------------------------------------
+//This code is developed as part of the Java CoG Kit project
+//The terms of the license can be found at http://www.cogkit.org/license
+//This message may not be removed or altered.
+//----------------------------------------------------------------------
+
+/*
+ * Created on Jan 28, 2013
+ */
+package org.griphyn.vdl.mapping;
+
+import k.thr.Yield;
+
+public class OOBYield extends HandleOpenException {
+ private final Yield y;
+
+ public OOBYield(Yield y, DSHandle h) {
+ super(h);
+ this.y = y;
+ }
+
+ public Yield wrapped() {
+ return y;
+ }
+
+ public Yield wrapped(Object traceElement) {
+ y.getState().addTraceElement(traceElement);
+ return y;
+ }
+}
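OOBYield lets code that runs outside a Karajan node (mappers, data nodes) signal that a handle is still open without throwing a k.thr.Yield from a place where the scheduler cannot handle it; the caller unwraps it and rethrows the Yield at a safe point, as RootDataNode.futureUpdated() does further down. A minimal sketch of the calling side (the helper name demand() and the caller argument are assumptions for illustration):

import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.OOBYield;

public final class OOBYieldSketch {
    private OOBYieldSketch() {}

    // Hypothetical helper: block until 'node' is closed. While the handle is
    // still open, waitFor() throws OOBYield; we unwrap it, tag the caller for
    // the trace, and rethrow the plain Yield so the lightweight thread suspends.
    public static void demand(AbstractDataNode node, Object caller) {
        try {
            node.waitFor();
        }
        catch (OOBYield oob) {
            throw oob.wrapped(caller);
        }
    }
}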
Modified: branches/faster/src/org/griphyn/vdl/mapping/RootArrayDataNode.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/RootArrayDataNode.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/RootArrayDataNode.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -17,11 +17,11 @@
package org.griphyn.vdl.mapping;
+import k.rt.Future;
+import k.rt.FutureListener;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.futures.Future;
-import org.globus.cog.karajan.workflow.futures.FutureListener;
-import org.globus.cog.karajan.workflow.futures.FutureNotYetAvailable;
+import org.globus.cog.karajan.futures.FutureNotYetAvailable;
import org.griphyn.vdl.karajan.lib.Tracer;
import org.griphyn.vdl.type.Field;
import org.griphyn.vdl.type.Type;
@@ -50,7 +50,7 @@
this.dmc = dmc;
}
- public void init(MappingParamSet params) {
+ public void init(MappingParamSet params) throws HandleOpenException {
this.params = params;
if (this.params == null) {
initialized();
@@ -60,14 +60,14 @@
}
}
- private synchronized void innerInit() {
+ private synchronized void innerInit() throws HandleOpenException {
if (logger.isDebugEnabled()) {
logger.debug("innerInit: " + this);
}
waitingMapperParam = params.getFirstOpenParamValue();
if (waitingMapperParam != null) {
- waitingMapperParam.getFutureWrapper().addModificationAction(this, null);
+ waitingMapperParam.getFutureWrapper().addListener(this);
if (tracer.isEnabled()) {
tracer.trace(getThread(), getDeclarationLine(), getDisplayableName() + " WAIT "
+ Tracer.getVarName(waitingMapperParam));
@@ -109,8 +109,16 @@
}
}
- public void futureModified(Future f, VariableStack stack) {
- innerInit();
+ public void futureUpdated(Future f) {
+ try {
+ innerInit();
+ }
+ catch (OOBYield e) {
+ throw e.wrapped();
+ }
+ catch (HandleOpenException e) {
+ e.printStackTrace();
+ }
}
public String getParam(MappingParam p) {
Modified: branches/faster/src/org/griphyn/vdl/mapping/RootDataNode.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/RootDataNode.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/RootDataNode.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,11 +20,11 @@
*/
package org.griphyn.vdl.mapping;
+import k.rt.Future;
+import k.rt.FutureListener;
+
import org.apache.log4j.Logger;
-import org.globus.cog.karajan.stack.VariableStack;
-import org.globus.cog.karajan.workflow.futures.Future;
-import org.globus.cog.karajan.workflow.futures.FutureListener;
-import org.globus.cog.karajan.workflow.futures.FutureNotYetAvailable;
+import org.globus.cog.karajan.futures.FutureNotYetAvailable;
import org.griphyn.vdl.karajan.lib.Tracer;
import org.griphyn.vdl.type.Field;
import org.griphyn.vdl.type.Type;
@@ -56,21 +56,23 @@
setValue(value);
}
- public void init(MappingParamSet params) {
+ public void init(MappingParamSet params) throws HandleOpenException {
this.params = params;
- if(this.params == null) {
+ if (this.params == null) {
initialized();
- } else {
+ }
+ else {
innerInit();
}
}
/** must have this.params set to the appropriate parameters before
- being called. */
- private synchronized void innerInit() {
+ being called.
+ * @throws HandleOpenException */
+ private synchronized void innerInit() throws HandleOpenException {
waitingMapperParam = params.getFirstOpenParamValue();
if (waitingMapperParam != null) {
- waitingMapperParam.getFutureWrapper().addModificationAction(this, null);
+ waitingMapperParam.getFutureWrapper().addListener(this);
if (tracer.isEnabled()) {
tracer.trace(getThread(), getDeclarationLine(), getDisplayableName() + " WAIT "
+ Tracer.getVarName(waitingMapperParam));
@@ -113,8 +115,16 @@
}
}
- public void futureModified(Future f, VariableStack stack) {
- innerInit();
+ public void futureUpdated(Future f) {
+ try {
+ innerInit();
+ }
+ catch (OOBYield e) {
+ throw e.wrapped();
+ }
+ catch (HandleOpenException e) {
+ e.printStackTrace();
+ }
}
@@ -271,9 +281,6 @@
initialized = true;
waitingMapperParam = null;
if (tracer.isEnabled()) {
- if ("sphOut".equals(getDisplayableName())) {
- System.out.println();
- }
tracer.trace(getThread(), getDeclarationLine(), getDisplayableName() + " INITIALIZED " + params);
}
}
Modified: branches/faster/src/org/griphyn/vdl/mapping/file/AbstractFileMapper.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/file/AbstractFileMapper.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/file/AbstractFileMapper.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -31,6 +31,7 @@
import org.griphyn.vdl.mapping.AbsFile;
import org.griphyn.vdl.mapping.AbstractDataNode;
import org.griphyn.vdl.mapping.AbstractMapper;
+import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.InvalidMappingParameterException;
import org.griphyn.vdl.mapping.MappingParam;
import org.griphyn.vdl.mapping.MappingParamSet;
@@ -71,6 +72,8 @@
public static final MappingParam PARAM_PATTERN = new MappingParam("pattern", null);
public static final MappingParam PARAM_LOCATION = new MappingParam("location", null);
public static final MappingParam PARAM_NOAUTO = new MappingParam("noauto", "false");
+
+ private String location, prefix, suffix, pattern;
public static final Logger logger = Logger.getLogger(AbstractFileMapper.class);
@@ -82,6 +85,8 @@
this.elementMapper = elementMapper;
}
+
+
/** Creates an AbstractFileMapper without specifying a
* FileNameElementMapper. The elementMapper must be specified
@@ -99,7 +104,7 @@
this.elementMapper = elementMapper;
}
- public void setParams(MappingParamSet params) {
+ public void setParams(MappingParamSet params) throws HandleOpenException {
super.setParams(params);
if (PARAM_SUFFIX.isPresent(this)) {
String suffix = PARAM_SUFFIX.getStringValue(this);
@@ -109,22 +114,23 @@
". Value set was '" + noauto + "'");
}
if (!suffix.startsWith(".") && noauto.equals("false")) {
- PARAM_SUFFIX.setValue(this, "." + suffix);
+ params.set(PARAM_SUFFIX, "." + suffix);
}
}
if (PARAM_PATTERN.isPresent(this)) {
String pattern = PARAM_PATTERN.getStringValue(this);
- PARAM_PATTERN.setValue(this, replaceWildcards(pattern));
+ params.set(PARAM_PATTERN, replaceWildcards(pattern));
}
+ location = PARAM_LOCATION.getStringValue(this);
+ prefix = PARAM_PREFIX.getStringValue(this);
+ suffix = PARAM_SUFFIX.getStringValue(this);
+ pattern = PARAM_PATTERN.getStringValue(this);
}
public PhysicalFormat map(Path path) {
if(logger.isDebugEnabled())
logger.debug("mapper id="+this.hashCode()+" starting to map "+path);
StringBuffer sb = new StringBuffer();
- final String location = PARAM_LOCATION.getStringValue(this);
- final String prefix = PARAM_PREFIX.getStringValue(this);
- final String suffix = PARAM_SUFFIX.getStringValue(this);
maybeAppend(sb, location);
if (location != null && !location.endsWith("/")) {
sb.append('/');
@@ -215,10 +221,6 @@
public Collection<Path> existing() {
if(logger.isDebugEnabled())
logger.debug("list existing paths for mapper id="+this.hashCode());
- final String location = PARAM_LOCATION.getStringValue(this);
- final String prefix = PARAM_PREFIX.getStringValue(this);
- final String suffix = PARAM_SUFFIX.getStringValue(this);
- final String pattern = PARAM_PATTERN.getStringValue(this);
List<Path> result = new ArrayList<Path>();
final AbsFile f;
@@ -287,21 +289,20 @@
public Path rmap(String name) {
logger.debug("rmap "+name);
- final String prefix = PARAM_PREFIX.getStringValue(this);
-
- if(prefix!=null) {
- if(name.startsWith(prefix)) {
+ if(prefix != null) {
+ if (name.startsWith(prefix)) {
name = name.substring(prefix.length());
- } else {
+ }
+ else {
throw new RuntimeException("filename '"+name+"' does not begin with prefix '"+prefix+"'");
}
}
- final String suffix = PARAM_SUFFIX.getStringValue(this);
- if(suffix!=null) {
- if(name.endsWith(suffix)) {
+ if(suffix != null) {
+ if (name.endsWith(suffix)) {
name = name.substring(0,name.length() - suffix.length());
- } else {
+ }
+ else {
throw new RuntimeException("filename '"+name+"' does not end with suffix '"+suffix+"'");
}
}
@@ -353,11 +354,11 @@
}
public String getLocation() {
- return PARAM_LOCATION.getStringValue(this);
+ return location;
}
public String getPrefix() {
- return PARAM_PREFIX.getStringValue(this);
+ return prefix;
}
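To make the cached prefix/suffix handling concrete, here is a standalone sketch (plain strings only; the file name, prefix and suffix values are made up) of the same normalization that setParams() applies and the stripping that rmap() performs:

public class RmapSketch {
    public static void main(String[] args) {
        String prefix = "run01_";
        String suffix = "dat";                       // no leading dot supplied
        boolean noauto = false;
        if (!suffix.startsWith(".") && !noauto) {
            suffix = "." + suffix;                   // same normalization as setParams()
        }
        String name = "run01_sample.dat";
        if (name.startsWith(prefix)) {
            name = name.substring(prefix.length());
        }
        if (name.endsWith(suffix)) {
            name = name.substring(0, name.length() - suffix.length());
        }
        System.out.println(name);                    // prints "sample"
    }
}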
Modified: branches/faster/src/org/griphyn/vdl/mapping/file/CSVMapper.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/file/CSVMapper.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/file/CSVMapper.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -33,6 +33,7 @@
import org.griphyn.vdl.mapping.AbstractMapper;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.GeneralizedFileFormat;
+import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.InvalidMappingParameterException;
import org.griphyn.vdl.mapping.Mapper;
import org.griphyn.vdl.mapping.MappingParam;
@@ -67,16 +68,24 @@
private List content = new ArrayList();
/** whether the CSV file has been read already. */
- private boolean read = false;
+ private boolean read = false;
+
+ private String delim, hdelim;
+ private boolean header;
+ private int skip;
- public void setParams(MappingParamSet params) {
+ public void setParams(MappingParamSet params) throws HandleOpenException {
super.setParams(params);
if (!PARAM_FILE.isPresent(this)) {
throw new InvalidMappingParameterException("CSV mapper must have a file parameter.");
}
- if (!PARAM_HDELIMITER.isPresent(this)) {
- PARAM_HDELIMITER.setValue(this, PARAM_DELIMITER.getValue(this));
- }
+ if (!PARAM_HDELIMITER.isPresent(this)) {
+ params.set(PARAM_HDELIMITER, PARAM_DELIMITER.getRawValue(this));
+ }
+ delim = PARAM_DELIMITER.getStringValue(this);
+ hdelim = PARAM_HDELIMITER.getStringValue(this);
+ header = PARAM_HEADER.getBooleanValue(this);
+ skip = PARAM_SKIP.getIntValue(this);
}
private synchronized void readFile() {
@@ -86,10 +95,6 @@
String file = getCSVFile();
- String delim = PARAM_DELIMITER.getStringValue(this);
- String hdelim = PARAM_HDELIMITER.getStringValue(this);
- boolean header = PARAM_HEADER.getBooleanValue(this);
- int skip = PARAM_SKIP.getIntValue(this);
try {
BufferedReader br =
new BufferedReader(new FileReader(file));
Modified: branches/faster/src/org/griphyn/vdl/mapping/file/ConcurrentMapper.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/file/ConcurrentMapper.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/file/ConcurrentMapper.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -26,6 +26,7 @@
import java.util.Map;
import java.util.Set;
+import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.Mapper;
import org.griphyn.vdl.mapping.MappingParam;
import org.griphyn.vdl.mapping.MappingParamSet;
@@ -41,7 +42,7 @@
super(new ConcurrentElementMapper());
}
- public void setParams(MappingParamSet params) {
+ public void setParams(MappingParamSet params) throws HandleOpenException {
String prefix = PARAM_PREFIX.getStringValue(params);
prefix = "_concurrent/" + (prefix == null ? "" : prefix + "-") +
PARAM_THREAD_PREFIX.getValue(params);
Modified: branches/faster/src/org/griphyn/vdl/mapping/file/ExternalMapper.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/file/ExternalMapper.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/file/ExternalMapper.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -25,14 +25,13 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
-import java.util.HashSet;
import java.util.List;
import java.util.Map;
-import java.util.Set;
import org.apache.log4j.Logger;
import org.griphyn.vdl.mapping.AbsFile;
import org.griphyn.vdl.mapping.AbstractMapper;
+import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.MappingParam;
import org.griphyn.vdl.mapping.MappingParamSet;
import org.griphyn.vdl.mapping.Path;
@@ -48,7 +47,7 @@
private static final String[] STRING_ARRAY = new String[0];
- public void setParams(MappingParamSet params) {
+ public void setParams(MappingParamSet params) throws HandleOpenException {
super.setParams(params);
map = new HashMap<Path, AbsFile>();
rmap = new HashMap<String, Path>();
Modified: branches/faster/src/org/griphyn/vdl/mapping/file/FileSystemArrayMapper.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/file/FileSystemArrayMapper.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/file/FileSystemArrayMapper.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -46,7 +46,7 @@
if (!path.isArrayIndex(0)) {
return null;
}
- String location = PARAM_LOCATION.getStringValue(this);
+ String location = getLocation();
Object index = path.getFirst();
String filename = filenames.get(index);
if (filename == null) {
Modified: branches/faster/src/org/griphyn/vdl/mapping/file/FixedArrayFileMapper.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/file/FixedArrayFileMapper.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/file/FixedArrayFileMapper.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -18,74 +18,53 @@
package org.griphyn.vdl.mapping.file;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collection;
-import java.util.Iterator;
import java.util.List;
-import java.util.Map;
-import java.util.Set;
import java.util.StringTokenizer;
-import java.util.TreeSet;
import org.griphyn.vdl.mapping.AbsFile;
import org.griphyn.vdl.mapping.AbstractMapper;
-import org.griphyn.vdl.mapping.DSHandle;
+import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.InvalidMappingParameterException;
import org.griphyn.vdl.mapping.MappingParam;
import org.griphyn.vdl.mapping.MappingParamSet;
import org.griphyn.vdl.mapping.Path;
import org.griphyn.vdl.mapping.PhysicalFormat;
-import org.griphyn.vdl.type.Types;
/** Maps a string (separated by space, comma or semicolon) of filenames to
an array. */
public class FixedArrayFileMapper extends AbstractMapper {
public static final MappingParam PARAM_FILES = new MappingParam("files");
- private List<String> files;
+ private String[] files;
+
public FixedArrayFileMapper() {
super();
}
- public void setParams(MappingParamSet params) {
+ public void setParams(MappingParamSet params) throws HandleOpenException {
super.setParams(params);
- DSHandle dn = (DSHandle) PARAM_FILES.getRawValue(this);
- if (dn == null) {
- throw new InvalidMappingParameterException("Missing required mapper parameter: "
- + PARAM_FILES);
+ String cfiles = PARAM_FILES.getStringValue(this);
+ if (cfiles == null) {
+ throw new InvalidMappingParameterException("Missing required mapper parameter: "
+ + PARAM_FILES);
}
- if (Types.STRING.equals(dn.getType())) {
- String cfiles = (String) dn.getValue();
-
- StringTokenizer st = new StringTokenizer(cfiles, " ,;");
- String[] files = new String[st.countTokens()];
- for (int i = 0; st.hasMoreTokens(); i++) {
- files[i] = st.nextToken();
- }
- this.files = Arrays.asList(files);
+ StringTokenizer st = new StringTokenizer(cfiles, " ,;");
+ files = new String[st.countTokens()];
+ for (int i = 0; st.hasMoreTokens(); i++) {
+ files[i] = st.nextToken();
}
- else if (dn.getType().isArray() && Types.STRING.equals(dn.getType().itemType())) {
- files = new ArrayList<String>();
- Map<?, DSHandle> m = dn.getArrayValue();
- // must keep order
- @SuppressWarnings("unchecked")
- Set<Comparable<?>> s = new TreeSet<Comparable<?>>((Set<Comparable<?>>) m.keySet());
- Iterator<?> i = s.iterator();
- while(i.hasNext()) {
- Comparable<?> nextKey = (Comparable<?>) i.next();
- files.add((String) m.get(nextKey).getValue());
- }
- }
- else {
- throw new InvalidMappingParameterException("Unrecognized value for "
- + PARAM_FILES + " parameter: " + dn.getType() + ". Valid values are a string or an array of strings.");
- }
+ params.set(PARAM_FILES, files);
}
+ protected String[] getFiles() {
+ return files;
+ }
+
public Collection<Path> existing() {
List<Path> l = new ArrayList<Path>();
- for (int i = 0; i < files.size(); i++) {
+ for (int i = 0; i < getFiles().length; i++) {
l.add(Path.EMPTY_PATH.addLast(i, true));
}
return l;
@@ -99,7 +78,7 @@
Object o = path.getFirst();
if (o instanceof Integer) {
int index = ((Integer) o).intValue();
- return new AbsFile(files.get(index));
+ return new AbsFile(getFiles()[index]);
}
else {
throw new IllegalArgumentException("The fixed array mapper can only be used with an int key array");
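For reference, a standalone illustration of the tokenization now done directly in setParams(): the files parameter is split on spaces, commas and semicolons, so the three separators below are interchangeable (the file names are made up):

import java.util.Arrays;
import java.util.StringTokenizer;

public class FixedArrayTokenSketch {
    public static void main(String[] args) {
        String cfiles = "a.dat, b.dat;c.dat";
        StringTokenizer st = new StringTokenizer(cfiles, " ,;");
        String[] files = new String[st.countTokens()];
        for (int i = 0; st.hasMoreTokens(); i++) {
            files[i] = st.nextToken();
        }
        System.out.println(Arrays.toString(files));   // [a.dat, b.dat, c.dat]
    }
}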
Modified: branches/faster/src/org/griphyn/vdl/mapping/file/RegularExpressionMapper.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/file/RegularExpressionMapper.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/file/RegularExpressionMapper.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -28,6 +28,7 @@
import org.griphyn.vdl.mapping.AbsFile;
import org.griphyn.vdl.mapping.AbstractMapper;
import org.griphyn.vdl.mapping.DSHandle;
+import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.MappingParam;
import org.griphyn.vdl.mapping.MappingParamSet;
import org.griphyn.vdl.mapping.Path;
@@ -36,16 +37,21 @@
public class RegularExpressionMapper extends AbstractMapper {
public static final MappingParam PARAM_SOURCE = new MappingParam("source");
public static final MappingParam PARAM_MATCH = new MappingParam("match");
- public static final MappingParam PARAM_TRANSFORM = new MappingParam("transform");
+ public static final MappingParam PARAM_TRANSFORM = new MappingParam("transform");
+
+ private String match, source, transform;
public RegularExpressionMapper() {
}
- public void setParams(MappingParamSet params) {
+ public void setParams(MappingParamSet params) throws HandleOpenException {
super.setParams(params);
if (!PARAM_MATCH.isPresent(this)) {
throw new RuntimeException("Missing parameter match!");
- }
+ }
+ match = PARAM_MATCH.getStringValue(this);
+ source = PARAM_SOURCE.getStringValue(this);
+ transform = PARAM_TRANSFORM.getStringValue(this);
}
public Collection<Path> existing() {
@@ -64,9 +70,7 @@
PARAM_MATCH.getName() + "; maybe you meant @filename(" + h.getPathFromRoot() + ")?");
}
}
- String match = PARAM_MATCH.getStringValue(this);
- String source = PARAM_SOURCE.getStringValue(this);
- String transform = PARAM_TRANSFORM.getStringValue(this);
+
Pattern p = Pattern.compile(match);
Matcher m = p.matcher(source);
if (!m.find()) {
@@ -104,7 +108,12 @@
params.put("transform", "\\1_area.\\2");
MappingParamSet mps = new MappingParamSet();
mps.setAll(params);
- reMapper.setParams(mps);
+ try {
+ reMapper.setParams(mps);
+ }
+ catch (HandleOpenException e) {
+ e.printStackTrace();
+ }
System.out.println(reMapper.map(Path.EMPTY_PATH));
}
}
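As a rough standalone approximation of the match/transform behavior that the main() method above exercises (only the transform value appears in this commit; the match pattern and source file name here are assumptions, and java.util.regex uses $1/$2 where the mapper parameter uses \1/\2):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class RegexpMapSketch {
    public static void main(String[] args) {
        String source = "2mass-j0700.fits";          // assumed input file name
        String match = "(.*)\\.(.*)";                // assumed match pattern
        String transform = "$1_area.$2";             // equivalent of \1_area.\2
        Matcher m = Pattern.compile(match).matcher(source);
        if (m.find()) {
            System.out.println(m.replaceFirst(transform));  // 2mass-j0700_area.fits
        }
    }
}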
Modified: branches/faster/src/org/griphyn/vdl/mapping/file/SimpleFileMapper.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/file/SimpleFileMapper.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/file/SimpleFileMapper.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -20,6 +20,7 @@
*/
package org.griphyn.vdl.mapping.file;
+import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.MappingParam;
import org.griphyn.vdl.mapping.MappingParamSet;
@@ -30,7 +31,7 @@
super();
}
- public void setParams(MappingParamSet params) {
+ public void setParams(MappingParamSet params) throws HandleOpenException {
super.setParams(params);
int precision = PARAM_PADDING.getIntValue(this);
setElementMapper(new DefaultFileNameElementMapper(precision));
Modified: branches/faster/src/org/griphyn/vdl/mapping/file/SingleFileMapper.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/file/SingleFileMapper.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/file/SingleFileMapper.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -23,7 +23,9 @@
import org.griphyn.vdl.mapping.AbsFile;
import org.griphyn.vdl.mapping.AbstractMapper;
+import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.MappingParam;
+import org.griphyn.vdl.mapping.MappingParamSet;
import org.griphyn.vdl.mapping.Path;
import org.griphyn.vdl.mapping.PhysicalFormat;
@@ -32,13 +34,21 @@
public class SingleFileMapper extends AbstractMapper {
public static final MappingParam PARAM_FILE = new MappingParam("file");
+
+ private AbsFile file;
public SingleFileMapper() {
super();
}
- public Collection<Path> existing() {
- if (new AbsFile(PARAM_FILE.getStringValue(this)).exists()) {
+ @Override
+ public void setParams(MappingParamSet params) throws HandleOpenException {
+ super.setParams(params);
+ file = new AbsFile(PARAM_FILE.getStringValue(this));
+ }
+
+ public Collection<Path> existing() {
+ if (file.exists()) {
return Arrays.asList(new Path[] {Path.EMPTY_PATH});
}
else {
@@ -47,7 +57,7 @@
}
public PhysicalFormat map(Path path) {
- return new AbsFile(PARAM_FILE.getStringValue(this));
+ return file;
}
public boolean isStatic() {
Modified: branches/faster/src/org/griphyn/vdl/mapping/file/StructuredRegularExpressionMapper.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/file/StructuredRegularExpressionMapper.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/file/StructuredRegularExpressionMapper.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -41,16 +41,21 @@
public static final MappingParam PARAM_SOURCE = new MappingParam("source");
public static final MappingParam PARAM_MATCH = new MappingParam("match");
- public static final MappingParam PARAM_TRANSFORM = new MappingParam("transform");
+ public static final MappingParam PARAM_TRANSFORM = new MappingParam("transform");
+
+ private String match, transform;
public StructuredRegularExpressionMapper() {
}
- public void setParams(MappingParamSet params) {
+ public void setParams(MappingParamSet params) throws HandleOpenException {
super.setParams(params);
if (!PARAM_MATCH.isPresent(this)) {
throw new RuntimeException("Missing parameter match!");
- }
+ }
+
+ match = PARAM_MATCH.getStringValue(this);
+ transform = PARAM_TRANSFORM.getStringValue(this);
}
public Collection<Path> existing() {
@@ -82,9 +87,6 @@
logger.debug("map(): path: " + path);
- String match = PARAM_MATCH.getStringValue(this);
- String transform = PARAM_TRANSFORM.getStringValue(this);
-
DSHandle sourceHandle = (DSHandle) PARAM_SOURCE.getRawValue(this);
DSHandle hereHandle;
try {
Modified: branches/faster/src/org/griphyn/vdl/mapping/file/TestMapper.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/mapping/file/TestMapper.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/mapping/file/TestMapper.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -22,8 +22,10 @@
import org.griphyn.vdl.mapping.AbsFile;
import org.griphyn.vdl.mapping.AbstractMapper;
+import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.Mapper;
import org.griphyn.vdl.mapping.MappingParam;
+import org.griphyn.vdl.mapping.MappingParamSet;
import org.griphyn.vdl.mapping.Path;
import org.griphyn.vdl.mapping.PhysicalFormat;
@@ -35,15 +37,27 @@
public static final MappingParam PARAM_STATIC = new MappingParam("static", true);
private PhysicalFormat remap, map;
+
+ private boolean remappable, temp, _static;
+ private String file;
@Override
+ public void setParams(MappingParamSet params) throws HandleOpenException {
+ super.setParams(params);
+ remappable = PARAM_REMAPPABLE.getBooleanValue(this);
+ temp = PARAM_TEMP.getBooleanValue(this);
+ _static = PARAM_STATIC.getBooleanValue(this);
+ file = PARAM_FILE.getStringValue(this);
+ }
+
+ @Override
public boolean canBeRemapped(Path path) {
- return PARAM_REMAPPABLE.getBooleanValue(this);
+ return remappable;
}
@Override
public void remap(Path path, Mapper sourceMapper, Path sourcePath) {
- if (PARAM_REMAPPABLE.getBooleanValue(this)) {
+ if (remappable) {
remap = sourceMapper.map(sourcePath);
System.out.println("Remapping " + path + " -> " + remap);
ensureCollectionConsistency(sourceMapper, sourcePath);
@@ -56,7 +70,7 @@
@Override
public void clean(Path path) {
PhysicalFormat pf = map(path);
- if (PARAM_TEMP.getBooleanValue(this)) {
+ if (temp) {
System.out.println("Cleaning file " + pf);
FileGarbageCollector.getDefault().decreaseUsageCount(pf);
}
@@ -67,13 +81,13 @@
@Override
public boolean isPersistent(Path path) {
- return !PARAM_TEMP.getBooleanValue(this);
+ return !temp;
}
public PhysicalFormat map(Path path) {
if (remap == null) {
if (map == null) {
- map = new AbsFile(PARAM_FILE.getStringValue(this));
+ map = new AbsFile(file);
}
return map;
}
@@ -87,6 +101,6 @@
}
public boolean isStatic() {
- return PARAM_STATIC.getBooleanValue(this);
+ return _static;
}
}
Modified: branches/faster/src/org/griphyn/vdl/util/VDL2Config.java
===================================================================
--- branches/faster/src/org/griphyn/vdl/util/VDL2Config.java 2013-01-29 04:30:59 UTC (rev 6169)
+++ branches/faster/src/org/griphyn/vdl/util/VDL2Config.java 2013-01-29 07:31:09 UTC (rev 6170)
@@ -115,7 +115,6 @@
put("cdm.broadcast.mode", "file");
put("use.provider.staging", "false");
- put("use.wrapper.staging", "false");
put("ticker.date.format", "");
put("ticker.prefix", "Progress: time:");