Index: pegasus-wms_4.0.1+dfsg/etc/advanced.properties
===================================================================
--- pegasus-wms_4.0.1+dfsg.orig/etc/advanced.properties	2012-05-24 16:47:52.064156782 -0700
+++ pegasus-wms_4.0.1+dfsg/etc/advanced.properties	2012-05-24 16:47:54.072166747 -0700
@@ -596,12 +596,10 @@
 # System   : Pegasus
 # Since    : 2.0
 # Type     : enumeration
-# Value[0] : RLS
-# Value[1] : LRC
-# Value[2] : JDBCRC
-# Value[3] : File
-# Value[4] : MRC
-# Default  : RLS
+# Value[0] : JDBCRC
+# Value[1] : File
+# Value[2] : MRC
+# Default  : File
 #
 # Pegasus queries a Replica Catalog to discover the physical filenames
 # (PFN) for input files specified in the DAX. Pegasus can interface
@@ -609,29 +607,6 @@
 # which type of Replica Catalog to use during the planning process.
 #
 # <variablelist>
-# <varlistentry><term>RLS</term>
-# <listitem> RLS (Replica Location Service) is a distributed replica
-#      catalog, which ships with GT4. There is an index service called
-#      Replica Location Index (RLI) to which 1 or more Local Replica
-#      Catalog (LRC) report. Each LRC can contain all or a subset of
-#      mappings. In this mode, Pegasus queries the central RLI to
-#      discover in which LRC's the mappings for a LFN reside. It then
-#      queries the individual LRC's for the PFN's.
-#      To use RLS, the user additionally needs to set the property
-#      pegasus.catalog.replica.url to specify the URL for the RLI to
-#      query. 
-#      Details about RLS can be found at
-#      http://www.globus.org/toolkit/data/rls/  
-# </listitem></varlistentry>
-# <varlistentry><term>LRC</term>
-# <listitem> If the user does not want to query the RLI, but directly a
-#      single Local Replica Catalog. 
-#      To use LRC, the user additionally needs to set the property
-#      pegasus.catalog.replica.url to specify the URL for the LRC to
-#      query. 
-#      Details about RLS can be found at
-#      http://www.globus.org/toolkit/data/rls/  
-# </listitem></varlistentry>
 # <varlistentry><term>JDBCRC</term>
 # <listitem> In this mode, Pegasus queries a SQL based replica catalog that
 #      is accessed via JDBC. The sql schema's for this catalog can be
Index: pegasus-wms_4.0.1+dfsg/etc/basic.properties
===================================================================
--- pegasus-wms_4.0.1+dfsg.orig/etc/basic.properties	2012-05-24 16:47:52.064156782 -0700
+++ pegasus-wms_4.0.1+dfsg/etc/basic.properties	2012-05-24 16:47:54.072166747 -0700
@@ -93,12 +93,10 @@
 # System   : Pegasus
 # Since    : 2.0
 # Type     : enumeration
-# Value[0] : RLS
-# Value[1] : LRC
-# Value[2] : JDBCRC
-# Value[3] : File
-# Value[4] : MRC
-# Default  : RLS
+# Value[0] : JDBCRC
+# Value[1] : File
+# Value[2] : MRC
+# Default  : File
 #
 # Pegasus queries a Replica Catalog to discover the physical filenames
 # (PFN) for input files specified in the DAX. Pegasus can interface
@@ -107,35 +105,6 @@
 #
 # <variablelist>
 # <varlistentry>
-# <term>RLS</term>
-# <listitem> 
-#      RLS (Replica Location Service) is a distributed replica
-#      catalog, which ships with GT4. There is an index service called
-#      Replica Location Index (RLI) to which 1 or more Local Replica
-#      Catalog (LRC) report. Each LRC can contain all or a subset of
-#      mappings. In this mode, Pegasus queries the central RLI to
-#      discover in which LRC's the mappings for a LFN reside. It then
-#      queries the individual LRC's for the PFN's.
-#      To use RLS, the user additionally needs to set the property
-#      pegasus.catalog.replica.url to specify the URL for the RLI to
-#      query. 
-#      Details about RLS can be found at
-#      http://www.globus.org/toolkit/data/rls/  
-# </listitem>
-# </varlistentry>
-# <varlistentry>
-# <term>LRC</term>
-# <listitem>
-#      If the user does not want to query the RLI, but directly a
-#      single Local Replica Catalog. 
-#      To use LRC, the user additionally needs to set the property
-#      pegasus.catalog.replica.url to specify the URL for the LRC to
-#      query. 
-#      Details about RLS can be found at
-#      http://www.globus.org/toolkit/data/rls/  
-# </listitem>
-# </varlistentry>
-# <varlistentry>
 # <term>JDBCRC</term>
 # <listitem> 
 #      In this mode, Pegasus queries a SQL based replica catalog that
Index: pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/replica/impl/LRC.java
===================================================================
--- pegasus-wms_4.0.1+dfsg.orig/src/edu/isi/pegasus/planner/catalog/replica/impl/LRC.java	2012-05-24 16:47:52.064156782 -0700
+++ /dev/null	1970-01-01 00:00:00.000000000 +0000
@@ -1,2922 +0,0 @@
-/**
- *  Copyright 2007-2008 University Of Southern California
- *
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package edu.isi.pegasus.planner.catalog.replica.impl;
-
-import edu.isi.pegasus.planner.catalog.replica.*;
-import edu.isi.pegasus.common.logging.LogManagerFactory;
-import edu.isi.pegasus.common.logging.LogManager;
-
-import edu.isi.pegasus.planner.catalog.ReplicaCatalog;
-import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry;
-import edu.isi.pegasus.planner.catalog.CatalogException;
-
-import edu.isi.pegasus.common.util.CommonProperties;
-
-import org.globus.replica.rls.RLSClient;
-import org.globus.replica.rls.RLSException;
-import org.globus.replica.rls.RLSAttribute;
-import org.globus.replica.rls.RLSAttributeObject;
-import org.globus.replica.rls.RLSString2Bulk;
-import org.globus.replica.rls.RLSString2;
-import org.globus.replica.rls.RLSOffsetLimit;
-
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.Properties;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Iterator;
-
-/**
- * This class implements the VDS replica catalog interface on top of the
- * LRC. This class implementation ends up talking to a single LRC.
- * It is accessed internally from the RLI implementation.
- * RLS Exceptions are being caught here. They probably should be thrown
- * and caught at the calling class (i.e the RLI implementation).
- *
- * @author Karan Vahi
- * @author Gaurang Mehta
- *
- * @version $Revision: 2585 $
- */
-public class LRC implements ReplicaCatalog {
-
-    /**
-     * The number of entries searched in each bulk query to RLS.
-     */
-    public static final int RLS_BULK_QUERY_SIZE = 1000;
-
-    /**
-     * The default timeout in seconds to be used while querying the LRC.
-     */
-    public static final String DEFAULT_LRC_TIMEOUT = "30";
-
-    /**
-     * The key that is used to get hold of the timeout value from the properties
-     * object.
-     */
-    public static final String RLS_TIMEOUT_KEY = "rls.timeout";
-
-
-    /**
-     * The key that is used to get hold of the timeout value from the properties
-     * object.
-     */
-    public static final String LRC_TIMEOUT_KEY = "lrc.timeout";
-
-    /**
-     * The properties key that allow us to associate a site with a LRC URL,
-     * and hence providing a value for the SITE_ATTRIBUTE. 
-     * User will specify lrc.site.isi_viz rls://isi.edu  to associate
-     * site isi_viz with rls://isi.edu
-     */
-    public static final String LRC_SITE_TO_LRC_URL_KEY = "lrc.site.";
-
-    /**
-     * The attribute in RLS that maps to a site handle.
-     */
-    public static final String SITE_ATTRIBUTE = ReplicaCatalogEntry.RESOURCE_HANDLE;
-
-    /**
-     * The undefined pool attribute value. The pool attribute is assigned this
-     * value if the pfn queried does not have a pool associated with it.
-     */
-    public static final String UNDEFINED_SITE = "UNDEFINED_POOL";
-
-    /**
-     * The key that is used to get hold of the url from the properties object.
-     */
-    public static final String URL_KEY = "url";
-
-    /**
-     * The key that if set, specifies the proxy to be picked up while connecting
-     * to the RLS.
-     */
-    public static final String PROXY_KEY = "proxy";
-
-    /**
-     * The error message for not connected to LRC.
-     */
-    public static final String LRC_NOT_CONNECTED_MSG = "Not connected to LRC ";
-
-
-    /**
-     * The handle to the logging object. Should be log4j soon.
-     */
-    private LogManager mLogger;
-
-    /**
-     * The string holding the message that is logged in the logger.
-     */
-    private String mLogMsg;
-
-    /**
-     * The URL pointing to the LRC to which this instance of class talks to.
-     */
-    private String mLRCURL;
-
-    /**
-     * The handle to the client that allows access to the RLS running at the
-     * url specified while connecting.
-     */
-    private RLSClient mRLS;
-
-    /**
-     * The handle to the client that allows access to the LRC running at the
-     * url specified while connecting.
-     */
-    private RLSClient.LRC mLRC;
-
-    /**
-     * The batch size while querying the LRC in the bulk mode.
-     */
-    private int mBatchSize;
-
-    /**
-     * The timeout in seconds while querying to the LRC.
-     */
-    private int mTimeout;
-    
-    /**
-     * The default site attribute to be associated with the results.
-     */
-    private String mDefaultSiteAttribute;
-
-    /**
-     * The default constructor, that creates an object which is not linked with
-     * any RLS. Use the connect method to connect to the LRC and use it.
-     *
-     * @see #connect(Properties).
-     */
-    public LRC() {
-        mRLS = null;
-        mLRC = null;
-        mLogger =  LogManagerFactory.loadSingletonInstance();
-        mBatchSize = LRC.RLS_BULK_QUERY_SIZE;
-        mTimeout   = Integer.parseInt(LRC.DEFAULT_LRC_TIMEOUT);
-    }
-
-    /**
-     * Establishes a connection to the LRC.
-     *
-     * @param props contains all necessary data to establish the link.
-     * @return true if connected now, or false to indicate a failure.
-     */
-    public boolean connect(Properties props) {
-        boolean con = false;
-        Object obj = props.get(URL_KEY);
-        mLRCURL = (obj == null) ? null : (String) obj;
-
-        if (mLRCURL == null) {
-            //nothing to connect to.
-            log("The LRC url is not specified",
-                LogManager.ERROR_MESSAGE_LEVEL);
-            return con;
-        }
-
-        //try to see if a proxy cert has been specified or not
-        String proxy = props.getProperty(PROXY_KEY);
-
-        //determine timeout
-        mTimeout = getTimeout(props);
-
-        //set the batch size for querie
-        setBatchSize(props);
-        
-        //stripe out the properties that assoicate site handle to lrc url
-        Properties site2LRC = CommonProperties.matchingSubset( props, LRC.LRC_SITE_TO_LRC_URL_KEY, false);
-        //traverse through the properties to figure out
-        //the default site attribute for the URL
-        for( Iterator it = site2LRC.entrySet().iterator(); it.hasNext(); ){
-            Map.Entry<String,String> entry = (Map.Entry<String,String>)it.next();
-            if( entry.getValue().equalsIgnoreCase( mLRCURL ) ){
-                mDefaultSiteAttribute = entry.getKey();
-            }
-        }
-        
-        if( mDefaultSiteAttribute != null ){
-            mLogger.log( "Default Site attribute is " + mDefaultSiteAttribute,
-                         LogManager.DEBUG_MESSAGE_LEVEL );
-        }
-        
-        return connect(mLRCURL, proxy);
-    }
-
-    /**
-     * Establishes a connection to the LRC, picking up the proxy from the default
-     * location usually /tmp/ directory.
-     *
-     * @param url    the url to lrc to connect to.
-     *
-     * @return true if connected now, or false to indicate a failure.
-     */
-    public boolean connect(String url) {
-        return connect(url,null);
-    }
-
-
-    /**
-     * Establishes a connection to the LRC.
-     *
-     * @param url    the url to lrc to connect to.
-     * @param proxy  the path to the proxy file to be picked up. null denotes
-     *               default location.
-     *
-     * @return true if connected now, or false to indicate a failure.
-     *
-     * @throws ReplicaCatalogException in case of
-     */
-    public boolean connect(String url, String proxy) {
-        mLRCURL = url;
-        try {
-            mRLS = (proxy == null) ?
-                new RLSClient(url) : //proxy is picked up from default loc /tmp
-                new RLSClient(url, proxy);
-
-            //set the timeout
-            mRLS.SetTimeout(mTimeout);
-
-            //connect is only successful if we have
-            //successfully connected to the LRC
-            mLRC = mRLS.getLRC();
-
-        }
-        catch (RLSException e) {
-            log("RLS Exception", e,LogManager.ERROR_MESSAGE_LEVEL);
-            return false;
-        }
-        return true;
-    }
-
-    /**
-     * Gets a handle to the LRC that is associated with the RLS running at
-     * url.
-     *
-     * @return <code>RLSClient.LRC</code> that points to the RLI that is
-     *         running , or null in case connect method not being called.
-     * @see #mLRCURL
-     */
-    public RLSClient.LRC getLRC() {
-        return (isClosed()) ? null : mLRC;
-    }
-
-    /**
-     * Retrieves the entry for a given filename and resource handle from
-     * the LRC.
-     *
-     * @param lfn is the logical filename to obtain information for.
-     * @param handle is the resource handle to obtain entries for.
-     * @return the (first) matching physical filename, or
-     * <code>null</code> if no match was found.
-     *
-     * @throws ReplicaCatalogException in case of any error that is throw by LRC
-     *                                that can't be handled.
-     */
-    public String lookup(String lfn, String handle) {
-        //sanity check
-        if (this.isClosed()) {
-            throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL);
-        }
-        String pfn = null;
-        String site = null;
-
-        //query the lrc
-        try {
-            List l = mLRC.getPFN(lfn);
-            for (Iterator it = l.iterator(); it.hasNext(); ) {
-                //query for the pool attribute
-                pfn = ( (RLSString2) it.next()).s2;
-                site = getSiteHandle(pfn);
-                if (site.equalsIgnoreCase(handle)) {
-                    //ok we have the first pfn with for the site and lfn
-                    break;
-                }
-            }
-        }
-        catch (RLSException ex) {
-            if(ex.GetRC() == RLSClient.RLS_LFN_NEXIST ||
-               ex.GetRC() == RLSClient.RLS_MAPPING_NEXIST){
-                   pfn = null;
-                }
-                else{
-                    throw exception("lookup(String,String)", ex);
-                }
-        }
-
-        return pfn;
-    }
-
-    /**
-     * Retrieves all entries for a given LFN from the LRC.
-     * Each entry in the result set is a tuple of a PFN and all its
-     * attributes.
-     *
-     * @param lfn is the logical filename to obtain information for.
-     * @return a collection of replica catalog entries,  or null in case of
-     *         unable to connect to RLS or error.
-     *
-     * @throws ReplicaCatalogException in case of any error that is throw by LRC
-     *                                that can't be handled.
-     * @see ReplicaCatalogEntry
-     */
-    public Collection lookup(String lfn) throws ReplicaCatalogException {
-        //sanity check
-        if (this.isClosed()) {
-            throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL);
-        }
-
-        List res = new ArrayList(3);
-
-        //query the lrc
-        try {
-            List l = mLRC.getPFN(lfn);
-            for (Iterator it = l.iterator(); it.hasNext(); ) {
-                String pfn = ( (RLSString2) it.next()).s2;
-                //get hold of all attributes
-                ReplicaCatalogEntry entry = new ReplicaCatalogEntry(pfn,
-                    getAttributes(pfn));
-                res.add(entry);
-            }
-        }
-        catch (RLSException ex) {
-            if(ex.GetRC() == RLSClient.RLS_LFN_NEXIST ||
-               ex.GetRC() == RLSClient.RLS_MAPPING_NEXIST){
-                    log("Mapping for lfn " + lfn + " does not exist",
-                        LogManager.DEBUG_MESSAGE_LEVEL);
-                }
-                else{
-                    throw exception("lookup(String)", ex);
-                }
-        }
-
-        return res;
-
-    }
-
-    /**
-     * Retrieves all entries for a given LFN from the replica catalog.
-     * Each entry in the result set is just a PFN string. Duplicates
-     * are reduced through the set paradigm.
-     *
-     * @param lfn is the logical filename to obtain information for.
-     * @return a set of PFN strings, or null in case of unable to connect
-     *         to RLS.
-     *
-     */
-    public Set lookupNoAttributes(String lfn) {
-        //sanity check
-        if (this.isClosed()) {
-            throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL);
-        }
-
-        Set res = new HashSet(3);
-
-        //query the lrc
-        try {
-            List l = mLRC.getPFN(lfn);
-            for (Iterator it = l.iterator(); it.hasNext(); ) {
-                String pfn = ( (RLSString2) it.next()).s2;
-                res.add(pfn);
-            }
-        }
-        catch (RLSException ex) {
-            //am not clear whether to throw the exception or what
-            log("lookup(String,String):", ex,LogManager.ERROR_MESSAGE_LEVEL);
-            return null;
-        }
-
-        return res;
-
-    }
-
-    /**
-     * Retrieves multiple entries for a given logical filename, up to the
-     * complete LRC. It uses the bulk query api to the LRC to query for stuff.
-     * Bulk query has been in RLS since version 2.0.8. Internally, the bulk
-     * queries are done is sizes specified by variable mBatchSize.
-     *
-     * @param lfns is a set of logical filename strings to look up.
-     * @return a map indexed by the LFN. Each value is a collection
-     * of replica catalog entries for the LFN.
-     * @see ReplicaCatalogEntry
-     * @see #getBatchSize()
-     */
-    public Map lookup(Set lfns) throws ReplicaCatalogException {
-        //one has to do a bulk query in batches
-        Set s = null;
-        int size = mBatchSize;
-        Map map = new HashMap(lfns.size());
-
-        log("Number of files to query LRC " + lfns.size() +
-            " in batch sizes of " + size, LogManager.DEBUG_MESSAGE_LEVEL);
-
-        for (Iterator it = lfns.iterator(); it.hasNext(); ) {
-            s = new HashSet(size);
-            for (int j = 0; (j < size) && (it.hasNext()); j++) {
-                s.add(it.next());
-            }
-            if (!s.isEmpty()) {
-                //there is no conflict, as the keys are unique
-                //via the set paradigm. Passing null as we want
-                //to get hold of all attributes.
-                map.putAll(bulkLookup(s, null));
-            }
-
-        }
-
-        return map;
-    }
-
-    /**
-     * Retrieves multiple entries for a given logical filename, up to the
-     * complete LRC.
-     *
-     * The <code>noAttributes</code> flag is missing on purpose, because
-     * due to the resource handle, attribute lookups are already required.
-     *
-     * @param lfns is a set of logical filename strings to look up.
-     * @param handle is the resource handle, restricting the LFNs.
-     * @return a map indexed by the LFN. Each value is a collection
-     * of replica catalog entries.
-     *
-     * @see ReplicaCatalogEntry
-     */
-    public Map lookup(Set lfns, String handle) {
-        return lookup(lfns,SITE_ATTRIBUTE,handle);
-    }
-
-    /**
-     * Retrieves multiple entries for a given logical filename, up to the
-     * complete LRC. It returns the complete RCE for each entry i.e all the
-     * attributes a pfn is associated with in addition to the one that is
-     * the key for matching.
-     *
-     * @param lfns is a set of logical filename strings to look up.
-     * @param name is the name of the attribute.
-     * @param value is the value of the attribute.
-     *
-     * @return a map indexed by the LFN. Each value is a collection
-     * of replica catalog entries.
-     *
-     * @see ReplicaCatalogEntry
-     */
-    public Map lookup(Set lfns, String name, Object value) {
-        //one has to do a bulk query in batches
-        Set s = null;
-        int size = mBatchSize;
-        Map map = new HashMap(lfns.size());
-
-        log("Number of files to query LRC " + lfns.size() +
-            " in batch sizes of " + size, LogManager.DEBUG_MESSAGE_LEVEL);
-
-        for (Iterator it = lfns.iterator(); it.hasNext(); ) {
-            s = new HashSet(size);
-            for (int j = 0; (j < size) && (it.hasNext()); j++) {
-                s.add(it.next());
-            }
-            if (!s.isEmpty()) {
-                //there is no conflict, as the keys are unique
-                //via the set paradigm.
-                //temp contains results indexed by lfn but each value
-                //is a collection of ReplicaCatalogEntry objects
-                //we query for all attributes as we are to return
-                //complete RCE as stipulated by the interface.
-                Map temp = bulkLookup(s, null);
-                //iterate thru it
-                for (Iterator it1 = temp.entrySet().iterator(); it1.hasNext(); ) {
-                    Map.Entry entry = (Map.Entry) it1.next();
-                    Set pfns = subset( (Collection) entry.getValue(),
-                                               name, value);
-                    if (!pfns.isEmpty()) {
-                        map.put(entry.getKey(), pfns);
-                    }
-                }
-            }
-
-        }
-
-        return map;
-    }
-
-
-    /**
-     * Retrieves multiple entries for a given logical filename, up to the
-     * complete catalog. Retrieving full catalogs should be harmful, but
-     * may be helpful in an online display or portal.
-     *
-     * @param lfns is a set of logical filename strings to look up.
-     * @return a map indexed by the LFN. Each value is a set
-     * of PFN strings.
-     */
-    public Map lookupNoAttributes(Set lfns) {
-        //one has to do a bulk query in batches
-        Set s = null;
-        int size = mBatchSize;
-        size = (size > lfns.size())?lfns.size():size;
-        Map map = new HashMap(lfns.size());
-
-        log("Number of files to query LRC " + lfns.size() +
-            " in batch sizes of " + size,LogManager.DEBUG_MESSAGE_LEVEL);
-
-        for (Iterator it = lfns.iterator(); it.hasNext(); ) {
-            s = new HashSet(size);
-            for (int j = 0; (j < size) && (it.hasNext()); j++) {
-                s.add(it.next());
-            }
-            if (!s.isEmpty()) {
-                //there is no conflict, as the keys are unique
-                //via the set paradigm.
-                map.putAll(bulkLookupNoAttributes(s));
-            }
-
-        }
-
-        return map;
-    }
-
-    /**
-     * Retrieves multiple entries for a given logical filename, up to the
-     * complete catalog. Retrieving full catalogs should be harmful, but
-     * may be helpful in online display or portal.<p>
-     *
-     * @param lfns is a set of logical filename strings to look up.
-     * @param handle is the resource handle, restricting the LFNs.
-     *
-     * @return a map indexed by the LFN. Each value is a set of
-     * physical filenames.
-     */
-    public Map lookupNoAttributes(Set lfns, String handle) {
-        return lookupNoAttributes(lfns,SITE_ATTRIBUTE,handle);
-    }
-
-    /**
-     * Retrieves multiple entries for a given logical filename, up to the
-     * complete catalog. Retrieving full catalogs should be harmful, but
-     * may be helpful in online display or portal.<p>
-     *
-     * @param lfns is a set of logical filename strings to look up.
-     * @param name is the PFN attribute name to look for.
-     * @param value is an exact match of the attribute value to match.
-     *
-     * @return a map indexed by the LFN. Each value is a set of
-     * physical filenames.
-     */
-    public Map lookupNoAttributes(Set lfns, String name, Object value) {
-        //one has to do a bulk query in batches
-        Set s = null;
-        Collection c ;
-        int size = mBatchSize;
-        Map map = new HashMap(lfns.size());
-
-        log("Number of files to query LRC " + lfns.size() +
-            " in batch sizes of " + size,LogManager.DEBUG_MESSAGE_LEVEL);
-
-        for (Iterator it = lfns.iterator(); it.hasNext(); ) {
-            s = new HashSet(size);
-            for (int j = 0; (j < size) && (it.hasNext()); j++) {
-                s.add(it.next());
-            }
-            if (!s.isEmpty()) {
-                //there is no conflict, as the keys are unique
-                //via the set paradigm.
-                //temp contains results indexed by lfn but each value
-                //is a collection of ReplicaCatalogEntry objects
-                Map temp = bulkLookup(s, name,value);
-                //iterate thru it
-                for (Iterator it1 = temp.entrySet().iterator(); it1.hasNext(); ) {
-                    Map.Entry entry = (Map.Entry) it1.next();
-                    c = (Collection) entry.getValue();
-                    //System.out.println("Entry is " + entry);
-                    Set pfns = new HashSet(c.size());
-                    for(Iterator cit = c.iterator();cit.hasNext();){
-                        pfns.add( ((ReplicaCatalogEntry)cit.next()).getPFN());
-                    }
-                    if (!pfns.isEmpty()) {
-                        map.put(entry.getKey(), pfns);
-                    }
-                }
-            }
-
-        }
-
-        return map;
-    }
-
-    /**
-     * Retrieves multiple entries for a given logical filename, up to the
-     * complete catalog. Retrieving full catalogs should be harmful, but
-     * may be helpful in online display or portal.<p>
-     *
-     * At present it DOES NOT SUPPORT ATTRIBUTE MATCHING.
-     *
-     * @param constraints is mapping of keys 'lfn', 'pfn', to a string that
-     * has some meaning to the implementing system. This can be a SQL
-     * wildcard for queries, or a regular expression for Java-based memory
-     * collections. Unknown keys are ignored. Using an empty map requests
-     * the complete catalog.
-     *
-     * @return a map indexed by the LFN. Each value is a collection
-     * of replica catalog entries.
-     *
-     * @see ReplicaCatalogEntry
-     */
-    public Map lookup(Map constraints) throws ReplicaCatalogException{
-        return (constraints.isEmpty())?
-            lookup(list()):
-            getAttributes(lookupLFNPFN(constraints),null,null);
-    }
-
-    /**
-     * Retrieves multiple entries for a given logical filename, up to the
-     * complete catalog. Retrieving full catalogs should be harmful, but
-     * may be helpful in online display or portal. At present it does not
-     * support attribute matching.
-     *
-     * @param constraints is mapping of keys 'lfn', 'pfn', or any
-     * attribute name, e.g. the resource handle 'site', to a string that
-     * has some meaning to the implementing system. This can be a SQL
-     * wildcard for queries, or a regular expression for Java-based memory
-     * collections. Unknown keys are ignored. Using an empty map requests
-     * the complete catalog.
-     *
-     * @return A list of <code>MyRLSString2Bulk</code> objects containing
-     *         the lfn in s1 field, and pfn in s2 field. The list is
-     *         grouped by lfns. The set may be empty.
-     */
-    public List lookupLFNPFN(Map constraints) {
-        if(isClosed()){
-            //not connected to LRC
-            //throw an exception??
-            throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL);
-        }
-        /*
-        if(constraints == null || constraints.isEmpty()){
-            //return the set of all LFNs in the catalog
-            return list();
-        }*/
-
-        List result = new ArrayList();
-        boolean notFirst = false;
-
-        for (Iterator i = constraints.keySet().iterator(); i.hasNext(); ) {
-            String key = (String) i.next();
-            if ( key.equals("lfn") ) {
-                if(notFirst){
-                    //do the AND(intersect)operation
-                    result.retainAll(listLFNPFN((String)constraints.get(key),true));
-                }
-                else{
-                    result = listLFNPFN( (String) constraints.get(key), true);
-                }
-            }
-            else if ( key.equals("pfn") ) {
-                if(notFirst){
-                    //do the AND(intersect)operation
-                    result.retainAll(listLFNPFN((String)constraints.get(key),false));
-                }
-                else{
-                    result = listLFNPFN( (String) constraints.get(key), false);
-                }
-            }
-            else{
-                //just a warning
-                log("Implementation does not support constraint " +
-                    "matching of type " + key,
-                    LogManager.WARNING_MESSAGE_LEVEL);
-            }
-            if(result.isEmpty()){
-                //the intersection is already empty. No use matching further
-                break;
-            }
-            notFirst = true;
-        }
-
-        //sort according to lfn
-        Collections.sort(result,new RLSString2BulkComparator());
-
-        return result;
-    }
-
-    /**
-     * Lists all logical filenames in the catalog.
-     *
-     * @return a set of all logical filenames known to the catalog or null in
-     *         case of not connected to the LRC or error.
-     */
-    public Set list() {
-        return list("*");
-    }
-
-
-    /**
-     * Lists a subset of all logical filenames in the catalog.
-     *
-     * @param constraint is a constraint for the logical filename only. It
-     * is a string that has some meaning to the implementing system. This
-     * can be a SQL wildcard for queries, or a regular expression for
-     * Java-based memory collections.
-     *
-     * @return A set of logical filenames that match. The set may be empty
-     */
-    public Set list(String constraint) {
-        List l = listLFNPFN(constraint,true);
-        Set result = new HashSet(l.size());
-        for(Iterator it = l.iterator();it.hasNext();){
-            RLSString2Bulk rs = (RLSString2Bulk)it.next();
-            result.add(rs.s1);
-        }
-        return result;
-    }
-
-    /**
-     * Lists a subset of all LFN,PFN pairs in the catalog matching to
-     * a pfn or a lfn constraint.
-     *
-     * @param constraint is a constraint for the logical filename only. It
-     * is a string that has some meaning to the implementing system. This
-     * can be a SQL wildcard for queries, or a regular expression for
-     * Java-based memory collections.
-     *
-     * @return A set a list of <code>MyRLSString2Bulk</code> objects containing
-     *         the lfn in s1 field, and pfn in s2 field. The list is
-     *         grouped by lfns. The set may be empty.
-     *
-     * @see #getAttributes(List,String,Object)
-     */
-    public List listLFNPFN( String constraint, boolean lfnConstraint )
-           throws ReplicaCatalogException{
-        if(isClosed()){
-            //not connected to LRC
-            //throw an exception??
-            throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL);
-        }
-
-        int size = getBatchSize();
-        List l   = new ArrayList();
-        ArrayList result = new ArrayList();
-        int capacity = size;
-
-        //do a wildcard query in batch sizes
-        RLSOffsetLimit offset = new RLSOffsetLimit(0,size);
-        while(true){
-            try{
-                l = (lfnConstraint)?
-                    //do lfn matching
-                    mLRC.getPFNWC(constraint, offset):
-                    //do pfn matching
-                    mLRC.getLFNWC(constraint, offset);
-
-                    //we need to group pfns by lfn
-                Collections.sort(l, new RLSString2Comparator());
-            }
-            catch(RLSException e){
-                if(e.GetRC() == RLSClient.RLS_PFN_NEXIST ||
-                   e.GetRC() == RLSClient.RLS_LFN_NEXIST ||
-                   e.GetRC() == RLSClient.RLS_MAPPING_NEXIST){
-                    log("listLFNPFN(String, boolean) :Mapping matching constraint " +
-                        constraint + " does not exist",LogManager.ERROR_MESSAGE_LEVEL);
-                }
-                else{
-                    //am not clear whether to throw the exception or what
-                    log("list()", e,LogManager.ERROR_MESSAGE_LEVEL);
-                }
-                //return empty list
-                return new ArrayList(0);
-            }
-            //result = new ArrayList(l.size());
-            //increment the size of the list
-            //but first the capacity
-            capacity += l.size();
-            result.ensureCapacity(capacity);
-
-            for(Iterator it = l.iterator();it.hasNext();){
-                RLSString2 res = (RLSString2)it.next();
-                result.add(convert(res));
-            }
-            if(offset.offset == -1)
-                break;//offset is set to -1 when no more results
-
-        }
-
-        return result;
-
-    }
-
-
-    /**
-     * Inserts multiple mappings into the replica catalog. The input is a
-     * map indexed by the LFN. The value for each LFN key is a collection
-     * of replica catalog entries. Ends up doing a sequential insert for all
-     * the entries instead of doing a bulk insert. Easier to track failure this
-     * way.
-     *
-     * @param x is a map from logical filename string to list of replica
-     * catalog entries.
-     * @return the number of insertions.
-     * @see ReplicaCatalogEntry
-     */
-    public int insert(Map x) {
-        int result = 0;
-        String lfn;
-        ReplicaCatalogEntry rce = null;
-
-//        Not doing sequential inserts any longer
-//        Karan April 9, 2006
-//        Collection c;
-//        for(Iterator it = x.entrySet().iterator();it.hasNext();){
-//            Map.Entry entry = (Map.Entry)it.next();
-//            lfn = (String)entry.getKey();
-//            c   = (Collection)entry.getValue();
-//            log("Inserting entries for lfn " + lfn,
-//                LogManager.DEBUG_MESSAGE_LEVEL);
-//            for(Iterator pfnIt = c.iterator();pfnIt.hasNext();){
-//                try{
-//                    rce = (ReplicaCatalogEntry)pfnIt.next();
-//                    insert(lfn,rce);
-//                    res += 1;
-//                }
-//                catch(ReplicaCatalogException e){
-//                    log("Inserting lfn->pfn " +
-//                        lfn + "->" + rce.getPFN(),e,
-//                        LogManager.ERROR_MESSAGE_LEVEL);
-//                }
-//            }
-//        }
-//        return res;
-
-        int size = this.getBatchSize();
-        int current = 0;
-        String pfn;
-        List lfnPfns = new ArrayList(size);
-        List attrs   = new ArrayList(size);
-        CatalogException exception = new ReplicaCatalogException();
-
-        //indexed by pfn and values as RLSAttributeObject objects
-        Map attrMap  = new HashMap(size);
-
-        for (Iterator it = x.entrySet().iterator(); it.hasNext(); ) {
-            Map.Entry entry = (Map.Entry)it.next();
-            lfn = (String)entry.getKey();
-            Collection c = (Collection)entry.getValue();
-
-            //traverse through the rce's for the pfn's
-            for(Iterator pfnIt = c.iterator();pfnIt.hasNext();){
-                rce = (ReplicaCatalogEntry)pfnIt.next();
-                pfn = rce.getPFN();
-                lfnPfns.add(new RLSString2(lfn,pfn));
-
-                //increment current only once per pfn
-                if(rce.getAttributeCount() == 0)current++;
-
-                //build the attributes list
-                for(Iterator attrIt = rce.getAttributeIterator(); attrIt.hasNext();current++){
-                    String key   = (String)attrIt.next();
-                    RLSAttribute attr = new RLSAttribute(key,RLSAttribute.LRC_PFN,
-                                                         (String)rce.getAttribute(key));
-                    attrs.add(new RLSAttributeObject(attr,pfn));
-                    attrMap.put(pfn,new RLSAttributeObject(attr,pfn));
-                }
-            }
-            //check if diff is more than batch size
-            if( current >= size){
-                //we have the subset of RCE's on which we
-                //want to do bulk inserts, and the value till
-                //we want to do bulk inserts
-                try{
-                    result += bulkInsert(lfnPfns, attrMap);
-                }
-                catch(ReplicaCatalogException e){exception.setNextException(e);}
-
-                //reset data structures
-                current = 0;
-                lfnPfns.clear();
-                attrs.clear();
-                attrMap.clear();
-            }
-        }
-        //check for the last bunch
-        if(!lfnPfns.isEmpty()){
-            //we have the subset of RCE's on which we
-            //want to do bulk inserts, and the value till
-            //we want to do bulk inserts
-            try{
-                result += bulkInsert(lfnPfns, attrMap);
-            }catch(ReplicaCatalogException e){exception.setNextException(e);}
-            current = 0;
-        }
-
-        //throw an exception only if a nested exception
-        if( (exception = exception.getNextException()) != null) throw exception;
-
-        return result;
-    }
-
-    /**
-     * Calls the bulk delete on the mappings. This function can timeout if the
-     * size of the list passed is too large.
-     *
-     * @param lfnPfns  list of <code>RLSString2</code> objects containing the
-     *                 lfn pfn mappings to be deleted.
-     *
-     * @return the number of items deleted
-     *
-     * @throws ReplicaCatalogException in case of error
-     */
-    private int bulkDelete(List lfnPfns) throws ReplicaCatalogException{
-        if(isClosed()){
-            //not connected to LRC
-            //throw an exception??
-            throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL);
-        }
-
-        //result only tracks successful lfn->pfn mappings
-        int result = lfnPfns.size();
-        Collection failedDeletes;
-        CatalogException exception = new ReplicaCatalogException();
-
-        //do a bulk delete
-        //FIX ME: The deletes should have been done in batches.
-        try{
-            failedDeletes = mLRC.deleteBulk( (ArrayList)lfnPfns);
-        }
-        catch(RLSException e){
-            mLogger.log("RLS: Bulk Delete " ,e , LogManager.ERROR_MESSAGE_LEVEL);
-            throw new ReplicaCatalogException("RLS: Bulk Delete " + e.getMessage());
-        }
-
-        if(!failedDeletes.isEmpty()){
-            result -= failedDeletes.size();
-            //FIXME: Do we really care about failed deletes
-            //and reporting why deletes failed.
-            // i think we do.
-            RLSString2Bulk rs;
-            int error;
-            for(Iterator it = failedDeletes.iterator();it.hasNext();){
-                rs = (RLSString2Bulk)it.next();
-                error = rs.rc;
-
-                if(error == RLSClient.RLS_PFN_NEXIST ||
-                   error == RLSClient.RLS_LFN_NEXIST ||
-                   error == RLSClient.RLS_MAPPING_NEXIST){
-
-                    log("Mapping " + rs.s1 + "->" + rs.s2 +
-                        " does not exist",LogManager.DEBUG_MESSAGE_LEVEL);
-                }
-                else{
-                    exception.setNextException(exception(rs));
-                }
-            }
-
-        }
-
-        //throw an exception only if a nested exception
-        if( (exception = exception.getNextException()) != null) throw exception;
-
-        return result;
-
-    }
-
-    /**
-     * Calls the bulk insert on the mappings. This function can timeout if the
-     * size of the list passed is too large.
-     *
-     * @param lfnPfns  list of <code>RLSString2</code> objects containing the
-     *                 lfn pfn mappings to be inserted.
-     * @param attrMap  a map indexed by pfn and values as RLSAttributeObject objects.
-     *
-     * @return the number of items inserted
-     *
-     * @throws ReplicaCatalogException in case of error
-     */
-    private int bulkInsert(List lfnPfns, Map attrMap){
-        if(isClosed()){
-            //not connected to LRC
-            //throw an exception??
-            throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL);
-        }
-
-        //result only tracks successful lfn->pfn mappings
-        int result = lfnPfns.size();
-
-        List failedCreates;
-        List failedAdds;
-        CatalogException exception = new ReplicaCatalogException();
-
-
-        try{
-            /* bulk insert on mappings starts*/
-            failedCreates = mLRC.createBulk( (ArrayList) lfnPfns);
-
-            //try to do a bulkAdd on the failed creates
-            List bulkAdd = new ArrayList(failedCreates.size());
-            for (Iterator it = failedCreates.iterator(); it.hasNext(); ) {
-                RLSString2Bulk rs = (RLSString2Bulk) it.next();
-                if (rs.rc == RLSClient.RLS_LFN_EXIST) {
-                    //s1 is lfn and s2 is pfn
-                    bulkAdd.add(new RLSString2(rs.s1, rs.s2));
-                }
-                else {
-                    exception.setNextException(exception(rs));
-                    result--;
-                }
-            }
-
-            //do a bulk add if list non empty
-            if (!bulkAdd.isEmpty()) {
-                failedAdds = mLRC.addBulk( (ArrayList) bulkAdd);
-                //pipe all the failed adds to the exception
-                for (Iterator it = failedAdds.iterator(); it.hasNext(); ) {
-                    RLSString2Bulk rs = (RLSString2Bulk) it.next();
-
-                    //just log that mapping already exists
-                    if (rs.rc == RLSClient.RLS_MAPPING_EXIST) {
-                        //we want to log instead of throwning an exception
-                        log("LFN-PFN Mapping alreadys exists in LRC "
-                            + mLRCURL + " for " + rs.s1 + "->" + rs.s2,
-                            LogManager.DEBUG_MESSAGE_LEVEL);
-                        result --;
-
-                    }
-                    else {
-                        exception.setNextException(exception(rs));
-                        result--;
-                    }
-                }
-            }
-            /*bulk insert on mappings ends */
-
-            /*bulk insert on attributes starts */
-            ArrayList failedAttrs;//the failed attributes
-
-            //build the attribute list
-            ArrayList attrs = new ArrayList(attrMap.size());
-            int num = 0;
-            for(Iterator it = attrMap.values().iterator();it.hasNext();num++){
-                attrs.add(it.next());
-            }
-
-            //try a bulk add on attributes assuming attrs already exist
-            failedAttrs = mLRC.attributeAddBulk((ArrayList)attrs);
-
-            //go through the failed attributes and create them
-            for(Iterator it = failedAttrs.iterator();it.hasNext();){
-                RLSString2Bulk s2b = (RLSString2Bulk)it.next();
-                /*
-                RLSAttribute attributeToAdd =
-                                 new RLSAttribute(s2b.s2,RLSAttribute.LRC_PFN,
-                                                  (String)tuple.getAttribute(s2b.s2));
-                */
-
-
-                //s1 is the pfn
-                //s2 is the attribute name
-                String pfn = s2b.s1;
-                RLSAttributeObject attrObject = (RLSAttributeObject)attrMap.get(pfn);
-                RLSAttribute attributeToAdd =
-                                      attrObject.attr;
-
-                if(s2b.rc == RLSClient.RLS_ATTR_NEXIST){
-                    //we need to create the attribute
-                    log("Creating an attribute name " + s2b.s2 +
-                        " for pfn " + pfn, LogManager.DEBUG_MESSAGE_LEVEL);
-                    try{
-                        //FIXME : should have done a bulkAttributeCreate that doesnt exist
-                        mLRC.attributeCreate(s2b.s2, RLSAttribute.LRC_PFN,
-                                             RLSAttribute.STR);
-                        //add the attribute in sequentially instead of bulk
-                        mLRC.attributeAdd(pfn,attributeToAdd);
-
-                    }
-                    catch(RLSException e){
-                        //ignore any attribute already exist error
-                        //case of multiple creates of same attribute
-                        if(e.GetRC() != RLSClient.RLS_ATTR_EXIST){
-                            exception.setNextException(
-                                   new ReplicaCatalogException("Adding attrib to pfn " +
-                                   pfn + " " + e.getMessage()));
-                        }
-                    }
-                }
-                else if(s2b.rc == RLSClient.RLS_ATTR_EXIST){
-                    log("Attribute " + s2b.s2 + " for pfn " + pfn +
-                        " already exists", LogManager.DEBUG_MESSAGE_LEVEL);
-                    //get the existing value of attribute
-                    List l = null;
-                    try{
-                        l = mLRC.attributeValueGet(pfn, s2b.s2, RLSAttribute.LRC_PFN);
-                    }
-                    catch(RLSException e){
-                        exception.setNextException(
-                            new ReplicaCatalogException("Getting value of existing attrib "+
-                                                            e.getMessage()));
-                    }
-                    if(l == null || l.isEmpty() || l.size() > 1){
-                        log("Contents of list are " + l,LogManager.DEBUG_MESSAGE_LEVEL);
-                        //should never happen
-                        log("Unreachable case.",LogManager.FATAL_MESSAGE_LEVEL);
-                        throw new ReplicaCatalogException(
-                            "Whammy while trying to get value of an exisiting attribute " +
-                            s2b.s2 + " associated with PFN " + pfn);
-                    }
-
-                    //try to see if it matches with the existing value
-                    RLSAttribute attribute = (RLSAttribute)l.get(0);
-                    if(!attribute.GetStrVal().equalsIgnoreCase(
-                        attributeToAdd.GetStrVal())){
-
-                        //log a warning saying updating value
-                        mLogMsg = "Existing value for attribute " + s2b.s2 +
-                            " associated with PFN " + pfn +
-                            " updated with new value " + attributeToAdd.GetStrVal();
-
-                        //update the value
-                        try{
-                            mLRC.attributeModify(pfn, attributeToAdd);
-                            log(mLogMsg,LogManager.WARNING_MESSAGE_LEVEL);
-                        }
-                        catch(RLSException e){
-                            exception.setNextException(
-                                new ReplicaCatalogException("RLS Exception "+ e.getMessage()));
-                        }
-                    }
-                }
-                else {
-                    exception.setNextException(exception(s2b));
-                }
-            }
-
-            /*bulk insert on attributes ends */
-
-        }
-        catch(RLSException e){
-            exception.setNextException(
-                new ReplicaCatalogException("RLS Exception "+ e.getMessage()));
-        }
-
-
-
-        //throw an exception only if a nested exception
-        if( (exception = exception.getNextException()) != null) throw exception;
-
-        return result;
-    }
-
-    /**
-     * Inserts a new mapping into the replica catalog. The attributes are added
-     * in bulk assuming the attribute definitions already exist. If an attribute
-     * definition does not exist, it is created and inserted. Note there is no
-     * notion of transactions in LRC. It assumes all the attributes are of type
-     * String.
-     *
-     * @param lfn is the logical filename under which to book the entry.
-     * @param tuple is the physical filename and associated PFN attributes.
-     *
-     * @return number of insertions, should always be 1. On failure,
-     * throws an exception instead of returning zero.
-     */
-    public int insert(String lfn, ReplicaCatalogEntry tuple) {
-        Map m = new HashMap(1);
-        List l = new ArrayList(1); l.add(tuple);
-        m.put(lfn,l);
-        return insert(m);
-
-//        Just composing the call to insert(Map method)
-//        Only one code handles inserts. Karan April 12, 2006
-//        if(isClosed()){
-//            //not connected to LRC
-//            //throw an exception??
-//            throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL);
-//        }
-//        int res = 0;
-
-//        //we have no notion of transaction at this point.
-//        String pfn = tuple.getPFN();
-//        try{
-//            //insert the pfn
-//            mLRC.add(lfn, pfn);
-//        }
-//        catch(RLSException e){
-//            if(e.GetRC() == RLSClient.RLS_LFN_NEXIST){
-//                //the first instance of the lfn, so we add
-//                //instead of creating the mapping
-//                try{
-//                    mLRC.create(lfn, pfn);
-//                }
-//                catch(RLSException ex){
-//                    throw new ReplicaCatalogException("RLS Exception "+ ex.getMessage());
-//                }
-//            }
-//            else if(e.GetRC() == RLSClient.RLS_MAPPING_EXIST){
-//                log("LFN-PFN Mapping alreadys exists in LRC "
-//                    + mLRCURL + " for " + lfn + "->" + pfn,
-//                    LogManager.ERROR_MESSAGE_LEVEL);
-//                return res;
-//            }
-//            else
-//                throw new ReplicaCatalogException("RLS Exception "+ e.getMessage());
-//        }
-//
-//        //we need to add attributes in bulk
-//        String key;
-//        ArrayList failedAttrs;//the failed attributes
-//        ArrayList attrs = new ArrayList(tuple.getAttributeCount());
-//        for(Iterator it = tuple.getAttributeIterator(); it.hasNext();){
-//            key   = (String)it.next();
-//            RLSAttribute attr = new RLSAttribute(key,RLSAttribute.LRC_PFN,
-//                                                 (String)tuple.getAttribute(key));
-//            attrs.add(new RLSAttributeObject(attr,pfn));
-//
-//        }
-//
-//        try{
-//            failedAttrs = mLRC.attributeAddBulk(attrs);
-//        }
-//        catch(RLSException e){
-//            throw new ReplicaCatalogException("RLS Exception "+ e.getMessage());
-//        }
-//
-//        //go through the failed attributes and create them
-//        for(Iterator it = failedAttrs.iterator();it.hasNext();){
-//            RLSString2Bulk s2b = (RLSString2Bulk)it.next();
-//            RLSAttribute attributeToAdd = new RLSAttribute(s2b.s2,RLSAttribute.LRC_PFN,
-//                                                          (String)tuple.getAttribute(s2b.s2));
-//            //s1 is the pfn
-//            //s2 is the attribute name
-//            if(s2b.rc == RLSClient.RLS_ATTR_NEXIST){
-//                //we need to create the attribute
-//                log("Creating an attribute name " + s2b.s2 +
-//                    " for pfn " + pfn, LogManager.DEBUG_MESSAGE_LEVEL);
-//                try{
-//                    mLRC.attributeCreate(s2b.s2, RLSAttribute.LRC_PFN,
-//                                         RLSAttribute.STR);
-//                    //add the attribute in sequentially instead of bulk
-//                    mLRC.attributeAdd(pfn,attributeToAdd);
-//
-//                }
-//                catch(RLSException e){
-//                    throw new ReplicaCatalogException("RLS Exception "+ e.getMessage());
-//                }
-//            }
-//            else if(s2b.rc == RLSClient.RLS_ATTR_EXIST){
-//                log("Attribute " + s2b.s2 + " for pfn " + pfn +
-//                    " already exists", LogManager.DEBUG_MESSAGE_LEVEL);
-//                //get the existing value of attribute
-//                List l = null;
-//                try{
-//                    l = mLRC.attributeValueGet(pfn, s2b.s2, RLSAttribute.LRC_PFN);
-//                }
-//                catch(RLSException e){
-//                    throw new ReplicaCatalogException("RLS Exception "+ e.getMessage());
-//                }
-//                if(l == null || l.isEmpty() || l.size() > 1){
-//                    log("Contents of list are " + l,LogManager.DEBUG_MESSAGE_LEVEL);
-//                    //should never happen
-//                    log("Unreachable case.",LogManager.FATAL_MESSAGE_LEVEL);
-//                    throw new ReplicaCatalogException(
-//                        "Whammy while trying to get value of an exisiting attribute " +
-//                        s2b.s2 + " associated with PFN " + pfn);
-//                }
-//                //try to see if it matches with the existing value
-//                RLSAttribute attribute = (RLSAttribute)l.get(0);
-//                if(!attribute.GetStrVal().equalsIgnoreCase(
-//                                                   attributeToAdd.GetStrVal())){
-//
-//                    //log a warning saying updating value
-//                    mLogMsg = "Existing value for attribute " + s2b.s2 +
-//                        " associated with PFN " + pfn +
-//                        " updated with new value " + attributeToAdd.GetStrVal();
-//
-//                    //update the value
-//                    try{
-//                        mLRC.attributeModify(pfn, attributeToAdd);
-//                        log(mLogMsg,LogManager.WARNING_MESSAGE_LEVEL);
-//                    }
-//                    catch(RLSException e){
-//                        throw new ReplicaCatalogException("RLS Exception" +
-//                                                          e.getMessage());
-//                    }
-//                }
-//            }
-//            else{
-//                throw new ReplicaCatalogException(
-//                    "Unknown Error while adding attributes. RLS Error Code " +
-//                    s2b.rc);
-//            }
-//        }
-//
-//        return 1;
-
-    }
-
-    /**
-     * Inserts a new mapping into the replica catalog. This is a
-     * convenience function exposing the resource handle. Internally,
-     * the <code>ReplicaCatalogEntry</code> element will be contructed, and passed to
-     * the appropriate insert function.
-     *
-     * @param lfn is the logical filename under which to book the entry.
-     * @param pfn is the physical filename associated with it.
-     * @param handle is a resource handle where the PFN resides.
-     *
-     * @return number of insertions, should always be 1. On failure,
-     * throw an exception, don't use zero.
-     *
-     * @see #insert( String, ReplicaCatalogEntry )
-     * @see ReplicaCatalogEntry
-     */
-    public int insert(String lfn, String pfn, String handle) {
-        //prepare the appropriate ReplicaCatalogEntry object
-        ReplicaCatalogEntry rce = new ReplicaCatalogEntry(pfn,handle);
-        return insert(lfn,rce);
-    }
-
-    /**
-     * Deletes multiple mappings into the replica catalog. The input is a
-     * map indexed by the LFN. The value for each LFN key is a collection
-     * of replica catalog entries. On setting matchAttributes to false, all entries
-     * having matching lfn pfn mapping to an entry in the Map are deleted.
-     * However, upon removal of an entry, all attributes associated with the pfn
-     * also evaporate (cascaded deletion).
-     * The deletes are done in batches.
-     *
-     * @param x                is a map from logical filename string to list of
-     *                         replica catalog entries.
-     * @param matchAttributes  whether mapping should be deleted only if all
-     *                         attributes match.
-     *
-     * @return the number of deletions.
-     * @see ReplicaCatalogEntry
-     */
-    public int delete( Map x , boolean matchAttributes){
-        int result = 0;
-        if(isClosed()){
-            //not connected to LRC
-            //throw an exception??
-            throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL);
-        }
-
-        String lfn,pfn;
-        ReplicaCatalogEntry rce;
-        Collection c;
-        CatalogException exception = new ReplicaCatalogException();
-
-        if(matchAttributes){
-            //do a sequential delete for the time being
-            for(Iterator it = x.entrySet().iterator();it.hasNext();){
-                Map.Entry entry = (Map.Entry)it.next();
-                lfn = (String)entry.getKey();
-                c   = (Collection)entry.getValue();
-
-                //iterate through all RCE's for this lfn and delete
-                for(Iterator rceIt = c.iterator();rceIt.hasNext();){
-                    rce = (ReplicaCatalogEntry)it.next();
-                    result += delete(lfn,rce);
-                }
-            }
-        }
-        else{
-            //we can use bulk delete
-            int size = this.getBatchSize();
-            int current = 0;
-            List lfnPfns = new ArrayList(size);
-            for (Iterator it = x.entrySet().iterator(); it.hasNext(); ) {
-                Map.Entry entry = (Map.Entry)it.next();
-                lfn = (String)entry.getKey();
-                c = (Collection)entry.getValue();
-
-                //traverse through the rce's for the pfn's
-                for(Iterator pfnIt = c.iterator();pfnIt.hasNext();){
-                    rce = (ReplicaCatalogEntry) pfnIt.next();
-                    pfn = rce.getPFN();
-                    lfnPfns.add(new RLSString2(lfn, pfn));
-                    current++;
-
-                    //check if diff is more than batch size
-                    if( current >= size){
-                        //we have the subset of RCE's on which we
-                        //want to do bulk deletes
-                        try{
-                            result += bulkDelete(lfnPfns);
-                        }
-                        catch(ReplicaCatalogException e){exception.setNextException(e);}
-
-                        current = 0;
-                        lfnPfns.clear();
-                    }
-
-                }
-            }
-            //check for the last bunch
-            if(!lfnPfns.isEmpty()){
-                //we have the subset of RCE's on which we
-                //we want to do bulk deletes
-                try{
-                    result += bulkDelete(lfnPfns);
-                }
-                catch(ReplicaCatalogException e){exception.setNextException(e);}
-
-                current = 0;
-            }
-        }
-
-        //throw an exception only if a nested exception
-        if( (exception = exception.getNextException()) != null) throw exception;
-
-        return result;
-    }
-
-
-
-    /**
-     * Deletes a specific mapping from the replica catalog. We don't care
-     * about the resource handle. More than one entry could theoretically
-     * be removed. Upon removal of an entry, all attributes associated
-     * with the PFN also evaporate (cascading deletion) automatically at the
-     * RLS server end.
-     *
-     * @param lfn is the logical filename in the tuple.
-     * @param pfn is the physical filename in the tuple.
-     *
-     * @return the number of removed entries.
-     */
-    public int delete(String lfn, String pfn) {
-        int res = 0;
-        if(isClosed()){
-            //not connected to LRC
-            //throw an exception??
-            throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL);
-        }
-
-        try{
-            mLRC.delete(lfn,pfn);
-            res++;
-        }
-        catch(RLSException e){
-            if(e.GetRC() == RLSClient.RLS_PFN_NEXIST ||
-               e.GetRC() == RLSClient.RLS_LFN_NEXIST ||
-               e.GetRC() == RLSClient.RLS_MAPPING_NEXIST){
-                log("Mapping " + lfn + "->" + pfn +
-                    " does not exist",LogManager.DEBUG_MESSAGE_LEVEL);
-            }
-            else{
-                throw new ReplicaCatalogException("Error while deleting mapping " +
-                                           e.getMessage());
-            }
-        }
-        return res;
-    }
-
-    /**
-     * Deletes a very specific mapping from the replica catalog. The LFN
-     * must be matches, the PFN, and all PFN attributes specified in the
-     * replica catalog entry. More than one entry could theoretically be
-     * removed. Upon removal of an entry, all attributes associated with
-     * the PFN also evaporate (cascading deletion).
-     *
-     * @param lfn is the logical filename in the tuple.
-     * @param tuple is a description of the PFN and its attributes.
-     * @return the number of removed entries, either 0 or 1.
-     */
-    public int delete(String lfn, ReplicaCatalogEntry tuple) {
-        int res = 0;
-        if(isClosed()){
-            //not connected to LRC
-            //throw an exception??
-            throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL);
-        }
-
-        //get hold of all the RCE in this LRC matching to lfn
-        Collection c = lookup(lfn);
-        ReplicaCatalogEntry rce;
-        for(Iterator it = c.iterator();it.hasNext();){
-            rce = (ReplicaCatalogEntry)it.next();
-            if(rce.equals(tuple)){
-                //we need to delete the rce
-                //cascaded deletes take care of the attribute deletes
-                delete(lfn,tuple.getPFN());
-                res++;
-            }
-        }
-
-        return res;
-    }
-
-    /**
-     * Deletes all PFN entries for a given LFN from the replica catalog
-     * where the PFN attribute is found, and matches exactly the object
-     * value. This method may be useful to remove all replica entries that
-     * have a certain MD5 sum associated with them. It may also be harmful
-     * overkill.
-     *
-     * @param lfn is the logical filename to look for.
-     * @param name is the PFN attribute name to look for.
-     * @param value is an exact match of the attribute value to match.
-     *
-     * @return the number of removed entries.
-     */
-    public int delete(String lfn, String name, Object value) {
-        int result = 0;
-        Collection c = null;
-        if(isClosed()){
-            //not connected to LRC
-            //throw an exception??
-            throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL);
-        }
-
-        //query lookup for that lfn and delete accordingly.
-        Set s = new HashSet(1);
-        s.add(lfn);
-        Map map = this.lookupNoAttributes(s,name,value);
-        if(map == null || map.isEmpty()){
-            return 0;
-        }
-
-        //we need to pipe this into a list of RLSString2 objects
-        ArrayList lfnPfns = new ArrayList(3);
-        for(Iterator it = map.entrySet().iterator();it.hasNext();){
-            Map.Entry entry = (Map.Entry) it.next();
-            lfn = (String)entry.getKey();
-
-            for (Iterator it1 = ( (Set) entry.getValue()).iterator();
-                 it1.hasNext(); ) {
-                RLSString2 lfnPfn = new RLSString2(lfn, (String) it1.next());
-                lfnPfns.add(lfnPfn);
-                result++;
-            }
-
-        }
-
-        try{
-            c = mLRC.deleteBulk(lfnPfns);
-        }
-        catch(RLSException e){
-            log("remove(Set)" ,e,LogManager.ERROR_MESSAGE_LEVEL);
-        }
-
-        //c should be empty ideally
-        if(!c.isEmpty()){
-            result -= c.size();
-            log("Removing lfns remove(Set)" + c, LogManager.ERROR_MESSAGE_LEVEL);
-        }
-        return result;
-
-    }
-
-    /**
-     * Deletes all PFN entries for a given LFN from the replica catalog
-     * where the resource handle is found. Karan requested this
-     * convenience method, which can be coded like
-     * <pre>
-     *  delete( lfn, SITE_ATTRIBUTE, handle )
-     * </pre>
-     *
-     * @param lfn is the logical filename to look for.
-     * @param handle is the resource handle
-     *
-     * @return the number of entries removed.
-     *
-     * @see #SITE_ATTRIBUTE
-     */
-    public int deleteByResource(String lfn, String handle) {
-        return delete(lfn,SITE_ATTRIBUTE,handle);
-    }
-
-    /**
-     * Removes all mappings for a set of LFNs.
-     *
-     * @param lfn is a set of logical filename to remove all mappings for.
-     *
-     * @return the number of removed entries.
-     */
-    public int remove(String lfn) {
-        //first get hold of all the pfn mappings for the lfn
-        Collection c = this.lookupNoAttributes(lfn);
-        int result   = 0;
-        if(c == null || c.isEmpty()){
-            return 0;
-        }
-
-        //we need to pipe this into a list of RLSString2Bulk objects
-        result = c.size();
-        ArrayList lfnPfns = new ArrayList(result);
-        for(Iterator it = c.iterator();it.hasNext();){
-            RLSString2 lfnPfn = new RLSString2(lfn,(String)it.next());
-            lfnPfns.add(lfnPfn);
-        }
-
-        //do a bulk delete
-        try{
-            c = mLRC.deleteBulk(lfnPfns);
-        }
-        catch(RLSException e){
-            log("remove(String)",e,LogManager.ERROR_MESSAGE_LEVEL);
-        }
-
-        //c should be empty ideally
-        if(!c.isEmpty()){
-            result -= c.size();
-            log("remove(String)" + c,LogManager.ERROR_MESSAGE_LEVEL);
-        }
-        return result;
-    }
-
-    /**
-     * Removes all mappings for a set of LFNs.
-     *
-     * @param lfns is a set of logical filename to remove all mappings for.
-     *
-     * @return the number of removed entries.
-     */
-    public int remove(Set lfns) {
-        String lfn   = null;
-        Collection c = null;
-        int result   = 0;
-
-        //first get hold of all the pfn mappings for the lfn
-        Map map = this.lookupNoAttributes(lfns);
-        if(map == null || map.isEmpty()){
-            return 0;
-        }
-        //we need to pipe this into a list of RLSString2 objects
-        ArrayList lfnPfns = new ArrayList(map.keySet().size());
-        for(Iterator it = map.entrySet().iterator();it.hasNext();){
-            Map.Entry entry = (Map.Entry) it.next();
-            lfn = (String)entry.getKey();
-            for (Iterator it1 = ( (Set) entry.getValue()).iterator();
-                 it1.hasNext(); ) {
-                RLSString2 lfnPfn = new RLSString2(lfn, (String) it1.next());
-                lfnPfns.add(lfnPfn);
-                result++;
-            }
-        }
-
-        //do a bulk delete
-        //FIX ME: The deletes should have been done in batches.
-        try{
-            c = mLRC.deleteBulk(lfnPfns);
-        }
-        catch(RLSException e){
-            log("remove(Set)" + e,LogManager.ERROR_MESSAGE_LEVEL);
-        }
-
-        //c should be empty ideally
-        if(!c.isEmpty()){
-            result -= c.size();
-            log("remove(Set)" + c,LogManager.ERROR_MESSAGE_LEVEL);
-            for(Iterator it = c.iterator();it.hasNext();){
-                RLSString2Bulk rs2 = (RLSString2Bulk)it.next();
-                System.out.println("(" + rs2.s1 + "->" + rs2.s2 +"," +
-                                   rs2.rc + ")");
-            }
-        }
-        return result;
-
-    }
-
-    /**
-     * Removes all entries associated with a particular resource handle.
-     * This is useful, if a site goes offline. It is a convenience method,
-     * which calls the generic <code>removeByAttribute</code> method.
-     *
-     * @param handle is the site handle to remove all entries for.
-     *
-     * @return the number of removed entries.
-     *
-     * @see #removeByAttribute( String, Object )
-     */
-    public int removeByAttribute(String handle) {
-        return removeByAttribute(SITE_ATTRIBUTE,handle);
-    }
-
-    /**
-     * Removes all entries from the replica catalog where the PFN attribute
-     * is found, and matches exactly the object value.
-     *
-     * @param name is the PFN attribute name to look for.
-     * @param value is an exact match of the attribute value to match.
-     *
-     * @return the number of removed entries.
-     */
-    public int removeByAttribute(String name, Object value) {
-        String lfn   = null;
-        String pfn   = null;
-        Collection c = null;
-        int result   = 0;
-
-        //get hold of all the lfns in the lrc
-        Set s = list();
-
-        //first get hold of all the pfn mappings for the lfn
-        Map map = this.lookup(s,name,value);
-        if(map == null || map.isEmpty()){
-            return 0;
-        }
-
-        //we need to pipe this into a list of RLSString2 objects
-        ArrayList lfnPfns = new ArrayList(result);
-        for(Iterator it = map.entrySet().iterator();it.hasNext();){
-            Map.Entry entry = (Map.Entry) it.next();
-            lfn = (String)entry.getKey();
-            //System.out.println(lfn + " ->");
-            for (Iterator it1 = ( (Set) entry.getValue()).iterator();
-                 it1.hasNext(); ) {
-                pfn = ((ReplicaCatalogEntry)it1.next()).getPFN();
-                RLSString2 lfnPfn = new RLSString2(lfn, pfn);
-                lfnPfns.add(lfnPfn);
-                result++;
-                //System.out.print(lfnPfn.s2 + ",");
-            }
-        }
-
-        //do a bulk delete
-        //FIX ME: The deletes should have been done in batches.
-        try{
-            c = mLRC.deleteBulk(lfnPfns);
-        }
-        catch(RLSException e){
-            throw new ReplicaCatalogException("Bulk Delete: " + e.getMessage());
-        }
-
-        //c should be empty ideally
-        if(!c.isEmpty()){
-            result -= c.size();
-            log("removeByAttribute(String,Object)" + c,
-                        LogManager.ERROR_MESSAGE_LEVEL);
-        }
-        return result;
-    }
-
-    /**
-     * Removes everything. Use with caution!
-     *
-     * @return the number of removed entries.
-     */
-    public int clear() {
-
-        //do a bulk delete
-        //FIX ME: The deletes should have been done in batches.
-        try{
-            mLRC.clear();
-        }
-        catch(RLSException e){
-            log("clear()",e,LogManager.ERROR_MESSAGE_LEVEL);
-        }
-        return 0;
-        /*
-        String lfn   = null;
-        String pfn   = null;
-        Collection c = null;
-        int result   = 0;
-
-        //get hold of all the lfns in the lrc
-        Set s = list();
-
-        //first get hold of all the pfn mappings for the lfn
-        Map map = this.lookupNoAttributes(s);
-        if(map == null || map.isEmpty()){
-            return 0;
-        }
-
-        //we need to pipe this into a list of RLSString2 objects
-        ArrayList lfnPfns = new ArrayList(result);
-        for(Iterator it = map.entrySet().iterator();it.hasNext();){
-            Map.Entry entry = (Map.Entry) it.next();
-            lfn = (String)entry.getKey();
-            //System.out.println(lfn + " ->");
-            for (Iterator it1 = ( (Set) entry.getValue()).iterator();
-                 it1.hasNext(); ) {
-                pfn = ((String)it1.next());
-                RLSString2 lfnPfn = new RLSString2(lfn, pfn);
-                lfnPfns.add(lfnPfn);
-                result++;
-                //System.out.print(lfnPfn.s2 + ",");
-            }
-        }
-
-        //do a bulk delete
-        //FIX ME: The deletes should have been done in batches.
-        try{
-            c = mLRC.deleteBulk(lfnPfns);
-        }
-        catch(RLSException e){
-            log("clear()",e,LogManager.ERROR_MESSAGE_LEVEL);
-        }
-
-        //c should be empty ideally
-        if(!c.isEmpty()){
-            result -= c.size();
-            log("clear()" + c,LogManager.ERROR_MESSAGE_LEVEL);
-        }
-        return result;
-         */
-    }
-
-    /**
-     * Explicitely free resources before the garbage collection hits.
-     */
-    public void close() {
-        try {
-            if (mRLS != null) {
-                mRLS.Close();
-            }
-        }
-        catch (RLSException e) {
-            //ignore
-        }
-        finally {
-            mRLS = null;
-        }
-    }
-
-    /**
-     * Returns whether the connection to the RLS with which this instance is
-     * associated is closed or not.
-     *
-     * @return true, if the implementation is disassociated, false otherwise.
-     * @see #close()
-     */
-    public boolean isClosed() {
-        return (mRLS == null);
-    }
-
-
-    /**
-     * It returns the timeout value in seconds after which to timeout in case of
-     * no activity from the LRC.
-     *
-     * Referred to by the "lrc.timeout" property.
-     *
-     * @return the timeout value if specified else,
-     *         the value specified by "rls.timeout" property, else
-     *         DEFAULT_LRC_TIMEOUT.
-     *
-     * @see #DEFAULT_LRC_TIMEOUT
-     */
-    protected int getTimeout(Properties properties) {
-        String prop = properties.getProperty( this.LRC_TIMEOUT_KEY);
-
-        //if prop is null get rls timeout,
-        prop = (prop == null)? properties.getProperty(this.RLS_TIMEOUT_KEY):prop;
-
-        int val = 0;
-        try {
-            val = Integer.parseInt( prop );
-        } catch ( Exception e ) {
-            val = Integer.parseInt( DEFAULT_LRC_TIMEOUT );
-        }
-        return val;
-
-    }
-
-
-    /**
-     * Returns the site handle associated with the pfn at the lrc to which
-     * the instance of this application binds. It returns <code>UNDEFINED_SITE
-     * </code> even when the pfn is not in the lrc.
-     *
-     * @param pfn            The pfn with which the attribute is associated.
-     *
-     * @return value of the attribute if found
-     *         else UNDEFINED_POOL
-     */
-    private String getSiteHandle(String pfn) {
-        return getSiteHandle(mLRC, pfn);
-    }
-
-    /**
-     * Returns the site handle associated with a pfn at the lrc associated
-     * with the <code>RLSClient</code> passed. It returns <code>UNDEFINED_SITE
-     * </code> even when the pfn is not in the lrc.
-     *
-     * @param lrc  the handle to the lrc , where the attributes are stored.
-     * @param pfn  the pfn with which the attribute is associated.
-     *
-     * @return value of the attribute if found
-     *         else UNDEFINED_POOL
-     *
-     */
-    private String getSiteHandle(RLSClient.LRC lrc, String pfn) {
-        String poolAttr = getAttribute(lrc, pfn, SITE_ATTRIBUTE);
-        return (poolAttr == null) ?
-            defaultResourceHandle() :
-            poolAttr;
-    }
-
-    
-    
-    /**
-     * Returns the default value that is to be assigned to site handle
-     * for a replica catalog entry.
-     * 
-     * @return default site handle
-     */
-    private String defaultResourceHandle(){
-        return ( this.mDefaultSiteAttribute == null ) ?
-                 LRC.UNDEFINED_SITE:
-                 this.mDefaultSiteAttribute;
-    }
-    
-    /**
-     * Sets the  resource handle in an attribute map.
-     * The resource handle is set to the default site handle if the map
-     * does not contain the site attribute key.
-     * 
-     * @param m the attribute map.
-     * 
-     * @see #defaultResourceHandle() 
-     */
-    private void setResourceHandle( Map<String,String> m ){
-        String dflt = defaultResourceHandle();
-        //update the site attribute only if the default
-        //attribute is other than undefined site
-        if( m.containsKey( LRC.SITE_ATTRIBUTE) && !dflt.equals(LRC.UNDEFINED_SITE ) ){
-            //populate the default site handle
-            m.put( LRC.SITE_ATTRIBUTE, dflt );
-        }
-        else if( !m.containsKey( LRC.SITE_ATTRIBUTE ) ){
-            //populate the default site handle
-            m.put( LRC.SITE_ATTRIBUTE, dflt );
-        }
-    }
-    
-    /**
-     * Sets the  resource handle in an attribute map.
-     * The resource handle is set to the default site handle if the map
-     * does not contain the site attribute key.
-     * 
-     * @param rce   the <code>ReplicaCatalogEntry</code>
-     * 
-     * @see #defaultResourceHandle() 
-     */
-    private void setResourceHandle( ReplicaCatalogEntry rce ){
-        String dflt = defaultResourceHandle();
-        //update the site attribute only if the default
-        //attribute is other than undefined site
-        if( rce.hasAttribute( LRC.SITE_ATTRIBUTE) && !dflt.equals(LRC.UNDEFINED_SITE ) ){
-            //populate the default site handle
-            rce.setResourceHandle( dflt );
-        }
-        else if( ! rce.hasAttribute( LRC.SITE_ATTRIBUTE ) ){
-            //populate the default site handle
-            rce.setResourceHandle( dflt );
-        }
-    }
-    
-    /**
-     * Retrieves from the lrc, associated with this instance all the
-     * attributes associated with the <code>pfn</code> in a map. All the
-     * attribute values are stored as String.
-     *
-     * @param pfn  the pfn with which the attribute is associated.
-     *
-     * @return <code>Map</code>containing the attribute keys and values,
-     *         else an empty Map.
-     */
-    private Map getAttributes(String pfn) {
-        return getAttributes(mLRC, pfn);
-    }
-
-    /**
-     * Retrieves from the lrc associated with this instance, all the
-     * attributes associated with the lfn-pfns in a map indexed by the lfn.
-     * The value for each entry is a collection of
-     * <code>ReplicaCatalogEntry</code> objects.
-     * All the attribute values are stored as String.
-     *
-     * If the attribute value passed is not null, then explicit matching occurs
-     * on attribute values in addition.
-     *
-     * @param lfnPfns  a list of <code>RLSString2Bulk</code> objects containing
-     *                 the lfn in s1 field, and pfn in s2 field. The list is
-     *                 assumed to be grouped by lfns.
-     * @param attrKey  the name of attribute that needs to be queried for each
-     *                 pfn. a value of null denotes all attributes.
-     * @param attrVal  the value of the attribute that should be matching.
-     *
-     * @return a map indexed by the LFN. Each value is a collection
-     * of replica catalog entries for the LFN.
-     */
-    private Map getAttributes(List lfnPfns, String attrKey, Object attrVal) {
-        Map result = new HashMap();
-        String curr = null;
-        String prev = null;
-        //loss of information. i should have known the size at this pt!
-        List l = new ArrayList();
-        ArrayList pfns = new ArrayList(lfnPfns.size());
-        int size = mBatchSize;
-        ReplicaCatalogEntry entry = null;
-        Map temp = new HashMap();
-        Map pfnMap = new HashMap(); //contains pfn and their ReplicaCatalogEntry objects
-
-        //sanity check
-        if(lfnPfns == null || lfnPfns.isEmpty()){
-            return result;
-        }
-
-        //put just the pfns in a list that needs
-        //to be sent to the RLS API
-        for (Iterator it = lfnPfns.iterator(); it.hasNext(); ) {
-            pfns.add( ( (RLSString2Bulk) it.next()).s2);
-        }
-        //now query for the attributes in bulk
-        List attributes = null;
-        try {
-            attributes = mLRC.attributeValueGetBulk(pfns, attrKey,
-                RLSAttribute.LRC_PFN);
-        }
-        catch (RLSException e) {
-            //some other error, but we can live with it.
-            //just flag as warning
-            mLogMsg = "getAttributes(List,String,Object)";
-            log(mLogMsg, e,LogManager.ERROR_MESSAGE_LEVEL);
-            return result;
-        }
-
-        //we need to sort them on the basis of the pfns
-        //which is the populate the key field
-        Collections.sort(attributes, new RLSAttributeComparator());
-        /*
-        System.out.println("Sorted attributes are ");
-        for(Iterator it = attributes.iterator(); it.hasNext();){
-            RLSAttributeObject obj = (RLSAttributeObject) it.next();
-            if(obj.rc == RLSClient.RLS_ATTR_NEXIST){
-                System.out.print("\tAttribute does not exist");
-            }
-            System.out.println("\t" + obj.key + "->rc" + obj.rc);
-        }
-        */
-
-        for (Iterator it = attributes.iterator(); it.hasNext(); ) {
-            RLSAttributeObject attr = (RLSAttributeObject) it.next();
-            Object value = (attr.rc == RLSClient.RLS_ATTR_NEXIST)?
-                            null: //assign an empty value
-                            getAttributeValue(attr.attr);//retrieve the value
-
-            curr = attr.key;
-
-            //push in the attribute into the temp map only
-            //if prev is null or the prev and current pfn's match
-            if((prev == null || curr.equalsIgnoreCase(prev))
-               &&
-               (value != null)//value being null means no attribute associated
-               &&
-               ((attrVal == null)
-                 || (attrVal.equals(value)) )){
-                temp.put(attr.attr.name,value);
-            }
-            else{
-                //push it into the map all attributes for a single pfn
-                //only if the map is not empty or there was no matching
-                //being done attrVal (i.e it is null)
-                if(attrVal == null || !temp.isEmpty()){
-                    entry = new ReplicaCatalogEntry(prev, temp);
-                    //System.out.println("0:Entry being made is " + entry);
-                    //the entry has to be put in a map keyed by the pfn name
-                    pfnMap.put(prev, entry);
-                    temp = new HashMap();
-                }
-                //added June 15,2005
-                if(value != null &&
-                   ( attrVal == null || attrVal.equals(value))){
-                    temp.put(attr.attr.name,value);
-                }
-            }
-            //push in the last attribute entry
-            if(!it.hasNext()){
-                //push it into the map all attributes for a single pfn
-                //only if the map is not empty or there was no matching
-                //being done attrVal (i.e it is null)
-                if(attrVal == null || !temp.isEmpty()){
-                    entry = new ReplicaCatalogEntry(curr, temp);
-                    //System.out.println("1:Entry being made is " + entry);
-                    //the entry has to be put in a map keyed by the pfn name
-                    pfnMap.put(curr, entry);
-                }
-            }
-            prev = curr;
-
-        }
-
-        //the final iteration that groups the pfn and their
-        //attributes according to the lfn
-        prev = null;
-        for (Iterator it = lfnPfns.iterator(); it.hasNext(); ) {
-            RLSString2Bulk lfnPfn = (RLSString2Bulk) it.next();
-            curr = lfnPfn.s1;
-
-            entry = (ReplicaCatalogEntry) pfnMap.get(lfnPfn.s2);
-            if(entry == null){
-                //means no match on attribute or attribute value was found
-                continue;
-            }
-
-            if (!curr.equalsIgnoreCase(prev) && (prev != null)) {
-                //push it into the map
-                //we have entry for one lfn and all pfns constructed
-                //System.out.println("Putting in entry for " + prev + " " + l);
-                result.put(prev, l);
-                l = new ArrayList();
-            }
-            
-            //set a site handle if not already set
-            setResourceHandle( entry );
-            
-            l.add(entry);
-            //if this was the last one push it in result
-            if(!it.hasNext()){
-                //System.out.println("Putting in entry for " + curr + " " + l);
-                result.put(curr, l);
-            }
-            prev = curr;
-        }
-
-        return result;
-    }
-
-    /**
-     * Retrieves from the lrc, all the attributes associated with the <code>pfn
-     * </code> in a map. All the attribute values are stored as String.
-     *
-     * @param lrc  the handle to the lrc , where the attributes are stored.
-     * @param pfn  the pfn with which the attribute is associated.
-     * @return <code>Map</code>containing the attribute keys and values,
-     *         else an empty Map.
-     */
-    private Map getAttributes(RLSClient.LRC lrc, String pfn) {
-        String val = null;
-        List attrList = null;
-        Map m = new HashMap();
-        RLSAttribute att = null;
-
-        try {
-            //passing null denotes to get
-            //hold of all attributes
-            attrList = lrc.attributeValueGet(pfn, null,
-                                             RLSAttribute.LRC_PFN);
-        }
-        catch (RLSException e) {
-            //attribute does not exist error means no attributes
-            //associated, return empty map else just denote a warning
-            if(e.GetRC() != RLSClient.RLS_ATTR_NEXIST){
-                //some other error, but we can live with it.
-                //just flag as warning
-                mLogMsg = "getAttributes(RLSClient.LRC,String)";
-                log(mLogMsg, e,LogManager.ERROR_MESSAGE_LEVEL);
-            }
-            
-            //associate a default value if required.
-            setResourceHandle( m );
-            
-            return m;
-        }
-
-        //iterate throught the list and push all
-        //the attributes in the map
-        for (Iterator it = attrList.iterator(); it.hasNext(); ) {
-            att = (RLSAttribute) it.next();
-            //the list can contain a null attribute key
-            //we dont want that.
-            if( att.name != null ){
-                m.put(att.name, att.GetStrVal());
-            }
-        }
-
-        //populate default site handle if
-        //site attribute is not specified
-        setResourceHandle( m );
-        
-        return m;
-    }
-
-    /**
-     * Retrieves from the lrc associated with this instance all, the attribute
-     * value associated with the <code>pfn</code> for a given attribute name.
-     *
-     * @param pfn  the pfn with which the attribute is associated.
-     * @param name the name of the attribute for which we want to search.
-     *
-     * @return value of the attribute if found
-     *         else null
-     */
-    private String getAttribute(String pfn, String name) {
-        return getAttribute(mLRC, pfn, name);
-    }
-
-    /**
-     * Retrieves from the lrc, the attribute value associated with the <code>pfn
-     * </code> for a given attribute name.
-     *
-     * @param lrc  the handle to the lrc , where the attributes are stored.
-     * @param pfn  the pfn with which the attribute is associated.
-     * @param name the name of the attribute for which we want to search.
-     *
-     * @return value of the attribute if found
-     *         else null
-     */
-    private String getAttribute(RLSClient.LRC lrc, String pfn, String name) {
-        String val = null;
-        List attrList = null;
-
-        try {
-            attrList = lrc.attributeValueGet(pfn, name,
-                                             RLSAttribute.LRC_PFN);
-        }
-        catch (RLSException e) {
-            if (e.GetRC() == RLSClient.RLS_ATTR_NEXIST) {
-                //attribute does not exist we return null
-            }
-            else {
-                //some other error, but we can live with it.
-                //just flag as warning
-                mLogMsg = "getAttribute(String,String,String):";
-                log(mLogMsg, e,LogManager.ERROR_MESSAGE_LEVEL);
-            }
-            return null;
-        }
-
-        return (attrList.isEmpty()) ?
-            null :
-            //we return the first attribute value
-            //Does not make much sense for
-            //more than one attribute value
-            //for the same key and pfn
-            attrList.get(0).toString();
-    }
-
-
-    /**
-     * Retrieves the attribute value as an object from the <code>RLSAttribute</code>
-     * object. Does automatic boxing (i.e converts int to Integer) etc.
-     * The value is returned of the type as determined from the internal value
-     * type.
-     *
-     * @param attr the <code>RLSAttribute</code> from which to extract the value.
-     *
-     * @return Object containing the value.
-     *
-     * @throws ReplicaCatalogException if illegal value associated.
-     */
-    private Object getAttributeValue(RLSAttribute attr){
-        Object obj = null;
-        int type = attr.GetValType();
-
-        switch(type){
-            case RLSAttribute.STR:
-               obj = attr.GetStrVal();
-               break;
-
-            case RLSAttribute.DATE:
-                obj = attr.GetDateVal();
-                break;
-
-            case RLSAttribute.DOUBLE:
-                obj = new Double(attr.GetDoubleVal());
-                break;
-
-            case RLSAttribute.INT:
-                obj = new Integer(attr.GetIntVal());
-                break;
-
-            default:
-                throw new ReplicaCatalogException("Invalid value type associated " + type);
-        }
-
-        return obj;
-    }
-
-    /**
-     * Sets the number of lfns in each batch while querying the lrc in the
-     * bulk mode.
-     *
-     * @param properties  the properties passed while connecting.
-     *
-     */
-    private void setBatchSize(Properties properties) {
-        String s = properties.getProperty(this.BATCH_KEY);
-        int size = this.RLS_BULK_QUERY_SIZE;
-        try{
-            size = Integer.parseInt(s);
-        }
-        catch(Exception e){}
-        mBatchSize = size;
-    }
-
-
-    /**
-     * Returns the number of lfns in each batch while querying the lrc in the
-     * bulk mode.
-     *
-     * @return the batch size.
-     */
-    private int getBatchSize() {
-        return mBatchSize;
-    }
-
-    /**
-     * Retrieves multiple entries for a given logical filename, up to the
-     * complete LRC. It uses the bulk query api to the LRC to query for stuff.
-     * Bulk query has been in RLS since version 2.0.8. All the lfns in set
-     * are put in one single bulk query to the LRC. There is a risk of seeing
-     * a timeout error in case of large set of lfns. User should use the
-     * lookup function that internally does the bulk query in batches.
-     * Passing a null value for the attribute key results in the querying for all
-     * attributes. The function returns <code>ReplicaCatalogEntry</code> objects
-     * that have the attribute identified by attribute key passed.
-     *
-     * @param lfns     set of logical filename strings to look up.
-     * @param attrKey  the name of attribute that needs to be queried for each
-     *                 pfn. a value of null denotes all attributes.
-     *
-     * @return a map indexed by the LFN. Each value is a collection
-     * of replica catalog entries for the LFN.
-     * @see ReplicaCatalogEntry
-     * @see #lookup(Set)
-     */
-    private Map bulkLookup(Set lfns, String attrKey) {
-        return bulkLookup(lfns,attrKey,null);
-    }
-
-
-    /**
-     * Retrieves multiple entries for a given logical filename, up to the
-     * complete LRC. It uses the bulk query api to the LRC to query for stuff.
-     * Bulk query has been in RLS since version 2.0.8. All the lfns in set
-     * are put in one single bulk query to the LRC. There is a risk of seeing
-     * a timeout error in case of large set of lfns. User should use the
-     * lookup function that internally does the bulk query in batches.
-     * Passing a null value for the attribute key results in the querying for all
-     * attributes. A null value for the attribute value, disables attribute matching
-     * and results in the <code>ReplicaCatalogEntry</code> objects that have
-     * the attribute identified by attribute key passed.
-     *
-     * @param lfns     set of logical filename strings to look up.
-     * @param attrKey  the name of attribute that needs to be queried for each
-     *                 pfn. a value of null denotes all attributes.
-     * @param attrVal  the value of the attribute that should be matching.
-     *
-     * @return a map indexed by the LFN. Each value is a collection
-     * of replica catalog entries for the LFN.
-     * @see ReplicaCatalogEntry
-     * @see #lookup(Set)
-     */
-    private Map bulkLookup(Set lfns, String attrKey, Object attrVal) {
-        List list = null;
-        List lfnsFound = null;
-        RLSString2Bulk curr = null;
-        int size = mBatchSize;
-        Map result = new HashMap(lfns.size());
-
-        try {
-            list = mLRC.getPFNBulk( new ArrayList(lfns));
-            //we need to group pfns by lfn
-            Collections.sort(list, new RLSString2BulkComparator());
-            /*
-            System.out.println("Sorted list is ");
-            for(Iterator it = list.iterator(); it.hasNext();){
-                RLSString2Bulk s2b = (RLSString2Bulk) it.next();
-                System.out.println("\t" + s2b.s1 + "->" + s2b.s2);
-            }
-            */
-            size = list.size() <= size ? list.size() :size;
-            for (Iterator it = list.iterator(); it.hasNext(); ) {
-                //the pfn themseleves need to be queried
-                //in batches to avoid timeout errors but the batch size
-                //should have all the pfns for a lfn!!
-                List l = new ArrayList(size);
-                String prev = "";
-                if (curr != null) {
-                    //this is the case where the current
-                    //item is not in any of the sublists
-                    l.add(curr);
-                }
-                for (int j = 0; (it.hasNext()); ) {
-                    RLSString2Bulk s2b = (RLSString2Bulk) it.next();
-                    //s1 is the lfn
-                    //s2 denotes the pfn
-                    //rc is the exit status returned by the RLI
-                    if (s2b.rc == RLSClient.RLS_SUCCESS) {
-                        curr = s2b;
-                        if (s2b.s2 != null) {
-                            //query for the pool attribute
-                            //for that pfn to the lrc
-                            //if none is found or you do not
-                            //query for the attribute
-                            //pool is set to UNDEFINED_POOL
-                            if (!curr.s1.equalsIgnoreCase(prev)) {
-                                //do nothing
-                                //check if j > size
-                                if (j >= size) {
-                                    //break out of the loop.
-                                    //current needs to go into the next list
-                                    break;
-                                }
-                            }
-
-                            l.add(s2b);
-                            j++;
-                        }
-                        else {
-                            mLogMsg =
-                                "bulkLookup(Set): Unexpected Mapping with no pfn for lfn: " +
-                                s2b.s1;
-                            log(mLogMsg,LogManager.ERROR_MESSAGE_LEVEL);
-                        }
-                        prev = curr.s1;
-                    }
-                    else if (s2b.rc != RLSClient.RLS_LFN_NEXIST) {
-                        mLogMsg = "bulkLookup(Set): " +
-                            mRLS.getErrorMessage(s2b.rc);
-                        log(mLogMsg,LogManager.ERROR_MESSAGE_LEVEL);
-                    }
-                    //prev = curr.s1;
-                }
-                //get hold of all attributes for the pfn's
-                result.putAll(getAttributes(l, attrKey,attrVal));
-
-            }
-
-        }
-        catch (Exception e) {
-            log("bulkLookup(Set)", e,LogManager.FATAL_MESSAGE_LEVEL);
-            System.exit(1);
-        }
-
-        return result;
-    }
-
-    /**
-     * Retrieves multiple entries for a given logical filename, up to the
-     * complete LRC. It uses the bulk query api to the LRC to query for stuff.
-     * Bulk query has been in RLS since version 2.0.8. All the lfns in set
-     * are put in one single bulk query to the LRC. There is a risk of seeing
-     * a timeout error in case of large set of lfns. User should use the
-     * lookup function that internally does the bulk query in batches.
-     *
-     * @param lfns is a set of logical filename strings to look up.
-     *
-     * @return a map indexed by the LFN. Each value is a set
-     * of PFN strings.
-     *
-     * @see #lookupNoAttributes(Set)
-     */
-    private Map bulkLookupNoAttributes(Set lfns) {
-        List list = null;
-        List lfnsFound = null;
-        Map result = new HashMap(lfns.size());
-        String prev = null;
-        String curr = null;
-        Set s = new HashSet();
-
-        try {
-            list = mLRC.getPFNBulk( new ArrayList(lfns));
-            //we need to group pfns by lfn
-            Collections.sort(list, new RLSString2BulkComparator());
-
-            for (Iterator it = list.iterator(); it.hasNext(); ) {
-                RLSString2Bulk s2b = (RLSString2Bulk) it.next();
-
-                //s1 is the lfn
-                //s2 denotes the pfn
-                //rc is the exit status returned by the RLI
-                if (s2b.rc == RLSClient.RLS_SUCCESS) {
-                    curr = s2b.s1;
-                    if (s2b.s2 != null) {
-                        if (!curr.equalsIgnoreCase(prev) && (prev != null)) {
-                            //push it into the map
-                            //we have entry for one lfn and all pfns constructed
-                            result.put(prev, s);
-                            s = new HashSet();
-                        }
-                        s.add(s2b.s2);
-                        //if this was the last one push it in result
-                        if(!it.hasNext()){
-                            result.put(curr,s);
-                        }
-                    }
-                    else {
-                        mLogMsg =
-                            "bulkLookupNoAttributes(Set): Unexpected Mapping with no pfn for lfn: " +
-                            s2b.s1;
-                        log(mLogMsg,LogManager.ERROR_MESSAGE_LEVEL);
-                    }
-                    prev = curr;
-                }
-                else if (s2b.rc != RLSClient.RLS_LFN_NEXIST) {
-                    mLogMsg = "bulkLookupNoAttributes(Set): " +
-                        mRLS.getErrorMessage(s2b.rc);
-                    log(mLogMsg,LogManager.ERROR_MESSAGE_LEVEL);
-                }
-                //prev = curr;
-            }
-        }
-        catch (Exception e) {
-            log("bulkLookupNoAttributes(Set):", e,
-                LogManager.FATAL_MESSAGE_LEVEL);
-            System.exit(1);
-        }
-
-        return result;
-    }
-
-    /**
-     * Constructs replica catalog exception out the RLSException that is
-     * thrown by the API underneath.
-     *
-     * @param prefix   the prefix that is to be applied to the message
-     *                 passed while creating the exception.
-     * @param e        the RLSException that is caught underneath.
-     *
-     * @return a ReplicaCatalogException
-     */
-    private ReplicaCatalogException exception(String prefix,RLSException e){
-        StringBuffer message = new StringBuffer(32);
-        message.append("{LRC ").append(mLRCURL).append("} ")
-               .append(prefix).append(": ").append(e.getMessage());
-        return new ReplicaCatalogException(message.toString(),e);
-    }
-
-
-    /**
-     * Constructs an exception from the <code>RLSString2Bulk</code> object.
-     *
-     * @return a ReplicaCatalogException
-     */
-    private ReplicaCatalogException exception(RLSString2Bulk rs){
-        StringBuffer sb = new StringBuffer(32);
-        sb.append("Error (lfn,pfn,ec)").append(" (")
-          .append(rs.s1).append(',')
-          .append(rs.s2).append(',')
-          .append(rs.rc).append(',').append(mRLS.getErrorMessage(rs.rc))
-          .append(')');
-        return new ReplicaCatalogException(sb.toString());
-
-    }
-
-    /**
-     * Returns a subset of a collection of <code>ReplicaCatalogEntry</code>
-     * objects that have attributes matchin to the attribute identified by
-     * the parameters passed.
-     *
-     * @param collection  the collection of <code>ReplicaCatalogEntry</code>
-     *                    objects.
-     * @param name        the attribute name to match.
-     * @param value       the attribute value.
-     *
-     * @return Set of matching <code>ReplicaCatalogEntry</code> objects.
-     */
-    private Set subset(Collection collection, String name,
-                       Object value) {
-        return subset(collection,name,value,false);
-    }
-
-
-    /**
-     * Returns a subset of a collection of <code>ReplicaCatalogEntry</code>
-     * objects that have attributes matchin to the attribute identified by
-     * the parameters passed.
-     *
-     * @param collection  the collection of <code>ReplicaCatalogEntry</code>
-     *                    objects.
-     * @param name        the attribute name to match.
-     * @param value       the attribute value.
-     * @param onlyPFN     boolean to denote if we only want the PFN's
-     *
-     * @return Set of <code>ReplicaCatalogEntry</code> objects if onlyPfn
-     *         parameter is set to false, else a Set of pfns.
-     */
-    private Set subset(Collection collection, String name,
-                                Object value, boolean onlyPFN) {
-        Set s = new HashSet();
-        ReplicaCatalogEntry rce;
-        Object attrVal;
-        for (Iterator it = collection.iterator(); it.hasNext(); ) {
-            rce = (ReplicaCatalogEntry) it.next();
-            //System.out.println("RCE is " + rce);
-            attrVal = rce.getAttribute(name);
-            if ( attrVal != null &&  attrVal.equals(value)) {
-                //adding to the set only if
-                //the attribute existed in the rce
-                s.add(onlyPFN?
-                      (Object)rce.getPFN():
-                      rce);
-            }
-        }
-
-        return s;
-    }
-
-    /**
-     * A helper method that converts RLSString2 to MyRLSString2Bulk object.
-     *
-     * @param obj  the <code>RLSString2</code> to convert.
-     *
-     * @return the converted <code>MyRLSString2</code> object.
-     */
-    private RLSString2Bulk convert(RLSString2 obj){
-        return new MyRLSString2Bulk(0,obj.s1,obj.s2);
-    }
-
-    /**
-     * Logs to the logger object.
-     *
-     * @param message the message to be logged.
-     * @param level   the logger level at which the message is to be logged.
-     */
-    private void log(String message,int level){
-        message = "{LRC " + mLRCURL + "} " + message;
-        mLogger.log(message,level);
-    }
-
-    /**
-     * Logs to the logger object.
-     *
-     * @param message the message to be logged.
-     * @param e       the exception that occured.
-     * @param level   the logger level at which the message is to be logged.
-     */
-    private void log(String message,Exception e,int level){
-        message = "{LRC " + mLRCURL + "} " +  message;
-        mLogger.log(message,e,level);
-    }
-
-
-
-    /**
-     * The comparator that is used to group the <code>RLSString2</code> objects by the
-     * value in the s1 field.  This comparator should only  be used for grouping
-     * purposes not in Sets or Maps etc.
-     */
-    private class RLSString2Comparator implements Comparator {
-
-        /**
-         * Compares this object with the specified object for order. Returns a
-         * negative integer, zero, or a positive integer if the first argument is
-         * less than, equal to, or greater than the specified object. The
-         * RLSString2 are compared by their s1 field.
-         *
-         * @param o1 is the first object to be compared.
-         * @param o2 is the second object to be compared.
-         *
-         * @return a negative number, zero, or a positive number, if the
-         * object compared against is less than, equals or greater than
-         * this object.
-         * @exception ClassCastException if the specified object's type
-         * prevents it from being compared to this Object.
-         */
-        public int compare(Object o1, Object o2) {
-            if (o1 instanceof RLSString2 && o2 instanceof RLSString2) {
-                return ( (RLSString2) o1).s1.compareTo( ( (RLSString2)
-                    o2).s1);
-            }
-            else {
-                throw new ClassCastException("object is not RLSString2");
-            }
-        }
-
-    }
-
-    /**
-     * The comparator that is used to group the RLSString2Bulk objects by the
-     * value in the s1 field.  This comparator should only  be used for grouping
-     * purposes not in Sets or Maps etc.
-     */
-    private class RLSString2BulkComparator implements Comparator {
-
-        /**
-         * Compares this object with the specified object for order. Returns a
-         * negative integer, zero, or a positive integer if the first argument is
-         * less than, equal to, or greater than the specified object. The
-         * RLSString2Bulk are compared by their s1 field.
-         *
-         * @param o1 is the first object to be compared.
-         * @param o2 is the second object to be compared.
-         *
-         * @return a negative number, zero, or a positive number, if the
-         * object compared against is less than, equals or greater than
-         * this object.
-         * @exception ClassCastException if the specified object's type
-         * prevents it from being compared to this Object.
-         */
-        public int compare(Object o1, Object o2) {
-            if (o1 instanceof RLSString2Bulk && o2 instanceof RLSString2Bulk) {
-                return ( (RLSString2Bulk) o1).s1.compareTo( ( (RLSString2Bulk)
-                    o2).s1);
-            }
-            else {
-                throw new ClassCastException("object is not RLSString2Bulk");
-            }
-        }
-
-    }
-
-    /**
-     * The comparator that is used to group the RLSAttributeObject objects by the
-     * value in the key field.  This comparator should only  be used for grouping
-     * purposes not in Sets or Maps etc.
-     */
-    private class RLSAttributeComparator implements Comparator {
-
-        /**
-         * Compares this object with the specified object for order. Returns a
-         * negative integer, zero, or a positive integer if the first argument is
-         * less than, equal to, or greater than the specified object. The
-         * RLSAttributeObject are compared by their s1 field.
-         *
-         * @param o1 is the first object to be compared.
-         * @param o2 is the second object to be compared.
-         *
-         * @return a negative number, zero, or a positive number, if the
-         * object compared against is less than, equals or greater than
-         * this object.
-         * @exception ClassCastException if the specified object's type
-         * prevents it from being compared to this Object.
-         */
-        public int compare(Object o1, Object o2) {
-            if (o1 instanceof RLSAttributeObject && o2 instanceof RLSAttributeObject) {
-                return ( (RLSAttributeObject) o1).key.compareTo( ( (
-                    RLSAttributeObject) o2).key);
-            }
-            else {
-                throw new ClassCastException("object is not RLSAttributeObject");
-            }
-        }
-
-    }
-
-    /**
-     * The class that extends RLSString2Bulk and adds on the equals method,
-     * that allows me to do the set operations
-     */
-    private class MyRLSString2Bulk extends RLSString2Bulk{
-
-
-        /**
-         * The overloaded constructor.
-         *
-         * @param rca  the rls exitcode
-         * @param s1a  the String object usually containing the lfn
-         */
-        public MyRLSString2Bulk(int rca, java.lang.String s1a){
-            super(rca,s1a);
-        }
-
-
-        /**
-         * The overloaded constructor.
-         *
-         * @param rca  the rls exitcode.
-         * @param s1a  the String object usually containing the lfn.
-         * @param s2a  the String object usually containing the pfn.
-         */
-        public MyRLSString2Bulk(int rca, java.lang.String s1a, java.lang.String s2a){
-            super(rca,s1a,s2a);
-        }
-
-        /**
-         * Indicates whether some other object is "equal to" this one.
-         *
-         * An object is considered equal if it is of the same type and
-         * all the fields s1 and s2 match.
-         *
-         * @return boolean whether the object is equal or not.
-         */
-        public boolean equals(Object obj){
-            if(obj instanceof MyRLSString2Bulk){
-                MyRLSString2Bulk sec = (MyRLSString2Bulk)obj;
-                return this.s1.equals(sec.s1) && this.s2.equals(sec.s2);
-            }
-            return false;
-        }
-
-        /**
-         * Returns a string representation of the object.
-         *
-         * @return the String representation.
-         */
-        public String toString(){
-            StringBuffer sb = new StringBuffer(10);
-            sb.append("(").append(s1).append("->").append(s2).
-            append(",").append(rc).append(")");
-            return sb.toString();
-        }
-    }
-
-    /**
-     * Testing function.
-     */
-    public static void main(String[] args){
-        LRC lrc = new LRC();
-        lrc.connect("rls://sukhna.isi.edu");
-        String lfn = "test";
-        LogManagerFactory.loadSingletonInstance().setLevel(LogManager.DEBUG_MESSAGE_LEVEL);
-
-        /*
-        ReplicaCatalogEntry rce = new ReplicaCatalogEntry("gsiftp://sukhna.isi.edu/tmp/test");
-        rce.addAttribute("name","karan");
-        lrc.insert("test",rce);
-        lrc.insert("test","gsiftp://sukhna.isi.edu/tmp/test1","isi");
-        lrc.insert("test","gsiftp://sukhna.isi.edu/constraint/testvahi","isi");
-        lrc.insert("vahi.f.a","file:///tmp/vahi.f.a","isi");
-        lrc.insert("testvahi.f.a","file:///tmp/testvahi.f.a","isi");
-
-        rce = new ReplicaCatalogEntry("gsiftp://sukhna.isi.edu/tmp/testX");
-        rce.addAttribute("name","karan");
-        rce.addAttribute("pool","isi");
-        lrc.insert("testX",rce);
-        */
-
-       /*
-        System.out.println("Getting list of lfns");
-        System.out.println("\t" + lrc.list());
-
-
-
-        Set s = new HashSet();
-        s.add("test");s.add("vahi.f.a");s.add("testX");
-        s.add("unknown");
-
-
-        System.out.println("\nQuerying for complete RCE for site  " + s );
-        System.out.println(lrc.lookup(s));
-        */
-
-        /*
-        System.out.println("\n Deleting " + lfn);
-        System.out.println(lrc.deleteByResource(lfn,"isi"));
-
-
-        System.out.println("\nQuerying for PFN for site" + s );
-        System.out.println(lrc.lookupNoAttributes(s,"isi"));
-
-        System.out.println("\nRemoving lfns " + s);
-        //System.out.println(lrc.remove(s));
-        System.out.println(lrc.removeByAttribute("isi"));
-
-
-       System.out.println("\n\nClearing catalog " + lrc.clear());
-       */
-
-       //System.out.println("Getting list of lfns");
-       //System.out.println("\t" + lrc.listLFNPFN("*vahi*",false));
-
-       /*
-       System.out.println("Removing lfns in set " + s + " ");
-       System.out.println(lrc.removeByAttribute("isi"));
-
-       Map m = new HashMap();
-       //m.put("pfn","*vahi*");
-       m.put("lfn","test*");
-       System.out.println("Getting lfns matching constraint");
-       System.out.println("\t" + lrc.lookup(m));
-       */
-
-
-       //test bulk insert
-       System.out.println("Clearing the database");
-       //lrc.clear();
-       Map inserts = new HashMap();
-       Collection c1 = new ArrayList();
-       c1.add(new ReplicaCatalogEntry("gsiftp://test/f.a","isi"));
-       Collection c2 = new ArrayList();
-       c2.add(new ReplicaCatalogEntry("gsiftp://test/f.b","isi"));
-       Collection c3 = new ArrayList();
-       c3.add(new ReplicaCatalogEntry("gsiftp://test/f.c","isi1"));
-       inserts.put("f.a",c1);
-       inserts.put("f.b",c2);
-       inserts.put("f.c",c3);
-       System.out.println("Doing bulk inserts");
-       try{
-           System.out.println("Inserted " + lrc.insert(inserts) + " entries");
-       }
-       catch(ReplicaCatalogException rce){
-           do {
-               System.out.println(rce.getMessage());
-               rce = (ReplicaCatalogException) rce.getNextException();
-           } while ( rce != null );
-
-       }
-
-       lrc.close();
-    }
-
-
-} //end of  class LRC
Index: pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/replica/impl/RLI.java
===================================================================
--- pegasus-wms_4.0.1+dfsg.orig/src/edu/isi/pegasus/planner/catalog/replica/impl/RLI.java	2012-05-24 16:47:52.064156782 -0700
+++ /dev/null	1970-01-01 00:00:00.000000000 +0000
@@ -1,1988 +0,0 @@
-/**
- *  Copyright 2007-2008 University Of Southern California
- *
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package edu.isi.pegasus.planner.catalog.replica.impl;
-
-import edu.isi.pegasus.planner.catalog.replica.*;
-import edu.isi.pegasus.common.logging.LogManagerFactory;
-import edu.isi.pegasus.common.logging.LogManager;
-
-import edu.isi.pegasus.planner.catalog.ReplicaCatalog;
-import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry;
-import edu.isi.pegasus.planner.catalog.CatalogException;
-
-import edu.isi.pegasus.common.util.Version;
-import org.globus.replica.rls.RLSClient;
-import org.globus.replica.rls.RLSException;
-import org.globus.replica.rls.RLSAttribute;
-import org.globus.replica.rls.RLSAttributeObject;
-import org.globus.replica.rls.RLSLRCInfo;
-import org.globus.replica.rls.RLSString2Bulk;
-import org.globus.replica.rls.RLSString2;
-
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.Properties;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.LinkedHashSet;
-import java.util.Iterator;
-
-/**
- * This class implements the VDS replica catalog interface on top of RLI API.
- * A thing to take care of is that all delete and remove operations are
- * propoagated to all the Local Replica Catalogs (LRCs) reporting to the RLI.
- * Hence,
- * you should be careful while deleting LFNs, as deletions can cascade to
- * multiple LRCs. If you want to delete or remove an LFN from a particular LRC,
- * use the LRC implementation to connect to that LRC and call the corresponding
- * delete functions on that.
- * There is no transaction support in the implementation. The implementation
- * is best effort. Inconsistencies can occur if one of the LRCs goes offline,
- * or an operation fails for whatsoever reason.
- *
- * @author Karan Vahi
- * @author Gaurang Mehta
- *
- * @version $Revision: 2079 $
- */
-public class RLI implements ReplicaCatalog {
-
-    /**
-     * The number of entries searched in each bulk query to RLS.
-     */
-    public static final int RLS_BULK_QUERY_SIZE = 1000;
-
-    /**
-     * The default timeout in seconds to be used while querying the RLI.
-     */
-    public static final String DEFAULT_RLI_TIMEOUT = "30";
-
-    /**
-     * The key that is used to get hold of the timeout value from the properties
-     * object.
-     */
-    public static final String RLS_TIMEOUT_KEY = "rls.timeout";
-
-    /**
-     * The key that is used to get hold of the timeout value from the properties
-     * object.
-     */
-    public static final String RLI_TIMEOUT_KEY = "rli.timeout";
-
-    /**
-     * The key that is used to designate the LRC whose results are to be
-     * ignored.
-     */
-    public static final String LRC_IGNORE_KEY = "lrc.ignore";
-
-    /**
-     * The key that is used to designate the LRC whose results are to be
-     * restricted.
-     */
-    public static final String LRC_RESTRICT_KEY = "lrc.restrict";
-
-    /**
-     * The attribute in RLS that maps to a site handle.
-     */
-    public static final String SITE_ATTRIBUTE = "pool";
-
-    /**
-     * The key that is used to get hold of the url from the properties object.
-     */
-    public static final String URL_KEY = "url";
-
-    /**
-     * The key that if set, specifies the proxy to be picked up while connecting
-     * to the RLS.
-     */
-    public static final String PROXY_KEY = "proxy";
-
-    /**
-     * The error message for not connected to RLI.
-     */
-    public static final String RLI_NOT_CONNECTED_MSG = "Not connected to RLI ";
-
-    /**
-     * The error message for not connected to LRC.
-     */
-    public static final String LRC_NOT_CONNECTED_MSG = "Unable to connect to LRC ";
-
-
-    /**
-     * The LRC query state indicating that LRC needs to queried fully. The LRC
-     * returns all PFNs irrespective of whether they have a site attribute or
-     * not.
-     */
-    public static final int LRC_QUERY_NORMAL = 0;
-
-    /**
-     * The LRC query state indicating that LRC has to be restricted query.
-     * LRC should return only PFNs with site attributes tagged.
-     */
-    public static final int LRC_QUERY_RESTRICT = 1;
-
-    /**
-     * The LRC query state indicating that LRC has to be ignored.
-     */
-    public static final int LRC_QUERY_IGNORE = 2;
-
-
-    /**
-     * The handle to the client that allows access to both the RLI and the LRC
-     * running at the url specified while connecting.
-     */
-    private RLSClient mRLS;
-
-    /**
-     * The handle to the client that allows access to the LRC running at the
-     * url specified while connecting.
-     */
-    private RLSClient.RLI mRLI;
-
-    /**
-     * The url to the RLI to which this instance implementation talks to.
-     */
-    private String mRLIURL;
-
-    /**
-     * A String array contains the LRC URLs that have to be ignored for querying.
-     */
-    private String[] mLRCIgnoreList;
-
-    /**
-     * A String array contains the LRC URLs that have to be restricted for querying.
-     * Only those entries are returned that have a site attribute associated
-     * with them.
-     */
-    private String[] mLRCRestrictList;
-
-
-    /**
-     * The handle to the logging object. Should be log4j soon.
-     */
-    private LogManager mLogger;
-
-    /**
-     * The string holding the message that is logged in the logger.
-     */
-    private String mLogMsg;
-
-
-    /**
-     * The properties object containing all the properties, that are required
-     * to connect to a RLS.
-     */
-    private Properties mConnectProps;
-
-    /**
-     * The batch size while querying the RLI in the bulk mode.
-     */
-    private int mBatchSize;
-
-    /**
-     * The timeout in seconds to be applied while querying the RLI.
-     */
-    private int mTimeout;
-    
-    /**
-     * The default constructor, that creates an object which is not linked with
-     * any RLS. Use the connect method to connect to the RLS.
-     *
-     * @see #connect(Properties).
-     */
-    public RLI() {
-        mRLS = null;
-        mLogger =  LogManagerFactory.loadSingletonInstance();
-        mConnectProps = new Properties();
-        mBatchSize = this.RLS_BULK_QUERY_SIZE;
-        mTimeout   = Integer.parseInt(DEFAULT_RLI_TIMEOUT);
-
-    }
-
-   /**
-    * Establishes a connection to the RLI, picking up the proxy from the default
-    * location usually /tmp/ directory.
-    *
-    * @param url    the url to lrc to connect to.
-    *
-    * @return true if connected now, or false to indicate a failure.
-    */
-   public boolean connect(String url) {
-       return connect(url,null);
-   }
-
-
-    /**
-     * Establishes a connection to the RLI.
-     *
-     * @param props contains all necessary data to establish the link.
-     *
-     * @return true if connected now, or false to indicate a failure.
-     */
-    public boolean connect(Properties props) {
-        boolean con = false;
-        Object obj = props.remove(URL_KEY);
-        mRLIURL = (obj == null) ? null : (String) obj;
-
-        if (mRLIURL == null) {
-            //nothing to connect to.
-            mLogger.log("The RLI url is not specified",
-                        LogManager.ERROR_MESSAGE_LEVEL);
-            return con;
-        }
-
-        //try to see if a proxy cert has been specified or not
-        String proxy = props.getProperty(PROXY_KEY);
-        mConnectProps = props;//??
-        
-        mLogger.log( "[RLI-RC] Connection properties passed are " + props,
-                     LogManager.DEBUG_MESSAGE_LEVEL );
-
-        mLRCIgnoreList   = this.getRLSLRCIgnoreURLs( props );
-        mLRCRestrictList = this.getRLSLRCRestrictURLs( props );
-
-
-        //determine timeout
-        mTimeout = getTimeout(props);
-
-        //set the batch size for queries
-        setBatchSize(props);
-
-        return connect(mRLIURL, proxy);
-    }
-
-    /**
-     * Establishes a connection to the RLI.
-     *
-     * @param url    the url to lrc to connect to.
-     * @param proxy  the path to the proxy file to be picked up. null denotes
-     *               default location.
-     *
-     * @return true if connected now, or false to indicate a failure.
-     */
-    public boolean connect(String url, String proxy) {
-        mRLIURL = url;
-        //push it into the internal properties object
-        mConnectProps.setProperty(URL_KEY,url);
-        if(proxy != null){
-            mConnectProps.setProperty(PROXY_KEY, proxy);
-        }
-        try {
-            mRLS = (proxy == null) ?
-                new RLSClient(url) : //proxy is picked up from default loc /tmp
-                new RLSClient(url, proxy);
-
-            //set RLI timeout
-            mRLS.SetTimeout(mTimeout);
-
-            //connect is only successful if we have
-            //successfully connected to the LRC
-            mRLI = mRLS.getRLI();
-
-        }
-        catch (RLSException e) {
-            mLogger.log("RLS Exception", e,LogManager.ERROR_MESSAGE_LEVEL);
-            return false;
-        }
-        return true;
-    }
-
-
-    /**
-     * Gets a handle to the RLI that is associated with the RLS running at
-     * url.
-     *
-     * @return <code>RLSClient.RLI</code> that points to the RLI that is
-     *         running , or null in case connect method not being called.
-     * @see #mRLIURL
-     */
-    public RLSClient.RLI getRLI() {
-        return (this.isClosed()) ? null: mRLS.getRLI() ;
-    }
-
-    /**
-     * Gets a handle to the LRC that is associated with the RLS running at
-     * url.
-     *
-     * @return <code>RLSClient.LRC</code> that points to the RLI that is
-     *         running , or null in case connect method not being called.
-     * @see #mRLIURL
-     */
-    public RLSClient.LRC getLRC() {
-        return (this.isClosed()) ? null : mRLS.getLRC();
-    }
-
-    /**
-     * Retrieves the entry for a given filename and resource handle from
-     * the RLS.
-     *
-     * @param lfn is the logical filename to obtain information for.
-     * @param handle is the resource handle to obtain entries for.
-     *
-     * @return the (first) matching physical filename, or
-     * <code>null</code> if no match was found.
-     */
-    public String lookup(String lfn, String handle) {
-        //sanity check
-        if (this.isClosed()) {
-            //probably an exception should be thrown here!!
-            throw new RuntimeException(RLI_NOT_CONNECTED_MSG + this.mRLIURL);
-        }
-        String pfn = null;
-        ArrayList lrcList = null;
-        try {
-            lrcList = mRLI.getLRC(lfn);
-            for (Iterator it = lrcList.iterator(); it.hasNext(); ) {
-                //connect to an lrc
-                String lrcURL = ( (RLSString2) it.next()).s2;
-                //push the lrcURL to the properties object
-                mConnectProps.setProperty(this.URL_KEY,lrcURL);
-                LRC lrc = new LRC();
-                if(!lrc.connect(mConnectProps)){
-                    //log an error/warning message
-                    mLogger.log("Unable to connect to LRC " + lrcURL,
-                                LogManager.ERROR_MESSAGE_LEVEL);
-                    continue;
-                }
-
-                //query the lrc
-                try{
-                    pfn = lrc.lookup(lfn,handle);
-                    if(pfn != null)
-                        return pfn;
-                }
-                catch(Exception ex){
-                    mLogger.log("lookup(String,String)",ex,
-                                LogManager.ERROR_MESSAGE_LEVEL);
-                }
-                finally{
-                    //disconnect
-                    lrc.close();
-                }
-            }
-        }
-        catch (RLSException ex) {
-            mLogger.log("lookup(String,String)",ex,
-                        LogManager.ERROR_MESSAGE_LEVEL);
-        }
-
-        return null;
-    }
-    
-    /**
-     * Retrieves all entries for a given LFN from the replica catalog.
-     * Each entry in the result set is a tuple of a PFN and all its
-     * attributes.
-     *
-     * @param lfn is the logical filename to obtain information for.
-     *
-     * @return a collection of replica catalog entries,  or null in case of
-     *         unable to connect to RLS.
-     *
-     * @see ReplicaCatalogEntry
-     */
-    public Collection lookup(String lfn) {
-        Set lfns = new HashSet();
-        lfns.add( lfn );
-        
-        Map<String, Collection<ReplicaCatalogEntry>> result = this.lookup( lfns );
-
-        if( result == null ){
-            return null;
-        }
-        else{
-            Collection values = result.get( lfn );
-            if( values == null ){
-                //JIRA PM-74
-                values = new ArrayList();
-            }
-            return values;
-        }
-        
-    }
-
-    
-
-    /**
-     * Retrieves all entries for a given LFN from the replica catalog.
-     * Each entry in the result set is just a PFN string. Duplicates
-     * are reduced through the set paradigm.
-     *
-     * @param lfn is the logical filename to obtain information for.
-     * @return a set of PFN strings, or null in case of unable to connect
-     *         to RLS.
-     *
-     */
-    public Set lookupNoAttributes(String lfn) {
-        Set lfns = new HashSet();
-        lfns.add( lfn );
-        
-        Map<String, Set<String>> result = this.lookupNoAttributes( lfns );
-
-        if( result == null ){
-            return null;
-        }
-        else{
-            Set values = result.get( lfn );
-            if( values == null ){
-                //JIRA PM-74
-                values = new HashSet();
-            }
-            return values;
-        }
-     
-    }
-  
-    /**
-     * Retrieves multiple entries for a given logical filename, up to the
-     * complete LRC. It uses the bulk query api to the LRC to query for stuff.
-     * Bulk query has been in RLS since version 2.0.8. Internally, the bulk
-     * queries are done is sizes specified by variable mBatchSize.
-     *
-     * @param lfns is a set of logical filename strings to look up.
-     *
-     * @return a map indexed by the LFN. Each value is a collection
-     * of replica catalog entries for the LFN.
-     *
-     * @see ReplicaCatalogEntry
-     * @see #getBatchSize()
-     */
-    public Map lookup(Set lfns) {
-        //Map indexed by lrc url and each value a collection
-        //of lfns that the RLI says are present in it.
-        Map lrc2lfn  = this.getLRC2LFNS(lfns);
-
-        if(lrc2lfn == null){
-            //probably RLI is not connected!!
-            return null;
-        }
-
-        // now query the LRCs with the LFNs that they are responsible for
-        // and aggregate stuff.
-        String key = null;
-        Map result = new HashMap(lfns.size());
-        String message;
-        for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){
-            Map.Entry entry = (Map.Entry)it.next();
-            key = (String)entry.getKey();
-            message = "Querying LRC " + key;
-            mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL);
-
-            //push the lrcURL to the properties object
-            mConnectProps.setProperty(this.URL_KEY,key);
-            LRC lrc     = new LRC();
-            if(!lrc.connect(mConnectProps)){
-                //log an error/warning message
-                mLogger.log("Unable to connect to LRC " + key,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-                continue;
-            }
-
-            //query the lrc
-            try{
-                Map m = lrc.lookup((Set)entry.getValue());
-
-                //figure out if we need to restrict our queries or not.
-                //restrict means only include results if they have a site
-                //handle associated
-                boolean restrict = ( this.determineQueryType(key) == this.LRC_QUERY_RESTRICT );
-
-                for(Iterator mit = m.entrySet().iterator();mit.hasNext();){
-                    entry = (Map.Entry)mit.next();
-                    List pfns = (( List )entry.getValue());
-                    if ( restrict ){
-                        //traverse through all the PFN's and check for resource handle
-                        for ( Iterator pfnIterator = pfns.iterator(); pfnIterator.hasNext();  ){
-                            ReplicaCatalogEntry pfn = (ReplicaCatalogEntry) pfnIterator.next();
-                            if ( pfn.getResourceHandle() == null ){
-                                //do not include in the results if the entry does not have
-                                //a pool attribute associated with it.
-                                mLogger.log("Ignoring entry " + entry.getValue() +
-                                            " from  LRC " + key,
-                                            LogManager.DEBUG_MESSAGE_LEVEL);
-                                pfnIterator.remove();
-                            }
-                        }
-
-                    }
-
-                    //if pfns are empty which could be due to
-                    //restriction case taking away all pfns
-                    //do not merge in result
-                    if( pfns.isEmpty() ){ continue; }
-
-                    //merge the entries into the main result
-                    key   = (String)entry.getKey(); //the lfn
-                    if( result.containsKey(key) ){
-                        //right now no merging of RCE being done on basis
-                        //on them having same pfns. duplicate might occur.
-                        ((List)result.get(key)).addAll( pfns );
-                    }
-                    else{
-                        result.put( key, pfns );
-                    }
-                }
-            }
-            catch(Exception ex){
-                mLogger.log("lookup(Set)",ex,LogManager.ERROR_MESSAGE_LEVEL);
-            }
-            finally{
-                //disconnect
-                lrc.close();
-            }
-
-
-            mLogger.log( message  + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL);
-        }
-        return result;
-    }
-
-    /**
-     * Retrieves all entries for a given LFN from the replica catalog.
-     * Each entry in the result set is just a PFN string. Duplicates
-     * are reduced through the set paradigm.
-     *
-     * @param lfns is a set of logical filename strings to look up.
-     * @return a map indexed by the LFN. Each value is a collection
-     * of PFN's for the LFN.
-     */
-    public Map lookupNoAttributes(Set lfns) {
-        //Map indexed by lrc url and each value a collection
-        //of lfns that the RLI says are present in it.
-        Map lrc2lfn  = this.getLRC2LFNS(lfns);
-        if(lrc2lfn == null){
-            //probably RLI is not connected!!
-            return null;
-        }
-
-        // now query the LRCs with the LFNs that they are responsible for
-        // and aggregate stuff.
-        String key = null;
-        String message;
-        Map result = new HashMap(lfns.size());
-        for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){
-            Map.Entry entry = (Map.Entry)it.next();
-            key = (String)entry.getKey();
-            message = "Querying LRC " + key;
-            mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL);
-
-            //push the lrcURL to the properties object
-            mConnectProps.setProperty(this.URL_KEY,key);
-            LRC lrc     = new LRC();
-            if(!lrc.connect(mConnectProps)){
-                //log an error/warning message
-                mLogger.log("Unable to connect to LRC " + key,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-                continue;
-            }
-
-            //query the lrc
-            try{
-                Map m = lrc.lookupNoAttributes((Set)entry.getValue());
-                for(Iterator mit = m.entrySet().iterator();mit.hasNext();){
-                    entry = (Map.Entry)mit.next();
-                    //merge the entries into the main result
-                    key   = (String)entry.getKey(); //the lfn
-                    if(result.containsKey(key)){
-                        //right now no merging of RCE being done on basis
-                        //on them having same pfns. duplicate might occur.
-                        ((Set)result.get(key)).addAll((Set)entry.getValue());
-                    }
-                    else{
-                        result.put(key,entry.getValue());
-                    }
-                }
-            }
-            catch(Exception ex){
-                mLogger.log("lookup(Set)",ex,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-            }
-            finally{
-                //disconnect
-                lrc.close();
-            }
-
-
-            mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL);
-        }
-        return result;
-
-    }
-
-    /**
-     * Retrieves multiple entries for a given logical filenames, up to the
-     * complete catalog. Retrieving full catalogs should be harmful, but
-     * may be helpful in online display or portal.<p>
-     *
-     * @param lfns is a set of logical filename strings to look up.
-     * @param handle is the resource handle, restricting the LFNs.
-     *
-     * @return a map indexed by the LFN. Each value is a collection
-     * of replica catalog entries (all attributes).
-     *
-     * @see ReplicaCatalogEntry
-     */
-    public Map lookup(Set lfns, String handle) {
-        //Map indexed by lrc url and each value a collection
-        //of lfns that the RLI says are present in it.
-        Map lrc2lfn  = this.getLRC2LFNS(lfns);
-        if(lrc2lfn == null){
-            //probably RLI is not connected!!
-            return null;
-        }
-
-        // now query the LRCs with the LFNs they are responsible for
-        // and aggregate stuff.
-        String key = null,message = null;
-        Map result = new HashMap(lfns.size());
-        for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){
-            Map.Entry entry = (Map.Entry)it.next();
-            key = (String)entry.getKey();
-            message = "Querying LRC " + key;
-            mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL);
-
-            //push the lrcURL to the properties object
-            mConnectProps.setProperty(this.URL_KEY,key);
-            LRC lrc     = new LRC();
-            if(!lrc.connect(mConnectProps)){
-                //log an error/warning message
-                mLogger.log("Unable to connect to LRC " + key,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-                continue;
-            }
-
-            //query the lrc
-            try{
-                Map m = lrc.lookup((Set)entry.getValue(),handle);
-                for(Iterator mit = m.entrySet().iterator();mit.hasNext();){
-                    entry = (Map.Entry)mit.next();
-                    //merge the entries into the main result
-                    key   = (String)entry.getKey(); //the lfn
-                    if(result.containsKey(key)){
-                        //right now no merging of RCE being done on basis
-                        //on them having same pfns. duplicate might occur.
-                        ((Set)result.get(key)).addAll((Set)entry.getValue());
-                    }
-                    else{
-                        result.put(key,entry.getValue());
-                    }
-                }
-            }
-            catch(Exception ex){
-                mLogger.log("lookup(Set,String)",ex,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-            }
-            finally{
-                //disconnect
-                lrc.close();
-            }
-
-
-            mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL);
-        }
-        return result;
-
-
-    }
-
-    /**
-     * Retrieves multiple entries for a given logical filename, up to the
-     * complete catalog. Retrieving full catalogs should be harmful, but
-     * may be helpful in online display or portal.<p>
-     *
-     * The <code>noAttributes</code> flag is missing on purpose, because
-     * due to the resource handle, attribute lookups are already required.
-     *
-     * @param lfns is a set of logical filename strings to look up.
-     * @param handle is the resource handle, restricting the LFNs.
-     *
-     * @return a map indexed by the LFN. Each value is a set of
-     * physical filenames.
-     */
-    public Map lookupNoAttributes( Set lfns, String handle ){
-        //Map indexed by lrc url and each value a collection
-        //of lfns that the RLI says are present in it.
-        Map lrc2lfn  = this.getLRC2LFNS(lfns);
-        if(lrc2lfn == null){
-            //probably RLI is not connected!!
-            return null;
-        }
-
-        // now query the LRCs with the LFNs that they are responsible for
-        // and aggregate stuff.
-        String key = null,message = null;
-        Map result = new HashMap(lfns.size());
-        for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){
-            Map.Entry entry = (Map.Entry)it.next();
-            key = (String)entry.getKey();
-            message = "Querying LRC " + key;
-            mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL);
-
-            //push the lrcURL to the properties object
-            mConnectProps.setProperty(this.URL_KEY,key);
-            LRC lrc     = new LRC();
-            if(!lrc.connect(mConnectProps)){
-                //log an error/warning message
-                mLogger.log("Unable to connect to LRC " + key,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-                continue;
-            }
-
-            //query the lrc
-            try{
-                Map m = lrc.lookupNoAttributes((Set)entry.getValue(),handle);
-                for(Iterator mit = m.entrySet().iterator();mit.hasNext();){
-                    entry = (Map.Entry)mit.next();
-                    //merge the entries into the main result
-                    key   = (String)entry.getKey(); //the lfn
-                    if(result.containsKey(key)){
-                        //right now no merging of RCE being done on basis
-                        //on them having same pfns. duplicate might occur.
-                        ((Set)result.get(key)).addAll((Set)entry.getValue());
-                    }
-                    else{
-                        result.put(key,entry.getValue());
-                    }
-                }
-            }
-            catch(Exception ex){
-                mLogger.log("lookup(Set,String):",ex,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-            }
-            finally{
-                //disconnect
-                lrc.close();
-            }
-
-
-            mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL);
-        }
-        return result;
-
-    }
-
-    /**
-     * Retrieves multiple entries for a given logical filename, up to the
-     * complete catalog. Retrieving full catalogs should be harmful, but
-     * may be helpful in online display or portal.<p>
-     *
-     *
-     * At present it DOES NOT SUPPORT ATTRIBUTE MATCHING.
-     *
-     * @param constraints is mapping of keys 'lfn', 'pfn' to a string that
-     * has some meaning to the implementing system. This can be a SQL
-     * wildcard for queries, or a regular expression for Java-based memory
-     * collections. Unknown keys are ignored. Using an empty map requests
-     * the complete catalog.
-     *
-     * @return a map indexed by the LFN. Each value is a collection
-     * of replica catalog entries.
-     *
-     * @see ReplicaCatalogEntry
-     */
-    public Map lookup(Map constraints) {
-        //sanity check
-        if (this.isClosed()) {
-            //probably an exception should be thrown here!!
-            throw new RuntimeException(RLI_NOT_CONNECTED_MSG + this.mRLIURL);
-        }
-        Map result = new HashMap();
-        String url = null,message = null;
-        //we need to get hold of all the LRC
-        //that report to the RLI and call the
-        //list() method on each of them
-        for(Iterator it = this.getReportingLRC().iterator();it.hasNext();){
-            url = (String)it.next();
-            message = "Querying LRC " + url;
-            mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL);
-
-            //push the lrcURL to the properties object
-            mConnectProps.setProperty(this.URL_KEY,url);
-            LRC lrc     = new LRC();
-            if(!lrc.connect(mConnectProps)){
-                //log an error/warning message
-                mLogger.log("Unable to connect to LRC " + url,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-                continue;
-            }
-            try{
-                Map m = lrc.lookup(constraints);
-                for(Iterator mit = m.entrySet().iterator();mit.hasNext();){
-                    Map.Entry entry = (Map.Entry)mit.next();
-                    //merge the entries into the main result
-                    String key   = (String)entry.getKey(); //the lfn
-                    if(result.containsKey(key)){
-                        //right now no merging of RCE being done on basis
-                        //on them having same pfns. duplicate might occur.
-                        ((List)result.get(key)).addAll((List)entry.getValue());
-                    }
-                    else{
-                        result.put(key,entry.getValue());
-                    }
-                }
-
-            }
-            catch(Exception e){
-                mLogger.log("list(String)",e,LogManager.ERROR_MESSAGE_LEVEL);
-            }
-            finally{
-                lrc.close();
-            }
-        }
-
-        return result;
-
-    }
-
-    /**
-     * Lists all logical filenames in the catalog.
-     *
-     * @return A set of all logical filenames known to the catalog.
-     */
-    public Set list() {
-        //sanity check
-        if (this.isClosed()) {
-            //probably an exception should be thrown here!!
-            throw new RuntimeException(RLI_NOT_CONNECTED_MSG + this.mRLIURL);
-        }
-        Set result = new HashSet();
-        String url = null,message = null;
-        //we need to get hold of all the LRC
-        //that report to the RLI and call the
-        //list() method on each of them
-        for(Iterator it = this.getReportingLRC().iterator();it.hasNext();){
-            url = (String)it.next();
-            message = "Querying LRC " + url;
-            mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL);
-
-            //push the lrcURL to the properties object
-            mConnectProps.setProperty(this.URL_KEY,url);
-            LRC lrc     = new LRC();
-            if(!lrc.connect(mConnectProps)){
-                //log an error/warning message
-                mLogger.log("Unable to connect to LRC " + url,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-                continue;
-            }
-            try{
-                result.addAll(lrc.list());
-            }
-            catch(Exception e){
-                mLogger.log("list()",e,LogManager.ERROR_MESSAGE_LEVEL);
-            }
-            finally{
-                lrc.close();
-            }
-            mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL);
-        }
-
-        return result;
-    }
-
-    /**
-     * Lists a subset of all logical filenames in the catalog.
-     *
-     * @param constraint is a constraint for the logical filename only. It
-     * is a string that has some meaning to the implementing system. This
-     * can be a SQL wildcard for queries, or a regular expression for
-     * Java-based memory collections.
-     *
-     * @return A set of logical filenames that match. The set may be empty
-     */
-    public Set list(String constraint) {
-        //sanity check
-        if (this.isClosed()) {
-            //probably an exception should be thrown here!!
-            throw new RuntimeException(RLI_NOT_CONNECTED_MSG + this.mRLIURL);
-        }
-        Set result = new HashSet();
-        String url = null,message = null;
-        //we need to get hold of all the LRC
-        //that report to the RLI and call the
-        //list() method on each of them
-        for(Iterator it = this.getReportingLRC().iterator();it.hasNext();){
-            url = (String)it.next();
-            message = "Querying LRC " + url;
-            mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL);
-
-            //push the lrcURL to the properties object
-            mConnectProps.setProperty(this.URL_KEY,url);
-            LRC lrc     = new LRC();
-            if(!lrc.connect(mConnectProps)){
-                //log an error/warning message
-                mLogger.log("Unable to connect to LRC " + url,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-                continue;
-            }
-            try{
-                result.addAll(lrc.list(constraint));
-            }
-            catch(Exception e){
-                mLogger.log("list(String)",e,LogManager.ERROR_MESSAGE_LEVEL);
-            }
-            finally{
-                lrc.close();
-            }
-            mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL);
-        }
-
-        return result;
-    }
-
-    /**
-     * Inserts a new mapping into the LRC running at the URL, where the RLI
-     * is running.
-     *
-     * @param lfn is the logical filename under which to book the entry.
-     * @param tuple is the physical filename and associated PFN attributes.
-     *
-     * @return number of insertions, should always be 1. On failure,
-     * throws a RuntimeException.
-     */
-    public int insert(String lfn, ReplicaCatalogEntry tuple) {
-        //get hold of the LRC if that is running
-        LRC lrc     = new LRC();
-        int result  = 1;
-        if(!lrc.connect(mConnectProps)){
-            //log an error/warning message
-            throw new RuntimeException(LRC_NOT_CONNECTED_MSG +
-                                    mConnectProps.getProperty(URL_KEY));
-        }
-        result = lrc.insert(lfn,tuple);
-        //better to keep a handle to the running LRC
-        //as a member variable, and close it in
-        //RLI.close()
-        lrc.close();
-        return result;
-
-    }
-
-    /**
-     * Inserts a new mapping into the LRC running at the URL, where the RLI
-     * is running.
-     * This is a convenience function exposing the resource handle. Internally,
-     * the <code>ReplicaCatalogEntry</code> element will be contructed, and passed to
-     * the appropriate insert function.
-     *
-     * @param lfn is the logical filename under which to book the entry.
-     * @param pfn is the physical filename associated with it.
-     * @param handle is a resource handle where the PFN resides.
-     *
-     * @return number of insertions, should always be 1. On failure,
-     * throws a RuntimeException.
-     *
-     * @see #insert( String, ReplicaCatalogEntry )
-     * @see ReplicaCatalogEntry
-     */
-    public int insert(String lfn, String pfn, String handle) {
-        //get hold of the LRC if that is running
-        LRC lrc     = new LRC();
-        int result  = 1;
-        if(!lrc.connect(mConnectProps)){
-            //log an error/warning message
-            throw new RuntimeException(LRC_NOT_CONNECTED_MSG +
-                                    mConnectProps.getProperty(URL_KEY));
-        }
-        result = lrc.insert(lfn,pfn,handle);
-        //better to keep a handle to the running LRC
-        //as a member variable, and close it in
-        //RLI.close()
-        lrc.close();
-        return result;
-
-    }
-
-    /**
-     * Inserts multiple mappings into the replica catalog. The input is a
-     * map indexed by the LFN. The value for each LFN key is a collection
-     * of replica catalog entries.
-     *
-     * @param x is a map from logical filename string to list of replica
-     * catalog entries.
-     *
-     * @return the number of insertions.
-     * @see ReplicaCatalogEntry
-     */
-     public int insert(Map x) {
-         //get hold of the LRC if that is running
-        LRC lrc     = new LRC();
-        int result  = 1;
-        if(!lrc.connect(mConnectProps)){
-            //log an error/warning message
-            throw new RuntimeException(LRC_NOT_CONNECTED_MSG +
-                                    mConnectProps.getProperty(URL_KEY));
-        }
-        result = lrc.insert(x);
-        //better to keep a handle to the running LRC
-        //as a member variable, and close it in
-        //RLI.close()
-        lrc.close();
-        return result;
-
-     }
-
-     /**
-      * Deletes a specific mapping from the replica catalog. We don't care
-      * about the resource handle. More than one entry could theoretically
-      * be removed. Upon removal of an entry, all attributes associated
-      * with the PFN also evaporate (cascading deletion).
-      *
-      * It can result in a deletion of more than one entry, and from more
-      * than one local replica catalog that might be reporting to the RLI.
-      *
-      * @param lfn is the logical filename in the tuple.
-      * @param pfn is the physical filename in the tuple.
-      *
-      * @return the number of removed entries.
-      */
-     public int delete(String lfn, String pfn) {
-         ReplicaCatalogEntry rce = new ReplicaCatalogEntry(pfn);
-         return delete(lfn,rce);
-     }
-
-     /**
-      * Deletes a very specific mapping from the replica catalog. The LFN
-      * must be matches, the PFN, and all PFN attributes specified in the
-      * replica catalog entry. More than one entry could theoretically be
-      * removed. Upon removal of an entry, all attributes associated with
-      * the PFN also evaporate (cascading deletion).
-      * It can result in a deletion of more than one entry, and from more
-      * than one local replica catalog that might be reporting to the RLI.
-      *
-      * @param lfn is the logical filename in the tuple.
-      * @param tuple is a description of the PFN and its attributes.
-      *
-      * @return the number of removed entries, either 0 or 1.
-      */
-     public int delete(String lfn, ReplicaCatalogEntry tuple) {
-         //Map indexed by lrc url and each value a collection
-         //of lfns that the RLI says are present in it.
-         Set lfns     = new HashSet(1);
-         lfns.add(lfn);
-         Map lrc2lfn  = this.getLRC2LFNS(lfns);
-         int result = 0;
-
-         if(lrc2lfn == null){
-             //probably RLI is not connected!!
-             return 0;
-         }
-
-         // call the delete function on the individual
-         // LRCs where the mapping resides
-         String key = null,message = null;
-         for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){
-             Map.Entry entry = (Map.Entry)it.next();
-             key = (String)entry.getKey();
-             message = "Querying LRC " + key;
-             mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL);
-
-             //push the lrcURL to the properties object
-             mConnectProps.setProperty(this.URL_KEY,key);
-             LRC lrc     = new LRC();
-             if(!lrc.connect(mConnectProps)){
-                 //log an error/warning message
-                 mLogger.log("Unable to connect to LRC " + key,
-                             LogManager.ERROR_MESSAGE_LEVEL);
-                 continue;
-             }
-
-             //delete from the LRC
-             try{
-                 result += lrc.delete(lfn,tuple);
-             }
-             catch(Exception ex){
-                 mLogger.log("delete(String, ReplicaCatalogEntry)",ex,
-                             LogManager.ERROR_MESSAGE_LEVEL);
-             }
-             finally{
-                 //disconnect
-                 lrc.close();
-             }
-             mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL);
-        }
-
-        return result;
-
-     }
-
-     /**
-      * Deletes all PFN entries for a given LFN from the replica catalog
-      * where the PFN attribute is found, and matches exactly the object
-      * value. This method may be useful to remove all replica entries that
-      * have a certain MD5 sum associated with them. It may also be harmful
-      * overkill.
-      * It can result in a deletion of more than one entry, and from more
-      * than one local replica catalog that might be reporting to the RLI.
-      *
-      * @param lfn is the logical filename to look for.
-      * @param name is the PFN attribute name to look for.
-      * @param value is an exact match of the attribute value to match.
-      *
-      * @return the number of removed entries.
-      */
-     public int delete(String lfn, String name, Object value) {
-         //Map indexed by lrc url and each value a collection
-         //of lfns that the RLI says are present in it.
-         Set lfns     = new HashSet(1);
-         lfns.add(lfn);
-         Map lrc2lfn  = this.getLRC2LFNS(lfns);
-         int result = 0;
-
-         if(lrc2lfn == null){
-             //probably RLI is not connected!!
-             return 0;
-         }
-
-         // call the delete function on the individual
-         // LRCs where the mapping resides
-         String key = null,message = null;
-         for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){
-             Map.Entry entry = (Map.Entry)it.next();
-             key = (String)entry.getKey();
-             message = "Deleting from LRC " + key;
-             mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL);
-
-             //push the lrcURL to the properties object
-             mConnectProps.setProperty(this.URL_KEY,key);
-             LRC lrc     = new LRC();
-             if(!lrc.connect(mConnectProps)){
-                 //log an error/warning message
-                 mLogger.log("Unable to connect to LRC " + key,
-                             LogManager.ERROR_MESSAGE_LEVEL);
-                 continue;
-             }
-
-             //delete from the LRC
-             try{
-                 result += lrc.delete(lfn,name,value);
-             }
-             catch(Exception ex){
-                 mLogger.log("delete(String, String, Object)",
-                             ex,LogManager.ERROR_MESSAGE_LEVEL);
-             }
-             finally{
-                 //disconnect
-                 lrc.close();
-             }
-             mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL);
-        }
-
-        return result;
-
-     }
-
-     /**
-      * Deletes all PFN entries for a given LFN from the replica catalog
-      * where the resource handle is found. Karan requested this
-      * convenience method, which can be coded like
-      * <pre>
-      *  delete( lfn, SITE_ATTRIBUTE, handle )
-      * </pre>
-      *
-      * It can result in a deletion of more than one entry, and from more
-      * than one local replica catalog that might be reporting to the RLI.
-      *
-      * @param lfn is the logical filename to look for.
-      * @param handle is the resource handle
-      *
-      * @return the number of entries removed.
-      */
-    public int deleteByResource(String lfn, String handle) {
-        return delete(lfn,SITE_ATTRIBUTE,handle);
-    }
-
-    /**
-     * Deletes multiple mappings into the replica catalog. The input is a
-     * map indexed by the LFN. The value for each LFN key is a collection
-     * of replica catalog entries. On setting matchAttributes to false, all entries
-     * having matching lfn pfn mapping to an entry in the Map are deleted.
-     * However, upon removal of an entry, all attributes associated with the pfn
-     * also evaporate (cascaded deletion).
-     * The deletes are done in batches.
-     *
-     * @param x                is a map from logical filename string to list of
-     *                         replica catalog entries.
-     * @param matchAttributes  whether mapping should be deleted only if all
-     *                         attributes match.
-     *
-     * @return the number of deletions.
-     * @see ReplicaCatalogEntry
-     */
-    public int delete( Map x , boolean matchAttributes){
-        //Map indexed by lrc url and each value a collection
-        //of lfns that the RLI says are present in it.
-        Set lfns     = new HashSet(x.size());
-        for(Iterator it = x.keySet().iterator();it.hasNext();){
-            lfns.add( (String)it.next());
-        }
-        Map lrc2lfn  = this.getLRC2LFNS(lfns);
-        int result = 0;
-
-        if(lrc2lfn == null){
-            //probably RLI is not connected!!
-            return 0;
-        }
-
-        //compose an exception that might need to be thrown
-        CatalogException exception = new ReplicaCatalogException();
-
-        // call the delete function on the individual
-        // LRCs where the mapping resides
-        String key = null,message = null;
-        String lfn;
-        for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){
-            Map.Entry entry = (Map.Entry)it.next();
-            key = ( String )entry.getKey();
-            lfns = ( Set )entry.getValue();
-            message = "Deleting from LRC " + key;
-            mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL);
-
-            //push the lrcURL to the properties object
-            mConnectProps.setProperty(this.URL_KEY,key);
-            LRC lrc     = new LRC();
-            if(!lrc.connect(mConnectProps)){
-                //log an error/warning message
-                mLogger.log("Unable to connect to LRC " + key,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-                continue;
-            }
-
-            //compose the map to delete for a particular LRC
-            Map lmap = new HashMap(lfns.size());
-            for(Iterator lfnIt = lfns.iterator();lfnIt.hasNext();){
-                lfn = (String)lfnIt.next();
-                lmap.put(lfn,x.get(lfn));
-            }
-
-             //delete from the LRC
-             try{
-                 result += lrc.delete(x,matchAttributes);
-             }
-             catch(ReplicaCatalogException e){
-                 exception.setNextException(e);
-             }
-             catch(Exception ex){
-                 mLogger.log("delete(Map,boolean)",
-                             ex,LogManager.ERROR_MESSAGE_LEVEL);
-             }
-             finally{
-                 //disconnect
-                 lrc.close();
-             }
-             mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL);
-        }
-
-
-        //throw an exception only if a nested exception
-        if( (exception = exception.getNextException()) != null) throw exception;
-
-        return result;
-
-    }
-
-    /**
-     * Removes all mappings for an LFN from the replica catalog.
-     * It can result in a deletion of more than one entry, and from more
-     * than one local replica catalog that might be reporting to the RLI.
-     *
-     * @param lfn is the logical filename to remove all mappings for.
-     *
-     * @return the number of removed entries.
-     */
-    public int remove(String lfn) {
-        //Map indexed by lrc url and each value a collection
-        //of lfns that the RLI says are present in it.
-        Set lfns     = new HashSet(1);
-        lfns.add(lfn);
-        Map lrc2lfn  = this.getLRC2LFNS(lfns);
-        int result = 0;
-
-        if(lrc2lfn == null){
-            //probably RLI is not connected!!
-            return 0;
-        }
-
-        // call the delete function on the individual
-        // LRCs where the mapping resides
-        String key = null,message = null;
-        for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){
-            Map.Entry entry = (Map.Entry)it.next();
-            key = (String)entry.getKey();
-            message = "Deleting from LRC " + key;
-            mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL);
-
-            //push the lrcURL to the properties object
-            mConnectProps.setProperty(this.URL_KEY,key);
-            LRC lrc     = new LRC();
-            if(!lrc.connect(mConnectProps)){
-                //log an error/warning message
-                mLogger.log("Unable to connect to LRC " + key,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-                continue;
-            }
-
-            //delete from the LRC
-            try{
-                result += lrc.remove(lfn);
-            }
-            catch(Exception ex){
-                mLogger.log("remove(String):",ex,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-            }
-            finally{
-                //disconnect
-                lrc.close();
-            }
-            mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL);
-       }
-
-       return result;
-
-    }
-
-    /**
-     * Removes all mappings for a set of LFNs.
-     * It can result in a deletion of more than one entry, and from more
-     * than one local replica catalog that might be reporting to the RLI.
-     *
-     * @param lfns is a set of logical filename to remove all mappings for.
-     *
-     * @return the number of removed entries.
-     */
-    public int remove(Set lfns) {
-        //Map indexed by lrc url and each value a collection
-        //of lfns that the RLI says are present in it.
-        Map lrc2lfn  = this.getLRC2LFNS(lfns);
-        int result = 0;
-        Set s = null;
-
-        if(lrc2lfn == null){
-            //probably RLI is not connected!!
-            return 0;
-        }
-
-        // call the delete function on the individual
-        // LRCs where the mapping resides
-        String key = null,message = null;
-        for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){
-            Map.Entry entry = (Map.Entry)it.next();
-            key = (String)entry.getKey();
-            message = "Deleting from LRC " + key;
-            mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL);
-
-            //push the lrcURL to the properties object
-            mConnectProps.setProperty(this.URL_KEY,key);
-            LRC lrc     = new LRC();
-            if(!lrc.connect(mConnectProps)){
-                //log an error/warning message
-                mLogger.log("Unable to connect to LRC " + key,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-                continue;
-            }
-
-            //delete from the LRC
-            try{
-                s = (Set)entry.getValue();
-                mLogger.log("\tDeleting the following lfns " + s,
-                            LogManager.DEBUG_MESSAGE_LEVEL);
-                result += lrc.remove((Set)entry.getValue());
-            }
-            catch(Exception ex){
-                mLogger.log("remove(Set)",ex,LogManager.ERROR_MESSAGE_LEVEL);
-            }
-            finally{
-                //disconnect
-                lrc.close();
-            }
-            mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL);
-       }
-
-       return result;
-
-    }
-
-    /**
-     * Removes all entries from the replica catalog where the PFN attribute
-     * is found, and matches exactly the object value.
-     * It can result in a deletion of more than one entry, and from more
-     * than one local replica catalog that might be reporting to the RLI.
-     *
-     * @param name is the PFN attribute name to look for.
-     * @param value is an exact match of the attribute value to match.
-     *
-     * @return the number of removed entries.
-     */
-    public int removeByAttribute(String name, Object value) {
-        //sanity check
-        if (this.isClosed()) {
-            //probably an exception should be thrown here!!
-            throw new RuntimeException(RLI_NOT_CONNECTED_MSG + this.mRLIURL);
-        }
-        int result = 0;
-        String url = null;
-        //we need to get hold of all the LRC
-        //that report to the RLI and call the
-        //list() method on each of them
-        for(Iterator it = this.getReportingLRC().iterator();it.hasNext();){
-            url = (String)it.next();
-
-            mLogger.log("Removing from LRC " + url,LogManager.DEBUG_MESSAGE_LEVEL);
-
-            //push the lrcURL to the properties object
-            mConnectProps.setProperty(this.URL_KEY,url);
-            LRC lrc     = new LRC();
-            if(!lrc.connect(mConnectProps)){
-                //log an error/warning message
-                mLogger.log("Unable to connect to LRC " + url,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-                continue;
-            }
-            try{
-                result += lrc.removeByAttribute(name,value);
-            }
-            catch(Exception e){
-                mLogger.log("removeByAttribute(String,Object)",e,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-            }
-            finally{
-                lrc.close();
-            }
-            mLogger.log( "Removing from LRC " + url + LogManager.MESSAGE_DONE_PREFIX,
-                          LogManager.DEBUG_MESSAGE_LEVEL);
-        }
-
-        return result;
-
-    }
-
-    /**
-     * Removes all entries associated with a particular resource handle.
-     * This is useful, if a site goes offline. It is a convenience method,
-     * which calls the generic <code>removeByAttribute</code> method.
-     * It can result in a deletion of more than one entry, and from more
-     * than one local replica catalog that might be reporting to the RLI.
-     *
-     * @param handle is the site handle to remove all entries for.
-     *
-     * @return the number of removed entries.
-     * @see #removeByAttribute( String, Object )
-     */
-    public int removeByAttribute(String handle) {
-            return removeByAttribute(SITE_ATTRIBUTE,handle);
-    }
-
-    /**
-     * Removes everything from all the LRCs that report to this RLI.
-     *  Use with caution!
-     *
-     * @return the number of removed entries.
-     */
-    public int clear() {
-        //sanity check
-        if (this.isClosed()) {
-            //probably an exception should be thrown here!!
-            throw new RuntimeException(RLI_NOT_CONNECTED_MSG + this.mRLIURL);
-        }
-        int result = 0;
-        String url = null,message = null;
-        //we need to get hold of all the LRC
-        //that report to the RLI and call the
-        //list() method on each of them
-        for(Iterator it = this.getReportingLRC().iterator();it.hasNext();){
-            url = (String)it.next();
-            message = "Querying LRC " + url;
-            mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL);
-
-            //push the lrcURL to the properties object
-            mConnectProps.setProperty(this.URL_KEY,url);
-            LRC lrc     = new LRC();
-            if(!lrc.connect(mConnectProps)){
-                //log an error/warning message
-                mLogger.log("Unable to connect to LRC " + url,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-                continue;
-            }
-            try{
-                result += lrc.clear();
-            }
-            catch(Exception e){
-                mLogger.log("list(String)",e,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-            }
-            finally{
-                lrc.close();
-            }
-            mLogger.log( message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL);
-        }
-
-        return result;
-    }
-
-    /**
-     * Explicitely free resources before the garbage collection hits.
-     */
-    public void close() {
-        try{
-            if (mRLS != null)
-                mRLS.Close();
-        }
-        catch(RLSException e){
-            //ignore
-        }
-        finally{
-            mRLS = null;
-        }
-    }
-
-    /**
-     * Returns whether the connection to the RLS with which this instance is
-     * associated is closed or not.
-     *
-     * @return true, if the implementation is disassociated, false otherwise.
-     * @see #close()
-     */
-    public boolean isClosed() {
-        return (mRLS == null);
-    }
-
-
-    /**
-     * It returns the timeout value in seconds after which to timeout in case of
-     * no activity from the RLI.
-     *
-     * Referred to by the "rli.timeout" property.
-     *
-     * @param properties   the properties passed in the connect method.
-     *
-     * @return the timeout value if specified else,
-     *         the value specified by "rls.timeout" property, else
-     *         DEFAULT_RLI_TIMEOUT.
-     *
-     * @see #DEFAULT_RLI_TIMEOUT
-     */
-    public int getTimeout(Properties properties) {
-        String prop = properties.getProperty( this.RLI_TIMEOUT_KEY);
-
-        //if prop is null get rls timeout,
-        prop = (prop == null)? properties.getProperty(this.RLS_TIMEOUT_KEY):prop;
-
-        int val = 0;
-        try {
-            val = Integer.parseInt( prop );
-        } catch ( Exception e ) {
-            val = Integer.parseInt( DEFAULT_RLI_TIMEOUT );
-        }
-        return val;
-
-    }
-
-
-
-    /**
-     * Sets the number of lfns in each batch while querying the lrc in the
-     * bulk mode.
-     *
-     * @param properties  the properties passed while connecting.
-     *
-     */
-    protected void setBatchSize(Properties properties) {
-        String s = properties.getProperty(this.BATCH_KEY);
-        int size = this.RLS_BULK_QUERY_SIZE;
-        try{
-            size = Integer.parseInt(s);
-        }
-        catch(Exception e){}
-        mBatchSize = size;
-    }
-
-
-    /**
-     * Returns the number of lfns in each batch while querying the lrc in the
-     * bulk mode.
-     *
-     * @return the batch size.
-     */
-    protected int getBatchSize() {
-        return mBatchSize;
-    }
-
-
-    /**
-     * Returns a map indexed by lrc urls. Each value is a set of
-     * String objects referring to the logical filenames whose mappings reside
-     * at a particular lrc amongst the set of logical filenames passed.
-     *
-     * @param lfns  the set of lfns queried to the RLI.
-     *
-     * @return Map indexed by lrc urls. Each value is a set of lfn strings.
-     *         null in case the connection to RLI is closed or error.
-     */
-    private Map getLRC2LFNS(Set lfns){
-        int batch = lfns.size() > mBatchSize ? mBatchSize:lfns.size();
-        //sanity check
-        if (this.isClosed()) {
-            //probably an exception should be thrown here!!
-            throw new RuntimeException(RLI_NOT_CONNECTED_MSG + this.mRLIURL);
-        }
-
-        Map lrc2lfn = new HashMap();//indexed by lrc url and each value a collection
-                                    //of lfns that the RLI says are present init.
-                                    //get a handle to the rli
-
-        //we need to query the RLI in batches
-        for (Iterator it = lfns.iterator(); it.hasNext(); ) {
-            ArrayList l = new ArrayList(batch);
-            for (int j = 0; (j < batch) && (it.hasNext()); j++) {
-                l.add(it.next());
-            }
-
-            //query the RLI for one batch
-            List res = null;
-            try{
-                res = mRLI.getLRCBulk(l);
-            }
-            catch(RLSException ex){
-                mLogger.log("getLRC2LFNS(Set)",ex,
-                            LogManager.ERROR_MESSAGE_LEVEL);
-                //or throw a runtime exception
-                return null;
-            }
-            //iterate through the results and put them in the map
-            String lrc = null;
-            String lfn = null;
-            for(Iterator lit = res.iterator();lit.hasNext();){
-                RLSString2Bulk s2b = (RLSString2Bulk) lit.next();
-                lfn = s2b.s1;//s1 is the lfn
-                lrc = s2b.s2;//s2 denotes the lrc which contains the mapping
-
-                //rc is the exit status returned by the RLI
-                if (s2b.rc == RLSClient.RLS_SUCCESS) {
-                    //we are really only concerned with success
-                    //and do not care about other exit codes
-                    Object val = null;
-                    Set s      = null;
-                    s = ( (val = lrc2lfn.get(lrc)) == null) ?
-                        new LinkedHashSet():
-                        (LinkedHashSet)val;
-                    s.add(lfn);
-                    if(val == null)
-                        lrc2lfn.put(lrc,s);
-
-                }
-            }
-        }
-
-        //match LRC's just once against ingore and restrict lists
-        for( Iterator it = lrc2lfn.keySet().iterator(); it.hasNext(); ){
-            String lrc = ( String ) it.next();
-            int state = this.determineQueryType(lrc);
-
-            //do the query on the basis of the state
-            if (state == LRC_QUERY_IGNORE) {
-                mLogger.log("Skipping LRC " + lrc,
-                            LogManager.DEBUG_MESSAGE_LEVEL);
-                it.remove();
-            }
-        }
-
-
-
-        return lrc2lfn;
-    }
-
-
-    /**
-     * Returns a tri state indicating what type of query needs to be done to
-     * a particular LRC.
-     *
-     * @param url   the LRC url.
-     *
-     * @return tristate
-     */
-    private int determineQueryType(String url){
-        int type = RLI.LRC_QUERY_NORMAL;
-
-        if(mLRCRestrictList != null){
-            for ( int j = 0; j < mLRCRestrictList.length; j++ ) {
-                if ( url.indexOf( mLRCRestrictList[ j ] ) != -1 ) {
-                    type = RLI.LRC_QUERY_RESTRICT;
-                    break;
-                }
-            }
-        }
-        if(mLRCIgnoreList != null){
-            for ( int j = 0; j < mLRCIgnoreList.length; j++ ) {
-                if ( url.indexOf( mLRCIgnoreList[ j ] ) != -1 ) {
-                    type = RLI.LRC_QUERY_IGNORE;
-                    break;
-                }
-            }
-        }
-
-
-        return type;
-    }
-
-
-    /**
-     * Returns the rls LRC urls to ignore for querying (requested by LIGO).
-     *
-     * Referred to by the "pegasus.catalog.replica.lrc.ignore" property.
-     *
-     * @param properties  the properties passed in the connect method.
-     *
-     * @return String[] if a comma separated list supplied as the property value,
-     *         else null
-     */
-    protected String[] getRLSLRCIgnoreURLs( Properties properties ) {
-        String urls =  properties.getProperty( this.LRC_IGNORE_KEY,
-                                               null );
-        if ( urls != null ) {
-            String[] urllist = urls.split( "," );
-            return urllist;
-        } else {
-            return null;
-        }
-    }
-
-    /**
-     * Returns the rls LRC urls to restrict for querying (requested by LIGO).
-     *
-     * Referred to by the "pegasus.catalog.replica.lrc.restrict" property.
-     *
-     * @param properties  the properties passed in the connect method.
-     *
-     * @return String[] if a comma separated list supplied as the property value,
-     *         else null
-     */
-    protected String[] getRLSLRCRestrictURLs( Properties properties ) {
-        String urls = properties.getProperty( this.LRC_RESTRICT_KEY,
-                                              null );
-        if ( urls != null ) {
-            String[] urllist = urls.split( "," );
-            return urllist;
-        } else {
-            return null;
-        }
-    }
-
-
-    /**
-     * Retrieves the URLs of all the LRCs that report to the RLI.
-     *
-     * @return a Set containing the URLs to all the LRCs that report to the
-     *         RLI.
-     */
-    private Set getReportingLRC(){
-        //sanity check
-        if (this.isClosed()) {
-            //probably an exception should be thrown here!!
-            throw new RuntimeException(RLI_NOT_CONNECTED_MSG + this.mRLIURL);
-        }
-        Set result = new HashSet();
-        Collection c = null;
-
-        try{
-            c = mRLI.lrcList();
-        }
-        catch(RLSException e){
-            mLogger.log("getReportingLRC(Set)",e,LogManager.ERROR_MESSAGE_LEVEL);
-        }
-
-        for(Iterator it = c.iterator(); it.hasNext();){
-            RLSLRCInfo lrc = (RLSLRCInfo)it.next();
-            result.add(lrc.url);
-        }
-
-        return result;
-    }
-
-
-
-
-    /**
-     * Populates the mapping table by querying the LRC in the mLRCList. At
-     * present it searches for all the files in the original DAG. At this point
-     * it should be all the files in the Reduced Dag but not doing so in order to
-     * conserve memory.
-     *
-     * @param allInCache  indicates whether all input file entries were found in
-     *                    cache or not.
-     *
-     * @return List
-     */
-     /*
-    private List populateMapTable( boolean allInCache ) {
-        String lrcURL = null;
-        List list = null;
-        RLSQuery client = null;
-        ReplicaLocation rl = null;
-        List pfnList = null;
-
-        mTable = new HashMap( mSearchFiles.size() );
-
-        int size = mLRCMap.size();
-        mLogger.log("Number of LRCs that will be queried: "+size,
-                    LogManager.DEBUG_MESSAGE_LEVEL);
-        for ( Iterator iter = mLRCMap.keySet().iterator(); iter.hasNext(); ) {
-            lrcURL = ( String ) iter.next();
-            int state = this.determineQueryType(lrcURL);
-
-            //do the query on the basis of the state
-            if ( state == LRC_QUERY_IGNORE ) {
-                mLogger.log( "Skipping LRC " + lrcURL,
-                             LogManager.DEBUG_MESSAGE_LEVEL);
-            }
-            else{
-                mLogger.log( "Querying LRC " + lrcURL,
-                             LogManager.DEBUG_MESSAGE_LEVEL);
-                list = ( ArrayList ) mLRCMap.get( lrcURL );
-                try {
-                    client = new RLSQuery( lrcURL );
-                    boolean restrict = (state == LRC_QUERY_RESTRICT);
-                    client.bulkQueryLRC( list, RLSQuery.RLS_BULK_QUERY_SIZE,
-                                         mTable,restrict);
-                    client.close();
-                } catch ( Exception e ) {
-                    mLogMsg =
-                        "RLSEngine.java: While getting connection to LRC " +
-                        lrcURL + " " + e;
-                    mLogger.log( mLogMsg, LogManager.ERROR_MESSAGE_LEVEL );
-                    size--;
-
-                    //do a hard fail only if the RLS exitmode is set to error
-                    //or  we could not query to all the LRCs
-                    //    and we could not find all the entries in the cache
-                    mLogger.log("RLS exit mode is " + mProps.getRLSExitMode(),
-                                 LogManager.DEBUG_MESSAGE_LEVEL);
-                    boolean exitOnError = mProps.getRLSExitMode().equalsIgnoreCase( "error" );
-                    if (  exitOnError || ( size == 0 && !allInCache )) {
-                        mLogMsg = ( exitOnError ) ?
-                                  "Unable to access LRC " + lrcURL :
-                                  "Unable to query any LRC and not all input files are in cache!";
-                        throw new RuntimeException( mLogMsg );
-                    }
-                }
-                mLogger.logCompletion("Querying LRC " + lrcURL,
-                                      LogManager.DEBUG_MESSAGE_LEVEL);
-
-            }
-        }
-        return new ArrayList( mTable.keySet() );
-
-    }
-
-*/
-    /**
-     * The main program, for some unit testing.
-     *
-     * @param args String[]
-     */
-    public static void main(String[] args) {
-        //setup the logger for the default streams.
-        LogManager logger = LogManagerFactory.loadSingletonInstance(  );
-        logger.logEventStart( "event.pegasus.catalog.replica.RLI", "planner.version", Version.instance().toString() );
-
-        RLI rli = new RLI();
-        Properties props = new Properties();
-        props.setProperty( RLI.URL_KEY, "rls://dataserver.phy.syr.edu" );
-        props.setProperty( RLI.LRC_IGNORE_KEY, "rls://ldas-cit.ligo.caltech.edu:39281" );
-        rli.connect(props);
-        System.out.println( "Complete Lookup "  + rli.lookup("H-H1_RDS_C03_L2-847608132-128.gwf" ) );
-        System.out.println( "Lookup without attributes "  + rli.lookupNoAttributes("H-H1_RDS_C03_L2-847608132-128.gwf" ) );
-        rli.close();
-        
-        
-        //RLI rli = new RLI();
-        String lfn = "test";
-        Set s = new HashSet();
-        s.add(lfn);s.add("testX");s.add("vahi.f.a");
-        System.out.println("Connecting " + rli.connect("rls://sukhna"));
-        boolean insert = false;
-        
-        
-        if(insert){
-            ReplicaCatalogEntry rce = new ReplicaCatalogEntry(
-                "gsiftp://sukhna.isi.edu/tmp/test");
-            rce.addAttribute("name", "karan");
-            LRC sukhna = new LRC();
-            sukhna.connect("rls://sukhna");
-            sukhna.insert("test", rce);
-            sukhna.insert("test", "gsiftp://sukhna.isi.edu/tmp/test1", "isi");
-            sukhna.insert("vahi.f.a", "file:///tmp/vahi.f.a", "isi");
-            sukhna.insert("testX", "gsiftp://sukhna.isi.edu/tmp/testX", "isi");
-            sukhna.insert("testX", "gsiftp://sukhna.isi.edu/tmp/testXvahi", "isi");
-            sukhna.close();
-
-            LRC smarty = new LRC();
-            ReplicaCatalogEntry rce1 = new ReplicaCatalogEntry(
-                "gsiftp://smarty.isi.edu/tmp/test");
-            rce1.addAttribute("name", "gaurang");
-
-            smarty.connect("rlsn://smarty");
-            smarty.insert("test", rce1);
-            smarty.insert("test", "gsiftp://smarty.isi.edu/tmp/test1", "isi");
-            smarty.insert("vahi.f.a", "file:///tmp-smarty/vahi.f.a", "isi");
-            smarty.insert("testX", "gsiftp://smarty.isi.edu/tmp/testX", "isi");
-            smarty.close();
-        }
-
-        System.out.println("\n Searching for lfn " + lfn);
-        System.out.println(rli.lookup(lfn));
-
-        System.out.println("\n Searching for lfn w/o attributes " + lfn);
-        System.out.println(rli.lookupNoAttributes(lfn));
-
-        System.out.println("\nSearching for a set of lfn " + s);
-        System.out.println(rli.lookup(s));
-
-        System.out.println("\nSearching for a set of lfn with handle matching" + s);
-        System.out.println(rli.lookup(s,"isi"));
-
-        System.out.println("\nSearching for a set of lfn with handle matching "+
-                           " returning only pfns" + s);
-        System.out.println(rli.lookupNoAttributes(s,"isi"));
-
-        System.out.println("\nListing all the lfns tracked in RLI");
-        System.out.println(rli.list("*").size());
-
-        //System.out.println("\n Removing entry for lfn " + lfn);
-        //System.out.println(rli.remove(lfn));
-
-        //System.out.println("\n Removing entry for lfns " + s);
-        //System.out.println(rli.remove(s));
-
-        //System.out.println("\n Removing entry for lfn by handle matching " + lfn);
-        //System.out.println(rli.deleteByResource(lfn,"isi"));
-
-        //System.out.println("\nSearching for a set of lfn " + s);
-        //System.out.println(rli.lookup(s));
-        Map m = new HashMap();
-        m.put("lfn","test*");
-        m.put("pfn","*vahi*");
-        System.out.println("\nDoing a constraint lookup " + rli.lookup(m));
-        rli.close();
-    }
-
-
-}//end of main class
-
Index: pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/old/classes/LRCResult.java
===================================================================
--- pegasus-wms_4.0.1+dfsg.orig/src/edu/isi/pegasus/planner/catalog/site/impl/old/classes/LRCResult.java	2012-05-24 16:47:52.064156782 -0700
+++ /dev/null	1970-01-01 00:00:00.000000000 +0000
@@ -1,150 +0,0 @@
-/**
- *  Copyright 2007-2008 University Of Southern California
- *
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-
-
-package edu.isi.pegasus.planner.catalog.site.impl.old.classes;
-
-import edu.isi.pegasus.planner.classes.Data;
-
-import edu.isi.pegasus.common.logging.LogManager;
-
-import org.globus.replica.rls.RLSClient;
-import org.globus.replica.rls.RLSString2Bulk;
-
-
-/**
- * A class that stores the results
- * of querying an LRC. It includes
- * whether the operation was a success
- * or not and in addition the value
- * of the pool attribute.
- *
- * @author Gaurang Mehta
- * @author Karan Vahi
- * @version $Revision: 2572 $
- *
- * @see org.globus.replica.rls.RLSString2Bulk
- */
-
-public class LRCResult extends Data {
-
-    /**
-     * The lfn for which the LRC
-     * was queried.
-     */
-    public String lfn;
-
-    /**
-     * The pfn associated
-     * with the lfn, if an
-     * entry found in the LRC.
-     * Else it can be null.
-     */
-    public String pfn;
-
-    /**
-     * The pool attribute associated
-     * with the pfn returned.
-     * This should be set to null
-     * if pfn is not found.
-     */
-    public String pool;
-
-    /**
-     * The status of the operation.
-     * Whether it was a sucess or not.
-     * The status are defined in
-     * RLSClient.java
-     *
-     * @see org.globus.replica.rls.RLSClient
-     */
-    public int LRCExitStatus;
-
-
-
-    /**
-     * The default constructor
-     */
-    public LRCResult() {
-        lfn = new String();
-        pfn = new String();
-        pool= new String();
-        LRCExitStatus = 0;
-    }
-
-    /**
-     * The overloaded constructor.
-     * Takes in RLSString2Bulk
-     * object.
-     */
-    public LRCResult(RLSString2Bulk s2, String poolAtt){
-        lfn = s2.s1;
-        pfn = s2.s2;
-        LRCExitStatus = s2.rc;
-        pool = poolAtt;
-    }
-
-    /**
-     * Returns a clone of the
-     * existing object.
-     */
-    public Object clone(){
-        LRCResult res = new LRCResult();
-
-        res.lfn           = this.lfn;
-        res.pfn           = this.pfn;
-        res.pool          = this.pool;
-        res.LRCExitStatus = this.LRCExitStatus;
-
-        return res;
-    }
-
-    /**
-     * Returns a textual description
-     * of the object.
-     */
-    public String toString(){
-        String str = "\n lfn: " + lfn +
-                     " exit status: " + getErrorMessage()+
-                     " pfn: " + pfn +
-                     " pool: " + pool;
-        return str;
-
-
-    }
-
-    /**
-     * Returns the error/status
-     * message according to
-     * the LRCExitStatus.
-     */
-    public String getErrorMessage(){
-        RLSClient rls = null;
-
-        try{
-            rls = new RLSClient();
-        }
-        catch(Exception e){
-            mLogger.log("Exception while initialising to RLS" + e.getMessage(),
-                        LogManager.ERROR_MESSAGE_LEVEL);
-        }
-        String err = rls.getErrorMessage(this.LRCExitStatus);
-
-        return err;
-    }
-
-}
Index: pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/PasoaProvenanceClient.java
===================================================================
--- pegasus-wms_4.0.1+dfsg.orig/src/edu/isi/pegasus/planner/client/PasoaProvenanceClient.java	2012-05-24 16:47:52.064156782 -0700
+++ /dev/null	1970-01-01 00:00:00.000000000 +0000
@@ -1,1485 +0,0 @@
-/**
- *  Copyright 2007-2008 University Of Southern California
- *
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-package edu.isi.pegasus.planner.client;
-
-
-import java.io.BufferedReader;
-import java.io.StringReader;
-import java.io.FileReader;
-import java.net.URL;
-import java.util.LinkedList;
-import java.io.File;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.HashMap;
-
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.DocumentBuilder;
-
-import org.pasoa.common.Constants;
-import org.pasoa.pstructure.Record;
-import org.pasoa.pstructure.ActorStatePAssertion;
-import org.pasoa.pstructure.GlobalPAssertionKey;
-import org.pasoa.pstructure.InteractionKey;
-import org.pasoa.pstructure.InteractionPAssertion;
-import org.pasoa.pstructure.ObjectID;
-import org.pasoa.pstructure.RelationshipPAssertion;
-import org.pasoa.storeclient.ClientLib;
-import org.pasoa.util.httpsoap.WSAddressEndpoint;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-import org.w3c.dom.NodeList;
-import org.w3c.dom.Node;
-
-import org.xml.sax.InputSource;
-import java.io.StringWriter;
-import java.io.IOException;
-import java.io.Reader;
-import edu.isi.pegasus.planner.transfer.Refiner;
-import org.xml.sax.InputSource;
-public class PasoaProvenanceClient {
-
-    /** change this to connect to the preserv server **/
-    public static String URL = "http://localhost:8080/preserv-1.0";
-    public static String XMLHEADER ="<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>";
-    public static String CONDOR= "www.cs.wisc.edu/condor";
-    public long filecount=0;
-    public static String documentationStyle = "http://www.pasoa.org/schemas/pegasus";
-    public   ClientLib clientLib = null;
-    public	URL provenanceStore = null;
-    public String jobname=null;
-    public String wf_label=null;
-    public String wf_planned_time=null;
-    public String transformation=null;
-    public Element docelement=null;
-    public Element daxelement=null;
-    //  public List input=null;
-    //   public List output=null;
-    public List parents=null;
-    public List children=null;
-    public Map input = null;
-    public Map output = null;
-
-
-    public PasoaProvenanceClient(String url){
-        clientLib=new ClientLib();
-        try{
-            provenanceStore = new URL(url + "/record");
-        }catch(Exception e){
-            System.err.println("Bad Bad Bad url");
-        }
-    }
-
-    public PasoaProvenanceClient(){
-        clientLib=new ClientLib();
-        try{
-            provenanceStore = new URL(URL + "/record");
-        }catch(Exception e){
-            System.err.println("Bad Bad Bad url");
-        }
-
-    }
-
-
-
-    public static void main(String[] args) throws Exception {
-
-        PasoaProvenanceClient cle=null;
-        String jobfile=null;
-        String daxfile=null;
-        String dagfile=null;
-        String url=null;
-        if(args.length<3){
-            System.err.println("Usage: Client daxfile dagfile outfile");
-           // System.err.println("Usage: Client daxfile dagfile preservurl");
-            System.exit(1);
-
-        }else if(args.length==3){
-            jobfile=args[2];
-            daxfile=args[0];
-            dagfile=args[1];
-            cle = new PasoaProvenanceClient();
-
-        }
-	/*}else {
-	    jobfile=args[0];
-	    daxfile=args[0];
-	    dagfile=args[2];
-	    url=args[3];
-            cle = new PasoaProvenanceClient(url);
-
-	}*/
-	try{
-	   	    cle.jobname=(new File(jobfile)).getName().split("\\.out")[0];
-	    System.out.println("Processing job --- "+ cle.jobname);
-	    cle.parseKickstartRecord(jobfile);
-            cle.parseDag(dagfile);
-            List newlist=new ArrayList();
-            if(cle.parents!=null && !cle.parents.isEmpty()){
-                System.out.println("Adding parents "+ cle.parents);
-                newlist.addAll(cle.parents);
-            }
-            if(cle.children!=null && !cle.children.isEmpty()){
-                System.out.println("Adding children  "+ cle.children);
-                newlist.addAll(cle.children);
-            }
-            System.out.println("Adding job "+ cle.jobname);
-
-            newlist.add(cle.jobname);
-            System.out.println("Job List is  "+ newlist);
-            cle.parseFiles(newlist);
-//            cle.parseDaxFile(daxfile,newlist);
- //           cle.parseInput();
-            System.out.println("Inputs == "+cle.input);
-            System.out.println("Outputs == "+cle.output);
-
-            if(cle.jobname.startsWith( Refiner.STAGE_IN_PREFIX )|| (cle.jobname.startsWith(Refiner.STAGE_OUT_PREFIX))){
-                InteractionKey ik = cle.transferInvocationInteraction();
-                cle.transferCompletionInteraction(ik);
-            } else if(cle.jobname.startsWith("new_rc_register")){
-                InteractionKey ik = cle.registerInvocationInteraction();
-                cle.registerCompletionInteraction(ik);
-            } else if(cle.jobname.startsWith("create_dir")) {
-                //write this handler
-            } else if(cle.jobname.startsWith("clean_up")){
-                //write this handler
-            }else if(cle.jobname.startsWith("pegasus_concat")){
-                //write this handler
-            }else{
-                InteractionKey ik = cle.jobInvocationInteraction();
-                cle.jobCompletionInteraction(ik);
-            }
-        }catch (Exception e){
-            e.printStackTrace();
-        }
-
-    }
-
-    private void parseDag(String file) throws Exception{
-        BufferedReader bf = new BufferedReader(new FileReader(file));
-        String line = null;
-        while((line=bf.readLine())!=null){
-            String[] list = null;
-            if (line.startsWith("PARENT")){
-                list = line.split(" ");
-            }
-            if(list!=null){
-                if(list[1].equalsIgnoreCase(jobname)){
-                    if(children==null){
-                        children=new ArrayList();
-                    }
-                    children.add(list[3]);
-                }
-                if(list[3].equalsIgnoreCase(jobname)){
-                    if(parents==null){
-                        parents=new ArrayList();
-                    }
-                    parents.add(list[1]);
-                }
-            }
-        }
-        bf.close();
-    }
-
-    private void parseKickstartRecord(String file) throws Exception{
-        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
-        DocumentBuilder db = dbf.newDocumentBuilder();
-        List records=extractToMemory(new File(file));
-        if(records!=null){
-            for (Iterator i=records.iterator();i.hasNext();){
-
-                Document msgDoc = db.parse(new InputSource(new StringReader((String)i.next())));
-                docelement = msgDoc.getDocumentElement();
-                transformation = docelement.getAttribute("transformation");
-                wf_label=docelement.getAttribute("wf-label");
-                wf_planned_time=docelement.getAttribute("wf-stamp");
-
-            }
-        }
-    }
-
-    public List extractToMemory( java.io.File input )
-      throws Exception
-    {
-        List result = new ArrayList();
-           StringWriter out = null;
-           // open the files
-           int p1, p2, state = 0;
-           try {
-             BufferedReader in = new BufferedReader( new FileReader(input) );
-             out = new StringWriter(4096);
-             String line = null;
-             while ( (line = in.readLine()) != null ) {
-               if ( (state & 1) == 0 ) {
-                 // try to copy the XML line in any case
-                 if ( (p1 = line.indexOf( "<?xml" )) > -1 )
-                   if ( (p2 = line.indexOf( "?>", p1 )) > -1 ) {
- //                    out.write( line, p1, p2+2 );
-                     System.out.println( "state=" + state + ", seen <?xml ...?>" );
-                   }
-                 // start state with the correct root element
-                 if ( (p1 = line.indexOf( "<invocation")) > -1 ) {
-                   if ( p1 > 0 ) line = line.substring( p1 );
-                   System.out.println( "state=" + state + ", seen <invocation>" );
-                   out.write(XMLHEADER);
-                   ++state;
-                 }
-               }
-               if ( (state & 1) == 1 ) {
-                 out.write( line );
-                 if ( (p1 = line.indexOf("</invocation>")) > -1 ) {
-                   System.out.println( "state=" + state + ", seen </invocation>" );
-                   ++state;
-
-                   out.flush();
-                   out.close();
-                   result.add( out.toString() );
-                   out = new StringWriter(4096);
-                 }
-               }
-             }
-
-             in.close();
-             out.close();
-           } catch ( IOException ioe ) {
-             throw new Exception( "While copying " + input.getPath() +
-                                      " into temp. file: " + ioe.getMessage() );
-         }
-
-
-      // some sanity checks
-      if ( state == 0 )
-        throw new Exception( "File " + input.getPath() +
-                                 " does not contain invocation records," +
-                                 " assuming failure");
-      if ( (state & 1) == 1 )
-        throw new Exception( "File " + input.getPath() +
-                                 " contains an incomplete invocation record," +
-                                 " assuming failure" );
-
-      // done
-      return result;
-  }
-
-private void parseFiles(List jobs)throws Exception{
-    File infile = null;
-    File outfile = null;
-    List ilist = null;
-    List temp = new ArrayList(jobs);
-    for (Iterator i = temp.iterator(); i.hasNext(); ) {
-        String job = (String) i.next();
-        if (job.startsWith( Refiner.STAGE_IN_PREFIX )) {
-            //this is for stagein jobs
-            outfile = new File(job + ".out.lof");
-            if (outfile.exists() && outfile.canRead() && outfile.length() != 0) {
-                try {
-                    BufferedReader in = new BufferedReader(new FileReader(outfile));
-                    String str;
-                    while ( (str = in.readLine()) != null) {
-                        if (output == null) {
-                            output = new HashMap();
-                        }
-                        if (!output.containsKey(job)) {
-                            output.put(job, new ArrayList());
-                        }
-                        ilist = (List) output.get(job);
-                        ilist.add(str);
-                    }
-                    in.close();
-                }
-                catch (IOException e) {
-                }
-            }
-
-        }else if (job.startsWith( Refiner.STAGE_OUT_PREFIX )) {
-            //this is for stageout/inter tx jobs
-            outfile = new File(job + ".out.lof");
-            if (outfile.exists() && outfile.canRead() && outfile.length() != 0) {
-                try {
-                    BufferedReader in = new BufferedReader(new FileReader(outfile));
-                    String str;
-                    while ( (str = in.readLine()) != null) {
-                        if (input == null) {
-                            input = new HashMap();
-                        }
-                        if (!input.containsKey(job)) {
-                            input.put(job, new ArrayList());
-                        }
-                        ilist = (List) input.get(job);
-                        ilist.add(str);
-                    }
-                    in.close();
-                }
-                catch (IOException e) {
-                }
-            }
-
-        }else if(job.startsWith( Refiner.INTER_POOL_PREFIX )){
-            outfile = new File(job + ".out.lof");
-            if (outfile.exists() && outfile.canRead() && outfile.length() != 0) {
-                try {
-                    BufferedReader in = new BufferedReader(new FileReader(outfile));
-                    String str;
-                    while ( (str = in.readLine()) != null) {
-                        if (output == null) {
-                            output = new HashMap();
-                        }
-                        if (!output.containsKey(job)) {
-                            output.put(job, new ArrayList());
-                        }
-                        ilist = (List) output.get(job);
-                        ilist.add(str);
-                        if (input == null) {
-                            input = new HashMap();
-                        }
-                        if (!input.containsKey(job)) {
-                            input.put(job, new ArrayList());
-                        }
-                        ilist = (List) input.get(job);
-                        ilist.add(str);
-                    }
-                    in.close();
-                }
-                catch (IOException e) {
-                }
-            }
-
-        } else if(job.startsWith("new_rc_register")){
-            BufferedReader bf =new BufferedReader(new FileReader(new File(job+".in")));
-            String line = null;
-            while((line=bf.readLine())!=null){
-                String lfn=null;
-                lfn= line.split(" ")[0];
-                if(input==null){
-                    input=new HashMap();
-                }
-                if(!input.containsKey(job)){
-                    input.put(job, new ArrayList());
-                }
-                ilist=(List)input.get(job);
-                ilist.add(lfn);
-            }
-            bf.close();
-        }else if (job.startsWith("cln_")) {
-            //this is for cleanup jobs
-            infile = new File(job + ".in.lof");
-            if (infile.exists() && infile.canRead() && infile.length() != 0) {
-                try {
-                    BufferedReader in = new BufferedReader(new FileReader(infile));
-                    String str;
-                    while ( (str = in.readLine()) != null) {
-
-                        if (input == null) {
-                            input = new HashMap();
-                        }
-                        if (!input.containsKey(job)) {
-                            input.put(job, new ArrayList());
-                        }
-                        ilist = (List) input.get(job);
-                        ilist.add(str);
-                    }
-                    in.close();
-                }
-                catch (IOException e) {
-                }
-            }
-        } else if (!job.endsWith("_cdir")) {
-            //this is a regular job
-            outfile = new File(job + ".out.lof");
-            if (outfile.exists() && outfile.canRead() && outfile.length() != 0) {
-                try {
-                    BufferedReader in = new BufferedReader(new FileReader(outfile));
-                    String str;
-                    while ( (str = in.readLine()) != null) {
-                        if (output == null) {
-                            output = new HashMap();
-                        }
-                        if (!output.containsKey(job)) {
-                            output.put(job, new ArrayList());
-                        }
-                        ilist = (List) output.get(job);
-                        ilist.add(str);
-                    }
-                    in.close();
-                }
-                catch (IOException e) {
-                }
-            }
-
-            infile = new File(job + ".in.lof");
-            if (infile.exists() && infile.canRead() && infile.length() != 0) {
-                try {
-                    BufferedReader in = new BufferedReader(new FileReader(infile));
-                    String str;
-                    while ( (str = in.readLine()) != null) {
-
-                        if (input == null) {
-                            input = new HashMap();
-                        }
-                        if (!input.containsKey(job)) {
-                            input.put(job, new ArrayList());
-                        }
-                        ilist = (List) input.get(job);
-                        ilist.add(str);
-                    }
-                    in.close();
-                }
-                catch (IOException e) {
-                }
-            }
-        }
-    }
-}
-
-    private void parseDaxFile(String file, List jobs)throws Exception {
-        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
-        DocumentBuilder       db = dbf.newDocumentBuilder();
-        Document      msgDoc = db.parse(new File(file));
-        NodeList nlist = msgDoc.getElementsByTagName("job");
-        List temp = new ArrayList(jobs);
-        input = new HashMap();
-        output = new HashMap();
-
-        for (int i =0;i<nlist.getLength();i++){
-            String tempname=nlist.item(i).getAttributes().getNamedItem("name").getNodeValue()+"_"+nlist.item(i).getAttributes().getNamedItem("id").getNodeValue();
-            if(temp.contains(tempname)){
-                temp.remove(tempname);
-                NodeList uselist = nlist.item(i).getChildNodes();
-                for (int j=0;j<uselist.getLength();j++){
-                    if(uselist.item(j).getNodeName().equals("uses")){
-                        Node n = uselist.item(j).getAttributes().getNamedItem("link");
-                        if(n!=null) {
-                            List ilist = null;
-                            String fname = uselist.item(j).getAttributes().getNamedItem("file").getNodeValue();
-                            if(n.getNodeValue().equalsIgnoreCase("output")){
-
-                                if(output==null){
-                                    output = new HashMap();
-                                    ilist = new ArrayList();
-                                    output.put(tempname,ilist);
-                                }
-                                if(!output.containsKey(tempname)){
-                                    output.put(tempname,new ArrayList());
-                                }
-                                ilist=(List)output.get(tempname);
-                                ilist.add(fname);
-                            } else {
-                                if(input==null){
-                                    input=new HashMap();
-                                }
-                                if(!input.containsKey(tempname)){
-                                    input.put(tempname,new ArrayList());
-                                }
-                                ilist=(List)input.get(tempname);
-                                ilist.add(fname);
-                            }
-                        }
-
-                    }
-                }
-            }
-        }
-    }
-
-    private void parseInput() throws Exception{
-        if(parents!=null && !parents.isEmpty()){
-            for(Iterator p=parents.iterator();p.hasNext();){
-                String tempjob=(String)p.next();
-                if(tempjob.startsWith(Refiner.STAGE_IN_PREFIX) || tempjob.startsWith(Refiner.INTER_POOL_PREFIX) ){
-                    List ilist=null;
-                    if(output==null){
-                        output = new HashMap();
-                    }
-                    if(!output.containsKey(tempjob)){
-                        output.put(tempjob,new ArrayList());
-                    }
-                    ilist=(List)output.get(tempjob);
-                    BufferedReader bf =new BufferedReader(new FileReader(new File(tempjob+".in")));
-                    String line = null;
-                    while((bf.readLine())!=null){
-                        bf.readLine();
-                        bf.readLine();
-                        line=bf.readLine();
-                        filecount++;
-                        String lfn= line.split("run\\d{4}/")[1];
-                        ilist.add(lfn);
-                    }
-                    bf.close();
-                }
-
-            }
-        }
-        if(children!=null && !children.isEmpty()){
-            for(Iterator c=children.iterator();c.hasNext();){
-                String tempjob=(String)c.next();
-                if(tempjob.startsWith(Refiner.STAGE_OUT_PREFIX) || tempjob.startsWith(Refiner.INTER_POOL_PREFIX) ){
-                    List ilist=null;
-                    if(input==null){
-                        input = new HashMap();
-
-                    }
-                    if(!input.containsKey(tempjob)){
-                        input.put(tempjob,new ArrayList());
-                    }
-                    ilist=(List)input.get(tempjob);
-                    BufferedReader bf =new BufferedReader(new FileReader(new File(tempjob+".in")));
-                    String line = null;
-                    while((bf.readLine())!=null){
-
-                        line=bf.readLine();
-                        filecount++;
-                        String lfn= line.split("run\\d{4}/")[1];
-                        ilist.add(lfn);
-                        bf.readLine();
-                        bf.readLine();
-                    }
-                    bf.close();
-
-                }
-
-            }
-        }
-        if(jobname.startsWith(Refiner.STAGE_IN_PREFIX)||jobname.startsWith(Refiner.STAGE_OUT_PREFIX)||jobname.startsWith(Refiner.INTER_POOL_PREFIX)){
-            BufferedReader bf =new BufferedReader(new FileReader(new File(jobname+".in")));
-            String line = null;
-            List ilist=null;
-            while((bf.readLine())!=null){
-                String lfn=null;
-                line=bf.readLine();
-                if(jobname.startsWith(Refiner.STAGE_OUT_PREFIX) || jobname.startsWith(Refiner.INTER_POOL_PREFIX)){
-                    lfn= line.split("run\\d{4}/")[1];
-                }
-                if(input==null){
-                    input=new HashMap();
-                }
-                input.put(jobname,new ArrayList());
-                if(input.containsKey(jobname)){
-                    ilist=(List)input.get(jobname);
-                }
-                ilist.add(lfn);
-                bf.readLine();
-                line=bf.readLine();
-                if(jobname.startsWith(Refiner.STAGE_IN_PREFIX)||jobname.startsWith(Refiner.INTER_POOL_PREFIX)){
-                    lfn= line.split("run\\d{4}/")[1];
-
-                }
-                if(output==null){
-                    output=new HashMap();
-                }
-                output.put(jobname,new ArrayList());
-                if(output.containsKey(jobname)){
-                    ilist=(List)output.get(jobname);
-                }
-                ilist.add(lfn);
-
-            }
-            bf.close();
-        }
-
-        if(jobname.startsWith("new_rc_register")){
-            BufferedReader bf =new BufferedReader(new FileReader(new File(jobname+".in")));
-            String line = null;
-            List ilist=null;
-            while((line=bf.readLine())!=null){
-                String lfn=null;
-                lfn= line.split(" ")[0];
-                if(input==null){
-                    input=new HashMap();
-                }
-                input.put(jobname,new ArrayList());
-                if(input.containsKey(jobname)){
-                    ilist=(List)input.get(jobname);
-                }
-                ilist.add(lfn);
-            }
-            bf.close();
-        }
-    }
-
-    public InteractionKey jobInvocationInteraction() throws Exception{
-
-        System.out.println("We now create the job Invocation interaction key");
-
-        // Create addresses for the source and sink of the
-        // interaction.
-        WSAddressEndpoint source = new WSAddressEndpoint(CONDOR);
-
-        WSAddressEndpoint sink   = new WSAddressEndpoint(jobname);
-
-
-        // Create an interactionId, this should be unique!
-
-        String interactionId =  wf_label+wf_planned_time+jobname;
-        InteractionKey ik = new InteractionKey(source.getElement(), sink.getElement(), interactionId);
-
-
-        System.out.println("Building p-assertions...");
-
-        InteractionPAssertion ipa = createJobInvocationInteractionPAssertion();
-
-
-        //setting sender type
-        System.out.println("We are the sender/client view of the interaction");
-        String vk = Constants.SENDER_VIEW_TYPE;
-        System.out.println();
-
-        //set asserter to CONDOR
-
-        WSAddressEndpoint asserter = new WSAddressEndpoint(CONDOR);
-
-        List records = new ArrayList();
-
-        System.out.println("Creating Record objects for each p-assertion");
-
-        Record recIpa = new Record(ipa, ik, vk, asserter.getElement());
-        records.add(recIpa);
-
-        //iterate over parents to create multiple rpa's
-        RelationshipPAssertion rpa = null;
-        Record recRpa=null;
-        if(input.containsKey(jobname)){
-            List inputs = (List)input.get(jobname);
-            //    for(int i=0; i<inputs.size();i++){
-            //       Iterator j = inputs.iterator();
-            int i=0;
-            for(Iterator j = inputs.iterator();j.hasNext();){
-                String tempfile=(String)j.next();
-                for(Iterator k = parents.iterator();k.hasNext();){
-                    String tempjob=(String)k.next();
-                    List templist=(List)output.get(tempjob);
-                    if(templist!=null){
-                        if (templist.contains(tempfile)){
-                            i++;
-                            System.out.println("Parent Relationship *** file="+tempfile+" from="+jobname+" to="+tempjob);
-                            recRpa=new Record(createJobToTransferRelationshipPAssertion(tempfile,tempjob,i ),ik,vk,asserter.getElement());
-                            records.add(recRpa);
-                        }
-                    }
-                }
-
-            }
-        }
-
-        System.out.println("Recording the p-assertions in provenance store " + provenanceStore);
-
-        clientLib.record(records.iterator(), provenanceStore,true);
-
-
-        System.out.println("sender p-assertions recorded");
-        System.out.println();
-
-        //setting reciever type
-
-        System.out.println("We are the sender/client view of the interaction");
-        vk = Constants.RECEIVER_VIEW_TYPE;
-        System.out.println();
-
-        //set asserter to Job
-
-        asserter = new WSAddressEndpoint(jobname);
-        recIpa = new Record(ipa, ik, vk, asserter.getElement());
-
-
-
-        System.out.println("Recording the p-assertions in provenance store " + provenanceStore);
-
-        clientLib.record(recIpa, provenanceStore);
-
-
-        System.out.println("receiver p-assertions recorded");
-        System.out.println();
-
-        return ik;
-
-    }
-
-    public void jobCompletionInteraction(InteractionKey invocationinteractionkey) throws Exception{
-
-        System.out.print("Creating Completion Interaction Key ....... ");
-
-        // Create addresses for the source and sink of the
-        // interaction.
-        // Create an interactionId, this should be unique!
-        WSAddressEndpoint source = new WSAddressEndpoint(jobname);
-        WSAddressEndpoint sink   = new WSAddressEndpoint(CONDOR);
-        String interactionId =  wf_label+wf_planned_time+jobname;
-        InteractionKey ik = new InteractionKey(source.getElement(), sink.getElement(), interactionId);
-        System.out.println("DONE");
-
-        //setting sender type
-        String vk = Constants.SENDER_VIEW_TYPE;
-        //set asserter to be the job
-        WSAddressEndpoint asserter = new WSAddressEndpoint(jobname);
-
-        System.out.println("Building p-assertions ..... ");
-        List records = new ArrayList();
-
-        InteractionPAssertion ipa = createJobCompletionInteractionPAssertion();
-        Record recIpa = new Record(ipa, ik, vk, asserter.getElement());
-        records.add(recIpa);
-        //iterate over files to create multiple rpa's
-        RelationshipPAssertion rpa = null;
-        Record recRpa=null;
-        if(output.containsKey(jobname)){
-            int count=0;
-            for(Iterator i=((List)output.get(jobname)).iterator(); i.hasNext();){
-                count++;
-                recRpa=new Record(createJobRelationshipPAssertion(invocationinteractionkey,(String)i.next(),count ) ,ik,vk,asserter.getElement());
-                records.add(recRpa);
-            }
-        }
-        ActorStatePAssertion apa = createActorStatePAssertion(0);
-
-        Record recApa = new Record(apa, ik, vk, asserter.getElement());
-
-        records.add(recApa);
-        System.out.print("Recording the sender p-assertions in provenance store ..... ");
-
-        clientLib.record(records.iterator(), provenanceStore,true);
-
-        System.out.println("DONE");
-
-        //setting reciever type
-        vk = Constants.RECEIVER_VIEW_TYPE;
-
-
-        //set asserter to CONDOR
-
-        asserter = new WSAddressEndpoint(CONDOR);
-        recIpa = new Record(ipa, ik, vk, asserter.getElement());
-
-        records = new ArrayList();
-        records.add(recIpa);
-        /**
-         * //iterate over children to create multiple rpa's
-         * rpa = null;
-         * recRpa=null;
-         * List outputs = (List)output.get(jobname);
-         * //     for(int i=0; i<outputs.size();i++){
-         * //         Iterator j = outputs.iterator();
-         * int i =0;
-         * for(Iterator j=outputs.iterator();j.hasNext();){
-         *
-         * String tempfile=(String)j.next();
-         * for(Iterator k = children.iterator();k.hasNext();){
-         * String tempjob=(String)k.next();
-         * List templist = (List)input.get(tempjob);
-         * if(templist!=null){
-         * if(templist.contains(tempfile)){
-         * i++;
-         * System.out.println("Child Relationship *** file="+tempfile+" from="+jobname+" to="+tempjob);
-         *
-         * recRpa=new Record(createJobToTransferRelationshipPAssertion(tempfile,tempjob,i ),ik,vk,asserter.getElement());
-         * records.add(recRpa);
-         * }
-         * }
-         * }
-         *
-         * }
-         **/
-        System.out.print("Recording the receiver p-assertions in provenance store ..... ");
-
-        clientLib.record(recIpa, provenanceStore);
-
-
-        System.out.println("Done");
-
-    }
-
-    public InteractionKey transferInvocationInteraction() throws Exception{
-
-        System.out.print("Creating Invocation Interaction Key ..... ");
-
-        // Create addresses for the source and sink of the
-        // interaction.
-        WSAddressEndpoint source = new WSAddressEndpoint(CONDOR);
-        WSAddressEndpoint sink   = new WSAddressEndpoint(jobname);
-        String interactionId =  wf_label+wf_planned_time+jobname;
-        InteractionKey ik = new InteractionKey(source.getElement(), sink.getElement(), interactionId);
-        System.out.println("Done");
-
-        //setting sender type
-        String vk = Constants.SENDER_VIEW_TYPE;
-        //set asserter to CONDOR
-        WSAddressEndpoint asserter = new WSAddressEndpoint(CONDOR);
-
-
-        System.out.print("Building p-assertions ..... ");
-        InteractionPAssertion ipa = createTransferInvocationInteractionPAssertion();
-
-        List records=new ArrayList();
-        Record recIpa = new Record(ipa, ik, vk, asserter.getElement());
-        records.add(recIpa);
-        if(!jobname.startsWith(Refiner.STAGE_IN_PREFIX)){
-            //iterate over parents to create multiple rpa's
-            RelationshipPAssertion rpa = null;
-            Record recRpa=null;
-            List inputs = (List)input.get(jobname);
-            //    for(int i=0; i<inputs.size();i++){
-            //       Iterator j = inputs.iterator();
-            int i=0;
-            for(Iterator j = inputs.iterator();j.hasNext();){
-                String tempfile=(String)j.next();
-                for(Iterator k = parents.iterator();k.hasNext();){
-                    String tempjob=(String)k.next();
-                    List templist=(List)output.get(tempjob);
-                    if(templist!=null){
-                        if (templist.contains(tempfile)){
-                            i++;
-                            //  System.out.println("Parent Relationship *** file="+tempfile+" from="+jobname+" to="+tempjob);
-                            recRpa=new Record(createJobToTransferRelationshipPAssertion(tempfile,tempjob,i ),ik,vk,asserter.getElement());
-                            records.add(recRpa);
-                        }
-                    }
-                }
-
-            }
-        }
-        System.out.println("Done");
-        System.out.print("Recording the sender p-assertions in provenance store .......... ");
-        clientLib.record(records.iterator(), provenanceStore,true);
-        System.out.println("Done");
-
-        //setting reciever type
-        vk = Constants.RECEIVER_VIEW_TYPE;
-        //set asserter to job type
-        asserter = new WSAddressEndpoint(jobname);
-        //add the interaction P assertion
-        recIpa = new Record(ipa, ik, vk, asserter.getElement());
-        System.out.print("Recording the receiver p-assertions in provenance store ........ ");
-        clientLib.record(recIpa, provenanceStore);
-        System.out.println("DONE");
-        return ik;
-
-    }
-
-    public void transferCompletionInteraction(InteractionKey invocationinteractionkey) throws Exception{
-
-        System.out.print("Creating Completion Interaction Key ....... ");
-        // Create addresses for the source and sink of the
-        // interaction.
-        WSAddressEndpoint source = new WSAddressEndpoint(jobname);
-        WSAddressEndpoint sink   = new WSAddressEndpoint(CONDOR);
-        String interactionId =  wf_label+wf_planned_time+jobname;
-        InteractionKey ik = new InteractionKey(source.getElement(), sink.getElement(), interactionId);
-        System.out.println("Done");
-
-        //setting sender type
-        String vk = Constants.SENDER_VIEW_TYPE;
-        //set asserter to the job type
-        WSAddressEndpoint asserter = new WSAddressEndpoint(jobname);
-
-        System.out.print("Building p-assertions ..... ");
-        List records = new ArrayList();
-
-        InteractionPAssertion ipa = createTransferCompletionInteractionPAssertion();
-        Record recIpa = new Record(ipa, ik, vk, asserter.getElement());
-        records.add(recIpa);
-
-        //iterate over files to create multiple rpa's
-        RelationshipPAssertion rpa = null;
-        Record recRpa=null;
-
-        //get this file number from the .in file
-        //simon or paul will change the client.record method to take iterator of assertions instead of iterator of records.
-        for(int i=0; i<filecount;i++){
-            recRpa=new Record(createTransferRelationshipPAssertion(invocationinteractionkey,i ) ,ik,vk,asserter.getElement());
-            records.add(recRpa);
-        }
-
-        ActorStatePAssertion apa = createActorStatePAssertion(0);
-        Record recApa = new Record(apa, ik, vk, asserter.getElement());
-        records.add(recApa);
-
-        System.out.println("DONE");
-
-        System.out.print("Recording the sender p-assertions in provenance store ..... ");
-        clientLib.record(records.iterator(), provenanceStore,true);
-        System.out.println("Done");
-
-        //setting reciever type
-
-        vk = Constants.RECEIVER_VIEW_TYPE;
-        //set asserter to CONDOR
-        asserter = new WSAddressEndpoint(CONDOR);
-
-        //adding the interaction p assertion
-        recIpa = new Record(ipa, ik, vk, asserter.getElement());
-        System.out.print("Recording the receiver p-assertions in provenance store .... ");
-
-        clientLib.record(recIpa, provenanceStore);
-        System.out.println("DONE");
-
-    }
-
-    private InteractionPAssertion createTransferInvocationInteractionPAssertion()
-    throws Exception {
-        // Create an interaction p-assertion
-        // First we make a local p-assertion id and then
-        // we make a documentationStyle. In this case we
-        // call it verbatium.
-        //
-        // In most cases, you'll be grabing the messageBody from the message
-        // being sent between parties. So a SOAP message, or a CORBA message.
-        // With this example we'll just use a hard coded message body.
-
-        String localPAssertionId = "1";
-
-        // this message content will be obtained by parsing the transfer input files <jobid.in> and obtaining the source urls
-
-        BufferedReader bf =new BufferedReader(new FileReader(new File(jobname+".in")));
-        String line = null;
-        StringBuffer  message = new StringBuffer("<transfer xmlns=\"http://pegasus.isi.edu/schema/pasoa/content/transfer\">");
-        while((bf.readLine())!=null){
-            line=bf.readLine();
-            filecount++;
-            if(!jobname.startsWith(Refiner.STAGE_OUT_PREFIX)){
-                message.append("<filename>"+line+"</filename>");
-            } else {
-                String lfn= line.split("run\\d{4}/")[1];
-
-                message.append("<filename file=\""+lfn+"\">"+line+"</filename>");
-            }
-            bf.readLine();
-            bf.readLine();
-        }
-        bf.close();
-        message.append("</transfer>");
-
-        // Convert it into a DOM Element
-        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
-        DocumentBuilder db = dbf.newDocumentBuilder();
-        Document msgDoc = db.parse(new InputSource(new StringReader(message.toString())));
-        Element messageBody = msgDoc.getDocumentElement();
-
-        InteractionPAssertion ipa = new InteractionPAssertion(localPAssertionId, documentationStyle, messageBody);
-
-        return ipa;
-    }
-
-    private InteractionPAssertion createTransferCompletionInteractionPAssertion()
-    throws Exception {
-        // Create an interaction p-assertion
-        // First we make a local p-assertion id and then
-        // we make a documentationStyle. In this case we
-        // call it verbatium.
-        //
-        // In most cases, you'll be grabing the messageBody from the message
-        // being sent between parties. So a SOAP message, or a CORBA message.
-        // With this example we'll just use a hard coded message body.
-
-        String localPAssertionId = "1";
-
-        // this message content will be obtained by parsing the transfer input files <jobid.in> and obtaining the destination urls
-
-
-        BufferedReader bf =new BufferedReader(new FileReader(new File(jobname+".in")));
-        String line = null;
-        StringBuffer  message = new StringBuffer("<transfer xmlns=\"http://pegasus.isi.edu/schema/pasoa/content/transfer\">");
-        while((line=bf.readLine())!=null){
-            bf.readLine();
-            bf.readLine();
-            line = bf.readLine();
-            filecount++;
-            if(jobname.startsWith(Refiner.STAGE_OUT_PREFIX)){
-                message.append("<filename>"+line+"</filename>");
-            }else {
-                String lfn= line.split("run\\d{4}/")[1];
-                message.append("<filename file=\""+lfn+"\">"+line+"</filename>");
-            }
-        }
-        bf.close();
-        message.append("</transfer>");
-
-        // Convert it into a DOM Element
-        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
-        DocumentBuilder db = dbf.newDocumentBuilder();
-        Document msgDoc = db.parse(new InputSource(new StringReader(message.toString())));
-        Element messageBody = msgDoc.getDocumentElement();
-
-        InteractionPAssertion ipa = new InteractionPAssertion(localPAssertionId, documentationStyle, messageBody);
-
-        return ipa;
-    }
-
-    private ActorStatePAssertion createActorStatePAssertion(long count)
-    throws Exception {
-        // Create an actor state p-assertion
-        // Just like the interaction p-assertion this p-assertion
-        // needs a local p-assertion id. Remember, all the p-assertions
-        // in one view need a different id. Therefore, we give this assertion
-        // the id of "2" instead of "1".
-        //
-        // Again you'll typically be getting some state from the actor,
-        // translating it to XML to create the actor state p-assertion
-        // In this example, we just use a hard coded string.
-
-        String localPAssertionId = "aspa-"+count;
-
-        ActorStatePAssertion asa = new ActorStatePAssertion(localPAssertionId, docelement);
-
-        return asa;
-    }
-
-    private RelationshipPAssertion createTransferRelationshipPAssertion(InteractionKey invocationik, long index)
-    throws Exception {
-        // Create a relationship p-assertion
-        // Again a different local p-assertion id
-        //
-        // We'll create a "usage" relationship between the interaction p-assertion
-        // and the actor state p-assertion. This relationship says that
-        // message represented by interaction p-assertion "1" used the actor state
-        // represented by actor state p-assertion "2".
-        // There are no data accessors or links so we pass null.
-
-        // Create the information to identify the subject of the relationship
-        // Remember, parameter names must be identified and they need to be URIs
-        String localPAssertionId = "rpa"+index;
-        String subjectLocalPAssertionId = "1";
-        String subjectParameterName = "http://pegasus.isi.edu/schema/pasoa/type/outputfile";
-
-        // Create the information to identify the object of the relationship
-
-        String objectLocalPAssertionId = "1"; // points to the interaction p-assertion of the invocation interaction receiver
-
-
-        GlobalPAssertionKey gpak = new GlobalPAssertionKey(invocationik, "receiver", objectLocalPAssertionId);
-        String objectParameterName = "http://pegasus.isi.edu/schema/pasoa/type/inputfile";
-
-        Element dataAccessor= createTransferDataAccessor(index);
-
-        ObjectID objId = new ObjectID(gpak, objectParameterName, dataAccessor, null);
-
-        // We add the objId to the list of objects. We only have one objectId here
-        // but when making another type of relationship more than one objectId may
-        // be required
-        LinkedList objectIds = new LinkedList();
-        objectIds.add(objId);
-
-        // Create the "use" relation. Again this should be a URI
-        String relation = "http://pegasus.isi.edu/pasoa/relation/transfer/copy-of";
-        dataAccessor= createTransferDataAccessor( index);
-        // Finally, create the relationship object and return it.
-        RelationshipPAssertion rel = new RelationshipPAssertion(localPAssertionId,
-                subjectLocalPAssertionId,
-                dataAccessor,
-                subjectParameterName,
-                relation,
-                objectIds);
-
-        return rel;
-
-    }
-
-    //will have to do for handling merged jobs correctly.
-    private InteractionPAssertion createMergedJobInvocationInteractionPAssertion() throws Exception{
-
-      String localPAssertionId = "1";
-
-      // this message content will be obtained by parsing the transfer input files <jobid.in> and obtaining the source urls
-      StringBuffer  message = new StringBuffer("<files link=\"input\" xmlns=\"http://pegasus.isi.edu/schema/pasoa/content/files\">");
-      if(input!=null){
-           if(input.containsKey(jobname)){
-               List inputs = (List) input.get(jobname);
-               for (Iterator i = inputs.iterator(); i.hasNext(); ) {
-                   message.append("<filename>" + (String) i.next() +
-                                  "</filename>");
-               }
-           }
-      }
-      message.append("</files>");
-
-      // Convert it into a DOM Element
-      DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
-      DocumentBuilder db = dbf.newDocumentBuilder();
-      Document msgDoc = db.parse(new InputSource(new StringReader(message.toString())));
-      Element messageBody = msgDoc.getDocumentElement();
-
-      InteractionPAssertion ipa = new InteractionPAssertion(localPAssertionId, documentationStyle, messageBody);
-
-      return ipa;
-
-  }
-
-
-    private InteractionPAssertion createJobInvocationInteractionPAssertion() throws Exception{
-
-        String localPAssertionId = "1";
-
-        // this message content will be obtained by parsing the transfer input files <jobid.in> and obtaining the source urls
-
-        StringBuffer  message = new StringBuffer("<files link=\"input\" xmlns=\"http://pegasus.isi.edu/schema/pasoa/content/files\">");
-        if(input!=null){
-             if(input.containsKey(jobname)){
-                 List inputs = (List) input.get(jobname);
-                 for (Iterator i = inputs.iterator(); i.hasNext(); ) {
-                     message.append("<filename>" + (String) i.next() +
-                                    "</filename>");
-                 }
-             }
-        }
-        message.append("</files>");
-
-        // Convert it into a DOM Element
-        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
-        DocumentBuilder db = dbf.newDocumentBuilder();
-        Document msgDoc = db.parse(new InputSource(new StringReader(message.toString())));
-        Element messageBody = msgDoc.getDocumentElement();
-
-        InteractionPAssertion ipa = new InteractionPAssertion(localPAssertionId, documentationStyle, messageBody);
-
-        return ipa;
-
-    }
-
-
-
-    private InteractionPAssertion createJobCompletionInteractionPAssertion() throws Exception{
-
-        String localPAssertionId = "1";
-
-        // this message content will be obtained by parsing the transfer input files <jobid.in> and obtaining the source urls
-
-        StringBuffer  message = new StringBuffer("<files link=\"output\" xmlns=\"http://pegasus.isi.edu/schema/pasoa/content/files\">");
-        if(output!=null){
-            if(output.containsKey(jobname)){
-                List outputs = (List) output.get(jobname);
-                for (Iterator i = outputs.iterator(); i.hasNext(); ) {
-                    message.append("<filename>" + (String) i.next() +
-                                   "</filename>");
-                }
-            }
-        }
-        message.append("</files>");
-
-        // Convert it into a DOM Element
-        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
-        DocumentBuilder db = dbf.newDocumentBuilder();
-        Document msgDoc = db.parse(new InputSource(new StringReader(message.toString())));
-        Element messageBody = msgDoc.getDocumentElement();
-
-        InteractionPAssertion ipa = new InteractionPAssertion(localPAssertionId, documentationStyle, messageBody);
-
-        return ipa;
-    }
-
-    private RelationshipPAssertion createJobRelationshipPAssertion(InteractionKey invocationik, String filename, long index)
-    throws Exception {
-        String localPAssertionId = "rpa"+index;
-        String subjectLocalPAssertionId = "1";
-        String subjectParameterName = "http://pegasus.isi.edu/schema/pasoa/type/outputfile";
-
-        // Create the information to identify the object of the relationship
-
-        String objectLocalPAssertionId = "1"; // points to the interaction p-assertion of the invocation interaction receiver
-
-
-        GlobalPAssertionKey gpak = new GlobalPAssertionKey(invocationik, "receiver", objectLocalPAssertionId);
-        String objectParameterName = "http://pegasus.isi.edu/schema/pasoa/type/inputfile";
-        LinkedList objectIds = new LinkedList();
-
-        for(Iterator i=((List)input.get(jobname)).iterator();i.hasNext();){
-            Element dataAccessor= createLFNDataAccessor((String)i.next());
-            // We add the objId to the list of objects. We only have one objectId here
-            // but when making another type of relationship more than one objectId may
-            // be required
-            objectIds.add(new ObjectID(gpak, objectParameterName, dataAccessor, null));
-        }
-
-        // Create the "use" relation. Again this should be a URI
-        String relation = "http://pegasus.isi.edu/pasoa/relation/transformation/product-of";
-        Element dataAccessor= createLFNDataAccessor(filename);
-
-        // Finally, create the relationship object and return it.
-        RelationshipPAssertion rel = new RelationshipPAssertion(localPAssertionId,
-                subjectLocalPAssertionId,
-                dataAccessor,
-                subjectParameterName,
-                relation,
-                objectIds);
-
-        return rel;
-
-    }
-    private RelationshipPAssertion createJobToTransferRelationshipPAssertion(String filename,String parentjob,int index) throws Exception{
-        String localPAssertionId = "rpa"+index;
-        String subjectLocalPAssertionId = "1";
-        String subjectParameterName = "http://pegasus.isi.edu/schema/pasoa/type/inputfile";
-
-        // Create the information to identify the object of the relationship
-
-        String objectLocalPAssertionId = "1"; // points to the interaction p-assertion of the invocation interaction receiver
-        // interaction.
-
-        WSAddressEndpoint source = new WSAddressEndpoint(parentjob);
-        WSAddressEndpoint sink   = new WSAddressEndpoint(CONDOR);
-
-        String interactionId =  wf_label+wf_planned_time+parentjob;
-        InteractionKey ik = new InteractionKey(source.getElement(), sink.getElement(), interactionId);
-
-        GlobalPAssertionKey gpak = new GlobalPAssertionKey(ik, "receiver", objectLocalPAssertionId);
-        String objectParameterName = "http://pegasus.isi.edu/schema/pasoa/type/outputfile";
-
-        Element dataAccessor= createLFNDataAccessor(filename);
-
-        ObjectID objId = new ObjectID(gpak, objectParameterName, dataAccessor, null);
-
-        // We add the objId to the list of objects. We only have one objectId here
-        // but when making another type of relationship more than one objectId may
-        // be required
-        LinkedList objectIds = new LinkedList();
-        objectIds.add(objId);
-
-        // Create the "use" relation. Again this should be a URI
-        String relation = "http://pegasus.isi.edu/pasoa/relation/transfer/same-as";
-        //     dataAccessor=createNameValueDataAccessor(filename);
-        // Finally, create the relationship object and return it.
-        RelationshipPAssertion rel = new RelationshipPAssertion(localPAssertionId,
-                subjectLocalPAssertionId,
-                dataAccessor,
-                subjectParameterName,
-                relation,
-                objectIds);
-
-        return rel;
-    }
-    private Element createTransferDataAccessor(long index){
-        Map namespaces = new HashMap();
-        namespaces.put("tr", "http://pegasus.isi.edu/schema/pasoa/content/transfer");
-        return new org.pasoa.accessors.snxpath.SingleNodeXPathManager().createAccessor("/tr:transfer[0]/tr:filename[" + index + "]",
-                namespaces);
-
-    }
-
-    private Element createLFNDataAccessor(String value){
-        return new org.pasoa.accessors.lfn.LFNAccessorManager().createLFNAccessor(value);
-    }
-
-    private InteractionPAssertion createRegisterInvocationInteractionPAssertion()
-    throws Exception {
-        // Create an interaction p-assertion
-        // First we make a local p-assertion id and then
-        // we make a documentationStyle. In this case we
-        // call it verbatium.
-        //
-        // In most cases, you'll be grabing the messageBody from the message
-        // being sent between parties. So a SOAP message, or a CORBA message.
-        // With this example we'll just use a hard coded message body.
-
-        String localPAssertionId = "1";
-
-
-        BufferedReader bf =new BufferedReader(new FileReader(new File(jobname+".in")));
-        String line = null;
-        StringBuffer  message = new StringBuffer("<register xmlns=\"http://pegasus.isi.edu/schema/pasoa/content/register\">");
-        while((line=bf.readLine())!=null){
-            filecount++;
-            String[] lfn= line.split(" ");
-            message.append("<filename file=\""+lfn[0]+"\">"+lfn[1]+"</filename>");
-
-        }
-        message.append("</register>");
-
-        // Convert it into a DOM Element
-        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
-        DocumentBuilder db = dbf.newDocumentBuilder();
-        Document msgDoc = db.parse(new InputSource(new StringReader(message.toString())));
-        Element messageBody = msgDoc.getDocumentElement();
-
-        InteractionPAssertion ipa = new InteractionPAssertion(localPAssertionId, documentationStyle, messageBody);
-
-        return ipa;
-    }
-
-    public InteractionKey registerInvocationInteraction() throws Exception{
-
-        System.out.println("We now create the transfer Invocation interaction key");
-
-        // Create addresses for the source and sink of the
-        // interaction.
-        WSAddressEndpoint source = new WSAddressEndpoint(CONDOR);
-        WSAddressEndpoint sink   = new WSAddressEndpoint(jobname);
-
-
-        String interactionId =  wf_label+wf_planned_time+jobname;
-        InteractionKey ik = new InteractionKey(source.getElement(), sink.getElement(), interactionId);
-
-        System.out.println("Building p-assertions...");
-
-        InteractionPAssertion ipa = createRegisterInvocationInteractionPAssertion();
-
-        List records=new ArrayList();
-        //setting sender type
-        System.out.println("We are the sender/client view of the interaction");
-        String vk = Constants.SENDER_VIEW_TYPE;
-        System.out.println();
-
-        //set asserter to CONDOR
-
-        WSAddressEndpoint asserter = new WSAddressEndpoint(CONDOR);
-
-
-        System.out.println("Creating Record objects for each p-assertion");
-
-        Record recIpa = new Record(ipa, ik, vk, asserter.getElement());
-        records.add(recIpa);
-        Record recRpa = null;
-
-        String tempparent=null;
-        if(parents !=null || !parents.isEmpty()){
-            tempparent=(String)parents.get(0);
-        }
-        for(int i=0; i<filecount;i++){
-            recRpa=new Record(createRegisterToTransferRelationshipPAssertion(tempparent,i ) ,ik,vk,asserter.getElement());
-            records.add(recRpa);
-        }
-        System.out.println("Recording the p-assertions in provenance store " + provenanceStore);
-
-        clientLib.record(records.iterator(), provenanceStore,true);
-
-
-        System.out.println("sender p-assertions recorded");
-        System.out.println();
-
-        //setting reciever type
-
-        System.out.println("We are the sender/client view of the interaction");
-        vk = Constants.RECEIVER_VIEW_TYPE;
-        System.out.println();
-
-
-        //set asserter to CONDOR
-
-        asserter = new WSAddressEndpoint(jobname);
-        recIpa = new Record(ipa, ik, vk, asserter.getElement());
-
-
-        System.out.println("Recording the p-assertions in provenance store " + provenanceStore);
-
-        clientLib.record(recIpa, provenanceStore);
-
-
-        System.out.println("receiver p-assertions recorded");
-        System.out.println();
-
-        return ik;
-
-    }
-
-    public void registerCompletionInteraction(InteractionKey invocationinteractionkey) throws Exception{
-
-        System.out.println("We now create the register Completion interaction key");
-
-        // Create addresses for the source and sink of the
-        // interaction.
-        WSAddressEndpoint source = new WSAddressEndpoint(jobname);
-        WSAddressEndpoint sink   = new WSAddressEndpoint(CONDOR);
-
-
-        String interactionId =  wf_label+wf_planned_time+jobname;
-        InteractionKey ik = new InteractionKey(source.getElement(), sink.getElement(), interactionId);
-
-        System.out.println("Building p-assertions...");
-        List records = new ArrayList();
-        //setting sender type
-        System.out.println("We are the sender/client view of the interaction");
-        String vk = Constants.SENDER_VIEW_TYPE;
-        System.out.println();
-
-
-        WSAddressEndpoint asserter = new WSAddressEndpoint(jobname);
-
-        System.out.println("Creating Record objects for each p-assertion ....... ");
-
-        ActorStatePAssertion apa = createActorStatePAssertion(0);
-        System.out.println("Done");
-        Record recApa = new Record(apa, ik, vk, asserter.getElement());
-
-        records.add(recApa);
-        System.out.print("Recording sender p-assertions ............ ");
-
-        clientLib.record(records.iterator(), provenanceStore,true);
-
-        System.out.println("DONE\n");
-
-        //setting reciever type
-
-        System.out.println("We are the sender/client view of the interaction\n");
-        vk = Constants.RECEIVER_VIEW_TYPE;
-        asserter = new WSAddressEndpoint(CONDOR);
-
-        //no receiver InteractionPAssertion.
-
-
-    }
-
-    private RelationshipPAssertion createRegisterToTransferRelationshipPAssertion(String parentjob,  long index)
-    throws Exception {
-        // Create a relationship p-assertion
-        // Again a different local p-assertion id
-        //
-        // We'll create a "usage" relationship between the interaction p-assertion
-        // and the actor state p-assertion. This relationship says that
-        // message represented by interaction p-assertion "1" used the actor state
-        // represented by actor state p-assertion "2".
-        // There are no data accessors or links so we pass null.
-
-        // Create the information to identify the subject of the relationship
-        // Remember, parameter names must be identified and they need to be URIs
-        String localPAssertionId = "rpa"+index;
-        String subjectLocalPAssertionId = "1";
-        String subjectParameterName = "http://pegasus.isi.edu/schema/pasoa/type/outputfile";
-
-        // Create the information to identify the object of the relationship
-
-        String objectLocalPAssertionId = "1"; // points to the interaction p-assertion of the invocation interaction receiver
-
-
-        WSAddressEndpoint source = new WSAddressEndpoint(parentjob);
-        WSAddressEndpoint sink   = new WSAddressEndpoint(CONDOR);
-
-        String interactionId =  wf_label+wf_planned_time+parentjob;
-        InteractionKey ik = new InteractionKey(source.getElement(), sink.getElement(), interactionId);
-
-        GlobalPAssertionKey gpak = new GlobalPAssertionKey(ik, "receiver", objectLocalPAssertionId);
-        String objectParameterName = "http://pegasus.isi.edu/schema/pasoa/type/inputfile";
-
-        Element dataAccessor= createTransferDataAccessor(index);
-
-        ObjectID objId = new ObjectID(gpak, objectParameterName, dataAccessor, null);
-
-        // We add the objId to the list of objects. We only have one objectId here
-        // but when making another type of relationship more than one objectId may
-        // be required
-        LinkedList objectIds = new LinkedList();
-        objectIds.add(objId);
-
-        // Create the "use" relation. Again this should be a URI
-        String relation = "http://pegasus.isi.edu/pasoa/relation/register/rls-mapping";
-        // Finally, create the relationship object and return it.
-        RelationshipPAssertion rel = new RelationshipPAssertion(localPAssertionId,
-                subjectLocalPAssertionId,
-                dataAccessor,
-                subjectParameterName,
-                relation,
-                objectIds);
-
-        return rel;
-
-    }
-}
Index: pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/Braindump.java
===================================================================
--- pegasus-wms_4.0.1+dfsg.orig/src/edu/isi/pegasus/planner/code/generator/Braindump.java	2012-05-24 16:47:52.064156782 -0700
+++ pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/Braindump.java	2012-05-24 16:47:54.084166807 -0700
@@ -21,7 +21,6 @@
 
 import java.net.UnknownHostException;
 
-import org.globus.gsi.GlobusCredentialException;
 import edu.isi.pegasus.planner.classes.ADag;
 import edu.isi.pegasus.planner.classes.DagInfo;
 import edu.isi.pegasus.planner.classes.PegasusBag;
@@ -30,8 +29,6 @@
 import edu.isi.pegasus.planner.classes.PlannerOptions;
 import edu.isi.pegasus.planner.common.PegasusProperties;
 
-import org.globus.gsi.GlobusCredential;
-
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileWriter;
@@ -430,17 +427,7 @@
      * @return the DN else null if proxy file not found.
      */
     protected String getGridDN( ){
-        String dn = null;
-        try {
-            
-            GlobusCredential credential = GlobusCredential.getDefaultCredential();
-                    //new GlobusCredential(proxyFile);
-
-            dn = credential.getIdentity();
-        } catch (GlobusCredentialException ex) {
-            mLogger.log( "Unable to determine GRID DN", ex, LogManager.DEBUG_MESSAGE_LEVEL );
-        }
-        return dn;
+        return "unknown";
     }
     
     
Index: pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/Metrics.java
===================================================================
--- pegasus-wms_4.0.1+dfsg.orig/src/edu/isi/pegasus/planner/code/generator/Metrics.java	2012-05-24 16:47:52.064156782 -0700
+++ pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/Metrics.java	2012-05-24 16:47:54.084166807 -0700
@@ -20,7 +20,6 @@
 
 import java.net.UnknownHostException;
 
-import org.globus.gsi.GlobusCredentialException;
 import edu.isi.pegasus.planner.classes.ADag;
 import edu.isi.pegasus.planner.classes.PegasusBag;
 import edu.isi.pegasus.planner.classes.Job;
@@ -28,8 +27,6 @@
 import edu.isi.pegasus.planner.classes.PlannerOptions;
 import edu.isi.pegasus.planner.common.PegasusProperties;
 
-import org.globus.gsi.GlobusCredential;
-
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileWriter;
Index: pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provenance/pasoa/pps/Pasoa.java
===================================================================
--- pegasus-wms_4.0.1+dfsg.orig/src/edu/isi/pegasus/planner/provenance/pasoa/pps/Pasoa.java	2012-05-24 16:47:52.064156782 -0700
+++ /dev/null	1970-01-01 00:00:00.000000000 +0000
@@ -1,372 +0,0 @@
-/**
- *  Copyright 2007-2008 University Of Southern California
- *
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package edu.isi.pegasus.planner.provenance.pasoa.pps;
-
-import edu.isi.pegasus.planner.provenance.pasoa.PPS;
-
-
-import edu.isi.pegasus.planner.classes.Job;
-import edu.isi.pegasus.planner.refiner.Refiner;
-
-import org.pasoa.common.BestPractice;
-import org.pasoa.common.Constants;
-
-import org.pasoa.pstructure.GlobalPAssertionKey;
-import org.pasoa.pstructure.InteractionKey;
-import org.pasoa.pstructure.InteractionPAssertion;
-import org.pasoa.pstructure.ObjectID;
-import org.pasoa.pstructure.PAssertion;
-import org.pasoa.pstructure.Record;
-import org.pasoa.pstructure.RelationshipPAssertion;
-import org.pasoa.pstructure.SubjectID;
-
-import org.pasoa.storeclient.ClientLib;
-
-import org.pasoa.util.httpsoap.WSAddressEndpoint;
-
-import java.io.IOException;
-import java.io.StringReader;
-import java.net.URL;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import org.w3c.dom.Element;
-import org.xml.sax.InputSource;
-import org.xml.sax.SAXException;
-
-/**
- * Implements the PPS interface for recording documentation from a Pegasus refinement.
- */
-public class Pasoa implements PPS {
-    // The current workflow XML serialisation (except for the final footer part: see _xmlFooter below)
-    // This is built up cumulatively over time by the refiners providing XML fragments to add
-    private String         _workflowXML;
-    // A count of the number of relationship p-assertions recorded (used to create unique p-assertion IDs)
-    private int            _relationshipPAssertionCounter;
-    // The key for the interaction in which a refiner is invoked
-    private InteractionKey _causeKey;
-    // The key for the interaction in which a refiner completes
-    private InteractionKey _effectKey;
-    // The name (URI) of the current refinement step
-    private String         _refinement;
-    // The unique name of the current refinement process, generated from system time
-    private String         _refinementID;
-
-    // The suffix to the XML workflow serialisation
-    //private static final String _xmlFooter = "</workflow>";
-    private static final String _xmlFooter = "";
-
-    /**
-     * On initialisation, create a ClientLib object for communication with a
-     * store, set the store URL and create a namespace-aware DOM document parser.
-     */
-    public Pasoa () throws Exception {
-        _storeProxy     = new ClientLib ();
-        String storeURL = "http://localhost:8080/preserv-1.0";
-
-        _storeRecordURL = new URL (storeURL + "/record");
-
-        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance ();
-        factory.setNamespaceAware (true);
-        _builder = factory.newDocumentBuilder ();
-    }
-
-
-    /**
-     * On initialisation, create a ClientLib object for communication with a
-     * store, set the store URL and create a namespace-aware DOM document parser.
-     */
-    public Pasoa (String storeURL) throws Exception {
-        _storeProxy     = new ClientLib ();
-        _storeRecordURL = new URL (storeURL + "/record");
-
-        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance ();
-        factory.setNamespaceAware (true);
-        _builder = factory.newDocumentBuilder ();
-    }
-
-    //  PPS methods  //
-
-    public String beginWorkflowRefinementStep (Refiner workflow, String refinementStepName, boolean firstStep) throws Exception {
-        if (firstStep) {
-            _workflowXML = workflow.getXMLProducer ().toXML ();
-            _refinementID = Long.toString (System.currentTimeMillis ());
-            _causeKey = createInteractionKey (_refinementID, refinementStepName, true);
-        } else {
-            // Record relationships between output of one refinement, input of the one being started
-            _causeKey  = _effectKey;
-            _effectKey = createInteractionKey (_refinementID, refinementStepName, true);
-            for (Iterator it = workflow.getWorkflow ().jobIterator (); it.hasNext ();){
-                Job job = (Job) it.next ();
-                String name = job.getName ();
-                isIdenticalTo (name, name);
-            }
-            // Now move on to refinement itself
-            _causeKey = _effectKey;
-        }
-        _effectKey = createInteractionKey (_refinementID, refinementStepName, false);
-        _refinement = refinementStepName;
-        _relationshipPAssertionCounter = 0;
-
-
-        // Record the initial invocation of the refiner
-        recordInteraction (_workflowXML + _xmlFooter, _causeKey, _refinement, true);
-
-        return _refinementID;
-    }
-
-    public void isIdenticalTo (String afterNode, String beforeNode) throws Exception {
-        recordRelationship (_relationshipPAssertionCounter, afterNode,
-                _identicalParameter, _identicalRelation, _causeKey,
-                beforeNode, _identicalParameter, _refinement);
-        _relationshipPAssertionCounter += 1;
-    }
-
-    public void siteSelectionFor (String afterNode, String beforeNode) throws Exception {
-        recordRelationship (_relationshipPAssertionCounter, afterNode,
-                _siteSelectionOutputParameter, _siteSelectionRelation, _causeKey,
-                beforeNode, _siteSelectionInputParameter, _refinement);
-        _relationshipPAssertionCounter += 1;
-    }
-
-    public void stagingIntroducedFor (List stagingNodes, String appNode) throws Exception {
-        for (Object stagingNode : stagingNodes) {
-            recordRelationship (_relationshipPAssertionCounter, stagingNode.toString (),
-                    _stagingParameter, _stagingRelation, _causeKey,
-                    appNode, _stagedForParameter, _refinement);
-            _relationshipPAssertionCounter += 1;
-        }
-    }
-
-    public void registrationIntroducedFor (String registrationNode, String dataStagingNode) throws Exception {
-        recordRelationship (_relationshipPAssertionCounter, dataStagingNode,
-                _registrationParameter, _registrationRelation, _causeKey,
-                registrationNode, _registrationOfParameter, _refinement);
-        _relationshipPAssertionCounter += 1;
-    }
-
-    public void clusteringOf (String clusteredJob, List jobs) throws Exception {
-        for (Object inCluster : jobs) {
-            recordRelationship (_relationshipPAssertionCounter, clusteredJob,
-                    _clusterParameter, _clusteredRelation, _causeKey,
-                    inCluster.toString (), _inClusterParameter, _refinement);
-            _relationshipPAssertionCounter += 1;
-        }
-    }
-
-    public void isPartitionOf (String afterNode, List beforeNode) {
-        throw new UnsupportedOperationException ();
-    }
-
-    public void endWorkflowRefinementStep (Refiner workflow) throws Exception {
-        _workflowXML += workflow.getXMLProducer ().toXML ();
-        recordInteraction (_workflowXML + _xmlFooter, _effectKey, _refinement, false);
-    }
-
-    //  Utility constants and methods  //
-
-    /**
-     * A namespace we can use to identify relationships and concepts defined for Pegasus' provenance data
-     */
-    //private static final String _namespace = "http://www.isi.edu/pasoa";
-
-    // Relations:
-    // Relationships are asserted between workflow nodes before a refinement and
-    // those after the refinement.  The former are 'objects' of the relationship,
-    // the latter are 'subjects'.  Every relationship has a type which is identified
-    // by a URI.
-    //
-    // For each subject and object of a relationship, the role that each plays
-    // in the relationship must be declared, the role type being called the
-    // 'parameter name' and identified by a URI.
-
-    /**
-     * The identicalTo relationship relates a workflow node before and after a
-     * refinement that has not changed during that refinement
-     */
-    public static final String _identicalRelation = NAMESPACE + "/relations#identicalTo";
-    /*
-     * In an identical relationship both subject and object play the role of
-     * 'item', as in 'this item is identical to that item'.
-     */
-    public static final String _identicalParameter = NAMESPACE + "/parameters#item";
-
-    /**
-     * The site seleciotn relationship relates a job that has had its site selected
-     * to that same job before site selection.
-     */
-    public static final String _siteSelectionRelation        = NAMESPACE + "/relations#siteSelectionOf";
-    /**
-     * The job before site selection plays the 'preselection' role.
-     */
-    public static final String _siteSelectionInputParameter  = NAMESPACE + "/parameters#preselection";
-    /**
-     * The job after site selection plays the 'postselection' role.
-     */
-    public static final String _siteSelectionOutputParameter = NAMESPACE + "/parameters#postselection";
-
-    public static final String _stagingRelation    = NAMESPACE + "/relations#staging";
-    public static final String _stagedForParameter = NAMESPACE + "/parameters#stagedFor";
-    public static final String _stagingParameter   = NAMESPACE + "/parameters#staging";
-
-    public static final String _registrationRelation    = NAMESPACE + "/relations#registration";
-    public static final String _registrationOfParameter = NAMESPACE + "/parameters#registrationOf";
-    public static final String _registrationParameter   = NAMESPACE + "/parameters#registration";
-
-    public static final String _clusteredRelation  = NAMESPACE + "/relations#clustered";
-    public static final String _inClusterParameter = NAMESPACE + "/parameters#inCluster";
-    public static final String _clusterParameter   = NAMESPACE + "/parameters#cluster";
-
-    /**
-     * A partially refined workflow is specified as an XML document.
-     * We represent this as a String object, and for convenience this is
-     * the closing tag of that document.
-     */
-    private static final String _workflowPostfix = "</workflow>";
-
-    /** ClientLib is the primary class by which a client communicates with a provenance store */
-    private ClientLib       _storeProxy;
-    /** The URL of the provenance store Web Service (recording port) */
-    private URL             _storeRecordURL;
-    /** A pre-created DOM XML parser (expensive to create so we do just once) */
-    private DocumentBuilder _builder;
-
-    /**
-     * Conventionally, we use WS-Addressing to identify the endpoints of an
-     * interaction between actors, and this method constructs an XML (DOM) fragment
-     * in the WS-Addressing schema for a particular URL.
-     *
-     * @param address The URL of the endpoint
-     * @return An XML (DOM) fragment in WS-Addressing endpoint schema containing the address
-     */
-    public static Element addressToElement (String address) {
-        return new WSAddressEndpoint (address).getElement ();
-    }
-
-    /**
-     * Individual jobs in a workflow are identified by an XML document fragment,
-     * called a data accessor, and this method constructs the fragment for a given
-     * job ID.
-     *
-     * @param jobID The job ID
-     * @return An XML (DOM) fragment representing a reference to that job in an XML workflow representation
-     */
-    public Element createDataAccessor (String jobID) throws IOException, SAXException {
-        return toElement ("<jobID xmlns = \"" + NAMESPACE + "\">" + jobID + "</jobID>");
-    }
-
-    /**
-     * Creates an interaction p-assertion asserting that a given partially
-     * refined workflow was exchanged between actors.
-     *
-     * @param workflow The (XML) content of the partially refined workflow
-     * @return A JavaBean representation of an interaction p-assertion containing the workflow
-     */
-    public InteractionPAssertion createInteractionPAssertion (String workflow) throws IOException, SAXException {
-        return new InteractionPAssertion ("1",
-                BestPractice.VERBATIM_STYLE,
-                toElement (workflow + _workflowPostfix));
-    }
-
-    /**
-     * Creates an interaction key to identify an interaction between two actors.
-     *
-     * @param refinementID The unique identifier for this workflow refinement (run of Pegasus)
-     * @param refinementAddress The URI of the particular refinement step (site selection, cluster etc.)
-     * @param preRefinement True if the interaction is pre-refinement, i.e. from Pegasus to a refiner, rather than the other way round
-     */
-    public InteractionKey createInteractionKey (String refinementID, String refinementAddress, boolean preRefinement) {
-        if (preRefinement) {
-            return new InteractionKey (addressToElement (PEGASUS),
-                    addressToElement (refinementAddress), refinementID + "Start");
-        } else {
-            return new InteractionKey (addressToElement (refinementAddress),
-                    addressToElement (PEGASUS), refinementID + "End");
-        }
-    }
-
-    /**
-     * Creates a relationship p-assertion between nodes in two partially refined workflows.
-     *
-     * @param count The index of this relationship p-assertion in the interaction (to support the requirement that each p-assertion has a unique ID)
-     * @param effectJobID The job ID of the subject (effect) of the relationship
-     * @param effectParameter The role played by the subject of the relationship
-     * @param relationType The type of the relationship
-     * @param causeKey The interaction key of the object of the relationship
-     * @param causeJobID The job ID of the object (cause) of the relationship
-     * @param causeParameter The role played by the object of the relationship
-     * @return A RelationshipPAssertion JavaBean representing the relationship p-assertion with the given arguments
-     */
-    public RelationshipPAssertion createRelationship (int count, String effectJobID,
-            String effectParameter, String relationType, InteractionKey causeKey,
-            String causeJobID, String causeParameter) throws IOException, SAXException {
-        List <ObjectID> objectIDs = new LinkedList <ObjectID> ();
-        ObjectID        objectID  = new ObjectID (
-                new GlobalPAssertionKey (causeKey, Constants.RECEIVER_VIEW_TYPE, "1"),
-                effectParameter,
-                createDataAccessor (causeJobID),
-                null);
-
-        objectIDs.add (objectID);
-
-        return new RelationshipPAssertion ("RPA" + count,
-                new SubjectID ("1", createDataAccessor (effectJobID), effectParameter),
-                relationType,
-                objectIDs);
-    }
-
-    public void record (PAssertion passertion, InteractionKey interactionKey, boolean isSender, String asserterURL) throws Exception {
-        if (isSender) {
-            _storeProxy.record (new Record (passertion, interactionKey, Constants.SENDER_VIEW_TYPE,   addressToElement (asserterURL)), _storeRecordURL);
-        } else {
-            _storeProxy.record (new Record (passertion, interactionKey, Constants.RECEIVER_VIEW_TYPE, addressToElement (asserterURL)), _storeRecordURL);
-        }
-    }
-
-    public void recordInteraction (InteractionPAssertion passertion, InteractionKey interactionKey, String refinerType, boolean refinementInput) throws Exception {
-        if (refinementInput) {
-            record (passertion, interactionKey, true,  PEGASUS);
-            record (passertion, interactionKey, false, refinerType);
-        } else {
-            record (passertion, interactionKey, true,  refinerType);
-            record (passertion, interactionKey, false, PEGASUS);
-        }
-    }
-
-    public void recordInteraction (String workflow, InteractionKey interactionKey, String refinerType, boolean refinementInput) throws Exception {
-        recordInteraction (createInteractionPAssertion (workflow), interactionKey, refinerType, refinementInput);
-    }
-
-    public RelationshipPAssertion recordRelationship (int count, String effectJobID,
-            String effectParameter, String relationType, InteractionKey causeKey,
-            String causeJobID, String causeParameter, String asserterURL) throws Exception {
-        RelationshipPAssertion passertion = createRelationship (count, effectJobID, effectParameter, relationType,
-                causeKey, causeJobID, causeParameter);
-        record (passertion, _effectKey, true, asserterURL);
-        return passertion;
-    }
-
-    /**
-     * Convenience method to parse string represented XML into a DOM XML fragment representation
-     */
-    public Element toElement (String xmlAsString) throws IOException, SAXException {
-        //System.out.println( "XML as string is " + xmlAsString );
-        return _builder.parse (new InputSource (new StringReader (xmlAsString))).getDocumentElement ();
-    }
-}
Index: pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/AuthenticateEngine.java
===================================================================
--- pegasus-wms_4.0.1+dfsg.orig/src/edu/isi/pegasus/planner/refiner/AuthenticateEngine.java	2012-05-24 16:47:52.064156782 -0700
+++ /dev/null	1970-01-01 00:00:00.000000000 +0000
@@ -1,191 +0,0 @@
-/**
- *  Copyright 2007-2008 University Of Southern California
- *
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package edu.isi.pegasus.planner.refiner;
-
-import edu.isi.pegasus.planner.catalog.site.classes.FileServer;
-import edu.isi.pegasus.planner.catalog.site.classes.GridGateway;
-import edu.isi.pegasus.planner.classes.AuthenticateRequest;
-import edu.isi.pegasus.planner.catalog.site.impl.old.classes.GridFTPServer;
-import edu.isi.pegasus.planner.catalog.site.impl.old.classes.JobManager;
-
-import edu.isi.pegasus.common.logging.LogManager;
-import edu.isi.pegasus.planner.common.PegasusProperties;
-
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
-import edu.isi.pegasus.planner.classes.PegasusBag;
-
-/**
- * It authenticates the user with the sites, that the user specifies at the
- * execution time. It spawns out a thread for each pool that authenticates
- * against the jobmanager for the vanilla universe as specified in the pool
- * configuration file.
- *
- * @author Karan Vahi
- * @version $Revision: 2582 $
- */
-public class AuthenticateEngine extends Engine {
-
-    /**
-     * The Set of pools that need to be authenticated against.
-     */
-    private Set mExecPools;
-
-
-    /**
-     * The overloaded constructor.
-     *
-     * @param props  the <code>PegasusProperties</code> to be used.
-     * @param pools  The set of pools against which you want to authenticate the
-     *               user.
-     */
-/*    public AuthenticateEngine( PegasusProperties props, Set pools) {
-        super( props );
-        mExecPools = pools;
-
-    }
-*/ 
-
-    /**
-     * The overloaded constructor.
-     *
-     * @param bag    the <code>PegasusBag</code> to be used.
-     * @param pools  The set of pools against which you want to authenticate the
-     *               user.
-     */
-    public AuthenticateEngine( PegasusBag bag, Set pools) {
-        super( bag );
-        mExecPools = pools;
-    }
-    
-    /**
-     * It returns a set of pools against which the user can authenticate to.
-     *
-     * @return  the set of authenticated pools.
-     */
-    public Set authenticate(){
-        Iterator it = mExecPools.iterator();
-        ThreadPool manager = new ThreadPool( mProps, mExecPools);
-        String pool;
-        GridGateway jm;
-        FileServer gserv;
-        String contact;
-
-        //we need synchronization to ensure that an threads are started only
-        //when all the requests have been sent to the threadpool, as this
-        //failure to authenticate against a pool leads to it's removal from
-        //this set.
-        synchronized(mExecPools){
-            while(it.hasNext()){
-                pool = (String)it.next();
-
- //               List jmList =  mPoolHandle.getJobmanagers(pool);
- //               Iterator it1 = jmList.iterator();
-                for( Iterator it1 = mSiteStore.lookup( pool ).getGridGatewayIterator(); it1.hasNext() ;){
-                    jm = (GridGateway)it1.next();
-//                    contact = jm.getInfo(JobManager.URL);
-                    AuthenticateRequest ar = new AuthenticateRequest('j',pool, jm.getContact());
-                    manager.acceptRequest(ar);
-                }
-
-//                List gridFtpList = mPoolHandle.getGridFTPServers(pool);
-//                it1 = gridFtpList.iterator();
-//                while(it1.hasNext()){
-                for( Iterator it1 = mSiteStore.lookup( pool ).getFileServerIterator(); it1.hasNext();){
-                    gserv = ( FileServer )it1.next();
-//                    contact = gserv.getInfo(GridFTPServer.GRIDFTP_URL);
-                    AuthenticateRequest ar = new AuthenticateRequest('g',pool, gserv.getURLPrefix() );
-                    manager.acceptRequest(ar);
-
-                }
-            }
-        }
-        manager.shutdown();
-        purgePools();
-
-        return mExecPools;
-    }
-
-
-    /**
-     * It removies from the list of pools the pool that was not authenticated
-     * against. It queries the soft state of the pool config to see if there
-     * are at least one jobmanager and gridftp server on the pool.
-     * Due to the authentication the unauthenticated jobmanagers and servers
-     * would have been removed from the soft state of the pool config.
-     */
-    private synchronized void purgePools(){
-        Iterator it = mExecPools.iterator();
-        String pool;
-        List l;
-
-        while(it.hasNext()){
-            pool = (String)it.next();
-            l = mSiteStore.lookup( pool ).getFileServers();
-            if(l == null || l.isEmpty()){
-                mLogger.log("Removing Exec pool " + pool +
-                            "  as no authenticated gridftp server",
-                            LogManager.DEBUG_MESSAGE_LEVEL);
-                it.remove();
-                continue;
-            }
-
-            List l1 = mSiteStore.lookup( pool ).getGridGateways( );
-//            List l1 = mPoolHandle.getJobmanagers(pool,"transfer");
-            if( (l == null || l.isEmpty()) ||
-                (l1 == null || l1.isEmpty())){
-                //we have no jobmanagers for universe vanilla or transfer universe
-                mLogger.log("Removing Exec pool " + pool +
-                            " as no authenticated jobmanager",
-                            LogManager.DEBUG_MESSAGE_LEVEL);
-                it.remove();
-                continue;
-            }
-
-        }
-
-    }
-
-
-
-
-    /**
-     * The main testing method.
-     *
-     */
-    public static void main(String[] args){
-        Set s = new HashSet();
-        //s.add("isi_condor");
-        s.add("isi_lsf");
-/*
-        AuthenticateEngine a = new AuthenticateEngine( PegasusProperties.getInstance(),s);
-        a.mLogger.setLevel(1);
-
-        a.authenticate();
-
-        System.out.println("Authentication Done!!");
-        System.out.println(a.mPoolHandle.getGridFTPServers("isi_lsf"));
-        a.mLogger.log("Vanilla JMS " + a.mPoolHandle.getJobmanagers("isi_lsf"),
-                      LogManager.DEBUG_MESSAGE_LEVEL);
-*/ 
-
-    }
-
-
-}
Index: pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/Authenticate.java
===================================================================
--- pegasus-wms_4.0.1+dfsg.orig/src/edu/isi/pegasus/planner/refiner/Authenticate.java	2012-05-24 16:47:52.064156782 -0700
+++ /dev/null	1970-01-01 00:00:00.000000000 +0000
@@ -1,508 +0,0 @@
-/**
- *  Copyright 2007-2008 University Of Southern California
- *
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-
-package edu.isi.pegasus.planner.refiner;
-
-
-import edu.isi.pegasus.common.logging.LogManagerFactory;
-import edu.isi.pegasus.planner.classes.AuthenticateRequest;
-
-import edu.isi.pegasus.common.logging.LogManager;
-import edu.isi.pegasus.planner.common.PegasusProperties;
-
-import edu.isi.pegasus.planner.catalog.site.impl.old.PoolInfoProvider;
-
-import org.globus.gram.Gram;
-import org.globus.gram.GramException;
-
-import org.ietf.jgss.GSSCredential;
-import org.ietf.jgss.GSSException;
-
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.InterruptedIOException;
-import java.io.OutputStreamWriter;
-
-import java.net.ConnectException;
-import java.net.InetSocketAddress;
-import java.net.Socket;
-
-import java.util.StringTokenizer;
-
-/**
- * It takes in a authenticate request and authenticates against the resource
- * on the basis of the type of the resource against which authentication is
- * required.
- *
- * @author Karan Vahi
- * @version $Revision: 2582 $
- */
-
-public class Authenticate {
-
-
-    /**
-     * The standard port at which Grid FTP runs.
-     */
-    public static final int GRID_FTP_STANDARD_PORT = 2811;
-
-    /**
-     * The timeout in seconds. All sockets opened timeout after this period.
-     */
-    public static final int TIMEOUT_VALUE = 120;
-
-    /**
-     * The timeout value that is to be used in milliseconds
-     */
-    private int mTimeout;
-
-    /**
-     * The object containing the authenticate request.
-     */
-    private AuthenticateRequest mAuthRequest;
-
-    /**
-     * The handle to the Pool Info Provider.
-     */
-    private PoolInfoProvider mPoolHandle;
-
-    /**
-     * The handle to the LogManager object.
-     */
-    private LogManager mLogger;
-
-    /**
-     * The handle to the PegasusProperties object.
-     */
-    private PegasusProperties mProps;
-
-    /**
-     * The credential to be used while authentication to jobmanager.
-     */
-    private GSSCredential mCredential;
-
-
-    /**
-     * The overloaded constructor.
-     *
-     * @param properties  the <code>PegasusProperties</code> to be used.
-     */
-    public Authenticate( PegasusProperties properties, PoolInfoProvider poolHandle ) {
-        mPoolHandle = poolHandle;
-        mLogger     =  LogManagerFactory.loadSingletonInstance( );
-        mProps      = properties;
-        mTimeout    = (mProps.getGridFTPTimeout() == null)?
-                      this.TIMEOUT_VALUE:
-                      Integer.parseInt(mProps.getGridFTPTimeout());
-	mTimeout    *= 1000;
-    }
-
-    /**
-     * Sets the credential that has to be used for authentication.
-     *
-     * @param credential  the credential to be set.
-     */
-    public void setCredential(GSSCredential credential){
-        mCredential = credential;
-    }
-
-
-    /**
-     * Authenticates against a resource referred to in the authenticate request
-     * object.
-     */
-    public boolean authenticate(AuthenticateRequest ar) {
-        mAuthRequest = ar;
-        char type = ar.getResourceType();
-        boolean alive = false;
-
-        //check if the request is invalid
-        if (ar.requestInvalid()) {
-            throw new RuntimeException("Invalid authentication request " + ar);
-        }
-
-        if (type == AuthenticateRequest.GRIDFTP_RESOURCE) {
-            //check if the grid ftp server is alive.
-            HostPort hp = getHostPort(ar.getResourceContact());
-            alive = gridFTPAlive(hp.getHost(),hp.getPort());
-
-        }
-        if (type == AuthenticateRequest.JOBMANAGER_RESOURCE) {
-            alive = authenticateJobManager(ar.getResourceContact());
-        }
-
-        return alive;
-    }
-
-
-
-
-    /**
-     * It tries to remove a resource from the soft state of the pool. This is
-     * possible only if the underlying pool interface implementation is soft
-     * state.
-     *
-     * @param ar   the AuthenticateRequest containing the resource info
-     *
-     * @return boolean true removal was successful.
-     *                 false unable to remove.
-     */
-    public boolean removeResource(AuthenticateRequest ar){
-        char type = ar.getResourceType();
-
-        if(type == AuthenticateRequest.GRIDFTP_RESOURCE){
-            return mPoolHandle.removeGridFtp(ar.getPool(),
-                                             ar.getResourceContact());
-        }
-        if(type == AuthenticateRequest.JOBMANAGER_RESOURCE){
-            return mPoolHandle.removeJobManager(ar.getPool(),null,ar.getResourceContact());
-        }
-
-        return false;
-    }
-
-
-    /**
-     * It authenticates against the jobmanager specifyied.
-     *
-     * @param contact  the jobmanager contact.
-     */
-    public boolean authenticateJobManager(String contact){
-        boolean val = true;
-        try{
-            mLogger.log( "Authenticating " + contact, LogManager.DEBUG_MESSAGE_LEVEL);
-
-            if(mCredential == null){
-                //try authenticating the default credential
-                Gram.ping(contact);
-            }
-            else
-                Gram.ping(mCredential,contact);
-        }
-        catch(GramException gex){
-            mLogger.log("Unable authenticate against jobmanager " +
-                        contact + " because " + gex.getMessage(),
-                        LogManager.ERROR_MESSAGE_LEVEL);
-            val = false;
-        }
-        catch(GSSException gss){
-            String message = (gss.getMajor() == GSSException.CREDENTIALS_EXPIRED)?
-                "Your credentials have expired. You need to do a grid-proxy-init.":
-                "GssException caught " +gss.getMajorString()
-                + gss.getMinorString();
-            mLogger.log(message,LogManager.ERROR_MESSAGE_LEVEL);
-            val = false;
-        }
-        catch(Exception e){
-            //an unknown exception occured. print a message and return false
-            mLogger.log("Unknown Exception occured " + e.getMessage(),
-                        LogManager.ERROR_MESSAGE_LEVEL);
-            val = false;
-        }
-        finally{
-            mLogger.log("Authenticating completed for " + contact,LogManager.DEBUG_MESSAGE_LEVEL);
-        }
-        return val;
-    }
-
-    /**
-     * It checks with a grid ftp server running at a particular host
-     * and port, to see if it is up or not. This is done by opening a
-     * socket to the specified host at the specified port. If the socket
-     * timesout (which could be due to excessive load on the server or
-     * server being hung) false is returned.
-     *
-     * @param host  the host at which the gridftp server is running .
-     * @param port  the port at which server is running on the host.
-     *
-     * @return true the gridftp server is alive and kicking.
-     *         false - the submit host is not connected to the network.
-     *               - the server is not running.
-     *               - we were able to connect but timeout.
-     *               - version is not compatible.
-     *
-     */
-    public boolean gridFTPAlive(String host, int port) {
-        Socket s = new Socket();
-        String hp = combine(host, port);
-        boolean alive = false;
-
-        mLogger.log("Checking status of " + hp, LogManager.DEBUG_MESSAGE_LEVEL);
-        InetSocketAddress addrs = new InetSocketAddress(host, port);
-        if (addrs.isUnresolved()) {
-            //either the host on which Pegasus is running is not connected
-            //to the network, or the hostname is invalid. Either way we return
-            //false;
-            mLogger.log("Unresolved address to " + hp,
-                        LogManager.DEBUG_MESSAGE_LEVEL);
-            return false;
-        }
-
-        try {
-            s.connect(addrs,mTimeout);
-            //set the timeout for the input streams
-            // gotten from this socket
-            s.setSoTimeout(mTimeout);
-            String response;
-            char type = 'c';
-            BufferedReader rd = new BufferedReader(new InputStreamReader(
-                s.getInputStream()));
-
-            BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
-                s.getOutputStream()));
-
-            while ( (response = rd.readLine()) != null) {
-                /*mLogger.logMessage("Response from server " + hp + " " +
-                                   response,
-                                   1);*/
-
-                alive = parseGridFTPResponse(response, type);
-
-                if (type == 'c' && alive) {
-                    //send the quit command to the server
-                    out.write("quit\r\n");
-                    //do a half close. We just need to wait for the response
-                    //from server now
-                    s.shutdownOutput();
-                    type = 'q';
-                }
-                else {
-                    //invalid response or the server is stuck.
-                    //break out of the infinite waiting.
-                    break;
-                }
-
-            }
-        }
-        catch(java.net.SocketTimeoutException se){
-            //means we experienced a timeout on read
-            mLogger.log("Timeout experienced while reading from ip" +
-                        " stream of " + hp, LogManager.ERROR_MESSAGE_LEVEL);
-            alive = false;
-        }
-        catch (InterruptedIOException e) {
-            //timeout was reached.
-            mLogger.log("Timeout experienced while contacting " +
-                        hp, LogManager.ERROR_MESSAGE_LEVEL);
-            alive = false;
-        }
-        catch (ConnectException ce) {
-            //probably no process running at the port
-            mLogger.log("GridFtp server on " + host + " not running on port " +
-                        port + " .Exception " + ce.getMessage(),
-                        LogManager.ERROR_MESSAGE_LEVEL);
-            alive = false;
-        }
-        catch (IOException ie) {
-            mLogger.log("Unable to contact " + hp + " due to " + ie.getMessage(),
-                        LogManager.ERROR_MESSAGE_LEVEL);
-            alive = false;
-        }
-        catch(Exception e){
-            //an unknown exception occured. print a message and return false
-            mLogger.log("Unknown Exception occured " + e.getMessage(),
-                        LogManager.ERROR_MESSAGE_LEVEL);
-            alive = false;
-        }
-        finally{
-            try{
-                s.close();
-            }
-            catch(IOException e){
-                mLogger.log("Unable to close socket to " + hp + " because" +
-                            e.getMessage(),LogManager.ERROR_MESSAGE_LEVEL);
-                alive = false;
-            }
-        }
-
-
-        return alive;
-    }
-
-    /**
-     * The parses the grid ftp server response and returns if the response
-     * was valid or not.
-     *
-     * @param response the response got from the grid ftp server.
-     * @param type     c response when first connected to server.
-     *                 q response when sent the quit command.
-     *
-     * @return boolean true if the response was valid
-     *                 false invalid response.
-     */
-    private boolean parseGridFTPResponse(String response, char type) {
-        StringTokenizer st = new StringTokenizer(response);
-        boolean valid = false;
-
-        switch (type) {
-            case 'c':
-
-                //valid response should be of type 220 blah
-                while (st.hasMoreTokens()) {
-                    if (st.nextToken().equals("220")) {
-                        valid = true;
-                    }
-                    break;
-                }
-                break;
-
-            case 'q':
-
-                //valid response would be type 221 blah
-                while (st.hasMoreTokens()) {
-                    if (st.nextToken().equals("221")) {
-                        valid = true;
-                    }
-                    break;
-                }
-                break;
-
-            default:
-                valid = false;
-
-        }
-
-        if(valid == false)
-            mLogger.log(response,LogManager.ERROR_MESSAGE_LEVEL);
-        return valid;
-
-    }
-
-    /**
-     * A small helper method that returns the standard host and port
-     * combination to be used for logging purposes.
-     *
-     * @param host  the host.
-     * @param port  the port.
-     *
-     * @return combined string.
-     */
-    private String combine(String host, int port) {
-        String st = host + ":" + port;
-        return st;
-    }
-
-    /**
-     * Determines the hostname from the urlPrefix string in the pool file.
-     *
-     * @param urlPrefix  the protocol, hostname and port combination.
-     *
-     * @return the host name.
-     */
-    private HostPort getHostPort(String urlPrefix) {
-        StringTokenizer st = new StringTokenizer(urlPrefix);
-        String hostPort;
-        String hostName = new String();
-        String token = new String();
-        int count = 0;
-        int port = this.GRID_FTP_STANDARD_PORT;
-        HostPort hp = null;
-
-        while (st.hasMoreTokens()) {
-            token = st.nextToken("/");
-            count++;
-            if (count == 2) {
-                hostPort = token.trim();
-                StringTokenizer st1 = new StringTokenizer(hostPort,":");
-                hostName = st1.nextToken();
-                if(st1.hasMoreTokens()){
-                    //port is specified
-                    try{
-                        port = Integer.parseInt(st1.nextToken());
-                    }
-                    catch(NumberFormatException e){
-                        port = this.GRID_FTP_STANDARD_PORT;
-                    }
-                }
-                //System.out.println("Host->" + hostName + " Port->" + port);
-                hp = new HostPort(hostName,port);
-                //System.out.println(hp);
-                return hp;
-            }
-
-        }
-        return null;
-
-    }
-
-
-    /**
-     * A convenience inner class that stores the host and the port associated
-     * with a server.
-     */
-    class HostPort{
-
-        /**
-         * The host at which the server is running.
-         */
-        private String mHost;
-
-        /**
-         * The port at which the server is running.
-         */
-        private int mPort;
-
-        /**
-         * The overloaded constructor
-         */
-        public HostPort(String host, int port){
-            mHost = host;
-            mPort = port;
-        }
-
-        /**
-         * Returns the host associated with this object.
-         *
-         * @return String
-         */
-        public String getHost(){
-            return mHost;
-        }
-
-
-        /**
-         * Returns the port associated with this object.
-         *
-         * @return int
-         */
-        public int getPort(){
-            return mPort;
-        }
-
-        /**
-         * Returns the string version of this object.
-         */
-        public String toString(){
-            StringBuffer sb = new StringBuffer();
-            sb.append("host name ").append(mHost).
-                append(" port ").append(mPort);
-
-            return sb.toString();
-        }
-    }
-
-    public static void main(String[] args){
-        Authenticate a = new Authenticate( PegasusProperties.getInstance(), null );
-        String contact = "dc-user2.isi.edu/jobmanager-lsf";
-        String contact1 = "dc-n1.isi.edu";
-        System.out.println("Authenticating " + contact1);
-        //a.authenticateJobManager(contact);
-        a.gridFTPAlive("dc-n1.isi.edu",a.GRID_FTP_STANDARD_PORT);
-    }
-}
Index: pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/MainEngine.java
===================================================================
--- pegasus-wms_4.0.1+dfsg.orig/src/edu/isi/pegasus/planner/refiner/MainEngine.java	2012-05-24 16:47:52.064156782 -0700
+++ pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/MainEngine.java	2012-05-24 16:47:54.088166819 -0700
@@ -115,12 +115,6 @@
     private RemoveDirectory mRemoveEng;
 
     /**
-     * The handle to the Authentication Engine that performs the authentication
-     * with the various sites.
-     */
-    private AuthenticateEngine mAuthEng;
-
-    /**
      * The handle to the node collapser.
      */
     private NodeCollapser mNodeCollapser;
@@ -161,27 +155,7 @@
         
         //refinement process starting
         mOriginalDag.setWorkflowRefinementStarted( true );
-        
-        //do the authentication against the pools
-        if (mPOptions.authenticationSet()) {
-            mAuthEng = new AuthenticateEngine( mBag,
-                          new java.util.HashSet(mPOptions.getExecutionSites()));
-
-            mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_AUTHENTICATION, LoggingKeys.DAX_ID, mOriginalDag.getAbstractWorkflowName() );
-            Set authenticatedSet = mAuthEng.authenticate();
-            if (authenticatedSet.isEmpty()) {
-                StringBuffer error = new StringBuffer( );
-                error.append( "Unable to authenticate against any site. ").
-                      append( "Probably your credentials were not generated" ).
-                      append( " or have expired" );
-                throw new RuntimeException( error.toString() );
-            }
-            mLogger.log("Sites authenticated are " +
-                        setToString(authenticatedSet, ","),
-                        LogManager.DEBUG_MESSAGE_LEVEL);
-            mLogger.logEventCompletion();
-            mPOptions.setExecutionSites(authenticatedSet);
-        }
+
 
         String message = null;
         mRCBridge = new ReplicaCatalogBridge( mOriginalDag, mBag );
Index: pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/ThreadPool.java
===================================================================
--- pegasus-wms_4.0.1+dfsg.orig/src/edu/isi/pegasus/planner/refiner/ThreadPool.java	2012-05-24 16:47:52.064156782 -0700
+++ /dev/null	1970-01-01 00:00:00.000000000 +0000
@@ -1,484 +0,0 @@
-/**
- *  Copyright 2007-2008 University Of Southern California
- *
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-
-package edu.isi.pegasus.planner.refiner;
-
-import edu.isi.pegasus.common.logging.LogManagerFactory;
-import java.io.File;
-import java.io.FileInputStream;
-import java.util.LinkedList;
-import java.util.Set;
-
-import org.gridforum.jgss.ExtendedGSSCredential;
-import org.gridforum.jgss.ExtendedGSSManager;
-import edu.isi.pegasus.planner.classes.AuthenticateRequest;
-import edu.isi.pegasus.planner.classes.Profile;
-import edu.isi.pegasus.common.logging.LogManager;
-import edu.isi.pegasus.planner.common.PegasusProperties;
-import edu.isi.pegasus.planner.namespace.ENV;
-import edu.isi.pegasus.planner.catalog.site.impl.old.PoolInfoProvider;
-import edu.isi.pegasus.planner.catalog.site.impl.old.PoolMode;
-import org.ietf.jgss.GSSCredential;
-
-
-/**
- * This maintains a pool of authenticate threads that authenticate against a
- * particular resource.
- *
- * @author Karan Vahi
- * @version $Revision: 2582 $
- */
-
-public class ThreadPool {
-
-    /**
-     * The maximum number of authentication threads that are spawned.
-     */
-    public static final int NUMBER_OF_THREADS = 5;
-
-    /**
-     * The request queue that holds the authenticate requests. The worker
-     * threads do access this job queue.
-     */
-    private LinkedList mQueue;
-
-    /**
-     * The handle to the properties object.
-     */
-    private PegasusProperties mProps;
-
-    /**
-     * The handle to the Pool Info Provider.
-     */
-    private PoolInfoProvider mPoolHandle;
-
-    /**
-     * The handle to the LogManager object.
-     */
-    private LogManager mLogger;
-
-    /**
-     * The Set of pools that need to be authenticated against.
-     */
-    private Set mExecPools;
-
-    /**
-     * The number of pools that one has to authenticate against.
-     */
-    private Integer mNumOfPools;
-
-    /**
-     * The handle to the pool of threads that this thread pool is reponsible for.
-     */
-    private AuthenticateThread[] mWorkers;
-
-    /**
-     * The condition variable that is used to synchronize the shutdown.
-     */
-    private ConditionVariable mCurrentNum;
-
-    /**
-     * The namespace object holding the environment variables for local
-     * pool.
-     */
-    private ENV mLocalEnv;
-
-    /**
-     * The credential loaded from the non default location if specified.
-     */
-    private GSSCredential mCredential;
-
-    /**
-     * The overloaded constructor.
-     *
-     * @param properties  the <code>PegasusProperties</code> to be used.
-     * @param pools       the set of pools against which the user is authenticating.
-     */
-    public ThreadPool( PegasusProperties properties, Set pools ) {
-        mQueue      = new LinkedList();
-        mCurrentNum = new ConditionVariable();
-        mProps      = properties;
-        mLogger     =  LogManagerFactory.loadSingletonInstance( properties );
-        String poolClass = PoolMode.getImplementingClass(mProps.getPoolMode());
-        mPoolHandle = PoolMode.loadPoolInstance(poolClass,mProps.getPoolFile(),
-                                                PoolMode.SINGLETON_LOAD);
-        mExecPools  = pools;
-        mNumOfPools = new Integer(pools.size());
-
-        //load the local environment variables
-        mLocalEnv   = loadLocalEnvVariables();
-        //load the credential if the user has set the
-        //corresponding environment variable.
-        mCredential = (mLocalEnv.containsKey(ENV.X509_USER_PROXY_KEY))?
-                        //load the proxy from the path specified
-                        getGSSCredential((String)mLocalEnv.get(ENV.X509_USER_PROXY_KEY)):
-                        null;
-
-        if(mCredential == null){
-            //log message
-            mLogger.log("Proxy will be picked up from the default location in /tmp",
-                        LogManager.DEBUG_MESSAGE_LEVEL);
-        }
-
-        //intialise the worker threads
-        mWorkers = new AuthenticateThread[this.NUMBER_OF_THREADS];
-        for(int i = 0; i < NUMBER_OF_THREADS; i++){
-            mWorkers[i] = new AuthenticateThread(i);
-
-            //start the threads
-            mWorkers[i].start();
-        }
-    }
-
-
-    /**
-     * This method is called to ensure the clean shutdown of threads, and
-     * waits till all the requests have been serviced.
-     */
-    public void shutdown(){
-
-        //mNumOfPools is the CV on which you do a shutdowm
-        synchronized(mCurrentNum){
-
-            int numOfPools = mNumOfPools.intValue();
-            for (int i = 0; i < NUMBER_OF_THREADS; i++) {
-                //send the shutdown signal to the worker threads
-                mWorkers[i].shutdown();
-            }
-
-            //wake up all the threads on this
-            synchronized(mQueue){
-                //mLogger.logMessage("Manager sending notify to all");
-                mQueue.notifyAll();
-            }
-
-            while(mCurrentNum.getValue() < NUMBER_OF_THREADS){
-                try{
-                    mCurrentNum.wait();
-                }
-                catch(InterruptedException e){
-                    mLogger.log(
-                        "Manager got interrupted during shutdown" + e.getMessage(),
-                        LogManager.ERROR_MESSAGE_LEVEL);
-                }
-            }
-        }
-
-    }
-
-
-    /**
-     * Accepts an authentication request, that has to be serviced. It is added
-     * to queue of requests.
-     */
-    public void acceptRequest(Object request){
-
-        //see if any of the worker threads are available
-        /*for(int i = 0; i < NUMBER_OF_THREADS; i++){
-            if(mWorkers[i].isAvailable()){
-                //no need to add to queue.
-            }
-        }*/
-
-        synchronized(mQueue){
-            mQueue.addLast(request);
-            //send a notification to a worker thread
-            mQueue.notify();
-        }
-
-    }
-
-
-    /**
-     * Reads in the environment variables into memory from the properties file
-     * and the pool catalog.
-     *
-     * @return  the <code>ENV</code> namespace object holding the environment
-     *          variables.
-     */
-    private ENV loadLocalEnvVariables(){
-        //assumes that pool handle, and property handle are initialized.
-        ENV env = new ENV();
-
-        //load from the pool.config
-        env.checkKeyInNS(mPoolHandle.getPoolProfile("local",Profile.ENV));
-        //load from property file
-        env.checkKeyInNS(mProps.getLocalPoolEnvVar());
-
-        return env;
-    }
-
-    /**
-     * Loads a GSSCredential from the proxy file residing at the path specified.
-     *
-     * @param file the path to the proxy file.
-     *
-     * @return GSSCredential
-     *         null in case the file format is wrong, or file does not exist.
-     */
-    private GSSCredential getGSSCredential(String file){
-        File f = new File(file);
-        GSSCredential gcred = null;
-        //sanity check first
-        if(!f.exists()){
-            return null;
-        }
-
-        try{
-            byte[] data = new byte[ (int) f.length()];
-            FileInputStream in = new FileInputStream(f);
-            in.read(data);
-            in.close();
-
-            ExtendedGSSManager manager =
-                (ExtendedGSSManager) ExtendedGSSManager.getInstance();
-
-            gcred = manager.createCredential(data,
-                                             ExtendedGSSCredential.IMPEXP_OPAQUE,
-                                             GSSCredential.DEFAULT_LIFETIME,
-                                             null,
-                                             GSSCredential.INITIATE_AND_ACCEPT);
-            mLogger.log("Loaded the credential from proxy file " + file,
-                        LogManager.DEBUG_MESSAGE_LEVEL);
-
-        }
-        catch(Exception e){
-            mLogger.log(
-                "Unable to load proxy from file" + file  + " "  +
-                e.getMessage(),LogManager.ERROR_MESSAGE_LEVEL);
-        }
-        return gcred;
-    }
-
-    /**
-     * A thread as an inner class, that authenticates against one particular
-     * pool.
-     */
-    class AuthenticateThread implements Runnable{
-
-        /**
-         * The pool against which to authenticate.
-         */
-        private String mPool;
-
-        /**
-         * The thread object that is used to launch the thread.
-         */
-        private Thread mThread;
-
-        /**
-         * Whether the thread is available to do some work or not.
-         */
-        private boolean mAvailable;
-
-        /**
-         * Whether to shutdown or not.
-         */
-        private boolean mShutdown;
-
-        /**
-         * The unique identifying id of the thread.
-         */
-        private int mIndex;
-
-        /**
-         * The overloaded constructor.
-         *
-         *
-         */
-        public AuthenticateThread(int index){
-            mAvailable = true;
-            mShutdown = false;
-            mIndex = index;
-        }
-
-        /**
-         * The start method for the thread. It initialises the thread and calls
-         * it's start method.
-         */
-        public void start(){
-            mThread = new Thread(this);
-            mThread.start();
-        }
-
-
-        /**
-         * Returns whether a thread is available to do some work or not.
-         */
-        public boolean isAvailable(){
-            return mAvailable;
-        }
-
-        /**
-         * Sets the shutdown flag to true. This does not make the thread stop.
-         * The thread only stops when it's current request is serviced and the
-         * queue is empty.
-         */
-        public void shutdown(){
-            mShutdown = true;
-        }
-
-        /**
-         * Calls the corresponding join method of the thread associated with
-         * this class.
-         *
-         * @param millis   The time to wait in milliseconds.
-         */
-        public void join(long millis) throws InterruptedException{
-            mThread.join(millis);
-        }
-
-        /**
-         * The runnable method of the thread, that is called when the thread is
-         * started.
-         */
-        public void run(){
-            AuthenticateRequest ar;
-            Authenticate a = new Authenticate( mProps, mPoolHandle );
-            a.setCredential(mCredential);
-            boolean authenticated = false;
-
-            for(;;){
-                //remain in an infinite loop and wait for a request to be released
-                //from the queue.
-                ar = getAuthenticateRequest();
-                if(ar == null){
-                    //no more requests to service and the shutdown signal has
-                    //been received. send the notification to the manager and exit
-                    mLogger.log("Thread [" + mIndex +"] got shutdown signal",
-                                LogManager.DEBUG_MESSAGE_LEVEL);
-                    synchronized(mCurrentNum){
-                        mCurrentNum.increment();
-                        mCurrentNum.notify();
-                    }
-
-                    break;
-                }
-
-                //means worker is busy, processing a request.
-                mAvailable = false;
-                //do the processing.
-                authenticated = a.authenticate(ar);
-                mLogger.log("Thread [" + mIndex +"] Authentication of " + ar +
-                            " successful:" + authenticated,
-                            LogManager.DEBUG_MESSAGE_LEVEL);
-                if(!authenticated){
-                    //we need to remove
-                    boolean removal = a.removeResource(ar);
-                    mLogger.log("Thread [" + mIndex +"] Removal of resource" + ar +
-                                " successful:" + removal,LogManager.DEBUG_MESSAGE_LEVEL);
-                }
-                mAvailable = true;
-                //be nice and sleep
-                try{
-                    mThread.sleep(5);
-                }
-                catch (InterruptedException ex) {
-                    mLogger.log(
-                        "Authenticate Thread [" + mIndex +"] got interrupted while waiting",
-                        LogManager.DEBUG_MESSAGE_LEVEL);
-                    //go into sleep again
-                    continue;
-                }
-
-            }
-
-        }
-
-        /**
-         * Returns an authentication request to the worker thread.
-         *
-         * @return  the authentication request.
-         */
-        public AuthenticateRequest getAuthenticateRequest(){
-            synchronized(mQueue){
-
-                for(;;){
-                    if(mQueue.isEmpty() && mShutdown){
-                        //no more requests to service and the shutdown signal has
-                        //been received.
-                        return null;
-                    }
-                    else if (mQueue.isEmpty()) {
-                        //there is nothing in the queue so wait on it.
-                        try {
-                            mLogger.log("Thread [" + mIndex +"] going to wait",
-                                        LogManager.DEBUG_MESSAGE_LEVEL);
-                            mQueue.wait();
-                            //again check for empty queue and shutdown signal
-                            if(mQueue.isEmpty() && !mShutdown)
-                                //go back to the wait state to receive a new
-                                //request or a AR request
-                                continue;
-                        }
-                        catch (InterruptedException ex) {
-                            mLogger.log(
-                                "Authenticate Thread [" + mIndex +"] got interrupted while waiting " +
-                                ex.getMessage(),LogManager.ERROR_MESSAGE_LEVEL);
-                            //go into sleep again
-                            continue;
-                        }
-
-                    }
-                    return (mQueue.isEmpty() && mShutdown)?
-                           //indicates shutdown
-                           null:
-                           (AuthenticateRequest)mQueue.removeFirst();
-
-
-                }
-
-            }
-        }
-
-    }
-
-
-    /**
-     * A wrapper around an int that acts as a Condition Variable, and is used
-     * as such. In behaviour it is probably closer to a semaphore.
-     */
-    class ConditionVariable{
-
-        /**
-         * The int that is associated with this object.
-         */
-        private int value;
-
-        /**
-         * The default constructor.
-         */
-        public ConditionVariable(){
-            value = 0;
-        }
-
-        /**
-         * It increments the value by 1.
-         */
-        public void increment(){
-            value++;
-        }
-
-        /**
-         * Returns the value.
-         */
-        public int getValue(){
-            return value;
-        }
-    }
-
-}
Index: pegasus-wms_4.0.1+dfsg/src/org/globus/common/CoGProperties.java
===================================================================
--- /dev/null	1970-01-01 00:00:00.000000000 +0000
+++ pegasus-wms_4.0.1+dfsg/src/org/globus/common/CoGProperties.java	2012-05-24 16:49:26.696626021 -0700
@@ -0,0 +1,693 @@
+/*
+ * Copyright 1999-2006 University of Chicago
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.globus.common;
+
+import java.util.Properties;
+import java.util.Enumeration;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.FileInputStream;
+import java.io.OutputStream;
+import java.io.FileOutputStream;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+
+import org.globus.util.ConfigUtil;
+
+/** Responsible for managing the properties file 
+ * "~/.globus/cog.properties", which holds information about various properties
+ * needed by the security classes.  These properties include:
+ * <UL>
+ * <LI> the location of the user certificate file </LI>
+ * <LI> the location of the user key file </LI>
+ * <LI> the location of the CA certificates </LI>
+ * <LI> the location of the proxy file </LI>
+ * <LI> the tcp port range </LI>
+ * <LI> the local ip address for DHCP systems</LI>
+ * <LI> the socket timeout when connecting to a myproxy host </LI>
+ * </UL>
+ */
+public class CoGProperties extends Properties {
+
+    private static final String DEFAULT_RANDOM_PROVIDER =
+	"cryptix.jce.provider.CryptixRandom";
+    
+    private static final String DEFAULT_RANDOM_ALGORITHM = 
+	"DevRandom";
+
+    public static final String ENFORCE_SIGNING_POLICY = 
+        "java.security.gsi.signing.policy";
+
+    public static final String DELEGATION_KEY_CACHE_LIFETIME =
+        "org.globus.jglobus.delegation.cache.lifetime";
+    
+    public static final String CRL_CACHE_LIFETIME =
+        "org.globus.jglobus.crl.cache.lifetime";
+
+    public static final String MDSHOST = "localhost";
+    public static final String MDSPORT = "2135";
+    public static final String BASEDN  = "Mds-Vo-name=local, o=Grid";
+    
+    /** the configuration file properties are read from -- 
+     * located in ~/.globus" */
+    public static final String CONFIG_FILE = "cog.properties";
+    
+    /** the default properties file **/
+    private static CoGProperties defaultProps = null;
+    
+    /** the config file location **/
+    public static String configFile = null;
+    
+    public CoGProperties() {
+    }
+    
+    public CoGProperties(String file) 
+	throws IOException {
+	load(file);
+    }
+
+    public synchronized static CoGProperties getDefault() {
+	if (defaultProps != null) {
+	    return defaultProps;
+	}
+	
+	defaultProps = new CoGProperties();
+	
+	String file = System.getProperty("org.globus.config.file");
+	if (file == null) {
+	    file = ConfigUtil.globus_dir + CONFIG_FILE;
+	} else if (file.equalsIgnoreCase("none")) {
+	    return defaultProps;
+	}
+
+	configFile = file;
+
+	try {	
+	    defaultProps.load(configFile);
+	}
+	catch(Exception e) {}
+
+	return defaultProps;
+    }
+
+    /**
+     * Sets default configuration. It can be used
+     * to set a different configuration dynamically.
+     */
+    public static void setDefault(CoGProperties properties) {
+	defaultProps = properties;
+    }
+    
+    public void save()
+	throws IOException {
+	save(configFile);
+    }
+
+    public void save(String file) 
+	throws IOException {
+	OutputStream out = null;
+	try {
+	    out = new FileOutputStream(file);
+	    store(out, "Java CoG Kit Configuration File");
+	} finally {
+	    if (out != null) {
+		try { out.close(); } catch (Exception e) {}
+	    }
+	}
+    }
+    
+    public void load(String file) 
+	throws IOException {
+	FileInputStream in = null;
+	try {
+	    in = new FileInputStream(file);
+	    load(in);
+	} finally {
+	    if (in != null) {
+		try { in.close(); } catch(Exception e) {}
+	    }
+	}
+    }
+    
+    public void load(InputStream in) 
+	throws IOException {
+	super.load(in);
+	fixSpace(this);
+    }
+    
+    public static void fixSpace(Properties p) {
+	// this will get rid of the trailing spaces
+	String key, value;
+	Enumeration e = p.keys();
+	while(e.hasMoreElements()) {
+	    key   = e.nextElement().toString();
+	    value = p.getProperty(key);
+	    p.put(key, value.trim());
+	}
+    }
+
+    /**
+     * Retrieves the location of the user cert file. 
+     * It first checks the X509_USER_CERT system property. If the property
+     * is not set, it checks next the 'usercert' property in the current
+     * configuration. If that property is not set, it returns a default
+     * location of the user cert file. The default value
+     * is the 'usercert.pem' file in the user's globus directory. For example:
+     * ${user.home}/.globus/usercert.pem.
+     *
+     * @return <code>String</code> the location of the user cert file
+     */
+    public String getUserCertFile() {
+	String location;
+	location = System.getProperty("X509_USER_CERT");
+	if (location != null) {
+	    return location;
+	}
+	location = getProperty("usercert");
+	if (location != null) {
+	    return location;
+	}
+	return ConfigUtil.discoverUserCertLocation();
+    }
+
+    public void setUserCertFile(String userCertFile) {
+	put("usercert", userCertFile);
+    }
+
+    public String getPKCS11LibraryName() {
+	String lib;
+	lib = System.getProperty("PKCS11_LIB");
+	if (lib != null) {
+	    return lib;
+	}
+	lib = getProperty("pkcs11lib");
+	if (lib != null) {
+	    return lib;
+	}
+	return ConfigUtil.discoverPKCS11LibName();
+    }
+  
+    public String getDefaultPKCS11Handle() {
+	return getProperty("pkcs11.handle", "Globus User Credentials");
+    }
+
+    /**
+     * Retrieves the location of the user key file.  
+     * It first checks the X509_USER_KEY system property. If the property
+     * is not set, it checks next the 'userkey' property in the current
+     * configuration. If that property is not set, it returns a default
+     * location of the user key file. The default value
+     * is the 'userkey.pem' file in the user's globus directory. For example:
+     * ${user.home}/.globus/userkey.pem.
+     *
+     * @return <code>String</code> the location of the user key file
+     */
+    public String getUserKeyFile() {
+	String location;
+	location = System.getProperty("X509_USER_KEY");
+	if (location != null) {
+	    return location;
+	}
+	location = getProperty("userkey");
+	if (location != null) {
+	    return location;
+	}
+	return ConfigUtil.discoverUserKeyLocation();
+    }
+
+    /**
+     * Sets user key file location
+     * @param userKeyFile user key file location
+     */
+    public void setUserKeyFile(String userKeyFile) {
+	put("userkey", userKeyFile);
+    }
+    
+    /**
+     * Returns the user specified hostname. This is used
+     * for DHCP machines where java is unable to determine the
+     * right hostname/IP address.
+     * It first checks the 'GLOBUS_HOSTNAME' system property. If the property
+     * is not set, it checks the 'host' system property next. If the 'host' 
+     * property is not set in the current configuration, null is returned
+     * (and default 'localhost' hostname will be used)
+     *
+     * @return <code>String</code> the hostname of the machine.
+     */
+    public String getHostName() {
+	String value = System.getProperty("GLOBUS_HOSTNAME");
+	if (value != null) {
+	    return value;
+	}
+	return getProperty("hostname", null);
+    }
+
+    /**
+     * Sets hostname
+     * @param host hostname
+     */
+    public void setHostName(String host) {
+	put("hostname", host);
+    }
+
+    /**
+     * Returns the user specified ip address. This is used
+     * for DHCP machines where java is unable to determine the
+     * right IP address.
+     * It first checks the 'org.globus.ip' system property.
+     * If that property is not set, it checks next the 'ip' property 
+     * in the current configuration. If the 'ip' property is not set in the
+     * current configuration, the hostname of the machine is looked up
+     * using the {@link #getHostName() getHostName()} function. If 
+     * <code>getHostName()</code> returns a hostname that hostname is converted
+     * into an IP address and it is returned. Otherwise, null is returned
+     * (and default ip address will be used)
+     *
+     * @return <code>String</code> the ip address of the machine.
+     */
+    public String getIPAddress() {
+	String value = System.getProperty("org.globus.ip");
+	if (value != null) {
+	    return value;
+	}
+	value = getProperty("ip", null);
+	if (value != null) {
+	    return value;
+	}
+	value = getHostName();
+	if (value != null) {
+	    try {
+		return InetAddress.getByName(value).getHostAddress();
+	    } catch (UnknownHostException e) {
+		return null;
+	    }
+	}
+	return value;
+    }
+
+    /**
+     * Sets ip address
+     * @param ipAddress ip address
+     */
+    public void setIPAddress(String ipAddress) {
+	put("ip", ipAddress);
+    }
+    
+    /**
+     * @deprecated Use getCaCertLocations() instead.
+     *
+     * @see #getCaCertLocations() getCaCertLocations
+     *
+     * @return <code>String</code> the locations of the CA certificates
+     */
+    public String getCaCertFile() {
+	return getCaCertLocations();
+    }
+    
+    /**
+     * @deprecated Use getCaCertLocations() instead.
+     *
+     * @see #getCaCertLocations() getCaCertLocations
+     *
+     * @return <code>String</code> the locations of the CA certificates
+     */
+    public String getCaCerts() {
+	return getCaCertLocations();
+    }
+    
+    /**
+     * Retrieves the location of the CA certificate files.  
+     * It first checks the X509_CERT_DIR system property. If the property
+     * is not set, it checks next the 'cacert' property in the current
+     * configuration. If that property is not set, it tries to find
+     * the certificates using the following rules:<BR>
+     * First the ${user.home}/.globus/certificates directory is checked.
+     * If the directory does not exist, and on a Unix machine, the
+     * /etc/grid-security/certificates directory is checked next.
+     * If that directory does not exist and GLOBUS_LOCATION 
+     * system property is set then the ${GLOBUS_LOCATION}/share/certificates
+     * directory is checked. Otherwise, null is returned. 
+     * This indicates that the certificates directory could
+     * not be found.
+     * <BR>
+     * Moreover, this function can return multiple file and directory 
+     * locations. The locations must be comma separated.
+     *
+     * @return <code>String</code> the locations of the CA certificates
+     */
+    public String getCaCertLocations() {
+	String location;
+	location = System.getProperty("X509_CERT_DIR");
+	if (location != null) {
+	    return location;
+	}
+	location = getProperty("cacert");
+	if (location != null) {
+	    return location;
+	}
+	return ConfigUtil.discoverCertDirLocation();
+    }
+
+    public void setCaCertLocations(String list) {
+	put("cacert", list);
+    }
+
+    /**
+     * Retrieves the location of the proxy file. 
+     * It first checks the X509_USER_PROXY system property. If the property
+     * is not set, it checks next the 'proxy' property in the current
+     * configuration. If that property is not set, then it defaults to a 
+     * value based on the following rules: <BR>
+     * If a UID system property is set, and running on a Unix machine it 
+     * returns /tmp/x509up_u${UID}. If any other machine than Unix, it returns
+     * ${tempdir}/x509up_u${UID}, where tempdir is a platform-specific 
+     * temporary directory as indicated by the java.io.tmpdir system property. 
+     * If a UID system property is not set, the username will be used instead
+     * of the UID. That is, it returns ${tempdir}/x509up_u_${username}
+     * <BR>
+     * This is done this way because Java is not able to obtain the current 
+     * uid.
+     *
+     * @return <code>String</code> the location of the proxy file
+     */
+    public String getProxyFile() {
+	String location;
+	location = System.getProperty("X509_USER_PROXY");
+	if (location != null) {
+	    return location;
+	}
+	location = getProperty("proxy");
+	if (location != null) {
+	    return location;
+	}
+	return ConfigUtil.discoverProxyLocation();
+    }
+
+    public void setProxyFile(String proxyFile) {
+	put("proxy", proxyFile);
+    }
+    
+    /**
+     * Returns the tcp port range.
+     * It first checks the 'GLOBUS_TCP_PORT_RANGE' system property. If that 
+     * system property is not set then 'org.globus.tcp.port.range' system
+     * property is checked. If that system property is not set then it returns
+     * the value specified in the configuration file. Returns null if the port
+     * range is not defined.<BR>
+     * The port range is in the following form: <minport>, <maxport>
+     *
+     * @return <code>String</code> the port range. 
+     */
+    public String getTcpPortRange() {
+	String value = null;
+	value = System.getProperty("GLOBUS_TCP_PORT_RANGE");
+	if (value != null) {
+	    return value;
+	}
+	value = System.getProperty("org.globus.tcp.port.range");
+	if (value != null) {
+	    return value;
+	}
+	return getProperty("tcp.port.range", null);
+    }
+
+    /**
+     * Returns the tcp source port range.
+     * It first checks the 'GLOBUS_TCP_SOURCE_PORT_RANGE' system property. 
+     * If that system property is not set then 
+     * 'org.globus.tcp.source.port.range' system property is checked. 
+     * If that system property is not set then it returns
+     * the value specified in the configuration file. Returns null if the port
+     * range is not defined.<BR>
+     * The port range is in the following form: <minport>, <maxport>
+     *
+     * @return <code>String</code> the port range. 
+     */
+    public String getTcpSourcePortRange() {
+	String value = null;
+	value = System.getProperty("GLOBUS_TCP_SOURCE_PORT_RANGE");
+	if (value != null) {
+	    return value;
+	}
+	value = System.getProperty("org.globus.tcp.source.port.range");
+	if (value != null) {
+	    return value;
+	}
+	return getProperty("tcp.source.port.range", null);
+    }
+
+    /**
+     * Returns the udp source port range.
+     * It first checks the 'GLOBUS_UDP_SOURCE_PORT_RANGE' system property. 
+     * If that system property is not set then 
+     * 'org.globus.udp.source.port.range' system property is checked. 
+     * If that system property is not set then it returns
+     * the value specified in the configuration file. Returns null if the port
+     * range is not defined.<BR>
+     * The port range is in the following form: <minport>, <maxport>
+     *
+     * @return <code>String</code> the port range. 
+     */
+    public String getUdpSourcePortRange() {
+	String value = null;
+	value = System.getProperty("GLOBUS_UDP_SOURCE_PORT_RANGE");
+	if (value != null) {
+	    return value;
+	}
+	value = System.getProperty("org.globus.udp.source.port.range");
+	if (value != null) {
+	    return value;
+	}
+	return getProperty("udp.source.port.range", null);
+    }
+    
+    /**
+     * Returns whether to use the /dev/urandom device
+     * for seed generation.
+     *
+     * @return true if the device should be used (if available of course)
+     *         Returns true by default unless specified otherwise by the
+     *         user.
+     */
+    public boolean useDevRandom() {
+	String value = System.getProperty("org.globus.dev.random");
+	if (value != null && value.equalsIgnoreCase("no")) {
+	    return false;
+	}
+	return getAsBoolean("org.globus.dev.random", true);
+    }
+
+    public boolean enforceSigningPolicy() {
+
+	String value = System.getProperty(ENFORCE_SIGNING_POLICY);
+	if ((value != null) && (value.equalsIgnoreCase("no")
+                              || (value.equalsIgnoreCase("false")))) {
+	    return false;
+	}
+	return getAsBoolean(ENFORCE_SIGNING_POLICY, true);
+    }
+
+    /**
+     * Returns the delegation key cache lifetime for all delegations from this
+     * JVM. If this property is not set or set to zero or less, no caching is done. The
+     * value is the number of milliseconds the key/pair is cached.
+     * @return
+     */
+    public int getDelegationKeyCacheLifetime() {
+
+        int valueInt = 0;
+
+        String valueStr = System.getProperty(DELEGATION_KEY_CACHE_LIFETIME);
+
+        if (valueStr != null && valueStr.length() > 0) {
+            int parsedvalueInt = Integer.parseInt(valueStr);
+            if (parsedvalueInt > 0) {
+                valueInt = parsedvalueInt;
+            }
+        }
+
+        if (valueInt == -1) { // Didn't find a system property
+            valueStr = getProperty(DELEGATION_KEY_CACHE_LIFETIME);
+            if (valueStr != null && valueStr.length() > 0) {
+                int parsedvalueInt = Integer.parseInt(valueStr);
+                if (parsedvalueInt > 0) {
+                    valueInt = parsedvalueInt;
+                }
+            }
+        }
+        return valueInt;
+    }
+
+
+    /**
+     * Returns the CRL cache lifetime. If this property is not set or
+     * set to zero or less, no caching is done. The value is the
+     * number of milliseconds the CRLs are cached without checking for
+     * modifications on disk.
+     *
+     * @throws NumberFormatException if the cache lifetime property
+     *         could not be parsed
+     * @return the CRL cache lifetime in milliseconds
+     */
+    public long getCRLCacheLifetime()
+        throws NumberFormatException {
+
+        long value = 0;
+
+        String property = getProperty(CRL_CACHE_LIFETIME);
+        if (property != null && property.length() > 0) {
+            long parsedValue  = Long.parseLong(property);
+            if (parsedValue > 0) {
+                value = parsedValue;
+            }
+        }
+
+        // System property takes precedence
+        property = System.getProperty(CRL_CACHE_LIFETIME);
+        if (property != null && property.length() > 0) {
+            long parsedValue = Long.parseLong(property);
+            if (parsedValue > 0) {
+                value = parsedValue;
+            }
+        }
+
+        return value;
+    }
+
+    public String getSecureRandomProvider() {
+	String value = System.getProperty("org.globus.random.provider");
+	if (value != null) {
+	    return value;
+	}
+	return getProperty("random.provider", 
+			   DEFAULT_RANDOM_PROVIDER);
+    }
+
+    public String getSecureRandomAlgorithm() {
+	String value = System.getProperty("org.globus.random.algorithm");
+	if (value != null) {
+	    return value;
+	}
+	return getProperty("random.algorithm", 
+			   DEFAULT_RANDOM_ALGORITHM);
+    }
+
+    /**
+     * Returns the timeout (in seconds) for creating a new socket connection
+     * to a MyProxy host.  The socket timeout property can be set either as
+     * the Java system property "MYPROXY_SOCKET_TIMEOUT" (i.e. via the '-D'
+     * command line option or environment variable) or via the
+     * "sockettimeout" property in the cog.properties file.  If no such
+     * property is found, the default timeout of 10 seconds is returned.
+     *
+     * @return The timeout for creating a socket connection to a MyProxy
+     *         host. Defaults to 10 seconds.
+     */
+    public int getSocketTimeout() {
+        int timeoutInt = -1;  // -1 indicates it hasn't been set yet
+        String timeoutStr = System.getProperty("MYPROXY_SOCKET_TIMEOUT");
+        if (timeoutStr != null && timeoutStr.length() > 0) {
+            int parsedTimeoutInt = Integer.parseInt(timeoutStr);
+            if (parsedTimeoutInt >= 0) {
+                timeoutInt = parsedTimeoutInt;
+            }
+        }
+        if (timeoutInt == -1) { // Didn't find a system property
+            timeoutStr = getProperty("sockettimeout");
+            if (timeoutStr != null && timeoutStr.length() > 0) {
+                int parsedTimeoutInt = Integer.parseInt(timeoutStr);
+                if (parsedTimeoutInt >= 0) {
+                    timeoutInt = parsedTimeoutInt;
+                }
+            }
+        }
+        if (timeoutInt == -1) { // Didn't find any property at all
+            timeoutInt = 10;
+        }
+        return timeoutInt;
+    }
+
+    public void setSocketTimeout(int socketTimeout) {
+        put("sockettimeout", String.valueOf(socketTimeout));
+    }
+
+
+    // -------------------------------------------------------
+    
+    public int getProxyStrength() {
+	return getAsInt("proxy.strength", 512);
+    }
+    
+    public void setProxyStrength(int strength) {
+	put("proxy.strength", String.valueOf(strength));
+    }
+    
+    public int getProxyLifeTime() {
+	return getAsInt("proxy.lifetime", 12);
+    }
+    
+    public void setProxyLifeTime(int lifeTimeInHours) {
+	put("proxy.lifetime", String.valueOf(lifeTimeInHours));
+    }
+    
+    // --------------------------------------------------------
+
+    // --- Most of these functions are deprecated ---
+
+    public String getRootMDSHost() {
+	return getProperty("mds.root.host", MDSHOST);
+    }
+    
+    public String getRootMDSPort() {
+	return getProperty("mds.root.port", MDSPORT);
+    }
+    
+    public String getRootMDSBaseDN() {
+	return getProperty("mds.root.basedn", BASEDN);
+    }
+    
+    public String getOrgMDSHost() {
+	return getProperty("mds.org.host", MDSHOST);
+    }
+    
+    public String getOrgMDSPort() {
+	return getProperty("mds.org.port", MDSPORT);
+    }
+    
+    public String getOrgMDSBaseDN() {
+	return getProperty("mds.org.basedn", BASEDN);
+    }
+    
+    // ----------------------------------------------------
+    
+    protected boolean getAsBoolean(String key, boolean defaultValue) {
+	String tmp = getProperty(key);
+	if (tmp == null) {
+	    return defaultValue;
+	}
+	return (tmp.equalsIgnoreCase("yes") || tmp.equalsIgnoreCase("true"));
+    }
+    
+    protected int getAsInt(String label, int defValue) {
+	String tmp = getProperty(label);
+	return (isNullOrEmpty(tmp)) ? defValue : Integer.parseInt(tmp);
+    }
+    
+    protected final static boolean isNullOrEmpty(String tmp) {
+	return (tmp == null || (tmp != null && tmp.length() == 0));
+    }
+    
+}
Index: pegasus-wms_4.0.1+dfsg/src/org/globus/util/ConfigUtil.java
===================================================================
--- /dev/null	1970-01-01 00:00:00.000000000 +0000
+++ pegasus-wms_4.0.1+dfsg/src/org/globus/util/ConfigUtil.java	2012-05-24 16:47:54.092166841 -0700
@@ -0,0 +1,259 @@
+/*
+ * Copyright 1999-2006 University of Chicago
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.globus.util;
+
+import java.io.IOException;
+import java.io.File;
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+
+public class ConfigUtil {
+
+    public static final int UNDEFINED_OS = -1;
+    public static final int WINDOWS_OS   = 0;
+    public static final int UNIX_OS      = 1;
+    public static final int MAC_OS       = 2;
+    public static final int OTHER_OS     = 3;
+  
+    private static int osType = UNDEFINED_OS;
+
+    private static final String PROXY_NAME = "x509up_u";
+
+    private static final String SOLARIS_ID_EXEC =
+        "/usr/xpg4/bin/id";
+        
+    public static String globus_dir = null;
+    
+    static {
+        globus_dir = System.getProperty("user.home") + 
+            File.separator + 
+            ".globus" +
+            File.separator;
+    }
+    
+    /** Returns default PKCS11 library name */
+    public static String discoverPKCS11LibName() {
+        return "dspkcs"; // use the ibutton library as the default for now
+    }
+    
+    /** Returns default location of user cert file */
+    public static String discoverUserCertLocation() {
+        String location = null;
+        location = globus_dir + "usercert.pem";
+        return location;
+    }
+    
+    /** Returns default location of user key file */
+    public static String discoverUserKeyLocation() {
+        String location = null;
+        location = globus_dir + "userkey.pem";
+        return location;
+    }
+    
+    /**
+     * Tries to discover user proxy location.
+     * If a UID system property is set, and running on a Unix machine it
+     * returns /tmp/x509up_u${UID}. If any other machine than Unix, it returns
+     * ${tempdir}/x509up_u${UID}, where tempdir is a platform-specific
+     * temporary directory as indicated by the java.io.tmpdir system property.
+     * If a UID system property is not set, the username will be used instead
+     * of the UID. That is, it returns ${tempdir}/x509up_u_${username}
+     */
+    public static String discoverProxyLocation() {
+
+        String dir = null;
+
+        if (getOS() == UNIX_OS) {
+            dir = "/tmp/";
+        } else {
+            String tmpDir = System.getProperty("java.io.tmpdir");
+            dir = (tmpDir == null) ? globus_dir : tmpDir;
+        }
+        
+        String uid = System.getProperty("UID");
+
+        if (uid != null) {
+            return getLocation(dir, PROXY_NAME + uid);
+        } else if (getOS() == UNIX_OS) {
+            try {
+                return getLocation(dir, PROXY_NAME + getUID());
+            } catch (IOException e) {
+            }
+        }
+        
+        /* If all else fails use username */
+        String suffix = System.getProperty("user.name");
+        if (suffix != null) {
+            suffix = suffix.toLowerCase();
+        } else {
+            suffix = "nousername";
+        }
+
+        return getLocation(dir, PROXY_NAME + "_" + suffix);
+    }
+
+    private static String getLocation(String dir, String file) {
+        File f = new File(dir, file);
+        return f.getAbsolutePath();
+    }
+
+    /**
+     * Returns the user id. The user id is obtained by executing 'id -u'
+     * external program. 
+     * <BR><BR><B>Note: </B><I>
+     * Under some circumstances, this function executes an external program; 
+     * thus, its behavior is influenced by environment variables such as the
+     * caller's PATH and the environment variables that control dynamic 
+     * loading.  Care should be used if calling this function from a program 
+     * that will be run as a Unix setuid program, or in any other manner in
+     * which the owner of the Unix process does not completely control its
+     * runtime environment. 
+     * </I>
+     *
+     * @throws IOException if unable to determine the user id.
+     * @return the user id
+     */
+    public static String getUID() throws IOException {
+        String exec = "id";
+        String osname = System.getProperty("os.name");
+        if (osname != null) {
+            osname = osname.toLowerCase();
+            if ((osname.indexOf("solaris") != -1) ||
+                (osname.indexOf("sunos") != -1)) {
+                if ((new File(SOLARIS_ID_EXEC).exists())) {
+                    exec = SOLARIS_ID_EXEC;
+                }
+            } else if (osname.indexOf("windows") != -1) {
+                throw new IOException("Unable to determine the user id");
+            }
+        }
+
+        Runtime runTime = Runtime.getRuntime();
+        Process process = null;
+        BufferedReader buffInReader = null;
+        String s = null;
+        StringBuffer output = new StringBuffer();
+        int exitValue = -1;
+
+        try {
+            process = runTime.exec(exec + " -u");
+            buffInReader = new BufferedReader
+                ( new InputStreamReader(process.getInputStream()) ); 
+            while ((s = buffInReader.readLine()) != null) {
+                output.append(s);
+            }
+            exitValue = process.waitFor();
+        } catch (Exception e) {
+            throw new IOException("Unable to execute 'id -u'");
+        } finally {
+            if (buffInReader != null) {
+                try { 
+                    buffInReader.close();
+                } catch (IOException e) {}
+            }
+            if (process != null) {
+                try { 
+                    process.getErrorStream().close(); 
+                } catch (IOException e) {}
+                try { 
+                    process.getOutputStream().close(); 
+                } catch (IOException e) {}
+            }
+        }
+        if (exitValue != 0) {
+            throw new IOException("Unable to perform 'id -u'");
+        }
+        return output.toString().trim();
+    } 
+
+    /**
+     * Discovers location of CA certificates directory.
+     * First the ${user.home}/.globus/certificates directory is checked.
+     * If the directory does not exist, and on a Unix machine, the
+     * /etc/grid-security/certificates directory is checked next.
+     * If that directory does not exist and GLOBUS_LOCATION 
+     * system property is set then the ${GLOBUS_LOCATION}/share/certificates
+     * directory is checked. Otherwise, null is returned. 
+     * This indicates that the certificates directory could
+     * not be found.
+     */
+    public static String discoverCertDirLocation() {
+        String location = null;
+    
+        location = getDir(globus_dir + "certificates");
+        if (location != null) return location;
+        
+        if (getOS() == UNIX_OS) {
+            location = getDir( "/etc/grid-security/certificates");
+            if (location != null) return location;
+        }
+
+        String suffix = File.separator + "share" + File.separator + 
+            "certificates";
+            
+        location = getDir(System.getProperty("GLOBUS_LOCATION") +
+                          suffix);
+        if (location != null) return location;
+        
+        return null;
+    }
+
+  
+    public static int getOS() {
+        if (osType != UNDEFINED_OS) {
+            return osType;
+        }
+
+        String osname = System.getProperty("os.name");
+        if (osname != null) {
+            osname = osname.toLowerCase();
+            if (osname.indexOf("windows") != -1) {
+                osType = WINDOWS_OS;
+            } else if ( (osname.indexOf("solaris") != -1) ||
+                        (osname.indexOf("sunos") != -1) ||
+                        (osname.indexOf("linux") != -1) ||
+                        (osname.indexOf("aix") != -1) ||
+                        (osname.indexOf("hp-ux") != -1) ||
+                        (osname.indexOf("compaq's digital unix") != -1) ||
+                        (osname.indexOf("osf1") != -1) ||
+                        (osname.indexOf("mac os x") != -1) ||
+                        (osname.indexOf("netbsd") != -1) ||
+                        (osname.indexOf("freebsd") != -1) ||
+                        (osname.indexOf("irix") != -1) ) {
+                osType = UNIX_OS;
+            } else if (osname.indexOf("mac") != -1) {
+                osType = MAC_OS;
+            } else {
+                osType = OTHER_OS;
+            }
+        } else {
+            osType = OTHER_OS;
+        }
+    
+        return osType;
+    }
+
+    private static String getDir(String directory) {
+        if (directory == null) return null;
+        File f = new File(directory);
+        if (f.isDirectory() && f.canRead()) {
+            return f.getAbsolutePath();
+        } else {
+            return null;
+        }
+    }
+    
+}
Index: pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/NXDInvSchema.java
===================================================================
--- pegasus-wms_4.0.1+dfsg.orig/src/org/griphyn/vdl/dbschema/NXDInvSchema.java	2012-05-24 16:47:52.064156782 -0700
+++ /dev/null	1970-01-01 00:00:00.000000000 +0000
@@ -1,204 +0,0 @@
-/*
- * This file or a portion of this file is licensed under the terms of
- * the Globus Toolkit Public License, found in file ../GTPL, or at
- * http://www.globus.org/toolkit/download/license.html. This notice must
- * appear in redistributions of this file, with or without modification.
- *
- * Redistributions of this Software, with or without modification, must
- * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
- * some other similar material which is provided with the Software (if
- * any).
- *
- * Copyright 1999-2004 University of Chicago and The University of
- * Southern California. All rights reserved.
- */
-package org.griphyn.vdl.dbschema;
-
-import edu.isi.pegasus.planner.invocation.InvocationRecord;
-import java.sql.*;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Properties;
-import java.io.*;
-import java.lang.reflect.*;
-import java.net.InetAddress;
-
-import org.xmldb.api.base.*;
-import org.xmldb.api.modules.*;
-import org.xmldb.api.*;
-import javax.xml.transform.OutputKeys;
-import javax.xml.parsers.DocumentBuilder; 
-import javax.xml.parsers.DocumentBuilderFactory; 
-import javax.xml.parsers.ParserConfigurationException;
-
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-import org.w3c.dom.DOMException; 
-import org.xml.sax.SAXException; 
-import org.xml.sax.SAXParseException; 
-import org.xml.sax.InputSource;
-
-import org.griphyn.vdl.util.ChimeraProperties;
-import org.griphyn.vdl.util.Logging;
-import org.griphyn.vdl.parser.*;
-
-import edu.isi.pegasus.common.util.Separator;
-import org.xml.sax.InputSource;
-/**
- * This class provides basic functionalities to interact with the
- * backend database for invocation records, such as insertion, deletion,
- * and search.
- *
- * @author Jens-S. Vöckler
- * @author Yong Zhao
- * @version $Revision: 2587 $
- */
-public class NXDInvSchema extends DatabaseSchema 
-  implements PTC
-{
-  private DocumentBuilderFactory m_factory;
-
-  private DocumentBuilder m_builder;
-
-  protected Collection m_db;
-
-  protected Collection m_ptc;
-
-  protected CollectionManagementService m_dbColService; 
-
-  protected CollectionManagementService m_ptcColService; 
-
-  protected XPathQueryService m_dbQrySvc;
-
-  protected XPathQueryService m_ptcQrySvc;
-
-  /**
-   * Default constructor for the provenance tracking.
-   *
-   * @param dbDriverName is the database driver name
-   */
-  public NXDInvSchema( String dbDriverName ) 
-    throws ClassNotFoundException, 
-	   NoSuchMethodException, InstantiationException, 
-	   IllegalAccessException, InvocationTargetException,
-	   SQLException, IOException, ParserConfigurationException
-  {
-    // load the driver from the properties
-    super(); // call minimalistic c'tor, no driver loading!
-    ChimeraProperties props = ChimeraProperties.instance();
-
-    m_dbschemaprops =
-        props.getDatabaseSchemaProperties( PROPERTY_PREFIX );
-
-    // extract those properties specific to the database driver.
-    // use default settings.
-    String driverPrefix = null;
-    String driverName = props.getDatabaseDriverName(driverPrefix);
-    Properties driverprops = props.getDatabaseDriverProperties(driverPrefix);
-    String url = props.getDatabaseURL(driverPrefix);
-
-    try {
-	m_factory = DocumentBuilderFactory.newInstance();
-	m_builder = m_factory.newDocumentBuilder();
-
-
-	Class cl = Class.forName(driverName);
-	Database database = (Database) cl.newInstance();
-	DatabaseManager.registerDatabase(database);
-
-	// get the collection
-	m_db = DatabaseManager.getCollection(url + "/db");
-	m_dbColService = (CollectionManagementService)
-	    m_db.getService("CollectionManagementService", "1.0");
-	
-	m_ptc = m_db.getChildCollection("ptc");
-
-	if(m_ptc == null) {
-	    // collection does not exist, create
-	    m_ptc = m_dbColService.createCollection("ptc");
-	}
-	m_ptc.setProperty(OutputKeys.INDENT, "no");
-	
-	m_ptcColService = (CollectionManagementService)
-	    m_ptc.getService("CollectionManagementService", "1.0");
-
-        m_dbQrySvc = (XPathQueryService) m_db.getService("XPathQueryService", "1.0");
-
-        m_ptcQrySvc = (XPathQueryService) m_ptc.getService("XPathQueryService", "1.0");
-
-        m_dbQrySvc.setProperty("indent", "no");
-
-        m_ptcQrySvc.setProperty("indent", "no");
-    } catch (XMLDBException e) {
-	throw new SQLException (e.getMessage());
-    } 
-  }
-
-  /**
-   * Checks the existence of an invocation record in the database.
-   * The information is based on the (start,host,pid) tuple, although
-   * with private networks, cases may arise that have this tuple
-   * identical, yet are different. 
-   *
-   * @param start is the start time of the grid launcher
-   * @param host is the address of the host it ran upon
-   * @param pid is the process id of the grid launcher itself. 
-   * @return the id of the existing record, or -1 
-   */
-  public long
-    getInvocationID( java.util.Date start, InetAddress host, int pid )
-    throws SQLException
-  {
-    long result = -1;
-    Logging.instance().log("xaction", 1, "START select invocation id" );
-    
-    String xquery = "/invocation[@start='" + start + "']";
-    xquery += "[@host='" + host.getHostAddress() + "']";
-    xquery += "[@pid=" + pid + "]"; 
-
-    try {
-      Logging.instance().log( "nxd", 2, xquery );
-      ResourceSet rs = m_dbQrySvc.query(xquery);
-      ResourceIterator i = rs.getIterator();
-      if (i.hasMoreResources()) {
-        result = 1;
-      } else {
-        result = -1;
-      }
-    } catch (XMLDBException e) {
-	throw new SQLException (e.getMessage());
-    } 
-
-    Logging.instance().log("xaction", 1, "FINAL select invocation id" );
-    return result;
-  }
-
-  /**
-   * Inserts an invocation record into the database.
-   *
-   * @param ivr is the invocation record to store.
-   * @return true, if insertion was successful, false otherwise. 
-   */
-  public boolean
-    saveInvocation( InvocationRecord ivr )
-      throws SQLException
-  {
-    try {
-      StringWriter sw = new StringWriter();
-      
-      ivr.toXML(sw, "", null);
-      // create new XMLResource; an id will be assigned to the new resource
-      XMLResource document = (XMLResource)m_ptc.createResource(null, "XMLResource");
-      document.setContent(sw.toString());
-      System.out.println(sw.toString());
-      m_ptc.storeResource(document);
-      return true;
-    } catch (Exception e) {
-	throw new SQLException (e.getMessage());
-    } 
-  }
-}
-
-
-
-
Index: pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/NXDSchema.java
===================================================================
--- pegasus-wms_4.0.1+dfsg.orig/src/org/griphyn/vdl/dbschema/NXDSchema.java	2012-05-24 16:47:52.064156782 -0700
+++ /dev/null	1970-01-01 00:00:00.000000000 +0000
@@ -1,1656 +0,0 @@
-/*
- * This file or a portion of this file is licensed under the terms of
- * the Globus Toolkit Public License, found in file ../GTPL, or at
- * http://www.globus.org/toolkit/download/license.html. This notice must
- * appear in redistributions of this file, with or without modification.
- *
- * Redistributions of this Software, with or without modification, must
- * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
- * some other similar material which is provided with the Software (if
- * any).
- *
- * Copyright 1999-2004 University of Chicago and The University of
- * Southern California. All rights reserved.
- */
-package org.griphyn.vdl.dbschema;
-
-import java.sql.*;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Properties;
-import java.io.*;
-import java.lang.reflect.*;
-
-import org.xmldb.api.base.*;
-import org.xmldb.api.modules.*;
-import org.xmldb.api.*;
-import javax.xml.transform.OutputKeys;
-import javax.xml.parsers.DocumentBuilder; 
-import javax.xml.parsers.DocumentBuilderFactory; 
-import javax.xml.parsers.ParserConfigurationException;
-
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-import org.w3c.dom.DOMException; 
-import org.xml.sax.SAXException; 
-import org.xml.sax.SAXParseException; 
-import org.xml.sax.InputSource;
-
-import org.griphyn.vdl.util.ChimeraProperties;
-import org.griphyn.vdl.classes.*;
-import org.griphyn.vdl.util.Logging;
-import org.griphyn.vdl.parser.*;
-import org.griphyn.vdl.router.Cache;
-import org.griphyn.vdl.annotation.*;
-import edu.isi.pegasus.common.util.Separator;
-import org.xml.sax.InputSource;
-
-/**
- * This class provides basic functionalities to interact with the
- * backend database, such as insertion, deletion, and search of
- * entities in the VDC.
- *
- * @author Jens-S. Vöckler
- * @author Yong Zhao
- * @version $Revision: 2079 $ 
- */
-public class NXDSchema extends DatabaseSchema implements XDC
-{
-  /**
-   * An instance of the VDLx XML parser.
-   */
-  private org.griphyn.vdl.parser.VDLxParser m_parser;
-
-  private DocumentBuilderFactory m_factory;
-
-  private DocumentBuilder m_builder;
-
-  /**
-   * reference to collection '/db/vdc';
-   */
-  protected Collection m_db;
-
-  protected Collection m_vdc;
-
-  protected Collection m_meta;
-  
-  protected CollectionManagementService m_dbColService; 
-
-  protected CollectionManagementService m_vdcColService; 
-
-  protected XPathQueryService m_dbQrySvc;
-
-  protected XPathQueryService m_vdcQrySvc;
-
-  protected XPathQueryService m_metaQrySvc;
-
-  protected XUpdateQueryService m_xupdQrySvc;
-
-  /**
-   * A cache for definitions to avoid reloading from the database.
-   */
-  protected Cache m_cache;  
-
-  /**
-   * Instantiates an XML parser for VDLx on demand. Since XML parsing
-   * and parser instantiation is an expensive business, the reader will
-   * only be generated on demand.
-   *
-   * @return a valid VDLx parser instance. 
-   */
-  private org.griphyn.vdl.parser.VDLxParser parserInstance()
-  {
-    if ( this.m_parser == null ) {
-      // obtain the schema location URL from the schema properties: 
-      // url is a list of strings representing schema locations. The
-      // content exists in pairs, one of the namespace URI, one of the
-      // location URL.
-      String url = null;
-      try {	
-        ChimeraProperties props = ChimeraProperties.instance();
-        url = 
-	  m_dbschemaprops.getProperty( "xml.url", props.getVDLSchemaLocation() );
-      } catch (IOException e) {
-	Logging.instance().log("nxd", 0, "ignored " + e);  
-      }
-      this.m_parser = new org.griphyn.vdl.parser.VDLxParser(url);
-    }
-
-    // done
-    return this.m_parser;
-  }
-
-  /**
-   * Default constructor for the NXD schema.
-   */
-  public NXDSchema(String dbDriver) 
-    throws ClassNotFoundException, 
-	   NoSuchMethodException, InstantiationException, 
-	   IllegalAccessException, InvocationTargetException,
-	   SQLException, IOException,ParserConfigurationException
-  {
-    // load the driver from the properties
-    super(); // call minimalistic c'tor, no driver loading!
-    ChimeraProperties props = ChimeraProperties.instance();
-
-    m_dbschemaprops =
-        props.getDatabaseSchemaProperties( PROPERTY_PREFIX );
-
-    m_cache = null;
-    m_parser = null;
-
-    // extract those properties specific to the database driver.
-    // use default settings.
-    String driverPrefix = null;
-    String driverName = props.getDatabaseDriverName(driverPrefix);
-    Properties driverprops = props.getDatabaseDriverProperties(driverPrefix);
-    String url = props.getDatabaseURL(driverPrefix);
-    String user = driverprops.getProperty("user", "guest");
-    String passwd = driverprops.getProperty("password", "guest");
-
-    try {
-	m_factory = DocumentBuilderFactory.newInstance();
-	m_builder = m_factory.newDocumentBuilder();
-
-
-	Class cl = Class.forName(driverName);
-	Database database = (Database) cl.newInstance();
-	DatabaseManager.registerDatabase(database);
-
-	// get the collection
-	m_db = DatabaseManager.getCollection(url + "/db", user, passwd);
-	m_dbColService = (CollectionManagementService)
-	    m_db.getService("CollectionManagementService", "1.0");
-	
-	m_vdc = m_db.getChildCollection("vdc");
-
-	if(m_vdc == null) {
-	    // collection does not exist, create
-	    m_vdc = m_dbColService.createCollection("vdc");
-	}
-	m_vdc.setProperty(OutputKeys.INDENT, "no");
-	
-	m_meta = m_db.getChildCollection("metadata");
-
-	if(m_meta == null) {
-	    // collection does not exist, create
-	    m_meta = m_dbColService.createCollection("metadata");
-	}
-	m_meta.setProperty(OutputKeys.INDENT, "no");
-	
-	m_vdcColService = (CollectionManagementService)
-	    m_vdc.getService("CollectionManagementService", "1.0");
-
-        m_dbQrySvc = (XPathQueryService) m_db.getService("XPathQueryService", "1.0");
-
-        m_vdcQrySvc = (XPathQueryService) m_vdc.getService("XPathQueryService", "1.0");
-
-        m_metaQrySvc = (XPathQueryService) m_meta.getService("XPathQueryService", "1.0");
-
-        m_dbQrySvc.setProperty("indent", "no");
-
-        m_vdcQrySvc.setProperty("indent", "no");
-
-        m_metaQrySvc.setProperty("indent", "no");
-
-	XUpdateQueryService m_xupdQrySvc = (XUpdateQueryService) m_meta.getService("XUpdateQueryService", "1.0");
-	
-    } catch (XMLDBException e) {
-	throw new SQLException (e.getMessage());
-    } 
-  }
-
-    private String getDefinitionId(Definition def) {
-	String prefix = (def.getType() == Definition.TRANSFORMATION)?"TR_":"DV_";
-	String version = def.getVersion();
-	String suffix = "";
-	if (version != null)
-	    suffix = "_" + version;
-	return prefix + def.getName() + suffix;
-    }
-
-    private String getDefinitionId(String name,
-				   String version,
-				   int type) {
-	String prefix = (type == Definition.TRANSFORMATION)?"TR_":"DV_";
-	String suffix = "";
-	if (version != null)
-	    suffix = "_" + version;
-	return prefix + name + suffix;
-    }
-
-  //
-  // lower level methods, working directly on specific definitions
-  //
-
-  /**
-   * Loads a single Definition from the backend database into an Java object.
-   * This method does not allow wildcarding!
-   *
-   * @param namespace   namespace, null will be converted into empty string
-   * @param name        name, null will be converted into empty string
-   * @param version     version, null will be converted into empty string
-   * @param type     type of the definition (TR or DV), must not be -1.
-   * @return the Definition as specified, or null if not found.
-   *
-   * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION
-   * @see org.griphyn.vdl.classes.Definition#DERIVATION
-   * @see #saveDefinition( Definition, boolean )
-   */
-  public Definition 
-    loadDefinition( String namespace,
-		    String name,
-		    String version,
-		    int type )
-    throws SQLException
-  {
-    Definition result = null;
-    Logging.instance().log("xaction", 1, "START load definition" );
-
-    try {
-	Collection col;
-	if (namespace != null) 
-	    col = m_vdc.getChildCollection(namespace);
-	else 
-	    col = m_vdc;
-	String id = getDefinitionId(name, version, type);
-	//Logging.instance().log( "nxd", 0, "Definition id " + id );
-	XMLResource res = (XMLResource)col.getResource(id);
-	if(res != null) {
-	    MyCallbackHandler cb = new MyCallbackHandler();
-	    
-	    parserInstance().parse( new org.xml.sax.InputSource(new StringReader((String)res.getContent())), cb );
-	    result = cb.getDefinition();
-
-	} else {
-	    Logging.instance().log( "nxd", 0, "Definition not found" );
-	}
-    } catch (Exception e) {
-	throw new SQLException(e.getMessage());
-    }
-    Logging.instance().log("xaction", 1, "FINAL load definition" );
-    return result;
-  }
-
-  /**
-   * Saves a Definition, that is either a Transformation or Derivation,
-   * into the backend database. This method, of course, does not allow
-   * wildcarding. The definition has to be completely specified and
-   * valid.
-   *
-   * @param definition is the new Definition to store.
-   * @param overwrite true, if existing defitions will be overwritten by
-   * new ones with the same primary (or secondary) key (-set), or false,
-   * if a new definition will be rejected on key matches.
-   *
-   * @return true, if the backend database was changed, or
-   *         false, if the definition was not accepted into the backend.
-   *
-   * @see org.griphyn.vdl.classes.Definition
-   * @see org.griphyn.vdl.classes.Transformation
-   * @see org.griphyn.vdl.classes.Derivation
-   * @see #loadDefinition( String, String, String, int )
-   */
-  public boolean 
-    saveDefinition( Definition definition, 
-		    boolean overwrite )
-    throws SQLException
-  {
-      Logging.instance().log( "nxd", 2, "START save definition");
-
-    try {
-	String namespace = definition.getNamespace();
-	Collection col;
-	if (namespace != null)
-	    col = m_vdc.getChildCollection(namespace);
-	else
-	    col = m_vdc;
-	String id = getDefinitionId(definition);
-
-	if (col == null) {
-	  // collection does not exist, create
-	  col = m_vdcColService.createCollection(namespace);
-        } else if (!overwrite) {
-	  if (col.getResource(id) != null){
-	    Logging.instance().log( "app", 0, definition.shortID() +
-				    " already exists, ignoring" );
-	    return false;
-	  }
-	}
-
-        // create new XMLResource; an id will be assigned to the new resource
-        XMLResource document = (XMLResource)col.createResource(id, "XMLResource");
-        document.setContent(definition.toXML("", null));
-        col.storeResource(document);
-
-	// add to cache
-	//if ( m_cache != null ) m_cache.set( new Long(id), definition );
-
-    } catch (Exception e) {
-	throw new SQLException(e.getMessage());
-    }
-    // done
-    Logging.instance().log( "nxd", 2, "FINAL save definition");
-    return true;
-  }
-
-  /**
-   * Search the database for the existence of a definition.
-   *
-   * @param definition the definition object to search for
-   * @return true, if the definition exists, false if not found
-   */
-  public boolean containsDefinition( Definition definition )
-    throws SQLException 
-  {
-    boolean result = false;
-    try {
-	String namespace = definition.getNamespace();
-	Collection col;
-	if (namespace != null)
-	    col = m_vdc.getChildCollection(namespace);
-	else
-	    col = m_vdc;
-	String id = getDefinitionId(definition);
-
-	if(col != null) {
-	    if (col.getResource(id) != null){
-		result = true;
-	    }
-	}
-    } catch ( Exception e ) {
-	throw new SQLException(e.getMessage());
-    }
-    return result;
-  }
-
-  /**
-   * Delete a specific Definition objects from the database. No wildcard
-   * matching will be done. "Fake" definitions are permissable, meaning
-   * it just has the secondary key triple.
-   *
-   * @param definition is the definition specification to delete
-   * @return true is something was deleted, false if non existent.
-   *
-   * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION
-   * @see org.griphyn.vdl.classes.Definition#DERIVATION 
-   */
-  public boolean deleteDefinition( Definition definition )
-    throws SQLException 
-  {
-    boolean result = false;
-
-    Logging.instance().log("xaction", 1, "START delete definition" );
-
-    try {
-	String namespace = definition.getNamespace();
-	Collection col;
-	if (namespace != null)
-	    col = m_vdc.getChildCollection(namespace);
-	else
-	    col = m_vdc;
-	String id = getDefinitionId(definition);
-
-	if(col != null) {
-	    XMLResource res = (XMLResource)col.getResource(id);
-	    if (res != null){
-		col.removeResource(res);
-		result = true;
-	    }
-	}
-    } catch ( Exception e ) {
-      // ignore
-    }
-
-    Logging.instance().log("xaction", 1, "FINAL delete definition" );
-    return result;
-  }
-
-  /**
-   * Delete Definition objects from the database. This method allows for
-   * wildcards in the usual fashion. Use null for strings as wildcards,
-   * and -1 for the type wildcard.
-   *
-   * @param namespace   namespace, null to match any namespace
-   * @param name        name, null to match any name
-   * @param version     version, null to match any version
-   * @param type        definition type (TR or DV)
-   * @return            a list containing all Definitions that were deleted
-   *
-   * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION
-   * @see org.griphyn.vdl.classes.Definition#DERIVATION 
-   */
-  public java.util.List deleteDefinition( String namespace,
-					  String name,
-					  String version,
-					  int type )
-    throws SQLException 
-  {
-    Logging.instance().log("xaction", 1, "START delete definitions" );
-
-    java.util.List result = searchDefinition(namespace, name, version, type);
-    for (int i=0; i < result.size(); i++)
-	deleteDefinition((Definition)result.get(i));
-
-    Logging.instance().log("xaction", 1, "FINAL delete definitions" );
-    return result;
-  }
-
-  /**
-   * Search the database for definitions by ns::name:version triple
-   * and by type (either Transformation or Derivation). This version
-   * of the search allows for jokers expressed as null value
-   *
-   * @param namespace   namespace, null to match any namespace
-   * @param name        name, null to match any name
-   * @param version     version, null to match any version
-   * @param type        type of definition (TR/DV, or both)
-   * @return            a list of definitions
-   *
-   * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION
-   * @see org.griphyn.vdl.classes.Definition#DERIVATION
-   */
-  public java.util.List searchDefinition( String namespace,
-					  String name,
-					  String version,
-					  int type )
-    throws SQLException 
-  {
-    String xquery = ""; //"declare namespace vdl='http://www.griphyn.org/chimera/VDL';"
-
-    String triple = "";
-    if (namespace != null)
-	triple += "[@namespace='" + namespace + "']";
-
-    if (name != null)
-	triple += "[@name='" + name + "']";
-
-    if (version != null)
-	triple += "[@version='" + version + "']";
-
-    if (type != -1) {
-	if (type == Definition.TRANSFORMATION)
-	    xquery = "//transformation" + triple;
-	else
-	    xquery += "//derivation" + triple;
-    } else
-	xquery = "//derivation" + triple + "|//transformation" + triple;
-
-    return searchDefinition(xquery);
-  }
-
-  /**
-   * Searches the database for all derivations that satisfies a certain query.
-   *
-   * @param xquery    the query statement
-   * @return  a list of definitions
-   */
-  public java.util.List searchDefinition( String xquery )
-    throws SQLException 
-  {
-    if ( xquery == null )
-      throw new NullPointerException("You must specify a query!");
-
-    java.util.List result = new ArrayList();
-
-    try {
-	Logging.instance().log("xaction", 1, "query: " + xquery );
-
-	ResourceSet rs = m_dbQrySvc.query(xquery);
-	ResourceIterator i = rs.getIterator();
-	while(i.hasMoreResources()) {
-	    Resource res = i.nextResource();
-	    
-	    MyCallbackHandler cb = new MyCallbackHandler();
-	    
-	    parserInstance().parse( new org.xml.sax.InputSource(new StringReader((String)res.getContent())), cb );
-	    result.add(cb.getDefinition());
-	}
-
-    } catch ( Exception e ) {
-	throw new SQLException(e.getMessage());
-    }
-
-    return result;
-  }
-
-  /**
-   * Searches the database for elements that satisfies a certain query.
-   *
-   * @param xquery    the query statement
-   * @return  a list of string
-   */
-  public java.util.List searchElements( String xquery )
-    throws SQLException 
-  {
-    if ( xquery == null )
-      throw new NullPointerException("You must specify a query!");
-
-    java.util.List result = new ArrayList();
-
-    try {
-	Logging.instance().log("nxd", 1, "query: " + xquery );
-
-	ResourceSet rs = m_dbQrySvc.query(xquery);
-	ResourceIterator i = rs.getIterator();
-	while(i.hasMoreResources()) {
-	    Resource res = i.nextResource();
-	    result.add((String)res.getContent());
-	}
-    } catch ( Exception e ) {
-	throw new SQLException(e.getMessage());
-    }
-    return result;
-  }
-
-  /**
-   * Searches the database for annotations that satisfies a certain query.
-   *
-   * @param xquery    the query statement
-   * @return true if found, false otherwise 
-   */
-  public XMLResource findAnnotation( String xquery )
-    throws SQLException 
-  {
-    if ( xquery == null )
-	return null;
-
-    try {
-	Logging.instance().log("nxd", 1, "query: " + xquery );
-
-	ResourceSet rs = m_metaQrySvc.query(xquery);
-	ResourceIterator i = rs.getIterator();
-	if (i.hasMoreResources()) {
-	    XMLResource res = (XMLResource)i.nextResource();
-	    return res;
-	} else 
-	    return null;
-    } catch ( Exception e ) {
-	throw new SQLException(e.getMessage());
-    }
-  }
-
-  public java.util.List searchFilename( String lfn, int link )
-    throws SQLException 
-  {
-    if ( lfn == null )
-      throw new NullPointerException("You must query for a filename");
-
-    String linkQuery = "";
-    String type = LFN.toString(link);
-    if (type != null)
-	linkQuery = "[@link = '" + type + "']";
-
-    String xquery = 
-	"//derivation[.//lfn[@file = '" + lfn + "']" + linkQuery + "]";
-    java.util.List result = searchDefinition(xquery);
-
-    Logging.instance().log("xaction", 1, "FINAL select LFNs" );
-    return result;
-  }
-
-
-  /**
-   * Delete one or more definitions from the backend database. The key
-   * triple parameters may be wildcards. Wildcards are expressed as
-   * <code>null</code> value, or have regular expression.
-   *
-   * @param namespace   namespace
-   * @param name        name
-   * @param version     version
-   * @param type        definition type (TR or DV)
-   * @return            a list of definitions that were deleted.
-   *
-   * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION
-   * @see org.griphyn.vdl.classes.Definition#DERIVATION 
-   */
-  public java.util.List 
-    deleteDefinitionEx( String namespace,
-		      String name,
-		      String version,
-		      int type )
-    throws SQLException {
-    Logging.instance().log("xaction", 1, "START delete definitions ex" );
-
-    java.util.List result = searchDefinitionEx(namespace, name, version, type);
-    for (int i=0; i < result.size(); i++)
-	deleteDefinition((Definition)result.get(i));
-
-    Logging.instance().log("xaction", 1, "FINAL delete definitions ex" );
-    return result;
-  }
-
-  /**
-   * Searches the database for definitions by ns::name:version triple
-   * and by type (either Transformation or Derivation). This version of
-   * the search allows for jokers expressed as null value
-   *
-   * @param namespace   namespace, null to match any namespace
-   * @param name        name, null to match any name
-   * @param version     version, null to match any version
-   * @param type        type of definition, see below, or -1 as wildcard
-   * @return            a list of Definition items, which may be empty
-   *
-   * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION
-   * @see org.griphyn.vdl.classes.Definition#DERIVATION
-   * @see #loadDefinition( String, String, String, int )
-   */
-  public java.util.List 
-    searchDefinitionEx( String namespace,
-		      String name,
-		      String version,
-		      int type )
-    throws SQLException {
-    String xquery = ""; 
-
-    String triple = "";
-    if (namespace != null)
-	triple += "[matches(@namespace, '" + namespace + "')]";
-
-    if (name != null)
-	triple += "[matches(@name, '" + name + "')]";
-
-    if (version != null)
-	triple += "[matches(@version, '" + version + "')]";
-
-    if (type != -1) {
-	if (type == Definition.TRANSFORMATION)
-	    xquery = "//transformation" + triple;
-	else
-	    xquery += "//derivation" + triple;
-    } else
-	xquery = "//derivation" + triple + "|//transformation" + triple;
-    return searchDefinition(xquery);
-  }
-
-  /**
-   * Searches the database for all LFNs that match a certain pattern.
-   * The linkage is an additional constraint. This method allows
-   * regular expression
-   *
-   * @param lfn    the LFN name
-   * @param link   the linkage type of the LFN
-   * @return       a list of filenames that match the criterion.
-   *
-   * @see org.griphyn.vdl.classes.LFN#NONE
-   * @see org.griphyn.vdl.classes.LFN#INPUT
-   * @see org.griphyn.vdl.classes.LFN#OUTPUT
-   * @see org.griphyn.vdl.classes.LFN#INOUT
-   */
-  public java.util.List 
-    searchLFN( String lfn, int link )
-    throws SQLException {
-    if ( lfn == null )
-      throw new NullPointerException("You must query for a filename");
-
-    String linkQuery = "";
-    String type = LFN.toString(link);
-    if (type != null)
-	linkQuery = "[@link = '" + type + "')]";
-
-    String xquery = 
-	//	"//lfn[matches(@file, '" + lfn + "')]" + linkQuery + "/@file";
-	"//lfn" + linkQuery + "/@file[matches(.,  '" + lfn + "')]";
-    java.util.List result = searchElements(xquery);
-
-    Logging.instance().log("xaction", 1, "FINAL select LFNs" );
-    return result;
-  }
-
-  /**
-   * Searches the database for a list of namespaces of the definitions
-   * Sorted in ascending order.
-   *
-   * @param type   type of definition, see below, or -1 for both
-   * @return       a list of namespaces 
-   *
-   * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION
-   * @see org.griphyn.vdl.classes.Definition#DERIVATION
-   */
-  public java.util.List 
-    getNamespaceList( int type )
-    throws SQLException {
-
-    String xquery = "";
-    if (type == Definition.TRANSFORMATION)
-	xquery = "for $n in distinct-values(//transformation/@namespace) order by $n return $n"; 
-    else if (type == Definition.DERIVATION)
-	xquery = "for $n in distinct-values(//derivation/@namespace) order by $n return $n";
-    else
-       xquery = "for $n in distinct-values(//derivation/@namespace|//transformation/@namespace) order by $n return $n";
-
-    java.util.List result = searchElements(xquery);
-
-    Logging.instance().log("xaction", 1, "FINAL select LFNs" );
-    return result;
-  }
-
-  /**
-   * Searches the database for a list of fully-qualified names of 
-   * the definitions sorted in ascending order.
-   *
-   * @param type   type of definition, see below, or -1 for both.
-   * @return       a list of FQDNs 
-   *
-   * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION
-   * @see org.griphyn.vdl.classes.Definition#DERIVATION
-   */
-  public java.util.List 
-    getFQDNList( int type )
-    throws SQLException {
-    String xquery = "";
-
-    if (type == Definition.TRANSFORMATION)
-	xquery = "for $d in //transformation order by $d/@namespace empty least, $d/@name, $d/@version return string-join((string-join(($d/@namespace, $d/@name), '::'), $d/@version), ':')";
-    else if (type == Definition.DERIVATION)
-	xquery = "for $d in //derivation order by $d/@namespace empty least, $d/@name, $d/@version return string-join((string-join(($d/@namespace, $d/@name), '::'), $d/@version), ':')";
-    else
-	xquery = "for $d in (//transformation|//derivation) order by $d/@namespace empty least, $d/@name, $d/@version return string-join((string-join(($d/@namespace, $d/@name), '::'), $d/@version), ':')";
-
-    java.util.List result = searchElements(xquery);
-
-    Logging.instance().log("xaction", 1, "FINAL select LFNs" );
-    return result;
-  }
-
-  /**
-   * Deletes an annotation with the specified key.
-   *
-   * @param primary is the primary object specifier for the class. 
-   * According to the type, this is either the FQDI, or the filename.
-   * @param secondary is a helper argument for annotations to calls
-   * and formal arguments, and should be null for all other classes.
-   * For calls, the argument must be packed into {@link java.lang.Integer}.
-   * @param kind defines the kind/class of object to annotate.
-   * @param key is the annotation key.
-   * @return true, if the database was modified, false otherwise.
-   * @exception SQLException, if something went wrong during database
-   * access.
-   */
-  public boolean deleteAnnotation( String primary, Object secondary,
-				   int kind, String key )
-    throws SQLException, IllegalArgumentException
-  {
-    String subject = "";
-    String select = null;
-
-    switch ( kind ) {
-    case CLASS_TRANSFORMATION:
-      subject = "tr";
-      break;
-    case CLASS_DERIVATION:
-      subject = "dv";
-      break;
-    case CLASS_CALL:
-      // may throw ClassCastException
-      subject = "tr";	
-      select = "call[" +  ((Integer) secondary).intValue() + "]";
-      break;
-    case CLASS_DECLARE:
-      subject = "tr";
-      // may throw ClassCastException
-      //select = "declare[@name='" + (String)secondary + "']";
-      select = (String)secondary;
-      break;
-    case CLASS_FILENAME:
-      subject = "lfn";
-      break;
-    default:
-      throw new IllegalArgumentException( "The class kind=" + kind + " cannot be annotated" );
-    }
-
-    try {
-    XMLResource res = null;
-    String xquery = "/annotation/metadata[@subject=\"" + subject + "\"][@name=\"" +
-	primary + "\"]";
-    if (select == null) {
-	if (kind != CLASS_FILENAME) {
-	    xquery += "[empty(@select)]";
-	}
-    } else {
-	xquery += "[@select=\"" + select + "\"]";
-    }
-
-    xquery +=  "/attribute[@name=\"" + key + "\"]";
-
-    if ((res = findAnnotation(xquery)) != null) {
-	String id = res.getDocumentId();
-
-	// get the document
-	XMLResource document = (XMLResource)m_meta.getResource(id);
-	m_meta.removeResource(document);
-	return true;
-    }
-    return false;
-    } catch (XMLDBException e) {
-      throw new SQLException(e.getMessage());
-    }
-  }
-
-  /**
-   * Deletes a specific key in an annotated transformation.
-   *
-   * @param fqdi is the FQDI of the transformation
-   * @param key is the key to search for
-   * @return true, if the database was modified, false otherwise.
-   * @see org.griphyn.vdl.classes.Transformation
-   */
-  public boolean deleteAnnotationTransformation( String fqdi, String key )
-    throws SQLException, IllegalArgumentException
-  {
-    return deleteAnnotation(fqdi, null, CLASS_TRANSFORMATION, key);
-  }
-
-  /**
-   * Deletes a specific key in an annotated derivation.
-   *
-   * @param fqdi is the FQDI of the derivation
-   * @param key is the key to search for
-   * @return true, if the database was modified, false otherwise.
-   * @see org.griphyn.vdl.classes.Derivation
-   */
-  public boolean deleteAnnotationDerivation( String fqdi, String key )
-    throws SQLException, IllegalArgumentException
-  {
-    return deleteAnnotation(fqdi, null, CLASS_DERIVATION, key);
-  }   
-
-  /**
-   * Deletes a specific key in an annotated formal argument.
-   *
-   * @param fqdi is the FQDI of the transformation
-   * @param farg is the name of the formal argument
-   * @param key is the key to search for
-   * @return true, if the database was modified, false otherwise.
-   * @see org.griphyn.vdl.classes.Declare
-   */
-  public boolean deleteAnnotationDeclare( String fqdi, String farg,
-				      String key )
-    throws SQLException, IllegalArgumentException
-  {
-    return deleteAnnotation(fqdi, farg, CLASS_DECLARE, key);
-  }
-
-  /**
-   * Deletes a specific key for a call statement.
-   *
-   * @param fqdi is the FQDI of the transformation
-   * @param index is the number of the call to annotate.
-   * @param key is the key to search for
-   * @return true, if the database was modified, false otherwise.
-   * @see org.griphyn.vdl.classes.Call
-   */
-  public boolean deleteAnnotationCall( String fqdi, int index, 
-				   String key )
-    throws SQLException, IllegalArgumentException
-  {
-    return deleteAnnotation(fqdi, new Integer(index), CLASS_CALL, key);
-  }
-
-  /**
-   * Deletes a specific key in an annotated filename.
-   *
-   * @param filename is the name of the file that was annotated.
-   * @param key is the key to search for
-   * @return true, if the database was modified, false otherwise.
-   * @see org.griphyn.vdl.classes.LFN
-   */
-  public boolean deleteAnnotationFilename( String filename, String key )
-    throws SQLException, IllegalArgumentException
-  {
-    return deleteAnnotation(filename, null, CLASS_FILENAME, key);
-  }
-
-  /**
-   * Annotates a transformation with a tuple.
-   *
-   * @param fqdi is the FQDI to annotate
-   * @param annotation is the value to place
-   * @param overwrite is a predicate on replace or maintain.
-   * @return the insertion id, or -1, if the database was untouched
-   * @see org.griphyn.vdl.classes.Transformation
-   */
-  public long saveAnnotationTransformation( String fqdi, 
-					    Tuple annotation, 
-					    boolean overwrite )
-    throws SQLException, IllegalArgumentException
-  {
-    /* 
-    try {
-     String key = annotation.getKey();
-     String type = annotation.getTypeString();
-     Object value = annotation.getValue();
-
-     Logging.instance().log( "nxd", 2, "INSERT INTO anno_tr" );
-
-     String id = null;
-     XMLResource res = null;
-
-     String xupdate = 
-	 "<xu:modifications version=\"1.0\" " +
-	 "xmlns:xu=\"http://www.xmldb.org/xupdate\">";
-     String xquery = "//annotation/metadata[@subject='tr'][@name='" +
-	  fqdi + "']";
-     if ((res = findAnnotation(xquery)) != null) {
-	 //annotation for tr exists
-	 id = res.getDocumentId();
-	 String xquery_attr = xquery + "/attribute[@name='" + key + "']/text()";
-	 if (findAnnotation(xquery_attr)!=null) {
-	     //attribute already exists
-	     Logging.instance().log( "nxd", 2, "Attribute already exists." );
-
-	     if (!overwrite) 
-		 return -1;
-	     xupdate += "<xu:update select=\"" + xquery + "\">" +
-		 value + "</xu:update>";
-	     //xupdate += "<xu:update select=\"" + xquery_attr + "/@type\">" + 
-	     //type + "</xu:update>";
-	 } else {
-	     //attribute does not exist
-	     Logging.instance().log( "nxd", 2, "Attribute does not exist." );
-
-	     xupdate += "<xu:append select=\"" + xquery + "\">" +
-		 "<xu:element name=\"attribute\">" +
-		 "<xu:attribute name=\"name\">" + key + "</xu:attribute>" +
-		 "<xu:attribute name=\"type\">" + type + "</xu:attribute>" +
-		 value + "</xu:element>" +
-		 "</xu:append>";
-	 }
-	 xupdate += "</xu:modifications>";
-	 System.out.println(xupdate);
-	 long l = m_xupdQrySvc.update(xupdate);
-     } else {
-	 //create the annotation
-	 String anno = "<annotation><metadata subject=\"tr\" name=\"" + fqdi + "\">" +
-	     "<attribute name=\"" + key + "\" type=\"" + type + "\">" + value + "</attribute>" +
-	     "</metadata></annotation>";
-
-	 // create new XMLResource; an id will be assigned to the new resource
-	 XMLResource document = (XMLResource)m_meta.createResource(null, "XMLResource");
-	 document.setContent(anno);
-	 m_meta.storeResource(document);
-     }
-     return 0; 
-    } catch (XMLDBException e) {
-	throw new SQLException(e.getMessage());
-    }
-    */
-    return saveAnnotation(fqdi, null, CLASS_TRANSFORMATION, annotation, overwrite);
-  }
-
-  /**
-   * Annotates a derivation with a tuple.
-   *
-   * @param fqdi is the FQDI to annotate
-   * @param annotation is the value to place
-   * @param overwrite is a predicate on replace or maintain.
-   * @return the insertion id, or -1, if the database was untouched
-   * @see org.griphyn.vdl.classes.Derivation
-   */
-  public long saveAnnotationDerivation( String fqdi, 
-					Tuple annotation, 
-					boolean overwrite )
-    throws SQLException, IllegalArgumentException
-  {
-    return saveAnnotation(fqdi, null, CLASS_DERIVATION, annotation, overwrite);
-  }
-
-  /**
-   * Annotates a transformation argument with a tuple.
-   *
-   * @param fqdi is the FQDI to annotate
-   * @param formalname is the name of the formal argument to annotoate.
-   * @param annotation is the value to place
-   * @param overwrite is a predicate on replace or maintain.
-   * @return the insertion id, or -1, if the database was untouched
-   * @see org.griphyn.vdl.classes.Declare
-   */
-  public long saveAnnotationDeclare( String fqdi, 
-				     String formalname,
-				     Tuple annotation, 
-				     boolean overwrite )
-    throws SQLException, IllegalArgumentException
-  {
-    return saveAnnotation(fqdi, formalname, CLASS_DECLARE, annotation, overwrite);
-  }
-
-  /**
-   * Annotates a transformation call with a tuple.
-   *
-   * @param fqdi is the FQDI to annotate
-   * @param index is the number of the call to annotate.
-   * @param annotation is the value to place
-   * @param overwrite is a predicate on replace or maintain.
-   * @return the insertion id, or -1, if the database was untouched
-   * @see org.griphyn.vdl.classes.Call
-   */
-  public long saveAnnotationCall( String fqdi, 
-				  int index,
-				  Tuple annotation, 
-				  boolean overwrite )
-    throws SQLException, IllegalArgumentException
-  {
-    return saveAnnotation(fqdi, new Integer(index), CLASS_CALL, annotation, overwrite);
-  }
-
-  /**
-   * Annotates a logical filename with a tuple.
-   *
-   * @param filename is the FQDI to annotate
-   * @param annotation is the value to place
-   * @param overwrite is a predicate on replace or maintain.
-   * @return the insertion id, or -1, if the database was untouched
-   * @see org.griphyn.vdl.classes.LFN
-   */
-  public long saveAnnotationFilename( String filename, 
-				      Tuple annotation, 
-				      boolean overwrite )
-    throws SQLException, IllegalArgumentException
-  {
-    return saveAnnotation(filename, null, CLASS_FILENAME, annotation, overwrite);
-  }
-
-  /**
-   * Annotates any of the annotatable classes with the specified tuple.
-   * This is an interface method to the various class-specific methods.
-   *
-   * @param primary is the primary object specifier for the class. 
-   * According to the type, this is either the FQDI, or the filename.
-   * @param secondary is a helper argument for annotations to calls
-   * and formal arguments, and should be null for all other classes.
-   * For calls, the argument must be packed into {@link java.lang.Integer}.
-   * @param kind defines the kind/class of object to annotate.
-   * @param annotation is the value to place into the class.
-   * @param overwrite is a predicate on replace or maintain.
-   * @return the insertion id, or -1, if the database was untouched
-   * @see #saveAnnotationTransformation( String, Tuple, boolean )
-   * @see #saveAnnotationDerivation( String, Tuple, boolean )
-   * @see #saveAnnotationCall( String, int, Tuple, boolean )
-   * @see #saveAnnotationDeclare( String, String, Tuple, boolean )
-   * @see #saveAnnotationFilename( String, Tuple, boolean )
-   */
-  public long saveAnnotation( String primary, Object secondary,
-			      int kind, Tuple annotation, boolean overwrite )
-    throws SQLException, IllegalArgumentException
-  {
-    long result = -1;
-    String subject = "";
-    String select = null;
-    String q_sec = null;
-    String defn = "transformation";
-
-    switch ( kind ) {
-    case CLASS_TRANSFORMATION:
-      subject = "tr";
-      break;
-    case CLASS_DERIVATION:
-      subject = "dv";
-      defn = "derivation";
-      break;
-    case CLASS_CALL:
-      // may throw ClassCastException
-      subject = "tr";	
-      select = "call[" +  ((Integer) secondary).intValue() + "]";
-      q_sec = select;
-      break;
-    case CLASS_DECLARE:
-      subject = "tr";
-      // may throw ClassCastException
-      q_sec = "declare[@name='" + (String)secondary + "']";
-      select = (String)secondary;
-      break;
-    case CLASS_FILENAME:
-      subject = "lfn";
-      break;
-    default:
-      throw new IllegalArgumentException( "The class kind=" + kind + " cannot be annotated" );
-    }
-
-    try {
-
-      if (kind != CLASS_FILENAME) {
-	String[] names = Separator.split(primary);
-	String q_ns, q_name, q_ver;
-
-	if (names[0] == null)
-	  q_ns = "[empty(@namespace)]";
-	else
-	  q_ns = "[@namespace='" + names[0] + "']";
-
-	if (names[1] == null)
-	  q_name = "[empty(@name)]";
-	else
-	  q_name = "[@name='" + names[1] + "']";
-
-	if (names[2] == null)
-	  q_ver = "[empty(@version)]";
-	else
-	  q_ver = "[@version='" + names[2] + "']";
-
-	//check if tr/dv is valid
-	String xquery = "//" + defn + q_ns + q_name + q_ver;
-	if (q_sec != null )
-	  xquery += "/" + q_sec;
-	Logging.instance().log("nxd", 0, "query: " + xquery);
-	ResourceSet rs = m_vdcQrySvc.query(xquery);
-	ResourceIterator i = rs.getIterator();
-	if (!i.hasMoreResources()) {
-	  Logging.instance().log("app", 0, "definition not found!");
-	  return -1;
-	}
-      }
-
-      String key = annotation.getKey();
-      String type = annotation.getTypeString();
-      Object value = annotation.getValue();
-    
-      String id = null;
-      XMLResource res = null;
-      String xquery = "/annotation/metadata[@subject=\"" + subject + "\"][@name=\"" +
-	primary + "\"]";
-      if (select == null) {
-	if (kind != CLASS_FILENAME) {
-	  xquery += "[empty(@select)]";
-	}
-      } else {
-	xquery += "[@select=\"" + select + "\"]";
-      }
-
-      xquery +=  "/attribute[@name=\"" + key + "\"]";
-	    
-      if ((res = findAnnotation(xquery)) != null) {
-	if (!overwrite) {
-	  System.err.println("key " + key + " already defined!");
-	  return -1;
-	}
-	id = res.getDocumentId();
-      }
-	    
-      //create the annotation
-      String anno = "<annotation xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:xs=\"http://www.w3.org/2001/XMLSchema\"><metadata subject=\"" + subject + "\" name=\"" + primary + "\"";
-      if (select !=null)
-	anno += " select=\"" + select + "\"";
-      anno += ">" + "<attribute name=\"" + key + "\" xsi:type=\"xs:" + type + "\">" + value + "</attribute>" +
-	"</metadata></annotation>";
-	
-      // create new XMLResource; an id will be assigned to the new resource
-      XMLResource document = (XMLResource)m_meta.createResource(id, "XMLResource");
-	
-      document.setContent(anno);
-      m_meta.storeResource(document);
-      
-      return 0;
-    } catch (XMLDBException e) {
-      throw new SQLException(e.getMessage());
-    }
-  }
-
-  /**
-   * Obtains the value to a specific key in an annotated transformation.
-   *
-   * @param fqdi is the FQDI of the transformation
-   * @param key is the key to search for
-   * @return the annotated value, or null if not found.
-   * @see org.griphyn.vdl.classes.Transformation
-   */
-  public Tuple loadAnnotationTransformation( String fqdi, String key )
-    throws SQLException, IllegalArgumentException
-  {
-      return loadAnnotation(fqdi, null, CLASS_TRANSFORMATION, key);
-  }
-
-  /**
-   * Obtains the value to a specific key in an annotated derivation.
-   *
-   * @param fqdi is the FQDI of the derivation
-   * @param key is the key to search for
-   * @return the annotated value, or null if not found.
-   * @see org.griphyn.vdl.classes.Derivation
-   */
-  public Tuple loadAnnotationDerivation( String fqdi, String key )
-    throws SQLException, IllegalArgumentException
-  {
-      return loadAnnotation(fqdi, null, CLASS_DERIVATION, key);
-  }
-
-  /**
-   * Obtains the value to a specific key in an annotated formal argument.
-   *
-   * @param fqdi is the FQDI of the transformation
-   * @param farg is the name of the formal argument
-   * @param key is the key to search for
-   * @return the annotated value, or null if not found
-   * @see org.griphyn.vdl.classes.Declare
-   */
-  public Tuple loadAnnotationDeclare( String fqdi, String farg,
-				      String key )
-    throws SQLException, IllegalArgumentException
-  {
-      return loadAnnotation(fqdi, farg, CLASS_DECLARE, key);
-  }
-
-  /**
-   * Obtains the value to a specific key for a call statement.
-   *
-   * @param fqdi is the FQDI of the transformation
-   * @param index is the number of the call to annotate.
-   * @param key is the key to search for
-   * @return the annotated value, or null if not found
-   * @see org.griphyn.vdl.classes.Call
-   */
-  public Tuple loadAnnotationCall( String fqdi, int index, 
-				   String key )
-    throws SQLException, IllegalArgumentException
-  {
-      return loadAnnotation(fqdi, new Integer(index), CLASS_CALL, key);
-  }
-
-  /**
-   * Obtains the value to a specific key in an annotated filename.
-   *
-   * @param filename is the name of the file that was annotated.
-   * @param key is the key to search for
-   * @return the annotated value, or null if not found.
-   * @see org.griphyn.vdl.classes.LFN
-   */
-  public Tuple loadAnnotationFilename( String filename, String key )
-    throws SQLException, IllegalArgumentException
-  {
-      return loadAnnotation(filename, null, CLASS_FILENAME, key);
-  }
-
-  /**
-   * Retrieves a specific annotation from an annotatable classes with
-   * the specified tuple. This is an interface method to the various
-   * class-specific methods.
-   *
-   * @param primary is the primary object specifier for the class. 
-   * According to the type, this is either the FQDI, or the filename.
-   * @param secondary is a helper argument for annotations to calls
-   * and formal arguments, and should be null for all other classes.
-   * For calls, the argument must be packed into {@link java.lang.Integer}.
-   * @param kind defines the kind/class of object to annotate.
-   * @param key is the key to look for.
-   * @return null if not found, otherwise the annotation tuple.
-   * @see #loadAnnotationTransformation( String, String )
-   * @see #loadAnnotationDerivation( String, String )
-   * @see #loadAnnotationCall( String, int, String )
-   * @see #loadAnnotationDeclare( String, String, String )
-   * @see #loadAnnotationFilename( String, String )
-   */
-  public Tuple loadAnnotation( String primary, Object secondary,
-			       int kind, String key )
-    throws SQLException, IllegalArgumentException
-  {
-    Tuple result = null;
-    String subject = "";
-    String select = null;
-
-    switch ( kind ) {
-    case CLASS_TRANSFORMATION:
-      subject = "tr";
-      break;
-    case CLASS_DERIVATION:
-      subject = "dv";
-      break;
-    case CLASS_CALL:
-      // may throw ClassCastException
-      subject = "tr";	
-      select = "call[" +  ((Integer) secondary).intValue() + "]";
-      break;
-    case CLASS_DECLARE:
-      subject = "tr";
-      // may throw ClassCastException
-      //select = "declare[@name='" + (String)secondary + "']";
-      select = (String)secondary;
-      break;
-    case CLASS_FILENAME:
-      subject = "lfn";
-      break;
-    default:
-      throw new IllegalArgumentException( "The class kind=" + kind + " cannot be annotated" );
-    }
-
-    try {
-    String id = null;
-    String xquery = "/annotation/metadata[@subject=\"" + subject + "\"][@name=\"" +
-	primary + "\"]";
-    if (select == null) {
-	if (kind != CLASS_FILENAME) {
-	    xquery += "[empty(@select)]";
-	}
-    } else {
-	xquery += "[@select=\"" + select + "\"]";
-    }
-
-    xquery +=  "/attribute[@name=\"" + key + "\"]";
-
-    XMLResource res = null;
-    if ((res = findAnnotation(xquery)) != null) {
-	result = loadAnnotationResource(res);
-    }
-    return result;
-    } catch (Exception e) {
-	throw new SQLException(e.getMessage());
-    }
-  }
-
-  /**
-   * get the annotation from a XML resource
-   */
-  protected Tuple loadAnnotationResource(XMLResource res)  
-    throws SQLException
-  {
-      Tuple result = null;
-
-      if (res == null) 
-	  return result;
-
-      Element elem;
-      try {
-	  elem = (Element)res.getContentAsDOM();
-      } catch (Exception e) {
-	  throw new SQLException( e.getMessage());
-      }
-      if (elem != null) {
-	  String key = elem.getAttribute("name");
-	  String type = elem.getAttributeNS("http://www.w3.org/2001/XMLSchema-instance","type");
-	  String value = elem.getFirstChild().getNodeValue();
-	  if (key == null || type == null || value == null)
-	      return result;
-	  if (type.equals("xs:string")) {
-	      result = new TupleString(key, null);
-	      result.setValue(value);
-	      return result;
-	  }
-	  if (type.equals("xs:float")){
-	      result = new TupleFloat(key, 0);
-	      result.setValue(value);
-	      return result;
-	  }
-	  if (type.equals("xs:int")){
-	      result = new TupleInteger(key, 0);
-	      result.setValue(value);
-	      return result;
-	  }
-	  if (type.equals("xs:boolean")) {
-	      result = new TupleBoolean(key, false);
-	      result.setValue(value);
-	      return result;
-	  }
-	  if (type.equals("xs:date")) {
-	      result = new TupleDate(key, null);
-	      result.setValue(value);
-	      return result;
-	  }
-      }
-      return result;
-  }
-
-  /**
-   * Lists all annotations for a transformation.
-   *
-   * @param fqdi is the FQDI of the transformation
-   * @return a list of tuples, which may be empty.
-   * @see org.griphyn.vdl.classes.Transformation
-   */
-  public java.util.List loadAnnotationTransformation( String fqdi )
-    throws SQLException, IllegalArgumentException
-  {
-      return loadAnnotation(fqdi, null, CLASS_TRANSFORMATION);
-  }
-
-  /**
-   * Lists all annotations for a derivation.
-   *
-   * @param fqdi is the FQDI of the derivation
-   * @return a list of tuples, which may be empty.
-   * @see org.griphyn.vdl.classes.Derivation
-   */
-  public java.util.List loadAnnotationDerivation( String fqdi )
-    throws SQLException, IllegalArgumentException
-  {
-      return loadAnnotation(fqdi, null, CLASS_DERIVATION);
-  }
-
-
-  /**
-   * Lists all annotations for a formal argument.
-   *
-   * @param fqdi is the FQDI of the transformation
-   * @param farg is the name of the formal argument
-   * @return a list of tuples, which may be empty.
-   * @see org.griphyn.vdl.classes.Declare
-   */
-  public java.util.List loadAnnotationDeclare( String fqdi, String farg )
-    throws SQLException, IllegalArgumentException
-  {
-      return loadAnnotation(fqdi, farg, CLASS_DECLARE);
-  }
-
-  /**
-   * Lists all annotations for a call statement.
-   *
-   * @param fqdi is the FQDI of the transformation
-   * @param index is the number of the call to annotate.
-   * @return a list of tuples, which may be empty.
-   * @see org.griphyn.vdl.classes.Call
-   */
-  public java.util.List loadAnnotationCall( String fqdi, int index )
-    throws SQLException, IllegalArgumentException
-  {
-      return loadAnnotation(fqdi, new Integer(index), CLASS_CALL);
-  }
-
-  /**
-   * Lists all annotations for a logical filename.
-   *
-   * @param filename is the logical filename.
-   * @return a list of tuples, which may be empty.
-   * @see org.griphyn.vdl.classes.LFN
-   */
-  public java.util.List loadAnnotationFilename( String filename )
-    throws SQLException, IllegalArgumentException
-  {
-      return loadAnnotation(filename, null, CLASS_FILENAME);
-  }
-
-  /**
-   * Retrieves all annotations from an annotatable classes with
-   * the specified tuple. This is an interface method to the various
-   * class-specific methods.
-   *
-   * @param primary is the primary object specifier for the class. 
-   * According to the type, this is either the FQDI, or the filename.
-   * @param secondary is a helper argument for annotations to calls
-   * and formal arguments, and should be null for all other classes.
-   * For calls, the argument must be packed into {@link java.lang.Integer}.
-   * @param kind defines the kind/class of object to annotate.
-   *
-   * @return null if not found, otherwise the annotation tuple.
-   * @see #loadAnnotationTransformation( String )
-   * @see #loadAnnotationDerivation( String )
-   * @see #loadAnnotationCall( String, int )
-   * @see #loadAnnotationDeclare( String, String )
-   * @see #loadAnnotationFilename( String )
-   */
-  public java.util.List loadAnnotation( String primary, 
-					Object secondary,
-					int kind )
-    throws SQLException, IllegalArgumentException
-  {
-    java.util.List result = new java.util.ArrayList();
-    String subject = "";
-    String select = null;
-
-    switch ( kind ) {
-    case CLASS_TRANSFORMATION:
-      subject = "tr";
-      break;
-    case CLASS_DERIVATION:
-      subject = "dv";
-      break;
-    case CLASS_CALL:
-      // may throw ClassCastException
-      subject = "tr";	
-      select = "call[" +  ((Integer) secondary).intValue() + "]";
-      break;
-    case CLASS_DECLARE:
-      subject = "tr";
-      // may throw ClassCastException
-      //select = "declare[@name='" + (String)secondary + "']";
-      select = (String)secondary;
-      break;
-    case CLASS_FILENAME:
-      subject = "lfn";
-      break;
-    default:
-      throw new IllegalArgumentException( "The class kind=" + kind + " cannot be annotated" );
-    }
-
-    try {
-    String id = null;
-    String xquery = "/annotation/metadata[@subject=\"" + subject + "\"][@name=\"" +
-	primary + "\"]";
-    if (select == null) {
-	if (kind != CLASS_FILENAME) {
-	    xquery += "[empty(@select)]";
-	}
-    } else {
-	xquery += "[@select=\"" + select + "\"]";
-    }
-
-    xquery +=  "/attribute";
-    Logging.instance().log("nxd", 1, "query: " + xquery );
-
-    ResourceSet rs = m_metaQrySvc.query(xquery);
-    ResourceIterator i = rs.getIterator();
-    while (i.hasMoreResources()) {
-	XMLResource res = (XMLResource)i.nextResource();
-	Tuple tuple = loadAnnotationResource(res);
-	if (tuple != null) {
-	    result.add(tuple);
-	}
-    }
-    return result;
-    } catch (Exception e) {
-	throw new SQLException(e.getMessage());
-    }
-  }
-
-
-  /**
-   * Search for LFNs or Definitions that has certain annotations
-   *
-   * @param kind defines the kind/class of object annotated.
-   * @param arg is used only for TR ARG and TR CALL. For the former
-   * it is the name of the argument (String), for the latter the position of 
-   * the call (Integer).
-   * @param tree stores the query tree to query the annotation
-   * @return a list of LFNs if search for filenames, otherwise a list of
-   * definitions.
-   * @exception SQLException if something goes wrong with the database.
-   * @see org.griphyn.vdl.annotation.QueryTree
-   */
-  public java.util.List searchAnnotation( int kind,
-					  Object arg,
-					  QueryTree tree)
-    throws SQLException
-  {
-    java.util.List result = new java.util.ArrayList();
-
-    if ( tree == null)
-	return result;
-
-    String subject = "";
-    String defn = "transformation";
-    String select = null;
-
-    switch ( kind ) {
-    case CLASS_TRANSFORMATION:
-      subject = "tr";
-      break;
-    case CLASS_DERIVATION:
-      subject = "dv";
-      defn = "derivation";
-      break;
-    case CLASS_CALL:
-      // may throw ClassCastException
-      subject = "tr";	
-      select = "call[" +  ((Integer) arg).intValue() + "]";
-      break;
-    case CLASS_DECLARE:
-      subject = "tr";
-      // may throw ClassCastException
-      //select = "declare[@name='" + (String)arg + "']";
-      select = (String)arg;
-      break;
-    case CLASS_FILENAME:
-      subject = "lfn";
-      break;
-    default:
-      throw new IllegalArgumentException( "The class kind=" + kind + " cannot be annotated" );
-    }
-
-    try {
-    String id = null;
-    String cond = "[@subject=\"" + subject + "\"]";
-    if (select == null) {
-	if (kind != CLASS_FILENAME) {
-	    cond += "[empty(@select)]";
-	}
-    } else {
-	cond += "[@select=\"" + select + "\"]";
-    }
-    String xquery = "for $mn in distinct-values(//annotation/metadata" + cond + "/@name) " +
-	"let $m := //annotation/metadata[@name=$mn]" + cond; 
-
-    String where = " where ";
-    where += tree.toXQuery("$m/attribute");
-
-    if (kind == CLASS_FILENAME) {
-	xquery += ", $r := $m";
-	xquery += where;
-	xquery += " return $mn";
-
-	return searchElements(xquery);
-    } else {
-	xquery += ", $n := substring-before($mn, '::'), $na := substring-after($mn, '::'), $iv := if ($na) then $na else $mn, $v := substring-after($iv, ':'), $ib := substring-before($iv, ':'), $i := if ($ib) then $ib else $iv,";
-	xquery += " $t := if ($n) then if ($v) then //" + defn + "[@namespace=$n][@name=$i][@version=$v] else //" + defn + "[@namespace=$n][@name=$i][empty(@version)] else if ($v) then //" + defn + "[empty(@namespace)][@name=$i][@version=$v] else //" + defn + "[empty(@namespace)][@name=$i][empty(@version)]";
-	
-	xquery += where;
-	if (kind == CLASS_DECLARE)
-	    xquery += " return $t[" + "declare[@name='" + select + "']" + "]";
-	else
-	    xquery += " return $t";
-	return searchDefinition(xquery);
-    }
-
-    } catch (Exception e) {
-	throw new SQLException(e.getMessage());
-    }
-  }
-
-
-  /**
-   * pass-thru to driver.
-   * @return true, if it is feasible to cache results from the driver
-   * false, if requerying the driver is sufficiently fast (e.g. driver
-   * is in main memory, or driver does caching itself).
-   */
-  public boolean cachingMakesSense()
-  {
-      return true;
-  }
-
-  public void close()
-    throws SQLException
-  {
-      try {
-	  //m_vdc.close();
-	  //m_db.close();
-      } catch (Exception e){
-	  throw new SQLException(e.getMessage());
-      }
-  }
-}
Index: pegasus-wms_4.0.1+dfsg/build.xml
===================================================================
--- pegasus-wms_4.0.1+dfsg.orig/build.xml	2012-05-24 16:47:52.064156782 -0700
+++ pegasus-wms_4.0.1+dfsg/build.xml	2012-05-24 16:47:54.096166867 -0700
@@ -117,7 +117,7 @@
     <javac destdir="${build.src}" srcdir="src"
            target="${build.target}" source="${build.source}"
            encoding="UTF-8" debug="true"
-           includes="edu/isi/pegasus/planner/**/*.java"
+	   includes="edu/isi/pegasus/planner/**/*.java,org/globus/**/*.java"
            excludes="edu/isi/pegasus/planner/**/*Test.java,org/griphyn/**/*.java"
            classpathref="build.classpath"/>
   </target>
@@ -148,7 +148,7 @@
           description="Create the PEGASUS JAR files excluding tests">
     <mkdir dir="${dist.jars}"/>
     <jar destfile="${dist.jars}/pegasus.jar" basedir="${build.src}"
-         includes="edu/isi/pegasus/**/*.class,edu/isi/ikcap/**/*.class,edu/clemson/**/*.class"
+	 includes="edu/isi/pegasus/**/*.class,edu/isi/ikcap/**/*.class,edu/clemson/**/*.class,org/globus/**/*.class"
          excludes="*.class,edu/isi/pegasus/**/*Test.class,org/griphyn/**/*Test.class,org/griphyn/**/*.class"/>
     <chmod perm="0644" file="${dist.jars}/pegasus.jar"/>
   </target>
