package ca.ucalgary.seahawk.gui;

import ca.ucalgary.seahawk.util.*;
import org.biomoby.shared.data.*;
import org.w3c.dom.*;
import javax.xml.xpath.XPathConstants;
import java.net.URL;
import java.util.*;
import java.util.logging.*;

/**
 * Builds a MobyContentInstance based on the requested selection criteria. Ties itself to a MobyContentPane
 * for data retrieval efficiency in case the source document is already loaded in-memory for the GUI. 
 */
public class MobyPayloadCreator implements MobyPayloadRequestListener{
    // Pane tied to this creator so in-memory document/filter state can be reused
    // instead of re-fetching/re-parsing the source document (may legitimately be
    // showing a different document by the time createPayload() runs).
    private MobyContentPane srcPane;
    // Tab-delimited selection criteria, captured at menu-creation time so the
    // payload reflects the user's intent at that moment (see createPayload()).
    private String requestKey;

    private static Logger logger = Logger.getLogger(MobyPayloadCreator.class.getName());

    /**
     * @param pane the content pane whose loaded document and filter state may be reused
     * @param requestKey tab-delimited selection criteria; see createPayload() for the format
     */
    public MobyPayloadCreator(MobyContentPane pane, String requestKey){
        srcPane = pane;
        this.requestKey = requestKey;
    }

    /**
     * Builds a MobyContentInstance containing the peers of the sample datum described
     * by the request key, one job per peer element found.
     *
     * @param targetJobArgName the argument name under which each datum is stored in its job
     * @return the populated payload (possibly empty), or null if the request key's
     *         URL portion is malformed
     */
    public MobyContentInstance createPayload(String targetJobArgName){
        // The request key is the sample data's app context:
        // url#xpath <tab> sample_xptr <tab> filter_regex <tab> filter_xpath <tab> xpath_desc <tab> case_sensitive_boolean <tab> inverse selection
        // from which we will build the peer dataset to submit.
        logger.log(Level.FINER, "Request key is " + requestKey);
        String[] requestKeyFields = requestKey.split("\t");

        String[] requestParts = requestKeyFields[0].split("#", 2);
        // Guard against a key missing the '#xpath' suffix: previously this fell
        // through to requestParts[1] and threw ArrayIndexOutOfBoundsException.
        // Fail gracefully instead, matching the bad-URL error path below.
        if(requestParts.length < 2){
            logger.log(Level.SEVERE, "Malformed request key (expected url#xpath): "+requestKeyFields[0]);
            return null;
        }
        URL targetURL = null;
        try{
            targetURL = new URL(requestParts[0]);
        } catch(Exception e){
            logger.log(Level.SEVERE, "Could not create URL from "+requestParts[0], e);
            return null;
        }
        String selectionXPath = requestParts[1];

        // Check that we are still on this doc, otherwise filteredData will not be appropriate.
        // This could easily happen if the service asks for secondary params in a dialog, but you navigate away before
        // hitting "Execute Service".
        // Load up filteredData for the given doc, whether it's currently showing or not.
        // Such a functionality also implies that we need to store the state of the filter in the
        // request key, so it captures the intention at the moment of menu creation, not whatever state
        // we left the page in (which is what is in filterHistory), or have changed to since the menu was created
        // getCurrentFilter().
        Map<String,String> xptrsToFilter = new HashMap<String,String>();
        FilterSearch filter = null;
        // Was there a filter in the request? (fields 2-6 encode regex, xpath option, case flag, inverse flag)
        if(requestKeyFields.length >= 7){
            filter = new FilterSearch(requestKeyFields[2],
                                      new XPathOption(requestKeyFields[3], requestKeyFields[4]),
                                      Boolean.parseBoolean(requestKeyFields[5]),
                                      Boolean.parseBoolean(requestKeyFields[6]));
        }
        Document doc = null;
        // Get the list of nodes to filter, using in-memory data if possible
        if(srcPane != null && targetURL.equals(srcPane.getCurrentURL())){
            doc = srcPane.getCurrentDoc();

            if(filter == null){
                // no filter requested: null tells getPeerElements() not to restrict by xptr
                xptrsToFilter = null;
            }
            else if(filter.equals(srcPane.getFilter())){
                // no need to redo the filtering, it's the current display's filtering
                logger.log(Level.FINE, "Reusing current filter");
                xptrsToFilter = srcPane.getFilteredData();
            }
            else{
                // we have the doc loaded, but the filter has changed since
                DataUtils.findFilteredNodes(doc, filter, null, xptrsToFilter, srcPane.getJobXPtrs(), null, null, true);
            }
        }
        else{
            // the data is not the currently displayed doc, load it all from scratch and use the spec filter
            doc = DataUtils.findFilteredNodes(targetURL, filter, xptrsToFilter);
        }

        // Dummy datum whose userData carries the selection context down to getPeerElements()
        MobyDataInstance mobyDataContext = new MobyDataObject("whatever");
        mobyDataContext.setUserData(requestKey);
        NodeList peerElements = DataUtils.getPeerElements(doc, mobyDataContext,
                                                          xptrsToFilter, DataUtils.DATATYPE_PEERS_MODE);

        MobyContentInstance payload = new MobyContentInstance();
        if(peerElements == null){
            return payload;
        }

        // find the peers defined by the userData selection constraints and put them into a MobyContentInstance
        Map<String,Integer> jobPrefixes = new LinkedHashMap<String,Integer>();

        logger.log(Level.FINE, "Loading MobyContentInstance with "+peerElements.getLength()+" elements for xpath "+selectionXPath);
        for(int i = 0; i < peerElements.getLength(); i++){
            Element peerElement = (Element) peerElements.item(i);
            // Try to make the job names trackable to the inputs:
            // Name the jobs according to their original job + an iterated number suffix, e.g. originatingJob_1, originatingJob_2
            String jobKey = DataUtils.findMobyJobName(peerElement);
            if(!jobPrefixes.containsKey(jobKey)){
                jobPrefixes.put(jobKey, Integer.valueOf(1));
                if(peerElements.getLength() > 1){
                    jobKey += "_1"; // only add a suffix if multiple loop members for the job
                }
            }
            else{
                int suffixCount = jobPrefixes.get(jobKey);
                jobPrefixes.put(jobKey, Integer.valueOf(suffixCount+1));
                jobKey += "_"+(suffixCount+1);
            }
            logger.log(Level.FINE, "Adding job "+jobKey+" for "+peerElement.getLocalName());
            MobyDataInstance mdi  = null;
            try{
                mdi = MobyDataObject.createInstanceFromDOM(peerElement, SeahawkOptions.getRegistry());
            } catch(Exception e){
                // skip unparseable peers rather than aborting the whole payload
                logger.log(Level.SEVERE, "Could not create Moby object from peer " + peerElement.getLocalName() + " #"+i, e);
                continue;
            }
            // Make sure the created data reflects the selection conditions, by setting userData
            DataUtils.setUserData(mdi, targetURL, selectionXPath, DataUtils.getXPtr(peerElement), filter);
            MobyDataJob job = new MobyDataJob();
            job.put(targetJobArgName, mdi);
            payload.put(jobKey, job);
        }

        return payload;
    }
}

