<% // LANGUAGE SETTINGS Locale locale; String lang = request.getParameter("Language").toLowerCase(); if (lang == null) // set English as the default lang = "english"; if (lang.equals("german")) locale=Locale.GERMAN; else // no parameter or default locale=Locale.ENGLISH; session.setAttribute("myLocale", locale); ResourceBundle bundle = ResourceBundle.getBundle("Message", locale); for (Enumeration e = bundle.getKeys(); e.hasMoreElements();) { String key = (String)e.nextElement(); String s = bundle.getString(key); session.setAttribute(key, s); } %> <% response.setHeader("Content-Type", "text/html; charset=UTF-8"); // QUERY PARAMETERS String net = request.getParameter("Network"); String reg = request.getParameter("Registry"); String url = request.getParameter("URL"); String type = request.getParameter("Type"); String requestIP = request.getRemoteAddr(); String paleoName = request.getParameter("PaleoName"); String higherTaxon = request.getParameter("HigherTaxon"); String chrono = request.getParameter("Chrono"); String mineralName = request.getParameter("MineralName"); String mineralRockGroup = request.getParameter("MineralRockGroup"); String isoCountry = request.getParameter("ISOCountry"); String namedArea = request.getParameter("NamedArea"); String locality = request.getParameter("Locality"); String mailAddr = request.getParameter("Email"); String returnLimit = request.getParameter("returnLimit"); String timeLimit = request.getParameter("TimeLimit"); String TRSynonyms = request.getParameter("TRSynonyms"); String TROutput = request.getParameter("TROutput"); session.setAttribute("network", net); session.setAttribute("registry", reg); session.setAttribute("url", url); session.setAttribute("type", type); session.setAttribute("ip", requestIP); session.setAttribute("paleoName", paleoName); session.setAttribute("higherTaxon", higherTaxon); session.setAttribute("Chrono", chrono); session.setAttribute("mineralName", mineralName); session.setAttribute("mineralRockGroup", 
mineralRockGroup); session.setAttribute("isocountry", isoCountry); session.setAttribute("namedArea", namedArea); session.setAttribute("locality", locality); session.setAttribute("mail", mailAddr); session.setAttribute("returnlimit", returnLimit); session.setAttribute("timelimit", timeLimit); session.setAttribute("TRSynonyms", TRSynonyms); session.setAttribute("TROutput", TROutput); %> Simple ABCDEFG provider access portal

Simple ABCDEFG provider access portal

Important Note:

This is a simple data portal for the GeoCASe network. It uses BioCASe technology to perform a distributed query on ABCDEFG providers, using the BioCASE protocol. Since searches are performed on the original providers' databases, they might take some time and yield fewer results than expected if some of the providers are offline.


<%@page import = "java.util.logging.*" %> <%@page import = "java.util.*" %> <%@page import = "java.text.SimpleDateFormat" %> <%@page import = "unitloader.*" %> <%@page import = "unitrequestor.*" %> <%@page import = "queryextender.*" %> <%@page import = "translator.*" %> <%@page import = "taxoRef.*" %> <%@page import = "java.io.*" %> <%@page import = "java.sql.*" %> <%@page import = "javax.xml.transform.*" %> <%@page import =" javax.xml.transform.stream.*" %> <% /* ---------------------------------------------------------------------------------------- */ /* Version 1.1 */ /* Authors: a.guentsch, a.hahn, j. de la torre, j. holetschek, m.doering */ /* REVISION: $Id: UnitLoader.jsp,v 1.5 2005/06/23 12:18:56 markus Exp $ */ /* ---------------------------------------------------------------------------------------- */ /* requires: */ /* - a JDBC Driver for SQL-Server (mssqlserver.jar in /WEB-INF/lib) */ /* - a JDBC Driver for PostgreSQL (pg73jdbc2.jar in /WEB-INF/lib) */ /* (on linux moved to $JAVA_HOME/lib/ext */ /* - the libraries jaxen-full.jar, jdom.jar, msbase.jar, msutil.jar, saxpath.jar */ /* in /WEB-INF/lib */ /* - the configuration file "consultantDBs.xml" in /WEB-INF */ /* - the queryextender, unitloader and unitrepquestor-classes to be stored in */ /* /WEB-INF/classes */ /* - the queryconcepts.xml, operators.xml and configuration.xml- files in */ /* /WEB-INF/classes/unitloader */ /* ---------------------------------------------------------------------------------------- */ /* known issues and problems: */ /* - the country field is free text entry - it should be replaced with */ /* a combobox offering country names and operating on ISO codes in the */ /* "background" (22.8.03, a.hahn) */ /* ---------------------------------------------------------------------------------------- */ /* -------------------------- CONSTANTS --------------------------------------------------- */ /* These constants give identifier strings for consulted external sources. 
*/ /* Their connection details have to be defined in 'consultantDBs.xml' */ /* ---------------------------------------------------------------------------------------- */ final Level LOG_LEVEL = Level.ALL; String CONFIGFILENAME = application.getRealPath("/") +"WEB-INF/conf/consultantDBs.xml"; final String TRANSFORMER_NAME = "en_ABCD2.0_All_parts.xslt"; final String TAX_EXTENDER_DB = "EM_bgbm22"; final String LOGGING_DB = "corm_write"; /* --------------------- USER INPUT AND PASSED PARAMETERS --------------------------------- */ /* get FullName, ISOCode and email parameters posted from form */ /* - concepts have to be registered in: /unitloader/queryconcepts.xml */ /* the request for a scientific name should always be automatically extended - double ** */ /* must be avoided(turns up in the Request Protocol string and leads to inacurate queries) */ /* ---------------------------------------------------------------------------------------- */ // allow user to limit the maximum number of results per provider and waiting time int timeout = 30000; int waitBeforeFetch = 15000; int maxResultNo = 10; try { maxResultNo = Integer.parseInt(request.getParameter("returnLimit")); waitBeforeFetch = Integer.parseInt(request.getParameter("TimeLimit")) * 1000; timeout = waitBeforeFetch; } catch (Exception e) { } //Intialize Taxonomic Referencing System taxoRef taxonomicReference = new taxoRef(); //Initialize ArrayLists Taxonomic Referencing System ArrayList paleoNameSynonyms = new ArrayList(); ArrayList higherTaxonSynonyms = new ArrayList(); //Initialize Translation ArrayLists ArrayList translationsCountry = new ArrayList(); ArrayList translationsHigherTaxon = new ArrayList(); /* Initialize TranslationsParsers */ TranslationsParser translationsParserIsoCountry = new TranslationsParser(); TranslationsParser translationsParserHigherTaxon = new TranslationsParser(); /* Get synonyms from Paleobiology Database (http://pbdb.org) */ if ((!paleoName.equals("*")) && (TRSynonyms != null)) { 
String searchTerm = paleoName; searchTerm = searchTerm.replace("*", ""); searchTerm = searchTerm.replaceAll(" ", "%20"); paleoNameSynonyms = taxonomicReference.getTaxonomicSynonyms("http://paleodb.org/cgi-bin/bridge.pl?action=getTaxonomyXML&name=" + searchTerm + "&response=full"); for (int i = 0; i < paleoNameSynonyms.size(); i++) { paleoNameSynonyms.set(i, paleoNameSynonyms.get(i) + "*"); } } if ((!higherTaxon.equals("*")) && (TRSynonyms != null)) { ArrayList synonyms = new ArrayList(); String searchTerm = higherTaxon; searchTerm = searchTerm.replace("*", ""); searchTerm = searchTerm.replaceAll(" ", "%20"); higherTaxonSynonyms = taxonomicReference.getTaxonomicSynonyms("http://paleodb.org/cgi-bin/bridge.pl?action=getTaxonomyXML&name=" + searchTerm + "&response=full"); for (int i = 0; i < higherTaxonSynonyms.size(); i++) { higherTaxonSynonyms.set(i, higherTaxonSynonyms.get(i) + "*"); } } /* Get translations from XML lists */ if (!isoCountry.equals("*")) { translationsCountry = translationsParserIsoCountry.parse("Country",isoCountry,application.getRealPath("/" + "translations/Countries.xml")); } if (!higherTaxon.equals("*")) { translationsHigherTaxon = translationsParserHigherTaxon.parse("Taxon",higherTaxon,application.getRealPath("/" + "translations/Taxa.xml")); } /* --- Status Output ---------------------------------------------------------------------- */ /* Print the stages that have to be done */ /* The setStage JavaScript function is used to boldface the current stage; */ /* out.flush() is required to force the buffer to be flushed to the client */ /* ---------------------------------------------------------------------------------------- */ String criteria = ""; criteria += paleoName.equals("*")? "" : "ScientificName: " + paleoName + ", "; criteria += higherTaxon.equals("*")? "" : "HigherTaxon: " + higherTaxon + ", "; criteria += chrono.equals("*")? "" : "ChronoAttrib: " + chrono + ", "; criteria += mineralName.equals("*")? 
"" : "MineralName: " + mineralName + ", "; criteria += mineralRockGroup.equals("*")? "" : "MineralRockGroup : " + mineralRockGroup + ", "; criteria += isoCountry.equals("*")? "" : "Country: " + isoCountry + ", "; criteria += namedArea.equals("*")? "" : "NamedArea: " + namedArea + ", "; criteria += locality.equals("*")? "" : "Locality: " + locality + ", "; criteria = criteria.substring(0, criteria .lastIndexOf(",")); out.print("

" + session.getAttribute("main.queryheader") + "
" + criteria + "
"); if (translationsCountry.size() > 1) { out.print("
Searching for Translations: " + translationsCountry + "
"); } if (translationsHigherTaxon.size() > 1) { out.print("
Searching for Translations: " + translationsHigherTaxon + "
"); } if (paleoNameSynonyms.size() > 0) { out.print("
Including Synonyms (Paleobiology Database): " + paleoNameSynonyms + "
"); } if (higherTaxonSynonyms.size() > 0) { out.print("
Searching for Synonyms (Paleobiology Database): " + higherTaxonSynonyms + "
"); } out.print("

"); %>

<%=session.getAttribute("process.message")%> 

<%=session.getAttribute("process.stage1")%>

<%=session.getAttribute("process.stage2")%>

<%=session.getAttribute("process.stage3")%>

<%=session.getAttribute("process.stage4")%>

<%=session.getAttribute("process.stage5")%>




© 2007 Freie Universität Berlin, Botanischer Garten und Botanisches Museum Berlin-Dahlem
Seitenverantwortliche/Page editors: J. Holetschek, A. Hahn, M. Döring, C. Oancea, A. Güntsch
BGBM Impressum/Imprint; Haftungsausschluss/Disclaimer

<%
    out.flush();
    /* -------------------------------- LOGGER ------------------------------------------------ */
    /* Create a logger that writes into a file with the pattern /log/geoCASE <date>.log.        */
    /* New entries for each search are appended; file size is limited to 4MB.                   */
    /* ----------------------------------------------------------------------------------------- */
    final Logger log = Logger.getLogger("org.biocase.geoCASE");
    try {
        // Compact single-line formatter: "HH:mm:ss.SSS LEVEL\tmessage".
        class myFormatter extends SimpleFormatter {
            final SimpleDateFormat time_pattern = new SimpleDateFormat("HH:mm:ss.SSS");
            public String format(LogRecord record) {
                return time_pattern.format(new java.util.Date(record.getMillis()))
                        + " " + record.getLevel() + "\t" + record.getMessage() + "\r\n";
            }
        }
        log.setLevel(LOG_LEVEL);
        String logFileName = application.getRealPath("/") + "log/geoCASE "
                + new SimpleDateFormat("yyyy-MM-dd").format(new java.util.Date()) + ".log";
        // NOTE(review): a new FileHandler is attached to this (global) logger on
        // every request; it is removed and closed at the end of the page. If the
        // page aborts in between, handlers can accumulate -- confirm lifecycle.
        FileHandler handler = new FileHandler(logFileName, 4096 * 1024, 1, true);
        handler.setFormatter(new myFormatter());
        log.addHandler(handler);
        log.info("Successfully created/opened log file.");
        out.println("");
    } catch (Exception e) {
        log.severe("Can't create log file handler");
        out.println("");
    }
    out.flush();

    log.info("Search process started.");
    log.info("Request sent from URL: " + session.getAttribute("url"));
    log.info("Remote host: " + request.getRemoteHost() + ", IP: " + request.getRemoteAddr());
    log.info("Searching for " + criteria);
    log.config("Time parameters used for query (in seconds): "
            + "maxResultNo = " + maxResultNo + ", "
            + "waitBeforeFetch = " + waitBeforeFetch / 1000 + ", "
            + "timeOut = " + timeout / 1000);

    // Rudimentary spam/abuse filter: abort if any entered value contains a link.
    if (criteria.indexOf("href") != -1) {
        log.info("Spam abuse detected (href found in one of the entered values). Query aborted.");
        throw new SecurityException();
    }

    /* --- Query Generation Part ---------------------------------------------------------------- */
    /* Construct a complex query from user input: set up a query generator and feed it           */
    /* with sub-queries built from the different input fields.                                   */
    /* ------------------------------------------------------------------------------------------ */
    ProtocolQuery pq = new ProtocolQuery("search");
    pq.addSenderIP((String) session.getAttribute("ip"));
    ArrayList providerList = new ArrayList();
    List taxNames = new ArrayList();
    taxNames.add(paleoName);
    QueryGenerator qGenerator = new QueryGenerator();

    // TaxonName
    if (!paleoName.equals("*")) {
        paleoNameSynonyms.add(paleoName);
        qGenerator.addSubquery("FullName", "in", paleoNameSynonyms);
    }
    // Higher Taxon: combine translations with PBDB synonyms.
    if (!higherTaxon.equals("*")) {
        ArrayList higherTax = translationsHigherTaxon;
        higherTax.addAll(higherTaxonSynonyms);
        qGenerator.addSubquery("HigherTaxon", "in", higherTax);
    }
    // Chronostratigraphic attribution
    if (!chrono.equals("*")) {
        ArrayList terms = new ArrayList();
        terms.add(chrono);
        qGenerator.addSubquery("ChronostratigraphicAttribution", "like", terms);
    }
    // ISO Country (uses the translated country names)
    if (!isoCountry.equals("*")) {
        ArrayList isoTerms = translationsCountry;
        qGenerator.addSubquery("ISOCountry", "in", isoTerms);
    }
    // NamedArea
    if (!namedArea.equals("*")) {
        ArrayList terms = new ArrayList();
        terms.add(namedArea);
        qGenerator.addSubquery("NamedArea", "like", terms);
    }
    // Locality
    if (!locality.equals("*")) {
        ArrayList terms = new ArrayList();
        terms.add(locality);
        qGenerator.addSubquery("LocalityText", "like", terms);
    }
    // Mineral classified name
    if (!mineralName.equals("*")) {
        ArrayList terms = new ArrayList();
        terms.add(mineralName);
        qGenerator.addSubquery("MineralClassifiedName", "like", terms);
    }
    // MineralRockGroup
    if (!mineralRockGroup.equals("*")) {
        ArrayList terms = new ArrayList();
        terms.add(mineralRockGroup);
        qGenerator.addSubquery("MineralRockGroup", "like", terms);
    }

    // Formally close query content construction and generate the ProtocolQuery.
    qGenerator.finaliseQuery();
    Object queryObj = qGenerator.getQuery();
    if (queryObj instanceof QBool) {
        pq.setQuery(0, maxResultNo, (QBool) queryObj, false);
        log.info("QueryGeneration: Done.");
        out.println("");
    } else if (queryObj instanceof Comp) {
        pq.setQuery(0, maxResultNo, (Comp) queryObj, false);
        log.info("QueryGeneration: Done.");
        out.println("");
    } else {
        log.severe("QueryGeneration: Failed.");
        out.println("");
    }
    out.flush();

    /* ----- Provider Selection / Query Sending Part --------------------------------------------- */
    /* Get a list of providers and send the query off.                                            */
    /* ------------------------------------------------------------------------------------------- */
    ProviderLimiter provFilter = new ProviderLimiter(CONFIGFILENAME);
    String emailRequestMessage = "";
    List resultList = new ArrayList();
    try {
        providerList = (ArrayList) provFilter.getRelevantProviders(reg, net);
        // NOTE(review): "s" was presumably printed inside markup lost in extraction.
        String s = session.getAttribute("main.providerselect1") + " " + providerList.size()
                + " " + session.getAttribute("main.providerselect2");
        log.config("Provider selection: Done. Query will be sent to " + providerList.size() + " providers.");
        out.println("");
    } catch (Exception e) {
        log.severe("ProviderSelection: Failed, cannot get provider list. Reason: " + e.getMessage());
    }

    if (providerList.isEmpty()) {
        log.severe("ReqSearchInterface: No list of providers avalailable; queries will not be sent.");
        out.println("");
    } else {
        // Use ReqSearchInterface to send the request.
        int okResp = providerList.size();
        String req = pq.getProtocolRequestString();
        ReqSearchInterface rsi;
        // Rudimentary check for email validity: if a valid address is supplied,
        // the full response (xml) gets sent by email; otherwise only HTML output.
        // BUGFIX: guard against a missing "Email" parameter (null).
        if (mailAddr != null && StaticStuff.isEmailAdr(mailAddr)) {
            rsi = new ReqSearchInterface(providerList, mailAddr, req, timeout, waitBeforeFetch, okResp);
            log.info("ReqSearchInterface constructed with wait = " + waitBeforeFetch + " and okResp = " + okResp);
            emailRequestMessage = " Response will be sent to " + mailAddr + ".";
            log.info("ReqSearchInterface: A full response will be sent to " + mailAddr + ".");
        } else {
            rsi = new ReqSearchInterface(providerList, req, timeout, waitBeforeFetch, okResp);
            log.info("ReqSearchInterface constructed with wait = " + waitBeforeFetch + " and okResp = " + okResp);
            emailRequestMessage = " No email response requested.";
            log.info("ReqSearchInterface: No eMail response requested.");
        }
        log.info("ReqSearchInterface: Call done; query sent off to providers.");
        resultList = rsi.getIntermediate();
        log.info("ReqSearchInterface: Collected results from providers.");
        out.println("");
    }
    out.flush();

    /* --- Logging of request details ------------------------------------------------------------ */
    String insertStatement = "INSERT INTO webspy_unit(highertax, taxonname, country, context, "
            + " email, resultlimit, waitfor, ip_address) "
            + " VALUES (?,?,?,?,?,?,?,?)";
    Connection logConn = null;
    PreparedStatement logQueryPs = null;
    try {
        logConn = new ConnectionMaker(CONFIGFILENAME, LOGGING_DB).getConnection();
        logQueryPs = logConn.prepareStatement(insertStatement);
        logQueryPs.setString(1, request.getParameter("HigherTaxon"));
        logQueryPs.setString(2, request.getParameter("FullName"));
        logQueryPs.setString(3, request.getParameter("ISOCountry"));
        logQueryPs.setString(4, net);
        // Only record WHETHER an email address was supplied, not the address itself.
        // BUGFIX: guard against a missing "Email" parameter (null).
        if (mailAddr != null && mailAddr.length() > 1)
            logQueryPs.setString(5, "true");
        else
            logQueryPs.setString(5, "false");
        logQueryPs.setString(6, request.getParameter("returnLimit"));
        logQueryPs.setString(7, request.getParameter("TimeLimit"));
        logQueryPs.setString(8, request.getRemoteAddr());
        logQueryPs.executeUpdate();
        log.info("QueryLogging: Query details logged to CORM.");
        out.println("");
    } catch (DbNotConfiguredException e) {
        // No database configured for logging the query: skip silently.
        log.config("No database configured for logging the query. Logging skipped.");
        out.println("");
    } catch (Exception e) {
        // Something went wrong while connecting/inserting.
        log.severe("QueryLogging: Connection to CORM database failed, query cannot be logged. Reason: " + e.getMessage());
        out.println("");
    } finally {
        // BUGFIX: the original only closed the statement/connection on the
        // success path, leaking JDBC resources whenever the insert failed.
        if (logQueryPs != null) try { logQueryPs.close(); } catch (Exception ignored) { /* best effort */ }
        if (logConn != null) try { logConn.close(); } catch (Exception ignored) { /* best effort */ }
    }
    out.flush();

    log.info("ResultTransformation: Started.");
    /* --- OUTPUT SECTION ------------------------------------------------------------------------ */
    /* Transform the XML documents to HTML and view it.                                           */
    /* ------------------------------------------------------------------------------------------- */
%>
 
 
<%
    // Collect the provider responses into one XML document and transform it
    // to HTML with the ABCD2.0 stylesheet; write the result to the client.
    String resultXML = StaticStuff.getResultXml(resultList);
    if (resultXML.length() == 0) {
        log.info("ResultTransformation: Could not be done (no results to be transformed).");
        out.println("");
    } else try {
        Source xsltSource = new StreamSource(application.getRealPath("/") + TRANSFORMER_NAME);
        // SECURITY: the XML comes from external providers and is untrusted --
        // enable secure processing to mitigate XXE / resource-exhaustion attacks.
        TransformerFactory transformerFactory = TransformerFactory.newInstance();
        transformerFactory.setFeature(javax.xml.XMLConstants.FEATURE_SECURE_PROCESSING, true);
        Transformer transformer = transformerFactory.newTransformer(xsltSource);
        Source xmlSource;
        if (TROutput != null) {
            // Enrich the XML source with PBDB taxonomy if extended taxonomic output is enabled.
            String taxoModString = taxonomicReference.concat(resultXML);
            xmlSource = new StreamSource(new StringReader(taxoModString));
        } else {
            // No modifications applied if PBDB taxonomy is disabled.
            xmlSource = new StreamSource(new StringReader(resultXML));
        }
        StringWriter htmlBuffer = new StringWriter();
        Result htmlResult = new StreamResult(htmlBuffer);
        transformer.transform(xmlSource, htmlResult);
        log.info("ResultTransformation: Processing done.");
        // Write the generated HTML to the client; finally, jump to the results section.
        out.println("");
        out.println(htmlBuffer.getBuffer());
        out.println("");
    } catch (TransformerConfigurationException tce) {
        log.severe("ResultTransformation: Failed. No JAXP-compliant XSLT processor found.");
        out.println("");
        out.flush();
    } catch (TransformerException te) {
        log.severe("ResultTransformation: Error occured while transforming document. Reason: " + te.getMessage());
        out.println("");
        out.flush();
    }

    // Close the logger's file handler(s).
    log.info("Search process finished.\r\n");
    // BUGFIX: the original did log.getHandlers()[0], which threw an
    // ArrayIndexOutOfBoundsException whenever the FileHandler could not be
    // created earlier (the catch branch above adds no handler). Close whatever
    // handlers actually exist instead.
    Handler[] attachedHandlers = log.getHandlers();
    for (int i = 0; i < attachedHandlers.length; i++) {
        log.removeHandler(attachedHandlers[i]);
        attachedHandlers[i].close();
    }
%>