Merge helma 1 trunk from revision 8828 to 9332, minus commit 9325 (case-sensitive HopObject properties), which is going to Helma 1.7 exclusively.

svn merge -r 8828:HEAD https://dev.helma.org/svn/helma/helma/trunk/
svn merge -r 9325:9324 https://dev.helma.org/svn/helma/helma/trunk/
This commit is contained in:
hns 2008-10-20 12:48:53 +00:00
parent 718d43690d
commit d1f972591f
33 changed files with 458 additions and 1174 deletions

View file

@@ -1,4 +1,4 @@
-This is the README file for version 1.6.2 of the Helma Javascript
+This is the README file for version 1.6.3 of the Helma Javascript
 Web Application Framework.
 ==============

View file

@ -1,541 +0,0 @@
<?xml version="1.0"?>

<!-- Ant build file for the Helma 1.6.x Javascript Web Application Framework. -->
<project name="Helma" default="usage" basedir=".">

    <!-- =================================================================== -->
    <!-- Initializes some variables                                          -->
    <!-- =================================================================== -->
    <target name="init">
        <property name="Name" value="helma"/>
        <!-- NOTE(review): "1998-${year}" references the very property being
             defined, so Ant leaves ${year} unexpanded; this was probably
             meant to be a literal end year (e.g. "1998-2008") - verify. -->
        <property name="year" value="1998-${year}"/>
        <property name="version" value="1.6.2"/>
        <property name="project" value="helma"/>
        <property name="cvs.root.apps" value=":pserver:anonymous@adele.helma.at:/opt/cvs/apps"/>
        <property name="cvs.root.helma" value=":pserver:anonymous@adele.helma.at:/opt/cvs/helma"/>
        <property name="cvs.apps.tag" value="HEAD"/>
        <property name="home.dir" value=".."/>
        <property name="build.dir" value="${home.dir}/build"/>
        <property name="build.src" value="${home.dir}/src"/>
        <property name="build.lib" value="${home.dir}/lib"/>
        <property name="build.classes" value="${home.dir}/classes"/>
        <property name="build.docs" value="${home.dir}/docs"/>
        <property name="build.javadocs" value="${home.dir}/docs/api"/>
        <property name="build.externals" value="${build.dir}/externals"/>
        <property name="build.work" value="${home.dir}/work"/>
        <property name="build.dist" value="${home.dir}/dist"/>
        <property name="jar.name" value="${project}"/>
        <property name="package.name" value="${project}-${version}"/>
        <property name="core.name" value="${project}-core-${version}"/>
        <property name="debug" value="on"/>
        <property name="optimize" value="on"/>
        <property name="deprecation" value="off"/>
        <property name="build.jsdocs" value="${home.dir}/docs/framework"/>
        <property name="jsdoc" value="${home.dir}/work/reference/templates/jsdoc.pl"/>
        <!-- compile classpath: everything in lib/ except previously built helma jars -->
        <path id="build.class.path">
            <fileset dir="${home.dir}/lib">
                <exclude name="**/helma*.jar" />
                <include name="**/*.jar" />
            </fileset>
        </path>
        <tstamp/>
        <filter token="year" value="${year}"/>
        <filter token="version" value="${version}"/>
        <filter token="date" value="${TODAY}"/>
    </target>

    <!-- =================================================================== -->
    <!-- Help on usage                                                       -->
    <!-- =================================================================== -->
    <target name="help" depends="usage" />

    <target name="usage">
        <echo message=""/>
        <echo message=""/>
        <echo message="Helma build instructions"/>
        <echo message="-------------------------------------------------------------"/>
        <echo message=""/>
        <echo message=" available targets are:"/>
        <echo message=""/>
        <echo message=" compile --> compiles the source code to ./classes"/>
        <echo message=" jar --> generates the ./lib/helma-YYYYMMDD.jar file"/>
        <echo message=" javadocs --> generates the API docs"/>
        <echo message=" jsdocs --> generates the framework docs"/>
        <!--<echo message=" docs -> tries to retrieve the HTML documentation "/> -->
        <!--<echo message=" (may need proxy settings in startscript)"/> -->
        <echo message=" package --> generates the distribution (zip and tar.gz)"/>
        <echo message=" app [name] --> gets an application from the cvs and zips it"/>
        <echo message=" module [name] --> gets a module from the cvs and zips it"/>
        <echo message=" core --> generates core for production updates (zip and tar.gz)"/>
        <echo message=" clean --> clean up temporary build directories and files"/>
        <echo message=""/>
        <echo message=" usage --> provides help on using the build tool (default)"/>
        <echo message=""/>
        <echo message=" See comments inside the build.xml file for more details."/>
        <echo message="-------------------------------------------------------------"/>
        <echo message=""/>
        <echo message=""/>
    </target>

    <!-- =================================================================== -->
    <!-- Compiles the source directory                                       -->
    <!-- =================================================================== -->
    <target name="compile" depends="init">
        <mkdir dir="${build.classes}"/>
        <!-- copy the imageio file -->
        <copy file="${build.src}/META-INF/services/javax.imageio.spi.ImageWriterSpi"
              todir="${build.classes}/META-INF/services"/>
        <!-- copy helma db style sheet -->
        <copy file="${build.src}/helma/objectmodel/dom/helma.xsl"
              todir="${build.classes}/helma/objectmodel/dom" />
        <!-- copy source files over to work directory so the build date can be
             patched in without touching the pristine sources -->
        <delete dir="${build.work}/src" quiet="true"/>
        <mkdir dir="${build.work}/src" />
        <copy todir="${build.work}/src" overwrite="true">
            <fileset dir="${build.src}" includes="**/*.java"/>
        </copy>
        <replace file="${build.work}/src/helma/main/Server.java"
                 token="__builddate__" value="${TODAY}"/>
        <javac srcdir="${build.work}/src"
               source="1.5"
               target="1.5"
               destdir="${build.classes}"
               debug="${debug}"
               deprecation="${deprecation}"
               optimize="${optimize}"
               includeAntRuntime="no">
            <classpath refid="build.class.path" />
        </javac>
        <delete dir="${build.work}/src"/>
        <rmic classname="helma.framework.core.RemoteApplication" base="${build.classes}"/>
    </target>

    <!-- =================================================================== -->
    <!-- Creates a helma.jar file (snapshot) in the lib-directory            -->
    <!-- =================================================================== -->
    <target name="jar" depends="compile">
        <jar jarfile="${build.lib}/${jar.name}-${DSTAMP}.jar"
             basedir="${build.classes}"
             excludes="**/package.html,**/main/launcher/**"/>
        <jar jarfile="${home.dir}/launcher.jar"
             basedir="${build.classes}"
             includes="**/main/launcher/**"
             manifest="${build.src}/helma/main/launcher/manifest.txt"/>
        <!-- Copy timestamped helma jar file to lib/helma.jar -->
        <copy file="${build.lib}/${jar.name}-${DSTAMP}.jar"
              tofile="${build.lib}/${jar.name}.jar"/>
    </target>

    <!-- =================================================================== -->
    <!-- Creates the javadoc API documentation                               -->
    <!-- =================================================================== -->
    <target name="javadocs" depends="init">
        <mkdir dir="${build.javadocs}"/>
        <javadoc packagenames="helma.*"
                 sourcepath="${build.src}"
                 destdir="${build.javadocs}"
                 author="false"
                 private="false"
                 version="false"
                 windowtitle="${Name} ${version} API"
                 doctitle="${Name} ${version} API"
                 bottom="Copyright &#169; ${year} Helma.org. All Rights Reserved."
                 classpathref="build.class.path"
        />
    </target>

    <!-- =================================================================== -->
    <!-- Create the jsdoc Framework documentation                            -->
    <!-- =================================================================== -->
    <target name="jsdocs" depends="init, package-modules">
        <!-- cvs cvsRoot="${cvs.root.apps}" command="export" tag="${cvs.apps.tag}" package="reference" dest="${build.work}" /-->
        <!-- add a copy of the reference -->
        <mkdir dir="${build.work}/reference"/>
        <copy todir="${build.work}/reference">
            <fileset dir="${build.externals}/reference"/>
        </copy>
        <!-- add a copy of the modules -->
        <mkdir dir="${build.work}/reference/modules"/>
        <copy todir="${build.work}/reference/modules">
            <fileset dir="${build.externals}/modules/"/>
        </copy>
        <mkdir dir="${build.jsdocs}"/>
        <!-- run the jsdoc generator on top of Rhino -->
        <java dir="${home.dir}" fork="true" jar="${build.lib}/rhino.jar">
            <sysproperty key="jsdoc.dir" value="work/reference"/>
            <arg value="work/reference/app/run.js"/>
            <arg value="-t=work/reference/templates"/>
            <arg value="-d=docs/framework"/>
            <arg value="-r=3"/>
            <arg value="work/reference/coreEnvironment"/>
            <arg value="work/reference/coreExtensions"/>
            <arg value="work/reference/modules"/>
        </java>
        <delete dir="${build.work}/reference" />
    </target>

    <!-- =================================================================== -->
    <!-- Get the documentation (currently can fail due to request time-out   -->
    <!-- or missing support for proxies)                                     -->
    <!-- =================================================================== -->
    <!-- <target name="docs" depends="init"> -->
    <!-- <get src="http://www.helma.org/docs/reference/print" -->
    <!-- dest="${build.docs}/reference.html" -->
    <!-- ignoreerrors="true" -->
    <!-- /> -->
    <!-- </target> -->

    <!-- =================================================================== -->
    <!-- Builds and packages only the core for the deployment and updating   -->
    <!-- of production environments                                          -->
    <!-- =================================================================== -->
    <target name="core" depends="init, jar">
        <mkdir dir="${build.work}"/>
        <!-- copy all libraries except helma-YYYYMMDD.jar -->
        <copy todir="${build.work}/lib">
            <fileset dir="${home.dir}/lib">
                <exclude name="**/helma-*.jar" />
                <include name="**/*.jar" />
            </fileset>
        </copy>
        <!-- copy the launcher jar and start files-->
        <copy file="${home.dir}/launcher.jar" todir="${build.work}/lib"/>
        <!-- create lib/ext directory -->
        <mkdir dir="${build.work}/lib/ext"/>
        <!-- copy the license files -->
        <copy todir="${build.work}/licenses">
            <fileset dir="${home.dir}/licenses" excludes="**/CVS**"/>
        </copy>
        <copy file="${home.dir}/license.txt" todir="${build.work}/licenses"/>
        <!-- zip up the whole thing -->
        <antcall target="package-zip">
            <param name="filename" value="${core.name}"/>
        </antcall>
        <antcall target="package-tgz">
            <param name="filename" value="${core.name}"/>
        </antcall>
        <!-- clean up -->
        <delete dir="${build.work}"/>
    </target>

    <!-- =================================================================== -->
    <!-- Creates the full helma distribution                                 -->
    <!-- =================================================================== -->
    <target name="package" depends="init">
        <mkdir dir="${build.work}"/>
        <!-- checkout the demo apps (and zip manage-app) -->
        <antcall target="package-apps" />
        <!-- generate the framework and modules documentation -->
        <antcall target="jsdocs" />
        <!-- create the main part of helma -->
        <antcall target="package-raw">
            <param name="distribution" value="main" />
        </antcall>
        <chmod perm="755">
            <fileset dir="${build.work}">
                <include name="start.sh"/>
            </fileset>
        </chmod>
        <!-- zip up the whole thing -->
        <antcall target="package-zip">
            <param name="filename" value="${package.name}"/>
        </antcall>
        <antcall target="package-tgz">
            <param name="filename" value="${package.name}"/>
        </antcall>
        <!-- make the src distributions -->
        <antcall target="javadocs"/>
        <antcall target="package-src-zip">
            <param name="filename" value="${package.name}"/>
        </antcall>
        <antcall target="package-src-tgz">
            <param name="filename" value="${package.name}"/>
        </antcall>
        <!-- clean up -->
        <delete dir="${build.work}"/>
    </target>

    <!-- =================================================================== -->
    <!-- Compile Helma and prepare the skeleton in a temporary directory.    -->
    <!-- Used by package .                                                   -->
    <!-- =================================================================== -->
    <target name="package-raw" depends="init, jar">
        <!-- copy the framework (apps.props, server.props, hop/db, hop/static) -->
        <copy todir="${build.work}">
            <fileset dir="${build.dir}/${distribution}" excludes="**/CVS**"/>
        </copy>
        <!-- copy the launcher jar and start files -->
        <copy file="${home.dir}/launcher.jar" todir="${build.work}/"/>
        <copy file="${home.dir}/start.sh" todir="${build.work}"/>
        <copy file="${home.dir}/start.bat" todir="${build.work}"/>
        <!-- copy README.txt -->
        <copy file="${home.dir}/README.txt" todir="${build.work}/"/>
        <!-- copy the whole docs-directory -->
        <copy todir="${build.work}/docs">
            <fileset dir="${build.docs}"/>
        </copy>
        <!-- copy all libraries except helma-YYYYMMDD.jar -->
        <copy todir="${build.work}/lib">
            <fileset dir="${home.dir}/lib">
                <exclude name="**/helma-*.jar" />
                <include name="**/*.jar" />
            </fileset>
        </copy>
        <!-- create lib/ext directory -->
        <mkdir dir="${build.work}/lib/ext"/>
        <!-- copy the license files -->
        <copy todir="${build.work}/licenses">
            <fileset dir="${home.dir}/licenses" excludes="**/CVS**"/>
        </copy>
        <copy file="${home.dir}/license.txt" todir="${build.work}/licenses"/>
        <!-- copy the scripts directory -->
        <copy todir="${build.work}/scripts">
            <fileset dir="${home.dir}/scripts" excludes="**/CVS**"/>
        </copy>
        <!-- zip the sourcecode -->
        <!-- mkdir dir="${build.work}/src"/>
        <tar tarfile="${build.work}/src/helma-src.tar" basedir="${build.src}/">
            <tarfileset dir="${build.src}">
                <include name="${build.src}/**"/>
            </tarfileset>
        </tar>
        <gzip zipfile="${build.work}/src/helma-src.tar.gz" src="${build.work}/src/helma-src.tar"/>
        <delete file="${build.work}/src/helma-src.tar"/ -->
    </target>

    <!-- =================================================================== -->
    <!-- Checkout demo apps, put them in work directory and zip manage app   -->
    <!-- =================================================================== -->
    <target name="package-apps" depends="init">
        <mkdir dir="${build.work}/apps" />
        <!-- get demo apps -->
        <!--cvs cvsRoot="${cvs.root.apps}" command="export" tag="${cvs.apps.tag}" package="welcome" dest="${build.work}/apps" /-->
        <!-- add a copy of the welcome app -->
        <mkdir dir="${build.work}/apps/welcome"/>
        <copy todir="${build.work}/apps/welcome">
            <fileset dir="${build.externals}/welcome"/>
        </copy>
        <antcall target="package-manage" />
    </target>

    <!-- =================================================================== -->
    <!-- Checkout and zip manage application                                 -->
    <!-- =================================================================== -->
    <target name="package-manage" depends="init">
        <!--cvs cvsRoot="${cvs.root.apps}" command="export" tag="${cvs.apps.tag}" package="manage" dest="${build.work}" /-->
        <mkdir dir="${build.work}/apps/manage"/>
        <zip zipfile="${build.work}/apps/manage/manage.zip" basedir="${build.externals}/manage/" includes="**" excludes="**/properties,readme/**" />
        <copy todir="${build.work}/apps/manage">
            <fileset dir="${build.externals}/manage" includes="app.properties,class.properties,readme.txt"/>
        </copy>
        <!-- delete dir="${build.work}/manage" /-->
    </target>

    <!-- =================================================================== -->
    <!-- Checkout modules, and include helmaTools                            -->
    <!-- =================================================================== -->
    <target name="package-modules" depends="init">
        <!--cvs cvsRoot="${cvs.root.apps}" command="export" tag="${cvs.apps.tag}" package="modules" dest="${build.work}" /-->
        <!-- add a copy of the modules -->
        <mkdir dir="${build.work}/modules"/>
        <copy todir="${build.work}/modules">
            <fileset dir="${build.externals}/modules"/>
        </copy>
        <antcall target="package-helmaTools" />
    </target>

    <!-- =================================================================== -->
    <!-- Checkout and zip helmaTools                                         -->
    <!-- =================================================================== -->
    <target name="package-helmaTools" depends="init">
        <!-- cvs cvsRoot="${cvs.root.apps}" command="export" package="helmaTools" dest="${build.work}" /-->
        <mkdir dir="${build.work}/modules"/>
        <zip zipfile="${build.work}/modules/helmaTools.zip" basedir="${build.externals}/helmaTools/" includes="**" excludes="**/*.txt, **/*.html, **/*.bat, **/*.sh" />
        <!--delete dir="${build.work}/helmaTools" /-->
    </target>

    <!-- =================================================================== -->
    <!-- Packages the work directory with TAR-GZIP                           -->
    <!-- needs parameter ${filename} for final dist-file                     -->
    <!-- =================================================================== -->
    <target name="package-tgz" depends="init">
        <mkdir dir="${build.dist}" />
        <fixcrlf srcdir="${build.work}" eol="lf" eof="remove" includes="**/*.txt, **/*.properties, **/*.hac, **/*.js, **/*.skin" />
        <tar tarfile="${build.dist}/${filename}.tar" basedir="${build.work}" excludes="**">
            <!-- start.sh gets mode 755 so the unpacked script is executable -->
            <tarfileset prefix="${filename}" dir="${build.work}" mode="755">
                <include name="start.sh"/>
            </tarfileset>
            <tarfileset prefix="${filename}" dir="${build.work}">
                <include name="**"/>
                <exclude name="start.sh"/>
                <exclude name="lib/jimi.jar"/>
                <exclude name="lib/apache-dom.jar"/>
                <exclude name="docs/api/**"/>
            </tarfileset>
        </tar>
        <gzip zipfile="${build.dist}/${filename}.tar.gz" src="${build.dist}/${filename}.tar"/>
        <delete file="${build.dist}/${filename}.tar"/>
    </target>

    <!-- =================================================================== -->
    <!-- Packages the work directory with ZIP                                -->
    <!-- needs parameter ${filename} for final dist-file                     -->
    <!-- =================================================================== -->
    <target name="package-zip" depends="init">
        <mkdir dir="${build.dist}" />
        <fixcrlf srcdir="${build.work}" eol="crlf" includes="**/*.txt, **/*.properties, **/*.hac, **/*.js, **/*.skin, **/*.xml" />
        <zip zipfile="${build.dist}/${filename}.zip">
            <zipfileset dir="${build.work}" prefix="${filename}">
                <include name="**"/>
                <exclude name="start.sh"/>
                <exclude name="lib/jimi.jar"/>
                <exclude name="lib/apache-dom.jar"/>
                <exclude name="docs/api/**"/>
            </zipfileset>
        </zip>
    </target>

    <!-- =================================================================== -->
    <!-- Packages Helma src and build directories with TAR-GZIP              -->
    <!-- needs parameter ${filename} for final dist-file                     -->
    <!-- =================================================================== -->
    <target name="package-src-tgz" depends="init">
        <mkdir dir="${build.dist}" />
        <tar tarfile="${build.dist}/${filename}-src.tar">
            <tarfileset prefix="${filename}" dir="${home.dir}">
                <include name="src/**"/>
                <include name="build/**"/>
                <include name="docs/**"/>
                <include name="licenses/**"/>
                <include name="license.txt"/>
                <include name="lib/jimi.jar"/>
                <include name="lib/apache-dom.jar"/>
                <exclude name="docs/modules/**"/>
            </tarfileset>
        </tar>
        <gzip zipfile="${build.dist}/${filename}-src.tar.gz" src="${build.dist}/${filename}-src.tar"/>
        <delete file="${build.dist}/${filename}-src.tar"/>
    </target>

    <!-- =================================================================== -->
    <!-- Packages Helma src and build directories with ZIP                   -->
    <!-- needs parameter ${filename} for final dist-file                     -->
    <!-- =================================================================== -->
    <target name="package-src-zip" depends="init">
        <mkdir dir="${build.dist}" />
        <zip zipfile="${build.dist}/${filename}-src.zip">
            <zipfileset dir="${home.dir}" prefix="${filename}">
                <include name="src/**"/>
                <include name="build/**"/>
                <include name="docs/**"/>
                <include name="licenses/**"/>
                <include name="license.txt"/>
                <include name="lib/jimi.jar"/>
                <include name="lib/apache-dom.jar"/>
                <exclude name="docs/modules/**"/>
            </zipfileset>
        </zip>
    </target>

    <!-- =================================================================== -->
    <!-- Cleans up temporary build directories                               -->
    <!-- =================================================================== -->
    <target name="clean" depends="init">
        <delete dir="${build.work}" />
        <delete dir="${build.classes}" />
    </target>

    <!-- =================================================================== -->
    <!-- Gets an application from the cvs and zips/targzs it                 -->
    <!-- =================================================================== -->
    <target name="app" depends="init">
        <mkdir dir="${build.dist}" />
        <mkdir dir="${build.work}" />
        <!-- to retrieve special versions of an application insert
             additional attributes: tag="TAGNAME" or date="1972-09-24 20:05" -->
        <!--cvs cvsRoot="${cvs.root.apps}" command="export" tag="${cvs.apps.tag}" package="${application}" dest="${build.work}" /-->
        <fixcrlf srcdir="${build.work}" eol="crlf" eof="add" includes="**/*.txt, **/*.properties, **/*.hac, **/*.js, **/*.skin, **/*.xml" />
        <zip zipfile="${build.dist}/${application}-${DSTAMP}.zip" basedir="${build.work}" includes="**"/>
        <fixcrlf srcdir="${build.work}" eol="lf" eof="remove" includes="**/*.txt, **/*.properties, **/*.hac, **/*.js, **/*.skin" />
        <tar tarfile="${build.dist}/${application}-${DSTAMP}.tar" basedir="${build.work}">
            <tarfileset dir="${build.work}">
                <include name="${build.work}/**"/>
            </tarfileset>
        </tar>
        <gzip zipfile="${build.dist}/${application}-${DSTAMP}.tar.gz" src="${build.dist}/${application}-${DSTAMP}.tar" />
        <delete file="${build.dist}/${application}-${DSTAMP}.tar" />
        <!--delete dir="${build.work}" /-->
    </target>

    <!-- =================================================================== -->
    <!-- Checkout and zip a module                                           -->
    <!-- =================================================================== -->
    <target name="module" depends="init">
        <cvs cvsRoot="${cvs.root.apps}" command="export" tag="${cvs.apps.tag}" package="${application}" dest="${build.work}" />
        <mkdir dir="${build.work}/modules"/>
        <zip zipfile="${build.work}/modules/${application}-${DSTAMP}.zip" basedir="${build.work}/${application}/" includes="**" excludes="**/*.txt, **/*.html, **/*.bat, **/*.sh" />
        <delete dir="${build.work}/${application}" />
    </target>
</project>

View file

@@ -1,4 +1,4 @@
-Copyright (c) 1999-2006 Helma Project. All rights reserved.
+Copyright (c) 1999-2008 Helma Project. All rights reserved.
 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions

View file

@@ -164,7 +164,18 @@ public class RequestTrans implements Serializable {
      * @return true if this might be an XML-RPC request.
      */
     public synchronized boolean checkXmlRpc() {
-        return "POST".equals(method) && "text/xml".equals(request.getContentType());
+        if ("POST".equalsIgnoreCase(method)) {
+            String contentType = request.getContentType();
+            if (contentType == null) {
+                return false;
+            }
+            int semi = contentType.indexOf(";");
+            if (semi > -1) {
+                contentType = contentType.substring(0, semi);
+            }
+            return "text/xml".equalsIgnoreCase(contentType.trim());
+        }
+        return false;
     }
/** /**

View file

@ -493,11 +493,11 @@ public final class ResponseTrans extends Writer implements Serializable {
/** /**
* Allow to directly set the byte array for the response. Calling this more than once will * Allow to directly set the byte array for the response. Calling this more than once will
* overwrite the previous output. We take a generic object as parameter to be able to * overwrite the previous output.
* generate a better error message, but it must be byte[]. * @param bytes an arbitrary byte array
*/ */
public void writeBinary(byte[] what) { public void writeBinary(byte[] bytes) {
response = what; response = bytes;
} }
/** /**
@ -649,6 +649,11 @@ public final class ResponseTrans extends Writer implements Serializable {
// there's no point in closing the response buffer // there's no point in closing the response buffer
HttpServletResponse res = reqtrans.getServletResponse(); HttpServletResponse res = reqtrans.getServletResponse();
if (res != null && res.isCommitted()) { if (res != null && res.isCommitted()) {
// response was committed using HttpServletResponse directly. We need
// set response to null and notify waiters in order to let attached
// requests know they can't reuse this response.
response = null;
notifyAll();
return; return;
} }
@ -664,7 +669,8 @@ public final class ResponseTrans extends Writer implements Serializable {
boolean encodingError = false; boolean encodingError = false;
// only close if the response hasn't been closed yet // only close if the response hasn't been closed yet, and if no
// response was generated using writeBinary().
if (response == null) { if (response == null) {
// if debug buffer exists, append it to main buffer // if debug buffer exists, append it to main buffer
if (contentType != null && if (contentType != null &&
@ -747,7 +753,7 @@ public final class ResponseTrans extends Writer implements Serializable {
* @return the response body * @return the response body
*/ */
public byte[] getContent() { public byte[] getContent() {
return (response == null) ? new byte[0] : response; return response;
} }
/** /**

View file

@ -342,7 +342,7 @@ public final class Application implements Runnable {
String ignoreDirs; String ignoreDirs;
Initializer(String dirs) { Initializer(String dirs) {
super("INIT-" + name); super(name + "-init");
ignoreDirs = dirs; ignoreDirs = dirs;
} }
@ -489,7 +489,7 @@ public final class Application implements Runnable {
releaseEvaluator(eval); releaseEvaluator(eval);
} }
worker = new Thread(this, "Worker-" + name); worker = new Thread(this, name + "-worker");
worker.setPriority(Thread.NORM_PRIORITY + 1); worker.setPriority(Thread.NORM_PRIORITY + 1);
worker.start(); worker.start();
} }
@ -720,6 +720,14 @@ public final class Application implements Runnable {
if (ev != null) { if (ev != null) {
res = ev.attachHttpRequest(req); res = ev.attachHttpRequest(req);
if (res != null) {
// we can only use the existing response object if the response
// wasn't written to the HttpServletResponse directly.
res.waitForClose();
if (res.getContent() == null) {
res = null;
}
}
} }
if (res == null) { if (res == null) {
@ -752,8 +760,6 @@ public final class Application implements Runnable {
} catch (UnsupportedEncodingException uee) { } catch (UnsupportedEncodingException uee) {
logError("Unsupported response encoding", uee); logError("Unsupported response encoding", uee);
} }
} else {
res.waitForClose();
} }
} }
@ -1462,7 +1468,7 @@ public final class Application implements Runnable {
/** /**
* get the app's event log. * get the app's event log.
*/ */
Log getEventLog() { public Log getEventLog() {
if (eventLog == null) { if (eventLog == null) {
eventLog = getLogger(eventLogName); eventLog = getLogger(eventLogName);
// set log level for event log in case it is a helma.util.Logger // set log level for event log in case it is a helma.util.Logger
@ -1479,7 +1485,7 @@ public final class Application implements Runnable {
/** /**
* get the app's access log. * get the app's access log.
*/ */
Log getAccessLog() { public Log getAccessLog() {
if (accessLog == null) { if (accessLog == null) {
accessLog = getLogger(accessLogName); accessLog = getLogger(accessLogName);
} }
@ -1735,28 +1741,6 @@ public final class Application implements Runnable {
return Collections.unmodifiableList(repositories); return Collections.unmodifiableList(repositories);
} }
/**
* Set the code resource currently being evaluated/compiled. This is used
* to set the proper parent repository when a new repository is added
* via app.addRepository().
*
* @param resource the resource being currently evaluated/compiled
*/
public void setCurrentCodeResource(Resource resource) {
currentCodeResource = resource;
}
/**
* Set the code resource currently being evaluated/compiled. This is used
* to set the proper parent repository when a new repository is added
* via app.addRepository().
* @return the resource being currently evaluated/compiled
*/
public Resource getCurrentCodeResource() {
return currentCodeResource;
}
/** /**
* Return the directory of the Helma server * Return the directory of the Helma server
*/ */

View file

@ -38,7 +38,7 @@ import org.apache.commons.logging.LogFactory;
* application specific functionality. * application specific functionality.
*/ */
public class ApplicationBean implements Serializable { public class ApplicationBean implements Serializable {
Application app; transient Application app;
WrappedMap properties = null; WrappedMap properties = null;
/** /**
@ -137,33 +137,27 @@ public class ApplicationBean implements Serializable {
* *
* @param obj the repository, relative or absolute path to the library. * @param obj the repository, relative or absolute path to the library.
*/ */
public void addRepository(Object obj) { public synchronized void addRepository(Object obj) {
Resource current = app.getCurrentCodeResource();
Repository parent = current == null ?
null : current.getRepository().getRootRepository();
Repository rep; Repository rep;
if (obj instanceof String) { if (obj instanceof String) {
String path = (String) obj; String path = (String) obj;
File file = new File(path).getAbsoluteFile(); File file = findResource(null, path);
if (!file.exists()) { if (!file.exists()) {
file = new File(path + ".zip").getAbsoluteFile(); file = findResource(app.hopHome, path);
} }
if (!file.exists()) { if (!file.exists()) {
file = new File(path + ".js").getAbsoluteFile(); throw new RuntimeException("Repository path does not exist: " + file);
}
if (!file.exists()) {
throw new RuntimeException("Repository path does not exist: " + obj);
} }
if (file.isDirectory()) { if (file.isDirectory()) {
rep = new FileRepository(file, parent); rep = new FileRepository(file);
} else if (file.isFile()) { } else if (file.isFile()) {
if (file.getName().endsWith(".zip")) { if (file.getName().endsWith(".zip")) {
rep = new ZipRepository(file, parent); rep = new ZipRepository(file);
} else { } else {
rep = new SingleFileRepository(file, parent); rep = new SingleFileRepository(file);
} }
} else { } else {
throw new RuntimeException("Unrecognized file type in addRepository: " + obj); throw new RuntimeException("Unsupported file type in addRepository: " + file);
} }
} else if (obj instanceof Repository) { } else if (obj instanceof Repository) {
rep = (Repository) obj; rep = (Repository) obj;
@ -178,6 +172,23 @@ public class ApplicationBean implements Serializable {
} }
} }
/**
* Helper method to resolve a repository path.
* @param parent the parent file
* @param path the repository path
* @return our best guess of what the file may be
*/
private File findResource(File parent, String path) {
File file = new File(parent, path).getAbsoluteFile();
if (!file.exists()) {
file = new File(parent, path + ".zip").getAbsoluteFile();
}
if (!file.exists()) {
file = new File(parent, path + ".js").getAbsoluteFile();
}
return file;
}
/** /**
* Get the app's classloader * Get the app's classloader
* @return the app's classloader * @return the app's classloader

View file

@ -25,6 +25,7 @@ import java.util.*;
import org.apache.xmlrpc.XmlRpcRequestProcessor; import org.apache.xmlrpc.XmlRpcRequestProcessor;
import org.apache.xmlrpc.XmlRpcServerRequest; import org.apache.xmlrpc.XmlRpcServerRequest;
import org.apache.commons.logging.Log;
/** /**
* This class does the work for incoming requests. It holds a transactor thread * This class does the work for incoming requests. It holds a transactor thread
@ -79,6 +80,10 @@ public final class RequestEvaluator implements Runnable {
// the exception thrown by the evaluator, if any. // the exception thrown by the evaluator, if any.
private volatile Exception exception; private volatile Exception exception;
// For numbering threads.
private int threadId;
/** /**
* Create a new RequestEvaluator for this application. * Create a new RequestEvaluator for this application.
* @param app the application * @param app the application
@ -155,6 +160,12 @@ public final class RequestEvaluator implements Runnable {
// request path object // request path object
RequestPath requestPath = new RequestPath(app); RequestPath requestPath = new RequestPath(app);
String txname = req.getMethod().toLowerCase() + ":" + req.getPath();
Log eventLog = app.getEventLog();
if (eventLog.isDebugEnabled()) {
eventLog.debug(txname + " starting");
}
int tries = 0; int tries = 0;
boolean done = false; boolean done = false;
Throwable error = null; Throwable error = null;
@ -198,14 +209,14 @@ public final class RequestEvaluator implements Runnable {
throw new IllegalStateException("No function name in non-internal request "); throw new IllegalStateException("No function name in non-internal request ");
} }
// Transaction name is used for logging etc. // Update transaction name in case we're processing an error
StringBuffer txname = new StringBuffer(app.getName()); if (error != null) {
txname.append(":").append(req.getMethod().toLowerCase()).append(":"); txname = "error:" + txname;
txname.append((error == null) ? req.getPath() : "error"); }
// begin transaction // begin transaction
transactor = Transactor.getInstance(app.nmgr); transactor = Transactor.getInstance(app.nmgr);
transactor.begin(txname.toString()); transactor.begin(txname);
Object root = app.getDataRoot(); Object root = app.getDataRoot();
initGlobals(root, requestPath); initGlobals(root, requestPath);
@ -398,6 +409,7 @@ public final class RequestEvaluator implements Runnable {
ScriptingEngine.ARGS_WRAP_XMLRPC, ScriptingEngine.ARGS_WRAP_XMLRPC,
false); false);
res.writeXmlRpcResponse(result); res.writeXmlRpcResponse(result);
app.xmlrpcCount += 1;
} else { } else {
scriptingEngine.invoke(currentElement, scriptingEngine.invoke(currentElement,
actionProcessor, actionProcessor,
@ -478,7 +490,7 @@ public final class RequestEvaluator implements Runnable {
return; return;
} }
abortTransaction(); abortTransaction();
app.logError(txname + ": " + error, x); app.logError(txname + " " + error, x);
// If the transactor thread has been killed by the invoker thread we don't have to // If the transactor thread has been killed by the invoker thread we don't have to
// bother for the error message, just quit. // bother for the error message, just quit.
@ -514,7 +526,7 @@ public final class RequestEvaluator implements Runnable {
return; return;
} }
abortTransaction(); abortTransaction();
app.logError(txname + ": " + error, x); app.logError(txname + " " + error, x);
// If the transactor thread has been killed by the invoker thread we don't have to // If the transactor thread has been killed by the invoker thread we don't have to
// bother for the error message, just quit. // bother for the error message, just quit.
@ -598,9 +610,7 @@ public final class RequestEvaluator implements Runnable {
done = false; done = false;
error = x; error = x;
Transactor tx = Transactor.getInstance(); app.logError(txname + " " + error, x);
String txname = tx == null ? "no-txn" : tx.getTransactionName();
app.logError(txname + ": " + error, x);
if (req.isXmlRpc()) { if (req.isXmlRpc()) {
// if it's an XML-RPC exception immediately generate error response // if it's an XML-RPC exception immediately generate error response
@ -619,8 +629,9 @@ public final class RequestEvaluator implements Runnable {
} finally { } finally {
app.setCurrentRequestEvaluator(null); app.setCurrentRequestEvaluator(null);
// exit execution context // exit execution context
if (scriptingEngine != null) if (scriptingEngine != null) {
scriptingEngine.exitContext(); scriptingEngine.exitContext();
}
} }
} }
@ -667,7 +678,7 @@ public final class RequestEvaluator implements Runnable {
if ((thread == null) || !thread.isAlive()) { if ((thread == null) || !thread.isAlive()) {
// app.logEvent ("Starting Thread"); // app.logEvent ("Starting Thread");
thread = new Thread(app.threadgroup, this); thread = new Thread(app.threadgroup, this, app.getName() + "-" + (++threadId));
thread.setContextClassLoader(app.getClassLoader()); thread.setContextClassLoader(app.getClassLoader());
thread.start(); thread.start();
} else { } else {
@ -783,7 +794,7 @@ public final class RequestEvaluator implements Runnable {
// Get a reference to the res object at the time we enter // Get a reference to the res object at the time we enter
ResponseTrans localRes = res; ResponseTrans localRes = res;
if ((localRes == null) || !req.equals(this.req)) { if (localRes == null || !req.equals(this.req)) {
return null; return null;
} }

View file

@ -32,6 +32,8 @@ import java.util.*;
*/ */
public class Session implements Serializable { public class Session implements Serializable {
static final long serialVersionUID = -6149094040363012913L;
transient protected Application app; transient protected Application app;
protected String sessionId; protected String sessionId;

View file

@ -437,13 +437,8 @@ public final class Skin {
if (state == PARSE_MACRONAME && "//".equals(b.toString())) { if (state == PARSE_MACRONAME && "//".equals(b.toString())) {
isCommentMacro = true; isCommentMacro = true;
// search macro end tag // just continue parsing the macro as this is the only way
while (i < length - 1 && // to correctly catch embedded macros - see bug 588
(source[i] != '%' || source[i + 1] != '>')) {
i++;
}
state = PARSE_DONE;
break loop;
} }
break; break;
@ -639,7 +634,7 @@ public final class Skin {
} }
if ((sandbox != null) && !sandbox.contains(name)) { if ((sandbox != null) && !sandbox.contains(name)) {
throw new RuntimeException("Macro " + name + " not allowed in sandbox"); throw new MacroException("Macro not allowed in sandbox: " + name);
} }
Object handler = null; Object handler = null;
@ -705,7 +700,7 @@ public final class Skin {
buffer.setLength(bufLength); buffer.setLength(bufLength);
} }
} else if (standardParams.verboseFailmode(handler, engine)) { } else if (standardParams.verboseFailmode(handler, engine)) {
throw new UnhandledMacroException(name); throw new MacroException("Unhandled macro: " + name);
} }
} else { } else {
value = engine.getProperty(handler, propName); value = engine.getProperty(handler, propName);
@ -713,7 +708,7 @@ public final class Skin {
return filter(value, cx); return filter(value, cx);
} }
} else if (standardParams.verboseFailmode(handler, engine)) { } else if (standardParams.verboseFailmode(handler, engine)) {
throw new UnhandledMacroException(name); throw new MacroException("Unhandled macro: " + name);
} }
return filter(null, cx); return filter(null, cx);
} }
@ -786,8 +781,8 @@ public final class Skin {
throw concur; throw concur;
} catch (TimeoutException timeout) { } catch (TimeoutException timeout) {
throw timeout; throw timeout;
} catch (UnhandledMacroException unhandled) { } catch (MacroException mx) {
String msg = "Unhandled Macro: " + unhandled.getMessage(); String msg = mx.getMessage();
cx.reval.getResponse().write(" [" + msg + "] "); cx.reval.getResponse().write(" [" + msg + "] ");
app.logError(msg); app.logError(msg);
} catch (Exception x) { } catch (Exception x) {
@ -816,9 +811,9 @@ public final class Skin {
throws Exception { throws Exception {
if (name == null) { if (name == null) {
throw new RuntimeException("Empty macro filter"); throw new MacroException("Empty macro filter");
} else if (sandbox != null && !sandbox.contains(name)) { } else if (sandbox != null && !sandbox.contains(name)) {
throw new RuntimeException("Macro " + name + " not allowed in sandbox"); throw new MacroException("Macro not allowed in sandbox: " + name);
} }
Object handlerObject = null; Object handlerObject = null;
@ -840,7 +835,7 @@ public final class Skin {
return filter(retval, cx); return filter(retval, cx);
} else { } else {
throw new RuntimeException("Undefined Filter " + name); throw new MacroException("Undefined macro filter: " + name);
} }
} }
@ -1103,9 +1098,9 @@ public final class Skin {
// limiting to 50 passes to avoid infinite loops // limiting to 50 passes to avoid infinite loops
int maxloop = 50; int maxloop = 50;
while (obj != null && maxloop-- > 0) { while (obj != null && maxloop-- > 0) {
Prototype proto = app.getPrototype(obj); String protoName = app.getPrototypeName(obj);
if ((proto != null) && proto.isInstanceOf(handlerName)) { if (handlerName.equalsIgnoreCase(protoName)) {
if (handlerCache != null) if (handlerCache != null)
handlerCache.put(handlerName, obj); handlerCache.put(handlerName, obj);
return obj; return obj;
@ -1126,12 +1121,13 @@ public final class Skin {
} }
/** /**
* Exception type for unhandled macros * Exception type for unhandled, forbidden or failed macros
*/ */
class UnhandledMacroException extends Exception { class MacroException extends Exception {
UnhandledMacroException(String name) { MacroException(String message) {
super(name); super(message);
} }
} }
} }

View file

@ -101,7 +101,7 @@ public final class TypeManager {
* Run through application's prototype directories and create prototypes, but don't * Run through application's prototype directories and create prototypes, but don't
* compile or evaluate any scripts. * compile or evaluate any scripts.
*/ */
public void createPrototypes() throws IOException { public synchronized void createPrototypes() throws IOException {
// create standard prototypes. // create standard prototypes.
for (int i = 0; i < standardTypes.length; i++) { for (int i = 0; i < standardTypes.length; i++) {
createPrototype(standardTypes[i], null); createPrototype(standardTypes[i], null);
@ -126,7 +126,7 @@ public final class TypeManager {
lastCheck = System.currentTimeMillis(); lastCheck = System.currentTimeMillis();
} }
protected void checkRepository(Repository repository, boolean update) throws IOException { protected synchronized void checkRepository(Repository repository, boolean update) throws IOException {
Repository[] list = repository.getRepositories(); Repository[] list = repository.getRepositories();
for (int i = 0; i < list.length; i++) { for (int i = 0; i < list.length; i++) {
@ -183,7 +183,7 @@ public final class TypeManager {
* Run through application's prototype sources and check if * Run through application's prototype sources and check if
* there are any prototypes to be created. * there are any prototypes to be created.
*/ */
private void checkRepositories() throws IOException { private synchronized void checkRepositories() throws IOException {
List list = app.getRepositories(); List list = app.getRepositories();
// walk through repositories and check if any of them have changed. // walk through repositories and check if any of them have changed.
@ -197,12 +197,21 @@ public final class TypeManager {
} }
} }
boolean debug = "true".equalsIgnoreCase(app.getProperty("helma.debugTypeManager"));
if (debug) {
System.err.println("Starting CHECK loop in " + Thread.currentThread());
}
// loop through prototypes and check if type.properties needs updates // loop through prototypes and check if type.properties needs updates
// it's important that we do this _after_ potentially new prototypes // it's important that we do this _after_ potentially new prototypes
// have been created in the previous loop. // have been created in the previous loop.
for (Iterator i = prototypes.values().iterator(); i.hasNext();) { for (Iterator i = prototypes.values().iterator(); i.hasNext();) {
Prototype proto = (Prototype) i.next(); Prototype proto = (Prototype) i.next();
if (debug) {
System.err.println("CHECK: " + proto.getName() + " in " + Thread.currentThread());
}
// update prototype's type mapping // update prototype's type mapping
DbMapping dbmap = proto.getDbMapping(); DbMapping dbmap = proto.getDbMapping();
@ -216,6 +225,9 @@ public final class TypeManager {
dbmap.update(); dbmap.update();
} }
} }
if (debug) {
System.err.println("Finished CHECK in " + Thread.currentThread());
}
} }
private boolean isValidTypeName(String str) { private boolean isValidTypeName(String str) {
@ -263,14 +275,14 @@ public final class TypeManager {
* *
* @return a collection containing the prototypes * @return a collection containing the prototypes
*/ */
public Collection getPrototypes() { public synchronized Collection getPrototypes() {
return Collections.unmodifiableCollection(prototypes.values()); return Collections.unmodifiableCollection(prototypes.values());
} }
/** /**
* Get a prototype defined for this application * Get a prototype defined for this application
*/ */
public Prototype getPrototype(String typename) { public synchronized Prototype getPrototype(String typename) {
if (typename == null) { if (typename == null) {
return null; return null;
} }
@ -284,12 +296,14 @@ public final class TypeManager {
* @param repository the first prototype source * @param repository the first prototype source
* @return the newly created prototype * @return the newly created prototype
*/ */
public Prototype createPrototype(String typename, Repository repository) { public synchronized Prototype createPrototype(String typename, Repository repository) {
if ("true".equalsIgnoreCase(app.getProperty("helma.debugTypeManager"))) {
System.err.println("CREATE: " + typename + " from " + repository + " in " + Thread.currentThread());
// Thread.dumpStack();
}
Prototype proto = new Prototype(typename, repository, app); Prototype proto = new Prototype(typename, repository, app);
// put the prototype into our map // put the prototype into our map
prototypes.put(proto.getLowerCaseName(), proto); prototypes.put(proto.getLowerCaseName(), proto);
return proto; return proto;
} }

View file

@ -281,6 +281,14 @@ public class ApplicationManager implements XmlRpcHandler {
return server.getLogger(); return server.getLogger();
} }
private String findResource(String path) {
File file = new File(path);
if (!file.isAbsolute() && !file.exists()) {
file = new File(server.getHopHome(), path);
}
return file.getAbsolutePath();
}
/** /**
* Inner class that describes an application and its start settings. * Inner class that describes an application and its start settings.
*/ */
@ -350,7 +358,7 @@ public class ApplicationManager implements XmlRpcHandler {
ignoreDirs = conf.getProperty("ignore"); ignoreDirs = conf.getProperty("ignore");
// read and configure app repositories // read and configure app repositories
ArrayList<Repository> repositoryList = new ArrayList<Repository>(); ArrayList repositoryList = new ArrayList();
Class[] parameters = { String.class }; Class[] parameters = { String.class };
for (int i = 0; true; i++) { for (int i = 0; true; i++) {
String repositoryArgs = conf.getProperty("repository." + i); String repositoryArgs = conf.getProperty("repository." + i);
@ -362,10 +370,13 @@ public class ApplicationManager implements XmlRpcHandler {
if (repositoryImpl == null) { if (repositoryImpl == null) {
// implementation not set manually, have to guess it // implementation not set manually, have to guess it
if (repositoryArgs.endsWith(".zip")) { if (repositoryArgs.endsWith(".zip")) {
repositoryArgs = findResource(repositoryArgs);
repositoryImpl = "helma.framework.repository.ZipRepository"; repositoryImpl = "helma.framework.repository.ZipRepository";
} else if (repositoryArgs.endsWith(".js")) { } else if (repositoryArgs.endsWith(".js")) {
repositoryArgs = findResource(repositoryArgs);
repositoryImpl = "helma.framework.repository.SingleFileRepository"; repositoryImpl = "helma.framework.repository.SingleFileRepository";
} else { } else {
repositoryArgs = findResource(repositoryArgs);
repositoryImpl = "helma.framework.repository.FileRepository"; repositoryImpl = "helma.framework.repository.FileRepository";
} }
} }
@ -373,7 +384,7 @@ public class ApplicationManager implements XmlRpcHandler {
try { try {
Repository newRepository = (Repository) Class.forName(repositoryImpl) Repository newRepository = (Repository) Class.forName(repositoryImpl)
.getConstructor(parameters) .getConstructor(parameters)
.newInstance(repositoryArgs); .newInstance(new Object[] {repositoryArgs});
repositoryList.add(newRepository); repositoryList.add(newRepository);
} catch (Exception ex) { } catch (Exception ex) {
getLogger().error("Adding repository " + repositoryArgs + " failed. " + getLogger().error("Adding repository " + repositoryArgs + " failed. " +
@ -397,7 +408,7 @@ public class ApplicationManager implements XmlRpcHandler {
new File(server.getAppsHome(), appName))); new File(server.getAppsHome(), appName)));
} }
repositories = new Repository[repositoryList.size()]; repositories = new Repository[repositoryList.size()];
repositories = repositoryList.toArray(repositories); repositories = (Repository[]) repositoryList.toArray(repositories);
} }

View file

@ -32,10 +32,6 @@ import java.io.*;
import java.rmi.registry.*; import java.rmi.registry.*;
import java.rmi.server.*; import java.rmi.server.*;
import java.util.*; import java.util.*;
import java.net.Socket;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.net.InetSocketAddress;
import helma.util.ResourceProperties; import helma.util.ResourceProperties;
@ -44,7 +40,7 @@ import helma.util.ResourceProperties;
*/ */
public class Server implements Runnable { public class Server implements Runnable {
// version string // version string
public static final String version = "1.6.2 (__builddate__)"; public static final String version = "1.6.3 (__builddate__)";
// static server instance // static server instance
private static Server server; private static Server server;
@ -109,7 +105,9 @@ public class Server implements Runnable {
// create system properties // create system properties
sysProps = new ResourceProperties(); sysProps = new ResourceProperties();
sysProps.addResource(new FileResource(config.getPropFile())); if (config.hasPropFile()) {
sysProps.addResource(new FileResource(config.getPropFile()));
}
} }
@ -304,13 +302,6 @@ public class Server implements Runnable {
if (!config.hasHomeDir()) { if (!config.hasHomeDir()) {
throw new Exception ("couldn't determine helma directory"); throw new Exception ("couldn't determine helma directory");
} }
// try to transform hopHome directory to its canonical representation
try {
config.setHomeDir(config.getHomeDir().getCanonicalFile());
} catch (IOException iox) {
config.setHomeDir(config.getHomeDir().getAbsoluteFile());
}
} }

View file

@ -93,7 +93,7 @@ public class ServerConfig {
} }
public void setPropFile(File propFile) { public void setPropFile(File propFile) {
this.propFile = propFile; this.propFile = propFile == null ? null : propFile.getAbsoluteFile();
} }
public File getHomeDir() { public File getHomeDir() {
@ -101,6 +101,6 @@ public class ServerConfig {
} }
public void setHomeDir(File homeDir) { public void setHomeDir(File homeDir) {
this.homeDir = homeDir; this.homeDir = homeDir == null ? null : homeDir.getAbsoluteFile();
} }
} }

View file

@ -19,6 +19,7 @@ package helma.objectmodel;
import helma.framework.IPathElement; import helma.framework.IPathElement;
import helma.objectmodel.db.DbMapping; import helma.objectmodel.db.DbMapping;
import helma.objectmodel.db.Relation; import helma.objectmodel.db.Relation;
import helma.objectmodel.db.Node;
import helma.util.*; import helma.util.*;
import java.io.*; import java.io.*;
import java.util.Date; import java.util.Date;
@ -588,7 +589,7 @@ public class TransientNode implements INode, Serializable {
} }
private Property makeVirtualNode(String propname, Relation rel) { private Property makeVirtualNode(String propname, Relation rel) {
INode node = new helma.objectmodel.db.Node(rel.getPropName(), rel.getPrototype(), INode node = new Node(rel.getPropName(), rel.getPrototype(),
dbmap.getWrappedNodeManager()); dbmap.getWrappedNodeManager());
// node.setState (TRANSIENT); // node.setState (TRANSIENT);

View file

@ -124,7 +124,10 @@ public final class DbMapping {
HashSet dependentMappings = new HashSet(); HashSet dependentMappings = new HashSet();
// does this DbMapping describe a virtual node (collection, mountpoint, groupnode)? // does this DbMapping describe a virtual node (collection, mountpoint, groupnode)?
private boolean virtual = false; private boolean isVirtual = false;
// does this Dbmapping describe a group node?
private boolean isGroup = false;
/** /**
* Create an internal DbMapping used for "virtual" mappings aka collections, mountpoints etc. * Create an internal DbMapping used for "virtual" mappings aka collections, mountpoints etc.
@ -132,7 +135,7 @@ public final class DbMapping {
public DbMapping(Application app, String parentTypeName) { public DbMapping(Application app, String parentTypeName) {
this(app, parentTypeName, null); this(app, parentTypeName, null);
// DbMappings created with this constructor always define virtual nodes // DbMappings created with this constructor always define virtual nodes
virtual = true; isVirtual = true;
if (parentTypeName != null) { if (parentTypeName != null) {
parentMapping = app.getDbMapping(parentTypeName); parentMapping = app.getDbMapping(parentTypeName);
if (parentMapping == null) { if (parentMapping == null) {
@ -311,9 +314,6 @@ public final class DbMapping {
} }
rel.update(dbField, props); rel.update(dbField, props);
// store relation with lower case property name
// (ResourceProperties now preserve key capitalization!)
p2d.put(propName.toLowerCase(), rel); p2d.put(propName.toLowerCase(), rel);
if ((rel.columnName != null) && rel.isPrimitiveOrReference()) { if ((rel.columnName != null) && rel.isPrimitiveOrReference()) {
@ -756,9 +756,9 @@ public final class DbMapping {
* db-mapping with the right relations to create the group-by nodes * db-mapping with the right relations to create the group-by nodes
*/ */
public synchronized DbMapping getGroupbyMapping() { public synchronized DbMapping getGroupbyMapping() {
if ((subRelation == null) && (parentMapping != null)) { if ((subRelation == null) && (parentMapping != null)) {
return parentMapping.getGroupbyMapping(); return parentMapping.getGroupbyMapping();
} else if (subRelation.groupby == null) { } else if (subRelation == null || subRelation.groupby == null) {
return null; return null;
} else if (groupbyMapping == null) { } else if (groupbyMapping == null) {
initGroupbyMapping(); initGroupbyMapping();
@ -774,6 +774,7 @@ public final class DbMapping {
// if a prototype is defined for groupby nodes, use that // if a prototype is defined for groupby nodes, use that
// if mapping doesn' exist or isn't defined, create a new (anonymous internal) one // if mapping doesn' exist or isn't defined, create a new (anonymous internal) one
groupbyMapping = new DbMapping(app, subRelation.groupbyPrototype); groupbyMapping = new DbMapping(app, subRelation.groupbyPrototype);
groupbyMapping.isGroup = true;
// set subnode and property relations // set subnode and property relations
groupbyMapping.subRelation = subRelation.getGroupbySubnodeRelation(); groupbyMapping.subRelation = subRelation.getGroupbySubnodeRelation();
@ -1547,7 +1548,7 @@ public final class DbMapping {
* a utility method to escape single quotes used for inserting * a utility method to escape single quotes used for inserting
* string-values into relational databases. * string-values into relational databases.
* Searches for "'" characters and escapes them by duplicating them (= "''") * Searches for "'" characters and escapes them by duplicating them (= "''")
* @param str the string to escape * @param value the string to escape
* @return the escaped string * @return the escaped string
*/ */
static String escapeString(Object value) { static String escapeString(Object value) {
@ -1574,7 +1575,7 @@ public final class DbMapping {
/** /**
* Utility method to check whether the argument is a number literal. * Utility method to check whether the argument is a number literal.
* @param str a string representing a number literal * @param value a string representing a number literal
* @return the argument, if it conforms to the number literal syntax * @return the argument, if it conforms to the number literal syntax
* @throws IllegalArgumentException if the argument does not represent a number * @throws IllegalArgumentException if the argument does not represent a number
*/ */
@ -1596,6 +1597,14 @@ public final class DbMapping {
* @return true if this instance describes a virtual node. * @return true if this instance describes a virtual node.
*/ */
public boolean isVirtual() { public boolean isVirtual() {
return virtual; return isVirtual;
}
/**
* Find if this DbMapping describes a group node.
* @return true if this instance describes a group node.
*/
public boolean isGroup() {
return isGroup;
} }
} }

View file

@ -869,26 +869,10 @@ public final class Node implements INode, Serializable {
loadNodes(); loadNodes();
// check if this node has a group-by subnode-relation // check if this node has a group-by subnode-relation
if (dbmap != null) { INode groupbyNode = getGroupbySubnode(node, true);
Relation srel = dbmap.getSubnodeRelation(); if (groupbyNode != null) {
groupbyNode.addNode(node);
if ((srel != null) && (srel.groupby != null)) { return node;
Relation groupbyRel = srel.otherType.columnNameToRelation(srel.groupby);
String groupbyProp = (groupbyRel != null) ? groupbyRel.propName
: srel.groupby;
String groupbyValue = node.getString(groupbyProp);
INode groupbyNode = (INode) getChildElement(groupbyValue);
// if group-by node doesn't exist, we'll create it
if (groupbyNode == null) {
groupbyNode = getGroupbySubnode(groupbyValue, true);
} else {
groupbyNode.setDbMapping(dbmap.getGroupbyMapping());
}
groupbyNode.addNode(node);
return node;
}
} }
NodeHandle nhandle = node.getHandle(); NodeHandle nhandle = node.getHandle();
@ -1198,6 +1182,38 @@ public final class Node implements INode, Serializable {
return retval; return retval;
} }
protected Node getGroupbySubnode(Node node, boolean create) {
if (node.dbmap != null && node.dbmap.isGroup()) {
return null;
}
if (dbmap != null) {
Relation srel = dbmap.getSubnodeRelation();
if ((srel != null) && (srel.groupby != null)) {
Relation groupbyRel = srel.otherType.columnNameToRelation(srel.groupby);
String groupbyProp = (groupbyRel != null) ? groupbyRel.propName
: srel.groupby;
String groupbyValue = node.getString(groupbyProp);
Node groupbyNode = (Node) getChildElement(groupbyValue);
// if group-by node doesn't exist, we'll create it
if (groupbyNode == null) {
groupbyNode = getGroupbySubnode(groupbyValue, create);
// mark subnodes as changed as we have a new group node
if (create && groupbyNode != null) {
Transactor.getInstance().visitParentNode(this);
}
} else {
groupbyNode.setDbMapping(dbmap.getGroupbyMapping());
}
return groupbyNode;
}
}
return null;
}
/** /**
* *
* *
@ -1211,10 +1227,7 @@ public final class Node implements INode, Serializable {
throw new IllegalArgumentException("Can't create group by null"); throw new IllegalArgumentException("Can't create group by null");
} }
if (state == TRANSIENT) { boolean persistent = state != TRANSIENT;
throw new RuntimeException("Can't add grouped child on transient node. "+
"Make parent persistent before adding grouped nodes.");
}
loadNodes(); loadNodes();
@ -1228,34 +1241,44 @@ public final class Node implements INode, Serializable {
boolean relational = groupbyMapping.getSubnodeMapping().isRelational(); boolean relational = groupbyMapping.getSubnodeMapping().isRelational();
if (relational || create) { if (relational || create) {
Node node = relational ? new Node(this, sid, nmgr, null) Node node;
: new Node(sid, null, nmgr); if (relational && persistent) {
node = new Node(this, sid, nmgr, null);
} else {
node = new Node(sid, null, nmgr);
node.setParent(this);
}
// set "groupname" property to value of groupby field // set "groupname" property to value of groupby field
node.setString("groupname", sid); node.setString("groupname", sid);
// Set the dbmapping on the group node
node.setDbMapping(groupbyMapping); node.setDbMapping(groupbyMapping);
node.setPrototype(groupbyMapping.getTypeName());
if (!relational) { // if we're relational and persistent, make new node persistable
// if we're not transient, make new node persistable if (!relational && persistent) {
if (state != TRANSIENT) { node.makePersistable();
node.makePersistable(); node.checkWriteLock();
node.checkWriteLock(); }
}
subnodes.add(node.getHandle()); // if we created a new node, check if we need to add it to subnodes
if (create) {
NodeHandle handle = node.getHandle();
if (!subnodes.contains(handle))
subnodes.add(handle);
} }
// Set the dbmapping on the group node
node.setPrototype(groupbyMapping.getTypeName());
// If we created the group node, we register it with the // If we created the group node, we register it with the
// nodemanager. Otherwise, we just evict whatever was there before // nodemanager. Otherwise, we just evict whatever was there before
if (create) { if (persistent) {
// register group node with transactor if (create) {
Transactor tx = Transactor.getInstanceOrFail(); // register group node with transactor
tx.visitCleanNode(node); Transactor tx = Transactor.getInstanceOrFail();
nmgr.registerNode(node); tx.visitCleanNode(node);
} else { nmgr.registerNode(node);
nmgr.evictKey(node.getKey()); } else {
nmgr.evictKey(node.getKey());
}
} }
return node; return node;
@ -1299,6 +1322,13 @@ public final class Node implements INode, Serializable {
* {@link #removeNode(INode)}. * {@link #removeNode(INode)}.
*/ */
protected void releaseNode(Node node) { protected void releaseNode(Node node) {
Node groupNode = getGroupbySubnode(node, false);
if (groupNode != null) {
groupNode.releaseNode(node);
return;
}
INode parent = node.getParent(); INode parent = node.getParent();
checkWriteLock(); checkWriteLock();
@ -1314,7 +1344,9 @@ public final class Node implements INode, Serializable {
synchronized (subnodes) { synchronized (subnodes) {
removed = subnodes.remove(node.getHandle()); removed = subnodes.remove(node.getHandle());
} }
if (removed) { if (dbmap != null && dbmap.isGroup() && subnodes.size() == 0) {
remove();
} else if (removed) {
registerSubnodeChange(); registerSubnodeChange();
} }
} }
@ -1341,6 +1373,12 @@ public final class Node implements INode, Serializable {
nmgr.evictKey(new SyntheticKey(getKey(), prop)); nmgr.evictKey(new SyntheticKey(getKey(), prop));
} }
} }
} else if (prel.groupby != null) {
String prop = node.getString("groupname");
if (prop != null && state != TRANSIENT) {
nmgr.evictKey(new SyntheticKey(getKey(), prop));
}
} }
// TODO: We should unset constraints to actually remove subnodes here, // TODO: We should unset constraints to actually remove subnodes here,
// but omit it by convention and to keep backwards compatible. // but omit it by convention and to keep backwards compatible.
@ -2443,7 +2481,7 @@ public final class Node implements INode, Serializable {
lastmodified = System.currentTimeMillis(); lastmodified = System.currentTimeMillis();
if (state == CLEAN) { if (state == CLEAN && isPersistableProperty(propname)) {
markAs(MODIFIED); markAs(MODIFIED);
} }
} else if (dbmap != null) { } else if (dbmap != null) {

View file

@ -381,7 +381,7 @@ public class OrderedSubnodeList extends SubnodeList {
return 0; return 0;
} }
public List getOrderedView (String order) { public SubnodeList getOrderedView (String order) {
if (origin != null) { if (origin != null) {
return origin.getOrderedView(order); return origin.getOrderedView(order);
} else { } else {

View file

@ -104,7 +104,7 @@ public class SubnodeList extends ArrayList {
} }
} }
public List getOrderedView (String order) { public SubnodeList getOrderedView (String order) {
String key = order.trim().toLowerCase(); String key = order.trim().toLowerCase();
// long start = System.currentTimeMillis(); // long start = System.currentTimeMillis();
if (views == null) { if (views == null) {

View file

@ -24,6 +24,8 @@ import java.sql.Statement;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.*; import java.util.*;
import org.apache.commons.logging.Log;
/** /**
* A subclass of thread that keeps track of changed nodes and triggers * A subclass of thread that keeps track of changed nodes and triggers
* changes in the database when a transaction is commited. * changes in the database when a transaction is commited.
@ -34,13 +36,13 @@ public class Transactor {
NodeManager nmgr; NodeManager nmgr;
// List of nodes to be updated // List of nodes to be updated
private HashMap dirtyNodes; private Map dirtyNodes;
// List of visited clean nodes // List of visited clean nodes
private HashMap cleanNodes; private Map cleanNodes;
// List of nodes whose child index has been modified // List of nodes whose child index has been modified
private HashSet parentNodes; private Set parentNodes;
// Is a transaction in progress? // Is a transaction in progress?
private volatile boolean active; private volatile boolean active;
@ -50,10 +52,10 @@ public class Transactor {
protected ITransaction txn; protected ITransaction txn;
// Transactions for SQL data sources // Transactions for SQL data sources
private HashMap sqlConnections; private Map sqlConnections;
// Set of SQL connections that already have been verified // Set of SQL connections that already have been verified
private HashSet testedConnections; private Map testedConnections;
// when did the current transaction start? // when did the current transaction start?
private long tstart; private long tstart;
@ -64,7 +66,7 @@ public class Transactor {
// the thread we're associated with // the thread we're associated with
private Thread thread; private Thread thread;
private static final ThreadLocal <Transactor> txtor = new ThreadLocal <Transactor> (); private static final ThreadLocal txtor = new ThreadLocal();
/** /**
* Creates a new Transactor object. * Creates a new Transactor object.
@ -75,12 +77,12 @@ public class Transactor {
this.thread = Thread.currentThread(); this.thread = Thread.currentThread();
this.nmgr = nmgr; this.nmgr = nmgr;
dirtyNodes = new HashMap(); dirtyNodes = new LinkedHashMap();
cleanNodes = new HashMap(); cleanNodes = new HashMap();
parentNodes = new HashSet(); parentNodes = new HashSet();
sqlConnections = new HashMap(); sqlConnections = new HashMap();
testedConnections = new HashSet(); testedConnections = new HashMap();
active = false; active = false;
killed = false; killed = false;
} }
@ -90,7 +92,7 @@ public class Transactor {
* @return the transactor associated with the current thread * @return the transactor associated with the current thread
*/ */
public static Transactor getInstance() { public static Transactor getInstance() {
return txtor.get(); return (Transactor) txtor.get();
} }
/** /**
@ -99,7 +101,7 @@ public class Transactor {
* @throws IllegalStateException if no transactor is associated with the current thread * @throws IllegalStateException if no transactor is associated with the current thread
*/ */
public static Transactor getInstanceOrFail() throws IllegalStateException { public static Transactor getInstanceOrFail() throws IllegalStateException {
Transactor tx = txtor.get(); Transactor tx = (Transactor) txtor.get();
if (tx == null) if (tx == null)
throw new IllegalStateException("Operation requires a Transactor, " + throw new IllegalStateException("Operation requires a Transactor, " +
"but current thread does not have one."); "but current thread does not have one.");
@ -112,7 +114,7 @@ public class Transactor {
* @return the transactor associated with the current thread * @return the transactor associated with the current thread
*/ */
public static Transactor getInstance(NodeManager nmgr) { public static Transactor getInstance(NodeManager nmgr) {
Transactor t = txtor.get(); Transactor t = (Transactor) txtor.get();
if (t == null) { if (t == null) {
t = new Transactor(nmgr); t = new Transactor(nmgr);
txtor.set(t); txtor.set(t);
@ -240,7 +242,7 @@ public class Transactor {
public void registerConnection(DbSource src, Connection con) { public void registerConnection(DbSource src, Connection con) {
sqlConnections.put(src, con); sqlConnections.put(src, con);
// we assume a freshly created connection is ok. // we assume a freshly created connection is ok.
testedConnections.add(src); testedConnections.put(src, new Long(System.currentTimeMillis()));
} }
/** /**
@ -250,13 +252,15 @@ public class Transactor {
*/ */
public Connection getConnection(DbSource src) { public Connection getConnection(DbSource src) {
Connection con = (Connection) sqlConnections.get(src); Connection con = (Connection) sqlConnections.get(src);
if (con != null && !testedConnections.contains(src)) { Long tested = (Long) testedConnections.get(src);
long now = System.currentTimeMillis();
if (con != null && (tested == null || now - tested.longValue() > 10000)) {
// Check if the connection is still alive by executing a simple statement. // Check if the connection is still alive by executing a simple statement.
try { try {
Statement stmt = con.createStatement(); Statement stmt = con.createStatement();
stmt.execute("SELECT 1"); stmt.execute("SELECT 1");
stmt.close(); stmt.close();
testedConnections.add(src); testedConnections.put(src, new Long(now));
} catch (SQLException sx) { } catch (SQLException sx) {
try { try {
con.close(); con.close();
@ -326,6 +330,7 @@ public class Transactor {
// the set to collect DbMappings to be marked as changed // the set to collect DbMappings to be marked as changed
HashSet dirtyDbMappings = new HashSet(); HashSet dirtyDbMappings = new HashSet();
Log eventLog = nmgr.app.getEventLog();
for (int i = 0; i < dirty.length; i++) { for (int i = 0; i < dirty.length; i++) {
Node node = (Node) dirty[i]; Node node = (Node) dirty[i];
@ -346,8 +351,10 @@ public class Transactor {
} }
inserted++; inserted++;
nmgr.app.logEvent("inserted: Node " + node.getPrototype() + "/" + if (eventLog.isDebugEnabled()) {
node.getID()); eventLog.debug("inserted node: " + node.getPrototype() + "/" +
node.getID());
}
} else if (nstate == Node.MODIFIED) { } else if (nstate == Node.MODIFIED) {
// only mark DbMapping as dirty if updateNode returns true // only mark DbMapping as dirty if updateNode returns true
if (nmgr.updateNode(nmgr.db, txn, node)) { if (nmgr.updateNode(nmgr.db, txn, node)) {
@ -363,8 +370,10 @@ public class Transactor {
} }
updated++; updated++;
nmgr.app.logEvent("updated: Node " + node.getPrototype() + "/" + if (eventLog.isDebugEnabled()) {
node.getID()); eventLog.debug("updated node: " + node.getPrototype() + "/" +
node.getID());
}
} else if (nstate == Node.DELETED) { } else if (nstate == Node.DELETED) {
nmgr.deleteNode(nmgr.db, txn, node); nmgr.deleteNode(nmgr.db, txn, node);
dirtyDbMappings.add(node.getDbMapping()); dirtyDbMappings.add(node.getDbMapping());
@ -377,6 +386,10 @@ public class Transactor {
} }
deleted++; deleted++;
if (eventLog.isDebugEnabled()) {
eventLog.debug("removed node: " + node.getPrototype() + "/" +
node.getID());
}
} }
node.clearWriteLock(); node.clearWriteLock();
@ -419,10 +432,15 @@ public class Transactor {
txn = null; txn = null;
} }
nmgr.app.logAccess(tname + " " + inserted + StringBuffer msg = new StringBuffer(tname).append(" done in ")
" inserted, " + updated + .append(now - tstart).append(" millis");
" updated, " + deleted + " deleted in " + if(inserted + updated + deleted > 0) {
(now - tstart) + " millis"); msg.append(" [+")
.append(inserted).append(", ~")
.append(updated).append(", -")
.append(deleted).append("]");
}
nmgr.app.logAccess(msg.toString());
// unset transaction name // unset transaction name
tname = null; tname = null;

View file

@ -268,7 +268,7 @@ public class XmlWriter extends OutputStreamWriter implements XmlConstants {
throws IOException { throws IOException {
Enumeration e = null; Enumeration e = null;
if (dbmode && node instanceof helma.objectmodel.db.Node) { if (dbmode && node instanceof Node) {
// a newly constructed db.Node doesn't have a propMap, // a newly constructed db.Node doesn't have a propMap,
// but returns an enumeration of all it's db-mapped properties // but returns an enumeration of all it's db-mapped properties
Hashtable props = ((Node) node).getPropMap(); Hashtable props = ((Node) node).getPropMap();
@ -392,7 +392,7 @@ public class XmlWriter extends OutputStreamWriter implements XmlConstants {
* loop through the children-array and print them as <hop:child> * loop through the children-array and print them as <hop:child>
*/ */
private void writeChildren(INode node, int level) throws IOException { private void writeChildren(INode node, int level) throws IOException {
if (dbmode && node instanceof helma.objectmodel.db.Node) { if (dbmode && node instanceof Node) {
Node dbNode = (Node) node; Node dbNode = (Node) node;
DbMapping smap = (dbNode.getDbMapping() == null) ? null DbMapping smap = (dbNode.getDbMapping() == null) ? null
: dbNode.getDbMapping() : dbNode.getDbMapping()

View file

@ -20,6 +20,7 @@ import helma.framework.core.*;
import helma.framework.repository.Resource; import helma.framework.repository.Resource;
import helma.objectmodel.*; import helma.objectmodel.*;
import helma.objectmodel.db.*; import helma.objectmodel.db.*;
import helma.objectmodel.db.Node;
import org.mozilla.javascript.*; import org.mozilla.javascript.*;
import java.lang.reflect.Method; import java.lang.reflect.Method;
@ -170,13 +171,13 @@ public class HopObject extends ScriptableObject implements Wrapper, PropertyReco
*/ */
private void checkNode() { private void checkNode() {
if (node != null && node.getState() == INode.INVALID) { if (node != null && node.getState() == INode.INVALID) {
if (node instanceof helma.objectmodel.db.Node) { if (node instanceof Node) {
NodeHandle handle = ((helma.objectmodel.db.Node) node).getHandle(); NodeHandle handle = ((Node) node).getHandle();
node = handle.getNode(core.app.getWrappedNodeManager()); node = handle.getNode(core.app.getWrappedNodeManager());
if (node == null) { if (node == null) {
// we probably have a deleted node. Replace with empty transient node // we probably have a deleted node. Replace with empty transient node
// to avoid throwing an exception. // to avoid throwing an exception.
node = new helma.objectmodel.TransientNode(); node = new TransientNode();
// throw new RuntimeException("Tried to access invalid/removed node " + handle + "."); // throw new RuntimeException("Tried to access invalid/removed node " + handle + ".");
} }
} }
@ -455,14 +456,14 @@ public class HopObject extends ScriptableObject implements Wrapper, PropertyReco
} }
private void prefetchChildren(int start, int length) { private void prefetchChildren(int start, int length) {
if (!(node instanceof helma.objectmodel.db.Node)) { if (!(node instanceof Node)) {
return; return;
} }
checkNode(); checkNode();
try { try {
((helma.objectmodel.db.Node) node).prefetchChildren(start, length); ((Node) node).prefetchChildren(start, length);
} catch (Exception x) { } catch (Exception x) {
core.app.logError("Error in HopObject.prefetchChildren: " + x, x); core.app.logError("Error in HopObject.prefetchChildren: " + x, x);
} }
@ -638,8 +639,8 @@ public class HopObject extends ScriptableObject implements Wrapper, PropertyReco
checkNode(); checkNode();
if (node instanceof helma.objectmodel.db.Node) { if (node instanceof Node) {
((helma.objectmodel.db.Node) node).persist(); ((Node) node).persist();
return node.getID(); return node.getID();
} }
return null; return null;
@ -649,19 +650,19 @@ public class HopObject extends ScriptableObject implements Wrapper, PropertyReco
* Invalidate the node itself or a subnode * Invalidate the node itself or a subnode
*/ */
public boolean jsFunction_invalidate(Object childId) { public boolean jsFunction_invalidate(Object childId) {
if (childId != null && node instanceof helma.objectmodel.db.Node) { if (childId != null && node instanceof Node) {
if (childId == Undefined.instance) { if (childId == Undefined.instance) {
if (node.getState() == INode.INVALID) { if (node.getState() == INode.INVALID) {
return true; return true;
} }
((helma.objectmodel.db.Node) node).invalidate(); ((Node) node).invalidate();
} else { } else {
checkNode(); checkNode();
((helma.objectmodel.db.Node) node).invalidateNode(childId.toString()); ((Node) node).invalidateNode(childId.toString());
} }
} }
@ -675,7 +676,7 @@ public class HopObject extends ScriptableObject implements Wrapper, PropertyReco
* @return true if the the wrapped Node has a valid database id. * @return true if the the wrapped Node has a valid database id.
*/ */
public boolean jsFunction_isPersistent() { public boolean jsFunction_isPersistent() {
if (!(node instanceof helma.objectmodel.db.Node)) { if (!(node instanceof Node)) {
return false; return false;
} }
checkNode(); checkNode();
@ -690,7 +691,7 @@ public class HopObject extends ScriptableObject implements Wrapper, PropertyReco
* @return true if the the wrapped Node is not stored in a database. * @return true if the the wrapped Node is not stored in a database.
*/ */
public boolean jsFunction_isTransient() { public boolean jsFunction_isTransient() {
if (!(node instanceof helma.objectmodel.db.Node)) { if (!(node instanceof Node)) {
return true; return true;
} }
checkNode(); checkNode();
@ -1096,10 +1097,10 @@ public class HopObject extends ScriptableObject implements Wrapper, PropertyReco
* do have a higher id than the last record loaded by this collection * do have a higher id than the last record loaded by this collection
*/ */
public int jsFunction_update() { public int jsFunction_update() {
if (!(node instanceof helma.objectmodel.db.Node)) if (!(node instanceof Node))
throw new RuntimeException ("update only callabel on persistent HopObjects"); throw new RuntimeException ("update only callabel on persistent HopObjects");
checkNode(); checkNode();
helma.objectmodel.db.Node n = (helma.objectmodel.db.Node) node; Node n = (Node) node;
return n.updateSubnodes(); return n.updateSubnodes();
} }
@ -1111,18 +1112,19 @@ public class HopObject extends ScriptableObject implements Wrapper, PropertyReco
* @return ListViewWrapper holding the information of the ordered view * @return ListViewWrapper holding the information of the ordered view
*/ */
public Object jsFunction_getOrderedView(String expr) { public Object jsFunction_getOrderedView(String expr) {
if (!(node instanceof helma.objectmodel.db.Node)) { if (!(node instanceof Node)) {
throw new RuntimeException ( throw new RuntimeException (
"getOrderedView only callable on persistent HopObjects"); "getOrderedView only callable on persistent HopObjects");
} }
helma.objectmodel.db.Node n = (helma.objectmodel.db.Node) node; Node n = (Node) node;
n.loadNodes(); n.loadNodes();
SubnodeList subnodes = n.getSubnodeList(); SubnodeList subnodes = n.getSubnodeList();
if (subnodes == null) { if (subnodes == null) {
throw new RuntimeException ( throw new RuntimeException (
"getOrderedView only callable on already existing subnode-collections"); "getOrderedView only callable on already existing subnode-collections");
} }
return new ListViewWrapper (subnodes.getOrderedView(expr), Node subnode = new Node("OrderedView", "HopObject", core.app.getWrappedNodeManager());
core, core.app.getWrappedNodeManager(), this); subnode.setSubnodes(subnodes.getOrderedView(expr));
return new HopObject("HopObject", core, subnode, core.getPrototype("HopObject"));
} }
} }

View file

@ -15,14 +15,15 @@
*/ */
package helma.scripting.rhino; package helma.scripting.rhino;
import org.mozilla.javascript.*;
import java.lang.reflect.Constructor; import java.lang.reflect.Constructor;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import helma.objectmodel.INode; import helma.objectmodel.INode;
import helma.objectmodel.db.DbMapping; import helma.objectmodel.db.DbMapping;
import helma.objectmodel.db.DbKey; import helma.objectmodel.db.DbKey;
import helma.objectmodel.db.Node;
import org.mozilla.javascript.*;
public class HopObjectCtor extends FunctionObject { public class HopObjectCtor extends FunctionObject {
@ -89,8 +90,8 @@ public class HopObjectCtor extends FunctionObject {
throw new EvaluatorException(x.toString()); throw new EvaluatorException(x.toString());
} }
} else { } else {
INode node = new helma.objectmodel.db.Node(protoname, protoname, INode node = new Node(protoname, protoname,
core.app.getWrappedNodeManager()); core.app.getWrappedNodeManager());
Scriptable proto = core.getPrototype(protoname); Scriptable proto = core.getPrototype(protoname);
HopObject hobj = new HopObject(protoname, core, node, proto); HopObject hobj = new HopObject(protoname, core, node, proto);

View file

@ -1,337 +0,0 @@
/*
* Helma License Notice
*
* The contents of this file are subject to the Helma License
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. A copy of the License is available at
* http://adele.helma.org/download/helma/license.txt
*
* Copyright 1998-2003 Helma Software. All Rights Reserved.
*
* $RCSfile$
* $Author$
* $Revision$
* $Date$
*/
package helma.scripting.rhino;
import helma.objectmodel.INode;
import helma.objectmodel.db.Key;
import helma.objectmodel.db.NodeHandle;
import helma.objectmodel.db.OrderedSubnodeList;
import helma.objectmodel.db.WrappedNodeManager;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.EvaluatorException;
import org.mozilla.javascript.FunctionObject;
import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.ScriptableObject;
import org.mozilla.javascript.Undefined;
import org.mozilla.javascript.Wrapper;
import org.mozilla.javascript.ScriptRuntime;
/**
 * A Rhino wrapper exposing a subnode {@link List} (typically an ordered view
 * obtained from an {@code OrderedSubnodeList}) to JavaScript. Read-only
 * operations (get, list, size, contains) work directly on the wrapped list,
 * while mutating and lookup-by-id operations are delegated to the
 * {@link HopObject} the view was created from.
 */
public class ListViewWrapper extends ScriptableObject implements Wrapper, Scriptable {
    // the wrapped node list; null only for the shared prototype instance
    final List list;
    final RhinoCore core;
    final WrappedNodeManager wnm;
    // the HopObject this view was created from; mutations are delegated to it
    final HopObject hObj;
    // the underlying node; re-fetched by checkNode() after invalidation
    INode node;
    // lazily created shared JS prototype carrying the jsFunction_* properties
    static ListViewWrapper listViewProto;

    /**
     * Private constructor used to create the object prototype.
     */
    private ListViewWrapper() {
        list = null;
        core = null;
        wnm = null;
        node = null;
        hObj = null;
    }

    /**
     * Create a JS wrapper around a subnode list.
     *
     * @param list the list to wrap, must not be null
     * @param core the RhinoCore of the current application
     * @param wnm the wrapped node manager used to resolve node handles
     * @param hObj the HopObject this view belongs to
     * @throws IllegalArgumentException if list is null
     */
    ListViewWrapper(List list, RhinoCore core, WrappedNodeManager wnm, HopObject hObj) {
        if (list == null) {
            throw new IllegalArgumentException("ListWrapper unable to wrap null list.");
        }
        this.core = core;
        this.list = list;
        this.wnm = wnm;
        this.hObj = hObj;
        this.node = hObj.node;
        // create and initialize the shared prototype on first use
        if (listViewProto == null) {
            listViewProto = new ListViewWrapper();
            listViewProto.init();
        }
        setPrototype(listViewProto);
    }

    /**
     * Init JS functions from methods: every declared method whose name starts
     * with "jsFunction_" is registered (minus the prefix) as a JavaScript
     * function property on this object.
     */
    void init() {
        int attributes = DONTENUM | PERMANENT;

        Method[] methods = getClass().getDeclaredMethods();
        for (int i = 0; i < methods.length; i++) {
            String methodName = methods[i].getName();

            if (methodName.startsWith("jsFunction_")) {
                // strip the "jsFunction_" prefix to get the JS-visible name
                methodName = methodName.substring(11);
                FunctionObject func = new FunctionObject(methodName, methods[i], this);
                this.defineProperty(methodName, func, attributes);
            }
        }
    }

    /**
     * Return the wrapped java.util.List.
     */
    public Object unwrap() {
        return list;
    }

    /**
     * JS get() with a generic argument: numeric arguments index into this
     * view's list, everything else falls back to the HopObject's get().
     */
    public Object jsFunction_get(Object idxObj) {
        if (idxObj instanceof Number)
            return jsFunction_get(((Number) idxObj).intValue());
        else // fallback to this View's HopObject's get-function
            return hObj.jsFunction_get(idxObj);
    }

    /**
     * JS get() by index. Returns null for out-of-bounds indexes or null
     * elements; NodeHandles are resolved to wrapped HopObjects.
     */
    public Object jsFunction_get(int idx) {
        // return null if given index is out of bounds
        // (fix: also guard negative indexes, which previously threw
        // IndexOutOfBoundsException from List.get)
        if (idx < 0 || list.size() <= idx)
            return null;
        Object obj = list.get(idx);
        // return null if indexed object is null
        if (obj == null)
            return null;

        // return HopObject in case of a NodeHandle
        if (obj instanceof NodeHandle) {
            return Context.toObject(((NodeHandle) obj).getNode(wnm), core.global);
        } else if (!(obj instanceof Scriptable)) {
            // do NOT wrap primitives - otherwise they'll be wrapped as Objects,
            // which makes them unusable for many purposes (e.g. ==)
            if (obj instanceof String ||
                    obj instanceof Number ||
                    obj instanceof Boolean) {
                return obj;
            }
            return Context.toObject(obj, core.global);
        }
        return obj;
    }

    /**
     * JS getById(), delegated to the originating HopObject.
     */
    public Object jsFunction_getById(Object id) {
        return hObj.jsFunction_getById(id);
    }

    /**
     * Prefetch child objects from (relational) database. With no arguments,
     * prefetches up to the first 1000 children.
     */
    public void jsFunction_prefetchChildren(Object startArg, Object lengthArg)
            throws Exception {
        // check if we were called with no arguments
        if (startArg == Undefined.instance && lengthArg == Undefined.instance) {
            prefetchChildren(0, 1000);
        } else {
            int start = (int) ScriptRuntime.toNumber(startArg);
            int length = (int) ScriptRuntime.toNumber(lengthArg);
            prefetchChildren(start, length);
        }
    }

    // Batch-load the node keys in [start, start+length) through the
    // persistent node; a no-op for transient nodes or empty ranges.
    private void prefetchChildren(int start, int length) {
        if (!(node instanceof helma.objectmodel.db.Node))
            return;
        checkNode();
        // clamp the range to the list's actual bounds
        start = Math.max(start, 0);
        length = Math.min(list.size() - start, length);
        if (length < 1)
            return;
        Key[] keys = new Key[length];
        for (int i = start; i < start + length; i++) {
            keys[i - start] = ((NodeHandle) list.get(i)).getKey();
        }
        try {
            ((helma.objectmodel.db.Node) node).prefetchChildren(keys);
        } catch (Exception x) {
            // log through the application logger instead of System.err,
            // consistent with HopObject.prefetchChildren
            core.app.logError("Error in ListViewWrapper.prefetchChildren: " + x, x);
        }
    }

    /**
     * JS size(): number of elements in the view, 0 for the prototype.
     */
    public int jsFunction_size() {
        if (list == null)
            return 0;
        return list.size();
    }

    /**
     * JS count(): alias for size().
     */
    public int jsFunction_count() {
        return jsFunction_size();
    }

    /**
     * JS add(), delegated to the originating HopObject.
     */
    public void jsFunction_add(Object child) {
        if (this.hObj == null)
            throw new RuntimeException("ListWrapper has no knowledge about any HopObject or collection");
        hObj.jsFunction_add(child);
    }

    /**
     * Return the full list of child objects in a JavaScript Array.
     * This is called by jsFunction_list() if called with no arguments.
     *
     * @return A JavaScript Array containing all child objects
     */
    private Scriptable list() {
        checkNode();
        ArrayList a = new ArrayList();
        for (Iterator i = list.iterator(); i.hasNext(); ) {
            NodeHandle nh = (NodeHandle) i.next();
            if (nh != null)
                a.add(Context.toObject(nh.getNode(wnm), core.global));
        }
        return Context.getCurrentContext().newArray(core.global, a.toArray());
    }

    /**
     * Return a JS array of child objects with the given start and length.
     *
     * @return A JavaScript Array containing the specified child objects
     * @throws EvaluatorException if start or length is negative
     */
    public Scriptable jsFunction_list(Object startArg, Object lengthArg) {
        if (startArg == Undefined.instance && lengthArg == Undefined.instance) {
            return list();
        }

        int start = (int) ScriptRuntime.toNumber(startArg);
        int length = (int) ScriptRuntime.toNumber(lengthArg);

        if (start < 0 || length < 0) {
            throw new EvaluatorException("Arguments must not be negative in HopObject.list(start, length)");
        }

        checkNode();
        // clamp the requested window to the list's bounds
        start = Math.max(start, 0);
        length = Math.min(list.size() - start, length);
        // warm the cache before resolving the handles one by one
        prefetchChildren(start, length);

        ArrayList a = new ArrayList();
        for (int i = start; i < start + length; i++) {
            NodeHandle nh = (NodeHandle) list.get(i);
            if (nh != null)
                a.add(Context.toObject(nh.getNode(wnm), core.global));
        }
        return Context.getCurrentContext().newArray(core.global, a.toArray());
    }

    /**
     * Remove this object from the database. Delegated to the HopObject.
     */
    public boolean jsFunction_remove(Object arg) {
        if (this.hObj == null)
            throw new RuntimeException("ListWrapper has no knowledge about any HopObject or collection");
        return hObj.jsFunction_remove(arg);
    }

    /**
     * Remove a child node from this node's collection without deleting
     * it from the database. Delegated to the HopObject.
     */
    public boolean jsFunction_removeChild(Object child) {
        if (this.hObj == null)
            throw new RuntimeException("ListWrapper has no knowledge about any HopObject or collection");
        return hObj.jsFunction_removeChild(child);
    }

    /**
     * Invalidate the node itself or a subnode. Delegated to the HopObject.
     */
    public boolean jsFunction_invalidate(Object childId) {
        if (this.hObj == null)
            throw new RuntimeException("ListWrapper has no knowledge about any HopObject or collection");
        return hObj.jsFunction_invalidate(childId);
    }

    /**
     * Check if node is contained in subnodes.
     *
     * @return the node's index in this view's list, or -1 if not contained
     */
    public int jsFunction_contains(Object obj) {
        if (obj instanceof HopObject) {
            INode n = ((HopObject) obj).node;

            if (n instanceof helma.objectmodel.db.Node)
                return list.indexOf(((helma.objectmodel.db.Node) n).getHandle());
        }
        return -1;
    }

    /**
     * This method represents the Java-Script-exposed function for updating Subnode-Collections.
     * The following conditions must be met to make a subnodecollection updateable.
     * .) the collection must be specified with collection.updateable=true
     * .) the id's of this collection must be in ascending order, meaning, that new records
     *    do have a higher id than the last record loaded by this collection
     */
    public int jsFunction_update() {
        if (!(node instanceof helma.objectmodel.db.Node))
            throw new RuntimeException("updateSubnodes only callable on persistent HopObjects");
        checkNode();
        helma.objectmodel.db.Node n = (helma.objectmodel.db.Node) node;
        return n.updateSubnodes();
    }

    /**
     * Retrieve a view having a different order from this Node's subnodelist.
     * The underlying OrderedSubnodeList will keep those views and updates them
     * if the original collection has been updated.
     *
     * @param expr the order (like sql-order using the properties instead)
     * @return ListViewWrapper holding the information of the ordered view
     */
    public Object jsFunction_getOrderedView(String expr) {
        if (!(list instanceof OrderedSubnodeList))
            throw new RuntimeException("getOrderedView only callable on persistent HopObjects");
        checkNode();
        OrderedSubnodeList osl = (OrderedSubnodeList) list;
        return new ListViewWrapper(osl.getOrderedView(expr), core, wnm, hObj);
    }

    public String toString() {
        if (list == null)
            return "[ListWrapper{}]";
        else
            return "[ListWrapper" + list.toString() + "]";
    }

    /**
     * Check if the node has been invalidated. If so, it has to be re-fetched
     * from the db via the app's node manager.
     * NOTE(review): if the node was deleted, getNode() may return null and
     * this.node becomes null — unlike HopObject.checkNode(), which substitutes
     * a transient node. Confirm callers tolerate a null node here.
     */
    private final void checkNode() {
        if (node != null && node.getState() == INode.INVALID) {
            if (node instanceof helma.objectmodel.db.Node) {
                NodeHandle handle = ((helma.objectmodel.db.Node) node).getHandle();
                node = handle.getNode(core.app.getWrappedNodeManager());
            }
        }
    }

    public String getClassName() {
        return "[ListWrapper]";
    }
}

View file

@ -131,7 +131,7 @@ public final class RhinoCore implements ScopeProvider {
wrapper = new WrapMaker(); wrapper = new WrapMaker();
wrapper.setJavaPrimitiveWrap(false); wrapper.setJavaPrimitiveWrap(false);
Context context = contextFactory.enter(); Context context = contextFactory.enterContext();
try { try {
// create global object // create global object
@ -182,7 +182,7 @@ public final class RhinoCore implements ScopeProvider {
app.logError("Cannot initialize interpreter", e); app.logError("Cannot initialize interpreter", e);
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} finally { } finally {
contextFactory.exit(); Context.exit();
isInitialized = true; isInitialized = true;
} }
} }
@ -799,9 +799,6 @@ public final class RhinoCore implements ScopeProvider {
String sourceName = code.getName(); String sourceName = code.getName();
Reader reader = null; Reader reader = null;
Resource previousCurrentResource = app.getCurrentCodeResource();
app.setCurrentCodeResource(code);
String encoding = app.getProperty("sourceCharset"); String encoding = app.getProperty("sourceCharset");
try { try {
@ -837,7 +834,6 @@ public final class RhinoCore implements ScopeProvider {
wrappercache.clear(); wrappercache.clear();
} }
} finally { } finally {
app.setCurrentCodeResource(previousCurrentResource);
if (reader != null) { if (reader != null) {
try { try {
reader.close(); reader.close();
@ -1128,7 +1124,7 @@ public final class RhinoCore implements ScopeProvider {
protected void onContextCreated(Context cx) { protected void onContextCreated(Context cx) {
cx.setWrapFactory(wrapper); cx.setWrapFactory(wrapper);
cx.setOptimizationLevel(optLevel); cx.setOptimizationLevel(optLevel);
// cx.setInstructionObserverThreshold(5000); cx.setInstructionObserverThreshold(10000);
if (cx.isValidLanguageVersion(languageVersion)) { if (cx.isValidLanguageVersion(languageVersion)) {
cx.setLanguageVersion(languageVersion); cx.setLanguageVersion(languageVersion);
} else { } else {
@ -1159,9 +1155,11 @@ public final class RhinoCore implements ScopeProvider {
* This can be used to customize {@link Context} without introducing * This can be used to customize {@link Context} without introducing
* additional subclasses. * additional subclasses.
*/ */
/* protected void observeInstructionCount(Context cx, int instructionCount) { protected void observeInstructionCount(Context cx, int instructionCount) {
if (instructionCount >= 0xfffffff) RhinoEngine engine = RhinoEngine.getRhinoEngine();
throw new EvaluatorException("Exceeded instruction count, interrupting"); if (engine != null && engine.thread != Thread.currentThread()) {
} */ throw new EvaluatorException("Request timed out");
}
}
} }
} }

View file

@ -26,6 +26,7 @@ import helma.main.Server;
import helma.objectmodel.*; import helma.objectmodel.*;
import helma.objectmodel.db.DbMapping; import helma.objectmodel.db.DbMapping;
import helma.objectmodel.db.Relation; import helma.objectmodel.db.Relation;
import helma.objectmodel.db.Node;
import helma.scripting.*; import helma.scripting.*;
import helma.scripting.rhino.debug.Tracer; import helma.scripting.rhino.debug.Tracer;
import helma.util.StringUtils; import helma.util.StringUtils;
@ -86,7 +87,7 @@ public class RhinoEngine implements ScriptingEngine {
this.reval = reval; this.reval = reval;
initRhinoCore(app); initRhinoCore(app);
context = core.contextFactory.enter(); context = core.contextFactory.enterContext();
try { try {
extensionGlobals = new HashMap(); extensionGlobals = new HashMap();
@ -113,7 +114,7 @@ public class RhinoEngine implements ScriptingEngine {
app.logError("Cannot initialize interpreter", e); app.logError("Cannot initialize interpreter", e);
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} finally { } finally {
core.contextFactory.exit (); Context.exit();
} }
} }
@ -162,7 +163,7 @@ public class RhinoEngine implements ScriptingEngine {
// (chicken and egg problem, kind of) // (chicken and egg problem, kind of)
thread = Thread.currentThread(); thread = Thread.currentThread();
global = new GlobalObject(core, app, true); global = new GlobalObject(core, app, true);
context = core.contextFactory.enter(); context = core.contextFactory.enterContext();
if (core.hasTracer) { if (core.hasTracer) {
context.setDebugger(new Tracer(getResponse()), null); context.setDebugger(new Tracer(getResponse()), null);
@ -214,7 +215,7 @@ public class RhinoEngine implements ScriptingEngine {
public synchronized void exitContext() { public synchronized void exitContext() {
// unregister the engine threadlocal // unregister the engine threadlocal
engines.set(null); engines.set(null);
core.contextFactory.exit(); Context.exit();
thread = null; thread = null;
global = null; global = null;
} }
@ -345,7 +346,7 @@ public class RhinoEngine implements ScriptingEngine {
* Let the evaluator know that the current evaluation has been * Let the evaluator know that the current evaluation has been
* aborted. * aborted.
*/ */
public void abort() { public void abort() {
// current request has been aborted. // current request has been aborted.
Thread t = thread; Thread t = thread;
// set thread to null // set thread to null
@ -528,7 +529,7 @@ public class RhinoEngine implements ScriptingEngine {
* @throws java.io.IOException * @throws java.io.IOException
*/ */
public void serialize(Object obj, OutputStream out) throws IOException { public void serialize(Object obj, OutputStream out) throws IOException {
core.contextFactory.enter(); core.contextFactory.enterContext();
engines.set(this); engines.set(this);
try { try {
// use a special ScriptableOutputStream that unwraps Wrappers // use a special ScriptableOutputStream that unwraps Wrappers
@ -536,8 +537,8 @@ public class RhinoEngine implements ScriptingEngine {
protected Object replaceObject(Object obj) throws IOException { protected Object replaceObject(Object obj) throws IOException {
if (obj instanceof HopObject) if (obj instanceof HopObject)
return new HopObjectProxy((HopObject) obj); return new HopObjectProxy((HopObject) obj);
if (obj instanceof helma.objectmodel.db.Node) if (obj instanceof Node)
return new HopObjectProxy((helma.objectmodel.db.Node) obj); return new HopObjectProxy((Node) obj);
if (obj instanceof GlobalObject) if (obj instanceof GlobalObject)
return new GlobalProxy((GlobalObject) obj); return new GlobalProxy((GlobalObject) obj);
if (obj instanceof ApplicationBean) if (obj instanceof ApplicationBean)
@ -557,7 +558,7 @@ public class RhinoEngine implements ScriptingEngine {
sout.writeObject(obj); sout.writeObject(obj);
sout.flush(); sout.flush();
} finally { } finally {
core.contextFactory.exit(); Context.exit();
} }
} }
@ -571,7 +572,7 @@ public class RhinoEngine implements ScriptingEngine {
* @throws java.io.IOException * @throws java.io.IOException
*/ */
public Object deserialize(InputStream in) throws IOException, ClassNotFoundException { public Object deserialize(InputStream in) throws IOException, ClassNotFoundException {
core.contextFactory.enter(); core.contextFactory.enterContext();
engines.set(this); engines.set(this);
try { try {
ObjectInputStream sin = new ScriptableInputStream(in, core.global) { ObjectInputStream sin = new ScriptableInputStream(in, core.global) {
@ -584,7 +585,7 @@ public class RhinoEngine implements ScriptingEngine {
}; };
return sin.readObject(); return sin.readObject();
} finally { } finally {
core.contextFactory.exit(); Context.exit();
} }
} }

View file

@ -18,6 +18,7 @@ package helma.scripting.rhino;
import helma.objectmodel.INode; import helma.objectmodel.INode;
import helma.objectmodel.db.NodeHandle; import helma.objectmodel.db.NodeHandle;
import helma.objectmodel.db.Node;
import org.mozilla.javascript.Context; import org.mozilla.javascript.Context;
import java.io.Serializable; import java.io.Serializable;
@ -80,18 +81,19 @@ class HopObjectProxy implements SerializationProxy {
HopObjectProxy(HopObject obj) { HopObjectProxy(HopObject obj) {
INode n = obj.getNode(); INode n = obj.getNode();
if (n == null) if (n == null) {
ref = obj.getClassName(); ref = obj.getClassName();
else { } else {
if (n instanceof helma.objectmodel.db.Node) if (n instanceof Node) {
ref = new NodeHandle(((helma.objectmodel.db.Node) n).getKey()); ref = new NodeHandle((Node) n);
else } else {
ref = n; ref = n;
}
} }
wrapped = true; wrapped = true;
} }
HopObjectProxy(helma.objectmodel.db.Node node) { HopObjectProxy(Node node) {
ref = new NodeHandle(node.getKey()); ref = new NodeHandle(node.getKey());
} }

View file

@ -246,7 +246,7 @@ public class XmlObject {
converter = new XmlConverter(); converter = new XmlConverter();
} }
INode node = new helma.objectmodel.db.Node(null, null, INode node = new Node(null, null,
core.getApplication().getWrappedNodeManager()); core.getApplication().getWrappedNodeManager());
INode result = converter.convert(url, node); INode result = converter.convert(url, node);

View file

@ -345,17 +345,17 @@ public abstract class AbstractServletClient extends HttpServlet {
res.setContentLength(hopres.getContentLength()); res.setContentLength(hopres.getContentLength());
res.setContentType(hopres.getContentType()); res.setContentType(hopres.getContentType());
if ("HEAD".equalsIgnoreCase(req.getMethod())) { if (!"HEAD".equalsIgnoreCase(req.getMethod())) {
return; byte[] content = hopres.getContent();
} if (content != null) {
try {
try { OutputStream out = res.getOutputStream();
OutputStream out = res.getOutputStream(); out.write(content);
out.flush();
out.write(hopres.getContent()); } catch (Exception iox) {
out.flush(); log("Exception in writeResponse: " + iox);
} catch (Exception iox) { }
log("Exception in writeResponse: " + iox); }
} }
} }
} }
@ -545,24 +545,25 @@ public abstract class AbstractServletClient extends HttpServlet {
addIPAddress(buffer, request.getHeader("X-Forwarded-For")); addIPAddress(buffer, request.getHeader("X-Forwarded-For"));
addIPAddress(buffer, request.getHeader("Client-ip")); addIPAddress(buffer, request.getHeader("Client-ip"));
if (reqtrans.getSession() == null || !reqtrans.getSession().startsWith(buffer.toString())) { if (reqtrans.getSession() == null || !reqtrans.getSession().startsWith(buffer.toString())) {
response.addCookie(createSession(buffer.toString(), reqtrans, domain)); createSession(response, buffer.toString(), reqtrans, domain);
} }
} else if (reqtrans.getSession() == null) { } else if (reqtrans.getSession() == null) {
response.addCookie(createSession("", reqtrans, domain)); createSession(response, "", reqtrans, domain);
} }
} }
/** /**
* Create a new session cookie. * Create a new session cookie.
* *
* @param response the servlet response
* @param prefix the session id prefix * @param prefix the session id prefix
* @param reqtrans the request object * @param reqtrans the request object
* @param domain the cookie domain * @param domain the cookie domain
* @return the session cookie
*/ */
private Cookie createSession(String prefix, private void createSession(HttpServletResponse response,
RequestTrans reqtrans, String prefix,
String domain) { RequestTrans reqtrans,
String domain) {
Application app = getApplication(); Application app = getApplication();
String id = null; String id = null;
while (id == null || app.getSession(id) != null) { while (id == null || app.getSession(id) != null) {
@ -575,12 +576,20 @@ public abstract class AbstractServletClient extends HttpServlet {
} }
reqtrans.setSession(id); reqtrans.setSession(id);
Cookie cookie = new Cookie(sessionCookieName, id);
cookie.setPath("/");
if (domain != null)
cookie.setDomain(domain);
return cookie; StringBuffer buffer = new StringBuffer(sessionCookieName);
buffer.append("=").append(id).append("; Path=/");
if (domain != null) {
// lowercase domain for IE
buffer.append("; Domain=").append(domain.toLowerCase());
}
if (!"false".equalsIgnoreCase(app.getProperty("httpOnlySessionCookie"))) {
buffer.append("; HttpOnly");
}
if ("true".equalsIgnoreCase(app.getProperty("secureSessionCookie"))) {
buffer.append("; Secure");
}
response.addHeader("Set-Cookie", buffer.toString());
} }
/** /**

View file

@ -19,6 +19,9 @@ package helma.servlet;
import helma.framework.repository.Repository; import helma.framework.repository.Repository;
import helma.framework.core.Application; import helma.framework.core.Application;
import helma.framework.repository.FileRepository; import helma.framework.repository.FileRepository;
import helma.main.ServerConfig;
import helma.main.Server;
import java.io.*; import java.io.*;
import javax.servlet.*; import javax.servlet.*;
import java.util.*; import java.util.*;
@ -40,7 +43,7 @@ public final class StandaloneServletClient extends AbstractServletClient {
private String appName; private String appName;
private String appDir; private String appDir;
private String dbDir; private String dbDir;
// private String hopDir; private String hopDir;
private Repository[] repositories; private Repository[] repositories;
/** /**
@ -53,7 +56,12 @@ public final class StandaloneServletClient extends AbstractServletClient {
public void init(ServletConfig init) throws ServletException { public void init(ServletConfig init) throws ServletException {
super.init(init); super.init(init);
// hopDir = init.getInitParameter("hopdir"); hopDir = init.getInitParameter("hopdir");
if (hopDir == null) {
// assume helmaDir to be current directory
hopDir = ".";
}
appName = init.getInitParameter("application"); appName = init.getInitParameter("application");
@ -70,7 +78,7 @@ public final class StandaloneServletClient extends AbstractServletClient {
} }
Class[] parameters = { String.class }; Class[] parameters = { String.class };
ArrayList<Repository> repositoryList = new ArrayList<Repository>(); ArrayList repositoryList = new ArrayList();
for (int i = 0; true; i++) { for (int i = 0; true; i++) {
String repositoryArgs = init.getInitParameter("repository." + i); String repositoryArgs = init.getInitParameter("repository." + i);
@ -92,7 +100,7 @@ public final class StandaloneServletClient extends AbstractServletClient {
try { try {
Repository newRepository = (Repository) Class.forName(repositoryImpl) Repository newRepository = (Repository) Class.forName(repositoryImpl)
.getConstructor(parameters) .getConstructor(parameters)
.newInstance(repositoryArgs); .newInstance(new Object[] {repositoryArgs});
repositoryList.add(newRepository); repositoryList.add(newRepository);
log("adding repository: " + repositoryArgs); log("adding repository: " + repositoryArgs);
} catch (Exception ex) { } catch (Exception ex) {
@ -115,7 +123,7 @@ public final class StandaloneServletClient extends AbstractServletClient {
} }
repositories = new Repository[repositoryList.size()]; repositories = new Repository[repositoryList.size()];
repositories = repositoryList.toArray(repositories); repositories = (Repository[]) repositoryList.toArray(repositories);
} }
@ -146,8 +154,14 @@ public final class StandaloneServletClient extends AbstractServletClient {
try { try {
File dbHome = new File(dbDir); File dbHome = new File(dbDir);
File appHome = new File(appDir); File appHome = new File(appDir);
File hopHome = new File(hopDir);
app = new Application(appName, null, repositories, appHome, dbHome); ServerConfig config = new ServerConfig();
config.setHomeDir(hopHome);
Server server = new Server(config);
server.init();
app = new Application(appName, server, repositories, appHome, dbHome);
app.init(); app.init();
app.start(); app.start();
} catch (Exception x) { } catch (Exception x) {

View file

@ -633,9 +633,12 @@ public final class HtmlEncoder {
} }
} }
// we didn't reach a break, so encode the ampersand as HTML entity // we didn't reach a break, so encode as entity unless inside a tag
ret.append("&amp;"); if (insideMacroTag) {
ret.append('&');
} else {
ret.append("&amp;");
}
break; break;
case '\\': case '\\':
@ -763,7 +766,7 @@ public final class HtmlEncoder {
break; break;
} }
} }
if (c < 128) { if (c < 128 || insideMacroTag) {
ret.append(c); ret.append(c);
} else if ((c >= 128) && (c < 256)) { } else if ((c >= 128) && (c < 256)) {
ret.append(transform[c - 128]); ret.append(transform[c - 128]);

View file

@ -143,7 +143,9 @@ public class Logger implements Log {
// has gone. the 2000 entries threshold is somewhat arbitrary. // has gone. the 2000 entries threshold is somewhat arbitrary.
if (entries.size() < 2000) { if (entries.size() < 2000) {
String message = msg == null ? "null" : msg.toString(); String message = msg == null ? "null" : msg.toString();
entries.add(new Entry(dateCache, level, message, exception)); Thread thread = Thread.currentThread();
String threadId = "[" + thread.getName() + "] ";
entries.add(new Entry(dateCache, level, message, threadId, exception));
} }
} }
@ -164,6 +166,7 @@ public class Logger implements Log {
Entry entry = (Entry) entries.remove(0); Entry entry = (Entry) entries.remove(0);
writer.print(entry.date); writer.print(entry.date);
writer.print(entry.level); writer.print(entry.level);
writer.print(entry.threadId);
writer.println(entry.message); writer.println(entry.message);
if (entry.exception != null) if (entry.exception != null)
entry.exception.printStackTrace(writer); entry.exception.printStackTrace(writer);
@ -294,13 +297,14 @@ public class Logger implements Log {
} }
class Entry { class Entry {
final String date, level, message; final String date, level, message, threadId;
final Throwable exception; final Throwable exception;
Entry(String date, String level, String message, Throwable exception) { Entry(String date, String level, String message, String threadId, Throwable exception) {
this.date = date; this.date = date;
this.level = level; this.level = level;
this.message = message; this.message = message;
this.threadId = threadId;
this.exception = exception; this.exception = exception;
} }
} }

View file

@ -59,6 +59,12 @@ public class ResourceProperties extends Properties {
// lower case key to original key mapping for case insensitive lookups // lower case key to original key mapping for case insensitive lookups
private Properties keyMap = new Properties(); private Properties keyMap = new Properties();
// prefix for sub-properties
private String prefix;
// parent properties for sub-properties
private ResourceProperties parentProperties;
/** /**
* Constructs an empty ResourceProperties * Constructs an empty ResourceProperties
* Resources must be added manually afterwards * Resources must be added manually afterwards
@ -123,6 +129,22 @@ public class ResourceProperties extends Properties {
forceUpdate(); forceUpdate();
} }
/**
* Constructs a properties object containing all entries where the key matches
* the given string prefix from the source map to the target map, cutting off
* the prefix from the original key.
* @see #getSubProperties(String)
* @param parentProperties the parent properties
* @param prefix the property name prefix
*/
private ResourceProperties(ResourceProperties parentProperties, String prefix) {
this.parentProperties = parentProperties;
this.prefix = prefix;
resources = new HashSet();
setIgnoreCase(parentProperties.ignoreCase);
forceUpdate();
}
/** /**
* Updates the properties regardless of an actual need * Updates the properties regardless of an actual need
*/ */
@ -208,6 +230,21 @@ public class ResourceProperties extends Properties {
} }
} }
// if these are subproperties, reload them from the parent properties
if (parentProperties != null && prefix != null) {
parentProperties.update();
Iterator it = parentProperties.entrySet().iterator();
int prefixLength = prefix.length();
while (it.hasNext()) {
Map.Entry entry = (Map.Entry) it.next();
String key = entry.getKey().toString();
if (key.regionMatches(ignoreCase, 0, prefix, 0, prefixLength)) {
temp.put(key.substring(prefixLength), entry.getValue());
}
}
}
// at last we try to load properties from the resource list // at last we try to load properties from the resource list
if (resources != null) { if (resources != null) {
Iterator iterator = resources.iterator(); Iterator iterator = resources.iterator();
@ -247,25 +284,13 @@ public class ResourceProperties extends Properties {
* against the prefix. * against the prefix.
* *
* @param prefix the string prefix to match against * @param prefix the string prefix to match against
* @return a new subproperties instance
*/ */
public ResourceProperties getSubProperties(String prefix) { public ResourceProperties getSubProperties(String prefix) {
if (prefix == null) if (prefix == null) {
throw new NullPointerException("prefix"); throw new NullPointerException("prefix");
if ((System.currentTimeMillis() - lastCheck) > CACHE_TIME) {
update();
} }
ResourceProperties subprops = new ResourceProperties(); return new ResourceProperties(this, prefix);
subprops.setIgnoreCase(ignoreCase);
Iterator it = entrySet().iterator();
int prefixLength = prefix.length();
while (it.hasNext()) {
Map.Entry entry = (Map.Entry) it.next();
String key = entry.getKey().toString();
if (key.regionMatches(ignoreCase, 0, prefix, 0, prefixLength)) {
subprops.put(key.substring(prefixLength), entry.getValue());
}
}
return subprops;
} }
/** /**
@ -322,7 +347,7 @@ public class ResourceProperties extends Properties {
if (strkey == null) if (strkey == null)
return null; return null;
} }
return (String) super.get(strkey); return super.get(strkey);
} }
/** /**