package com.thaiopensource.relaxng.output.rnc;
import com.thaiopensource.relaxng.edit.Pattern;
import com.thaiopensource.relaxng.edit.ComponentVisitor;
import com.thaiopensource.relaxng.edit.NameClassVisitor;
import com.thaiopensource.relaxng.edit.PatternVisitor;
import com.thaiopensource.relaxng.edit.OneOrMorePattern;
import com.thaiopensource.relaxng.edit.ElementPattern;
import com.thaiopensource.relaxng.edit.NameNameClass;
import com.thaiopensource.relaxng.edit.ZeroOrMorePattern;
import com.thaiopensource.relaxng.edit.OptionalPattern;
import com.thaiopensource.relaxng.edit.NameClassedPattern;
import com.thaiopensource.relaxng.edit.AttributePattern;
import com.thaiopensource.relaxng.edit.UnaryPattern;
import com.thaiopensource.relaxng.edit.RefPattern;
import com.thaiopensource.relaxng.edit.ParentRefPattern;
import com.thaiopensource.relaxng.edit.ExternalRefPattern;
import com.thaiopensource.relaxng.edit.TextPattern;
import com.thaiopensource.relaxng.edit.EmptyPattern;
import com.thaiopensource.relaxng.edit.ListPattern;
import com.thaiopensource.relaxng.edit.MixedPattern;
import com.thaiopensource.relaxng.edit.AnyNameNameClass;
import com.thaiopensource.relaxng.edit.NsNameNameClass;
import com.thaiopensource.relaxng.edit.ChoiceNameClass;
import com.thaiopensource.relaxng.edit.ChoicePattern;
import com.thaiopensource.relaxng.edit.GroupPattern;
import com.thaiopensource.relaxng.edit.InterleavePattern;
import com.thaiopensource.relaxng.edit.GrammarPattern;
import com.thaiopensource.relaxng.edit.DivComponent;
import com.thaiopensource.relaxng.edit.IncludeComponent;
import com.thaiopensource.relaxng.edit.DataPattern;
import com.thaiopensource.relaxng.edit.ValuePattern;
import com.thaiopensource.relaxng.edit.NotAllowedPattern;
import com.thaiopensource.relaxng.edit.DefineComponent;
import com.thaiopensource.relaxng.edit.Combine;
import com.thaiopensource.relaxng.edit.Component;
import com.thaiopensource.relaxng.edit.Container;
import com.thaiopensource.relaxng.edit.CompositePattern;
import com.thaiopensource.relaxng.edit.NameClass;
import com.thaiopensource.relaxng.edit.NullVisitor;
import com.thaiopensource.relaxng.edit.Param;
import com.thaiopensource.relaxng.edit.Annotated;
import com.thaiopensource.relaxng.edit.AttributeAnnotation;
import com.thaiopensource.relaxng.edit.ElementAnnotation;
import com.thaiopensource.relaxng.edit.TextAnnotation;
import com.thaiopensource.relaxng.output.OutputDirectory;
import com.thaiopensource.relaxng.output.common.ErrorReporter;
import com.thaiopensource.relaxng.parse.SchemaBuilder;
import com.thaiopensource.xml.util.WellKnownNamespaces;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.HashSet;
import java.util.Vector;
import java.util.Collections;
import java.util.Map;
import java.util.HashMap;
/*
Comments (top-level most important)
Annotations
Use \x{} escapes for characters not in repertoire of selected encoding
Avoid lines with excessive complexity
Make use of ##
Make long literals pretty
Take advantage of
default namespace x = "..."
*/
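/*
Illustrative only (not emitted verbatim by this class): the shape of the
compact-syntax header produced by outputNamespaceDeclarations() and
outputDatatypeLibraryDeclarations(), followed by a grammar body. The URIs
below are hypothetical placeholders.

  default namespace = "http://example.com/ns"
  namespace a = "http://example.com/annotations"
  datatypes d = "http://example.com/datatypes"

  start = element doc { text }
*/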
class Output {
private final Prettyprinter pp;
private final String sourceUri;
private final OutputDirectory od;
private final ErrorReporter er;
private final NamespaceManager.NamespaceBindings nsb;
private final Map datatypeLibraryMap = new HashMap();
private final NameClassVisitor nameClassOutput = new NameClassOutput(true);
private final NameClassVisitor noParenNameClassOutput = new NameClassOutput(false);
private final PatternVisitor noParenPatternOutput = new PatternOutput(false);
private final PatternVisitor patternOutput = new PatternOutput(true);
private final ComponentVisitor componentOutput = new ComponentOutput();
private boolean isAttributeNameClass;
static private final String indent = " ";
static private final String[] keywords = {
"attribute", "default", "datatypes", "div", "element", "empty", "external",
"grammar", "include", "inherit", "list", "mixed", "namespace", "notAllowed",
"parent", "start", "string", "text", "token"
};
static private final Set keywordSet = new HashSet();
static {
for (int i = 0; i < keywords.length; i++)
keywordSet.add(keywords[i]);
}
static void output(Pattern p, String sourceUri, OutputDirectory od, ErrorReporter er) throws IOException {
try {
new Output(sourceUri, od, er, NamespaceVisitor.createBindings(p)).topLevel(p);
}
catch (Prettyprinter.WrappedException e) {
throw e.getIOException();
}
}
private Output(String sourceUri, OutputDirectory od, ErrorReporter er,
NamespaceManager.NamespaceBindings nsb) throws IOException {
this.sourceUri = sourceUri;
this.od = od;
this.er = er;
this.pp = new StreamingPrettyprinter(od.getLineLength(), od.getLineSeparator(), od.open(sourceUri));
this.nsb = nsb;
}
private void topLevel(Pattern p) {
outputNamespaceDeclarations();
outputDatatypeLibraryDeclarations(p);
if (p instanceof GrammarPattern)
innerBody(((GrammarPattern)p).getComponents());
else
p.accept(patternOutput);
pp.hardNewline();
pp.close();
}
private void outputNamespaceDeclarations() {
List prefixes = new Vector();
prefixes.addAll(nsb.getPrefixes());
Collections.sort(prefixes);
boolean needNewline = false;
for (Iterator iter = prefixes.iterator(); iter.hasNext();) {
String prefix = (String)iter.next();
String ns = nsb.getNamespaceUri(prefix);
if (prefix.length() == 0) {
if (!ns.equals(SchemaBuilder.INHERIT_NS)) {
pp.text("default namespace = ");
literal(ns);
pp.hardNewline();
needNewline = true;
}
}
else if (!prefix.equals("xml")) {
pp.text("namespace ");
pp.text(prefix);
pp.text(" = ");
if (ns.equals(SchemaBuilder.INHERIT_NS))
pp.text("inherit");
else
literal(ns);
pp.hardNewline();
needNewline = true;
}
}
if (needNewline)
pp.hardNewline();
}
private void outputDatatypeLibraryDeclarations(Pattern p) {
datatypeLibraryMap.put(WellKnownNamespaces.XML_SCHEMA_DATATYPES, "xsd");
List datatypeLibraries = new Vector();
datatypeLibraries.addAll(DatatypeLibraryVisitor.findDatatypeLibraries(p));
if (datatypeLibraries.isEmpty())
return;
Collections.sort(datatypeLibraries);
for (int i = 0, len = datatypeLibraries.size(); i < len; i++) {
String prefix = "d";
if (len > 1)
prefix += Integer.toString(i + 1);
String uri = (String)datatypeLibraries.get(i);
datatypeLibraryMap.put(uri, prefix);
pp.text("datatypes ");
pp.text(prefix);
pp.text(" = ");
literal(uri);
pp.hardNewline();
}
pp.hardNewline();
}
static class DatatypeLibraryVisitor extends NullVisitor {
private Set datatypeLibraries = new HashSet();
public void nullVisitValue(ValuePattern p) {
noteDatatypeLibrary(p.getDatatypeLibrary());
super.nullVisitValue(p);
}
public void nullVisitData(DataPattern p) {
noteDatatypeLibrary(p.getDatatypeLibrary());
super.nullVisitData(p);
}
private void noteDatatypeLibrary(String uri) {
if (!uri.equals("") && !uri.equals(WellKnownNamespaces.XML_SCHEMA_DATATYPES))
datatypeLibraries.add(uri);
}
static Set findDatatypeLibraries(Pattern p) {
DatatypeLibraryVisitor dlv = new DatatypeLibraryVisitor();
p.accept(dlv);
return dlv.datatypeLibraries;
}
}
static class NamespaceVisitor extends NullVisitor {
private NamespaceManager nsm = new NamespaceManager();
private boolean isAttribute;
public void nullVisitInclude(IncludeComponent c) {
super.nullVisitInclude(c);
nsm.requireNamespace(c.getNs(), true);
}
public void nullVisitExternalRef(ExternalRefPattern p) {
super.nullVisitExternalRef(p);
nsm.requireNamespace(p.getNs(), true);
}
public void nullVisitElement(ElementPattern p) {
isAttribute = false;
super.nullVisitElement(p);
}
public void nullVisitAttribute(AttributePattern p) {
isAttribute = true;
super.nullVisitAttribute(p);
}
public void nullVisitName(NameNameClass nc) {
super.nullVisitName(nc);
if (!isAttribute || nc.getNamespaceUri().length() != 0)
nsm.requireNamespace(nc.getNamespaceUri(), !isAttribute);
if (nc.getPrefix() == null) {
if (!isAttribute)
nsm.preferBinding("", nc.getNamespaceUri());
}
else
nsm.preferBinding(nc.getPrefix(), nc.getNamespaceUri());
}
public void nullVisitNsName(NsNameNameClass nc) {
super.nullVisitNsName(nc);
nsm.requireNamespace(nc.getNs(), false);
}
public void nullVisitValue(ValuePattern p) {
super.nullVisitValue(p);
for (Iterator iter = p.getPrefixMap().entrySet().iterator(); iter.hasNext();) {
Map.Entry entry = (Map.Entry)iter.next();
nsm.requireBinding((String)entry.getKey(), (String)entry.getValue());
}
}
static NamespaceManager.NamespaceBindings createBindings(Pattern p) {
NamespaceVisitor nsv = new NamespaceVisitor();
p.accept(nsv);
return nsv.nsm.createBindings();
}
}
class ComponentOutput implements ComponentVisitor {
public Object visitDefine(DefineComponent c) {
pp.startGroup();
String name = c.getName();
if (name == DefineComponent.START)
pp.text("start");
else
identifier(name);
Combine combine = c.getCombine();
String op;
if (combine == null)
op = " =";
else if (combine == Combine.CHOICE)
op = " |=";
else
op = " &=";
pp.text(op);
pp.startNest(indent);
pp.softNewline(" ");
c.getBody().accept(noParenPatternOutput);
pp.endNest();
pp.endGroup();
return null;
}
public Object visitDiv(DivComponent c) {
pp.text("div");
body(c);
return null;
}
public Object visitInclude(IncludeComponent c) {
pp.text("include ");
literal(od.reference(sourceUri, c.getHref()));
inherit(c.getNs());
List components = c.getComponents();
if (!components.isEmpty())
body(components);
return null;
}
}
class PatternOutput implements PatternVisitor {
private final boolean alwaysUseParens;
PatternOutput(boolean alwaysUseParens) {
this.alwaysUseParens = alwaysUseParens;
}
public Object visitGrammar(GrammarPattern p) {
startAnnotations(p);
pp.text("grammar");
body(p);
endAnnotations(p);
return null;
}
public Object visitElement(ElementPattern p) {
isAttributeNameClass = false;
nameClassed(p, "element ");
return null;
}
public Object visitAttribute(AttributePattern p) {
isAttributeNameClass = true;
nameClassed(p, "attribute ");
return null;
}
private void nameClassed(NameClassedPattern p, String key) {
startAnnotations(p);
pp.text(key);
pp.startNest(key);
p.getNameClass().accept(noParenNameClassOutput);
pp.endNest();
pp.startGroup();
pp.text(" {");
pp.startNest(indent);
pp.softNewline(" ");
p.getChild().accept(noParenPatternOutput);
pp.endNest();
pp.softNewline(" ");
pp.text("}");
pp.endGroup();
endAnnotations(p);
}
public Object visitOneOrMore(OneOrMorePattern p) {
postfix(p, "+");
return null;
}
public Object visitZeroOrMore(ZeroOrMorePattern p) {
postfix(p, "*");
return null;
}
public Object visitOptional(OptionalPattern p) {
postfix(p, "?");
return null;
}
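// Writes a unary pattern followed by a postfix operator (+, * or ?); when the
// pattern carries annotations, the child is wrapped in parentheses first.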
private void postfix(UnaryPattern p, String op) {
if (!startAnnotations(p)) {
p.getChild().accept(patternOutput);
pp.text(op);
}
else {
pp.text("(");
pp.startNest("(");
p.getChild().accept(patternOutput);
pp.endNest();
pp.text(")");
pp.text(op);
}
endAnnotations(p);
}
public Object visitRef(RefPattern p) {
startAnnotations(p);
identifier(p.getName());
endAnnotations(p);
return null;
}
public Object visitParentRef(ParentRefPattern p) {
startAnnotations(p);
pp.text("parent ");
identifier(p.getName());
endAnnotations(p);
return null;
}
public Object visitExternalRef(ExternalRefPattern p) {
startAnnotations(p);
pp.text("external ");
literal(od.reference(sourceUri, p.getHref()));
inherit(p.getNs());
endAnnotations(p);
return null;
}
public Object visitText(TextPattern p) {
startAnnotations(p);
pp.text("text");
endAnnotations(p);
return null;
}
public Object visitEmpty(EmptyPattern p) {
startAnnotations(p);
pp.text("empty");
endAnnotations(p);
return null;
}
public Object visitNotAllowed(NotAllowedPattern p) {
startAnnotations(p);
pp.text("notAllowed");
endAnnotations(p);
return null;
}
public Object visitList(ListPattern p) {
prefix(p, "list");
return null;
}
public Object visitMixed(MixedPattern p) {
prefix(p, "mixed");
return null;
}
private void prefix(UnaryPattern p, String key) {
startAnnotations(p);
pp.text(key);
pp.text(" {");
pp.startNest(indent);
pp.softNewline(" ");
p.getChild().accept(noParenPatternOutput);
pp.endNest();
pp.softNewline(" ");
pp.text("}");
endAnnotations(p);
}
public Object visitChoice(ChoicePattern p) {
composite(p, "| ", false);
return null;
}
public Object visitInterleave(InterleavePattern p) {
composite(p, "& ", false);
return null;
}
public Object visitGroup(GroupPattern p) {
composite(p, ",", true);
return null;
}
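// Writes the children of a composite pattern. For groups the "," separator is
// emitted before any line break; for choice/interleave the "|"/"&" separator
// starts the continuation line. Parentheses are added when the caller requires
// them or when the pattern carries annotations.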
void composite(CompositePattern p, String sep, boolean sepBeforeNewline) {
boolean useParens = alwaysUseParens;
if (startAnnotations(p))
useParens = true;
pp.startGroup();
if (useParens) {
pp.text("(");
pp.startNest("(");
}
boolean first = true;
for (Iterator iter = p.getChildren().iterator(); iter.hasNext();) {
if (!first) {
if (sepBeforeNewline)
pp.text(sep);
pp.softNewline(" ");
if (!sepBeforeNewline) {
pp.text(sep);
pp.startNest(sep);
}
}
((Pattern)iter.next()).accept(patternOutput);
if (first)
first = false;
else if (!sepBeforeNewline)
pp.endNest();
}
if (useParens) {
pp.endNest();
pp.text(")");
}
pp.endGroup();
endAnnotations(p);
}
public Object visitData(DataPattern p) {
startAnnotations(p);
String lib = p.getDatatypeLibrary();
String qn;
if (!lib.equals(""))
qn = (String)datatypeLibraryMap.get(lib) + ":" + p.getType();
else
qn = p.getType();
pp.text(qn);
List params = p.getParams();
if (params.size() > 0) {
pp.startGroup();
pp.text(" {");
pp.startNest(indent);
for (Iterator iter = params.iterator(); iter.hasNext();) {
pp.softNewline(" ");
Param param = (Param)iter.next();
pp.text(param.getName());
pp.text(" = ");
literal(param.getValue());
}
pp.endNest();
pp.softNewline(" ");
pp.text("}");
pp.endGroup();
}
Pattern e = p.getExcept();
if (e != null) {
if (params.size() == 0) {
pp.text(" - ");
pp.startNest(qn + " - ");
e.accept(patternOutput);
pp.endNest();
}
else {
pp.startGroup();
pp.softNewline(" ");
pp.text("- ");
pp.startNest("- ");
// XXX think we need parentheses if e has following annotations
e.accept(patternOutput);
pp.endNest();
pp.endGroup();
}
}
endAnnotations(p);
return null;
}
public Object visitValue(ValuePattern p) {
startAnnotations(p);
String lib = p.getDatatypeLibrary();
if (lib.equals("")) {
if (!p.getType().equals("token"))
pp.text(p.getType() + " ");
}
else
pp.text((String)datatypeLibraryMap.get(lib) + ":" + p.getType() + " ");
literal(p.getValue());
endAnnotations(p);
return null;
}
}
class NameClassOutput implements NameClassVisitor {
private final boolean alwaysUseParens;
NameClassOutput(boolean alwaysUseParens) {
this.alwaysUseParens = alwaysUseParens;
}
public Object visitAnyName(AnyNameNameClass nc) {
NameClass e = nc.getExcept();
if (e == null) {
startAnnotations(nc);
pp.text("*");
}
else {
boolean useParens = startAnnotations(nc) || alwaysUseParens;
String s = useParens ? "(* - " : "* - ";
pp.text(s);
pp.startNest(s);
e.accept(nameClassOutput);
if (useParens)
pp.text(")");
pp.endNest();
}
endAnnotations(nc);
return null;
}
public Object visitNsName(NsNameNameClass nc) {
NameClass e = nc.getExcept();
String prefix = nsb.getNonEmptyPrefix(nc.getNs());
if (e == null) {
startAnnotations(nc);
pp.text(prefix);
pp.text(":*");
}
else {
boolean useParens = startAnnotations(nc) || alwaysUseParens;
String s = useParens ? "(" : "";
s += prefix;
s += ":* - ";
pp.text(s);
pp.startNest(s);
e.accept(nameClassOutput);
pp.endNest();
if (useParens)
pp.text(")");
}
endAnnotations(nc);
return null;
}
public Object visitName(NameNameClass nc) {
startAnnotations(nc);
pp.text(qualifyName(nc.getNamespaceUri(), nc.getPrefix(), nc.getLocalName(), isAttributeNameClass));
endAnnotations(nc);
return null;
}
public Object visitChoice(ChoiceNameClass nc) {
boolean useParens = alwaysUseParens;
if (startAnnotations(nc))
useParens = true;
else if (nc.getChildren().size() == 1)
useParens = false;
if (useParens) {
pp.text("(");
pp.startNest("(");
}
pp.startGroup();
boolean first = true;
for (Iterator iter = nc.getChildren().iterator(); iter.hasNext();) {
if (first)
first = false;
else {
pp.softNewline(" ");
pp.text("| ");
}
((NameClass)iter.next()).accept(nameClassOutput);
}
pp.endGroup();
if (useParens) {
pp.endNest();
pp.text(")");
}
endAnnotations(nc);
return null;
}
}
private static boolean hasAnnotations(Annotated annotated) {
return (!annotated.getChildElementAnnotations().isEmpty()
|| !annotated.getAttributeAnnotations().isEmpty()
|| !annotated.getFollowingElementAnnotations().isEmpty());
}
private boolean startAnnotations(Annotated annotated) {
if (!annotated.getLeadingComments().isEmpty()) {
// XXX output the comments
if (!hasAnnotations(annotated))
return false;
}
else if (!hasAnnotations(annotated))
return false;
pp.startGroup();
List before = (annotated.mayContainText()
? annotated.getFollowingElementAnnotations()
: annotated.getChildElementAnnotations());
if (!annotated.getAttributeAnnotations().isEmpty()
|| !before.isEmpty()) {
annotationBody(annotated.getAttributeAnnotations(), before);
pp.softNewline(" ");
}
return true;
}
private void endAnnotations(Annotated annotated) {
if (!annotated.mayContainText()) {
for (Iterator iter = annotated.getFollowingElementAnnotations().iterator(); iter.hasNext();) {
Object obj = iter.next();
if (obj instanceof ElementAnnotation) {
pp.softNewline(" ");
pp.text(">> ");
pp.startNest(">> ");
elementAnnotation((ElementAnnotation)obj);
pp.endNest();
}
}
}
if (hasAnnotations(annotated))
pp.endGroup();
}
private void annotationBody(List attributes, List children) {
pp.startGroup();
pp.text("[");
pp.startNest(indent);
for (Iterator iter = attributes.iterator(); iter.hasNext();) {
AttributeAnnotation att = (AttributeAnnotation)iter.next();
pp.softNewline(" ");
pp.text(qualifyName(att.getNamespaceUri(), att.getPrefix(), att.getLocalName(), true));
pp.text(" = ");
literal(att.getValue());
}
for (Iterator iter = children.iterator(); iter.hasNext();) {
Object obj = iter.next();
if (obj instanceof ElementAnnotation) {
pp.softNewline(" ");
elementAnnotation((ElementAnnotation)obj);
}
else if (obj instanceof TextAnnotation) {
pp.softNewline(" ");
literal(((TextAnnotation)obj).getValue());
}
}
pp.endNest();
pp.softNewline(" ");
pp.text("]");
pp.endGroup();
}
private void elementAnnotation(ElementAnnotation elem) {
pp.text(qualifyName(elem.getNamespaceUri(), elem.getPrefix(), elem.getLocalName(),
// unqualified annotation element names have "" namespace
true));
pp.text(" ");
annotationBody(elem.getAttributes(), elem.getChildren());
}
private void body(Container container) {
body(container.getComponents());
}
private void body(List components) {
if (components.size() == 0)
pp.text(" { }");
else {
pp.text(" {");
pp.startNest(indent);
pp.hardNewline();
innerBody(components);
pp.endNest();
pp.hardNewline();
pp.text("}");
}
}
private void innerBody(List components) {
boolean first = true;
for (Iterator iter = components.iterator(); iter.hasNext();) {
if (first)
first = false;
else
pp.hardNewline();
((Component)iter.next()).accept(componentOutput);
}
}
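// Emits an "inherit = <prefix>" clause unless ns is already bound as the
// default namespace.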
private void inherit(String ns) {
if (ns.equals(nsb.getNamespaceUri("")))
return;
pp.text(" inherit = ");
pp.text(nsb.getNonEmptyPrefix(ns));
}
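// Writes an identifier, escaping it with a leading backslash if it collides
// with an RNC keyword.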
private void identifier(String name) {
if (keywordSet.contains(name))
pp.text("\\");
pp.text(name);
}
|
package rhogenwizard.debugger.model;
import org.eclipse.core.resources.IMarkerDelta;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.debug.core.DebugEvent;
import org.eclipse.debug.core.DebugException;
import org.eclipse.debug.core.DebugPlugin;
import org.eclipse.debug.core.ILaunch;
import org.eclipse.debug.core.model.IBreakpoint;
import org.eclipse.debug.core.model.IDebugTarget;
import org.eclipse.debug.core.model.IExpression;
import org.eclipse.debug.core.model.IMemoryBlock;
import org.eclipse.debug.core.model.IProcess;
import org.eclipse.debug.core.model.IStackFrame;
import org.eclipse.debug.core.model.IThread;
import org.eclipse.debug.core.model.IValue;
import org.eclipse.debug.core.model.IVariable;
import org.eclipse.dltk.internal.debug.core.model.ScriptLineBreakpoint;
import org.eclipse.swt.graphics.Resource;
import rhogenwizard.debugger.DebugServer;
import rhogenwizard.debugger.DebugVariableType;
import rhogenwizard.debugger.IDebugCallback;
import rhogenwizard.debugger.RhogenConstants;
import rhogenwizard.launcher.RhogenLaunchDelegate;
/**
* Rhogen debug target (adapted from the Eclipse PDA example debug target)
*/
public class RhogenDebugTarget extends RhogenDebugElement implements IDebugTarget, IDebugCallback
{
// associated system process (VM)
private IProcess fProcess;
// containing launch object
private ILaunch fLaunch;
// program name
private String fName;
// suspend state
private boolean fSuspended = true;
// terminated state
private boolean fTerminated = false;
// threads
private RhogenThread fThread;
private IThread[] fThreads;
private static DebugServer m_debugServer = null;
public RhogenDebugTarget(ILaunch launch, IProcess process) throws CoreException
{
super(null);
fLaunch = launch;
fTarget = this;
fProcess = process;
fThread = new RhogenThread(this);
fThreads = new IThread[] {fThread};
DebugServer.setDebugOutputStream(System.out);
if (m_debugServer != null)
{
m_debugServer.shutdown();
}
m_debugServer = new DebugServer(this);
m_debugServer.start();
DebugPlugin.getDefault().getBreakpointManager().addBreakpointListener(this);
}
public void setProcess(IProcess p)
{
fProcess = p;
}
public IProcess getProcess()
{
return fProcess;
}
public IThread[] getThreads() throws DebugException
{
return fThreads;
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.IDebugTarget#hasThreads()
*/
public boolean hasThreads() throws DebugException
{
return true; // WTB Changed per bug #138600
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.IDebugTarget#getName()
*/
public String getName() throws DebugException
{
if (fName == null)
{
try
{
fName = getLaunch().getLaunchConfiguration().getAttribute(RhogenLaunchDelegate.projectNameCfgAttribute, "");
}
catch (CoreException e)
{
fName = "";
}
}
return fName;
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.IDebugTarget#supportsBreakpoint(org.eclipse.debug.core.model.IBreakpoint)
*/
public boolean supportsBreakpoint(IBreakpoint breakpoint)
{
if (breakpoint.getModelIdentifier().equals(RhogenConstants.debugModelId))
{
return true;
}
return false;
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.IDebugElement#getDebugTarget()
*/
public IDebugTarget getDebugTarget()
{
return this;
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.IDebugElement#getLaunch()
*/
public ILaunch getLaunch()
{
return fLaunch;
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.ITerminate#canTerminate()
*/
public boolean canTerminate()
{
return getProcess().canTerminate();
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.ITerminate#isTerminated()
*/
public boolean isTerminated()
{
return getProcess().isTerminated();
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.ITerminate#terminate()
*/
public void terminate() throws DebugException
{
m_debugServer.debugTerminate();
exited();
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.ISuspendResume#canResume()
*/
public boolean canResume()
{
return !isTerminated() && isSuspended();
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.ISuspendResume#canSuspend()
*/
public boolean canSuspend()
{
return !isTerminated() && !isSuspended();
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.ISuspendResume#isSuspended()
*/
public boolean isSuspended()
{
return fSuspended;
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.ISuspendResume#resume()
*/
public void resume() throws DebugException
{
cleanState();
resumed(DebugEvent.CLIENT_REQUEST);
m_debugServer.debugResume();
}
/**
* Notification the target has resumed for the given reason
*
* @param detail reason for the resume
*/
private void resumed(int detail)
{
fSuspended = false;
fThread.fireResumeEvent(detail);
}
/**
* Notification the target has suspended for the given reason
*
* @param detail reason for the suspend
*/
private void suspended(int detail)
{
fSuspended = true;
fThread.fireSuspendEvent(detail);
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.ISuspendResume#suspend()
*/
public void suspend() throws DebugException
{
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.IBreakpointListener#breakpointAdded(org.eclipse.debug.core.model.IBreakpoint)
*/
public void breakpointAdded(IBreakpoint breakpoint)
{
if (supportsBreakpoint(breakpoint))
{
try
{
if (breakpoint.isEnabled())
{
ScriptLineBreakpoint lineBr = (ScriptLineBreakpoint) breakpoint;
int lineNum = lineBr.getLineNumber();
String srcFile = prepareResNameForDebugger(lineBr.getResourcePath().toOSString());
m_debugServer.debugBreakpoint(srcFile, lineNum);
}
}
catch (CoreException e)
{
}
}
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.IBreakpointListener#breakpointRemoved(org.eclipse.debug.core.model.IBreakpoint, org.eclipse.core.resources.IMarkerDelta)
*/
public void breakpointRemoved(IBreakpoint breakpoint, IMarkerDelta delta)
{
if (supportsBreakpoint(breakpoint))
{
try
{
if (breakpoint.isEnabled())
{
ScriptLineBreakpoint lineBr = (ScriptLineBreakpoint) breakpoint;
int lineNum = lineBr.getLineNumber();
String srcFile = prepareResNameForDebugger(lineBr.getResourcePath().toOSString());
m_debugServer.debugRemoveBreakpoint(srcFile, lineNum);
}
}
catch (CoreException e)
{
}
}
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.IBreakpointListener#breakpointChanged(org.eclipse.debug.core.model.IBreakpoint, org.eclipse.core.resources.IMarkerDelta)
*/
public void breakpointChanged(IBreakpoint breakpoint, IMarkerDelta delta)
{
if (supportsBreakpoint(breakpoint))
{
try
{
if (breakpoint.isEnabled())
{
breakpointAdded(breakpoint);
}
else
{
breakpointRemoved(breakpoint, null);
}
}
catch (CoreException e)
{
}
}
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.IDisconnect#canDisconnect()
*/
public boolean canDisconnect()
{
return false;
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.IDisconnect#disconnect()
*/
public void disconnect() throws DebugException
{
}
public void stepOver()
{
cleanState();
fThread.setStepping(true);
m_debugServer.debugStepOver();
}
public void stepInto()
{
cleanState();
fThread.setStepping(true);
m_debugServer.debugStepInto();
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.IDisconnect#isDisconnected()
*/
public boolean isDisconnected()
{
return false;
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.IMemoryBlockRetrieval#supportsStorageRetrieval()
*/
public boolean supportsStorageRetrieval()
{
return false;
}
/* (non-Javadoc)
* @see org.eclipse.debug.core.model.IMemoryBlockRetrieval#getMemoryBlock(long, long)
*/
public IMemoryBlock getMemoryBlock(long startAddress, long length) throws DebugException
{
return null;
}
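// Normalizes a workspace resource path for the Rhodes debugger: backslashes
// become forward slashes and, when the path contains "app/", only the part
// after it is returned (e.g. "C:\project\app\application.rb" -> "application.rb").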
static String prepareResNameForDebugger(String resName)
{
resName = resName.replace('\\', '/');
String[] segments = resName.split("app/");
if (segments.length > 1)
return segments[1];
return segments[0];
}
/**
* Install breakpoints that are already registered with the breakpoint
* manager.
*/
private void installDeferredBreakpoints()
{
IBreakpoint[] breakpoints = DebugPlugin.getDefault().getBreakpointManager().getBreakpoints(RhogenConstants.debugModelId);
for (int i = 0; i < breakpoints.length; i++)
{
breakpointAdded(breakpoints[i]);
}
}
/**
* Returns the current stack frames in the target.
*
* @return the current stack frames in the target
* @throws DebugException if unable to perform the request
*/
protected IStackFrame[] getStackFrames() throws DebugException
{
StackData stackData = new StackData(m_debugServer.debugGetFile(), m_debugServer.debugGetLine());
IStackFrame[] theFrames = new IStackFrame[1];
theFrames[0] = new RhogenStackFrame(fThread, stackData, 0);
DebugPlugin.getDefault().getExpressionManager().removeExpressions(DebugPlugin.getDefault().getExpressionManager().getExpressions());
m_debugServer.debugGetVariables(new DebugVariableType[] {
DebugVariableType.LOCAL, DebugVariableType.INSTANCE, DebugVariableType.CLASS, DebugVariableType.GLOBAL });
return theFrames;
}
/**
* Returns the current value of the given variable.
*
* @param variable
* @return variable value
* @throws DebugException if the request fails
*/
protected IValue getVariableValue(RhogenVariable variable) throws DebugException
{
m_debugServer.debugEvaluate(variable.getName());
return new RhogenValue(this, "");
}
@Override
public void connected()
{
try
{
cleanState();
fireCreationEvent();
installDeferredBreakpoints();
resume();
}
catch (DebugException e)
{
}
}
@Override
public void breakpoint(String file, int lineNumber, String className, String method)
{
cleanState();
IBreakpoint[] breakpoints = DebugPlugin.getDefault().getBreakpointManager().getBreakpoints(RhogenConstants.debugModelId);
for (int i = 0; i < breakpoints.length; i++)
{
IBreakpoint breakpoint = breakpoints[i];
if (breakpoint instanceof ScriptLineBreakpoint)
{
ScriptLineBreakpoint lineBreakpoint = (ScriptLineBreakpoint) breakpoint;
String resPath = prepareResNameForDebugger(lineBreakpoint.getResourcePath().toOSString());
try
{
if (lineBreakpoint.getLineNumber() == lineNumber && resPath.equals(file))
{
fThread.setBreakpoints(new IBreakpoint[]{breakpoint});
break;
}
}
catch (CoreException e)
{
}
}
}
suspended(DebugEvent.BREAKPOINT);
}
@Override
public void step(String file, int line, String className, String method)
{
// cleanState();
// fThread.setStepping(true);
// resumed(DebugEvent.STEP_OVER);
}
@Override
public void unknown(String cmd)
{
}
@Override
public void exited()
{
fTerminated = true;
fSuspended = false;
DebugPlugin.getDefault().getBreakpointManager().removeBreakpointListener(this);
fireTerminateEvent();
m_debugServer.shutdown();
}
@Override
public void resumed()
{
cleanState();
fSuspended = false;
resumed(DebugEvent.CLIENT_REQUEST);
}
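// Clears the thread's per-stop state (hit breakpoints and stepping flag)
// before resuming, stepping or handling a new event from the debug server.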
void cleanState()
{
fThread.setBreakpoints(null);
fThread.setStepping(false);
}
@Override
public void evaluation(boolean valid, String code, String value)
{
// try
// IStackFrame[] frames = fThread.getStackFrames();
// for(int i=0; i<frames.length; ++i)
// IStackFrame frame = frames[i];
// IVariable[] stackVars = frame.getVariables();
// for (int v=0; v<stackVars.length; ++v)
// IVariable currVar = stackVars[v];
// if (currVar instanceof RhogenVariable)
// if (currVar.getName().equals(code))
// RhogenVariable rhoVar = (RhogenVariable) currVar;
// RhogenValue rhoValue = (RhogenValue) rhoVar.getValue();
// rhoValue.setValue(value);
// catch (DebugException e)
// e.printStackTrace();
}
@Override
public void watch(DebugVariableType type, String variable, String value)
{
IValue val = new RhogenValue(this, value);
DebugPlugin.getDefault().getExpressionManager().addExpression(new RhogenExpression(this, fLaunch, variable, val));
}
@Override
public void watchBOL(DebugVariableType type)
{
// TODO Auto-generated method stub
}
@Override
public void watchEOL(DebugVariableType type)
{
// TODO Auto-generated method stub
}
}
|
package org.commcare.util.engine;
import org.commcare.modern.reference.ArchiveFileRoot;
import org.commcare.modern.reference.JavaFileRoot;
import org.commcare.modern.reference.JavaHttpRoot;
import org.commcare.resources.ResourceManager;
import org.commcare.resources.model.InstallCancelledException;
import org.commcare.resources.model.Resource;
import org.commcare.resources.model.ResourceTable;
import org.commcare.resources.model.TableStateListener;
import org.commcare.resources.model.UnresolvedResourceException;
import org.commcare.suite.model.Detail;
import org.commcare.suite.model.DetailField;
import org.commcare.suite.model.EntityDatum;
import org.commcare.suite.model.Entry;
import org.commcare.suite.model.FormIdDatum;
import org.commcare.suite.model.Menu;
import org.commcare.suite.model.OfflineUserRestore;
import org.commcare.suite.model.Profile;
import org.commcare.suite.model.PropertySetter;
import org.commcare.suite.model.SessionDatum;
import org.commcare.suite.model.Suite;
import org.commcare.util.CommCarePlatform;
import org.javarosa.core.io.BufferedInputStream;
import org.javarosa.core.io.StreamsUtil;
import org.javarosa.core.model.FormDef;
import org.javarosa.core.model.condition.EvaluationContext;
import org.javarosa.core.model.instance.FormInstance;
import org.javarosa.core.reference.ReferenceManager;
import org.javarosa.core.reference.ResourceReferenceFactory;
import org.javarosa.core.services.PropertyManager;
import org.javarosa.core.services.locale.Localization;
import org.javarosa.core.services.storage.*;
import org.javarosa.core.services.storage.util.DummyIndexedStorageUtility;
import org.javarosa.core.util.externalizable.LivePrototypeFactory;
import org.javarosa.core.util.externalizable.PrototypeFactory;
import org.javarosa.xml.util.UnfullfilledRequirementsException;
import org.javarosa.xpath.XPathMissingInstanceException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Hashtable;
import java.util.Vector;
import java.util.zip.ZipFile;
/**
* @author ctsims
*/
public class CommCareConfigEngine {
protected ResourceTable table;
protected ResourceTable updateTable;
protected ResourceTable recoveryTable;
protected CommCarePlatform platform;
private final PrototypeFactory mLiveFactory;
private final PrintStream print;
private ArchiveFileRoot mArchiveRoot;
private static IStorageIndexedFactory storageFactory;
public CommCareConfigEngine() {
this(new LivePrototypeFactory());
}
public CommCareConfigEngine(PrototypeFactory prototypeFactory) {
this(System.out, prototypeFactory);
}
public CommCareConfigEngine(OutputStream output, PrototypeFactory prototypeFactory) {
this.print = new PrintStream(output);
this.platform = new CommCarePlatform(2, 32);
this.mLiveFactory = prototypeFactory;
if (storageFactory == null) {
setupDummyStorageFactory();
}
setRoots();
table = ResourceTable.RetrieveTable(storageFactory.newStorage("GLOBAL_RESOURCE_TABLE", Resource.class));
updateTable = ResourceTable.RetrieveTable(storageFactory.newStorage("GLOBAL_UPGRADE_TABLE", Resource.class));
recoveryTable = ResourceTable.RetrieveTable(storageFactory.newStorage("GLOBAL_RECOVERY_TABLE", Resource.class));
//All of the below is on account of the fact that the installers
//aren't going through a factory method to handle them differently
//per device.
StorageManager.forceClear();
StorageManager.setStorageFactory(storageFactory);
PropertyManager.initDefaultPropertyManager();
StorageManager.registerStorage(Profile.STORAGE_KEY, Profile.class);
StorageManager.registerStorage(Suite.STORAGE_KEY, Suite.class);
StorageManager.registerStorage(FormDef.STORAGE_KEY, FormDef.class);
StorageManager.registerStorage(FormInstance.STORAGE_KEY, FormInstance.class);
StorageManager.registerStorage(OfflineUserRestore.STORAGE_KEY, OfflineUserRestore.class);
}
private void setupDummyStorageFactory() {
CommCareConfigEngine.setStorageFactory(new IStorageIndexedFactory() {
@Override
public IStorageUtilityIndexed newStorage(String name, Class type) {
return new DummyIndexedStorageUtility(type, mLiveFactory);
}
});
}
public static void setStorageFactory(IStorageIndexedFactory storageFactory) {
CommCareConfigEngine.storageFactory = storageFactory;
}
protected void setRoots() {
ReferenceManager.instance().addReferenceFactory(new JavaHttpRoot());
this.mArchiveRoot = new ArchiveFileRoot();
ReferenceManager.instance().addReferenceFactory(mArchiveRoot);
ReferenceManager.instance().addReferenceFactory(new ResourceReferenceFactory());
}
public void initFromArchive(String archiveURL) {
String fileName;
if (archiveURL.startsWith("http")) {
fileName = downloadToTemp(archiveURL);
} else {
fileName = archiveURL;
}
ZipFile zip;
try {
zip = new ZipFile(fileName);
} catch (IOException e) {
print.println("File at " + archiveURL + ": is not a valid CommCare Package. Downloaded to: " + fileName);
e.printStackTrace(print);
return;
}
String archiveGUID = this.mArchiveRoot.addArchiveFile(zip);
init("jr://archive/" + archiveGUID + "/profile.ccpr");
}
private String downloadToTemp(String resource) {
try {
URL url = new URL(resource);
HttpURLConnection conn = (HttpURLConnection)url.openConnection();
conn.setInstanceFollowRedirects(true); // you still need to handle redirects manually.
HttpURLConnection.setFollowRedirects(true);
File file = File.createTempFile("commcare_", ".ccz");
FileOutputStream fos = new FileOutputStream(file);
StreamsUtil.writeFromInputToOutput(new BufferedInputStream(conn.getInputStream()), fos);
return file.getAbsolutePath();
} catch (IOException e) {
print.println("Issue downloading or create stream for " + resource);
e.printStackTrace(print);
System.exit(-1);
return null;
}
}
public void initFromLocalFileResource(String resource) {
String reference = setFileSystemRootFromResourceAndReturnRelativeRef(resource);
init(reference);
}
private String setFileSystemRootFromResourceAndReturnRelativeRef(String resource) {
int lastSeparator = resource.lastIndexOf(File.separator);
String rootPath;
String filePart;
if (lastSeparator == -1) {
rootPath = new File("").getAbsolutePath();
filePart = resource;
} else {
//Get the location of the file. In the future, we'll treat this as the resource root
rootPath = resource.substring(0, resource.lastIndexOf(File.separator));
//cut off the end
filePart = resource.substring(resource.lastIndexOf(File.separator) + 1);
}
//(That root now reads as jr://file/)
ReferenceManager.instance().addReferenceFactory(new JavaFileRoot(rootPath));
//Now build the testing reference we'll use
return "jr://file/" + filePart;
}
private void init(String profileRef) {
try {
installAppFromReference(profileRef);
print.println("Table resources intialized and fully resolved.");
print.println(table);
} catch (InstallCancelledException e) {
print.println("Install was cancelled by the user or system");
e.printStackTrace(print);
System.exit(-1);
} catch (UnresolvedResourceException e) {
print.println("While attempting to resolve the necessary resources, one couldn't be found: " + e.getResource().getResourceId());
e.printStackTrace(print);
System.exit(-1);
} catch (UnfullfilledRequirementsException e) {
print.println("While attempting to resolve the necessary resources, a requirement wasn't met");
e.printStackTrace(print);
System.exit(-1);
}
}
public void installAppFromReference(String profileReference) throws UnresolvedResourceException,
UnfullfilledRequirementsException, InstallCancelledException {
ResourceManager.installAppResources(platform, profileReference, this.table, true);
}
public void initEnvironment() {
Localization.init(true);
try {
table.initializeResources(platform, false);
} catch (RuntimeException e) {
print.println("Error while initializing one of the resolved resources");
e.printStackTrace(print);
System.exit(-1);
}
//Make sure there's a default locale, since the app doesn't necessarily use the
//localization engine
Localization.getGlobalLocalizerAdvanced().addAvailableLocale("default");
Localization.setDefaultLocale("default");
print.println("Locales defined: ");
for (String locale : Localization.getGlobalLocalizerAdvanced().getAvailableLocales()) {
System.out.println("* " + locale);
}
setDefaultLocale();
}
private void setDefaultLocale() {
String defaultLocale = "default";
for (PropertySetter prop : platform.getCurrentProfile().getPropertySetters()) {
if ("cur_locale".equals(prop.getKey())) {
defaultLocale = prop.getValue();
break;
}
}
print.println("Setting locale to: " + defaultLocale);
Localization.setLocale(defaultLocale);
}
public void describeApplication() {
print.println("Locales defined: ");
for (String locale : Localization.getGlobalLocalizerAdvanced().getAvailableLocales()) {
System.out.println("* " + locale);
}
Localization.setDefaultLocale("default");
Vector<Menu> root = new Vector<>();
Hashtable<String, Vector<Menu>> mapping = new Hashtable<>();
mapping.put("root", new Vector<Menu>());
for (Suite s : platform.getInstalledSuites()) {
for (Menu m : s.getMenus()) {
if (m.getId().equals("root")) {
root.add(m);
} else {
Vector<Menu> menus = mapping.get(m.getRoot());
if (menus == null) {
menus = new Vector<>();
}
menus.add(m);
mapping.put(m.getRoot(), menus);
}
}
}
for (String locale : Localization.getGlobalLocalizerAdvanced().getAvailableLocales()) {
Localization.setLocale(locale);
print.println("Application details for locale: " + locale);
print.println("CommCare");
for (Menu m : mapping.get("root")) {
print.println("|- " + m.getName().evaluate());
for (String command : m.getCommandIds()) {
for (Suite s : platform.getInstalledSuites()) {
if (s.getEntries().containsKey(command)) {
print(s, s.getEntries().get(command), 2);
}
}
}
}
for (Menu m : root) {
for (String command : m.getCommandIds()) {
for (Suite s : platform.getInstalledSuites()) {
if (s.getEntries().containsKey(command)) {
print(s, s.getEntries().get(command), 1);
}
}
}
}
}
}
public CommCarePlatform getPlatform() {
return platform;
}
public FormDef loadFormByXmlns(String xmlns) {
IStorageUtilityIndexed<FormDef> formStorage =
(IStorageUtilityIndexed)StorageManager.getStorage(FormDef.STORAGE_KEY);
return formStorage.getRecordForValue("XMLNS", xmlns);
}
private void print(Suite s, Entry e, int level) {
String head = "";
String emptyhead = "";
for (int i = 0; i < level; ++i) {
head += "|- ";
emptyhead += " ";
}
if (e.isView()) {
print.println(head + "View: " + e.getText().evaluate());
} else {
print.println(head + "Entry: " + e.getText().evaluate());
}
for (SessionDatum datum : e.getSessionDataReqs()) {
if (datum instanceof FormIdDatum) {
print.println(emptyhead + "Form: " + datum.getValue());
} else if (datum instanceof EntityDatum) {
String shortDetailId = ((EntityDatum)datum).getShortDetail();
if (shortDetailId != null) {
Detail d = s.getDetail(shortDetailId);
try {
print.println(emptyhead + "|Select: " + d.getTitle().getText().evaluate(new EvaluationContext(null)));
} catch (XPathMissingInstanceException ex) {
print.println(emptyhead + "|Select: " + "(dynamic title)");
}
print.print(emptyhead + "| ");
for (DetailField f : d.getFields()) {
print.print(f.getHeader().evaluate() + " | ");
}
print.print("\n");
}
}
}
}
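// Minimal progress listener: prints one dot per newly installed resource.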
final static private class QuickStateListener implements TableStateListener {
int lastComplete = 0;
@Override
public void simpleResourceAdded() {
}
@Override
public void compoundResourceAdded(ResourceTable table) {
}
@Override
public void incrementProgress(int complete, int total) {
int diff = complete - lastComplete;
lastComplete = complete;
for (int i = 0; i < diff; ++i) {
System.out.print(".");
}
}
}
public void attemptAppUpdate(String updateTarget) {
ResourceTable global = table;
// Ok, should figure out what the state of this bad boy is.
Resource profileRef = global.getResourceWithId(CommCarePlatform.APP_PROFILE_RESOURCE_ID);
Profile profileObj = this.getPlatform().getCurrentProfile();
global.setStateListener(new QuickStateListener());
updateTable.setStateListener(new QuickStateListener());
// When profileRef points to an http(s) URL, add appropriate dev flags
String authRef = profileObj.getAuthReference();
try {
URL authUrl = new URL(authRef);
// If we want to be using/updating to the latest build of the
// app (instead of latest release), add it to the query tags of
// the profile reference
if (updateTarget != null &&
("https".equals(authUrl.getProtocol()) ||
"http".equals(authUrl.getProtocol()))) {
if (authUrl.getQuery() != null) {
// If the profileRef URL already has query strings
// just add a new one to the end
authRef = authRef + "&target=" + updateTarget;
} else {
// otherwise, start off the query string with a ?
authRef = authRef + "?target" + updateTarget;
}
}
} catch (MalformedURLException e) {
// profileRef couldn't be parsed as a URL, so don't worry
// about adding dev flags to the url's query
System.out.print("Warning: Unrecognized URL format: " + authRef);
}
try {
// This populates the upgrade table with resources based on
// binary files, starting with the profile file. If the new
// profile is not a newer version, stageUpgradeTable doesn't
// actually pull in all the new references
System.out.println("Checking for updates....");
ResourceManager resourceManager = new ResourceManager(platform, global, updateTable, recoveryTable);
resourceManager.stageUpgradeTable(authRef, true);
Resource newProfile = updateTable.getResourceWithId(CommCarePlatform.APP_PROFILE_RESOURCE_ID);
if (!newProfile.isNewer(profileRef)) {
System.out.println("Your app is up to date!");
return;
}
System.out.println("Update found. New Version: " + newProfile.getVersion());
System.out.println("Downloading / Preparing Update");
resourceManager.prepareUpgradeResources();
System.out.print("Installing update");
// Replaces global table with temporary, or w/ recovery if
// something goes wrong
resourceManager.upgrade();
} catch (UnresolvedResourceException e) {
System.out.println("Update Failed! Couldn't find or install one of the remote resources");
e.printStackTrace();
return;
} catch (UnfullfilledRequirementsException e) {
System.out.println("Update Failed! This CLI host is incompatible with the app");
e.printStackTrace();
return;
} catch (Exception e) {
System.out.println("Update Failed! There is a problem with one of the resources");
e.printStackTrace();
return;
}
// Initializes app resources and the app itself, including doing a check to see if this
// app record was converted by the db upgrader
initEnvironment();
}
}
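/*
Typical CLI-style usage (illustrative only; the archive path is hypothetical):

  CommCareConfigEngine engine = new CommCareConfigEngine();
  engine.initFromArchive("/tmp/app.ccz"); // or initFromLocalFileResource("profile.ccpr")
  engine.initEnvironment();
  engine.describeApplication();
*/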
|
package io.syndesis.qe.bdd;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.util.EnumSet;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import cucumber.api.java.en.Given;
import cucumber.api.java.en.Then;
import cucumber.api.java.en.When;
import io.fabric8.kubernetes.api.model.Pod;
import io.fabric8.kubernetes.client.KubernetesClientException;
import io.fabric8.openshift.api.model.Build;
import io.syndesis.common.model.connection.Connection;
import io.syndesis.qe.Component;
import io.syndesis.qe.TestConfiguration;
import io.syndesis.qe.endpoints.ConnectionsEndpoint;
import io.syndesis.qe.endpoints.TestSupport;
import io.syndesis.qe.templates.SyndesisTemplate;
import io.syndesis.qe.utils.LogCheckerUtils;
import io.syndesis.qe.utils.OpenShiftUtils;
import io.syndesis.qe.utils.RestUtils;
import io.syndesis.qe.utils.TestUtils;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class CommonSteps {
@Autowired
private ConnectionsEndpoint connectionsEndpoint;
@Given("^clean default namespace")
public void cleanNamespace() {
OpenShiftUtils.client().apps().statefulSets().inNamespace(TestConfiguration.openShiftNamespace()).delete();
OpenShiftUtils.client().extensions().deployments().inNamespace(TestConfiguration.openShiftNamespace()).delete();
OpenShiftUtils.client().serviceAccounts().withName("syndesis-oauth-client").delete();
OpenShiftUtils.getInstance().cleanAndAssert();
OpenShiftUtils.xtf().getTemplates().forEach(OpenShiftUtils.xtf()::deleteTemplate);
}
@Given("^clean all builds")
public void cleanBuilds() {
OpenShiftUtils.getInstance().getBuildConfigs().forEach(OpenShiftUtils.getInstance()::deleteBuildConfig);
OpenShiftUtils.getInstance().getBuilds().forEach(OpenShiftUtils.getInstance()::deleteBuild);
}
@When("^deploy Syndesis$")
public void deploySyndesis() {
SyndesisTemplate.deploy();
}
@When("^deploy Syndesis from template$")
public void deploySyndesisFromTemplate() {
SyndesisTemplate.deployUsingTemplate();
}
@Then("^wait for Syndesis to become ready")
public void waitForSyndesis() {
EnumSet<Component> components = EnumSet.allOf(Component.class);
ExecutorService executorService = Executors.newFixedThreadPool(components.size());
components.forEach(c -> {
Runnable runnable = () ->
OpenShiftUtils.xtf().waiters()
.areExactlyNPodsReady(1, "syndesis.io/component", c.getName())
.interval(TimeUnit.SECONDS, 20)
.timeout(TimeUnit.MINUTES, 12)
.assertEventually();
executorService.submit(runnable);
});
executorService.shutdown();
try {
if (!executorService.awaitTermination(20, TimeUnit.MINUTES)) {
executorService.shutdownNow();
fail("Syndesis wasn't initilized in time");
}
} catch (InterruptedException e) {
fail("Syndesis wasn't initilized in time");
}
}
@Then("^verify s2i build of integration \"([^\"]*)\" was finished in duration (\\d+) min$")
public void verifyBuild(String integrationName, int duration) {
String sanitizedName = integrationName.toLowerCase().replaceAll(" ", "-");
Optional<Build> s2iBuild = OpenShiftUtils.getInstance().getBuilds().stream().filter(b -> b.getMetadata().getName().contains(sanitizedName)).findFirst();
if (s2iBuild.isPresent()) {
Build build = s2iBuild.get();
String buildPodName = build.getMetadata().getAnnotations().get("openshift.io/build.pod-name");
Optional<Pod> buildPod = OpenShiftUtils.getInstance().getPods().stream().filter(p -> p.getMetadata().getName().equals(buildPodName)).findFirst();
if (buildPod.isPresent()) {
try {
boolean[] patternsInLogs = LogCheckerUtils.findPatternsInLogs(buildPod.get(), Pattern.compile(".*Downloading: \\b.*"));
assertThat(patternsInLogs).containsOnly(false);
} catch (IOException e) {
e.printStackTrace();
}
}
assertThat(build.getStatus().getPhase()).isEqualTo("Complete");
// % 1_000L is there to parse OpenShift ms format
assertThat(build.getStatus().getDuration() % 1_000L).isLessThan(duration * 60 * 1000);
} else {
fail("No build found for integration with name " + sanitizedName);
}
}
@Given("^clean application state")
public void resetState() {
// check that the PostgreSQL connection has been created
int i = 0;
while (i < 10) {
TestSupport.getInstance().resetDB();
Optional<Connection> optConnection = connectionsEndpoint.list().stream().filter(s -> s.getName().equals("PostgresDB")).findFirst();
if (optConnection.isPresent()) {
return;
}
i++;
}
fail("Default PostgresDB connection has not been created, please contact engineering!");
}
@Then("^sleep for jenkins delay or \"([^\"]*)\" seconds")
public void sleepForJenkinsDelay(int secs) {
TestUtils.sleepForJenkinsDelayIfHigher(secs);
}
@When("^refresh server port-forward")
public void refreshPortForward() {
RestUtils.reset();
RestUtils.getRestUrl();
}
}
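/*
Illustrative feature snippet wired to the steps above (scenario name is hypothetical):

  Scenario: Fresh Syndesis deployment
    Given clean default namespace
    When deploy Syndesis
    Then wait for Syndesis to become ready
*/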
|
package org.jboss.marshalling.river;
import java.io.Externalizable;
import java.io.IOException;
import java.io.InvalidClassException;
import java.io.InvalidObjectException;
import java.io.NotSerializableException;
import java.io.ObjectOutput;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.lang.reflect.Proxy;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.IdentityHashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.Map;
import java.util.TreeMap;
import java.util.TreeSet;
import org.jboss.marshalling.AbstractMarshaller;
import org.jboss.marshalling.ClassExternalizerFactory;
import org.jboss.marshalling.ClassTable;
import org.jboss.marshalling.Externalizer;
import org.jboss.marshalling.MarshallerObjectOutput;
import org.jboss.marshalling.MarshallingConfiguration;
import org.jboss.marshalling.ObjectResolver;
import org.jboss.marshalling.ObjectTable;
import org.jboss.marshalling.UTFUtils;
import org.jboss.marshalling.reflect.SerializableClass;
import org.jboss.marshalling.reflect.SerializableClassRegistry;
import org.jboss.marshalling.reflect.SerializableField;
import org.jboss.marshalling.util.IdentityIntMap;
import static org.jboss.marshalling.river.Protocol.*;
public class RiverMarshaller extends AbstractMarshaller {
private final IdentityIntMap<Object> instanceCache;
private final IdentityIntMap<Class<?>> classCache;
private final IdentityHashMap<Class<?>, Externalizer> externalizers;
private int instanceSeq;
private int classSeq;
private final SerializableClassRegistry registry;
private RiverObjectOutputStream objectOutputStream;
private ObjectOutput objectOutput;
private BlockMarshaller blockMarshaller;
protected RiverMarshaller(final RiverMarshallerFactory marshallerFactory, final SerializableClassRegistry registry, final MarshallingConfiguration configuration) throws IOException {
super(marshallerFactory, configuration);
if (configuredVersion > MAX_VERSION) {
throw new IOException("Unsupported protocol version " + configuredVersion);
}
this.registry = registry;
final float loadFactor = 0x0.5p0f;
instanceCache = new IdentityIntMap<Object>((int) ((double)configuration.getInstanceCount() / (double)loadFactor), loadFactor);
classCache = new IdentityIntMap<Class<?>>((int) ((double)configuration.getClassCount() / (double)loadFactor), loadFactor);
externalizers = new IdentityHashMap<Class<?>, Externalizer>(configuration.getClassCount());
}
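// Core write loop: handles null, back-references to previously written shared
// instances, object-table predefined objects and Class objects first, then
// applies writeReplace/global replacement, and finally writes enums, primitive
// wrappers, strings and arrays with version-specific encodings.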
protected void doWriteObject(final Object original, final boolean unshared) throws IOException {
final ClassExternalizerFactory classExternalizerFactory = this.classExternalizerFactory;
final ObjectResolver objectResolver = this.objectResolver;
Object obj = original;
Class<?> objClass;
int id;
boolean isArray, isEnum;
SerializableClass info;
boolean unreplaced = true;
final int configuredVersion = this.configuredVersion;
try {
for (;;) {
if (obj == null) {
write(ID_NULL);
return;
}
final int rid;
if (! unshared && (rid = instanceCache.get(obj, -1)) != -1) {
if (configuredVersion >= 2) {
final int diff = rid - instanceSeq;
if (diff >= -256) {
write(ID_REPEAT_OBJECT_NEAR);
write(diff);
} else if (diff >= -65536) {
write(ID_REPEAT_OBJECT_NEARISH);
writeShort(diff);
} else {
write(ID_REPEAT_OBJECT_FAR);
writeInt(rid);
}
return;
}
write(ID_REPEAT_OBJECT_FAR);
writeInt(rid);
return;
}
final ObjectTable.Writer objectTableWriter;
if (! unshared && (objectTableWriter = objectTable.getObjectWriter(obj)) != null) {
write(ID_PREDEFINED_OBJECT);
if (configuredVersion == 1) {
objectTableWriter.writeObject(getBlockMarshaller(), obj);
writeEndBlock();
} else {
objectTableWriter.writeObject(this, obj);
}
return;
}
objClass = obj.getClass();
id = (configuredVersion >= 2 ? BASIC_CLASSES_V2 : BASIC_CLASSES).get(objClass, -1);
// First, non-replaceable classes
if (id == ID_CLASS_CLASS) {
final Class<?> classObj = (Class<?>) obj;
if (configuredVersion >= 2) {
final int cid = BASIC_CLASSES_V2.get(classObj, -1);
switch (cid) {
case -1:
case ID_SINGLETON_MAP_OBJECT:
case ID_SINGLETON_SET_OBJECT:
case ID_SINGLETON_LIST_OBJECT:
case ID_EMPTY_MAP_OBJECT:
case ID_EMPTY_SET_OBJECT:
case ID_EMPTY_LIST_OBJECT: {
break;
}
default: {
write(cid);
return;
}
}
}
write(ID_NEW_OBJECT);
writeClassClass(classObj);
return;
}
isEnum = obj instanceof Enum;
isArray = objClass.isArray();
// objects with id != -1 will never make use of the "info" param in *any* way
info = isArray || isEnum || id != -1 ? null : registry.lookup(objClass);
// replace once - objects with id != -1 will not have replacement methods but might be globally replaced
if (unreplaced) {
if (info != null) {
// check for a user replacement
if (info.hasWriteReplace()) {
obj = info.callWriteReplace(obj);
}
}
// Check for a global replacement
obj = objectResolver.writeReplace(obj);
if (obj != original) {
unreplaced = false;
continue;
} else {
break;
}
} else {
break;
}
}
if (isEnum) {
// objClass cannot equal Enum.class because it is abstract
final Enum<?> theEnum = (Enum<?>) obj;
// enums are always shared
write(ID_NEW_OBJECT);
writeEnumClass(theEnum.getDeclaringClass());
writeString(theEnum.name());
instanceCache.put(obj, instanceSeq++);
return;
}
// Now replaceable classes
switch (id) {
case ID_BYTE_CLASS: {
if (configuredVersion >= 2) {
write(ID_BYTE_OBJECT);
writeByte(((Byte) obj).byteValue());
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_BYTE_CLASS);
writeByte(((Byte) obj).byteValue());
}
return;
}
case ID_BOOLEAN_CLASS: {
if (configuredVersion >= 2) {
write(((Boolean) obj).booleanValue() ? ID_BOOLEAN_OBJECT_TRUE : ID_BOOLEAN_OBJECT_FALSE);
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_BOOLEAN_CLASS);
writeBoolean(((Boolean) obj).booleanValue());
}
return;
}
case ID_CHARACTER_CLASS: {
if (configuredVersion >= 2) {
write(ID_CHARACTER_OBJECT);
writeChar(((Character) obj).charValue());
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_CHARACTER_CLASS);
writeChar(((Character) obj).charValue());
}
return;
}
case ID_DOUBLE_CLASS: {
if (configuredVersion >= 2) {
write(ID_DOUBLE_OBJECT);
writeDouble(((Double) obj).doubleValue());
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_DOUBLE_CLASS);
writeDouble(((Double) obj).doubleValue());
}
return;
}
case ID_FLOAT_CLASS: {
if (configuredVersion >= 2) {
write(ID_FLOAT_OBJECT);
writeFloat(((Float) obj).floatValue());
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_FLOAT_CLASS);
writeFloat(((Float) obj).floatValue());
}
return;
}
case ID_INTEGER_CLASS: {
if (configuredVersion >= 2) {
write(ID_INTEGER_OBJECT);
writeInt(((Integer) obj).intValue());
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_INTEGER_CLASS);
writeInt(((Integer) obj).intValue());
}
return;
}
case ID_LONG_CLASS: {
if (configuredVersion >= 2) {
write(ID_LONG_OBJECT);
writeLong(((Long) obj).longValue());
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_LONG_CLASS);
writeLong(((Long) obj).longValue());
}
return;
}
case ID_SHORT_CLASS: {
if (configuredVersion >= 2) {
write(ID_SHORT_OBJECT);
writeShort(((Short) obj).shortValue());
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_SHORT_CLASS);
writeShort(((Short) obj).shortValue());
}
return;
}
case ID_STRING_CLASS: {
final String string = (String) obj;
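            // Protocol version 2 writes a tiered length prefix: a dedicated code for the
            // empty string, a single byte for lengths up to 256, two bytes for lengths up
            // to 65536, and a full int otherwise, followed by the UTF-encoded characters.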
if (configuredVersion >= 2) {
final int len = string.length();
if (len == 0) {
write(ID_STRING_EMPTY);
// don't cache empty strings
return;
} else if (len <= 256) {
write(ID_STRING_SMALL);
write(len);
            } else if (len <= 65536) {
write(ID_STRING_MEDIUM);
writeShort(len);
} else {
write(ID_STRING_LARGE);
writeInt(len);
}
flush();
UTFUtils.writeUTFBytes(byteOutput, string);
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_STRING_CLASS);
writeString(string);
}
if (unshared) {
instanceCache.put(obj, -1);
instanceSeq++;
} else {
instanceCache.put(obj, instanceSeq++);
}
return;
}
case ID_BYTE_ARRAY_CLASS: {
if (! unshared) {
instanceCache.put(obj, instanceSeq++);
}
final byte[] bytes = (byte[]) obj;
final int len = bytes.length;
if (configuredVersion >= 2) {
if (len == 0) {
write(unshared ? ID_ARRAY_EMPTY_UNSHARED : ID_ARRAY_EMPTY);
write(ID_PRIM_BYTE);
} else if (len <= 256) {
write(unshared ? ID_ARRAY_SMALL_UNSHARED : ID_ARRAY_SMALL);
write(len);
write(ID_PRIM_BYTE);
write(bytes, 0, len);
} else if (len <= 65536) {
write(unshared ? ID_ARRAY_MEDIUM_UNSHARED : ID_ARRAY_MEDIUM);
writeShort(len);
write(ID_PRIM_BYTE);
write(bytes, 0, len);
} else {
write(unshared ? ID_ARRAY_LARGE_UNSHARED : ID_ARRAY_LARGE);
writeInt(len);
write(ID_PRIM_BYTE);
write(bytes, 0, len);
}
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_BYTE_ARRAY_CLASS);
writeInt(len);
write(bytes, 0, len);
}
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
case ID_BOOLEAN_ARRAY_CLASS: {
if (! unshared) {
instanceCache.put(obj, instanceSeq++);
}
final boolean[] booleans = (boolean[]) obj;
final int len = booleans.length;
if (configuredVersion >= 2) {
if (len == 0) {
write(unshared ? ID_ARRAY_EMPTY_UNSHARED : ID_ARRAY_EMPTY);
write(ID_PRIM_BOOLEAN);
} else if (len <= 256) {
write(unshared ? ID_ARRAY_SMALL_UNSHARED : ID_ARRAY_SMALL);
write(len);
write(ID_PRIM_BOOLEAN);
writeBooleanArray(booleans);
} else if (len <= 65536) {
write(unshared ? ID_ARRAY_MEDIUM_UNSHARED : ID_ARRAY_MEDIUM);
writeShort(len);
write(ID_PRIM_BOOLEAN);
writeBooleanArray(booleans);
} else {
write(unshared ? ID_ARRAY_LARGE_UNSHARED : ID_ARRAY_LARGE);
writeInt(len);
write(ID_PRIM_BOOLEAN);
writeBooleanArray(booleans);
}
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_BOOLEAN_ARRAY_CLASS);
writeInt(len);
writeBooleanArray(booleans);
}
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
case ID_CHAR_ARRAY_CLASS: {
if (! unshared) {
instanceCache.put(obj, instanceSeq++);
}
final char[] chars = (char[]) obj;
final int len = chars.length;
if (configuredVersion >= 2) {
if (len == 0) {
write(unshared ? ID_ARRAY_EMPTY_UNSHARED : ID_ARRAY_EMPTY);
write(ID_PRIM_CHAR);
} else if (len <= 256) {
write(unshared ? ID_ARRAY_SMALL_UNSHARED : ID_ARRAY_SMALL);
write(len);
write(ID_PRIM_CHAR);
for (int i = 0; i < len; i ++) {
writeChar(chars[i]);
}
} else if (len <= 65536) {
write(unshared ? ID_ARRAY_MEDIUM_UNSHARED : ID_ARRAY_MEDIUM);
writeShort(len);
write(ID_PRIM_CHAR);
for (int i = 0; i < len; i ++) {
writeChar(chars[i]);
}
} else {
write(unshared ? ID_ARRAY_LARGE_UNSHARED : ID_ARRAY_LARGE);
writeInt(len);
write(ID_PRIM_CHAR);
for (int i = 0; i < len; i ++) {
writeChar(chars[i]);
}
}
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_CHAR_ARRAY_CLASS);
writeInt(len);
for (int i = 0; i < len; i ++) {
writeChar(chars[i]);
}
}
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
case ID_SHORT_ARRAY_CLASS: {
if (! unshared) {
instanceCache.put(obj, instanceSeq++);
}
final short[] shorts = (short[]) obj;
final int len = shorts.length;
if (configuredVersion >= 2) {
if (len == 0) {
write(unshared ? ID_ARRAY_EMPTY_UNSHARED : ID_ARRAY_EMPTY);
write(ID_PRIM_SHORT);
} else if (len <= 256) {
write(unshared ? ID_ARRAY_SMALL_UNSHARED : ID_ARRAY_SMALL);
write(len);
write(ID_PRIM_SHORT);
for (int i = 0; i < len; i ++) {
writeShort(shorts[i]);
}
} else if (len <= 65536) {
write(unshared ? ID_ARRAY_MEDIUM_UNSHARED : ID_ARRAY_MEDIUM);
writeShort(len);
write(ID_PRIM_SHORT);
for (int i = 0; i < len; i ++) {
writeShort(shorts[i]);
}
} else {
write(unshared ? ID_ARRAY_LARGE_UNSHARED : ID_ARRAY_LARGE);
writeInt(len);
write(ID_PRIM_SHORT);
for (int i = 0; i < len; i ++) {
writeShort(shorts[i]);
}
}
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_SHORT_ARRAY_CLASS);
writeInt(len);
for (int i = 0; i < len; i ++) {
writeShort(shorts[i]);
}
}
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
case ID_INT_ARRAY_CLASS: {
if (! unshared) {
instanceCache.put(obj, instanceSeq++);
}
final int[] ints = (int[]) obj;
final int len = ints.length;
if (configuredVersion >= 2) {
if (len == 0) {
write(unshared ? ID_ARRAY_EMPTY_UNSHARED : ID_ARRAY_EMPTY);
write(ID_PRIM_INT);
} else if (len <= 256) {
write(unshared ? ID_ARRAY_SMALL_UNSHARED : ID_ARRAY_SMALL);
write(len);
write(ID_PRIM_INT);
for (int i = 0; i < len; i ++) {
writeInt(ints[i]);
}
} else if (len <= 65536) {
write(unshared ? ID_ARRAY_MEDIUM_UNSHARED : ID_ARRAY_MEDIUM);
writeShort(len);
write(ID_PRIM_INT);
for (int i = 0; i < len; i ++) {
writeInt(ints[i]);
}
} else {
write(unshared ? ID_ARRAY_LARGE_UNSHARED : ID_ARRAY_LARGE);
writeInt(len);
write(ID_PRIM_INT);
for (int i = 0; i < len; i ++) {
writeInt(ints[i]);
}
}
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_INT_ARRAY_CLASS);
writeInt(len);
for (int i = 0; i < len; i ++) {
writeInt(ints[i]);
}
}
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
case ID_LONG_ARRAY_CLASS: {
if (! unshared) {
instanceCache.put(obj, instanceSeq++);
}
final long[] longs = (long[]) obj;
final int len = longs.length;
if (configuredVersion >= 2) {
if (len == 0) {
write(unshared ? ID_ARRAY_EMPTY_UNSHARED : ID_ARRAY_EMPTY);
write(ID_PRIM_LONG);
} else if (len <= 256) {
write(unshared ? ID_ARRAY_SMALL_UNSHARED : ID_ARRAY_SMALL);
write(len);
write(ID_PRIM_LONG);
for (int i = 0; i < len; i ++) {
writeLong(longs[i]);
}
} else if (len <= 65536) {
write(unshared ? ID_ARRAY_MEDIUM_UNSHARED : ID_ARRAY_MEDIUM);
writeShort(len);
write(ID_PRIM_LONG);
for (int i = 0; i < len; i ++) {
writeLong(longs[i]);
}
} else {
write(unshared ? ID_ARRAY_LARGE_UNSHARED : ID_ARRAY_LARGE);
writeInt(len);
write(ID_PRIM_LONG);
for (int i = 0; i < len; i ++) {
writeLong(longs[i]);
}
}
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_LONG_ARRAY_CLASS);
writeInt(len);
for (int i = 0; i < len; i ++) {
writeLong(longs[i]);
}
}
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
case ID_FLOAT_ARRAY_CLASS: {
if (! unshared) {
instanceCache.put(obj, instanceSeq++);
}
final float[] floats = (float[]) obj;
final int len = floats.length;
if (configuredVersion >= 2) {
if (len == 0) {
write(unshared ? ID_ARRAY_EMPTY_UNSHARED : ID_ARRAY_EMPTY);
write(ID_PRIM_FLOAT);
} else if (len <= 256) {
write(unshared ? ID_ARRAY_SMALL_UNSHARED : ID_ARRAY_SMALL);
write(len);
write(ID_PRIM_FLOAT);
for (int i = 0; i < len; i ++) {
writeFloat(floats[i]);
}
} else if (len <= 65536) {
write(unshared ? ID_ARRAY_MEDIUM_UNSHARED : ID_ARRAY_MEDIUM);
writeShort(len);
write(ID_PRIM_FLOAT);
for (int i = 0; i < len; i ++) {
writeFloat(floats[i]);
}
} else {
write(unshared ? ID_ARRAY_LARGE_UNSHARED : ID_ARRAY_LARGE);
writeInt(len);
write(ID_PRIM_FLOAT);
for (int i = 0; i < len; i ++) {
writeFloat(floats[i]);
}
}
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_FLOAT_ARRAY_CLASS);
writeInt(len);
for (int i = 0; i < len; i ++) {
writeFloat(floats[i]);
}
}
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
case ID_DOUBLE_ARRAY_CLASS: {
          if (! unshared) {
            instanceCache.put(obj, instanceSeq++);
          }
final double[] doubles = (double[]) obj;
final int len = doubles.length;
if (configuredVersion >= 2) {
if (len == 0) {
write(unshared ? ID_ARRAY_EMPTY_UNSHARED : ID_ARRAY_EMPTY);
write(ID_PRIM_DOUBLE);
} else if (len <= 256) {
write(unshared ? ID_ARRAY_SMALL_UNSHARED : ID_ARRAY_SMALL);
write(len);
write(ID_PRIM_DOUBLE);
for (int i = 0; i < len; i ++) {
writeDouble(doubles[i]);
}
} else if (len <= 65536) {
write(unshared ? ID_ARRAY_MEDIUM_UNSHARED : ID_ARRAY_MEDIUM);
writeShort(len);
write(ID_PRIM_DOUBLE);
for (int i = 0; i < len; i ++) {
writeDouble(doubles[i]);
}
} else {
write(unshared ? ID_ARRAY_LARGE_UNSHARED : ID_ARRAY_LARGE);
writeInt(len);
write(ID_PRIM_DOUBLE);
for (int i = 0; i < len; i ++) {
writeDouble(doubles[i]);
}
}
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
write(ID_DOUBLE_ARRAY_CLASS);
writeInt(len);
for (int i = 0; i < len; i ++) {
writeDouble(doubles[i]);
}
}
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
case ID_CC_HASH_SET:
case ID_CC_LINKED_HASH_SET:
case ID_CC_TREE_SET:
case ID_CC_ARRAY_LIST:
case ID_CC_LINKED_LIST: {
instanceCache.put(obj, instanceSeq++);
final Collection<?> collection = (Collection<?>) obj;
final int len = collection.size();
if (len == 0) {
write(unshared ? ID_COLLECTION_EMPTY_UNSHARED : ID_COLLECTION_EMPTY);
write(id);
if (id == ID_CC_TREE_SET) {
doWriteObject(((TreeSet)collection).comparator(), false);
}
} else if (len <= 256) {
write(unshared ? ID_COLLECTION_SMALL_UNSHARED : ID_COLLECTION_SMALL);
write(len);
write(id);
if (id == ID_CC_TREE_SET) {
doWriteObject(((TreeSet)collection).comparator(), false);
}
for (Object o : collection) {
doWriteObject(o, false);
}
} else if (len <= 65536) {
write(unshared ? ID_COLLECTION_MEDIUM_UNSHARED : ID_COLLECTION_MEDIUM);
writeShort(len);
write(id);
if (id == ID_CC_TREE_SET) {
doWriteObject(((TreeSet)collection).comparator(), false);
}
for (Object o : collection) {
doWriteObject(o, false);
}
} else {
write(unshared ? ID_COLLECTION_LARGE_UNSHARED : ID_COLLECTION_LARGE);
writeInt(len);
write(id);
if (id == ID_CC_TREE_SET) {
doWriteObject(((TreeSet)collection).comparator(), false);
}
for (Object o : collection) {
doWriteObject(o, false);
}
}
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
case ID_CC_HASH_MAP:
case ID_CC_HASHTABLE:
case ID_CC_IDENTITY_HASH_MAP:
case ID_CC_LINKED_HASH_MAP:
case ID_CC_TREE_MAP: {
instanceCache.put(obj, instanceSeq++);
final Map<?, ?> map = (Map<?, ?>) obj;
final int len = map.size();
if (len == 0) {
write(unshared ? ID_COLLECTION_EMPTY_UNSHARED : ID_COLLECTION_EMPTY);
write(id);
if (id == ID_CC_TREE_MAP) {
doWriteObject(((TreeMap)map).comparator(), false);
}
} else if (len <= 256) {
write(unshared ? ID_COLLECTION_SMALL_UNSHARED : ID_COLLECTION_SMALL);
write(len);
write(id);
if (id == ID_CC_TREE_MAP) {
doWriteObject(((TreeMap)map).comparator(), false);
}
for (Map.Entry<?, ?> entry : map.entrySet()) {
doWriteObject(entry.getKey(), false);
doWriteObject(entry.getValue(), false);
}
} else if (len <= 65536) {
write(unshared ? ID_COLLECTION_MEDIUM_UNSHARED : ID_COLLECTION_MEDIUM);
writeShort(len);
write(id);
if (id == ID_CC_TREE_MAP) {
doWriteObject(((TreeMap)map).comparator(), false);
}
for (Map.Entry<?, ?> entry : map.entrySet()) {
doWriteObject(entry.getKey(), false);
doWriteObject(entry.getValue(), false);
}
} else {
write(unshared ? ID_COLLECTION_LARGE_UNSHARED : ID_COLLECTION_LARGE);
writeInt(len);
write(id);
if (id == ID_CC_TREE_MAP) {
doWriteObject(((TreeMap)map).comparator(), false);
}
for (Map.Entry<?, ?> entry : map.entrySet()) {
doWriteObject(entry.getKey(), false);
doWriteObject(entry.getValue(), false);
}
}
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
case ID_EMPTY_MAP_OBJECT:
case ID_EMPTY_SET_OBJECT:
case ID_EMPTY_LIST_OBJECT: {
write(id);
return;
}
case ID_SINGLETON_MAP_OBJECT: {
instanceCache.put(obj, instanceSeq++);
write(id);
final Map.Entry entry = (Map.Entry) ((Map) obj).entrySet().iterator().next();
doWriteObject(entry.getKey(), false);
doWriteObject(entry.getValue(), false);
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
case ID_SINGLETON_LIST_OBJECT:
case ID_SINGLETON_SET_OBJECT: {
instanceCache.put(obj, instanceSeq++);
write(id);
doWriteObject(((Collection)obj).iterator().next(), false);
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
case -1: break;
default: throw new NotSerializableException(objClass.getName());
}
if (isArray) {
instanceCache.put(obj, instanceSeq++);
final Object[] objects = (Object[]) obj;
final int len = objects.length;
if (configuredVersion >= 2) {
if (len == 0) {
write(unshared ? ID_ARRAY_EMPTY_UNSHARED : ID_ARRAY_EMPTY);
writeClass(objClass.getComponentType());
} else if (len <= 256) {
write(unshared ? ID_ARRAY_SMALL_UNSHARED : ID_ARRAY_SMALL);
write(len);
writeClass(objClass.getComponentType());
for (int i = 0; i < len; i++) {
doWriteObject(objects[i], unshared);
}
} else if (len <= 65536) {
write(unshared ? ID_ARRAY_MEDIUM_UNSHARED : ID_ARRAY_MEDIUM);
writeShort(len);
writeClass(objClass.getComponentType());
for (int i = 0; i < len; i++) {
doWriteObject(objects[i], unshared);
}
} else {
write(unshared ? ID_ARRAY_LARGE_UNSHARED : ID_ARRAY_LARGE);
writeInt(len);
writeClass(objClass.getComponentType());
for (int i = 0; i < len; i++) {
doWriteObject(objects[i], unshared);
}
}
} else {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
writeObjectArrayClass(objClass);
writeInt(len);
for (int i = 0; i < len; i++) {
doWriteObject(objects[i], unshared);
}
}
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
// serialize proxies efficiently
if (Proxy.isProxyClass(objClass)) {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
instanceCache.put(obj, instanceSeq++);
writeProxyClass(objClass);
doWriteObject(Proxy.getInvocationHandler(obj), false);
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
// it's a user type
// user type #1: externalizer
Externalizer externalizer;
if (externalizers.containsKey(objClass)) {
externalizer = externalizers.get(objClass);
} else {
externalizer = classExternalizerFactory.getExternalizer(objClass);
externalizers.put(objClass, externalizer);
}
if (externalizer != null) {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
writeExternalizerClass(objClass, externalizer);
instanceCache.put(obj, instanceSeq++);
final ObjectOutput objectOutput;
objectOutput = getObjectOutput();
externalizer.writeExternal(obj, objectOutput);
writeEndBlock();
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
// user type #2: externalizable
if (obj instanceof Externalizable) {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
instanceCache.put(obj, instanceSeq++);
final Externalizable ext = (Externalizable) obj;
final ObjectOutput objectOutput = getObjectOutput();
writeExternalizableClass(objClass);
ext.writeExternal(objectOutput);
writeEndBlock();
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
// user type #3: serializable
if (obj instanceof Serializable) {
write(unshared ? ID_NEW_OBJECT_UNSHARED : ID_NEW_OBJECT);
writeSerializableClass(objClass);
instanceCache.put(obj, instanceSeq++);
doWriteSerializableObject(info, obj, objClass);
if (unshared) {
instanceCache.put(obj, -1);
}
return;
}
throw new NotSerializableException(objClass.getName());
} finally {
if (! unreplaced && obj != original) {
final int replId = instanceCache.get(obj, -1);
if (replId != -1) {
instanceCache.put(original, replId);
}
}
}
}
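  // Booleans are packed eight per byte, least significant bit first; a trailing partial
  // group is written as one final byte padded with zero bits.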
private void writeBooleanArray(final boolean[] booleans) throws IOException {
final int len = booleans.length;
final int bc = len & ~7;
for (int i = 0; i < bc;) {
write(
(booleans[i++] ? 1 : 0)
| (booleans[i++] ? 2 : 0)
| (booleans[i++] ? 4 : 0)
| (booleans[i++] ? 8 : 0)
| (booleans[i++] ? 16 : 0)
| (booleans[i++] ? 32 : 0)
| (booleans[i++] ? 64 : 0)
| (booleans[i++] ? 128 : 0)
);
}
if (bc < len) {
int out = 0;
int bit = 1;
for (int i = bc; i < len; i++) {
if (booleans[i]) out |= bit;
bit <<= 1;
}
write(out);
}
}
private void writeEndBlock() throws IOException {
final BlockMarshaller blockMarshaller = this.blockMarshaller;
if (blockMarshaller != null) {
blockMarshaller.flush();
writeByte(ID_END_BLOCK_DATA);
}
}
protected ObjectOutput getObjectOutput() {
    final ObjectOutput output = objectOutput;
    if (output != null) {
      return output;
    }
    return configuredVersion == 0 ? (objectOutput = new MarshallerObjectOutput(this)) : (objectOutput = getBlockMarshaller());
}
protected BlockMarshaller getBlockMarshaller() {
final BlockMarshaller blockMarshaller = this.blockMarshaller;
return blockMarshaller == null ? (this.blockMarshaller = new BlockMarshaller(this, bufferSize)) : blockMarshaller;
}
private RiverObjectOutputStream getObjectOutputStream() throws IOException {
final RiverObjectOutputStream objectOutputStream = this.objectOutputStream;
return objectOutputStream == null ? this.objectOutputStream = createObjectOutputStream() : objectOutputStream;
}
private final PrivilegedExceptionAction<RiverObjectOutputStream> createObjectOutputStreamAction = new PrivilegedExceptionAction<RiverObjectOutputStream>() {
public RiverObjectOutputStream run() throws IOException {
return new RiverObjectOutputStream(configuredVersion == 0 ? RiverMarshaller.this : getBlockMarshaller(), RiverMarshaller.this);
}
};
private RiverObjectOutputStream createObjectOutputStream() throws IOException {
try {
return AccessController.doPrivileged(createObjectOutputStreamAction);
} catch (PrivilegedActionException e) {
throw (IOException) e.getCause();
}
}
protected void doWriteSerializableObject(final SerializableClass info, final Object obj, final Class<?> objClass) throws IOException {
final Class<?> superclass = objClass.getSuperclass();
if (Serializable.class.isAssignableFrom(superclass)) {
doWriteSerializableObject(registry.lookup(superclass), obj, superclass);
}
if (info.hasWriteObject()) {
final RiverObjectOutputStream objectOutputStream = getObjectOutputStream();
final SerializableClass oldInfo = objectOutputStream.swapClass(info);
final Object oldObj = objectOutputStream.swapCurrent(obj);
final RiverObjectOutputStream.State restoreState = objectOutputStream.start();
boolean ok = false;
try {
info.callWriteObject(obj, objectOutputStream);
writeEndBlock();
objectOutputStream.finish(restoreState);
objectOutputStream.swapCurrent(oldObj);
objectOutputStream.swapClass(oldInfo);
ok = true;
} finally {
if (! ok) {
objectOutputStream.fullReset();
}
}
} else {
doWriteFields(info, obj);
}
}
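  // Writes the serializable fields in the order reported by SerializableClass: primitive
  // fields are written directly, object fields recurse through doWriteObject honouring the
  // per-field unshared flag.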
protected void doWriteFields(final SerializableClass info, final Object obj) throws IOException {
final SerializableField[] serializableFields = info.getFields();
for (SerializableField serializableField : serializableFields) {
try {
final Field field = serializableField.getField();
switch (serializableField.getKind()) {
case BOOLEAN: {
writeBoolean(field.getBoolean(obj));
break;
}
case BYTE: {
writeByte(field.getByte(obj));
break;
}
case SHORT: {
writeShort(field.getShort(obj));
break;
}
case INT: {
writeInt(field.getInt(obj));
break;
}
case CHAR: {
writeChar(field.getChar(obj));
break;
}
case LONG: {
writeLong(field.getLong(obj));
break;
}
case DOUBLE: {
writeDouble(field.getDouble(obj));
break;
}
case FLOAT: {
writeFloat(field.getFloat(obj));
break;
}
case OBJECT: {
doWriteObject(field.get(obj), serializableField.isUnshared());
break;
}
}
} catch (IllegalAccessException e) {
final InvalidObjectException ioe = new InvalidObjectException("Unexpected illegal access exception");
ioe.initCause(e);
throw ioe;
}
}
}
protected void writeProxyClass(final Class<?> objClass) throws IOException {
if (! writeKnownClass(objClass)) {
writeNewProxyClass(objClass);
}
}
protected void writeNewProxyClass(final Class<?> objClass) throws IOException {
ClassTable.Writer classTableWriter = classTable.getClassWriter(objClass);
if (classTableWriter != null) {
write(ID_PREDEFINED_PROXY_CLASS);
classCache.put(objClass, classSeq++);
writeClassTableData(objClass, classTableWriter);
} else {
write(ID_PROXY_CLASS);
final String[] names = classResolver.getProxyInterfaces(objClass);
writeInt(names.length);
for (String name : names) {
writeString(name);
}
classCache.put(objClass, classSeq++);
if (configuredVersion == 1) {
final BlockMarshaller blockMarshaller = getBlockMarshaller();
classResolver.annotateProxyClass(blockMarshaller, objClass);
writeEndBlock();
} else {
classResolver.annotateProxyClass(this, objClass);
}
}
}
protected void writeEnumClass(final Class<? extends Enum> objClass) throws IOException {
if (! writeKnownClass(objClass)) {
writeNewEnumClass(objClass);
}
}
protected void writeNewEnumClass(final Class<? extends Enum> objClass) throws IOException {
ClassTable.Writer classTableWriter = classTable.getClassWriter(objClass);
if (classTableWriter != null) {
write(ID_PREDEFINED_ENUM_TYPE_CLASS);
classCache.put(objClass, classSeq++);
writeClassTableData(objClass, classTableWriter);
} else {
write(ID_ENUM_TYPE_CLASS);
writeString(classResolver.getClassName(objClass));
classCache.put(objClass, classSeq++);
doAnnotateClass(objClass);
}
}
protected void writeClassClass(final Class<?> classObj) throws IOException {
write(ID_CLASS_CLASS);
writeClass(classObj);
// not cached
}
protected void writeObjectArrayClass(final Class<?> objClass) throws IOException {
write(ID_OBJECT_ARRAY_TYPE_CLASS);
writeClass(objClass.getComponentType());
classCache.put(objClass, classSeq++);
}
protected void writeClass(final Class<?> objClass) throws IOException {
if (! writeKnownClass(objClass)) {
writeNewClass(objClass);
}
}
private static final IdentityIntMap<Class<?>> BASIC_CLASSES;
private static final IdentityIntMap<Class<?>> BASIC_CLASSES_V2;
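  // The two lookup tables are populated once: BASIC_CLASSES covers the classes handled by
  // protocol version 1 (primitives, boxed types, String, Class, Enum and the primitive
  // array types); BASIC_CLASSES_V2 additionally maps the common JDK collection classes and
  // the Collections empty/singleton implementation classes to compact codes.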
static {
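    // 0x0.6p0f is a hexadecimal floating-point literal equal to 0.375, used here as the
    // identity map's load factor.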
final IdentityIntMap<Class<?>> map = new IdentityIntMap<Class<?>>(0x0.6p0f);
map.put(byte.class, ID_PRIM_BYTE);
map.put(boolean.class, ID_PRIM_BOOLEAN);
map.put(char.class, ID_PRIM_CHAR);
map.put(double.class, ID_PRIM_DOUBLE);
map.put(float.class, ID_PRIM_FLOAT);
map.put(int.class, ID_PRIM_INT);
map.put(long.class, ID_PRIM_LONG);
map.put(short.class, ID_PRIM_SHORT);
map.put(void.class, ID_VOID);
map.put(Byte.class, ID_BYTE_CLASS);
map.put(Boolean.class, ID_BOOLEAN_CLASS);
map.put(Character.class, ID_CHARACTER_CLASS);
map.put(Double.class, ID_DOUBLE_CLASS);
map.put(Float.class, ID_FLOAT_CLASS);
map.put(Integer.class, ID_INTEGER_CLASS);
map.put(Long.class, ID_LONG_CLASS);
map.put(Short.class, ID_SHORT_CLASS);
map.put(Void.class, ID_VOID_CLASS);
map.put(Object.class, ID_OBJECT_CLASS);
map.put(Class.class, ID_CLASS_CLASS);
map.put(String.class, ID_STRING_CLASS);
map.put(Enum.class, ID_ENUM_CLASS);
map.put(byte[].class, ID_BYTE_ARRAY_CLASS);
map.put(boolean[].class, ID_BOOLEAN_ARRAY_CLASS);
map.put(char[].class, ID_CHAR_ARRAY_CLASS);
map.put(double[].class, ID_DOUBLE_ARRAY_CLASS);
map.put(float[].class, ID_FLOAT_ARRAY_CLASS);
map.put(int[].class, ID_INT_ARRAY_CLASS);
map.put(long[].class, ID_LONG_ARRAY_CLASS);
map.put(short[].class, ID_SHORT_ARRAY_CLASS);
BASIC_CLASSES = map.clone();
map.put(ArrayList.class, ID_CC_ARRAY_LIST);
map.put(LinkedList.class, ID_CC_LINKED_LIST);
map.put(HashSet.class, ID_CC_HASH_SET);
map.put(LinkedHashSet.class, ID_CC_LINKED_HASH_SET);
map.put(TreeSet.class, ID_CC_TREE_SET);
map.put(IdentityHashMap.class, ID_CC_IDENTITY_HASH_MAP);
map.put(HashMap.class, ID_CC_HASH_MAP);
map.put(Hashtable.class, ID_CC_HASHTABLE);
map.put(LinkedHashMap.class, ID_CC_LINKED_HASH_MAP);
map.put(TreeMap.class, ID_CC_TREE_MAP);
map.put(emptyListClass, ID_EMPTY_LIST_OBJECT); // special case
map.put(singletonListClass, ID_SINGLETON_LIST_OBJECT); // special case
map.put(emptySetClass, ID_EMPTY_SET_OBJECT); // special case
map.put(singletonSetClass, ID_SINGLETON_SET_OBJECT); // special case
map.put(emptyMapClass, ID_EMPTY_MAP_OBJECT); // special case
map.put(singletonMapClass, ID_SINGLETON_MAP_OBJECT); // special case
BASIC_CLASSES_V2 = map;
}
protected void writeNewClass(final Class<?> objClass) throws IOException {
if (objClass.isEnum()) {
writeNewEnumClass(objClass.asSubclass(Enum.class));
} else if (Proxy.isProxyClass(objClass)) {
writeNewProxyClass(objClass);
} else if (objClass.isArray()) {
writeObjectArrayClass(objClass);
} else if (! objClass.isInterface() && Serializable.class.isAssignableFrom(objClass)) {
if (Externalizable.class.isAssignableFrom(objClass)) {
writeNewExternalizableClass(objClass);
} else {
writeNewSerializableClass(objClass);
}
} else {
ClassTable.Writer classTableWriter = classTable.getClassWriter(objClass);
if (classTableWriter != null) {
write(ID_PREDEFINED_PLAIN_CLASS);
classCache.put(objClass, classSeq++);
writeClassTableData(objClass, classTableWriter);
} else {
write(ID_PLAIN_CLASS);
writeString(classResolver.getClassName(objClass));
doAnnotateClass(objClass);
classCache.put(objClass, classSeq++);
}
}
}
private void writeClassTableData(final Class<?> objClass, final ClassTable.Writer classTableWriter) throws IOException {
if (configuredVersion == 1) {
classTableWriter.writeClass(getBlockMarshaller(), objClass);
writeEndBlock();
} else {
classTableWriter.writeClass(this, objClass);
}
}
protected boolean writeKnownClass(final Class<?> objClass) throws IOException {
int i = (configuredVersion >= 2 ? BASIC_CLASSES_V2 : BASIC_CLASSES).get(objClass, -1);
if (i != -1) {
write(i);
return true;
}
i = classCache.get(objClass, -1);
if (i != -1) {
if (configuredVersion >= 2) {
final int diff = i - classSeq;
if (diff >= -256) {
write(ID_REPEAT_CLASS_NEAR);
write(diff);
} else if (diff >= -65536) {
write(ID_REPEAT_CLASS_NEARISH);
writeShort(diff);
        } else {
          write(ID_REPEAT_CLASS_FAR);
          writeInt(i);
        }
return true;
}
write(ID_REPEAT_CLASS_FAR);
writeInt(i);
return true;
}
return false;
}
protected void writeSerializableClass(final Class<?> objClass) throws IOException {
if (! writeKnownClass(objClass)) {
writeNewSerializableClass(objClass);
}
}
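  // A new serializable class descriptor consists of the class name, the effective
  // serialVersionUID, the class annotation, and one (name, type, unshared) triple per
  // serializable field, followed by the superclass descriptor.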
protected void writeNewSerializableClass(final Class<?> objClass) throws IOException {
ClassTable.Writer classTableWriter = classTable.getClassWriter(objClass);
if (classTableWriter != null) {
write(ID_PREDEFINED_SERIALIZABLE_CLASS);
classCache.put(objClass, classSeq++);
writeClassTableData(objClass, classTableWriter);
} else {
final SerializableClass info = registry.lookup(objClass);
if (configuredVersion > 0 && info.hasWriteObject()) {
write(ID_WRITE_OBJECT_CLASS);
} else {
write(ID_SERIALIZABLE_CLASS);
}
writeString(classResolver.getClassName(objClass));
writeLong(info.getEffectiveSerialVersionUID());
classCache.put(objClass, classSeq++);
doAnnotateClass(objClass);
final SerializableField[] fields = info.getFields();
final int cnt = fields.length;
writeInt(cnt);
for (int i = 0; i < cnt; i++) {
SerializableField field = fields[i];
writeUTF(field.getName());
try {
writeClass(field.getType());
} catch (ClassNotFoundException e) {
throw new InvalidClassException("Class of field was unloaded");
}
writeBoolean(field.isUnshared());
}
}
writeClass(objClass.getSuperclass());
}
protected void writeExternalizableClass(final Class<?> objClass) throws IOException {
if (! writeKnownClass(objClass)) {
writeNewExternalizableClass(objClass);
}
}
protected void writeNewExternalizableClass(final Class<?> objClass) throws IOException {
ClassTable.Writer classTableWriter = classTable.getClassWriter(objClass);
if (classTableWriter != null) {
write(ID_PREDEFINED_EXTERNALIZABLE_CLASS);
classCache.put(objClass, classSeq++);
writeClassTableData(objClass, classTableWriter);
} else {
write(ID_EXTERNALIZABLE_CLASS);
writeString(classResolver.getClassName(objClass));
writeLong(registry.lookup(objClass).getEffectiveSerialVersionUID());
classCache.put(objClass, classSeq++);
doAnnotateClass(objClass);
}
}
protected void writeExternalizerClass(final Class<?> objClass, final Externalizer externalizer) throws IOException {
if (! writeKnownClass(objClass)) {
writeNewExternalizerClass(objClass, externalizer);
}
}
protected void writeNewExternalizerClass(final Class<?> objClass, final Externalizer externalizer) throws IOException {
ClassTable.Writer classTableWriter = classTable.getClassWriter(objClass);
if (classTableWriter != null) {
write(ID_PREDEFINED_EXTERNALIZER_CLASS);
classCache.put(objClass, classSeq++);
writeClassTableData(objClass, classTableWriter);
} else {
write(ID_EXTERNALIZER_CLASS);
writeString(classResolver.getClassName(objClass));
classCache.put(objClass, classSeq++);
doAnnotateClass(objClass);
}
writeObject(externalizer);
}
protected void doAnnotateClass(final Class<?> objClass) throws IOException {
if (configuredVersion == 1) {
classResolver.annotateClass(getBlockMarshaller(), objClass);
writeEndBlock();
} else {
classResolver.annotateClass(this, objClass);
}
}
public void clearInstanceCache() throws IOException {
instanceCache.clear();
instanceSeq = 0;
if (byteOutput != null) {
write(ID_CLEAR_INSTANCE_CACHE);
}
}
public void clearClassCache() throws IOException {
classCache.clear();
externalizers.clear();
classSeq = 0;
instanceCache.clear();
instanceSeq = 0;
if (byteOutput != null) {
write(ID_CLEAR_CLASS_CACHE);
}
}
protected void doStart() throws IOException {
super.doStart();
final int configuredVersion = this.configuredVersion;
if (configuredVersion > 0) {
writeByte(configuredVersion);
}
}
private void writeString(String string) throws IOException {
writeInt(string.length());
flush();
UTFUtils.writeUTFBytes(byteOutput, string);
}
// Replace writeUTF with a faster, non-scanning version
public void writeUTF(final String string) throws IOException {
writeInt(string.length());
flush();
UTFUtils.writeUTFBytes(byteOutput, string);
}
}
|
package de.saumya.mojo.rspec;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.List;
import java.util.Properties;
import org.apache.maven.plugin.MojoExecutionException;
import de.saumya.mojo.gem.AbstractGemMojo;
/**
 * Executes the project's RSpec tests with JRuby.
*
* @goal test
* @phase test
* @requiresDependencyResolution test
*/
public class RSpecMojo extends AbstractGemMojo {
/**
* The project base directory
*
* @parameter expression="${basedir}"
* @required
* @readonly
*/
protected File basedir;
/**
* The classpath elements of the project being tested.
*
* @parameter expression="${project.testClasspathElements}"
* @required
* @readonly
*/
protected List<String> classpathElements;
/**
* The flag to skip tests (optional, defaults to "false")
*
* @parameter expression="${maven.test.skip}"
*/
protected boolean skipTests;
/**
* The directory containing the RSpec source files
*
* @parameter expression="spec"
*/
protected String specSourceDirectory;
/**
* The directory where the RSpec report will be written to
*
* @parameter expression="${basedir}/target"
* @required
*/
protected String outputDirectory;
/**
* The name of the RSpec report (optional, defaults to "rspec-report.html")
*
* @parameter expression="rspec-report.html"
*/
protected String reportName;
/**
* List of system properties to set for the tests.
*
* @parameter
*/
protected Properties systemProperties;
private RSpecScriptFactory rspecScriptFactory = new RSpecScriptFactory();
private ShellScriptFactory shellScriptFactory = new ShellScriptFactory();
private File specSourceDirectory() {
return new File(launchDirectory(), specSourceDirectory);
}
@Override
public void executeWithGems() throws MojoExecutionException {
if (skipTests) {
getLog().info("Skipping RSpec tests");
return;
}
final File specSourceDirectory = specSourceDirectory();
if (!specSourceDirectory.exists()) {
getLog().info("Skipping RSpec tests since " + specSourceDirectory + " is missing");
return;
}
getLog().info("Running RSpec tests from " + specSourceDirectory);
String reportPath = new File(outputDirectory, reportName).getAbsolutePath();
initScriptFactory(rspecScriptFactory, reportPath);
initScriptFactory(shellScriptFactory, reportPath);
try {
rspecScriptFactory.emit();
} catch (Exception e) {
getLog().error("error emitting .rb", e);
}
try {
shellScriptFactory.emit();
} catch (Exception e) {
getLog().error("error emitting .sh", e);
}
execute(rspecScriptFactory.getScriptFile().getPath());
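    // The generated report is scanned line by line; the build passes only if a line
    // containing "0 failures" is found, otherwise a MojoExecutionException is thrown below.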
File reportFile = new File(reportPath);
Reader in = null;
try {
in = new FileReader(reportFile);
BufferedReader reader = new BufferedReader(in);
String line = null;
while ((line = reader.readLine()) != null) {
if (line.contains("0 failures")) {
return;
}
}
} catch (IOException e) {
throw new MojoExecutionException("Unable to read test report file: " + reportFile);
} finally {
if ( in != null ) {
try {
in.close();
} catch (IOException e) {
throw new MojoExecutionException( e.getMessage() );
}
}
}
throw new MojoExecutionException("There were test failures");
}
private void initScriptFactory(ScriptFactory factory, String reportPath) {
factory.setBaseDir(basedir.getAbsolutePath());
factory.setClasspathElements(classpathElements);
factory.setOutputDir(new File( outputDirectory) );
factory.setReportPath(reportPath);
factory.setSourceDir(specSourceDirectory().getAbsolutePath());
Properties props = systemProperties;
if (props == null) {
props = new Properties();
}
factory.setSystemProperties(props);
}
}
|
package com.vip.saturn.job.java;
import com.vip.saturn.job.SaturnJobExecutionContext;
import com.vip.saturn.job.SaturnJobReturn;
import com.vip.saturn.job.SaturnSystemErrorGroup;
import com.vip.saturn.job.SaturnSystemReturnCode;
import com.vip.saturn.job.basic.*;
import com.vip.saturn.job.exception.JobException;
import com.vip.saturn.job.internal.config.JobConfiguration;
import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
public class SaturnJavaJob extends CrondJob {
private static Logger log = LoggerFactory.getLogger(SaturnJavaJob.class);
private Map<Integer, ShardingItemFutureTask> futureTaskMap;
private Object jobBusinessInstance = null;
public JavaShardingItemCallable createCallable(String jobName, Integer item, String itemValue, int timeoutSeconds,
SaturnExecutionContext shardingContext, AbstractSaturnJob saturnJob){
return new JavaShardingItemCallable(jobName, item, itemValue,
timeoutSeconds, shardingContext, saturnJob);
}
@Override
public void init() throws SchedulerException{
super.init();
createJobBusinessInstanceIfNecessary();
getJobVersionIfNecessary();
}
private void getJobVersionIfNecessary() throws SchedulerException {
if (jobBusinessInstance != null) {
ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
Thread.currentThread().setContextClassLoader(saturnExecutorService.getJobClassLoader());
try {
String version = (String) jobBusinessInstance.getClass().getMethod("getJobVersion").invoke(jobBusinessInstance);
setJobVersion(version);
} catch (Throwable t) {
log.error(String.format(SaturnConstant.ERROR_LOG_FORMAT, jobName, "error throws during get job version"), t);
throw new SchedulerException(t);
} finally {
Thread.currentThread().setContextClassLoader(oldClassLoader);
}
}
}
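    // Loads the configured job class with the job class loader, preferring a static
    // getObject() factory method when present and falling back to the no-arg constructor,
    // then injects a SaturnApi instance via the reflective setSaturnApi call.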
private void createJobBusinessInstanceIfNecessary() throws SchedulerException {
JobConfiguration currentConf = configService.getJobConfiguration();
String jobClassStr = currentConf.getJobClass();
if (jobClassStr != null && !jobClassStr.trim().isEmpty()) {
if (jobBusinessInstance == null) {
ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
ClassLoader jobClassLoader = saturnExecutorService.getJobClassLoader();
Thread.currentThread().setContextClassLoader(jobClassLoader);
try {
Class<?> jobClass = jobClassLoader.loadClass(currentConf.getJobClass());
try {
Method getObject = jobClass.getMethod("getObject");
if (getObject != null) {
try {
jobBusinessInstance = getObject.invoke(null);
} catch (Throwable t) {
log.error(String.format(SaturnConstant.ERROR_LOG_FORMAT, jobName, jobClassStr + " getObject error"), t);
}
}
} catch (Exception ex) {//NOSONAR
//log.error("",ex);
}
if (jobBusinessInstance == null) {
jobBusinessInstance = jobClass.newInstance();
}
SaturnApi saturnApi = new SaturnApi(getNamespace(), executorName);
saturnApi.setConfigService(getConfigService());
jobClass.getMethod("setSaturnApi", Object.class).invoke(jobBusinessInstance, saturnApi);
} catch (Throwable t) {
log.error(String.format(SaturnConstant.ERROR_LOG_FORMAT, jobName, "create job business instance error"), t);
throw new SchedulerException(t);
} finally {
Thread.currentThread().setContextClassLoader(oldClassLoader);
}
}
}
if (jobBusinessInstance == null) {
throw new SchedulerException("init job business instance failed, the job class is " + jobClassStr);
}
}
@Override
protected Map<Integer, SaturnJobReturn> handleJob(final SaturnExecutionContext shardingContext) {
final Map<Integer, SaturnJobReturn> retMap = new HashMap<Integer, SaturnJobReturn>();
final String jobName = shardingContext.getJobName();
final int timeoutSeconds = getTimeoutSeconds();
ExecutorService executorService = getExecutorService();
futureTaskMap = new HashMap<Integer, ShardingItemFutureTask>();
String jobParameter = shardingContext.getJobParameter();
// shardingItemParametersKey/Value
Map<Integer, String> shardingItemParameters = shardingContext.getShardingItemParameters();
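        // Each sharding item is wrapped in a ShardingItemFutureTask and submitted to the
        // executor service; items with a positive timeout are additionally registered with
        // the timeout scheduler. Results are collected afterwards by blocking on each future.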
for (final Entry<Integer, String> shardingItem : shardingItemParameters.entrySet()) {
final Integer key = shardingItem.getKey();
try {
String jobValue = shardingItem.getValue();
final String itemVal = getRealItemValue(jobParameter, jobValue);
ShardingItemFutureTask shardingItemFutureTask = new ShardingItemFutureTask(createCallable(jobName, key, itemVal,
timeoutSeconds, shardingContext, this),null);
Future<?> callFuture = executorService.submit(shardingItemFutureTask);
if (timeoutSeconds > 0) {
TimeoutSchedulerExecutor.scheduleTimeoutJob(shardingContext.getExecutorName(), timeoutSeconds,
shardingItemFutureTask);
}
shardingItemFutureTask.setCallFuture(callFuture);
futureTaskMap.put(key, shardingItemFutureTask);
} catch (Throwable t) {
log.error(String.format(SaturnConstant.ERROR_LOG_FORMAT, jobName, t.getMessage()), t);
retMap.put(key, new SaturnJobReturn(SaturnSystemReturnCode.SYSTEM_FAIL, t.getMessage(),
SaturnSystemErrorGroup.FAIL));
}
}
for (Entry<Integer, ShardingItemFutureTask> entry : futureTaskMap.entrySet()) {
Integer item = entry.getKey();
ShardingItemFutureTask futureTask = entry.getValue();
try {
futureTask.getCallFuture().get();
} catch (Exception e) {
log.error(String.format(SaturnConstant.ERROR_LOG_FORMAT, jobName, e.getMessage()), e);
retMap.put(item, new SaturnJobReturn(SaturnSystemReturnCode.SYSTEM_FAIL, e.getMessage(),
SaturnSystemErrorGroup.FAIL));
continue;
}
retMap.put(item, futureTask.getCallable().getSaturnJobReturn());
}
return retMap;
}
@Override
public void abort() {
super.abort();
forceStop();
}
@Override
public void forceStop() {
super.forceStop();
if (futureTaskMap != null) {
for (ShardingItemFutureTask shardingItemFutureTask : futureTaskMap.values()) {
JavaShardingItemCallable shardingItemCallable = shardingItemFutureTask.getCallable();
Thread currentThread = shardingItemCallable.getCurrentThread();
if (currentThread != null) {
try {
log.info("[{}] msg=force stop {} - {}", jobName, shardingItemCallable.getJobName(),
shardingItemCallable.getItem());
if (shardingItemCallable.forceStop()) {
ShardingItemFutureTask.killRunningBusinessThread(shardingItemFutureTask);
}
} catch (Throwable t) {
log.error(String.format(SaturnConstant.ERROR_LOG_FORMAT, jobName, t.getMessage()), t);
}
}
}
}
}
@Override
public SaturnJobReturn doExecution(String jobName, Integer key, String value,
SaturnExecutionContext shardingContext, JavaShardingItemCallable callable) throws Throwable{
return handleJavaJob(jobName, key, value, shardingContext,callable);
}
public SaturnJobReturn handleJavaJob(String jobName, Integer key, String value,
SaturnExecutionContext shardingContext, JavaShardingItemCallable callable) throws Throwable {
String jobClass = shardingContext.getJobConfiguration().getJobClass();
log.info("[{}] msg=Running SaturnJavaJob, jobClass is {} ", jobName, jobClass);
try {
if( jobBusinessInstance == null){
throw new JobException("the job business instance is not initialized");
}
ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
ClassLoader jobClassLoader = saturnExecutorService.getJobClassLoader();
Thread.currentThread().setContextClassLoader(jobClassLoader);
try {
Class<?> saturnJobExecutionContextClazz = jobClassLoader
.loadClass(SaturnJobExecutionContext.class.getCanonicalName());
Object ret = jobBusinessInstance.getClass()
.getMethod("handleJavaJob", String.class, Integer.class, String.class,
saturnJobExecutionContextClazz)
.invoke(jobBusinessInstance, jobName, key, value, callable.getContextForJob(jobClassLoader));
SaturnJobReturn saturnJobReturn = (SaturnJobReturn) JavaShardingItemCallable.cloneObject(ret, saturnExecutorService.getExecutorClassLoader());
if(saturnJobReturn != null) {
callable.setBusinessReturned(true);
}
return saturnJobReturn;
} finally {
Thread.currentThread().setContextClassLoader(oldClassLoader);
}
} catch (Exception e) {
if (e.getCause() instanceof ThreadDeath) {
throw e.getCause();
}
String message = logBusinessExceptionIfNecessary(jobName, e);
return new SaturnJobReturn(SaturnSystemReturnCode.USER_FAIL, message, SaturnSystemErrorGroup.FAIL);
}
}
public void postTimeout(String jobName, Integer key, String value, SaturnExecutionContext shardingContext,
JavaShardingItemCallable callable) {
String jobClass = shardingContext.getJobConfiguration().getJobClass();
log.info("[{}] msg=SaturnJavaJob onTimeout, jobClass is {} ", jobName, jobClass);
try {
if( jobBusinessInstance == null){
throw new JobException("the job business instance is not initialized");
}
ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
ClassLoader jobClassLoader = saturnExecutorService.getJobClassLoader();
Thread.currentThread().setContextClassLoader(jobClassLoader);
try {
Class<?> saturnJobExecutionContextClazz = jobClassLoader
.loadClass(SaturnJobExecutionContext.class.getCanonicalName());
jobBusinessInstance.getClass()
.getMethod("onTimeout", String.class, Integer.class, String.class,
saturnJobExecutionContextClazz)
.invoke(jobBusinessInstance, jobName, key, value, callable.getContextForJob(jobClassLoader));
} finally {
Thread.currentThread().setContextClassLoader(oldClassLoader);
}
} catch (Exception e) {
logBusinessExceptionIfNecessary(jobName, e);
}
}
public void beforeTimeout(String jobName, Integer key, String value, SaturnExecutionContext shardingContext,
JavaShardingItemCallable callable) {
String jobClass = shardingContext.getJobConfiguration().getJobClass();
log.info("[{}] msg=SaturnJavaJob beforeTimeout, jobClass is {} ", jobName, jobClass);
try {
if( jobBusinessInstance == null){
throw new JobException("the job business instance is not initialized");
}
ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
ClassLoader jobClassLoader = saturnExecutorService.getJobClassLoader();
Thread.currentThread().setContextClassLoader(jobClassLoader);
try {
Class<?> saturnJobExecutionContextClazz = jobClassLoader
.loadClass(SaturnJobExecutionContext.class.getCanonicalName());
jobBusinessInstance.getClass()
.getMethod("beforeTimeout", String.class, Integer.class, String.class,
saturnJobExecutionContextClazz)
.invoke(jobBusinessInstance, jobName, key, value, callable.getContextForJob(jobClassLoader));
} finally {
Thread.currentThread().setContextClassLoader(oldClassLoader);
}
} catch (Exception e) {
logBusinessExceptionIfNecessary(jobName, e);
}
}
public void postForceStop(String jobName, Integer key, String value, SaturnExecutionContext shardingContext,
JavaShardingItemCallable callable) {
String jobClass = shardingContext.getJobConfiguration().getJobClass();
log.info("[{}] msg=SaturnJavaJob postForceStop, jobClass is {} ", jobName, jobClass);
try {
if( jobBusinessInstance == null){
throw new JobException("the job business instance is not initialized");
}
ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
ClassLoader jobClassLoader = saturnExecutorService.getJobClassLoader();
Thread.currentThread().setContextClassLoader(jobClassLoader);
try {
Class<?> saturnJobExecutionContextClazz = jobClassLoader
.loadClass(SaturnJobExecutionContext.class.getCanonicalName());
jobBusinessInstance.getClass()
.getMethod("postForceStop", String.class, Integer.class, String.class,
saturnJobExecutionContextClazz)
.invoke(jobBusinessInstance, jobName, key, value, callable.getContextForJob(jobClassLoader));
} finally {
Thread.currentThread().setContextClassLoader(oldClassLoader);
}
} catch (Exception e) {
logBusinessExceptionIfNecessary(jobName, e);
}
}
@Override
public void notifyJobEnabled() {
String jobClass = configService.getJobConfiguration().getJobClass();
log.info("[{}] msg=SaturnJavaJob onEnabled, jobClass is {} ", jobName, jobClass);
try {
if (jobBusinessInstance == null) {
throw new JobException("the job business instance is not initialized");
}
ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
ClassLoader jobClassLoader = saturnExecutorService.getJobClassLoader();
Thread.currentThread().setContextClassLoader(jobClassLoader);
try {
jobBusinessInstance.getClass().getMethod("onEnabled", String.class).invoke(jobBusinessInstance, jobName);
} finally {
Thread.currentThread().setContextClassLoader(oldClassLoader);
}
} catch (Exception e) {
logBusinessExceptionIfNecessary(jobName, e);
}
}
@Override
public void notifyJobDisabled() {
String jobClass = configService.getJobConfiguration().getJobClass();
log.info("[{}] msg=SaturnJavaJob onDisabled, jobClass is {} ", jobName, jobClass);
try {
if (jobBusinessInstance == null) {
throw new JobException("the job business instance is not initialized");
}
ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
ClassLoader jobClassLoader = saturnExecutorService.getJobClassLoader();
Thread.currentThread().setContextClassLoader(jobClassLoader);
try {
jobBusinessInstance.getClass().getMethod("onDisabled", String.class).invoke(jobBusinessInstance, jobName);
} finally {
Thread.currentThread().setContextClassLoader(oldClassLoader);
}
} catch (Exception e) {
logBusinessExceptionIfNecessary(jobName, e);
}
}
@Override
public void onForceStop(int item) {
}
@Override
public void onTimeout(int item) {
}
@Override
public void onNeedRaiseAlarm(int item, String alarmMessage) {
//TODO: need to raise alarm by implementor
}
}
|
package com.jme3.gde.core.properties;
import com.jme3.gde.core.scene.SceneApplication;
import com.jme3.gde.core.sceneexplorer.nodes.JmeSpatial;
import com.jme3.gde.core.sceneexplorer.nodes.actions.UserDataDialog;
import com.jme3.scene.Spatial;
import java.awt.Component;
import java.beans.PropertyEditor;
import java.beans.PropertyEditorSupport;
import java.lang.reflect.InvocationTargetException;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JFrame;
import org.openide.nodes.PropertySupport;
import org.openide.util.Exceptions;
/**
*
* @author normenhansen
*/
public class UserDataProperty extends PropertySupport.ReadWrite<String> {
private Spatial spatial;
private JmeSpatial node;
private String name = "null";
private int type = 0;
private List<ScenePropertyChangeListener> listeners = new LinkedList<ScenePropertyChangeListener>();
public UserDataProperty(JmeSpatial node, String name) {
super(name, String.class, name, "");
this.spatial = node.getLookup().lookup(Spatial.class);
this.node = node;
this.name = name;
this.type = getObjectType(spatial.getUserData(name));
}
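    // Maps a stored user data value to the type code used by setValue:
    // 0 = Integer, 1 = Float, 2 = Boolean, 3 = String, 4 = Long, -1 = not editable here.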
public static int getObjectType(Object type) {
if (type instanceof Integer) {
return 0;
} else if (type instanceof Float) {
return 1;
} else if (type instanceof Boolean) {
return 2;
} else if (type instanceof String) {
return 3;
} else if (type instanceof Long) {
return 4;
} else {
// Logger.getLogger(UserDataProperty.class.getName()).log(Level.WARNING, "UserData not editable" + (type == null ? "null" : type.getClass()));
return -1;
}
}
@Override
public String getValue() throws IllegalAccessException, InvocationTargetException {
return spatial.getUserData(name) + "";
}
@Override
public void setValue(final String val) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
if (spatial == null) {
return;
}
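        // The update is enqueued on the scene application's thread and awaited, so the
        // spatial's user data is only modified from that thread.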
try {
SceneApplication.getApplication().enqueue(new Callable<Void>() {
public Void call() throws Exception {
switch (type) {
case 0:
spatial.setUserData(name, Integer.parseInt(val));
break;
case 1:
spatial.setUserData(name, Float.parseFloat(val));
break;
case 2:
spatial.setUserData(name, Boolean.parseBoolean(val));
break;
case 3:
spatial.setUserData(name, val);
break;
case 4:
spatial.setUserData(name, Long.parseLong(val));
break;
default:
// throw new UnsupportedOperationException();
}
return null;
}
}).get();
notifyListeners(null, val);
} catch (InterruptedException ex) {
Exceptions.printStackTrace(ex);
} catch (ExecutionException ex) {
Exceptions.printStackTrace(ex);
}
}
@Override
public PropertyEditor getPropertyEditor() {
return new PropertyEditorSupport(spatial.getUserData(name)) {
@Override
public boolean supportsCustomEditor() {
return true;
}
@Override
public Component getCustomEditor() {
return new UserDataDialog(new JFrame(), true, node, name);
}
};
// return new AnimationPropertyEditor(control);
}
public void addPropertyChangeListener(ScenePropertyChangeListener listener) {
listeners.add(listener);
}
public void removePropertyChangeListener(ScenePropertyChangeListener listener) {
listeners.remove(listener);
}
private void notifyListeners(Object before, Object after) {
for (Iterator<ScenePropertyChangeListener> it = listeners.iterator(); it.hasNext();) {
ScenePropertyChangeListener propertyChangeListener = it.next();
propertyChangeListener.propertyChange("PROP_USER_CHANGE", getName(), before, after);
}
}
}
|
package org.matrix.androidsdk;
import com.google.gson.FieldNamingPolicy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;
import junit.framework.TestCase;
import org.json.JSONArray;
import org.json.JSONObject;
import org.matrix.androidsdk.api.EventsApi;
import org.matrix.androidsdk.api.response.PublicRoom;
import org.matrix.androidsdk.api.response.TokensChunkResponse;
import org.matrix.androidsdk.test.JSONUtils;
import org.matrix.androidsdk.test.RetrofitUtils;
import org.mockito.ArgumentCaptor;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.util.List;
import retrofit.Callback;
import retrofit.RetrofitError;
import retrofit.client.Response;
import static org.mockito.Mockito.*;
/**
* Unit tests MXApiService.
*/
public class MXApiServiceTest extends TestCase {
private static final String BASE_URL = "http://localhost:8008/_matrix/client/api/v1";
private static final String PATH = "/publicRooms";
private Gson mGson = new GsonBuilder()
.setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
.create();
protected void setUp() throws Exception {
super.setUp();
}
protected void tearDown() throws Exception {
super.tearDown();
}
/**
* Tests: MXApiService.loadPublicRooms(LoadPublicRoomsCallback)
* Summary: Mocks up a single public room in the response and asserts that the callback contains
* the mocked information.
*/
public void testPublicRooms() throws Exception {
final String roomId = "!faifuhew9:localhost";
final String roomTopic = "This is a test room.";
final String roomName = "Test Room";
final int roomMembers = 6;
JSONArray rooms = new JSONArray();
final JSONObject json = JSONUtils.createChunk(rooms);
JSONObject room = new JSONObject().put("name", roomName)
.put("num_joined_members", roomMembers).put("room_id", roomId).put("topic", roomTopic);
rooms.put(room);
final TokensChunkResponse<PublicRoom> roomsChunk = mGson.fromJson(json.toString(),
new TypeToken<TokensChunkResponse<PublicRoom>>(){}.getType());
EventsApi eventsApi = mock(EventsApi.class);
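        // Stub the Retrofit call so the captured callback is invoked synchronously with a
        // fabricated 200 response carrying the JSON chunk built above.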
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
Callback<TokensChunkResponse<PublicRoom>> callback =
(Callback<TokensChunkResponse<PublicRoom>>)invocation.getArguments()[0];
Response response = null;
try {
response = RetrofitUtils.createJsonResponse(BASE_URL + PATH, 200,
json);
}
catch (Exception e) {
assertTrue("Exception thrown: "+e, false);
}
callback.success(roomsChunk, response);
return null;
}
}).when(eventsApi).publicRooms(any(Callback.class));
MXApiService service = new MXApiService(eventsApi);
MXApiService.LoadPublicRoomsCallback cb = mock(MXApiService.LoadPublicRoomsCallback.class);
// run the method being tested
service.loadPublicRooms(cb);
ArgumentCaptor<List> captor = ArgumentCaptor.forClass(List.class);
verify(cb, times(1)).onRoomsLoaded(captor.capture());
List<PublicRoom> publicRooms = (List<PublicRoom>) captor.getValue();
assertEquals(1, publicRooms.size());
PublicRoom pr = publicRooms.get(0);
assertEquals(roomName, pr.name);
assertEquals(roomId, pr.roomId);
assertEquals(roomTopic, pr.topic);
assertEquals(roomMembers, pr.numJoinedMembers);
}
/**
* Tests: MXApiService.loadPublicRooms(LoadPublicRoomsCallback)
* Summary: Fails the public rooms HTTP call.
*/
public void testPublicRoomsError() throws Exception {
EventsApi eventsApi = mock(EventsApi.class);
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
Callback<TokensChunkResponse<PublicRoom>> callback =
(Callback<TokensChunkResponse<PublicRoom>>) invocation.getArguments()[0];
callback.failure(RetrofitUtils.createMatrixError(BASE_URL + PATH,
JSONUtils.error(500)));
return null;
}
}).when(eventsApi).publicRooms(any(Callback.class));
MXApiService service = new MXApiService(eventsApi);
MXApiService.LoadPublicRoomsCallback cb = mock(MXApiService.LoadPublicRoomsCallback.class);
// run the method being tested
service.loadPublicRooms(cb);
verify(cb, times(0)).onRoomsLoaded(any(List.class));
}
}
|
package io.spine.server.storage;
import com.google.common.testing.NullPointerTester;
import com.google.common.truth.IterableSubject;
import com.google.protobuf.Any;
import com.google.protobuf.BoolValue;
import com.google.protobuf.Message;
import io.spine.base.Identifier;
import io.spine.client.CompositeFilter;
import io.spine.client.Filter;
import io.spine.client.Filters;
import io.spine.client.IdFilter;
import io.spine.client.TargetFilters;
import io.spine.server.entity.storage.ColumnName;
import io.spine.server.entity.storage.CompositeQueryParameter;
import io.spine.server.entity.storage.EntityColumns;
import io.spine.server.entity.storage.LifecycleColumn;
import io.spine.server.entity.storage.QueryParameters;
import io.spine.server.entity.storage.given.TestEntity;
import io.spine.server.entity.storage.given.TestProjection;
import io.spine.test.entity.ProjectId;
import io.spine.testdata.Sample;
import io.spine.testing.UtilityClassTest;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import java.util.Collection;
import java.util.List;
import static com.google.common.collect.Iterators.size;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.truth.Truth.assertThat;
import static io.spine.client.CompositeFilter.CompositeOperator.EITHER;
import static io.spine.server.entity.storage.LifecycleColumn.archived;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
@DisplayName("`MessageQueries` utility should")
//TODO:2020-04-01:alex.tymchenko: add the tests for the rest of the factory methods.
class MessageQueriesTest extends UtilityClassTest<MessageQueries> {
private MessageQueriesTest() {
super(MessageQueries.class);
}
@Override
protected void configure(NullPointerTester tester) {
super.configure(tester);
tester.setDefault(TargetFilters.class, TargetFilters.getDefaultInstance())
.setDefault(QueryParameters.class, QueryParameters.newBuilder()
.build())
.setDefault(MessageColumn.class, sampleColumn())
.setDefault(Columns.class, MessageColumns.emptyOf(Any.class))
.testStaticMethods(getUtilityClass(), NullPointerTester.Visibility.PACKAGE);
}
private static MessageColumn<String, Any> sampleColumn() {
return new MessageColumn<>(ColumnName.of("sample"), String.class, (v) -> "");
}
@Test
@DisplayName("check filter type")
void checkFilterType() {
// `Boolean` column is queried for an Integer value.
Filter filter = Filters.gt(archived.name(), 42);
CompositeFilter compositeFilter = Filters.all(filter);
TargetFilters filters = TargetFilters
.newBuilder()
.addFilter(compositeFilter)
.build();
EntityColumns columns = EntityColumns.of(TestEntity.class);
assertThrows(IllegalArgumentException.class,
() -> MessageQueries.from(filters, columns));
}
@Test
@DisplayName("not create query for non-existing column")
void notCreateForNonExisting() {
// The filter references a column that does not exist, so query creation must fail.
Filter filter = Filters.gt("column that does not exist", 42);
CompositeFilter compositeFilter = Filters.all(filter);
TargetFilters filters = TargetFilters
.newBuilder()
.addFilter(compositeFilter)
.build();
EntityColumns columns = EntityColumns.of(TestEntity.class);
assertThrows(IllegalArgumentException.class,
() -> MessageQueries.from(filters, columns));
}
@Test
@DisplayName("construct empty queries")
void constructEmptyQueries() {
TargetFilters filters = TargetFilters.getDefaultInstance();
EntityColumns columns = EntityColumns.of(TestEntity.class);
MessageQuery<?> query = MessageQueries.from(filters, columns);
assertNotNull(query);
assertTrue(query.getIds()
.isEmpty());
QueryParameters parameters = query.getParameters();
assertEquals(0, size(parameters.iterator()));
}
@Test
@DisplayName("construct non-empty queries")
void constructNonEmptyQueries() {
Message someGenericId = Sample.messageOfType(ProjectId.class);
Any entityId = Identifier.pack(someGenericId);
IdFilter idFilter = IdFilter
.newBuilder()
.addId(entityId)
.build();
BoolValue archived = BoolValue
.newBuilder()
.setValue(true)
.build();
Filter archivedFilter = Filters
.eq(LifecycleColumn.archived.name(), archived);
CompositeFilter aggregatingFilter = CompositeFilter
.newBuilder()
.addFilter(archivedFilter)
.setOperator(EITHER)
.build();
TargetFilters filters = TargetFilters
.newBuilder()
.setIdFilter(idFilter)
.addFilter(aggregatingFilter)
.build();
EntityColumns columns = EntityColumns.of(TestProjection.class);
MessageQuery<?> query = MessageQueries.from(filters, columns);
assertNotNull(query);
Collection<?> ids = query.getIds();
assertFalse(ids.isEmpty());
assertThat(ids).hasSize(1);
Object singleId = ids.iterator()
.next();
assertEquals(someGenericId, singleId);
QueryParameters parameters = query.getParameters();
List<CompositeQueryParameter> values = newArrayList(parameters);
assertThat(values).hasSize(1);
CompositeQueryParameter singleParam = values.get(0);
Collection<Filter> columnFilters = singleParam.filters()
.values();
assertEquals(EITHER, singleParam.operator());
IterableSubject assertColumnFilters = assertThat(columnFilters);
assertColumnFilters.contains(archivedFilter);
}
}
|
package com.freiheit.fuava.sftp;
import com.freiheit.fuava.sftp.util.FileType;
import com.freiheit.fuava.sftp.util.RemoteConfiguration;
import com.freiheit.fuava.simplebatch.BatchJob;
import com.freiheit.fuava.simplebatch.fetch.FetchedItem;
import com.freiheit.fuava.simplebatch.fsjobs.downloader.CtlDownloaderJob;
import com.freiheit.fuava.simplebatch.logging.BatchStatisticsLoggingListener;
import com.freiheit.fuava.simplebatch.logging.ItemProgressLoggingListener;
import com.freiheit.fuava.simplebatch.processor.ControlFilePersistenceOutputInfo;
import com.freiheit.fuava.simplebatch.processor.Processor;
import com.freiheit.fuava.simplebatch.processor.Processors;
import com.freiheit.fuava.simplebatch.util.FileUtils;
/**
* Standard Sftp Downloader Job for the purpose of downloading and processing
* the newest file in a given directory on a remote system.
*
* @author Thomas Ostendorf (thomas.ostendorf@freiheit.com)
*/
public class SftpDownloaderJob {
private SftpDownloaderJob() {
}
/**
 * Creates the batch job.
*
* @param config
* configuration of downloader job.
* @param client
 * remote client operations. <b>The caller is responsible for
 * releasing resources after the job executes, if applicable.</b>
* @param remoteConfiguration
* remote client storage configuration.
* @param fileType
* type of file that one wants to download.
* @return Batch Job that can be executed.
*/
public static BatchJob<SftpFilename, ControlFilePersistenceOutputInfo> makeDownloaderJob(
final CtlDownloaderJob.Configuration config,
final RemoteClient client,
final RemoteConfiguration remoteConfiguration,
final FileType fileType ) {
final Processor<FetchedItem<SftpFilename>, SftpFilename, ControlFilePersistenceOutputInfo> downloader =
Processors.controlledFileWriter( config.getDownloadDirPath(), config.getControlFileEnding(),
new SftpDownloadingFileWriterAdapter( client ) );
final SftpResultFileMover remoteFileMover = new SftpResultFileMover( client, FileUtils.getCurrentDateDirPath(remoteConfiguration.getArchivedFolder()) );
return new BatchJob.Builder<SftpFilename, ControlFilePersistenceOutputInfo>()
.setFetcher(
new SftpOldFilesMovingLatestFileFetcher(
client,
FileUtils.getCurrentDateDirPath( remoteConfiguration.getSkippedFolder() ),
remoteConfiguration.getProcessingFolder(),
remoteConfiguration.getIncomingFolder(),
fileType ) )
.addListener( new BatchStatisticsLoggingListener<>( "BATCH" ) )
.addListener( new ItemProgressLoggingListener<>( "ITEM" ) )
.setProcessor( Processors.compose( remoteFileMover, downloader ) )
.setProcessingBatchSize( 1 /*No advantage in processing multiple files at once*/ )
.build();
}
}
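// Hedged usage sketch, not part of the original API: it only shows how a caller might obtain the
// configured job from the factory above. The Configuration, RemoteClient, RemoteConfiguration and
// FileType instances are assumptions supplied by the integrating application; executing the
// returned job is left to the caller, since the execution entry point is not shown in this file.
class SftpDownloaderJobUsageSketch {
    static BatchJob<SftpFilename, ControlFilePersistenceOutputInfo> buildJob(
            final CtlDownloaderJob.Configuration config,
            final RemoteClient client,
            final RemoteConfiguration remoteConfiguration,
            final FileType fileType ) {
        // wires the fetcher, logging listeners, downloader and archive mover described above
        return SftpDownloaderJob.makeDownloaderJob( config, client, remoteConfiguration, fileType );
    }
}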
|
package org.researchstack.skin.ui;
import android.animation.ArgbEvaluator;
import android.animation.ValueAnimator;
import android.content.Intent;
import android.os.Bundle;
import android.support.design.widget.TabLayout;
import android.support.v4.content.ContextCompat;
import android.support.v4.view.ViewPager;
import android.support.v7.widget.AppCompatButton;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import org.researchstack.backbone.StorageAccess;
import org.researchstack.backbone.result.TaskResult;
import org.researchstack.backbone.task.OrderedTask;
import org.researchstack.backbone.ui.PinCodeActivity;
import org.researchstack.backbone.ui.ViewTaskActivity;
import org.researchstack.backbone.utils.ResUtils;
import org.researchstack.skin.AppPrefs;
import org.researchstack.skin.R;
import org.researchstack.skin.ResourceManager;
import org.researchstack.skin.TaskProvider;
import org.researchstack.skin.UiManager;
import org.researchstack.skin.model.StudyOverviewModel;
import org.researchstack.skin.step.PassCodeCreationStep;
import org.researchstack.skin.task.OnboardingTask;
import org.researchstack.skin.task.SignInTask;
import org.researchstack.skin.task.SignUpTask;
import org.researchstack.skin.ui.adapter.OnboardingPagerAdapter;
import org.researchstack.skin.utils.JsonUtils;
public class OnboardingActivity extends PinCodeActivity implements View.OnClickListener
{
public static final int REQUEST_CODE_SIGN_UP = 21473;
public static final int REQUEST_CODE_SIGN_IN = 31473;
public static final int REQUEST_CODE_PASSCODE = 41473;
private View pagerFrame;
private View pagerContainer;
private TabLayout tabStrip;
private Button skip;
private Button signUp;
private TextView signIn;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
super.setContentView(R.layout.activity_onboarding);
ImageView logoView = (ImageView) findViewById(R.id.layout_studyoverview_landing_logo);
TextView titleView = (TextView) findViewById(R.id.layout_studyoverview_landing_title);
TextView subtitleView = (TextView) findViewById(R.id.layout_studyoverview_landing_subtitle);
LinearLayout linearLayout = (LinearLayout) findViewById(R.id.layout_studyoverview_main);
StudyOverviewModel model = parseStudyOverviewModel();
// The first item is used for the main activity and not the tabbed dialog
StudyOverviewModel.Question welcomeQuestion = model.getQuestions().remove(0);
titleView.setText(welcomeQuestion.getTitle());
if(! TextUtils.isEmpty(welcomeQuestion.getDetails()))
{
subtitleView.setText(welcomeQuestion.getDetails());
}
else
{
subtitleView.setVisibility(View.GONE);
}
// add Read Consent option to list and tabbed dialog
if("yes".equals(welcomeQuestion.getShowConsent()))
{
StudyOverviewModel.Question consent = new StudyOverviewModel.Question();
consent.setTitle(getString(R.string.rss_read_consent_doc));
int consentHtmlId = ResourceManager.getInstance().getConsentHtml();
consent.setDetails(getResources().getResourceEntryName(consentHtmlId));
model.getQuestions().add(0, consent);
}
for(int i = 0; i < model.getQuestions().size(); i++)
{
AppCompatButton button = (AppCompatButton) LayoutInflater.from(this)
.inflate(R.layout.rss_button_study_overview, linearLayout, false);
button.setText(model.getQuestions().get(i).getTitle());
// set the index for opening the viewpager to the correct page on click
button.setTag(i);
linearLayout.addView(button);
button.setOnClickListener(this);
}
signUp = (Button) findViewById(R.id.intro_sign_up);
signIn = (TextView) findViewById(R.id.intro_sign_in);
skip = (Button) findViewById(R.id.intro_skip);
skip.setVisibility(UiManager.getInstance().isConsentSkippable() ? View.VISIBLE : View.GONE);
int resId = ResUtils.getDrawableResourceId(this, model.getLogoName(), R.mipmap.ic_launcher);
logoView.setImageResource(resId);
pagerContainer = findViewById(R.id.pager_container);
pagerContainer.setTranslationY(48);
pagerContainer.setAlpha(0);
pagerContainer.setScaleX(.9f);
pagerContainer.setScaleY(.9f);
pagerFrame = findViewById(R.id.pager_frame);
pagerFrame.setAlpha(0);
pagerFrame.setOnClickListener(v -> hidePager());
OnboardingPagerAdapter adapter = new OnboardingPagerAdapter(this, model.getQuestions());
ViewPager pager = (ViewPager) findViewById(R.id.pager);
pager.setOffscreenPageLimit(2);
pager.setAdapter(adapter);
tabStrip = (TabLayout) findViewById(R.id.pager_title_strip);
tabStrip.setupWithViewPager(pager);
}
private StudyOverviewModel parseStudyOverviewModel()
{
int fileResId = ResourceManager.getInstance().getStudyOverviewSections();
return JsonUtils.loadClass(OnboardingActivity.this, StudyOverviewModel.class, fileResId);
}
@Override
public void onClick(View v)
{
showPager((int) v.getTag());
}
private void showPager(int index)
{
pagerFrame.animate().alpha(1)
.setDuration(150)
.withStartAction(() -> pagerFrame.setVisibility(View.VISIBLE))
.withEndAction(() -> {
pagerContainer.animate()
.translationY(0)
.setDuration(100)
.alpha(1)
.scaleX(1)
.scaleY(1);
});
tabStrip.getTabAt(index).select();
skip.setActivated(true);
signUp.setActivated(true);
int colorFrom = ContextCompat.getColor(this, android.R.color.black);
int colorTo = ContextCompat.getColor(this, android.R.color.white);
ValueAnimator colorAnimation = ValueAnimator.ofObject(new ArgbEvaluator(), colorFrom, colorTo);
colorAnimation.setDuration(150);
colorAnimation.addUpdateListener(animator -> signIn.setTextColor((int) animator.getAnimatedValue()));
colorAnimation.start();
}
private void hidePager()
{
pagerContainer.animate()
.translationY(48)
.alpha(0)
.setDuration(100)
.scaleX(.9f)
.scaleY(.9f)
.withEndAction(() -> {
pagerFrame.animate()
.alpha(0)
.setDuration(150)
.withEndAction(() -> pagerFrame.setVisibility(View.GONE));
skip.setActivated(false);
signUp.setActivated(false);
});
int colorFrom = ContextCompat.getColor(this, android.R.color.white);
int colorTo = ContextCompat.getColor(this, android.R.color.black);
ValueAnimator colorAnimation = ValueAnimator.ofObject(new ArgbEvaluator(), colorFrom, colorTo);
colorAnimation.setDuration(150);
colorAnimation.addUpdateListener(animator -> signIn.setTextColor((int) animator.getAnimatedValue()));
colorAnimation.start();
}
@Override
public void onBackPressed()
{
if(pagerFrame.getVisibility() == View.VISIBLE)
{
hidePager();
}
else
{
super.onBackPressed();
}
}
public void onSignUpClicked(View view)
{
hidePager();
boolean hasPin = StorageAccess.getInstance().hasPinCode(this);
SignUpTask task = (SignUpTask) TaskProvider.getInstance().get(TaskProvider.TASK_ID_SIGN_UP);
task.setHasPasscode(hasPin);
startActivityForResult(SignUpTaskActivity.newIntent(this, task), REQUEST_CODE_SIGN_UP);
}
public void onSkipClicked(View view)
{
hidePager();
boolean hasPasscode = StorageAccess.getInstance().hasPinCode(this);
if(! hasPasscode)
{
PassCodeCreationStep step = new PassCodeCreationStep(OnboardingTask.SignUpPassCodeCreationStepIdentifier,
R.string.rss_passcode);
OrderedTask task = new OrderedTask("PasscodeTask", step);
startActivityForResult(ViewTaskActivity.newIntent(this, task), REQUEST_CODE_PASSCODE);
}
else
{
startMainActivity();
}
}
public void onSignInClicked(View view)
{
hidePager();
boolean hasPasscode = StorageAccess.getInstance().hasPinCode(this);
SignInTask task = (SignInTask) TaskProvider.getInstance().get(TaskProvider.TASK_ID_SIGN_IN);
task.setHasPasscode(hasPasscode);
startActivityForResult(SignUpTaskActivity.newIntent(this, task), REQUEST_CODE_SIGN_IN);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data)
{
if(requestCode == REQUEST_CODE_SIGN_IN && resultCode == RESULT_OK)
{
finish();
TaskResult result = (TaskResult) data.getSerializableExtra(ViewTaskActivity.EXTRA_TASK_RESULT);
String email = (String) result.getStepResult(OnboardingTask.SignInStepIdentifier)
.getResultForIdentifier(SignInTask.ID_EMAIL);
String password = (String) result.getStepResult(OnboardingTask.SignInStepIdentifier)
.getResultForIdentifier(SignInTask.ID_PASSWORD);
if(email == null || password == null)
{
startMainActivity();
}
else
{
Intent intent = new Intent(this, EmailVerificationActivity.class);
intent.putExtra(EmailVerificationActivity.EXTRA_EMAIL, email);
intent.putExtra(EmailVerificationActivity.EXTRA_PASSWORD, password);
startActivity(intent);
}
}
else if(requestCode == REQUEST_CODE_SIGN_UP && resultCode == RESULT_OK)
{
finish();
TaskResult result = (TaskResult) data.getSerializableExtra(ViewTaskActivity.EXTRA_TASK_RESULT);
String email = (String) result.getStepResult(OnboardingTask.SignUpStepIdentifier)
.getResultForIdentifier(SignUpTask.ID_EMAIL);
String password = (String) result.getStepResult(OnboardingTask.SignUpStepIdentifier)
.getResultForIdentifier(SignUpTask.ID_PASSWORD);
Intent intent = new Intent(this, EmailVerificationActivity.class);
intent.putExtra(EmailVerificationActivity.EXTRA_EMAIL, email);
intent.putExtra(EmailVerificationActivity.EXTRA_PASSWORD, password);
startActivity(intent);
}
else if(requestCode == REQUEST_CODE_PASSCODE && resultCode == RESULT_OK)
{
TaskResult result = (TaskResult) data.getSerializableExtra(ViewTaskActivity.EXTRA_TASK_RESULT);
String passcode = (String) result.getStepResult(OnboardingTask.SignUpPassCodeCreationStepIdentifier)
.getResult();
StorageAccess.getInstance().setPinCode(this, passcode);
startMainActivity();
}
else
{
super.onActivityResult(requestCode, resultCode, data);
}
}
private void startMainActivity()
{
// Onboarding completion is checked in the splash activity. The check allows us to pass through
// to MainActivity even if we haven't signed in. We set this to true in every case so the user
// is only forced through onboarding once. If they leave the study, they must re-enroll in
// Settings, which starts OnboardingActivity again.
AppPrefs.getInstance(this).setOnboardingComplete(true);
// Start MainActivity with the CLEAR_TOP and SINGLE_TOP flags. MainActivity may
// already be on the activity task; we want to re-use that instance instead of
// creating a new one and having two instances active.
Intent intent = new Intent(this, MainActivity.class);
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP);
startActivity(intent);
finish();
}
}
|
package com.msdpe.pietalk.activities;
import java.io.InputStream;
import uk.co.senab.actionbarpulltorefresh.library.PullToRefreshAttacher;
import uk.co.senab.actionbarpulltorefresh.library.PullToRefreshAttacher.OnRefreshListener;
import android.app.ActionBar;
import android.app.Dialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnPreparedListener;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.CountDownTimer;
import android.support.v4.app.NavUtils;
import android.view.GestureDetector;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.AdapterView.OnItemLongClickListener;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.RelativeLayout;
import android.widget.RelativeLayout.LayoutParams;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.VideoView;
import com.microsoft.windowsazure.mobileservices.ApiOperationCallback;
import com.microsoft.windowsazure.mobileservices.ServiceFilterResponse;
import com.msdpe.pietalk.Constants;
import com.msdpe.pietalk.R;
import com.msdpe.pietalk.TestSettingsActivity;
import com.msdpe.pietalk.adapters.PiesArrayAdapter;
import com.msdpe.pietalk.base.BaseActivity;
import com.msdpe.pietalk.datamodels.Pie;
import com.msdpe.pietalk.util.PieTalkAlert;
import com.msdpe.pietalk.util.PieTalkLogger;
import com.msdpe.pietalk.util.PieTalkResponse;
public class PiesListActivity extends BaseActivity {
private final String TAG = "PiesListActivity";
private ListView mLvPies;
private PiesArrayAdapter mAdapter;
private PullToRefreshAttacher mPullToRefreshAttacher;
private boolean mIsViewingPicture;
private boolean mIsViewingVideo;
private Dialog mViewingDialog;
private ImageView mImagePicture;
private VideoView mVideoView;
private GestureDetector mGestureDetector;
private int mTappedRowPosition = -1;
@Override
protected void onCreate(Bundle savedInstanceState) {
getWindow().addFlags(WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS);
super.onCreate(savedInstanceState, true);
setContentView(R.layout.activity_pies_list);
// Show the Up button in the action bar.
setupActionBar();
mLvPies = (ListView) findViewById(R.id.lvPies);
mPullToRefreshAttacher = PullToRefreshAttacher.get(this);
mPullToRefreshAttacher.addRefreshableView(mLvPies, new OnRefreshListener() {
@Override
public void onRefreshStarted(View arg0) {
mPieTalkService.getPies();
}
});
mGestureDetector = new GestureDetector(this, new GestureListener());
// mAdapter = new ArrayAdapter<String>(this,
// android.R.layout.simple_list_item_1, mPieTalkService.getLocalPieUsernames());
// mAdapter = new ArrayAdapter<String>(this,
// R.layout.list_row_pie, R.id.text1, mPieTalkService.getLocalPieUsernames());
//mAdapter = new PiesArrayAdapter(this, mPieTalkService.getLocalPieUsernames());
mAdapter = new PiesArrayAdapter(this, mPieTalkService.getLocalPies());
mLvPies.setAdapter(mAdapter);
mLvPies.setOnItemClickListener(pieClickListener);
mLvPies.setOnItemLongClickListener(pieLongClickListener);
// mLvPies.setOnKeyListener(new View.OnKeyListener() {
// @Override
// public boolean onKey(View v, int keyCode, KeyEvent event) {
// // TODO Auto-generated method stub
// PieTalkLogger.i(TAG, "onKey");
// return false;
mLvPies.setOnTouchListener(new OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent event) {
// PieTalkLogger.i(TAG, "onTouch");
mGestureDetector.onTouchEvent(event);
if (event.getAction() == MotionEvent.ACTION_UP) {
if (mIsViewingPicture || mIsViewingVideo) {
mViewingDialog.dismiss();
mIsViewingPicture = false;
mIsViewingVideo = false;
if (mImagePicture != null) {
mImagePicture = null;
} else if (mVideoView != null) {
mVideoView.stopPlayback();
mVideoView = null;
}
}
}
//Ensures we can still pull to refresh on this page
mPullToRefreshAttacher.onTouch(v, event);
return false;
}
});
}
// @Override
// public boolean onKeyDown(int keyCode, KeyEvent event) {
// // TODO Auto-generated method stub
// PieTalkLogger.i(TAG, "onkeydown");
// return super.onKeyDown(keyCode, event);
private OnItemClickListener pieClickListener = new OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position,
long id) {
mTappedRowPosition = position;
}
};
private OnItemLongClickListener pieLongClickListener = new OnItemLongClickListener() {
@Override
public boolean onItemLongClick(AdapterView<?> parent, View view,
final int position, long id) {
final Pie pie = mPieTalkService.getLocalPies().get(position);
if (pie.getType().equals("FriendRequest")) {
//Friend and update the pie
mPieTalkService.acceptFriendRequest(pie, new ApiOperationCallback<PieTalkResponse>() {
@Override
public void onCompleted(PieTalkResponse response, Exception ex,
ServiceFilterResponse serviceFilterResponse) {
PieTalkLogger.i(TAG, "Response received");
if (ex != null || response.Error != null) {
//Display error
if (ex != null)
PieTalkAlert.showSimpleErrorDialog(mActivity, ex.getCause().getMessage());
else
Toast.makeText(mActivity, response.Error, Toast.LENGTH_SHORT).show();
} else {
mAdapter.remove(pie);
mPieTalkService.getFriends();
}
}
});
} else if (pie.getType().equals("Pie")) {
if (pie.getHasUserSeen()) {
//Do nothing, they should double tap to reply
} else {
//Get SAS for pie
mPieTalkService.getPieForRecipient(pie, new ApiOperationCallback<PieTalkResponse>() {
@Override
public void onCompleted(PieTalkResponse response,
Exception ex, ServiceFilterResponse serviceFilterResponse) {
if (ex != null || response.Error != null) {
//Display error
if (ex != null)
PieTalkAlert.showSimpleErrorDialog(mActivity, ex.getCause().getMessage());
else
PieTalkAlert.showSimpleErrorDialog(mActivity, response.Error);
} else {
PieTalkLogger.d(TAG, response.PieUrl);
//display the pie depending on its type
mViewingDialog = new Dialog(mActivity, android.R.style.Theme_Black_NoTitleBar);
if (pie.getIsPicture()) {
mIsViewingPicture = true;
if (mImagePicture == null)
mImagePicture = new ImageView(mActivity);
new DownloadPiePictureTask().execute(response.PieUrl);
mViewingDialog.setContentView(mImagePicture);
} else if (pie.getIsVideo()) {
mIsViewingVideo = true;
if (mVideoView == null) {
mVideoView = new VideoView(mActivity);
}
RelativeLayout newLayout = new RelativeLayout(mActivity);
// RelativeLayout.LayoutParams layoutParams = (RelativeLayout.LayoutParams) newLayout.getLayoutParams();
// layoutParams.width = LayoutParams.MATCH_PARENT;
// layoutParams.height = LayoutParams.MATCH_PARENT;
RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
// newLayout.setLayoutParams(layoutParams);
layoutParams.addRule(RelativeLayout.ALIGN_PARENT_TOP);
layoutParams.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
layoutParams.addRule(RelativeLayout.ALIGN_PARENT_LEFT);
layoutParams.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
mVideoView.setLayoutParams(layoutParams);
mVideoView.setOnPreparedListener(new OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mp) {
mp.setLooping(true);
}
});
Uri uri = Uri.parse(response.PieUrl);
mVideoView.setVideoURI(uri);
mVideoView.start();
newLayout.addView(mVideoView);
mViewingDialog.setContentView(newLayout);
}
mViewingDialog.show();
//Start countdown
int position = mPieTalkService.getLocalPies().indexOf(pie);
View view = mLvPies.getChildAt(position);
final TextView lblTime = (TextView) view.findViewById(R.id.lblTime);
//Only start a countdown if we haven't already
if (lblTime.getText().equals("")) {
final ImageView imgIndicator = (ImageView) view.findViewById(R.id.imgIndicator);
final TextView lblInstructions = (TextView) view.findViewById(R.id.lblInstructions);
int timeToLive = pie.getTimeToLive();
lblTime.setText(timeToLive + "");
new CountDownTimer(timeToLive * 1000, 1000) {
public void onTick(long millisUntilFinished) {
lblTime.setText(millisUntilFinished / 1000 + "");
}
public void onFinish() {
imgIndicator.setImageResource(R.drawable.pie_seen);
lblTime.setText(R.string.empty_string);
lblInstructions.setText(R.string.instructions_seen_pie);
pie.setHasUserSeen(true);
if (mViewingDialog.isShowing())
mViewingDialog.dismiss();
}
}.start();
}
}
}
});
//Update PIE as being seen at time
//Show PIE in dialog (require hold down)
//Start local countdown
//Change local pie when countdown is up
//Block access on server after time is up
}
}
return false;
}
};
private class DownloadPiePictureTask extends AsyncTask<String, Void, Bitmap> {
public DownloadPiePictureTask() { }
@Override
protected Bitmap doInBackground(String... piePictureUrl) {
Bitmap pieImage = null;
try {
InputStream in = new java.net.URL(piePictureUrl[0]).openStream();
pieImage = BitmapFactory.decodeStream(in);
} catch (Exception ex) {
PieTalkLogger.e(TAG, "Error pulling down pie for url: " + piePictureUrl[0]);
}
return pieImage;
}
protected void onPostExecute(Bitmap pieImage) {
mImagePicture.setImageBitmap(pieImage);
}
}
@Override
protected void onResume() {
IntentFilter filter = new IntentFilter();
mIsViewingPicture = false;
mIsViewingVideo = false;
//filter.addAction(Constants.BROADCAST_PIES_UPDATED);
filter.addAction(Constants.BROADCAST_PIES_UPDATED);
filter.addAction(Constants.BROADCAST_PIE_SENT);
registerReceiver(receiver, filter);
super.onResume();
}
@Override
protected void onPause() {
unregisterReceiver(receiver);
super.onPause();
}
private BroadcastReceiver receiver = new BroadcastReceiver() {
public void onReceive(Context context, android.content.Intent intent) {
if (intent.getAction().equals(Constants.BROADCAST_PIES_UPDATED)) {
mAdapter.clear();
//for (String item : mPieTalkService.getLocalPieUsernames()) {
for (Pie pie : mPieTalkService.getLocalPies()) {
mAdapter.add(pie);
}
PieTalkLogger.i(TAG, "Refresh complete");
mPullToRefreshAttacher.setRefreshComplete();
mPullToRefreshAttacher.setRefreshing(false);
} else if (intent.getAction().equals(Constants.BROADCAST_PIE_SENT)) {
mPieTalkService.getPies();
}
}
};
/**
* Set up the {@link android.app.ActionBar}.
*/
private void setupActionBar() {
getActionBar().setDisplayHomeAsUpEnabled(true);
ActionBar bar = getActionBar();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.pies_list, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
// This ID represents the Home or Up button. In the case of this
// activity, the Up button is shown. Use NavUtils to allow users
// to navigate up one level in the application structure. For
// more details, see the Navigation pattern on Android Design:
// http://developer.android.com/design/patterns/navigation.html#up-vs-back
NavUtils.navigateUpFromSameTask(this);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_right);
return true;
case R.id.menuSettings:
Intent intent = new Intent(mActivity, TestSettingsActivity.class);
startActivity(intent);
finish();
//mPieTalkService.getPies();
//mPullToRefreshAttacher.setRefreshing(true);
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
public void onBackPressed() {
//super.onBackPressed();
NavUtils.navigateUpFromSameTask(this);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_right);
}
public class GestureListener extends GestureDetector.SimpleOnGestureListener {
@Override
public boolean onDoubleTap(MotionEvent e) {
if (mTappedRowPosition > -1) {
Pie tappedPie = mPieTalkService.getLocalPies().get(mTappedRowPosition);
if (tappedPie.getType().equalsIgnoreCase("pie") && tappedPie.getHasUserSeen()) {
PieTalkLogger.i(TAG, "DoubleTap row: " + mTappedRowPosition);
}
}
return super.onDoubleTap(e);
}
}
}
|
package edu.wustl.cab2b.common.ejb;
/**
* Interface which has all the constants representing EJB names.
 * Updated for EJB 3.0.
 * Format is: EAR_NAME/BEAN_NAME/TYPE_OF_BUSINESS_INTERFACE.
 * All the beans are accessed through their remote business interfaces.
*
* @author Chandrakant Talele
* @author lalit_chand
*/
public interface EjbNamesConstants {
final static String EXPERIMENT = "cab2bServer/ExperimentSessionBean/remote";
final static String EXPERIMENT_GROUP = "cab2bServer/ExperimentGroupSessionBean/remote";
final static String SQL_QUERY_BEAN = "cab2bServer/SQLQueryBean/remote";
final static String PATH_FINDER_BEAN = "cab2bServer/PathFinderBean/remote";
final static String PATH_BUILDER_BEAN = "cab2bServer/PathBuilderBean/remote";
final static String QUERY_ENGINE_BEAN = "cab2bServer/QueryEngineBean/remote";
final static String CATEGORY_BEAN = "cab2bServer/CategoryBean/remote";
final static String DATACATEGORY_BEAN = "cab2bServer/DataCategoryBean/remote";
final static String DATALIST_BEAN = "cab2bServer/DataListBean/remote";
final static String ANALYTICAL_SERVICE_BEAN = "cab2bServer/AnalyticalServiceOperationsBean/remote";
final static String UTILITY_BEAN = "cab2bServer/UtilityBean/remote";
final static String USER_BEAN = "cab2bServer/UserBean/remote";
final static String SERVICE_URL_BEAN = "cab2bServer/ServiceURLBean/remote";
}
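// Hedged usage sketch, not part of the original interface: illustrates how a client might resolve
// one of the JNDI names above through a standard javax.naming lookup. The target remote business
// interface type (plain Object here) and the surrounding lookup code are assumptions for
// illustration only.
class EjbNamesConstantsUsageSketch {
    static Object lookupQueryEngine() throws javax.naming.NamingException {
        javax.naming.Context context = new javax.naming.InitialContext();
        // EAR_NAME/BEAN_NAME/TYPE_OF_BUSINESS_INTERFACE, e.g. "cab2bServer/QueryEngineBean/remote"
        return context.lookup(EjbNamesConstants.QUERY_ENGINE_BEAN);
    }
}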
|
package org.jfree.chart.util;
import java.awt.GradientPaint;
import java.awt.geom.Rectangle2D;
import java.awt.Shape;
import org.jfree.ui.GradientPaintTransformer;
public class DirectionalGradientPaintTransformer
implements GradientPaintTransformer {
/**
* Default constructor.
*/
public DirectionalGradientPaintTransformer() {
super();
}
/**
* Transforms a <code>GradientPaint</code> instance to fit some target
* shape.
*
* @param paint the original paint (not <code>null</code>).
* @param target the reference area (not <code>null</code>).
*
* @return A transformed paint.
*/
@Override
public GradientPaint transform(GradientPaint paint, Shape target) {
//get the coordinates of the original GradientPaint
final double px1 = paint.getPoint1().getX();
final double py1 = paint.getPoint1().getY();
final double px2 = paint.getPoint2().getX();
final double py2 = paint.getPoint2().getY();
//get the coordinates of the shape that is to be filled
final Rectangle2D bounds = target.getBounds();
final float bx = (float)bounds.getX();
final float by = (float)bounds.getY();
final float bw = (float)bounds.getWidth();
final float bh = (float)bounds.getHeight();
//reserve variables to store the coordinates of the resulting GradientPaint
float rx1, ry1, rx2, ry2;
if (px1 == 0 && py1 == 0) {
//start point is upper left corner
rx1 = bx;
ry1 = by;
if (px2 != 0.0f && py2 != 0.0f) {
//end point is lower right corner --> diagonal gradient
float offset = (paint.isCyclic()) ? (bw + bh) / 4.0f
: (bw + bh) / 2.0f ;
rx2 = bx + offset;
ry2 = by + offset;
}
else {
//end point is either lower left corner --> vertical gradient
//or end point is upper right corner --> horizontal gradient
rx2 = (px2 == 0) ? rx1 : (paint.isCyclic() ? (rx1 + bw / 2.0f)
: (rx1 + bw));
ry2 = (py2 == 0) ? ry1 : (paint.isCyclic() ? (ry1 + bh / 2.0f)
: (ry1 + bh));
}
}
else {
            //start point is lower left corner --> diagonal gradient (bottom-left to top-right)
rx1 = bx;
ry1 = by + bh;
float offset = (paint.isCyclic()) ? (bw + bh) / 4.0f
: (bw + bh) / 2.0f;
rx2 = bx + offset;
ry2 = by + bh - offset;
}
return new GradientPaint(rx1, ry1, paint.getColor1(), rx2, ry2,
paint.getColor2(), paint.isCyclic());
}
}
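// Hedged usage sketch, not part of the class above: demonstrates transforming a unit-space
// GradientPaint so that it spans a concrete target rectangle. The colours and the target bounds
// are arbitrary example values.
class DirectionalGradientPaintTransformerUsageSketch {
    static GradientPaint horizontalExample() {
        // (0,0) -> (1,0) encodes "horizontal gradient"; the transformer stretches it to the target width
        GradientPaint prototype = new GradientPaint(0f, 0f, java.awt.Color.WHITE,
                1f, 0f, java.awt.Color.BLUE);
        Shape target = new Rectangle2D.Double(10.0, 20.0, 200.0, 50.0);
        return new DirectionalGradientPaintTransformer().transform(prototype, target);
    }
}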
|
package spark.kmedoids.eval.sa;
import info.debatty.jinu.Case;
import info.debatty.jinu.TestInterface;
import java.util.Arrays;
import java.util.List;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import info.debatty.spark.kmedoids.Clusterer;
import info.debatty.spark.kmedoids.L2Similarity;
import info.debatty.spark.kmedoids.Solution;
import info.debatty.spark.kmedoids.budget.SimilaritiesBudget;
import info.debatty.spark.kmedoids.neighborgenerator.SANeighborGenerator;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
/**
*
* @author tibo
*/
public class VaryT0 implements TestInterface {
private static final double GAMMA = 0.99;
private static String dataset_path;
private static long similarities;
/**
* @param args the command line arguments
* @throws java.lang.Exception if anything goes wrong
*/
public static void main(final String[] args) throws Exception {
OptionParser parser = new OptionParser("d:s:r:t:");
OptionSet options = parser.parse(args);
similarities = Long.valueOf((String) options.valueOf("s"));
dataset_path = (String) options.valueOf("d");
List<String> t0s_list = (List<String>) options.valuesOf("t");
double[] t0s = new double[t0s_list.size()];
for (int i = 0; i < t0s.length; i++) {
t0s[i] = Double.valueOf(t0s_list.get(i));
}
// Reduce Spark output logs
Logger.getLogger("org").setLevel(Level.WARN);
Logger.getLogger("akka").setLevel(Level.WARN);
Case test = new Case();
test.setDescription(VaryT0.class.getName() + " : "
+ String.join(" ", Arrays.asList(args)));
test.setIterations(20);
test.setParallelism(1);
test.commitToGit(false);
test.setBaseDir((String) options.valueOf("r"));
test.setParamValues(t0s);
test.addTest(VaryT0.class);
test.run();
}
@Override
public final double[] run(final double t0) throws Exception {
SparkConf conf = new SparkConf();
conf.setAppName("Spark k-medoids clusterer");
conf.setIfMissing("spark.master", "local[*]");
Solution<double[]> solution;
try (JavaSparkContext sc = new JavaSparkContext(conf)) {
JavaRDD<double[]> data = sc.objectFile(dataset_path);
Clusterer<double[]> clusterer = new Clusterer<>();
clusterer.setK(10);
clusterer.setSimilarity(new L2Similarity());
clusterer.setNeighborGenerator(
new SANeighborGenerator<>(t0, GAMMA));
clusterer.setBudget(new SimilaritiesBudget(similarities));
solution = clusterer.cluster(data);
}
return new double[]{solution.getTotalSimilarity()};
}
}
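// Hedged invocation sketch (paths, jar name and values are placeholders, not from the original
// sources): the options map to the "d:s:r:t:" parser above -- -d dataset path, -s similarity
// budget, -r report/base directory, -t one or more T0 values to sweep.
//
//   spark-submit --class spark.kmedoids.eval.sa.VaryT0 experiments.jar \
//       -d /data/points-rdd -s 1000000 -r /tmp/varyt0-results -t 10 -t 100 -t 1000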
|
package mpicbg.imglib.interpolation.linear;
import mpicbg.imglib.algorithm.math.MathLib;
import mpicbg.imglib.cursor.LocalizableByDimCursor;
import mpicbg.imglib.image.Image;
import mpicbg.imglib.interpolation.InterpolatorFactory;
import mpicbg.imglib.interpolation.InterpolatorImpl;
import mpicbg.imglib.outofbounds.OutOfBoundsStrategyFactory;
import mpicbg.imglib.type.numeric.NumericType;
public class LinearInterpolator<T extends NumericType<T>> extends InterpolatorImpl<T>
{
final LocalizableByDimCursor<T> cursor;
final T tmp1, tmp2;
// the offset in each dimension and a temporary array for computing the global coordinates
final int[] baseDim, location;
// the weights and inverse weights in each dimension
final float[][] weights;
// to save the temporary values in each dimension when computing the final value
// the value in [ 0 ][ 0 ] will be the interpolated value
final T[][] tree;
// the half size of the second array in each tree step - speedup
final int[] halfTreeLevelSizes;
// the locations where to initially grab pixels from
final boolean[][] positions;
protected LinearInterpolator( final Image<T> img, final InterpolatorFactory<T> interpolatorFactory, final OutOfBoundsStrategyFactory<T> outOfBoundsStrategyFactory )
{
this( img, interpolatorFactory, outOfBoundsStrategyFactory, true );
}
protected LinearInterpolator( final Image<T> img, final InterpolatorFactory<T> interpolatorFactory, final OutOfBoundsStrategyFactory<T> outOfBoundsStrategyFactory, boolean initGenericStructures )
{
super(img, interpolatorFactory, outOfBoundsStrategyFactory);
		// Principle of interpolation used, 3d example with corner samples [0]..[7]:
		//
		// STEP 1 - Interpolate in dimension 0 (x):
		//   combine the pairs [0][1], [2][3], [4][5], [6][7] into four values
		// STEP 2 - Interpolate in dimension 1 (y):
		//   combine the resulting pairs into two values
		// STEP 3 - Interpolate in dimension 2 (z):
		//   combine the remaining pair [0] [1],
		//   which yields the interpolated value in 3 dimensions
cursor = img.createLocalizableByDimCursor( outOfBoundsStrategyFactory );
tmp1 = img.createType();
tmp2 = img.createType();
baseDim = new int[ numDimensions ];
location = new int[ numDimensions ];
weights = new float[ numDimensions ][ 2 ];
if ( initGenericStructures )
{
			// create the temporary data structure for computing the actual interpolation
// example: 3d-image
// 3d: get values from image and interpolate in dimension 0
// see above and below which coordinates are [0]...[7]
// [0] [1] [2] [3] [4] [5] [6] [7]
// interp in 3d | | | | | | | |
// store in 2d: \ / \ / \ / \ /
// [0] [1] [2] [3]
// interp in 2d \ / \ /
// and store in \ / \ /
// [0] [1]
// interpolate in 1d \ /
// and store \ /
// the final \ /
// result \ /
// final interpolated value [0]
tree = tmp1.createArray2D( numDimensions + 1, 1 );
halfTreeLevelSizes = new int[ numDimensions + 1 ];
for ( int d = 0; d < tree.length; d++ )
{
tree[ d ] = tmp1.createArray1D( MathLib.pow( 2, d ));
for ( int i = 0; i < tree[ d ].length; i++ )
tree[ d ][ i ] = img.createType();
halfTreeLevelSizes[ d ] = tree[ d ].length / 2;
}
// recursively get the coordinates we need for interpolation
// ( relative location to the offset in each dimension )
// example for 3d:
// x y z index
// 0 0 0 [0]
// 1 0 0 [1]
// 0 1 0 [2]
// 1 1 0 [3]
// 0 0 1 [4]
// 1 0 1 [5]
// 0 1 1 [6]
// 1 1 1 [7]
positions = new boolean[ MathLib.pow( 2, numDimensions ) ][ numDimensions ];
MathLib.setCoordinateRecursive( numDimensions - 1, numDimensions, new int[ numDimensions ], positions );
moveTo( position );
}
else
{
tree = null;
positions = null;
halfTreeLevelSizes = null;
}
}
@Override
public void close() { cursor.close(); }
@Override
public T getType() { return tree[ 0 ][ 0 ]; }
@Override
public void moveTo( final float[] position )
{
// compute the offset (Math.floor) in each dimension
for (int d = 0; d < numDimensions; d++)
{
this.position[ d ] = position[ d ];
baseDim[ d ] = position[ d ] > 0 ? (int)position[ d ]: (int)position[ d ]-1;
cursor.move( baseDim[ d ] - cursor.getPosition(d), d );
}
// compute the weights [0...1] in each dimension and the inverse (1-weight) [1...0]
for (int d = 0; d < numDimensions; d++)
{
final float w = position[ d ] - baseDim[ d ];
weights[ d ][ 1 ] = w;
weights[ d ][ 0 ] = 1 - w;
}
// compute the output value
		// get the values from the image
for ( int i = 0; i < positions.length; ++i )
{
// move to the position
for ( int d = 0; d < numDimensions; ++d )
if ( positions[ i ][ d ] )
cursor.fwd(d);
tree[ numDimensions ][ i ].set( cursor.getType() );
// move back to the offset position
for ( int d = 0; d < numDimensions; ++d )
if ( positions[ i ][ d ] )
cursor.bck(d);
}
// interpolate down the tree as shown above
for ( int d = numDimensions; d > 0; --d )
{
for ( int i = 0; i < halfTreeLevelSizes[ d ]; i++ )
{
tmp1.set( tree[ d ][ i*2 ] );
tmp2.set( tree[ d ][ i*2+1 ] );
//tmp1.mul( weights[d - 1][ 0 ] );
//tmp2.mul( weights[d - 1][ 1 ] );
tmp1.mul( weights[ numDimensions - d ][ 0 ] );
tmp2.mul( weights[ numDimensions - d ][ 1 ] );
tmp1.add( tmp2 );
tree[ d - 1 ][ i ].set( tmp1 );
}
}
}
@Override
public void setPosition( final float[] position )
{
// compute the offset (Math.floor) in each dimension
for (int d = 0; d < numDimensions; d++)
{
this.position[ d ] = position[ d ];
baseDim[ d ] = position[ d ] > 0 ? (int)position[ d ]: (int)position[ d ]-1;
}
cursor.setPosition( baseDim );
// compute the weights [0...1] in each dimension and the inverse (1-weight) [1...0]
for (int d = 0; d < numDimensions; d++)
{
final float w = position[ d ] - baseDim[ d ];
weights[ d ][ 1 ] = w;
weights[ d ][ 0 ] = 1 - w;
}
// compute the output value
		// get the values from the image
for ( int i = 0; i < positions.length; ++i )
{
// move to the position
for ( int d = 0; d < numDimensions; ++d )
if ( positions[ i ][ d ] )
cursor.fwd(d);
tree[ numDimensions ][ i ].set( cursor.getType() );
// move back to the offset position
for ( int d = 0; d < numDimensions; ++d )
if ( positions[ i ][ d ] )
cursor.bck(d);
}
// interpolate down the tree as shown above
for ( int d = numDimensions; d > 0; --d )
{
for ( int i = 0; i < halfTreeLevelSizes[ d ]; i++ )
{
tmp1.set( tree[ d ][ i*2 ] );
tmp2.set( tree[ d ][ i*2+1 ] );
tmp1.mul( weights[ numDimensions - d ][ 0 ] );
tmp2.mul( weights[ numDimensions - d ][ 1 ] );
tmp1.add( tmp2 );
tree[ d - 1 ][ i ].set( tmp1 );
}
}
}
}
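// Hedged worked example (comments only, not part of the original class): for a 2d image and a
// requested position (2.3, 5.75) the code above computes
//   baseDim = (2, 5), weights[0] = {0.7, 0.3} for x, weights[1] = {0.25, 0.75} for y.
// The four corner samples p(2,5), p(3,5), p(2,6), p(3,6) land in tree[2][0..3]; the x-pass
// collapses them to tree[1][0] = 0.7*p(2,5) + 0.3*p(3,5) and tree[1][1] = 0.7*p(2,6) + 0.3*p(3,6),
// and the y-pass yields the bilinear result tree[0][0] = 0.25*tree[1][0] + 0.75*tree[1][1].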
|
package it.near.sdk.recipes;
import android.content.Context;
import android.content.SharedPreferences;
import android.support.annotation.NonNull;
import org.json.JSONObject;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import it.near.sdk.recipes.models.Recipe;
import it.near.sdk.utils.CurrentTime;
import static it.near.sdk.utils.NearUtils.checkNotNull;
public class RecipeCooler {
private static final String RECIPE_COOLER_PREFS_NAME = "NearRecipeCoolerPrefsName";
private static final String LOG_MAP = "LOG_MAP";
static final long NEVER_REPEAT = -1L;
private static final String LATEST_LOG = "LATEST_LOG";
static final String GLOBAL_COOLDOWN = "global_cooldown";
static final String SELF_COOLDOWN = "self_cooldown";
private final SharedPreferences sharedPreferences;
private final CurrentTime currentTime;
private Map<String, Long> mRecipeLogMap;
private Long mLatestLogEntry;
public RecipeCooler(@NonNull SharedPreferences sharedPreferences, @NonNull CurrentTime currentTime) {
this.sharedPreferences = checkNotNull(sharedPreferences);
this.currentTime = checkNotNull(currentTime);
}
/**
     * Filters a recipe list against the log of recipes already marked as shown, applying each recipe's cooldown period.
*
* @param recipes the recipe list to filter. This object will be modified.
*/
public void filterRecipe(List<Recipe> recipes) {
for (Iterator<Recipe> it = recipes.iterator(); it.hasNext(); ) {
Recipe recipe = it.next();
if (!canShowRecipe(recipe)) {
it.remove();
}
}
}
private boolean canShowRecipe(Recipe recipe) {
Map<String, Object> cooldown = recipe.getCooldown();
return cooldown == null ||
(globalCooldownCheck(cooldown) && selfCooldownCheck(recipe, cooldown));
}
private boolean globalCooldownCheck(Map<String, Object> cooldown) {
if (!cooldown.containsKey(GLOBAL_COOLDOWN) ||
cooldown.get(GLOBAL_COOLDOWN) == null) return true;
long expiredSeconds = (currentTime.currentTimestamp() - getLatestLogEntry()) / 1000;
return expiredSeconds >= (Long) cooldown.get(GLOBAL_COOLDOWN);
}
private boolean selfCooldownCheck(Recipe recipe, Map<String, Object> cooldown) {
if (!cooldown.containsKey(SELF_COOLDOWN) ||
cooldown.get(SELF_COOLDOWN) == null ||
!getRecipeLogMap().containsKey(recipe.getId())) return true;
if ((Long)cooldown.get(SELF_COOLDOWN) == NEVER_REPEAT &&
getRecipeLogMap().containsKey(recipe.getId())) return false;
long recipeLatestEntry = getRecipeLogMap().get(recipe.getId());
long expiredSeconds = (currentTime.currentTimestamp() - recipeLatestEntry) / 1000;
return expiredSeconds >= (Long) cooldown.get(SELF_COOLDOWN);
}
/**
* Get the latest recipe shown event timestamp.
*
* @return the timestamp for the last recipe shown event.
*/
public Long getLatestLogEntry() {
if (mLatestLogEntry == null) {
mLatestLogEntry = loadLatestEntry();
}
return mLatestLogEntry;
}
/**
* Get the map of recipe shown event timestamps.
*
* @return the map of timestamps.
*/
public Map<String, Long> getRecipeLogMap() {
if (mRecipeLogMap == null) {
mRecipeLogMap = loadMap();
}
return mRecipeLogMap;
}
/**
* Register the recipe as shown for future cooldown evaluation.
*
* @param recipeId the recipe identifier.
*/
public void markRecipeAsShown(String recipeId) {
long timeStamp = currentTime.currentTimestamp();
getRecipeLogMap().put(recipeId, timeStamp);
saveMap(mRecipeLogMap);
saveLatestEntry(currentTime.currentTimestamp());
}
private void saveMap(Map<String, Long> inputMap) {
if (sharedPreferences != null) {
JSONObject jsonObject = new JSONObject(inputMap);
String jsonString = jsonObject.toString();
SharedPreferences.Editor editor = sharedPreferences.edit();
editor.remove(LOG_MAP).commit();
editor.putString(LOG_MAP, jsonString);
editor.commit();
}
}
private Map<String, Long> loadMap() {
Map<String, Long> outputMap = new HashMap<String, Long>();
try {
if (sharedPreferences != null) {
String jsonString = sharedPreferences.getString(LOG_MAP, (new JSONObject()).toString());
JSONObject jsonObject = new JSONObject(jsonString);
Iterator<String> keysItr = jsonObject.keys();
while (keysItr.hasNext()) {
String key = keysItr.next();
Long value = (Long) jsonObject.get(key);
outputMap.put(key, value);
}
}
} catch (Exception e) {
// e.printStackTrace();
}
return outputMap;
}
private Long loadLatestEntry() {
if (sharedPreferences != null) {
return sharedPreferences.getLong(LATEST_LOG, 0L);
}
return 0L;
}
private void saveLatestEntry(long timestamp) {
mLatestLogEntry = timestamp;
if (sharedPreferences != null) {
SharedPreferences.Editor editor = sharedPreferences.edit();
editor.remove(LATEST_LOG).commit();
editor.putLong(LATEST_LOG, timestamp);
editor.commit();
}
}
public static SharedPreferences getSharedPreferences(Context context) {
return context.getSharedPreferences(RECIPE_COOLER_PREFS_NAME, Context.MODE_PRIVATE);
}
}
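// Hedged usage sketch, not part of the SDK: shows the intended call order -- filter the candidate
// recipes against the cooldown log, then mark whatever was actually shown. The Context,
// CurrentTime implementation and recipe list are assumptions supplied by the caller.
class RecipeCoolerUsageSketch {
    static void filterAndTrack(Context context, CurrentTime currentTime, List<Recipe> candidateRecipes) {
        RecipeCooler cooler = new RecipeCooler(RecipeCooler.getSharedPreferences(context), currentTime);
        // removes every recipe still inside its own or the global cooldown window
        cooler.filterRecipe(candidateRecipes);
        for (Recipe recipe : candidateRecipes) {
            // after a recipe is actually presented, record it so future filtering honours the cooldown
            cooler.markRecipeAsShown(recipe.getId());
        }
    }
}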
|
package com.jetbrains.python.debugger;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.intellij.execution.process.ProcessEvent;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.process.ProcessListener;
import com.intellij.execution.ui.ConsoleView;
import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.execution.ui.ExecutionConsole;
import com.intellij.openapi.application.ApplicationInfo;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.xdebugger.*;
import com.intellij.xdebugger.breakpoints.XBreakpoint;
import com.intellij.xdebugger.breakpoints.XBreakpointHandler;
import com.intellij.xdebugger.breakpoints.XLineBreakpoint;
import com.intellij.xdebugger.evaluation.XDebuggerEditorsProvider;
import com.intellij.xdebugger.frame.XValueChildrenList;
import com.intellij.xdebugger.stepping.XSmartStepIntoHandler;
import com.jetbrains.django.util.DjangoUtil;
import com.jetbrains.python.console.pydev.PydevCompletionVariant;
import com.jetbrains.python.debugger.django.DjangoExceptionBreakpointHandler;
import com.jetbrains.python.debugger.pydev.*;
import com.jetbrains.python.debugger.remote.vfs.PyRemotePositionConverter;
import com.jetbrains.python.remote.PyRemoteProcessHandlerBase;
import com.jetbrains.python.run.PythonProcessHandler;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.net.ServerSocket;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import static javax.swing.SwingUtilities.invokeLater;
/**
* @author yole
*/
// todo: bundle messages
// todo: pydevd supports module reloading - look for a way to use the feature
public class PyDebugProcess extends XDebugProcess implements IPyDebugProcess, ProcessListener {
private final ProcessDebugger myDebugger;
private final XBreakpointHandler[] myBreakpointHandlers;
private final PyDebuggerEditorsProvider myEditorsProvider;
private final ProcessHandler myProcessHandler;
private final ExecutionConsole myExecutionConsole;
private final Map<PySourcePosition, XLineBreakpoint> myRegisteredBreakpoints = new ConcurrentHashMap<PySourcePosition, XLineBreakpoint>();
private final Map<String, XBreakpoint<? extends ExceptionBreakpointProperties>> myRegisteredExceptionBreakpoints =
new ConcurrentHashMap<String, XBreakpoint<? extends ExceptionBreakpointProperties>>();
private final List<PyThreadInfo> mySuspendedThreads = Lists.newArrayList();
private final Map<String, XValueChildrenList> myStackFrameCache = Maps.newHashMap();
private final Map<String, PyDebugValue> myNewVariableValue = Maps.newHashMap();
private boolean myClosing = false;
private PyPositionConverter myPositionConverter;
private XSmartStepIntoHandler<?> mySmartStepIntoHandler;
private boolean myWaitingForConnection = false;
private PyStackFrame myStackFrameBeforeResume;
public PyDebugProcess(final @NotNull XDebugSession session,
@NotNull final ServerSocket serverSocket,
@NotNull final ExecutionConsole executionConsole,
@Nullable final ProcessHandler processHandler, boolean multiProcess) {
super(session);
session.setPauseActionSupported(true);
if (multiProcess) {
myDebugger = createMultiprocessDebugger(serverSocket);
}
else {
myDebugger = new RemoteDebugger(this, serverSocket, 10000);
}
myBreakpointHandlers = new XBreakpointHandler[]{new PyLineBreakpointHandler(this), new PyExceptionBreakpointHandler(this),
new DjangoLineBreakpointHandler(this), new DjangoExceptionBreakpointHandler(this)};
myEditorsProvider = new PyDebuggerEditorsProvider();
mySmartStepIntoHandler = new PySmartStepIntoHandler(this);
myProcessHandler = processHandler;
myExecutionConsole = executionConsole;
if (myProcessHandler != null) {
myProcessHandler.addProcessListener(this);
}
if (processHandler instanceof PyRemoteProcessHandlerBase) {
myPositionConverter = new PyRemotePositionConverter(this, ((PyRemoteProcessHandlerBase)processHandler).getMappingSettings());
}
else {
myPositionConverter = new PyLocalPositionConverter();
}
myDebugger.addCloseListener(new RemoteDebuggerCloseListener() {
@Override
public void closed() {
handleStop();
}
@Override
public void communicationError() {
handleCommunicationError();
}
@Override
public void exitEvent() {
handleCommunicationError();
}
});
session.addSessionListener(new XDebugSessionAdapter() {
@Override
public void beforeSessionResume() {
if (session.getCurrentStackFrame() instanceof PyStackFrame) {
myStackFrameBeforeResume = (PyStackFrame)session.getCurrentStackFrame();
}
else {
myStackFrameBeforeResume = null;
}
}
});
}
private MultiProcessDebugger createMultiprocessDebugger(ServerSocket serverSocket) {
MultiProcessDebugger debugger = new MultiProcessDebugger(this, serverSocket, 10000);
debugger.setOtherDebuggerCloseListener(new MultiProcessDebugger.DebuggerProcessListener() {
@Override
public void threadsClosed(Set<String> threadIds) {
for (PyThreadInfo t : mySuspendedThreads) {
if (threadIds.contains(t.getId())) {
if (getSession().isSuspended()) {
getSession().resume();
break;
}
}
}
}
});
return debugger;
}
protected void handleCommunicationError() {
getSession().stop();
}
protected void handleStop() {
getSession().stop();
}
public void setPositionConverter(PyPositionConverter positionConverter) {
myPositionConverter = positionConverter;
}
@Override
public PyPositionConverter getPositionConverter() {
return myPositionConverter;
}
@Override
public XBreakpointHandler<?>[] getBreakpointHandlers() {
return myBreakpointHandlers;
}
@Override
@NotNull
public XDebuggerEditorsProvider getEditorsProvider() {
return myEditorsProvider;
}
@Override
@Nullable
protected ProcessHandler doGetProcessHandler() {
return myProcessHandler;
}
@Override
@NotNull
public ExecutionConsole createConsole() {
return myExecutionConsole;
}
@Override
public XSmartStepIntoHandler<?> getSmartStepIntoHandler() {
return mySmartStepIntoHandler;
}
@Override
public void sessionInitialized() {
super.sessionInitialized();
waitForConnection(getConnectionMessage(), getConnectionTitle());
}
protected void waitForConnection(final String connectionMessage, String connectionTitle) {
ProgressManager.getInstance().run(new Task.Backgroundable(getSession().getProject(), connectionTitle, false) {
public void run(@NotNull final ProgressIndicator indicator) {
indicator.setText(connectionMessage);
try {
beforeConnect();
myWaitingForConnection = true;
myDebugger.waitForConnect();
myWaitingForConnection = false;
afterConnect();
handshake();
init();
myDebugger.run();
}
catch (final Exception e) {
myWaitingForConnection = false;
myProcessHandler.destroyProcess();
if (!myClosing) {
invokeLater(new Runnable() {
public void run() {
Messages.showErrorDialog("Unable to establish connection with debugger:\n" + e.getMessage(), getConnectionTitle());
}
});
}
}
}
});
}
public void init() {
getSession().rebuildViews();
registerBreakpoints();
}
@Override
public int handleDebugPort(int localPort) throws IOException {
if (myProcessHandler instanceof PyRemoteProcessHandlerBase) {
PyRemoteProcessHandlerBase remoteProcessHandler = (PyRemoteProcessHandlerBase)myProcessHandler;
try {
Pair<String, Integer> remoteSocket = remoteProcessHandler.obtainRemoteSocket();
remoteProcessHandler.addRemoteForwarding(remoteSocket.getSecond(), localPort);
return remoteSocket.getSecond();
}
catch (Exception e) {
throw new IOException(e);
}
}
else {
return localPort;
}
}
protected void afterConnect() {
}
protected void beforeConnect() {
}
protected String getConnectionMessage() {
return "Connecting to debugger...";
}
protected String getConnectionTitle() {
return "Connecting to debugger";
}
private void handshake() throws PyDebuggerException {
String remoteVersion = myDebugger.handshake();
String currentBuild = ApplicationInfo.getInstance().getBuild().asStringWithoutProductCode();
if ("@@BUILD_NUMBER@@".equals(remoteVersion)) {
remoteVersion = currentBuild;
}
else if (remoteVersion.startsWith("PY-")) {
remoteVersion = remoteVersion.substring(3);
}
else {
remoteVersion = null;
}
printToConsole("Connected to pydev debugger (build " + remoteVersion + ")\n", ConsoleViewContentType.SYSTEM_OUTPUT);
if (remoteVersion != null) {
if (!remoteVersion.equals(currentBuild)) {
printToConsole("Warning: wrong debugger version. Use pycharm-debugger.egg from PyCharm installation folder.\n",
ConsoleViewContentType.ERROR_OUTPUT);
}
}
}
public void printToConsole(String text, ConsoleViewContentType contentType) {
((ConsoleView)myExecutionConsole).print(text, contentType);
}
private void registerBreakpoints() {
registerLineBreakpoints();
registerExceptionBreakpoints();
}
private void registerExceptionBreakpoints() {
for (XBreakpoint<? extends ExceptionBreakpointProperties> bp : myRegisteredExceptionBreakpoints.values()) {
addExceptionBreakpoint(bp);
}
}
public void registerLineBreakpoints() {
for (Map.Entry<PySourcePosition, XLineBreakpoint> entry : myRegisteredBreakpoints.entrySet()) {
addBreakpoint(entry.getKey(), entry.getValue());
}
}
@Override
public void startStepOver() {
passToCurrentThread(ResumeOrStepCommand.Mode.STEP_OVER);
}
@Override
public void startStepInto() {
passToCurrentThread(ResumeOrStepCommand.Mode.STEP_INTO);
}
@Override
public void startStepOut() {
passToCurrentThread(ResumeOrStepCommand.Mode.STEP_OUT);
}
public void startSmartStepInto(String functionName) {
dropFrameCaches();
if (isConnected()) {
for (PyThreadInfo suspendedThread : mySuspendedThreads) {
myDebugger.smartStepInto(suspendedThread.getId(), functionName);
}
}
}
@Override
public void stop() {
myDebugger.disconnect();
}
@Override
public void resume() {
passToAllThreads(ResumeOrStepCommand.Mode.RESUME);
}
@Override
public void startPausing() {
if (isConnected()) {
myDebugger.suspendAllThreads();
}
}
private void passToAllThreads(final ResumeOrStepCommand.Mode mode) {
dropFrameCaches();
if (isConnected()) {
for (PyThreadInfo suspendedThread : mySuspendedThreads) {
myDebugger.resumeOrStep(suspendedThread.getId(), mode);
}
}
}
private void passToCurrentThread(final ResumeOrStepCommand.Mode mode) {
dropFrameCaches();
if (isConnected()) {
String threadId = threadIdBeforeResumeOrStep();
for (PyThreadInfo suspendedThread : mySuspendedThreads) {
if (StringUtil.isEmpty(threadId) || threadId.equals(suspendedThread.getId())) {
myDebugger.resumeOrStep(suspendedThread.getId(), mode);
break;
}
}
}
}
@Nullable
private String threadIdBeforeResumeOrStep() {
String threadId = null;
if (myStackFrameBeforeResume != null) {
threadId = myStackFrameBeforeResume.getThreadId();
}
return threadId;
}
protected boolean isConnected() {
return myDebugger.isConnected();
}
protected void disconnect() {
myDebugger.disconnect();
cleanUp();
}
private void cleanUp() {
mySuspendedThreads.clear();
}
@Override
public void runToPosition(@NotNull final XSourcePosition position) {
dropFrameCaches();
if (isConnected() && !mySuspendedThreads.isEmpty()) {
final PySourcePosition pyPosition = myPositionConverter.convert(position);
String type = PyLineBreakpointType.ID;
final Document document = FileDocumentManager.getInstance().getDocument(position.getFile());
if (document != null) {
if (DjangoUtil.isDjangoTemplateDocument(document, getSession().getProject())) {
type = DjangoTemplateLineBreakpointType.ID;
}
}
myDebugger.setTempBreakpoint(type, pyPosition.getFile(), pyPosition.getLine());
passToCurrentThread(ResumeOrStepCommand.Mode.RESUME);
}
}
public PyDebugValue evaluate(final String expression, final boolean execute, boolean doTrunc) throws PyDebuggerException {
dropFrameCaches();
final PyStackFrame frame = currentFrame();
return evaluate(expression, execute, frame, doTrunc);
}
private PyDebugValue evaluate(String expression, boolean execute, PyStackFrame frame, boolean trimResult) throws PyDebuggerException {
return myDebugger.evaluate(frame.getThreadId(), frame.getFrameId(), expression, execute, trimResult);
}
public String consoleExec(String command) throws PyDebuggerException {
dropFrameCaches();
final PyStackFrame frame = currentFrame();
return myDebugger.consoleExec(frame.getThreadId(), frame.getFrameId(), command);
}
@Nullable
public XValueChildrenList loadFrame() throws PyDebuggerException {
final PyStackFrame frame = currentFrame();
//do not reload the frame every time it is needed: due to a bug in pdb, reloading the frame clears all variable changes
if (!myStackFrameCache.containsKey(frame.getThreadFrameId())) {
XValueChildrenList values = myDebugger.loadFrame(frame.getThreadId(), frame.getFrameId());
myStackFrameCache.put(frame.getThreadFrameId(), values);
}
return applyNewValue(myStackFrameCache.get(frame.getThreadFrameId()), frame.getThreadFrameId());
}
private XValueChildrenList applyNewValue(XValueChildrenList pyDebugValues, String threadFrameId) {
if (myNewVariableValue.containsKey(threadFrameId)) {
PyDebugValue newValue = myNewVariableValue.get(threadFrameId);
XValueChildrenList res = new XValueChildrenList();
for (int i = 0; i < pyDebugValues.size(); i++) {
final String name = pyDebugValues.getName(i);
if (name.equals(newValue.getName())) {
res.add(name, newValue);
}
else {
res.add(name, pyDebugValues.getValue(i));
}
}
return res;
}
else {
return pyDebugValues;
}
}
@Override
public XValueChildrenList loadVariable(final PyDebugValue var) throws PyDebuggerException {
final PyStackFrame frame = currentFrame();
return myDebugger.loadVariable(frame.getThreadId(), frame.getFrameId(), var);
}
@Override
public void changeVariable(final PyDebugValue var, final String value) throws PyDebuggerException {
final PyStackFrame frame = currentFrame();
PyDebugValue newValue = myDebugger.changeVariable(frame.getThreadId(), frame.getFrameId(), var, value);
myNewVariableValue.put(frame.getThreadFrameId(), newValue);
}
@Nullable
public String loadSource(String path) {
return myDebugger.loadSource(path);
}
@Override
public boolean isVariable(String name) {
final Project project = getSession().getProject();
return PyDebugSupportUtils.isVariable(project, name);
}
private PyStackFrame currentFrame() throws PyDebuggerException {
if (!isConnected()) {
throw new PyDebuggerException("Disconnected");
}
final PyStackFrame frame = (PyStackFrame)getSession().getCurrentStackFrame();
if (frame == null) {
throw new PyDebuggerException("Process is running");
}
return frame;
}
public void addBreakpoint(final PySourcePosition position, final XLineBreakpoint breakpoint) {
myRegisteredBreakpoints.put(position, breakpoint);
if (isConnected()) {
myDebugger.setBreakpoint(breakpoint.getType().getId(), position.getFile(), position.getLine(),
breakpoint.getCondition(),
breakpoint.getLogExpression());
}
}
public void removeBreakpoint(final PySourcePosition position) {
XLineBreakpoint breakpoint = myRegisteredBreakpoints.get(position);
if (breakpoint != null) {
myRegisteredBreakpoints.remove(position);
if (isConnected()) {
myDebugger.removeBreakpoint(breakpoint.getType().getId(), position.getFile(), position.getLine());
}
}
}
public void addExceptionBreakpoint(XBreakpoint<? extends ExceptionBreakpointProperties> breakpoint) {
myRegisteredExceptionBreakpoints.put(breakpoint.getProperties().getException(), breakpoint);
if (isConnected()) {
myDebugger.addExceptionBreakpoint(breakpoint.getProperties());
}
}
public void removeExceptionBreakpoint(XBreakpoint<? extends ExceptionBreakpointProperties> breakpoint) {
myRegisteredExceptionBreakpoints.remove(breakpoint.getProperties().getException());
if (isConnected()) {
myDebugger.removeExceptionBreakpoint(breakpoint.getProperties());
}
}
public Collection<PyThreadInfo> getThreads() {
return myDebugger.getThreads();
}
@Override
public void threadSuspended(final PyThreadInfo threadInfo) {
if (!mySuspendedThreads.contains(threadInfo)) {
mySuspendedThreads.add(threadInfo);
final List<PyStackFrameInfo> frames = threadInfo.getFrames();
if (frames != null) {
final PySuspendContext suspendContext = new PySuspendContext(this, threadInfo);
XBreakpoint<?> breakpoint = null;
if (threadInfo.isStopOnBreakpoint()) {
final PySourcePosition position = frames.get(0).getPosition();
breakpoint = myRegisteredBreakpoints.get(position);
if (breakpoint == null) {
myDebugger.removeTempBreakpoint(position.getFile(), position.getLine());
}
}
else if (threadInfo.isExceptionBreak()) {
String exceptionName = threadInfo.getMessage();
threadInfo.setMessage(null);
if (exceptionName != null) {
breakpoint = myRegisteredExceptionBreakpoints.get(exceptionName);
}
}
if (breakpoint != null) {
if (!getSession().breakpointReached(breakpoint, threadInfo.getMessage(), suspendContext)) {
resume();
}
}
else {
getSession().positionReached(suspendContext);
}
}
}
}
@Override
public void threadResumed(final PyThreadInfo threadInfo) {
mySuspendedThreads.remove(threadInfo);
}
private void dropFrameCaches() {
myStackFrameCache.clear();
myNewVariableValue.clear();
}
@NotNull
public List<PydevCompletionVariant> getCompletions(String prefix) throws Exception {
if (isConnected()) {
dropFrameCaches();
final PyStackFrame frame = currentFrame();
return myDebugger.getCompletions(frame.getThreadId(), frame.getFrameId(), prefix);
}
return Lists.newArrayList();
}
@Override
public void startNotified(ProcessEvent event) {
}
@Override
public void processTerminated(ProcessEvent event) {
myDebugger.close();
}
@Override
public void processWillTerminate(ProcessEvent event, boolean willBeDestroyed) {
myClosing = true;
setKillingStrategy();
}
private void setKillingStrategy() {
if (getSession().isSuspended() && myProcessHandler instanceof PythonProcessHandler) {
((PythonProcessHandler)myProcessHandler)
.setShouldTryToKillSoftly(false); //while the process is suspended it can't terminate softly, so it's better to kill the whole process tree hard
}
}
@Override
public void onTextAvailable(ProcessEvent event, Key outputType) {
}
public PyStackFrame createStackFrame(PyStackFrameInfo frameInfo) {
return new PyStackFrame(this, frameInfo);
}
@Override
public String getCurrentStateMessage() {
if (getSession().isStopped()) {
return XDebuggerBundle.message("debugger.state.message.disconnected");
}
else if (isConnected()) {
return XDebuggerBundle.message("debugger.state.message.connected");
}
else {
return "Waiting for connection...";
}
}
public void addProcessListener(ProcessListener listener) {
ProcessHandler handler = doGetProcessHandler();
if (handler != null) {
handler.addProcessListener(listener);
}
}
public boolean isWaitingForConnection() {
return myWaitingForConnection;
}
public void setWaitingForConnection(boolean waitingForConnection) {
myWaitingForConnection = waitingForConnection;
}
}
|
package com.jetbrains.python.packaging;
import com.intellij.openapi.util.text.StringUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author vlan
*/
public class PyRequirement {
private static final Pattern NAME = Pattern.compile("\\s*((\\w|[-.])+)\\s*(.*)");
private static final Pattern VERSION = Pattern.compile("\\s*(<=?|>=?|==|!=)\\s*((\\w|[-.])+).*");
public enum Relation {
LT("<"),
LTE("<="),
GT(">"),
GTE(">="),
EQ("=="),
NE("!=");
@NotNull private final String myValue;
Relation(@NotNull String value) {
myValue = value;
}
@NotNull
@Override
public String toString() {
return myValue;
}
@Nullable
public static Relation fromString(@NotNull String value) {
for (Relation relation : Relation.values()) {
if (relation.myValue.equals(value)) {
return relation;
}
}
return null;
}
}
private final String myName;
private final Relation myRelation;
private final String myVersion;
public static final Comparator<String> VERSION_COMPARATOR = new Comparator<String>() {
@Override
public int compare(String version1, String version2) {
final List<String> vs1 = parse(version1);
final List<String> vs2 = parse(version2);
int result = 0;
for (int i = 0; i < vs1.size() && i < vs2.size(); i++) {
result = vs1.get(i).compareTo(vs2.get(i));
if (result != 0) {
break;
}
}
if (result == 0) {
return vs1.size() - vs2.size();
}
return result;
}
@NotNull
private List<String> parse(@Nullable String s) {
// TODO: Take version modifiers (dev, alpha, beta, b, etc.) into account
return s != null ? StringUtil.split(s, ".") : Collections.<String>emptyList();
}
};
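// Illustrative sketch (hypothetical inputs, not part of the original source): the comparator
// above splits versions on '.' and compares components as plain strings, so for example
// VERSION_COMPARATOR.compare("1.3", "1.4") < 0 and VERSION_COMPARATOR.compare("1.3", "1.3.1") < 0
// (a shorter version sorts first when its leading components match). Because components are
// compared lexicographically rather than numerically, multi-digit components such as "10" may
// not order as expected.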
public PyRequirement(@NotNull String name) {
this(name, null, null);
}
public PyRequirement(@NotNull String name, @NotNull String version) {
this(name, Relation.EQ, version);
}
public PyRequirement(@NotNull String name, @Nullable Relation relation, @Nullable String version) {
if (relation == null) {
assert version == null;
}
myName = name;
myRelation = relation;
myVersion = version;
}
@NotNull
@Override
public String toString() {
if (myRelation != null && myVersion != null) {
return myName + myRelation + myVersion;
}
else {
return myName;
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PyRequirement that = (PyRequirement)o;
if (!myName.toLowerCase().equals(that.myName.toLowerCase())) return false;
if (myRelation != null ? !myRelation.equals(that.myRelation) : that.myRelation != null) return false;
if (myVersion != null ? !myVersion.equals(that.myVersion) : that.myVersion != null) return false;
return true;
}
@Override
public int hashCode() {
int result = myName.toLowerCase().hashCode();
result = 31 * result + (myRelation != null ? myRelation.hashCode() : 0);
result = 31 * result + (myVersion != null ? myVersion.hashCode() : 0);
return result;
}
public boolean match(@NotNull List<PyPackage> packages) {
for (PyPackage pkg : packages) {
if (myName.equalsIgnoreCase(pkg.getName())) {
// TODO: Multiple versions in requirements spec
if (myVersion == null) {
return true;
}
final int cmp = VERSION_COMPARATOR.compare(pkg.getVersion(), myVersion);
switch (myRelation) {
case LT:
return cmp < 0;
case LTE:
return cmp <= 0;
case GT:
return cmp > 0;
case GTE:
return cmp >= 0;
case EQ:
return cmp == 0;
case NE:
return cmp != 0;
}
}
}
return false;
}
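// Illustrative sketch (hypothetical data, not part of the original source): given an installed
// package named "django" at version "1.4", a requirement built as
// new PyRequirement("Django", Relation.GTE, "1.3") would match (names are compared
// case-insensitively and versions via VERSION_COMPARATOR), whereas
// new PyRequirement("Django", Relation.LT, "1.4") would not.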
@Nullable
public static PyRequirement fromString(@NotNull String s) {
// TODO: Extras, multi-line requirements '\'
final Matcher nameMatcher = NAME.matcher(s);
if (!nameMatcher.matches()) {
return null;
}
final String name = nameMatcher.group(1);
final String rest = nameMatcher.group(3);
final String rel;
final String version;
if (!rest.trim().isEmpty()) {
final Matcher versionMatcher = VERSION.matcher(rest);
if (versionMatcher.matches()) {
rel = versionMatcher.group(1);
version = versionMatcher.group(2);
}
else {
return null;
}
}
else {
rel = null;
version = null;
}
Relation relation = null;
if (rel != null) {
relation = Relation.fromString(rel);
if (relation == null) {
return null;
}
}
if (relation == null && version != null) {
return null;
}
return new PyRequirement(name, relation, version);
}
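// Illustrative sketch (assumed inputs, not part of the original source) of how the NAME and
// VERSION patterns drive fromString():
//   fromString("Django>=1.3")  -> name "Django", relation GTE, version "1.3"
//   fromString("nose")         -> name "nose", no relation or version constraint
//   fromString("pkg ~= 1.0")   -> null ("~=" is not a recognised relation)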
@Nullable
public static List<PyRequirement> parse(@NotNull String s) {
final List<PyRequirement> result = new ArrayList<PyRequirement>();
for (String line : StringUtil.splitByLines(s)) {
final String trimmed = line.trim();
if (!trimmed.isEmpty()) {
final PyRequirement req = fromString(line);
if (req != null) {
result.add(req);
}
else {
return null;
}
}
}
return result;
}
@NotNull
public String getName() {
return myName;
}
@Nullable
public Relation getRelation() {
return myRelation;
}
@Nullable
public String getVersion() {
return myVersion;
}
}
|
package org.rakam.analysis;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.GroupingElement;
import com.facebook.presto.sql.tree.LongLiteral;
import com.facebook.presto.sql.tree.Node;
import com.facebook.presto.sql.tree.NodeLocation;
import com.facebook.presto.sql.tree.QualifiedName;
import com.facebook.presto.sql.tree.QualifiedNameReference;
import com.facebook.presto.sql.tree.Query;
import com.facebook.presto.sql.tree.QuerySpecification;
import com.facebook.presto.sql.tree.Relation;
import com.facebook.presto.sql.tree.SelectItem;
import com.facebook.presto.sql.tree.SingleColumn;
import com.facebook.presto.sql.tree.SortItem;
import com.facebook.presto.sql.tree.Union;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.primitives.Ints;
import io.airlift.log.Logger;
import io.netty.channel.EventLoopGroup;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.swagger.annotations.ApiModelProperty;
import org.rakam.collection.SchemaField;
import org.rakam.http.ForHttpServer;
import org.rakam.plugin.EventStore.CopyType;
import org.rakam.report.QueryExecution;
import org.rakam.report.QueryExecutorService;
import org.rakam.report.QueryResult;
import org.rakam.server.http.HttpService;
import org.rakam.server.http.RakamHttpRequest;
import org.rakam.server.http.Response;
import org.rakam.server.http.annotations.Api;
import org.rakam.server.http.annotations.ApiOperation;
import org.rakam.server.http.annotations.ApiParam;
import org.rakam.server.http.annotations.Authorization;
import org.rakam.server.http.annotations.BodyParam;
import org.rakam.server.http.annotations.IgnoreApi;
import org.rakam.server.http.annotations.JsonRequest;
import org.rakam.util.ExportUtil;
import org.rakam.util.JsonHelper;
import org.rakam.util.RakamException;
import org.rakam.util.LogUtil;
import javax.inject.Inject;
import javax.inject.Named;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;
import static io.netty.handler.codec.http.HttpHeaders.Names.ACCEPT;
import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST;
import static io.netty.handler.codec.http.HttpResponseStatus.FORBIDDEN;
import static java.util.Objects.requireNonNull;
import static org.rakam.analysis.ApiKeyService.AccessKeyType.READ_KEY;
import static org.rakam.report.QueryExecutorService.MAX_QUERY_RESULT_LIMIT;
import static org.rakam.server.http.HttpServer.errorMessage;
import static org.rakam.util.JsonHelper.encode;
import static org.rakam.util.JsonHelper.jsonObject;
@Path("/query")
@Api(value = "/query", nickname = "query", description = "Execute query", tags = {"query"})
@Produces({"application/json"})
public class QueryHttpService
extends HttpService
{
private static final Logger LOGGER = Logger.get(QueryHttpService.class);
private final QueryExecutorService executorService;
private final ApiKeyService apiKeyService;
private EventLoopGroup eventLoopGroup;
private final SqlParser sqlParser = new SqlParser();
@Inject
public QueryHttpService(ApiKeyService apiKeyService, QueryExecutorService executorService)
{
this.executorService = executorService;
this.apiKeyService = apiKeyService;
}
@Path("/execute")
@ApiOperation(value = "Execute query on event data-set",
authorizations = @Authorization(value = "read_key")
)
@JsonRequest
public CompletableFuture<Response<QueryResult>> execute(
@Named("project") String project,
@BodyParam ExportQuery query)
{
QueryExecution queryExecution = executorService.executeQuery(project, query.query,
query.limit == null ? MAX_QUERY_RESULT_LIMIT : query.limit);
return queryExecution
.getResult().thenApply(result -> {
if (result.isFailed()) {
return Response.value(result, BAD_REQUEST);
}
return Response.ok(result);
});
}
@Path("/export")
@ApiOperation(value = "Export query results",
authorizations = @Authorization(value = "read_key")
)
@IgnoreApi
@JsonRequest
public void export(RakamHttpRequest request, @Named("project") String project, @BodyParam ExportQuery query)
{
executorService.executeQuery(project, query.query, query.limit == null ? MAX_QUERY_RESULT_LIMIT : query.limit).getResult().thenAccept(result -> {
if (result.isFailed()) {
throw new RakamException(result.getError().toString(), BAD_REQUEST);
}
byte[] bytes;
switch (query.exportType) {
case CSV:
bytes = ExportUtil.exportAsCSV(result);
break;
case AVRO:
bytes = ExportUtil.exportAsAvro(result);
break;
case JSON:
bytes = JsonHelper.encodeAsBytes(result.getResult());
break;
default:
throw new IllegalStateException();
}
request.response(bytes).end();
});
}
@GET
@Consumes("text/event-stream")
@IgnoreApi
@ApiOperation(value = "Analyze events asynchronously", request = ExportQuery.class,
authorizations = @Authorization(value = "read_key")
)
@Path("/execute")
public void execute(RakamHttpRequest request)
{
handleServerSentQueryExecution(request, ExportQuery.class, (project, query) ->
executorService.executeQuery(project, query.query,
query.limit == null ? MAX_QUERY_RESULT_LIMIT : query.limit));
}
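// Illustrative request shape for the server-sent-events endpoint above (hypothetical values,
// not part of the original source): the client issues a GET with an "Accept: text/event-stream"
// header and passes the query JSON and the API key as query parameters, roughly
//   GET /query/execute?data={"query":"SELECT 1"}&read_key=<key>
// Query progress is then streamed as "stats" events and the final outcome as a single "result"
// event, as implemented in handleServerSentQueryExecution below.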
public <T> void handleServerSentQueryExecution(RakamHttpRequest request, Class<T> clazz, BiFunction<String, T, QueryExecution> executorFunction)
{
handleServerSentQueryExecution(request, clazz, executorFunction, READ_KEY, true);
}
private static final Duration RETRY_DURATION = Duration.ofSeconds(600);
public <T> void handleServerSentQueryExecution(RakamHttpRequest request, Class<T> clazz, BiFunction<String, T, QueryExecution> executorFunction, ApiKeyService.AccessKeyType keyType, boolean killOnConnectionClose)
{
if (!Objects.equals(request.headers().get(ACCEPT), "text/event-stream")) {
request.response("The endpoint only supports text/event-stream as Accept header", HttpResponseStatus.NOT_ACCEPTABLE).end();
return;
}
RakamHttpRequest.StreamResponse response = request.streamResponse(RETRY_DURATION);
List<String> data = request.params().get("data");
if (data == null || data.isEmpty()) {
response.send("result", encode(errorMessage("data query parameter is required", BAD_REQUEST))).end();
return;
}
T query;
try {
query = JsonHelper.readSafe(data.get(0), clazz);
}
catch (IOException e) {
response.send("result", encode(errorMessage("JSON couldn't parsed: " + e.getMessage(), BAD_REQUEST))).end();
return;
}
List<String> apiKey = request.params().get(keyType.getKey());
if (apiKey == null || apiKey.isEmpty()) {
String message = keyType.getKey() + " query parameter is required";
LogUtil.logException(request, new RakamException(message, BAD_REQUEST));
response.send("result", encode(errorMessage(message, BAD_REQUEST))).end();
return;
}
String project;
try {
project = apiKeyService.getProjectOfApiKey(apiKey.get(0), keyType);
}
catch (RakamException e) {
if (e.getStatusCode() == FORBIDDEN) {
response.send("result", encode(errorMessage(e.getMessage(), FORBIDDEN))).end();
}
else {
response.send("result", encode(errorMessage(e.getMessage(), e.getStatusCode()))).end();
}
return;
}
QueryExecution execute;
try {
execute = executorFunction.apply(project, query);
}
catch (RakamException e) {
LogUtil.logException(request, e);
response.send("result", encode(errorMessage("Couldn't execute query: " + e.getMessage(), BAD_REQUEST))).end();
return;
}
catch (Exception e) {
LOGGER.error(e, "Error while executing query");
response.send("result", encode(errorMessage("Couldn't execute query: Internal error", BAD_REQUEST))).end();
return;
}
handleServerSentQueryExecutionInternal(response, execute, killOnConnectionClose);
}
public void handleServerSentQueryExecution(RakamHttpRequest request, QueryExecution query, boolean killOnConnectionClose)
{
RakamHttpRequest.StreamResponse response = request.streamResponse(RETRY_DURATION);
handleServerSentQueryExecutionInternal(response, query, killOnConnectionClose);
}
private void handleServerSentQueryExecutionInternal(RakamHttpRequest.StreamResponse response, QueryExecution query, boolean killOnConnectionClose)
{
if (query == null) {
// TODO: custom message
response.send("result", encode(jsonObject()
.put("success", false)
.put("query", query.getQuery())
.put("error", "Not running"))).end();
return;
}
query.getResult().whenComplete((result, ex) -> {
if (ex != null) {
LOGGER.error(ex, "Error while executing query");
response.send("result", encode(jsonObject()
.put("success", false)
.put("query", query.getQuery())
.put("error", ex.getCause() instanceof RakamException ?
ex.getCause().getMessage() :
"Internal error"))).end();
}
else if (result.isFailed()) {
response.send("result", encode(jsonObject()
.put("success", false)
.put("query", query.getQuery())
.putPOJO("error", result.getError()))).end();
}
else {
List<? extends SchemaField> metadata = result.getMetadata();
response.send("result", encode(jsonObject()
.put("success", true)
.putPOJO("query", query.getQuery())
.putPOJO("properties", result.getProperties())
.putPOJO("result", result.getResult())
.putPOJO("metadata", metadata))).end();
}
});
eventLoopGroup.schedule(new Runnable()
{
@Override
public void run()
{
if (response.isClosed() && !query.isFinished() && killOnConnectionClose) {
query.kill();
}
else if (!query.isFinished()) {
if (!response.isClosed()) {
String encode = encode(query.currentStats());
response.send("stats", encode);
}
eventLoopGroup.schedule(this, 500, TimeUnit.MILLISECONDS);
}
}
}, 500, TimeUnit.MILLISECONDS);
}
@Inject
public void setWorkerGroup(@ForHttpServer EventLoopGroup eventLoopGroup)
{
this.eventLoopGroup = eventLoopGroup;
}
public static class ExportQuery
{
@ApiModelProperty(example = "SELECT 1", value = "SQL query that will be executed on data-set")
public final String query;
public final Integer limit;
public final CopyType exportType;
@JsonCreator
public ExportQuery(
@ApiParam("query") String query,
@ApiParam("export_type") CopyType exportType,
@ApiParam(value = "limit", required = false) Integer limit)
{
this.query = requireNonNull(query, "query is required").trim().replaceAll(";+$", "");
if (limit != null && limit > MAX_QUERY_RESULT_LIMIT) {
throw new IllegalArgumentException("maximum value of limit is " + MAX_QUERY_RESULT_LIMIT);
}
this.exportType = exportType;
this.limit = limit;
}
}
@JsonRequest
@ApiOperation(value = "Explain query", authorizations = @Authorization(value = "read_key"))
@Path("/explain")
public ResponseQuery explain(@ApiParam(value = "query", description = "Query") String query)
{
try {
Query statement = (Query) sqlParser.createStatement(query);
Map<String, NodeLocation> map = statement.getWith().map(with -> {
ImmutableMap.Builder<String, NodeLocation> builder = ImmutableMap.builder();
with.getQueries().stream()
.forEach(withQuery ->
builder.put(withQuery.getName(), withQuery.getQuery().getLocation().orElse(null)));
return builder.build();
}).orElse(null);
if (statement.getQueryBody() instanceof QuerySpecification) {
return parseQuerySpecification((QuerySpecification) statement.getQueryBody(),
statement.getLimit(), statement.getOrderBy(), map);
}
else if (statement.getQueryBody() instanceof Union) {
Relation relation = ((Union) statement.getQueryBody()).getRelations().get(0);
while (relation instanceof Union) {
relation = ((Union) relation).getRelations().get(0);
}
if (relation instanceof QuerySpecification) {
return parseQuerySpecification((QuerySpecification) relation,
statement.getLimit(), statement.getOrderBy(), map);
}
}
return new ResponseQuery(map,
statement.getQueryBody().getLocation().orElse(null),
ImmutableList.of(), ImmutableList.of(),
statement.getLimit().map(l -> Long.parseLong(l)).orElse(null));
}
catch (Throwable e) {
return ResponseQuery.UNKNOWN;
}
}
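// Illustrative sketch (hypothetical query, not part of the original source): for a request such
// as explain("SELECT country, count(*) FROM events GROUP BY 1 LIMIT 10") the returned
// ResponseQuery would roughly carry a groupBy entry pointing at select index 1 and a limit of 10;
// any parse failure falls back to ResponseQuery.UNKNOWN.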
@JsonRequest
@ApiOperation(value = "Test query", authorizations = @Authorization(value = "read_key"))
@Path("/metadata")
public CompletableFuture<List<SchemaField>> metadata(@javax.inject.Named("project") String project, @ApiParam("query") String query)
{
return executorService.metadata(project, query);
}
private ResponseQuery parseQuerySpecification(QuerySpecification queryBody, Optional<String> limitOutside, List<SortItem> orderByOutside, Map<String, NodeLocation> with)
{
Function<Node, Integer> mapper = item -> {
if (item instanceof GroupingElement) {
return findSelectIndex(((GroupingElement) item).enumerateGroupingSets(), queryBody.getSelect().getSelectItems()).orElse(null);
}
else if (item instanceof LongLiteral) {
return Ints.checkedCast(((LongLiteral) item).getValue());
}
else {
return null;
}
};
List<GroupBy> groupBy = queryBody.getGroupBy().map(value -> value.getGroupingElements().stream()
.map(item -> new GroupBy(mapper.apply(item), item.enumerateGroupingSets().toString()))
.collect(Collectors.toList())).orElse(ImmutableList.of());
List<Ordering> orderBy;
if (orderByOutside != null) {
orderBy = orderByOutside.stream()
.map(e -> new Ordering(e.getOrdering(), mapper.apply(e.getSortKey()), e.getSortKey().toString()))
.collect(Collectors.toList());
}
else {
orderBy = queryBody.getOrderBy().stream().map(item ->
new Ordering(item.getOrdering(), mapper.apply(item.getSortKey()), item.getSortKey().toString()))
.collect(Collectors.toList());
}
String limitStr = limitOutside.orElse(queryBody.getLimit().orElse(null));
Long limit = null;
if (limitStr != null) {
try {
limit = Long.parseLong(limitStr);
}
catch (NumberFormatException e) {
// ignore: leave the limit null when it cannot be parsed as a number
}
}
return new ResponseQuery(with, queryBody.getLocation().orElse(null), groupBy, orderBy, limit);
}
private Optional<Integer> findSelectIndex(List<Set<Expression>> items, List<SelectItem> selectItems)
{
if (items.size() == 1) {
Set<Expression> item = items.get(0);
if (item.size() == 1) {
Expression next = item.iterator().next();
if (next instanceof LongLiteral) {
return Optional.of(((int) ((LongLiteral) next).getValue()));
}
else if (next instanceof QualifiedNameReference) {
for (int i = 0; i < selectItems.size(); i++) {
if (selectItems.get(i) instanceof SingleColumn) {
if (((SingleColumn) selectItems.get(i)).getExpression().equals(next)) {
return Optional.of(i + 1);
}
}
}
}
return Optional.empty();
}
}
return Optional.empty();
}
public static class ResponseQuery
{
public static final ResponseQuery UNKNOWN = new ResponseQuery(null, null, ImmutableList.of(), ImmutableList.of(), null);
public final Map<String, NodeLocation> with;
public final List<GroupBy> groupBy;
public final List<Ordering> orderBy;
public final Long limit;
public final NodeLocation queryLocation;
@JsonCreator
public ResponseQuery(Map<String, NodeLocation> with,
NodeLocation queryLocation,
List<GroupBy> groupBy,
List<Ordering> orderBy,
Long limit)
{
this.with = with;
this.queryLocation = queryLocation;
this.groupBy = groupBy;
this.orderBy = orderBy;
this.limit = limit;
}
}
public static class Ordering
{
public final SortItem.Ordering ordering;
public final Integer index;
public final String expression;
@JsonCreator
public Ordering(SortItem.Ordering ordering, Integer index, String expression)
{
this.ordering = ordering;
this.index = index;
this.expression = expression;
}
}
public static class GroupBy
{
public final Integer index;
public final String expression;
@JsonCreator
public GroupBy(Integer index, String expression)
{
this.index = index;
this.expression = expression;
}
}
}
|
package org.hyperic.sigar.win32.test;
import java.util.ArrayList;
import org.hyperic.sigar.test.SigarTestCase;
import org.hyperic.sigar.win32.RegistryKey;
public class TestRegistryKey extends SigarTestCase {
private static final boolean TEST_WRITE = false;
public TestRegistryKey(String name) {
super(name);
}
public void testRegistryRead() throws Exception {
RegistryKey software =
RegistryKey.LocalMachine.openSubKey("SOFTWARE");
String[] keys = software.getSubKeyNames();
assertTrue(keys.length > 0);
software.close();
}
public void testHardwareValues() throws Exception {
RegistryKey hw =
RegistryKey.LocalMachine.openSubKey("HARDWARE\\DESCRIPTION\\System");
try {
ArrayList values = new ArrayList();
hw.getMultiStringValue("SystemBiosVersion", values);
assertGtZeroTrace("SystemBiosVersion.size()", values.size());
traceln("SystemBiosVersion=" + values);
} catch (Exception e) { /*value may not be present - ok*/ }
RegistryKey cpu0 = hw.openSubKey("CentralProcessor\\0");
String cpu = cpu0.getStringValue("ProcessorNameString");
assertLengthTrace("cpu0", cpu);
cpu0.close();
hw.close();
}
public void testSoftwareValues() throws Exception {
RegistryKey ms =
RegistryKey.LocalMachine.openSubKey("SOFTWARE\\Microsoft");
RegistryKey msmq = null;
try {
msmq = ms.openSubKey("MSMQ\\Parameters");
} catch (Exception e) { /*not installed - ok*/ }
if (msmq != null) {
traceln("MSMQ...");
assertTrue(msmq.getSubKeyNames().length > 0);
String build = msmq.getStringValue("CurrentBuild");
assertLengthTrace("CurrentBuild", build);
int id = msmq.getIntValue("SeqID");
assertGtZeroTrace("SeqID", id);
msmq.close();
}
RegistryKey sql = null;
try {
sql = ms.openSubKey("Microsoft SQL Server\\MSSQL.1\\Setup");
} catch (Exception e) { /*not installed - ok*/ }
if (sql != null) {
traceln("MsSQL...");
String edition = sql.getStringValue("Edition");
assertLengthTrace("Edition", edition);
sql.close();
}
ms.close();
}
// don't want to be writing to the registry
public void testRegistryWrite() throws Exception {
if (!TEST_WRITE) {
return;
}
RegistryKey key = RegistryKey.LocalMachine.
createSubKey("SOFTWARE\\Hyperic\\Test", "Hyperic Test");
key.setStringValue("TestString", "Hello World");
key.setIntValue("Test Int", 100);
String[] astrNames = key.getValueNames();
String strValue = key.getStringValue(astrNames[0]);
//assertTrue(strValue.equals("Covalent Test"));
int iValue = key.getIntValue(astrNames[1]);
//assertTrue(iValue == 100);
key = RegistryKey.LocalMachine.openSubKey("SOFTWARE\\Hyperic");
astrNames = key.getSubKeyNames();
// Clean up
key.deleteSubKey("Test");
}
}
|
package org.intermine.bio.ontology;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.intermine.util.StringUtil;
/**
* This class handles the ontologies for OboToModel.
*
* @author Julie Sullivan
*/
public class OboToModelMapping
{
private String namespace;
private Map<String, Set<String>> childToParents, parentToChildren, partOfs;
// SO terms to filter on, eg. sequence_feature
private Set<String> termsToKeep = new HashSet<String>();
// list of classes to load into the model. OboOntology is an object that contains the SO term
// value eg. sequence_feature and the Java name, eg. org.intermine.bio.SequenceFeature
private Map<String, OboOntology> validOboTerms = new HashMap<String, OboOntology>();
// contains ALL non-obsolete terms. key = sequence_feature, value = SO:001
private Map<String, String> oboNameToIdentifier = new HashMap<String, String>();
// special case for sequence_feature, we always need this term in the model
private static final String SEQUENCE_FEATURE = "SO:0000110";
private static final boolean DEBUG = false;
/**
* Constructor.
*
* @param termsToKeep list of terms to filter the results on
* @param namespace the namespace to use in generating URI-based identifiers
*/
public OboToModelMapping(Set<String> termsToKeep, String namespace) {
this.namespace = namespace;
this.termsToKeep = termsToKeep;
}
/**
* Returns name of the package, eg. org.intermine.bio.
*
* @return name of the package, eg. org.intermine.bio
*/
public String getNamespace() {
return namespace;
}
/**
* @param childIdentifier the oboterm to get relationships for
* @return all collections for given class
*/
public Set<String> getPartOfs(String childIdentifier) {
return partOfs.get(childIdentifier);
}
/**
* Test if a class is in the model. Only used after the obo terms have been processed and
* trimmed.
*
* @param identifier for obo term
* @return true if the class is in the model
*/
public boolean classInModel(String identifier) {
return validOboTerms.containsKey(identifier);
}
/**
* Test whether a term is in the list the user provided.
*
* @param identifier for obo term
* @return false if the obo term isn't in the list the user provided, true if the term is in the list or the list is empty
*/
private boolean validTerm(String identifier) {
OboOntology o = validOboTerms.get(identifier);
if (o == null) {
return false;
}
String oboName = o.getOboTermName();
if (termsToKeep.isEmpty() || termsToKeep.contains(oboName)
|| SEQUENCE_FEATURE.equals(identifier)) {
return true;
}
return false;
}
// this term has children
private boolean validParent(String identifier) {
return parentToChildren.containsKey(identifier);
}
/**
* @param childIdentifier identifier for obo term of interest
* @return list of identifiers for parent obo terms
*/
public Set<String> getParents(String childIdentifier) {
return childToParents.get(childIdentifier);
}
/**
* @param identifier for obo term
* @return name of term, eg. sequence_feature
*/
public String getName(String identifier) {
OboOntology o = validOboTerms.get(identifier);
if (o == null) {
return null;
}
return o.getOboTermName();
}
/**
* Returns list of (valid for this model) obo term identifiers.
*
* @return set of obo term identifiers to process, eg. SO:001
*/
public Set<String> getOboTermIdentifiers() {
return validOboTerms.keySet();
}
/**
* Processes obo relations from OBOEdit. Parses each relationship and builds collections and
* parents. Also assigns grandchildren the collections of the grandparents and trims/flattens
* terms if the user has provided a terms list.
*
* @param oboRelations List of obo relations from OBOEdit
*/
public void processRelations(List<OboRelation> oboRelations) {
childToParents = new HashMap<String, Set<String>>();
partOfs = new HashMap<String, Set<String>>();
for (OboRelation r : oboRelations) {
String child = r.childTermId;
String parent = r.parentTermId;
if (StringUtils.isEmpty(child) || StringUtils.isEmpty(parent) || !classInModel(child)) {
continue;
}
String relationshipType = r.getRelationship().getName();
if ((relationshipType.equals("part_of") || relationshipType.equals("member_of"))
&& r.direct) {
assignPartOf(parent, child);
assignPartOf(child, parent);
} else if (relationshipType.equals("is_a") && r.direct) {
Set<String> parents = childToParents.get(child);
if (parents == null) {
parents = new HashSet<String>();
childToParents.put(child, parents);
}
parents.add(parent);
}
}
buildParentsMap();
for (OboRelation r : oboRelations) {
String child = r.childTermId;
String parent = r.parentTermId;
String relationshipType = r.getRelationship().getName();
if (relationshipType.equals("is_a") && r.direct) {
assignPartOfsToChild(parent, child);
}
}
if (!termsToKeep.isEmpty()) {
trimModel();
}
// remove tRNA.genes if Transcript.genes exists
removeRedundantCollections();
setReverseReferences();
}
private void assignPartOf(String parent, String child) {
Set<String> colls = partOfs.get(child);
if (colls == null) {
colls = new HashSet<String>();
partOfs.put(child, colls);
}
colls.add(parent);
}
private void assignPartOfsToChild(String parent, String child) {
transferPartOfs(parent, child);
// keep going up the tree
Set<String> grandparents = childToParents.get(parent);
if (grandparents != null && !grandparents.isEmpty()) {
for (String grandparent : grandparents) {
assignPartOfsToChild(grandparent, child);
}
}
}
private void transferPartOfs(String parent, String child) {
Set<String> parentPartOfs = partOfs.get(parent);
if (parentPartOfs != null && !parentPartOfs.isEmpty()) {
Set<String> childPartOfs = partOfs.get(child);
if (childPartOfs == null) {
childPartOfs = new HashSet<String>();
partOfs.put(child, childPartOfs);
}
childPartOfs.addAll(parentPartOfs);
}
}
// build parent --> children map
private void buildParentsMap() {
parentToChildren = new HashMap<String, Set<String>>();
for (String child : childToParents.keySet()) {
Set<String> parents = childToParents.get(child);
for (String parent : parents) {
Set<String> kids = parentToChildren.get(parent);
if (kids == null) {
kids = new HashSet<String>();
parentToChildren.put(parent, kids);
}
kids.add(child);
}
}
}
private void trimModel() {
Map<String, OboOntology> oboTermsCopy = new HashMap<String, OboOntology>(validOboTerms);
System.out .println("Total terms: " + validOboTerms.size());
for (String oboTermIdentifier : oboTermsCopy.keySet()) {
prune(oboTermIdentifier);
}
System.out .println("Total terms, post-pruning: " + validOboTerms.size());
oboTermsCopy = new HashMap<String, OboOntology>(validOboTerms);
for (String oboTermIdentifier : oboTermsCopy.keySet()) {
if (!validTerm(oboTermIdentifier)) {
flatten(oboTermIdentifier);
}
}
System.out .println("Total terms, post-flattening: " + validOboTerms.size());
}
/*
* remove term if:
* 1. not in list of desired terms
* 2. no children
*/
private void prune(String oboTermIdentifier) {
// process each child term
if (parentToChildren.get(oboTermIdentifier) != null) {
Set<String> children = new HashSet<String>(parentToChildren.get(oboTermIdentifier));
for (String child : children) {
prune(child);
}
}
// if this term has no children AND it's not on our list = DELETE
if (!validParent(oboTermIdentifier) && !validTerm(oboTermIdentifier)) {
removeTerm(oboTermIdentifier);
debugOutput(oboTermIdentifier, "Pruning [no children, not on list]");
}
}
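// Illustrative sketch (hypothetical terms, not part of the original source): given a hierarchy
// region -> exon -> coding_exon with termsToKeep = {exon}, prune() removes coding_exon (a leaf
// that is not on the list), and the subsequent flatten() pass removes region (a root term that
// is not on the list), leaving only exon in the model.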
private void flatten(String oboTerm) {
Set<String> parents = childToParents.get(oboTerm);
Set<String> kids = parentToChildren.get(oboTerm);
// has both parents and children
if (parents != null && kids != null) {
// multiple parents and children. can't flatten.
if (parents.size() > 1 && kids.size() > 1) {
return;
}
// term only has one parent. remove term and assign this term's parents and children
// to each other
if (parents.size() == 1) {
String parent = parents.toArray()[0].toString();
// add children to new parent
parentToChildren.get(parent).addAll(kids);
// add parent to new children
for (String kid : kids) {
Set<String> otherParents = childToParents.get(kid);
otherParents.remove(oboTerm);
otherParents.add(parent);
}
removeTerm(oboTerm);
debugOutput(oboTerm, "Flattening [term had only one parent]");
return;
}
// term has only one child. remove term and assign child to new parents.
if (kids.size() == 1) {
String kid = kids.toArray()[0].toString();
// add parents to new kid
childToParents.get(kid).addAll(parents);
// reassign parents to new kid
for (String parent : parents) {
Set<String> otherChildren = parentToChildren.get(parent);
otherChildren.remove(oboTerm);
otherChildren.add(kid);
}
removeTerm(oboTerm);
debugOutput(oboTerm, "Flattening [term had only one child]");
return;
}
// root term
} else if (parents == null) {
removeTerm(oboTerm);
debugOutput(oboTerm, "Flattening [root term]");
}
// no children, delete!
if (kids == null) {
removeTerm(oboTerm);
debugOutput(oboTerm, "Flattening [no children]");
}
}
// make sure collection is at the highest level term
// eg. Gene.transcripts should mean that Gene.mRNAs never happens
private void removeRedundantCollections() {
for (String parent : validOboTerms.keySet()) {
Set<String> collections = partOfs.get(parent);
if (collections != null) {
for (String collectionName : collections) {
removeCollection(parent, collectionName);
}
}
}
}
// remove collection from children of the specified term
private void removeCollection(String parent, String collectionName) {
Set<String> children = parentToChildren.get(parent);
if (children == null) {
return;
}
for (String child : children) {
Set<String> childCollections = partOfs.get(child);
if (childCollections != null) {
childCollections.remove(collectionName);
}
removeCollection(child, collectionName);
}
}
private void setReverseReferences() {
// iterate over a copy: assignPartOf() may add new keys to partOfs while we are iterating
Map<String, Set<String>> partOfsCopy = new HashMap<String, Set<String>>(partOfs);
for (Map.Entry<String, Set<String>> entry : partOfsCopy.entrySet()) {
String oboTerm = entry.getKey();
Set<String> colls = new HashSet<String>(entry.getValue());
for (String collectionName : colls) {
assignPartOf(oboTerm, collectionName);
}
}
}
private void debugOutput(String oboTerm, String err) {
if (DEBUG) {
err += " " + oboTerm + " Valid terms count: " + validOboTerms.size();
System.out.println(err);
}
}
// remove term from every map
private void removeTerm(String oboTerm) {
validOboTerms.remove(oboTerm);
childToParents.remove(oboTerm);
parentToChildren.remove(oboTerm);
partOfs.remove(oboTerm);
removeCollections(oboTerm);
// remove mention in maps
Map<String, Set<String>> mapCopy
= new HashMap<String, Set<String>>(parentToChildren);
for (Map.Entry<String, Set<String>> entry : mapCopy.entrySet()) {
String parent = entry.getKey();
Set<String> children = entry.getValue();
// remove current term
children.remove(oboTerm);
// if parent is childless, remove
if (children.size() == 0) {
parentToChildren.remove(parent);
}
}
mapCopy = new HashMap<String, Set<String>>(childToParents);
for (Map.Entry<String, Set<String>> entry : mapCopy.entrySet()) {
String child = entry.getKey();
Set<String> parents = entry.getValue();
// remove current term
parents.remove(oboTerm);
// if child has no parents, remove it from the childToParents map
if (parents.size() == 0) {
childToParents.remove(child);
}
}
}
private void removeCollections(String oboTerm) {
Map<String, Set<String>> mapCopy = new HashMap<String, Set<String>>(partOfs);
for (Map.Entry<String, Set<String>> entry : mapCopy.entrySet()) {
Set<String> collections = entry.getValue();
if (collections.contains(oboTerm)) {
collections.remove(oboTerm);
}
}
}
/**
* For each term in our list, add to our map if the term is not obsolete.
*
* @param terms set of obo terms to process
*/
public void processOboTerms(Set<OboTerm> terms) {
for (OboTerm term : terms) {
if (!term.isObsolete()) {
String identifier = term.getId().trim();
String name = term.getName().trim();
if (!StringUtils.isEmpty(identifier) && !StringUtils.isEmpty(name)) {
OboOntology c = new OboOntology(identifier, name);
validOboTerms.put(identifier, c);
oboNameToIdentifier.put(name, identifier);
}
}
}
}
/**
* Check that each OBO term in file provided by user is in OBO file.
*
* @param oboFilename name of obo file - used for error message only
* @param termsToKeepFileName file containing obo terms - used for error message only
*/
public void validateTermsToKeep(String oboFilename, String termsToKeepFileName) {
List<String> invalidTermsConfigured = new ArrayList<String>();
for (String soTermInModel : termsToKeep) {
if (oboNameToIdentifier.get(soTermInModel) == null) {
invalidTermsConfigured.add(soTermInModel);
}
}
if (!invalidTermsConfigured.isEmpty()) {
throw new RuntimeException("The following terms specified in "
+ termsToKeepFileName + " are not valid Sequence Ontology terms"
+ " according to: " + oboFilename + ": "
+ StringUtil.prettyList(invalidTermsConfigured));
}
}
/**
* Represents a class/oboterm in the Model.
*
* @author julie sullivan
*/
public class OboOntology
{
private String oboTermIdentifier, oboTermName;
/**
* The constructor.
*
* @param oboTermIdentifier the sequence ontology term identifier, eg. SO:001
* @param oboTermName name of so term, eg. sequence_feature
*/
protected OboOntology(String oboTermIdentifier, String oboTermName) {
super();
this.oboTermIdentifier = oboTermIdentifier;
this.oboTermName = oboTermName;
}
/**
* @return the identifier, eg. SO:001
*/
protected String getOboTermIdentifier() {
return oboTermIdentifier;
}
/**
* @return obo term name, eg. sequence_feature
*/
protected String getOboTermName() {
return oboTermName;
}
}
}
|
package com.github.podd.client.api;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.openrdf.model.Model;
import org.openrdf.model.URI;
import org.openrdf.model.vocabulary.RDFS;
import org.openrdf.rio.RDFFormat;
import org.semanticweb.owlapi.model.IRI;
import com.github.ansell.restletutils.RestletUtilRole;
import com.github.podd.api.DanglingObjectPolicy;
import com.github.podd.api.DataReferenceVerificationPolicy;
import com.github.podd.api.data.DataReference;
import com.github.podd.utils.InferredOWLOntologyID;
import com.github.podd.utils.PoddUser;
/**
* An interface defining the operations that are currently implemented by the PODD Web Services.
*
* @author Peter Ansell p_ansell@yahoo.com
*/
public interface PoddClient
{
/**
* Fetch all of the properties for the given objects under the given parent with the given type.
*/
public static final String TEMPLATE_SPARQL_BY_TYPE_AND_PARENT_ALL_PROPERTIES =
new StringBuilder()
.append("CONSTRUCT { ?parent ?parentPredicate ?object . ?object a ?type . ?object ?predicate ?label . }")
.append(" WHERE { ?parent ?parentPredicate ?object . ?object a ?type . OPTIONAL { ?object ?predicate ?label . } }")
.append(" VALUES (?parent ?parentPredicate ?type ) { ( %s %s %s ) }").toString();
/**
* Fetch type and label and barcode statements for the given object type.
*/
public static final String TEMPLATE_SPARQL_BY_TYPE_WITH_LABEL = new StringBuilder()
.append("CONSTRUCT { ?object a ?type . ")
.append(" ?object <http://www.w3.org/2000/01/rdf-schema#label> ?label . ")
.append(" ?object <http://purl.org/podd/ns/poddScience#hasBarcode> ?barcode . } ")
.append(" WHERE { ?object a ?type . ")
.append(" OPTIONAL { ?object <http://www.w3.org/2000/01/rdf-schema#label> ?label . }")
.append(" OPTIONAL { ?object <http://purl.org/podd/ns/poddScience#hasBarcode> ?barcode . } }")
.append(" VALUES (?type) { ( %s ) }").toString();
/**
* Fetch all of the properties for the given objects with the given type
*/
public static final String TEMPLATE_SPARQL_BY_TYPE_ALL_PROPERTIES = new StringBuilder()
.append("CONSTRUCT { ?object a ?type . ?object ?predicate ?value . }")
.append(" WHERE { ?object a ?type . ?object ?predicate ?value . }").append(" VALUES (?type) { ( %s ) }")
.toString();
public static final String TEMPLATE_SPARQL_BY_TYPE_LABEL_STRSTARTS =
new StringBuilder()
.append("CONSTRUCT { ?object a ?type . ?object <http://www.w3.org/2000/01/rdf-schema#label> ?label . }")
.append(" WHERE { ?object a ?type . ?object <http://www.w3.org/2000/01/rdf-schema#label> ?label . FILTER(STRSTARTS(?label, \"%s\")) }")
.append(" VALUES (?type) { ( %s ) }").toString();
public static final String TEMPLATE_SPARQL_BY_BARCODE_STRSTARTS =
new StringBuilder()
.append("CONSTRUCT { ?object a ?type . ?object <http://purl.org/podd/ns/poddScience#hasBarcode> ?barcode . }")
.append(" WHERE { ?object a ?type . ?object <http://purl.org/podd/ns/poddScience#hasBarcode> ?barcode . FILTER(STRSTARTS(?barcode, \"%s\")) }")
.append(" VALUES (?type) { ( %s ) }").toString();
public static final String TEMPLATE_SPARQL_BY_BARCODE_MATCH_NO_TYPE =
new StringBuilder()
.append("CONSTRUCT { ?object a ?type . ?object <http://purl.org/podd/ns/poddScience#hasBarcode> ?barcode . }")
.append(" WHERE { ?object a ?type . ?object <http://purl.org/podd/ns/poddScience#hasBarcode> ?barcode . FILTER(STR(?barcode) = \"%s\") }")
.toString();
/**
* NOTE: Both the first and second arguments are the predicate, the first being the mapped
* predicate, and the second being the original predicate.
*/
public static final String TEMPLATE_SPARQL_BY_TYPE_LABEL_STRSTARTS_PREDICATE = new StringBuilder()
.append("CONSTRUCT { ?object a ?type . ?object %s ?label . }")
.append(" WHERE { ?object a ?type . ?object %s ?label . FILTER(STRSTARTS(?label, \"%s\")) }")
.append(" VALUES (?type) { ( %s ) }").toString();
/**
* Adds the given role for the given user to the given artifact
*
* @param userIdentifier
* @param role
* @param artifact
* @throws PoddClientException
* If there is an error setting the role for the given user.
*/
void addRole(String userIdentifier, RestletUtilRole role, InferredOWLOntologyID artifact)
throws PoddClientException;
/**
* Submits a request to the PODD Edit Artifact service to append to the artifact using the RDF
* triples that are contained in the given {@link InputStream}.
* <p>
* If the given ontologyId contains a version IRI and the version is out of date, a
* PoddClientException may be thrown if the server refuses to complete the operation due to the
* version being out of date. In these cases the ontology would need to be manually merged, and
* the update would need to be attempted again.
*
* @param ontologyId
* The {@link InferredOWLOntologyID} of the artifact to update.
* @param format
* The format of the RDF triples in the given InputStream.
* @param partialInputStream
* The partial set of RDF triples serialised into an InputStream in the given format
* that will be appended to the given artifact.
* @return An {@link InferredOWLOntologyID} object containing the details of the updated
* artifact.
*/
InferredOWLOntologyID appendArtifact(InferredOWLOntologyID ontologyId, InputStream partialInputStream,
RDFFormat format) throws PoddClientException;
InferredOWLOntologyID appendArtifact(InferredOWLOntologyID ontologyId, InputStream partialInputStream,
RDFFormat format, DanglingObjectPolicy danglingObjectPolicy,
DataReferenceVerificationPolicy dataReferenceVerificationPolicy) throws PoddClientException;
/**
* Appends multiple artifacts in PODD.
*
* @param uploadQueue
* A Map containing the keys for the artifacts, and Models containing the appended
* content for each of the artifacts.
* @return A map from the original keys to the new artifact keys after the changes.
* @throws PoddClientException
* If an error occurred.
*/
Map<InferredOWLOntologyID, InferredOWLOntologyID> appendArtifacts(Map<InferredOWLOntologyID, Model> uploadQueue)
throws PoddClientException;
/**
* Submits a request to the PODD File Reference Attachment service to attach a file reference
* from a registered repository into the artifact as a child of the given object IRI.
* <p>
* If the given ontologyId contains a version IRI and the version is out of date, a
* PoddClientException may be thrown if the server refuses to complete the operation due to the
* version being out of date. In these cases the ontology would need to be manually merged, and
* the update would need to be attempted again.
*
* @param ref
* The {@link DataReference} to attach, specifying the target artifact and parent
* object, a label, the repository alias, and the path of the file within that
* repository.
* @return An {@link InferredOWLOntologyID} object containing the details of the updated
* artifact.
*/
InferredOWLOntologyID attachDataReference(DataReference ref) throws PoddClientException;
/**
* Creates a new PoddUser using the details in the given PoddUser.
*
* @param user
* The user to create.
* @return An instance of PoddUser containing the actual details of the created user, except for
* the password.
* @throws PoddClientException
*/
PoddUser createUser(PoddUser user) throws PoddClientException;
/**
* Submits a request to the PODD Delete Artifact service to delete the artifact identified by
* the given IRI.
* <p>
* If the given ontologyId contains a version IRI and the version is out of date, a
* PoddClientException may be thrown if the server refuses to complete the operation due to the
* version being out of date. In these cases the ontology deletion would need to be attempted
* again using the up-to-date version, or alternatively, by omitting the version IRI.
*
* @param ontologyId
* The OWLOntologyID of the artifact to delete.
* @return True if the artifact was deleted and false otherwise.
*/
boolean deleteArtifact(InferredOWLOntologyID ontologyId) throws PoddClientException;
/**
* Performs a CONSTRUCT or DESCRIBE SPARQL query on the given artifact.
*
* @param queryString
* The CONSTRUCT or DESCRIBE SPARQL query on the given artifact.
* @param artifacts
* The PODD artifacts to perform the query on.
* @return A {@link Model} containing the results of the SPARQL query.
* @throws PoddClientException
* If an error occurred.
*/
Model doSPARQL(String queryString, Collection<InferredOWLOntologyID> artifacts) throws PoddClientException;
/**
* Submits a request to the PODD Get Artifact service to download the artifact identified by the
* given {@link InferredOWLOntologyID}, optionally including a version IRI if it is specifically
* known.
* <p>
* If the version is not currently available, the latest version will be returned.
*
* @param artifactId
* The {@link InferredOWLOntologyID} of the artifact to be downloaded, including
* version as necessary to fetch old versions.
* @return A model containing the RDF statements
* @throws PoddClientException
* If the artifact could not be downloaded for any reason
*/
Model downloadArtifact(InferredOWLOntologyID artifactId) throws PoddClientException;
/**
* Submits a request to the PODD Get Artifact service to download the artifact identified by the
* given {@link InferredOWLOntologyID}, optionally including a version IRI if it is specifically
* known.
* <p>
* If the version is not currently available, the latest version will be returned.
*
* @param artifactId
* The {@link InferredOWLOntologyID} of the artifact to be downloaded, including
* version as necessary to fetch old versions.
* @param outputStream
* The {@link OutputStream} to download the artifact to.
* @param format
* The format of the RDF information to be downloaded to the output stream.
* @throws PoddClientException
* If the artifact could not be downloaded for any reason
*/
void downloadArtifact(InferredOWLOntologyID artifactId, OutputStream outputStream, RDFFormat format)
throws PoddClientException;
/**
* Returns RDF statements containing the types and labels for all objects in the given artifacts
* with the given types. If there are no artifacts specified then all accessible artifacts will
* be searched. The type is the fully inferred type for the object, not just its concrete types.
*
* @param type
* The URI with the RDF Type to search for. Must not be null.
* @param artifacts
* An optional list of artifacts which are to be searched.
* @return A {@link Model} containing the RDF statements which describe the matching objects.
* @throws PoddClientException
* If there is an exception while executing the query.
*/
Model getObjectsByType(URI type, Collection<InferredOWLOntologyID> artifacts) throws PoddClientException;
/**
* Returns RDF statements containing the types and labels for all objects in the given artifacts
* with the given types linked to from the given parent with the given predicate. If there are
* no artifacts specified then all accessible artifacts will be searched. The type is the fully
* inferred type for the object, not just its concrete types, and the parentPredicate may be a
* super-property of the concrete property that was used.
*
* @param parent
* The URI of the parent object whose linked objects are to be matched.
* @param parentPredicate
* The predicate (or a super-property of it) linking the parent to the objects.
* @param type
* The URI with the RDF Type to search for. Must not be null.
* @param artifacts
* An optional list of artifacts which are to be searched.
* @return A {@link Model} containing the RDF statements which describe the matching objects.
* @throws PoddClientException
* If there is an exception while executing the query.
*/
Model getObjectsByTypeAndParent(URI parent, URI parentPredicate, URI type,
Collection<InferredOWLOntologyID> artifacts) throws PoddClientException;
/**
* Returns RDF statements containing the types and labels for all objects in the given artifacts
* with the given types, whose labels start with the given prefix. If there are no artifacts
* specified then all accessible artifacts will be searched. The type is the fully inferred type
* for the object, not just its concrete types.
*
* @param type
* The URI with the RDF Type to search for. Must not be null.
* @param labelPrefix
* The string which must start the {@link RDFS#LABEL} for the object for it to be
* matched.
* @param artifacts
* An optional list of artifacts which are to be searched.
* @return A {@link Model} containing the RDF statements which describe the matching objects.
* @throws PoddClientException
* If there is an exception while executing the query.
*/
Model getObjectsByTypeAndPrefix(URI type, String labelPrefix, Collection<InferredOWLOntologyID> artifacts)
throws PoddClientException;
String getPoddServerUrl();
/**
*
* @param userIdentifier
* The user identifier to fetch details for, or null to fetch the current user
* details.
* @return A {@link PoddUser} object containing the relevant details for the user.
* @throws PoddClientException
* If the user is not accessible, including if the user does not exist.
*/
PoddUser getUserDetails(String userIdentifier) throws PoddClientException;
/**
* Returns the current login status.
*
* @return True if the client was logged in after the last request, and false otherwise.
*/
boolean isLoggedIn();
/**
* Lists the artifacts that are accessible and returns the details as a {@link Model}.
*
* @param published
* If true, published artifacts are requested. At least one of published and
* unpublished must be true.
* @param unpublished
* If true, the unpublished artifacts accessible to the current user are requested.
* At least one of published and unpublished must be true.
* @return A Model containing RDF statements describing the matching artifacts.
* @throws PoddClientException
* If an error occurred.
*/
Model listArtifacts(boolean published, boolean unpublished) throws PoddClientException;
/**
*
* @return A list of Strings identifying the possible values for the repository alias in calls
* to {@link #attachDataReference(DataReference)}.
*/
List<String> listDataReferenceRepositories() throws PoddClientException;
/**
*
* @return A set of {@link PoddArtifact}s describing the published artifacts that the user has
* access to, including their {@link InferredOWLOntologyID}s and top object labels. This
* may include artifacts that the user cannot modify or fork.
*/
Set<PoddArtifact> listPublishedArtifacts() throws PoddClientException;
/**
* List the roles that have been assigned to the given artifact.
*
* @param artifactId
* The {@link InferredOWLOntologyID} identifying an artifact to fetch roles for.
*
* @return A map of {@link RestletUtilRole}s identifying PODD roles attached to the given
* artifact to users who have each role.
* @throws PoddClientException
*/
Map<RestletUtilRole, Collection<String>> listRoles(InferredOWLOntologyID artifactId) throws PoddClientException;
/**
* List the roles that have been assigned to the given user, or the currently logged in user if
* the user is not specified.
*
* @param userIdentifier
* If not null, specifies a specific user to request information about.
*
* @return A map of {@link RestletUtilRole}s identifying roles that have been given to the user,
* optionally to artifacts that the role maps to for this user.
* @throws PoddClientException
*/
Map<RestletUtilRole, Collection<URI>> listRoles(String userIdentifier) throws PoddClientException;
/**
*
* @return A set of {@link PoddArtifact}s describing the unpublished artifacts that the user
* has access to, including their {@link InferredOWLOntologyID}s and labels.
*/
Set<PoddArtifact> listUnpublishedArtifacts() throws PoddClientException;
/**
*
* @return A list of the current users registered with the system, masked by the abilities of
* the current user to view each user's existence. If the current user is a repository
* administrator they should be able to view all users. Some other roles may only be
* able to see some other users.
*/
List<PoddUser> listUsers() throws PoddClientException;
/**
* Submits a request to the PODD Login service to login the user with the given username and
* password.
* <p>
* Once the user is logged in, future queries using this client, prior to calling the logout
* method, will be authenticated as the given user, barring any session timeouts that may occur.
* <p>
* If the given user is already logged in, this method may return true immediately without
* reauthentication.
*
* @param username
* The username to submit to the login service.
* @param password
* The password to submit to the login service.
* @return True if the user was successfully logged in and false otherwise.
*/
boolean login(String username, String password) throws PoddClientException;
/**
* Submits a request to the PODD Logout service to logout the user and close the session.
*
* @return True if the user was successfully logged out and false otherwise.
*/
boolean logout() throws PoddClientException;
/**
* Submits a request to the PODD Publish Artifact service to publish an artifact that was
* previously unpublished.
* <p>
* If the given ontologyId contains a version IRI and the version is out of date, a
* PoddClientException may be thrown if the server refuses to complete the operation due to the
* version being out of date. In these cases the ontology would need to be manually merged, and
* the publish would need to be attempted again.
*
* @param ontologyId
* The {@link InferredOWLOntologyID} of the unpublished artifact that is to be
* published.
* @return The {@link InferredOWLOntologyID} of the artifact that was published. Artifacts may
* be given a different IRI after they are published, to distinguish them from the
* previously unpublished artifact.
*/
InferredOWLOntologyID publishArtifact(InferredOWLOntologyID ontologyId) throws PoddClientException;
/**
* Removes the given role for the given user to the given artifact.
*
* @param userIdentifier
* @param role
* @param artifact
* @throws PoddClientException
* If there is an error removing the role for the given user.
*/
void removeRole(String userIdentifier, RestletUtilRole role, InferredOWLOntologyID artifact)
throws PoddClientException;
void setPoddServerUrl(String serverUrl);
/**
* Submits a request to the PODD Unpublish Artifact service to unpublish an artifact that was
* previously published.
* <p>
* If the given ontologyId contains a version IRI and the version is out of date, a
* PoddClientException will be thrown, as the published artifact must have an accurate version
* to ensure consistency. To avoid this, the operation may be attempted omitting the version
* IRI.
*
* @param ontologyId
* @return The {@link InferredOWLOntologyID} of the artifact after it has been unpublished.
* Artifacts may be given a different IRI after they are unpublished, to distinguish them
* from the previously available artifact.
*/
InferredOWLOntologyID unpublishArtifact(InferredOWLOntologyID ontologyId) throws PoddClientException;
/**
* Submits a request to the PODD Edit Artifact service to update the entire artifact, replacing
* the existing content with the content in the given {@link InputStream}.
* <p>
* If the given ontologyId contains a version IRI and the version is out of date, a
* PoddClientException may be thrown if the server refuses to complete the operation due to the
* version being out of date. In these cases the ontology would need to be manually merged, and
* the update would need to be attempted again.
*
* @param ontologyId
* The OWLOntologyID of the Artifact to update.
* @param format
* The format of the RDF triples in the given InputStream.
* @param fullInputStream
* The full set of RDF triples serialised into the InputStream in the given format
* that will be used to update the given artifact.
* @return An {@link InferredOWLOntologyID} object containing the details of the updated
* artifact.
*/
InferredOWLOntologyID updateArtifact(InferredOWLOntologyID ontologyId, InputStream fullInputStream, RDFFormat format)
throws PoddClientException;
/**
* Submits a request to the PODD Load Artifact service.
*
* @param input
* The {@link InputStream} containing the artifact to load.
* @param format
* The format of the RDF triples in the given InputStream.
* @return An {@link InferredOWLOntologyID} object containing the details of the loaded
* artifact. The {@link InferredOWLOntologyID#getOntologyIRI()} method can be used to
* get the artifact IRI for future requests, while the
* {@link InferredOWLOntologyID#getVersionIRI()} method can be used to get the version
* IRI to determine if there have been changes to the ontology in future.
*/
InferredOWLOntologyID uploadNewArtifact(InputStream input, RDFFormat format) throws PoddClientException;
InferredOWLOntologyID uploadNewArtifact(InputStream input, RDFFormat format,
DanglingObjectPolicy danglingObjectPolicy, DataReferenceVerificationPolicy dataReferenceVerificationPolicy)
throws PoddClientException;
/**
* Submits a request to the PODD Load Artifact service.
*
* @param model
* The {@link Model} containing the artifact to load.
* @return An {@link InferredOWLOntologyID} object containing the details of the loaded
* artifact. The {@link InferredOWLOntologyID#getOntologyIRI()} method can be used to
* get the artifact IRI for future requests, while the
* {@link InferredOWLOntologyID#getVersionIRI()} method can be used to get the version
* IRI to determine if there have been changes to the ontology in future.
*/
InferredOWLOntologyID uploadNewArtifact(Model model) throws PoddClientException;
Model getObjectsByTypePredicateAndPrefix(URI type, URI predicate, String labelPrefix,
Collection<InferredOWLOntologyID> artifacts) throws PoddClientException;
/**
* Try to automatically login using the properties defined in poddclient.properties.
*
* @return True if the login was successful and false if it was unsuccessful.
* @throws PoddClientException
* If there was an exception accessing PODD.
*/
boolean autologin() throws PoddClientException;
Model getObjectsByTypeAndBarcode(URI type, String barcode, Collection<InferredOWLOntologyID> artifacts)
throws PoddClientException;
Model getObjectsByBarcode(String barcode, Collection<InferredOWLOntologyID> artifacts) throws PoddClientException;
}
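// Usage sketch (illustrative only, not part of the original API): shows how a PoddClient
// implementation might be used to download an existing artifact to a local file. The helper
// class below and the choice of Turtle as the download format are assumptions for illustration.
class PoddClientDownloadSketch {
static void downloadToFile(PoddClient client, com.github.podd.utils.InferredOWLOntologyID artifactId,
java.io.File target) throws PoddClientException, java.io.IOException {
// Stream the artifact, in Turtle format, into the given file.
try (java.io.OutputStream out = new java.io.FileOutputStream(target)) {
client.downloadArtifact(artifactId, out, org.openrdf.rio.RDFFormat.TURTLE);
}
}
}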
|
package com.github.podd.client.api;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.openrdf.model.Model;
import org.openrdf.model.URI;
import org.openrdf.model.vocabulary.RDFS;
import org.openrdf.rio.RDFFormat;
import org.semanticweb.owlapi.model.IRI;
import com.github.ansell.restletutils.RestletUtilRole;
import com.github.podd.api.DanglingObjectPolicy;
import com.github.podd.api.DataReferenceVerificationPolicy;
import com.github.podd.api.data.DataReference;
import com.github.podd.utils.InferredOWLOntologyID;
import com.github.podd.utils.PoddUser;
/**
* An interface defining the operations that are currently implemented by the PODD Web Services.
*
* @author Peter Ansell p_ansell@yahoo.com
*/
public interface PoddClient
{
/**
* Fetch all of the properties for the given object URI.
*/
public static final String TEMPLATE_SPARQL_BY_URI = new StringBuilder()
.append("CONSTRUCT { ?object ?predicate ?value . }").append(" WHERE { ?object ?predicate ?value . }")
.append(" VALUES ( ?object ) { ( %s ) }").toString();
/**
* Fetch all of the properties for the given objects under the given parent with the given type.
*/
public static final String TEMPLATE_SPARQL_BY_TYPE_AND_PARENT_ALL_PROPERTIES =
new StringBuilder()
.append("CONSTRUCT { ?parent ?parentPredicate ?object . ?object a ?type . ?object ?predicate ?label . }")
.append(" WHERE { ?parent ?parentPredicate ?object . ?object a ?type . OPTIONAL { ?object ?predicate ?label . } }")
.append(" VALUES (?parent ?parentPredicate ?type ) { ( %s %s %s ) }").toString();
/**
* Fetch type and label and barcode statements for the given object type.
*/
public static final String TEMPLATE_SPARQL_BY_TYPE_WITH_LABEL = new StringBuilder()
.append("CONSTRUCT { ?object a ?type . ")
.append(" ?object <http://www.w3.org/2000/01/rdf-schema#label> ?label . ")
.append(" ?object <http://purl.org/podd/ns/poddScience#hasBarcode> ?barcode . } ")
.append(" WHERE { ?object a ?type . ")
.append(" OPTIONAL { ?object <http://www.w3.org/2000/01/rdf-schema#label> ?label . }")
.append(" OPTIONAL { ?object <http://purl.org/podd/ns/poddScience#hasBarcode> ?barcode . } }")
.append(" VALUES (?type) { ( %s ) }").toString();
/**
* Fetch all of the properties for the given objects with the given type
*/
public static final String TEMPLATE_SPARQL_BY_TYPE_ALL_PROPERTIES = new StringBuilder()
.append("CONSTRUCT { ?object a ?type . ?object ?predicate ?value . }")
.append(" WHERE { ?object a ?type . ?object ?predicate ?value . }").append(" VALUES (?type) { ( %s ) }")
.toString();
public static final String TEMPLATE_SPARQL_BY_TYPE_LABEL_STRSTARTS =
new StringBuilder()
.append("CONSTRUCT { ?object a ?type . ?object <http://www.w3.org/2000/01/rdf-schema#label> ?label . }")
.append(" WHERE { ?object a ?type . ?object <http://www.w3.org/2000/01/rdf-schema#label> ?label . FILTER(STRSTARTS(?label, \"%s\")) }")
.append(" VALUES (?type) { ( %s ) }").toString();
public static final String TEMPLATE_SPARQL_BY_BARCODE_STRSTARTS =
new StringBuilder()
.append("CONSTRUCT { ?object a ?type . ?object <http://purl.org/podd/ns/poddScience#hasBarcode> ?barcode . ?object ?property ?value . }")
.append(" WHERE { ?object a ?type . ?object <http://purl.org/podd/ns/poddScience#hasBarcode> ?barcode . FILTER(STRSTARTS(?barcode, \"%s\")). ?object ?property ?value . }")
.append(" VALUES (?type) { ( %s ) }").toString();
public static final String TEMPLATE_SPARQL_BY_BARCODE_MATCH_NO_TYPE =
new StringBuilder()
.append("CONSTRUCT { ?object <http://purl.org/podd/ns/poddScience#hasBarcode> ?barcode . ?object ?property ?value . }")
.append(" WHERE { ?object <http://purl.org/podd/ns/poddScience#hasBarcode> ?barcode . FILTER(STR(?barcode) = \"%s\"). ?object ?property ?value . }")
.toString();
public static final String TEMPLATE_SPARQL_CONTAINERS_TO_MATERIAL_AND_GENOTYPE =
new StringBuilder()
.append("CONSTRUCT { ?container <http://purl.org/podd/ns/poddScience
.append(" WHERE { ?container <http://purl.org/podd/ns/poddScience
.append(" VALUES (?container) { %s }").toString();
/**
* NOTE: Both the first and second arguments are the predicate, the first being the mapped
* predicate, and the second being the original predicate.
*/
public static final String TEMPLATE_SPARQL_BY_TYPE_LABEL_STRSTARTS_PREDICATE = new StringBuilder()
.append("CONSTRUCT { ?object a ?type . ?object %s ?label . }")
.append(" WHERE { ?object a ?type . ?object %s ?label . FILTER(STRSTARTS(?label, \"%s\")) }")
.append(" VALUES (?type) { ( %s ) }").toString();
/**
* NOTE: Both the first and second arguments are the predicate, the first being the mapped
* predicate, and the second being the original predicate.
*/
public static final String TEMPLATE_SPARQL_BY_PREDICATE = new StringBuilder()
.append("CONSTRUCT { ?object %s ?property . }").append(" WHERE { ?object %s ?property . }").toString();
public static final String TEMPLATE_SPARQL_TRAY_POT_NUMBER_TO_BARCODE = new StringBuilder().append("CONSTRUCT { ")
.append(" ?pot <http://purl.org/podd/ns/poddScience#hasPotNumberTray> ?potNumberTray . ")
.append(" ?pot <http://purl.org/podd/ns/poddScience#hasPotNumber> ?potNumberOverall . ")
.append(" ?pot <http://purl.org/podd/ns/poddScience#hasBarcode> ?potBarcode . }")
.append(" WHERE { ?tray <http://purl.org/podd/ns/poddScience#hasBarcode> ?trayBarcode . ")
.append(" ?tray <http://purl.org/podd/ns/poddScience#hasPot> ?pot . ")
.append(" ?pot <http://purl.org/podd/ns/poddScience#hasPotNumberTray> ?potNumberTray . ")
.append(" ?pot <http://purl.org/podd/ns/poddScience#hasPotNumber> ?potNumberOverall . ")
.append(" ?pot <http://purl.org/podd/ns/poddScience#hasBarcode> ?potBarcode . ")
.append(" FILTER(STR(?trayBarcode) = \"%s\") }").toString();
public static final String TEMPLATE_SPARQL_TRAY_POT_NUMBER_TO_BARCODE_ALL = new StringBuilder().append("CONSTRUCT { ")
.append(" ?tray a ?trayType . ")
.append(" ?tray <http://purl.org/podd/ns/poddScience#hasBarcode> ?trayBarcode . ")
.append(" ?tray <http://purl.org/podd/ns/poddScience#hasPot> ?pot . ")
.append(" ?pot a ?potType . ")
.append(" ?pot <http://purl.org/podd/ns/poddScience#hasPotNumberTray> ?potNumberTray . ")
.append(" ?pot <http://purl.org/podd/ns/poddScience#hasPotNumber> ?potNumberOverall . ")
.append(" ?pot <http://purl.org/podd/ns/poddScience#hasBarcode> ?potBarcode . }")
.append(" WHERE { ")
.append(" ?tray a ?trayType . ")
.append(" ?tray <http://purl.org/podd/ns/poddScience#hasBarcode> ?trayBarcode . ")
.append(" ?tray <http://purl.org/podd/ns/poddScience#hasPot> ?pot . ")
.append(" ?pot a ?potType . ")
.append(" ?pot <http://purl.org/podd/ns/poddScience#hasPotNumberTray> ?potNumberTray . ")
.append(" ?pot <http://purl.org/podd/ns/poddScience#hasPotNumber> ?potNumberOverall . ")
.append(" ?pot <http://purl.org/podd/ns/poddScience#hasBarcode> ?potBarcode . }").toString();
/**
* Adds the given role for the given user to the given artifact.
*
* @param userIdentifier
* @param role
* @param artifact
* @throws PoddClientException
* If there is an error setting the role for the given user.
*/
void addRole(String userIdentifier, RestletUtilRole role, InferredOWLOntologyID artifact)
throws PoddClientException;
/**
* Submits a request to the PODD Edit Artifact service to append to the artifact using the RDF
* triples that are contained in the given {@link InputStream}.
* <p>
* If the given ontologyId contains a version IRI and the version is out of date, a
* PoddClientException may be thrown if the server refuses to complete the operation due to the
* version being out of date. In these cases the ontology would need to be manually merged, and
* the update would need to be attempted again.
*
* @param ontologyIRI
* The IRI of the Artifact to update.
* @param format
* The format of the RDF triples in the given InputStream.
* @param partialInputStream
* The partial set of RDF triples serialised into an InputStream in the given format
* that will be appended to the given artifact.
* @return An {@link InferredOWLOntologyID} object containing the details of the updated
* artifact.
*/
InferredOWLOntologyID appendArtifact(InferredOWLOntologyID ontologyId, InputStream partialInputStream,
RDFFormat format) throws PoddClientException;
InferredOWLOntologyID appendArtifact(InferredOWLOntologyID ontologyId, InputStream partialInputStream,
RDFFormat format, DanglingObjectPolicy danglingObjectPolicy,
DataReferenceVerificationPolicy dataReferenceVerificationPolicy) throws PoddClientException;
/**
* Appends multiple artifacts in PODD.
*
* @param uploadQueue
* A Map containing the keys for the artifacts, and Models containing the appended
* content for each of the artifacts.
* @return A map from the original keys to the new artifact keys after the changes.
* @throws PoddClientException
* If an error occurred.
*/
Map<InferredOWLOntologyID, InferredOWLOntologyID> appendArtifacts(Map<InferredOWLOntologyID, Model> uploadQueue)
throws PoddClientException;
/**
* Submits a request to the PODD File Reference Attachment service to attach a file reference
* from a registered repository into the artifact as a child of the given object IRI.
* <p>
* If the given ontologyId contains a version IRI and the version is out of date, a
* PoddClientException may be thrown if the server refuses to complete the operation due to the
* version being out of date. In these cases the ontology would need to be manually merged, and
* the update would need to be attempted again.
*
* @param ref
* The {@link DataReference} describing the data to attach, including the artifact and
* the IRI of the object it should be attached to, the alias of the registered
* repository that the data is located in, the path inside that repository, and an
* optional label.
* @return An {@link InferredOWLOntologyID} object containing the details of the updated
* artifact.
*/
InferredOWLOntologyID attachDataReference(DataReference ref) throws PoddClientException;
/**
* Creates a new PoddUser using the details in the given PoddUser.
*
* @param user
* The user to create.
* @return An instance of PoddUser containing the actual details of the created user, except for
* the password.
* @throws PoddClientException
*/
PoddUser createUser(PoddUser user) throws PoddClientException;
/**
* Submits a request to the PODD Delete Artifact service to delete the artifact identified by
* the given IRI.
* <p>
* If the given ontologyId contains a version IRI and the version is out of date, a
* PoddClientException may be thrown if the server refuses to complete the operation due to the
* version being out of date. In these cases the ontology deletion would need to be attempted
* again using the up to date version, or alternatively, by omitting the version IRI.
*
* @param ontologyId
* The OWLOntologyID of the artifact to delete.
* @return True if the artifact was deleted and false otherwise.
*/
boolean deleteArtifact(InferredOWLOntologyID ontologyId) throws PoddClientException;
/**
* Performs a CONSTRUCT or DESCRIBE SPARQL query on the given artifact.
*
* @param queryString
* The CONSTRUCT or DESCRIBE SPARQL query to execute against the given artifacts.
* @param artifacts
* The PODD artifacts to perform the query on.
* @return A {@link Model} containing the results of the SPARQL query.
* @throws PoddClientException
* If an error occurred.
*/
Model doSPARQL(String queryString, Collection<InferredOWLOntologyID> artifacts) throws PoddClientException;
/**
* Submits a request to the PODD Get Artifact service to download the artifact identified by the
* given {@link InferredOWLOntologyID}, optionally including a version IRI if it is specifically
* known.
* <p>
* If the version is not currently available, the latest version will be returned.
*
* @param artifactId
* The {@link InferredOWLOntologyID} of the artifact to be downloaded, including
* version as necessary to fetch old versions.
* @return A model containing the RDF statements
* @throws PoddClientException
* If the artifact could not be downloaded for any reason
*/
Model downloadArtifact(InferredOWLOntologyID artifactId) throws PoddClientException;
/**
* Submits a request to the PODD Get Artifact service to download the artifact identified by the
* given {@link InferredOWLOntologyID}, optionally including a version IRI if it is specifically
* known.
* <p>
* If the version is not currently available, the latest version will be returned.
*
* @param artifactId
* The {@link InferredOWLOntologyID} of the artifact to be downloaded, including
* version as necessary to fetch old versions.
* @param outputStream
* The {@link OutputStream} to download the artifact to.
* @param format
* The format of the RDF information to be downloaded to the output stream.
* @throws PoddClientException
* If the artifact could not be downloaded for any reason
*/
void downloadArtifact(InferredOWLOntologyID artifactId, OutputStream outputStream, RDFFormat format)
throws PoddClientException;
/**
* Returns RDF statements containing all of the directly linked statements from the URI.
*
* @param object
* The URI of the object to search for. Must not be null.
* @param artifacts
* An optional list of artifacts which are to be searched.
* @return A {@link Model} containing the RDF statements which describe the matching object.
* @throws PoddClientException
* If there is an exception while executing the query.
*/
Model getObjectByURI(URI object, Collection<InferredOWLOntologyID> artifacts) throws PoddClientException;
/**
* Returns RDF statements containing the types and labels for all objects in the given artifacts
* with the given types. If there are no artifacts specified then all accessible artifacts will
* be searched. The type is the fully inferred type for the object, not just its concrete types.
*
* @param type
* The URI with the RDF Type to search for. Must not be null.
* @param artifacts
* An optional list of artifacts which are to be searched.
* @return A {@link Model} containing the RDF statements which describe the matching objects.
* @throws PoddClientException
* If there is an exception while executing the query.
*/
Model getObjectsByType(URI type, Collection<InferredOWLOntologyID> artifacts) throws PoddClientException;
/**
* Returns RDF statements for all objects in the given artifacts that use the given predicate.
*
* @param predicate
* The URI with the RDF predicate to search for. Must not be null.
* @param artifacts
* An optional list of artifacts which are to be searched.
* @return A {@link Model} containing the RDF statements which describe the matching objects.
* @throws PoddClientException
* If there is an exception while executing the query.
*/
Model getObjectsByPredicate(URI predicate, Collection<InferredOWLOntologyID> artifacts) throws PoddClientException;
/**
* Returns RDF statements containing the types and labels for all objects in the given artifacts
* with the given types linked to from the given parent with the given predicate. If there are
* no artifacts specified then all accessible artifacts will be searched. The type is the fully
* inferred type for the object, not just its concrete types, and the parentPredicate may be a
* super-property of the concrete property that was used.
*
* @param parent
* The URI of the parent object to search under. Must not be null.
* @param parentPredicate
* The URI of the predicate (or a super-property of it) linking the parent to the
* matching objects. Must not be null.
* @param type
* The URI with the RDF Type to search for. Must not be null.
* @param artifacts
* An optional list of artifacts which are to be searched.
* @return A {@link Model} containing the RDF statements which describe the matching objects.
* @throws PoddClientException
* If there is an exception while executing the query.
*/
Model getObjectsByTypeAndParent(URI parent, URI parentPredicate, URI type,
Collection<InferredOWLOntologyID> artifacts) throws PoddClientException;
/**
* Returns RDF statements containing the types and labels for all objects in the given artifacts
* with the given types, whose labels start with the given prefix. If there are no artifacts
* specified then all accessible artifacts will be searched. The type is the fully inferred type
* for the object, not just its concrete types.
*
* @param type
* The URI with the RDF Type to search for. Must not be null.
* @param labelPrefix
* The string which must start the {@link RDFS#LABEL} for the object for it to be
* matched.
* @param artifacts
* An optional list of artifacts which are to be searched.
* @return A {@link Model} containing the RDF statements which describe the matching objects.
* @throws PoddClientException
* If there is an exception while executing the query.
*/
Model getObjectsByTypeAndPrefix(URI type, String labelPrefix, Collection<InferredOWLOntologyID> artifacts)
throws PoddClientException;
String getPoddServerUrl();
/**
*
* @param userIdentifier
* The user identifier to fetch details for, or null to fetch the current user
* details.
* @return A {@link PoddUser} object containing the relevant details for the user.
* @throws PoddClientException
* If the user is not accessible, including if the user does not exist.
*/
PoddUser getUserDetails(String userIdentifier) throws PoddClientException;
/**
* Returns the current login status.
*
* @return True if the client was logged in after the last request, and false otherwise.
*/
boolean isLoggedIn();
/**
* Lists the artifacts that are accessible and returns the details as a {@link Model}.
*
* @param published
* If true, published artifacts are requested. At least one of published and
* unpublished must be true.
* @param unpublished
* If true, the unpublished artifacts accessible to the current user are requested.
* At least one of published and unpublished must be true.
* @return A Model containing RDF statements describing the matching artifacts.
* @throws PoddClientException
* If an error occurred.
*/
Model listArtifacts(boolean published, boolean unpublished) throws PoddClientException;
/**
*
* @return A list of Strings identifying the possible values for the repository alias in calls
* to {@link #attachDataReference(DataReference)}.
*/
List<String> listDataReferenceRepositories() throws PoddClientException;
/**
*
* @return A set of {@link PoddArtifact}s describing the published artifacts that the user has
* access to, including their {@link InferredOWLOntologyID}s and top object labels. This
* may include artifacts that the user cannot modify or fork.
*/
Set<PoddArtifact> listPublishedArtifacts() throws PoddClientException;
/**
* List the roles that have been assigned to the given artifact.
*
* @param artifactId
* The {@link InferredOWLOntologyID} identifying an artifact to fetch roles for.
*
* @return A map of {@link RestletUtilRole}s identifying PODD roles attached to the given
* artifact to users who have each role.
* @throws PoddClientException
*/
Map<RestletUtilRole, Collection<String>> listRoles(InferredOWLOntologyID artifactId) throws PoddClientException;
/**
* List the roles that have been assigned to the given user, or the currently logged in user if
* the user is not specified.
*
* @param userIdentifier
* If not null, specifies a specific user to request information about.
*
* @return A map of {@link RestletUtilRole}s identifying roles that have been given to the user,
* optionally to artifacts that the role maps to for this user.
* @throws PoddClientException
*/
Map<RestletUtilRole, Collection<URI>> listRoles(String userIdentifier) throws PoddClientException;
/**
*
* @return A set of {@link PoddArtifact}s describing the unpublished artifacts that the user
* has access to, including their {@link InferredOWLOntologyID}s and labels.
*/
Set<PoddArtifact> listUnpublishedArtifacts() throws PoddClientException;
/**
*
* @return A list of the current users registered with the system, masked by the abilities of
* the current user to view each user's existence. If the current user is a repository
* administrator they should be able to view all users. Some other roles may only be
* able to see some other users.
*/
List<PoddUser> listUsers() throws PoddClientException;
/**
* Submits a request to the PODD Login service to login the user with the given username and
* password.
* <p>
* Once the user is logged in, future queries using this client, prior to calling the logout
* method, will be authenticated as the given user, barring any session timeouts that may occur.
* <p>
* If the given user is already logged in, this method may return true immediately without
* reauthentication.
*
* @param username
* The username to submit to the login service.
* @param password
* The password to submit to the login service.
* @return True if the user was successfully logged in and false otherwise.
*/
boolean login(String username, String password) throws PoddClientException;
/**
* Submits a request to the PODD Logout service to logout the user and close the session.
*
* @return True if the user was successfully logged out and false otherwise.
*/
boolean logout() throws PoddClientException;
/**
* Submits a request to the PODD Publish Artifact service to publish an artifact that was
* previously unpublished.
* <p>
* If the given ontologyId contains a version IRI and the version is out of date, a
* PoddClientException may be thrown if the server refuses to complete the operation due to the
* version being out of date. In these cases the ontology would need to be manually merged, and
* the publish would need to be attempted again.
*
* @param ontologyId
* The {@link InferredOWLOntologyID} of the unpublished artifact that is to be
* published.
* @return The {@link InferredOWLOntologyID} of the artifact that was published. Artifacts may
* be given a different IRI after they are published, to distinguish them from the
* previously unpublished artifact.
*/
InferredOWLOntologyID publishArtifact(InferredOWLOntologyID ontologyId) throws PoddClientException;
/**
* Removes the given role for the given user to the given artifact.
*
* @param userIdentifier
* @param role
* @param artifact
* @throws PoddClientException
* If there is an error removing the role for the given user.
*/
void removeRole(String userIdentifier, RestletUtilRole role, InferredOWLOntologyID artifact)
throws PoddClientException;
void setPoddServerUrl(String serverUrl);
/**
* Submits a request to the PODD Unpublish Artifact service to unpublish an artifact that was
* previously published.
* <p>
* If the given ontologyId contains a version IRI and the version is out of date, a
* PoddClientException will be thrown, as the published artifact must have an accurate version
* to ensure consistency. To avoid this, the operation may be attempted omitting the version
* IRI.
*
* @param ontologyId
* @return The {@link InferredOWLOntologyID} of the artifact after it has been unpublished.
* Artifacts may be given a different IRI after they are unpublished, to distinguish them
* from the previously available artifact.
*/
InferredOWLOntologyID unpublishArtifact(InferredOWLOntologyID ontologyId) throws PoddClientException;
/**
* Submits a request to the PODD Edit Artifact service to update the entire artifact, replacing
* the existing content with the content in the given {@link InputStream}.
* <p>
* If the given ontologyId contains a version IRI and the version is out of date, a
* PoddClientException may be thrown if the server refuses to complete the operation due to the
* version being out of date. In these cases the ontology would need to be manually merged, and
* the update would need to be attempted again.
*
* @param ontologyId
* The OWLOntologyID of the Artifact to update.
* @param format
* The format of the RDF triples in the given InputStream.
* @param fullInputStream
* The full set of RDF triples serialised into the InputStream in the given format
* that will be used to update the given artifact.
* @return An {@link InferredOWLOntologyID} object containing the details of the updated
* artifact.
*/
InferredOWLOntologyID updateArtifact(InferredOWLOntologyID ontologyId, InputStream fullInputStream, RDFFormat format)
throws PoddClientException;
/**
* Submits a request to the PODD Load Artifact service.
*
* @param input
* The {@link InputStream} containing the artifact to load.
* @param format
* The format of the RDF triples in the given InputStream.
* @return An {@link InferredOWLOntologyID} object containing the details of the loaded
* artifact. The {@link InferredOWLOntologyID#getOntologyIRI()} method can be used to
* get the artifact IRI for future requests, while the
* {@link InferredOWLOntologyID#getVersionIRI()} method can be used to get the version
* IRI to determine if there have been changes to the ontology in future.
*/
InferredOWLOntologyID uploadNewArtifact(InputStream input, RDFFormat format) throws PoddClientException;
InferredOWLOntologyID uploadNewArtifact(InputStream input, RDFFormat format,
DanglingObjectPolicy danglingObjectPolicy, DataReferenceVerificationPolicy dataReferenceVerificationPolicy)
throws PoddClientException;
/**
* Submits a request to the PODD Load Artifact service.
*
* @param model
* The {@link Model} containing the artifact to load.
* @return An {@link InferredOWLOntologyID} object containing the details of the loaded
* artifact. The {@link InferredOWLOntologyID#getOntologyIRI()} method can be used to
* get the artifact IRI for future requests, while the
* {@link InferredOWLOntologyID#getVersionIRI()} method can be used to get the version
* IRI to determine if there have been changes to the ontology in future.
*/
InferredOWLOntologyID uploadNewArtifact(Model model) throws PoddClientException;
Model getObjectsByTypePredicateAndPrefix(URI type, URI predicate, String labelPrefix,
Collection<InferredOWLOntologyID> artifacts) throws PoddClientException;
/**
* Try to automatically login using the properties defined in poddclient.properties.
*
* @return True if the login was successful and false if it was unsuccessful.
* @throws PoddClientException
* If there was an exception accessing PODD.
*/
boolean autologin() throws PoddClientException;
Model getObjectsByTypeAndBarcode(URI type, String barcode, Collection<InferredOWLOntologyID> artifacts)
throws PoddClientException;
Model getObjectsByBarcode(String barcode, Collection<InferredOWLOntologyID> artifacts) throws PoddClientException;
}
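// Usage sketch (illustrative only, not part of the original interface): a typical
// login / upload / list / logout sequence against a PoddClient implementation. The server
// URL, credentials and input data are assumptions for illustration.
class PoddClientUsageSketch {
static InferredOWLOntologyID uploadAndList(PoddClient client, InputStream artifactData)
throws PoddClientException {
client.setPoddServerUrl("https://podd.example.org/podd"); // hypothetical server URL
if (!client.login("exampleUser", "examplePassword")) { // hypothetical credentials
throw new IllegalStateException("Login failed");
}
try {
// Upload a new artifact serialised as RDF/XML and remember its ontology/version IRIs.
InferredOWLOntologyID uploaded = client.uploadNewArtifact(artifactData, RDFFormat.RDFXML);
// List the unpublished artifacts now visible to this user; the Model describes them in RDF.
Model unpublished = client.listArtifacts(false, true);
return uploaded;
} finally {
client.logout();
}
}
}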
|
package org.gluu.oxtrust.api.client;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.X509Certificate;
import java.util.logging.Level;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.Feature;
import org.glassfish.jersey.logging.LoggingFeature;
import org.gluu.oxtrust.api.client.saml.TrustRelationshipClient;
import java.util.logging.Logger;
/**
* General-purpose oxTrust REST web service client.
*
* @author Dmitry Ognyannikov
*/
public class OxTrustClient {
private final String baseURI;
private final TrustRelationshipClient trustRelationshipClient;
private final SSLContext sslContext;
private final HostnameVerifier verifier;
private final Client client;
public OxTrustClient(String baseURI, String user, String password) throws NoSuchAlgorithmException, KeyManagementException {
this.baseURI = baseURI;
sslContext = initSSLContext();
verifier = initHostnameVerifier();
Feature loggingFeature = new LoggingFeature(Logger.getLogger(getClass().getName()),
Level.ALL,
LoggingFeature.Verbosity.PAYLOAD_TEXT,
32768);// all up to 32768 bytes
client = ClientBuilder.newBuilder().sslContext(sslContext).hostnameVerifier(verifier)
.register(loggingFeature)
.build();
//TODO: login
trustRelationshipClient = new TrustRelationshipClient(client, baseURI);
}
private SSLContext initSSLContext() throws NoSuchAlgorithmException, KeyManagementException {
SSLContext context = SSLContext.getInstance("TLS");
context.init(null, new TrustManager[] { new X509TrustManager() {
@Override
public void checkClientTrusted(X509Certificate[] arg0, String arg1) {}
@Override
public void checkServerTrusted(X509Certificate[] arg0, String arg1) {}
@Override
public java.security.cert.X509Certificate[] getAcceptedIssuers() {
return new X509Certificate[0];
}
} }, new java.security.SecureRandom());
return context;
}
private HostnameVerifier initHostnameVerifier() {
return new HostnameVerifier() {
@Override
public boolean verify(String string, SSLSession ssls) {
return true;
}
};
}
/**
* @return the baseURI
*/
public String getBaseURI() {
return baseURI;
}
/**
* @return the trustRelationshipClient
*/
public TrustRelationshipClient getTrustRelationshipClient() {
return trustRelationshipClient;
}
public void close() {
client.close();
}
}
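// Usage sketch (illustrative only, not part of the original source): constructing the client
// and obtaining the SAML TrustRelationshipClient. Note that OxTrustClient installs a trust-all
// SSLContext and hostname verifier, so this setup is only suitable for development against
// self-signed certificates, not for production. The base URI and credentials are assumptions.
class OxTrustClientUsageSketch {
static TrustRelationshipClient connect() throws Exception {
// The constructor builds the JAX-RS client with the permissive SSL settings defined above.
OxTrustClient client = new OxTrustClient("https://gluu.example.org/identity/restv1", // hypothetical base URI
"admin", "secret"); // hypothetical credentials
// Callers are expected to invoke client.close() once they are finished with the API.
return client.getTrustRelationshipClient();
}
}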
|
package org.jetel.data.formatter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Date;
import javax.naming.InvalidNameException;
import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFCellStyle;
import org.apache.poi.hssf.usermodel.HSSFDataFormat;
import org.apache.poi.hssf.usermodel.HSSFFont;
import org.apache.poi.hssf.usermodel.HSSFRow;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.jetel.data.DataRecord;
import org.jetel.data.primitive.Decimal;
import org.jetel.exception.ComponentNotReadyException;
import org.jetel.metadata.DataFieldMetadata;
import org.jetel.metadata.DataRecordMetadata;
import org.jetel.util.XLSUtils;
public class XLSDataFormatter implements Formatter {
private HSSFWorkbook wb;
private HSSFSheet sheet;
private HSSFRow row;
private HSSFCell cell;
private HSSFCellStyle[] cellStyle;
private HSSFDataFormat dataFormat;
private DataRecordMetadata metadata;
private FileOutputStream out;
private int firstRow = 0;
private int recCounter;
private boolean saveNames;
private int namesRow = -1;
private boolean append;
private String sheetName = null;
private int sheetNumber = -1;
private String firstColumnIndex = "A";
private int firstColumn;
public XLSDataFormatter(boolean saveNames, boolean append){
this.saveNames = saveNames;
this.append = append;
}
/* (non-Javadoc)
* @see org.jetel.data.formatter.Formatter#open(java.lang.Object, org.jetel.metadata.DataRecordMetadata)
*/
public void open(Object out, DataRecordMetadata _metadata) throws ComponentNotReadyException{
this.metadata = _metadata;
try{
if (((File)out).length() > 0) {//if the xls file already exists, append new data to it
wb = new HSSFWorkbook(new FileInputStream((File)out));
}else{//create new xls file
wb = new HSSFWorkbook();
}
this.out = new FileOutputStream((File)out);
}catch(IOException ex){
throw new RuntimeException(ex);
}
//get or create the sheet depending on its existence and the append attribute
if (sheetName != null){
sheet = wb.getSheet(sheetName);
if (sheet == null) {
sheet = wb.createSheet(sheetName);
}else if (!append){
wb.removeSheetAt(wb.getSheetIndex(sheetName));
sheet = wb.createSheet(sheetName);
}
}else if (sheetNumber > -1){
try {
sheet = wb.getSheetAt(sheetNumber);
}catch(IndexOutOfBoundsException ex){
throw new ComponentNotReadyException("There is no sheet with number \"" + sheetNumber +"\"");
}
}else {
sheet = wb.createSheet();
}
recCounter = 0;
//set recCounter for proper row
if (append) {
if (sheet.getLastRowNum() != 0){
recCounter = sheet.getLastRowNum() + 1;
}
}
try {
firstColumn = XLSUtils.getCellNum(firstColumnIndex);
}catch(InvalidNameException ex){
throw new ComponentNotReadyException(ex);
}
//save metadata names
if (saveNames && (!append || recCounter == 0)){//saveNames=true, but if append=true save names only if there are no records on this sheet
recCounter = namesRow > -1 ? namesRow : 0;
HSSFCellStyle metaStyle = wb.createCellStyle();
HSSFFont font = wb.createFont();
font.setBoldweight(HSSFFont.BOLDWEIGHT_BOLD);//save metadata names bold
metaStyle.setFont(font);
row = sheet.createRow(recCounter);
String name;
for (short i=0;i<metadata.getNumFields();i++){
cell = row.createCell((short)(firstColumn + i));
name = metadata.getField(i).getName();
if (sheet.getColumnWidth((short)(firstColumn + i)) < name.length() * 256 ) {
sheet.setColumnWidth((short)(firstColumn + i),(short)(256 * name.length()));
}
cell.setCellStyle(metaStyle);
cell.setCellValue(name);
}
recCounter++;
}
//creating cell formats from metadata formats
dataFormat = wb.createDataFormat();
cellStyle = new HSSFCellStyle[metadata.getNumFields()];
String format;
for (short i=0;i<metadata.getNumFields();i++){
cellStyle[i] = wb.createCellStyle();
format = metadata.getField(i).getFormatStr();
if (format!=null){
cellStyle[i].setDataFormat(dataFormat.getFormat(format));
}
if (sheet.getColumnWidth((short)(firstColumn + i)) < metadata.getField(i).getSize() * 256) {
sheet.setColumnWidth((short)(firstColumn + i),(short)( metadata.getField(i).getSize() * 256));
}
}
if (firstRow > recCounter) {
recCounter = firstRow;
}
}
/* (non-Javadoc)
* @see org.jetel.data.formatter.Formatter#close()
*/
public void close() {
try {
wb.write(out);//write workbook to file
out.close();
}catch(IOException ex){
ex.printStackTrace();
}
}
/* (non-Javadoc)
* @see org.jetel.data.formatter.Formatter#write(org.jetel.data.DataRecord)
*/
public void write(DataRecord record) throws IOException {
row = sheet.createRow(recCounter);
char metaType;//metadata field type
Object value;//field value
short colNum;
for (short i=0;i<metadata.getNumFields();i++){
metaType = metadata.getField(i).getType();
colNum = (short)(firstColumn + i);
cell = row.createCell(colNum);
value = record.getField(i).getValue();
if (value == null) continue;
cell.setCellStyle(cellStyle[i]);
if (metaType == DataFieldMetadata.BYTE_FIELD || metaType == DataFieldMetadata.STRING_FIELD){
cell.setCellType(HSSFCell.CELL_TYPE_STRING);
cell.setCellValue(value.toString());
}else{
cell.setCellType(HSSFCell.CELL_TYPE_NUMERIC);
switch (metaType) {
case DataFieldMetadata.DATE_FIELD:
case DataFieldMetadata.DATETIME_FIELD:
cell.setCellValue((Date)value);
break;
case DataFieldMetadata.INTEGER_FIELD:
cell.setCellValue((Integer)value);
break;
case DataFieldMetadata.LONG_FIELD:
cell.setCellValue((Long)value);
break;
case DataFieldMetadata.DECIMAL_FIELD:
cell.setCellValue(((Decimal)value).getDouble());
break;
case DataFieldMetadata.NUMERIC_FIELD:
cell.setCellValue((Double)value);
break;
}
}
}
recCounter++;
}
/* (non-Javadoc)
* @see org.jetel.data.formatter.Formatter#flush()
*/
public void flush() throws IOException {
// TODO Auto-generated method stub
}
/* (non-Javadoc)
* @see org.jetel.data.formatter.Formatter#setOneRecordPerLinePolicy(boolean)
*/
public void setOneRecordPerLinePolicy(boolean b) {
// TODO Auto-generated method stub
}
public void setSheetName(String sheetName) {
this.sheetName = sheetName;
}
public void setSheetNumber(int sheetNumber) {
this.sheetNumber = sheetNumber;
}
public void setFirstRow(int firstRow){
this.firstRow = firstRow-1;
}
public void setFirstColumn(String firstColumn){
this.firstColumnIndex = firstColumn;
}
public int getFirstColumn() {
return firstColumn;
}
public void setNamesRow(int namesRow) {
this.namesRow = namesRow-1;
}
public boolean isAppend() {
return append;
}
public boolean isSaveNames() {
return saveNames;
}
public int getFirstRow() {
return firstRow+1;
}
public int getNamesRow() {
return namesRow+1;
}
public String getSheetName() {
return sheetName;
}
public int getSheetNumber() {
return sheetNumber;
}
}
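// Usage sketch (illustrative only, not part of the original source): writing records to an
// XLS file with the field names saved as a bold header row. The metadata, record source,
// sheet name and output file name are assumptions; in a real graph the writer component
// drives this life cycle.
class XLSDataFormatterUsageSketch {
static void writeRecords(DataRecordMetadata metadata, Iterable<DataRecord> records)
throws ComponentNotReadyException, IOException {
// saveNames=true writes the field names as a header row; append=false replaces the sheet.
XLSDataFormatter formatter = new XLSDataFormatter(true, false);
formatter.setSheetName("Sheet1"); // hypothetical sheet name
formatter.open(new File("output.xls"), metadata); // hypothetical output file
for (DataRecord record : records) {
formatter.write(record);
}
// close() writes the workbook to the file and closes the output stream.
formatter.close();
}
}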
|
package org.jetel.data.lookup;
import org.jetel.data.DataRecord;
public interface LookupTableIterator {
/**
* Returns DataRecord stored in lookup table.
*
* @param keyRecord DataRecord to be used for looking up data.
* @return DataRecord associated with specified key or <code>null</code> if not found.
*/
public DataRecord get(DataRecord keyRecord);
/**
* Next DataRecord stored under the same key as the previous one successfully
* retrieved while calling get() method.
*
* @return DataRecord or <code>null</code> if no other DataRecord is stored under the same key.
*/
public DataRecord getNext();
}
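// Usage sketch (illustrative only, not part of the original interface): iterating over every
// DataRecord stored under a single lookup key. The lookup and keyRecord arguments are
// assumptions; concrete lookup table classes provide the actual implementation.
class LookupTableIteratorUsageSketch {
static int countMatches(LookupTableIterator lookup, DataRecord keyRecord) {
int count = 0;
// get() positions the iterator on the first match; getNext() walks the remaining duplicates.
for (DataRecord found = lookup.get(keyRecord); found != null; found = lookup.getNext()) {
count++;
}
return count;
}
}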
|
// FILE: c:/projects/jetel/org/jetel/data/DelimitedDataParser.java
package org.jetel.data.parser;
import java.io.IOException;
import java.io.InputStream;
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CoderResult;
import org.jetel.data.DataRecord;
import org.jetel.data.Defaults;
import org.jetel.exception.BadDataFormatException;
import org.jetel.exception.IParserExceptionHandler;
import org.jetel.exception.JetelException;
import org.jetel.exception.PolicyType;
import org.jetel.metadata.DataFieldMetadata;
import org.jetel.metadata.DataRecordMetadata;
import org.jetel.util.StringUtils;
/**
* Parses delimited text data. Supports delimiters up to 32 characters long. The delimiter
* for each individual field must be specified through the metadata definition. The maximum
* length of one parseable field is denoted by <b>FIELD_BUFFER_LENGTH</b>. The parser handles
* quoted strings (single or double quotes). This class uses the New IO (NIO) features
* introduced in Java 1.4 - directly mapped byte buffers & character
* encoders/decoders
*
*@author D.Pavlis
*@since March 27, 2002
*@see Parser
*@see org.jetel.data.Defaults
* @revision $Revision$
*/
public class DelimitedDataParser implements Parser {
private String charSet = null;
private IParserExceptionHandler exceptionHandler;
private ByteBuffer dataBuffer;
private CharBuffer charBuffer;
private CharBuffer fieldStringBuffer;
private char[] delimiterCandidateBuffer;
private DataRecordMetadata metadata;
private ReadableByteChannel reader;
private CharsetDecoder decoder;
private int recordCounter;
private char[][] delimiters;
private char[] fieldTypes;
private boolean isEof;
private boolean skipRows=false;
// this will be added as a parameter to constructor
private boolean handleQuotedStrings = true;
// Attributes
// maximum length of delimiter
private final static int DELIMITER_CANDIDATE_BUFFER_LENGTH = 32;
// Associations
// Operations
/**
* Constructor for the DelimitedDataParser object. With default size and
* default character encoding.
*
*@since March 28, 2002
*/
public DelimitedDataParser() {
this(Defaults.DataParser.DEFAULT_CHARSET_DECODER);
}
/**
* Constructor for the DelimitedDataParser object
*
*@param charsetDecoder Charset Decoder used for converting input data into
* UNICODE chars
*@since March 28, 2002
*/
public DelimitedDataParser(String charsetDecoder) {
this.charSet = charsetDecoder;
dataBuffer = ByteBuffer.allocateDirect(Defaults.DEFAULT_INTERNAL_IO_BUFFER_SIZE);
charBuffer = CharBuffer.allocate(Defaults.DEFAULT_INTERNAL_IO_BUFFER_SIZE);
fieldStringBuffer = CharBuffer.allocate(Defaults.DataParser.FIELD_BUFFER_LENGTH);
delimiterCandidateBuffer = new char [DELIMITER_CANDIDATE_BUFFER_LENGTH];
decoder = Charset.forName(charsetDecoder).newDecoder();
}
/**
* Returns the next data record parsed from the input stream, or NULL if no more data is
* available
*
*@return The Next value
*@exception IOException Description of Exception
*@since May 2, 2002
*/
public DataRecord getNext() throws JetelException {
// create a new data record
DataRecord record = new DataRecord(metadata);
record.init();
record = parseNext(record);
if(exceptionHandler != null ) { //use handler only if configured
while(exceptionHandler.isExceptionThrowed()) {
exceptionHandler.handleException();
record = parseNext(record);
}
}
return record;
}
/**
* Returns the next data record parsed from the input stream, or NULL if no more data is
* available. The specified DataRecord's fields are altered to contain the new
* values.
*
*@param record Description of Parameter
*@return The Next value
*@exception IOException Description of Exception
*@since May 2, 2002
*/
public DataRecord getNext(DataRecord record) throws JetelException {
record = parseNext(record);
if(exceptionHandler != null ) { //use handler only if configured
while(exceptionHandler.isExceptionThrowed()) {
exceptionHandler.handleException();
//record.init(); //redundant
record = parseNext(record);
}
}
return record;
}
/**
* An operation that opens/initializes parser.
*
*@param in InputStream of delimited text data
*@param _metadata Metadata describing the structure of data
*@since March 27, 2002
*/
public void open(Object in, DataRecordMetadata metadata) {
DataFieldMetadata fieldMetadata;
this.metadata = metadata;
reader = Channels.newChannel((InputStream) in);
// create array of delimiters & initialize them
delimiters = new char[metadata.getNumFields()][];
fieldTypes = new char[metadata.getNumFields()];
for (int i = 0; i < metadata.getNumFields(); i++) {
fieldMetadata = metadata.getField(i);
delimiters[i] = fieldMetadata.getDelimiter().toCharArray();
fieldTypes[i] = fieldMetadata.getType();
// we handle only one character delimiters
}
decoder.reset();// reset CharsetDecoder
dataBuffer.clear();
dataBuffer.flip();
charBuffer.clear();
charBuffer.flip();
recordCounter = 1;// reset record counter
isEof=false;
}
/**
* Description of the Method
*
*@since May 2, 2002
*/
public void close() {
if (reader != null) {
try {
reader.close();
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
/**
* Assembles the error message when an exception occurs during parsing
*
*@param exceptionMessage message from exception getMessage() call
*@param recNo recordNumber
*@param fieldNo fieldNumber
*@return error message
*@since September 19, 2002
*/
private String getErrorMessage(String exceptionMessage,CharSequence value, int recNo, int fieldNo) {
StringBuffer message = new StringBuffer();
message.append(exceptionMessage);
message.append(" when parsing record
message.append(recordCounter);
message.append(" field ");
message.append(metadata.getField(fieldNo).getName());
if (value!=null){
message.append(" value \"").append(value).append("\"");
}
return message.toString();
}
/**
* Description of the Method
*
* @return Description of the Returned Value
* @exception IOException
* Description of Exception
* @since May 13, 2002
*/
private int readChar() throws IOException {
CoderResult result;
if (charBuffer.hasRemaining()) {
return charBuffer.get();
}
if (isEof)
return -1;
charBuffer.clear();
if (dataBuffer.hasRemaining())
dataBuffer.compact();
else
dataBuffer.clear();
if (reader.read(dataBuffer) == -1) {
isEof = true;
}
dataBuffer.flip();
result = decoder.decode(dataBuffer, charBuffer, isEof);
if (result == CoderResult.UNDERFLOW) {
// try to load additional data
dataBuffer.compact();
if (reader.read(dataBuffer) == -1) {
isEof = true;
}
dataBuffer.flip();
decoder.decode(dataBuffer, charBuffer, isEof);
} else if (result.isError()) {
throw new IOException(result.toString()+" when converting from "+decoder.charset());
}
if (isEof) {
result = decoder.flush(charBuffer);
if (result.isError()) {
throw new IOException(result.toString()+" when converting from "+decoder.charset());
}
}
charBuffer.flip();
return charBuffer.hasRemaining() ? charBuffer.get() : -1;
}
/**
* Parses the next data record from the input, populating the fields of the supplied record.
*
*@param record Description of Parameter
*@return Next DataRecord (parsed from input data) or null if
* no more records available
*@exception IOException Description of Exception
*@since March 27, 2002
*/
private DataRecord parseNext(DataRecord record) throws JetelException {
int result;
int fieldCounter = 0;
int character;
int totalCharCounter = 0;
int delimiterPosition;
long size = 0;
int charCounter;
boolean isWithinQuotes;
char quoteChar=' ';
// populate all data fields
while (fieldCounter < metadata.getNumFields()) {
// we clear our buffer
fieldStringBuffer.clear();
character = 0;
isWithinQuotes=false;
// read data till we reach delimiter, end of file or exceed buffer size
// exceeded buffer is indicated by BufferOverflowException
charCounter = 0;
delimiterPosition = 0;
try {
while ((character = readChar()) != -1) {
// causes problem when composed delimiter "\r\n" is used
// if(character=='\r') //fix for new line being \r\n
// continue;
totalCharCounter++;
// handle quoted strings
if (handleQuotedStrings && StringUtils.isQuoteChar((char)character)){
if (!isWithinQuotes){
if (charCounter==0){
quoteChar=(char)character;
isWithinQuotes=true;
}
}else if (quoteChar==(char)character){
isWithinQuotes=false;
}
}
if ((result = is_delimiter((char) character, fieldCounter, delimiterPosition,isWithinQuotes)) == 1) {
/*
* DELIMITER
*/
break;
} else if (result == 0) {
/*
* NOT A DELIMITER
*/
if (delimiterPosition > 0) {
fieldStringBuffer.put(delimiterCandidateBuffer,0,delimiterPosition);
} else {
try{
fieldStringBuffer.put((char) character);
}catch(BufferOverflowException ex){
throw new IOException("Field too long or can not find delimiter ["+String.valueOf(delimiters[fieldCounter])+"]");
}
}
delimiterPosition = 0;
} else {
/*
* CAN'T DECIDE DELIMITER
*/
delimiterCandidateBuffer[delimiterPosition]=((char) character);
delimiterPosition++;
}
charCounter++;
}
if ((character == -1) && (totalCharCounter > 1)) {
//- incomplete record - do something
throw new RuntimeException("Incomplete record");
}
} catch (Exception ex) {
ex.printStackTrace();
throw new RuntimeException(getErrorMessage(ex.getClass().getName()+":"+ex.getMessage(),null,
recordCounter, fieldCounter),ex);
}
// did we have EOF situation ?
if (character == -1) {
try {
reader.close();
} catch (IOException e) {
e.printStackTrace();
throw new JetelException(e.getMessage());
}
return null;
}
// set field's value
// are we skipping this row/field ?
if (!skipRows){
fieldStringBuffer.flip();
populateField(record, fieldCounter, fieldStringBuffer);
}
fieldCounter++;
}
recordCounter++;
return record;
}
/**
* Description of the Method
*
*@param record Description of Parameter
*@param fieldNum Description of Parameter
*@param data Description of Parameter
*@since March 28, 2002
*/
private void populateField(DataRecord record, int fieldNum, CharBuffer data) {
String strData = buffer2String(data, fieldNum,handleQuotedStrings);
try {
record.getField(fieldNum).fromString(strData);
} catch (BadDataFormatException bdfe) {
if(exceptionHandler != null ) { //use handler only if configured
exceptionHandler.populateHandler(getErrorMessage(bdfe.getMessage(),data,recordCounter, fieldNum), record, -1, fieldNum, strData, bdfe);
} else {
bdfe.setRecordNumber(recordCounter);
bdfe.setFieldNumber(fieldNum);
bdfe.setOffendingValue(strData);
throw bdfe;
}
} catch (Exception ex) {
throw new RuntimeException(getErrorMessage(ex.getMessage(),null,recordCounter, fieldNum),ex);
}
}
/**
* Transfers CharBuffer into string and handles quoting of strings (removes quotes)
*
*@param buffer Character buffer to work on
*@param removeQuotes true/false remove quotation characters
*@return String with quotes removed if specified
*/
private String buffer2String(CharBuffer buffer,int fieldNum, boolean removeQuotes) {
if (removeQuotes && buffer.hasRemaining() &&
metadata.getField(fieldNum).getType()== DataFieldMetadata.STRING_FIELD) {
/* if the first & last characters are quotes (and at least one character is quoted), remove the quotes */
if (StringUtils.isQuoteChar(buffer.charAt(0))) {
if (StringUtils.isQuoteChar(buffer.charAt(buffer.limit()-1))) {
if (buffer.remaining()>2){
return buffer.subSequence(1, buffer.limit() - 1).toString();
}else{
return ""; //empty string after quotes removed
}
}
}
}
return buffer.toString();
}
/**
 * Decides whether delimiter was encountered
 *
 *@param character character to compare with delimiter
 *@param fieldCounter delimiter for which field
 *@param delimiterPosition current position within delimiter string
 *@param isWithinQuotes true if the character lies inside a quoted string (delimiters are ignored there)
 *@return 1 if delimiter matched; -1 if can't decide yet; 0 if not part of delimiter
 */
private int is_delimiter(char character, int fieldCounter, int delimiterPosition, boolean isWithinQuotes) {
if (isWithinQuotes){
return 0;
}
if (character == delimiters[fieldCounter][delimiterPosition]) {
if (delimiterPosition == delimiters[fieldCounter].length - 1) {
return 1;
// whole delimiter matched
} else {
return -1;
// can't decide
}
} else {
return 0;
// not a match
}
}
/**
* Returns charset name of this parser
* @return Returns name of the charset used to construct or null if none was specified
*/
public String getCharsetName() {
return(this.charSet);
}
/**
* Returns data policy type for this parser
* @return Data policy type or null if none was specified
*/
public PolicyType getPolicyType() {
if (this.exceptionHandler != null) {
return this.exceptionHandler.getType();
} else {
return null;
}
}
/**
* @return Returns the skipRows.
*/
public boolean isSkipRows() {
return skipRows;
}
/**
* @param skipRows The skipRows to set.
*/
public void setSkipRows(boolean skipRows) {
this.skipRows = skipRows;
}
public void setExceptionHandler(IParserExceptionHandler handler) {
this.exceptionHandler = handler;
}
public IParserExceptionHandler getExceptionHandler() {
return exceptionHandler;
}
public int skip(int nRec) {
throw new UnsupportedOperationException("Not yet implemented");
// return 0;
}
}
/*
* end class DelimitedDataParser
*/
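// Editor's note: a minimal, self-contained sketch (not part of the original parser) illustrating the
// three-valued convention used by DelimitedDataParser.is_delimiter() above: 1 = whole delimiter
// matched, -1 = partial match (can't decide yet), 0 = not part of the delimiter. The class and method
// names below are hypothetical.
class DelimiterMatchSketch {
    /** Same decision rule as is_delimiter(), restated over a plain char[] delimiter. */
    static int match(char character, char[] delimiter, int delimiterPosition) {
        if (character == delimiter[delimiterPosition]) {
            return delimiterPosition == delimiter.length - 1 ? 1 : -1;
        }
        return 0;
    }
    public static void main(String[] args) {
        char[] delimiter = {'\r', '\n'};
        System.out.println(match('\r', delimiter, 0)); // -1: partial match, keep buffering the candidate
        System.out.println(match('\n', delimiter, 1)); //  1: whole delimiter matched, field ends here
        System.out.println(match('a', delimiter, 0));  //  0: ordinary field character
    }
}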
|
package com.mifos.services;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.util.Log;
import com.mifos.objects.SearchedEntity;
import com.mifos.objects.User;
import com.mifos.objects.accounts.ClientAccounts;
import com.mifos.objects.accounts.loan.LoanRepaymentRequest;
import com.mifos.objects.accounts.loan.LoanRepaymentResponse;
import com.mifos.objects.accounts.savings.SavingsAccountWithAssociations;
import com.mifos.objects.client.Client;
import com.mifos.objects.client.Page;
import com.mifos.objects.db.CollectionSheet;
import com.mifos.objects.templates.loans.LoanRepaymentTemplate;
import com.mifos.services.data.Payload;
import com.mifos.services.data.CollectionSheetPayload;
import com.mifos.services.data.SaveResponse;
import com.mifos.utils.Constants;
import retrofit.*;
import retrofit.client.Response;
import retrofit.http.*;
import retrofit.mime.TypedByteArray;
import java.util.Iterator;
import java.util.List;
public class API {
// This instance has more data for testing
public static String mInstanceUrl = "https://developer.openmf.org/mifosng-provider/api/v1";
public static final String ACCEPT_JSON = "Accept: application/json";
public static final String CONTENT_TYPE_JSON = "Content-Type: application/json";
static RestAdapter sRestAdapter;
public static CenterService centerService;
public static ClientAccountsService clientAccountsService;
public static ClientService clientService;
public static LoanService loanService;
public static SavingsAccountService savingsAccountService;
public static SearchService searchService;
public static UserAuthService userAuthService;
static {
init();
}
private static synchronized void init() {
sRestAdapter = createRestAdapter(getInstanceUrl());
centerService = sRestAdapter.create(CenterService.class);
clientAccountsService = sRestAdapter.create(ClientAccountsService.class);
clientService = sRestAdapter.create(ClientService.class);
loanService = sRestAdapter.create(LoanService.class);
savingsAccountService = sRestAdapter.create(SavingsAccountService.class);
searchService = sRestAdapter.create(SearchService.class);
userAuthService = sRestAdapter.create(UserAuthService.class);
}
private static RestAdapter createRestAdapter(final String url) {
RestAdapter restAdapter = new RestAdapter.Builder().setEndpoint(url)
.setRequestInterceptor(new RequestInterceptor() {
@Override
public void intercept(RequestFacade request) {
if (url.contains("developer")) {
request.addHeader("X-Mifos-Platform-TenantId", "developer");
} else {
request.addHeader("X-Mifos-Platform-TenantId", "default");
}
// request.addHeader("Authorization", "Basic VXNlcjE6dGVjaDRtZg==");
SharedPreferences pref = PreferenceManager
.getDefaultSharedPreferences(Constants.applicationContext);
String authToken = pref.getString(User.AUTHENTICATION_KEY, "NA");
if (authToken != null && !"NA".equals(authToken)) {
request.addHeader("Authorization", authToken);
}
}
})
.setErrorHandler(new MifosRestErrorHandler())
.build();
// TODO: This logging is sometimes excessive, e.g. for client image requests.
restAdapter.setLogLevel(RestAdapter.LogLevel.FULL);
return restAdapter;
}
static class MifosRestErrorHandler implements ErrorHandler {
@Override
public Throwable handleError(RetrofitError retrofitError) {
Response r = retrofitError.getResponse();
if (r != null && r.getStatus() == 401) {
Log.e("Status", "Authentication Error.");
} else if (r != null && r.getStatus() == 400) {
Log.d("Status", "Bad Request - Invalid Parameter or Data Integrity Issue.");
Log.d("URL", r.getUrl());
List<retrofit.client.Header> headersList = r.getHeaders();
Iterator<retrofit.client.Header> iterator = headersList.iterator();
while(iterator.hasNext())
{ retrofit.client.Header header = iterator.next();
Log.d("Header ",header.toString());
}
}
return retrofitError;
}
}
public interface CenterService {
@Headers({ACCEPT_JSON, CONTENT_TYPE_JSON})
@GET("/centers")
public void getAllCenters(Callback<List<com.mifos.objects.Center>> callback);
@Headers({ACCEPT_JSON, CONTENT_TYPE_JSON})
@POST("/centers/2026?command=generateCollectionSheet")
public void getCenter(@Body Payload payload, Callback<CollectionSheet> callback);
@Headers({ACCEPT_JSON, CONTENT_TYPE_JSON})
@POST("/centers/2026?command=saveCollectionSheet")
public SaveResponse saveCollectionSheet(@Body CollectionSheetPayload collectionSheetPayload);
}
public interface ClientAccountsService {
@Headers({ACCEPT_JSON, CONTENT_TYPE_JSON})
@GET("/clients/{clientId}/accounts")
public void getAllAccountsOfClient(@Path("clientId") int clientId, Callback<ClientAccounts> callback);
}
public interface ClientService {
@Headers({ACCEPT_JSON, CONTENT_TYPE_JSON})
@GET("/clients")
public void listAllClients(Callback<Page<Client>> callback);
@Headers({ACCEPT_JSON, CONTENT_TYPE_JSON})
@GET("/clients/{clientId}")
public void getClient(@Path("clientId") int clientId, Callback<Client> callback);
@Headers({"Accept: application/octet-stream", CONTENT_TYPE_JSON})
@GET("/clients/{clientId}/images")
public void getClientImage(@Path("clientId") int clientId, Callback<TypedByteArray> callback);
}
public interface SearchService {
@Headers({ACCEPT_JSON, CONTENT_TYPE_JSON})
@GET("/search?resource=clients")
public void searchClientsByName(@Query("query") String clientName, Callback<List<SearchedEntity>> callback);
}
public interface LoanService {
@Headers({ACCEPT_JSON, CONTENT_TYPE_JSON})
@GET("/loans/{loanId}")
public void getLoanById(@Path("loanId") int loanId, Callback<com.mifos.objects.accounts.loan.Loan> callback);
@Headers({ACCEPT_JSON, CONTENT_TYPE_JSON})
@GET("/loans/{loanId}/transactions/template?command=repayment")
public void getLoanRepaymentTemplate(@Path("loanId") int loanId, Callback<LoanRepaymentTemplate> callback);
@Headers({ACCEPT_JSON, CONTENT_TYPE_JSON})
@POST("/loans/{loanId}/transactions?command=repayment")
public void submitPayment(@Path("loanId") int loanId,
@Body LoanRepaymentRequest loanRepaymentRequest,
Callback<LoanRepaymentResponse> loanRepaymentResponseCallback);
}
public interface SavingsAccountService {
/**
* @param savingsAccountId - savingsAccountId for which information is requested
* @param association - type of association requested, e.g. "all" or "transactions"
* @param savingsAccountWithAssociationsCallback - callback to receive the response
*/
@Headers({ACCEPT_JSON, CONTENT_TYPE_JSON})
@GET("/savingsaccounts/{savingsAccountId}")
public void getSavingsAccountWithAssociations(@Path("savingsAccountId") int savingsAccountId,
@Query("associations") String association,
Callback<SavingsAccountWithAssociations> savingsAccountWithAssociationsCallback);
}
public interface UserAuthService {
@Headers({ACCEPT_JSON, CONTENT_TYPE_JSON})
@POST("/authentication")
public void authenticate(@Query("username") String username, @Query("password") String password, Callback<User> userCallback);
}
public static <T> Callback<T> getCallback(T t) {
Callback<T> cb = new Callback<T>() {
@Override
public void success(T o, Response response) {
System.out.println("Object " + o);
}
@Override
public void failure(RetrofitError retrofitError) {
System.out.println("Error: " + retrofitError);
}
};
return cb;
}
public static <T> Callback<List<T>> getCallbackList(List<T> t) {
Callback<List<T>> cb = new Callback<List<T>>() {
@Override
public void success(List<T> o, Response response) {
System.out.println("Object " + o);
}
@Override
public void failure(RetrofitError retrofitError) {
System.out.println("Error: " + retrofitError);
}
};
return cb;
}
public static synchronized void setInstanceUrl(String url) {
mInstanceUrl = url;
init();
}
public static synchronized String getInstanceUrl() {
return mInstanceUrl;
}
}
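// Editor's note: a minimal usage sketch (not part of the original file) showing how the static
// Retrofit services above are typically invoked; the class name and the callback body are
// illustrative only.
class ApiUsageSketch {
    static void listClients() {
        API.clientService.listAllClients(new Callback<Page<Client>>() {
            @Override
            public void success(Page<Client> clientPage, Response response) {
                System.out.println("Fetched a page of clients: " + clientPage);
            }
            @Override
            public void failure(RetrofitError retrofitError) {
                System.out.println("Error: " + retrofitError);
            }
        });
    }
}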
|
package com.cradle.iitc_mobile.async;
import android.os.AsyncTask;
import android.util.Log;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
public class DownloadTile extends AsyncTask<String, Void, Boolean> {
private String mFilePath;
private String mFileName;
public DownloadTile(String path, String fileName) {
mFilePath = path;
mFileName = fileName;
}
@Override
protected Boolean doInBackground(String... urls) {
URL tileUrl = null;
URLConnection conn = null;
try {
tileUrl = new URL(urls[0]);
conn = tileUrl.openConnection();
File file = new File(mFilePath, mFileName);
// update tile if needed, else return
if (conn.getLastModified() < file.lastModified()) return true;
InputStream is = conn.getInputStream();
Log.d("iitcm", "writing to file: " + file.toString());
writeTileToFile(is, file, mFilePath);
} catch (IOException e) {
e.printStackTrace();
return false;
} catch (Exception e) {
e.printStackTrace();
return false;
}
return true;
}
private File writeTileToFile(InputStream inStream, File file, String path) throws Exception {
File filePath = new File(path);
filePath.mkdirs();
FileOutputStream outStream = new FileOutputStream(file);
int bufferSize = 1024;
byte[] buffer = new byte[bufferSize];
int len = 0;
while ((len = inStream.read(buffer)) != -1) {
outStream.write(buffer, 0, len);
}
outStream.close();
inStream.close();
return file;
}
}
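// Editor's note: minimal usage sketch (not part of the original file); the class name, tile path,
// file name and URL below are placeholders.
class DownloadTileUsageSketch {
    static void startDownload() {
        new DownloadTile("/sdcard/iitc/tiles", "12_2048_1361.png")
                .execute("https://tiles.example.org/12/2048/1361.png");
    }
}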
|
package io.quarkus.runtime;
import java.time.Duration;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import org.jboss.logging.Logger;
import org.jboss.threads.EnhancedQueueExecutor;
import org.jboss.threads.JBossExecutors;
import org.jboss.threads.JBossThreadFactory;
import org.wildfly.common.cpu.ProcessorInfo;
import io.quarkus.runtime.annotations.Recorder;
@Recorder
public class ExecutorRecorder {
private static final Logger log = Logger.getLogger("io.quarkus.thread-pool");
public ExecutorRecorder() {
}
/**
* In dev mode for now we need the executor to last for the life of the app, as it is used by Undertow. This will likely
* change
*/
static volatile CleanableExecutor devModeExecutor;
private static volatile Executor current;
public ExecutorService setupRunTime(ShutdownContext shutdownContext, ThreadPoolConfig threadPoolConfig,
LaunchMode launchMode) {
if (devModeExecutor != null) {
current = devModeExecutor;
return devModeExecutor;
}
final EnhancedQueueExecutor underlying = createExecutor(threadPoolConfig);
ExecutorService executor;
Runnable shutdownTask = createShutdownTask(threadPoolConfig, underlying);
if (launchMode == LaunchMode.DEVELOPMENT) {
devModeExecutor = new CleanableExecutor(underlying);
shutdownContext.addShutdownTask(new Runnable() {
@Override
public void run() {
devModeExecutor.clean();
}
});
executor = devModeExecutor;
Runtime.getRuntime().addShutdownHook(new Thread(shutdownTask, "Executor shutdown thread"));
} else {
shutdownContext.addLastShutdownTask(shutdownTask);
executor = underlying;
}
if (threadPoolConfig.prefill) {
underlying.prestartAllCoreThreads();
}
current = executor;
return executor;
}
public static ExecutorService createDevModeExecutorForFailedStart(ThreadPoolConfig config) {
EnhancedQueueExecutor underlying = createExecutor(config);
Runnable task = createShutdownTask(config, underlying);
devModeExecutor = new CleanableExecutor(underlying);
Runtime.getRuntime().addShutdownHook(new Thread(task, "Executor shutdown thread"));
current = devModeExecutor;
return devModeExecutor;
}
static void shutdownDevMode() {
if (devModeExecutor != null) {
devModeExecutor.shutdown();
}
}
private static Runnable createShutdownTask(ThreadPoolConfig threadPoolConfig, EnhancedQueueExecutor executor) {
return new Runnable() {
@Override
public void run() {
executor.shutdown();
final Duration shutdownTimeout = threadPoolConfig.shutdownTimeout;
final Optional<Duration> optionalInterval = threadPoolConfig.shutdownCheckInterval;
long remaining = shutdownTimeout.toNanos();
final long interval = optionalInterval.orElse(Duration.ofNanos(Long.MAX_VALUE)).toNanos();
long intervalRemaining = interval;
long interruptRemaining = threadPoolConfig.shutdownInterrupt.toNanos();
long start = System.nanoTime();
for (;;)
try {
if (!executor.awaitTermination(Math.min(remaining, intervalRemaining), TimeUnit.NANOSECONDS)) {
long elapsed = System.nanoTime() - start;
intervalRemaining -= elapsed;
remaining -= elapsed;
interruptRemaining -= elapsed;
if (interruptRemaining <= 0) {
executor.shutdown(true);
}
if (remaining <= 0) {
// done waiting
final List<Runnable> runnables = executor.shutdownNow();
if (!runnables.isEmpty()) {
log.warnf("Thread pool shutdown failed: discarding %d tasks, %d threads still running",
runnables.size(), executor.getActiveCount());
} else {
log.warnf("Thread pool shutdown failed: %d threads still running",
executor.getActiveCount());
}
break;
}
if (intervalRemaining <= 0) {
intervalRemaining = interval;
// do some probing
final int queueSize = executor.getQueueSize();
final Thread[] runningThreads = executor.getRunningThreads();
log.infof("Awaiting thread pool shutdown; %d thread(s) running with %d task(s) waiting",
runningThreads.length, queueSize);
// make sure no threads are stuck in {@code exit()}
int realWaiting = runningThreads.length;
for (Thread thr : runningThreads) {
final StackTraceElement[] stackTrace = thr.getStackTrace();
for (int i = 0; i < stackTrace.length && i < 8; i++) {
if (stackTrace[i].getClassName().equals("java.lang.System")
&& stackTrace[i].getMethodName().equals("exit")) {
final Throwable t = new Throwable();
t.setStackTrace(stackTrace);
log.errorf(t, "Thread %s is blocked in System.exit(); pooled (Executor) threads "
+ "should never call this method because it never returns, thus preventing "
+ "the thread pool from shutting down in a timely manner. This is the "
+ "stack trace of the call", thr.getName());
// don't bother waiting for exit() to return
realWaiting--;
break;
}
}
}
if (realWaiting == 0 && queueSize == 0) {
// just exit
executor.shutdownNow();
break;
}
}
}
return;
} catch (InterruptedException ignored) {
}
}
};
}
private static EnhancedQueueExecutor createExecutor(ThreadPoolConfig threadPoolConfig) {
final JBossThreadFactory threadFactory = new JBossThreadFactory(new ThreadGroup("executor"), Boolean.TRUE, null,
"executor-thread-%t", JBossExecutors.loggingExceptionHandler("org.jboss.executor.uncaught"), null);
final EnhancedQueueExecutor.Builder builder = new EnhancedQueueExecutor.Builder()
.setRegisterMBean(false)
.setHandoffExecutor(JBossExecutors.rejectingExecutor())
.setThreadFactory(JBossExecutors.resettingThreadFactory(threadFactory));
final int cpus = ProcessorInfo.availableProcessors();
// run time config variables
builder.setCorePoolSize(threadPoolConfig.coreThreads);
builder.setMaximumPoolSize(threadPoolConfig.maxThreads.orElse(Math.max(8 * cpus, 200)));
if (threadPoolConfig.queueSize.isPresent()) {
if (threadPoolConfig.queueSize.getAsInt() < 0) {
builder.setMaximumQueueSize(Integer.MAX_VALUE);
} else {
builder.setMaximumQueueSize(threadPoolConfig.queueSize.getAsInt());
}
}
builder.setGrowthResistance(threadPoolConfig.growthResistance);
builder.setKeepAliveTime(threadPoolConfig.keepAliveTime);
return builder.build();
}
public static Executor getCurrent() {
return current;
}
}
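// Editor's note: a self-contained sketch (not part of the original recorder) of the stack-trace probe
// the shutdown task above uses to spot worker threads blocked inside System.exit(); only the top
// frames are inspected, mirroring the original limit of 8. The class and method names are hypothetical.
class ExitProbeSketch {
    static boolean isBlockedInExit(Thread thread) {
        StackTraceElement[] stackTrace = thread.getStackTrace();
        for (int i = 0; i < stackTrace.length && i < 8; i++) {
            if (stackTrace[i].getClassName().equals("java.lang.System")
                    && stackTrace[i].getMethodName().equals("exit")) {
                return true;
            }
        }
        return false;
    }
    public static void main(String[] args) {
        // The running main thread is clearly not blocked in System.exit(), so this prints false.
        System.out.println(isBlockedInExit(Thread.currentThread()));
    }
}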
|
package org.jpos.ee.status;
import java.net.ConnectException;
import java.net.Socket;
import java.io.IOException;
import org.jpos.core.Configurable;
import org.jpos.core.Configuration;
import org.jpos.core.ConfigurationException;
import org.jpos.util.Log;
public class Ping extends Log implements MonitorTask, Configurable {
String host;
int port = 7;
// code taken from Alireza's blog
public String checkService () {
boolean rc = false;
String detail = "";
long start = System.currentTimeMillis();
try {
Socket socket = new Socket(host, port);
socket.setSoLinger (true, 0);
socket.close();
rc = true;
} catch (ConnectException e) {
rc = true;
} catch (IOException e) {
String msg = e.getMessage().toUpperCase();
rc = false;
detail = " " + msg;
}
long elapsed = System.currentTimeMillis() - start;
return (rc ? Status.OK : Status.WARN) + detail
+ " time=" + elapsed + "ms";
}
public void setConfiguration (Configuration cfg)
throws ConfigurationException
{
host = cfg.get ("host", null);
if (host == null)
throw new ConfigurationException (
"host property has not been specified"
);
port = cfg.getInt ("port", 7);
}
}
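// Editor's note: a minimal usage sketch (not part of the original file). SimpleConfiguration with a
// Properties-based constructor is the stock jPOS configuration mechanism; the host and port values
// are placeholders and the class name is hypothetical.
class PingUsageSketch {
    public static void main(String[] args) throws ConfigurationException {
        java.util.Properties props = new java.util.Properties();
        props.put("host", "example.org");
        props.put("port", "7");
        Ping ping = new Ping();
        ping.setConfiguration(new org.jpos.core.SimpleConfiguration(props));
        System.out.println(ping.checkService()); // e.g. "OK time=12ms"
    }
}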
|
package ualberta.g12.adventurecreator;
import android.graphics.drawable.Drawable;
import java.util.LinkedList;
import java.util.List;
public class Fragment {
private String title;
private String bodyText;
private List<Drawable> illustrations;
//private LinkedList<Sound> sounds;
//private LinkedList<Video> videos;
private List<Choice> choices;
private List<String> displayOrder; //Contains one-character representations of each type to display
//True if at least one page references it; can be used as a flag for isolated pages
//Will have to be controlled from the story object
private boolean isLinkedTo;
//private Annotation annotations;
public Fragment() {
this.title = "Choose a Title";
this.bodyText = "Story body here.";
this.choices = new LinkedList<Choice>();
this.choices.add(new Choice()); //Add one default choice to start
//true because it is only possible to create a new page from a parent page
//or if it is the first page in a story
isLinkedTo = true;
}
public String getTitle() {
return this.title;
}
public void setTitle(String newTitle) {
this.title = newTitle;
}
public String getBodyText() {
return bodyText;
}
public void setBodyText(String bodyText) {
this.bodyText = bodyText;
}
public List<Drawable> getIllustrations() {
return illustrations;
}
public void setIllustrations(LinkedList<Drawable> illustrations) {
this.illustrations = illustrations;
}
// public LinkedList<Sound> getSounds() {
// return sounds;
// public void setSounds(LinkedList<Sound> sounds) {
// this.sounds = sounds;
// public LinkedList<Video> getVideos() {
// return videos;
// public void setVideos(LinkedList<Video> videos) {
// this.videos = videos;
public List<Choice> getChoices() {
return choices;
}
public void setChoices(LinkedList<Choice> newChoices) {
this.choices = newChoices;
}
public void removeChoice(Choice oldChoice) {
this.choices.remove(oldChoice);
}
public List<String> getDisplayOrder() {
return displayOrder;
}
public void setDisplayOrder(LinkedList<String> displayOrder) {
this.displayOrder = displayOrder;
}
public boolean isLinkedTo() {
return isLinkedTo;
}
public void setLinkedTo(boolean isLinkedTo) {
this.isLinkedTo = isLinkedTo;
}
// public Annotation getAnnotations() {
// return annotations;
// public void setAnnotations(Annotation annotations) {
// this.annotations = annotations;
}
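// Editor's note: a minimal usage sketch (not part of the original file) showing how a story page is
// assembled; Choice is assumed to have the no-argument constructor already used above, and the class
// name here is hypothetical.
class FragmentUsageSketch {
    static Fragment buildIntroPage() {
        Fragment page = new Fragment();      // starts with one default Choice
        page.setTitle("The Dark Cave");
        page.setBodyText("You stand at the mouth of a cave. Do you enter?");
        page.getChoices().add(new Choice()); // add a second branch
        return page;
    }
}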
|
// Copyright (c) David J. Pearce. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above copyright
//       notice, this list of conditions and the following disclaimer in the
//       documentation and/or other materials provided with the distribution.
//     * Neither the name of the <organization> nor the
//       names of its contributors may be used to endorse or promote products
//       derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL DAVID J. PEARCE BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package wyil.checks;
import java.util.*;
import wybs.lang.Builder;
import wycc.lang.Transform;
import wycc.util.Pair;
import wyfs.lang.Path;
import wyil.util.*;
import wyil.util.dfa.*;
import wyil.lang.*;
import static wycc.lang.SyntaxError.*;
import static wyil.lang.Code.Block.*;
import static wyil.util.ErrorMessages.*;
/**
* <p>
* The purpose of this class is to check that all variables are defined before
* being used. For example:
* </p>
*
* <pre>
* int f() {
* int z;
* return z + 1;
* }
* </pre>
*
* <p>
* In the above example, variable z is used in the return statement before it
* has been assigned any value. This is considered a syntax error in Whiley.
* </p>
* @author David J. Pearce
*
*/
public class DefiniteAssignmentCheck extends
ForwardFlowAnalysis<HashSet<Integer>> implements Transform<WyilFile> {
public DefiniteAssignmentCheck(Builder builder) {
}
@Override
public HashSet<Integer> initialStore() {
HashSet<Integer> defined = new HashSet<Integer>();
int diff = 0;
for(int i=0;i!=method.type().params().size();++i) {
defined.add(i+diff);
}
return defined;
}
@Override
public HashSet<Integer> propagate(int idx, Entry entry, HashSet<Integer> in) {
Code code = entry.code;
checkUses(code,entry,in);
int def = defs(code,entry);
if(def >= 0) {
in = new HashSet<Integer>(in);
in.add(def);
}
return in;
}
@Override
public Pair<HashSet<Integer>, HashSet<Integer>> propagate(int index,
Codes.If igoto, Entry entry, HashSet<Integer> in) {
if (!in.contains(igoto.leftOperand) || !in.contains(igoto.rightOperand)) {
syntaxError(errorMessage(VARIABLE_POSSIBLY_UNITIALISED), filename,
entry);
}
return new Pair<HashSet<Integer>, HashSet<Integer>>(in, in);
}
@Override
public Pair<HashSet<Integer>, HashSet<Integer>> propagate(int index,
Codes.IfIs iftype, Entry entry, HashSet<Integer> in) {
if (!in.contains(iftype.operand)) {
syntaxError(errorMessage(VARIABLE_POSSIBLY_UNITIALISED), filename,
entry);
}
return new Pair<HashSet<Integer>, HashSet<Integer>>(in, in);
}
@Override
public List<HashSet<Integer>> propagate(int index, Codes.Switch sw,
Entry entry, HashSet<Integer> in) {
if (!in.contains(sw.operand)) {
syntaxError(errorMessage(VARIABLE_POSSIBLY_UNITIALISED), filename,
entry);
}
ArrayList<HashSet<Integer>> stores = new ArrayList<HashSet<Integer>>();
for (int i = 0; i != sw.branches.size(); ++i) {
stores.add(in);
}
return stores;
}
@Override
public HashSet<Integer> propagate(Type handler, Codes.TryCatch tc, HashSet<Integer> in) {
in = new HashSet<Integer>(in);
in.add(tc.operand);
return in;
}
@Override
public HashSet<Integer> propagate(int start, int end, Codes.Loop loop,
Entry entry, HashSet<Integer> in, List<Codes.TryCatch> handlers) {
if (loop instanceof Codes.ForAll) {
Codes.ForAll fall = (Codes.ForAll) loop;
if (!in.contains(fall.sourceOperand)) {
syntaxError(errorMessage(VARIABLE_POSSIBLY_UNITIALISED),
filename, entry);
}
in = new HashSet<Integer>(in);
in.add(fall.indexOperand);
}
HashSet<Integer> r = propagate(start + 1, end, in, handlers);
return join(in, r);
}
protected HashSet<Integer> join(HashSet<Integer> s1, HashSet<Integer> s2) {
HashSet<Integer> r = new HashSet<Integer>();
// set intersection
for (Integer s : s1) {
if (s2.contains(s)) {
r.add(s);
}
}
return r;
}
public void checkUses(Code code, Entry entry, HashSet<Integer> in) {
if(code instanceof Code.AbstractUnaryOp) {
Code.AbstractUnaryOp a = (Code.AbstractUnaryOp) code;
if(a.operand == Codes.NULL_REG || in.contains(a.operand)) {
return;
}
} else if(code instanceof Code.AbstractBinaryOp) {
Code.AbstractBinaryOp a = (Code.AbstractBinaryOp) code;
if (in.contains(a.leftOperand) && in.contains(a.rightOperand)) {
return;
}
} else if(code instanceof Code.AbstractNaryAssignable) {
Code.AbstractNaryAssignable a = (Code.AbstractNaryAssignable) code;
for(int operand : a.operands()) {
if(operand != Codes.NULL_REG && !in.contains(operand)) {
syntaxError(errorMessage(VARIABLE_POSSIBLY_UNITIALISED),
filename, entry);
}
}
if(code instanceof Codes.Update && !in.contains(a.target())) {
// In this case, we are assigning to an index or field.
// Therefore, the target register must already be defined.
syntaxError(errorMessage(VARIABLE_POSSIBLY_UNITIALISED),
filename, entry);
}
return;
} else {
// includes abstract-assignables and branching bytecodes
return;
}
syntaxError(errorMessage(VARIABLE_POSSIBLY_UNITIALISED),
filename, entry);
}
public int defs(Code code, Entry entry) {
if (code instanceof Code.AbstractAssignable) {
Code.AbstractAssignable aa = (Code.AbstractAssignable) code;
return aa.target();
}
return Codes.NULL_REG;
}
}
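// Editor's note: a self-contained sketch (not part of the original checker) of the join used above:
// at a control-flow merge a register counts as definitely assigned only if it is assigned on every
// incoming path, i.e. the incoming stores are intersected. The class name is hypothetical.
class DefiniteAssignmentJoinSketch {
    public static void main(String[] args) {
        HashSet<Integer> thenBranch = new HashSet<Integer>(Arrays.asList(0, 1, 2));
        HashSet<Integer> elseBranch = new HashSet<Integer>(Arrays.asList(0, 2, 3));
        HashSet<Integer> merged = new HashSet<Integer>(thenBranch);
        merged.retainAll(elseBranch); // registers 0 and 2 are definitely assigned after the merge
        System.out.println(merged);   // the intersection: 0 and 2
    }
}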
|
package de.danoeh.antennapod.core.util;
import android.content.ContentResolver;
import android.content.Context;
import android.net.Uri;
import androidx.annotation.NonNull;
import android.util.Log;
import org.apache.commons.io.IOUtils;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Collections;
import java.util.List;
import de.danoeh.antennapod.core.feed.Chapter;
import de.danoeh.antennapod.core.util.comparator.ChapterStartTimeComparator;
import de.danoeh.antennapod.core.util.id3reader.ChapterReader;
import de.danoeh.antennapod.core.util.id3reader.ID3ReaderException;
import de.danoeh.antennapod.core.util.playback.Playable;
import de.danoeh.antennapod.core.util.vorbiscommentreader.VorbisCommentChapterReader;
import de.danoeh.antennapod.core.util.vorbiscommentreader.VorbisCommentReaderException;
import org.apache.commons.io.input.CountingInputStream;
/**
* Utility class for getting chapter data from media files.
*/
public class ChapterUtils {
private static final String TAG = "ChapterUtils";
private ChapterUtils() {
}
public static int getCurrentChapterIndex(Playable media, int position) {
if (media == null || media.getChapters() == null || media.getChapters().size() == 0) {
return -1;
}
List<Chapter> chapters = media.getChapters();
for (int i = 0; i < chapters.size(); i++) {
if (chapters.get(i).getStart() > position) {
return i - 1;
}
}
return chapters.size() - 1;
}
public static void loadChaptersFromStreamUrl(Playable media, Context context) {
ChapterUtils.readID3ChaptersFromPlayableStreamUrl(media, context);
if (media.getChapters() == null) {
ChapterUtils.readOggChaptersFromPlayableStreamUrl(media, context);
}
}
public static void loadChaptersFromFileUrl(Playable media) {
if (!media.localFileAvailable()) {
Log.e(TAG, "Could not load chapters from file url: local file not available");
return;
}
ChapterUtils.readID3ChaptersFromPlayableFileUrl(media);
if (media.getChapters() == null) {
ChapterUtils.readOggChaptersFromPlayableFileUrl(media);
}
}
/**
* Uses the download URL of a media object of a feeditem to read its ID3
* chapters.
*/
private static void readID3ChaptersFromPlayableStreamUrl(Playable p, Context context) {
if (p == null || p.getStreamUrl() == null) {
Log.e(TAG, "Unable to read ID3 chapters: media or download URL was null");
return;
}
Log.d(TAG, "Reading id3 chapters from item " + p.getEpisodeTitle());
CountingInputStream in = null;
try {
if (p.getStreamUrl().startsWith(ContentResolver.SCHEME_CONTENT)) {
Uri uri = Uri.parse(p.getStreamUrl());
in = new CountingInputStream(context.getContentResolver().openInputStream(uri));
} else {
URL url = new URL(p.getStreamUrl());
in = new CountingInputStream(url.openStream());
}
List<Chapter> chapters = readChaptersFrom(in);
if (!chapters.isEmpty()) {
p.setChapters(chapters);
}
Log.i(TAG, "Chapters loaded");
} catch (IOException | ID3ReaderException | IllegalArgumentException e) {
Log.e(TAG, Log.getStackTraceString(e));
} finally {
IOUtils.closeQuietly(in);
}
}
/**
* Uses the file URL of a media object of a feeditem to read its ID3
* chapters.
*/
private static void readID3ChaptersFromPlayableFileUrl(Playable p) {
if (p == null || !p.localFileAvailable() || p.getLocalMediaUrl() == null) {
return;
}
Log.d(TAG, "Reading id3 chapters from item " + p.getEpisodeTitle());
File source = new File(p.getLocalMediaUrl());
if (!source.exists()) {
Log.e(TAG, "Unable to read id3 chapters: Source doesn't exist");
return;
}
CountingInputStream in = null;
try {
in = new CountingInputStream(new BufferedInputStream(new FileInputStream(source)));
List<Chapter> chapters = readChaptersFrom(in);
if (!chapters.isEmpty()) {
p.setChapters(chapters);
}
Log.i(TAG, "Chapters loaded");
} catch (IOException | ID3ReaderException e) {
Log.e(TAG, Log.getStackTraceString(e));
} finally {
IOUtils.closeQuietly(in);
}
}
@NonNull
private static List<Chapter> readChaptersFrom(CountingInputStream in) throws IOException, ID3ReaderException {
ChapterReader reader = new ChapterReader();
reader.readInputStream(in);
List<Chapter> chapters = reader.getChapters();
if (chapters == null) {
Log.i(TAG, "ChapterReader could not find any ID3 chapters");
return Collections.emptyList();
}
Collections.sort(chapters, new ChapterStartTimeComparator());
enumerateEmptyChapterTitles(chapters);
if (!chaptersValid(chapters)) {
Log.e(TAG, "Chapter data was invalid");
return Collections.emptyList();
}
return chapters;
}
private static void readOggChaptersFromPlayableStreamUrl(Playable media, Context context) {
if (media == null || !media.streamAvailable()) {
return;
}
InputStream input = null;
try {
if (media.getStreamUrl().startsWith(ContentResolver.SCHEME_CONTENT)) {
Uri uri = Uri.parse(media.getStreamUrl());
input = context.getContentResolver().openInputStream(uri);
} else {
URL url = new URL(media.getStreamUrl());
input = url.openStream();
}
if (input != null) {
readOggChaptersFromInputStream(media, input);
}
} catch (IOException | IllegalArgumentException e) {
Log.e(TAG, Log.getStackTraceString(e));
} finally {
IOUtils.closeQuietly(input);
}
}
private static void readOggChaptersFromPlayableFileUrl(Playable media) {
if (media == null || media.getLocalMediaUrl() == null) {
return;
}
File source = new File(media.getLocalMediaUrl());
if (source.exists()) {
InputStream input = null;
try {
input = new BufferedInputStream(new FileInputStream(source));
readOggChaptersFromInputStream(media, input);
} catch (FileNotFoundException e) {
Log.e(TAG, Log.getStackTraceString(e));
} finally {
IOUtils.closeQuietly(input);
}
}
}
private static void readOggChaptersFromInputStream(Playable p, InputStream input) {
Log.d(TAG, "Trying to read chapters from item with title " + p.getEpisodeTitle());
try {
VorbisCommentChapterReader reader = new VorbisCommentChapterReader();
reader.readInputStream(input);
List<Chapter> chapters = reader.getChapters();
if (chapters == null) {
Log.i(TAG, "ChapterReader could not find any Ogg vorbis chapters");
return;
}
Collections.sort(chapters, new ChapterStartTimeComparator());
enumerateEmptyChapterTitles(chapters);
if (chaptersValid(chapters)) {
p.setChapters(chapters);
Log.i(TAG, "Chapters loaded");
} else {
Log.e(TAG, "Chapter data was invalid");
}
} catch (VorbisCommentReaderException e) {
e.printStackTrace();
}
}
/**
* Makes sure that every chapter has a title: chapters without one get their index as the title.
*/
private static void enumerateEmptyChapterTitles(List<Chapter> chapters) {
for (int i = 0; i < chapters.size(); i++) {
Chapter c = chapters.get(i);
if (c.getTitle() == null) {
c.setTitle(Integer.toString(i));
}
}
}
private static boolean chaptersValid(List<Chapter> chapters) {
if (chapters.isEmpty()) {
return false;
}
for (Chapter c : chapters) {
if (c.getStart() < 0) {
return false;
}
}
return true;
}
}
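// Editor's note: a self-contained sketch (not part of the original class) of the lookup rule in
// getCurrentChapterIndex() above: the current chapter is the last one whose start time is not after
// the playback position. The class and method names are hypothetical.
class ChapterIndexSketch {
    static int indexFor(long[] chapterStarts, long position) {
        for (int i = 0; i < chapterStarts.length; i++) {
            if (chapterStarts[i] > position) {
                return i - 1;
            }
        }
        return chapterStarts.length - 1;
    }
    public static void main(String[] args) {
        long[] starts = {0, 60_000, 180_000};          // chapter start times in milliseconds
        System.out.println(indexFor(starts, 90_000));  // 1: inside the second chapter
        System.out.println(indexFor(starts, 200_000)); // 2: past the last chapter start
    }
}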
|
package lucee.commons.io.res.type.file;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.CopyOption;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.List;
import lucee.commons.cli.Command;
import lucee.commons.io.IOUtil;
import lucee.commons.io.ModeUtil;
import lucee.commons.io.SystemUtil;
import lucee.commons.io.res.ContentType;
import lucee.commons.io.res.Resource;
import lucee.commons.io.res.ResourceProvider;
import lucee.commons.io.res.filter.ResourceFilter;
import lucee.commons.io.res.filter.ResourceNameFilter;
import lucee.commons.io.res.util.ResourceOutputStream;
import lucee.commons.io.res.util.ResourceUtil;
import lucee.commons.lang.ExceptionUtil;
/**
* Implementation of Resource for the local filesystem (java.io.File)
*/
public final class FileResource extends File implements Resource {
private static final long serialVersionUID = -6856656594615376447L;
private static final CopyOption[] COPY_OPTIONS = new CopyOption[] { StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.COPY_ATTRIBUTES };
private final FileResourceProvider provider;
/**
* Constructor for the factory
*
* @param pathname
*/
FileResource(FileResourceProvider provider, String pathname) {
super(pathname);
this.provider = provider;
}
/**
* Inner constructor to create a parent/child resource
*
* @param parent
* @param child
*/
private FileResource(FileResourceProvider provider, File parent, String child) {
super(parent, child);
this.provider = provider;
}
@Override
public void copyFrom(Resource res, boolean append) throws IOException {
if (res instanceof File && (!append || !this.isFile())) {
Files.copy(((File) res).toPath(), this.toPath(), COPY_OPTIONS);
return;
}
IOUtil.copy(res, this.getOutputStream(append), true);
// executable?
boolean e = res instanceof File && ((File) res).canExecute();
boolean w = res.canWrite();
boolean r = res.canRead();
if (e) this.setExecutable(true);
if (w != this.canWrite()) this.setWritable(w);
if (r != this.canRead()) this.setReadable(r);
}
@Override
public void copyTo(Resource res, boolean append) throws IOException {
if (res instanceof File && (!append || !res.isFile())) {
Files.copy(this.toPath(), ((File) res).toPath(), COPY_OPTIONS);
return;
}
IOUtil.copy(this, res.getOutputStream(append), true);
boolean e = canExecute();
boolean w = canWrite();
boolean r = canRead();
if (e && res instanceof File) ((File) res).setExecutable(true);
if (w != res.canWrite()) res.setWritable(w);
if (r != res.canRead()) res.setReadable(r);
}
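	// Editor's note: a self-contained sketch (not part of the original class) of the NIO fast path used
	// by copyFrom()/copyTo() above when both resources are plain java.io.File instances and no append is
	// requested; the class name and paths below are placeholders.
	private static final class NioCopySketch {
		static void copy() throws IOException {
			File src = new File("/tmp/source.txt");
			File dst = new File("/tmp/target.txt");
			Files.copy(src.toPath(), dst.toPath(),
					StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.COPY_ATTRIBUTES);
		}
	}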
@Override
public Resource getAbsoluteResource() {
return new FileResource(provider, getAbsolutePath());
}
@Override
public Resource getCanonicalResource() throws IOException {
return new FileResource(provider, getCanonicalPath());
}
@Override
public Resource getParentResource() {
String p = getParent();
if (p == null) return null;
return new FileResource(provider, p);
}
@Override
public Resource[] listResources() {
String[] files = list();
if (files == null) return null;
Resource[] resources = new Resource[files.length];
for (int i = 0; i < files.length; i++) {
resources[i] = getRealResource(files[i]);
}
return resources;
}
@Override
public String[] list(ResourceFilter filter) {
String[] files = list();
if (files == null) return null;
List list = new ArrayList();
FileResource res;
for (int i = 0; i < files.length; i++) {
res = new FileResource(provider, this, files[i]);
if (filter.accept(res)) list.add(files[i]);
}
return (String[]) list.toArray(new String[list.size()]);
}
@Override
public Resource[] listResources(ResourceFilter filter) {
String[] files = list();
if (files == null) return null;
List list = new ArrayList();
Resource res;
for (int i = 0; i < files.length; i++) {
res = getRealResource(files[i]);
if (filter.accept(res)) list.add(res);
}
return (Resource[]) list.toArray(new FileResource[list.size()]);
}
@Override
public String[] list(ResourceNameFilter filter) {
String[] files = list();
if (files == null) return null;
List list = new ArrayList();
for (int i = 0; i < files.length; i++) {
if (filter.accept(this, files[i])) list.add(files[i]);
}
return (String[]) list.toArray(new String[list.size()]);
}
@Override
public Resource[] listResources(ResourceNameFilter filter) {
String[] files = list();
if (files == null) return null;
List list = new ArrayList();
for (int i = 0; i < files.length; i++) {
if (filter.accept(this, files[i])) list.add(getRealResource(files[i]));
}
return (Resource[]) list.toArray(new Resource[list.size()]);
}
@Override
public void moveTo(Resource dest) throws IOException {
if (this.equals(dest)) return;
boolean done = false;
if (dest instanceof File) {
provider.lock(this);
try {
if (dest.exists() && !dest.delete()) throw new IOException("Can't move file [" + this.getAbsolutePath() + "] cannot remove existing file [" + dest.getAbsolutePath() + "]");
done = super.renameTo((File) dest);
/*
* if(!super.renameTo((File)dest)) { throw new
* IOException("can't move file "+this.getAbsolutePath()+" to destination resource "+dest.
* getAbsolutePath()); }
*/
}
finally {
provider.unlock(this);
}
}
if (!done) {
ResourceUtil.checkMoveToOK(this, dest);
IOUtil.copy(getInputStream(), dest, true);
if (!this.delete()) {
throw new IOException("Can't delete resource [" + this.getAbsolutePath() + "]");
}
}
}
@Override
public InputStream getInputStream() throws IOException {
// provider.lock(this);
provider.read(this);
try {
// return new BufferedInputStream(new ResourceInputStream(this,new FileInputStream(this)));
return new BufferedInputStream(new FileInputStream(this));
}
catch (IOException ioe) {
// provider.unlock(this);
throw ioe;
}
}
@Override
public OutputStream getOutputStream() throws IOException {
return getOutputStream(false);
}
@Override
public OutputStream getOutputStream(boolean append) throws IOException {
provider.lock(this);
try {
if (!super.exists() && !super.createNewFile()) {
throw new IOException("Can't create file [" + this + "]");
}
return new BufferedOutputStream(new ResourceOutputStream(this, new FileOutputStream(this, append)));
}
catch (IOException ioe) {
provider.unlock(this);
throw ioe;
}
}
@Override
public void createFile(boolean createParentWhenNotExists) throws IOException {
provider.lock(this);
try {
if (createParentWhenNotExists) {
File p = super.getParentFile();
if (!p.exists()) p.mkdirs();
}
if (!super.createNewFile()) {
if (super.isFile()) throw new IOException("Can't create file [" + this + "], file already exists");
throw new IOException("Can't create file [" + this + "]");
}
}
finally {
provider.unlock(this);
}
}
@Override
public void remove(boolean alsoRemoveChildren) throws IOException {
if (alsoRemoveChildren && isDirectory()) {
Resource[] children = listResources();
for (int i = 0; i < children.length; i++) {
children[i].remove(alsoRemoveChildren);
}
}
provider.lock(this);
try {
if (!super.delete()) {
if (!super.exists()) throw new IOException("Can't delete file [" + this + "], file does not exist");
if (!super.canWrite()) throw new IOException("Can't delete file [" + this + "], no access");
throw new IOException("Can't delete file [" + this + "]");
}
}
finally {
provider.unlock(this);
}
}
@Override
public String getReal(String realpath) {
if (realpath.length() <= 2) {
if (realpath.length() == 0) return getPath();
if (realpath.equals(".")) return getPath();
if (realpath.equals("..")) return getParent();
}
return new FileResource(provider, this, realpath).getPath();
}
@Override
public Resource getRealResource(String realpath) {
if (realpath.length() <= 2) {
if (realpath.length() == 0) return this;
if (realpath.equals(".")) return this;
if (realpath.equals("..")) return getParentResource();
}
return new FileResource(provider, this, realpath);
}
public ContentType getContentType() {
return ResourceUtil.getContentType(this);
}
@Override
public void createDirectory(boolean createParentWhenNotExists) throws IOException {
provider.lock(this);
try {
if (createParentWhenNotExists ? !_mkdirs() : !super.mkdir()) {
if (super.isDirectory()) throw new IOException("Can't create directory [" + this + "], directory already exists");
throw new IOException("Can't create directory [" + this + "]");
}
}
finally {
provider.unlock(this);
}
}
@Override
public ResourceProvider getResourceProvider() {
return provider;
}
@Override
public boolean isReadable() {
return canRead();
}
@Override
public boolean isWriteable() {
return canWrite();
}
@Override
public boolean renameTo(Resource dest) {
try {
moveTo(dest);
return true;
}
catch (IOException e) {}
return false;
}
@Override
public boolean isArchive() {
return getAttribute(ATTRIBUTE_ARCHIVE);
}
@Override
public boolean isSystem() {
return getAttribute(ATTRIBUTE_SYSTEM);
}
@Override
public int getMode() {
if (!exists()) return 0;
if (SystemUtil.isUnix()) {
try {
// TODO: only works for files
String line = Command.execute("ls -ld " + getPath(), false).getOutput();
line = line.trim();
line = line.substring(0, line.indexOf(' '));
// print.ln(getPath());
return ModeUtil.toOctalMode(line);
}
catch (Exception e) {}
}
int mode = SystemUtil.isWindows() && exists() ? 0111 : 0;
if (super.canRead()) mode += 0444;
if (super.canWrite()) mode += 0222;
return mode;
}
@Override
public void setMode(int mode) throws IOException {
// TODO: on Windows, use setReadable() etc.
if (!SystemUtil.isUnix()) return;
provider.lock(this);
try {
// print.ln(ModeUtil.toStringMode(mode));
if (Runtime.getRuntime().exec(new String[] { "chmod", ModeUtil.toStringMode(mode), getPath() }).waitFor() != 0)
throw new IOException("chmod [" + ModeUtil.toStringMode(mode) + "] [" + toString() + "] failed");
}
catch (InterruptedException e) {
throw new IOException("Interrupted waiting for chmod [" + toString() + "]");
}
finally {
provider.unlock(this);
}
}
@Override
public void setArchive(boolean value) throws IOException {
setAttribute(ATTRIBUTE_ARCHIVE, value);
}
@Override
public void setHidden(boolean value) throws IOException {
setAttribute(ATTRIBUTE_HIDDEN, value);
}
@Override
public void setSystem(boolean value) throws IOException {
setAttribute(ATTRIBUTE_SYSTEM, value);
}
@Override
public boolean setReadable(boolean value) {
if (!SystemUtil.isUnix()) return false;
try {
setMode(ModeUtil.setReadable(getMode(), value));
return true;
}
catch (IOException e) {
return false;
}
}
@Override
public boolean setWritable(boolean value) {
// setReadonly
if (!value) {
try {
provider.lock(this);
if (!super.setReadOnly()) throw new IOException("Can't set resource read-only");
}
catch (IOException ioe) {
return false;
}
finally {
provider.unlock(this);
}
return true;
}
if (SystemUtil.isUnix()) {
// no lock needed because getMode()/setMode() take one
try {
setMode(ModeUtil.setWritable(getMode(), value));
}
catch (IOException e) {
return false;
}
return true;
}
try {
provider.lock(this);
Runtime.getRuntime().exec("attrib -R " + getAbsolutePath());
}
catch (IOException ioe) {
return false;
}
finally {
provider.unlock(this);
}
return true;
}
@Override
public boolean createNewFile() {
try {
provider.lock(this);
return super.createNewFile();
}
catch (IOException e) {
return false;
}
finally {
provider.unlock(this);
}
}
@Override
public boolean canRead() {
try {
provider.read(this);
}
catch (IOException e) {
return false;
}
return super.canRead();
}
@Override
public boolean canWrite() {
try {
provider.read(this);
}
catch (IOException e) {
return false;
}
return super.canWrite();
}
@Override
public boolean delete() {
try {
provider.lock(this);
return super.delete();
}
catch (IOException e) {
return false;
}
finally {
provider.unlock(this);
}
}
@Override
public boolean exists() {
try {
provider.read(this);
}
catch (IOException e) {}
return super.exists();
}
@Override
public boolean isAbsolute() {
try {
provider.read(this);
}
catch (IOException e) {
return false;
}
return super.isAbsolute();
}
@Override
public boolean isDirectory() {
try {
provider.read(this);
}
catch (IOException e) {
return false;
}
return super.isDirectory();
}
@Override
public boolean isFile() {
try {
provider.read(this);
}
catch (IOException e) {
return false;
}
return super.isFile();
}
@Override
public boolean isHidden() {
try {
provider.read(this);
}
catch (IOException e) {
return false;
}
return super.isHidden();
}
@Override
public long lastModified() {
try {
provider.read(this);
}
catch (IOException e) {
return 0;
}
return super.lastModified();
}
@Override
public long length() {
try {
provider.read(this);
}
catch (IOException e) {
return 0;
}
return super.length();
}
@Override
public String[] list() {
try {
provider.read(this);
}
catch (IOException e) {
return null;
}
return super.list();
}
@Override
public boolean mkdir() {
try {
provider.lock(this);
return super.mkdir();
}
catch (IOException e) {
return false;
}
finally {
provider.unlock(this);
}
}
@Override
public boolean mkdirs() {
try {
provider.lock(this);
return _mkdirs();
}
catch (IOException e) {
return false;
}
finally {
provider.unlock(this);
}
}
private boolean _mkdirs() {
if (super.exists()) return false;
if (super.mkdir()) return true;
File parent = super.getParentFile();
return (parent != null) && (parent.mkdirs() && super.mkdir());
}
@Override
public boolean setLastModified(long time) {
try {
provider.lock(this);
return super.setLastModified(time);
}
catch (Throwable t) {
ExceptionUtil.rethrowIfNecessary(t);
return false;
}
finally {
provider.unlock(this);
}
}
@Override
public boolean setReadOnly() {
try {
provider.lock(this);
return super.setReadOnly();
}
catch (IOException e) {
return false;
}
finally {
provider.unlock(this);
}
}
@Override
public boolean getAttribute(short attribute) {
if (!SystemUtil.isWindows()) return false;
String attr = null;
if (attribute == ATTRIBUTE_ARCHIVE) attr = "A";
else if (attribute == ATTRIBUTE_HIDDEN) attr = "H";
else if (attribute == ATTRIBUTE_SYSTEM) attr = "S";
try {
provider.lock(this);
String result = Command.execute("attrib " + getAbsolutePath(), false).getOutput();
String[] arr = lucee.runtime.type.util.ListUtil.listToStringArray(result, ' ');
for (int i = 0; i < arr.length; i++) {
if (attr.equals(arr[i])) return true;
}
}
catch (Exception e) {}
finally {
provider.unlock(this);
}
return false;
}
@Override
public void setAttribute(short attribute, boolean value) throws IOException {
String attr = null;
if (attribute == ATTRIBUTE_ARCHIVE) attr = "A";
else if (attribute == ATTRIBUTE_HIDDEN) attr = "H";
else if (attribute == ATTRIBUTE_SYSTEM) attr = "S";
if (!SystemUtil.isWindows()) return;
provider.lock(this);
try {
Runtime.getRuntime().exec("attrib " + (value ? "+" : "-") + attr + " " + getAbsolutePath());
}
finally {
provider.unlock(this);
}
}
@Override
public boolean equals(Object other) {
if (provider.isCaseSensitive()) return super.equals(other);
if (!(other instanceof File)) return false;
return getAbsolutePath().equalsIgnoreCase(((File) other).getAbsolutePath());
}
}
|
package com.sometrik.framework;
import android.app.Dialog;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.graphics.Typeface;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.GradientDrawable;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.shapes.RoundRectShape;
import android.text.TextUtils.TruncateAt;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.widget.EditText;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.ScrollView;
import android.widget.TextView;
class ViewStyleManager {
private enum WhiteSpace { NORMAL, NOWRAP };
private enum HorizontalAlignment { LEFT, CENTER, RIGHT };
private enum TextOverflow { CLIP, ELLIPSIS };
private enum FontStyle { NORMAL, ITALIC, OBLIQUE };
private BitmapCache bitmapCache;
private float displayScale = 1.0f;
private float[] padding = null;
private float[] margin = null;
private Integer width = null, height = null;
private Float opacity = null;
private Integer leftPosition = null, topPosition = null;
private Integer rightPosition = null, bottomPosition = null;
private int minWidth = 0, minHeight = 0;
private String title = null;
private Integer weight = null;
private Integer backgroundColor = null;
private Integer color = null;
private Integer gravity = null;
private Float zoom = null;
private Integer shadow = null;
private float[] borderRadius = null;
private Integer borderWidth = null, borderColor = null;
private Integer fontSize = null;
private WhiteSpace whiteSpace = null;
private HorizontalAlignment textAlign = null;
private TextOverflow textOverflow = null;
private Integer fontWeight = null;
private FontStyle fontStyle = null;
private String fontFamily = null;
private String hint = null;
private String iconFile = null;
public ViewStyleManager(BitmapCache bitmapCache, float displayScale, boolean isDefault) {
this.bitmapCache = bitmapCache;
this.displayScale = displayScale;
if (isDefault) setDefaults();
}
private void initPadding() {
padding = new float[4];
padding[0] = padding[1] = padding[2] = padding[3] = 0.0f;
}
private void initMargin() {
margin = new float[4];
margin[0] = margin[1] = margin[2] = margin[3] = 0.0f;
}
public void setDefaults() {
zoom = new Float(1.0);
opacity = new Float(1.0);
shadow = new Integer(0);
color = new Integer(Color.parseColor("#000000"));
backgroundColor = new Integer(0);
fontWeight = new Integer(400);
iconFile = "";
initPadding();
initMargin();
}
public void setStyle(String key, String value) {
if (key.equals("padding")) {
padding = parseFloatArray(value, 4);
} else if (key.equals("padding-top")) {
if (padding == null) initPadding();
padding[0] = Float.parseFloat(value);
} else if (key.equals("padding-right")) {
if (padding == null) initPadding();
padding[1] = Float.parseFloat(value);
} else if (key.equals("padding-bottom")) {
if (padding == null) initPadding();
padding[2] = Float.parseFloat(value);
} else if (key.equals("padding-left")) {
if (padding == null) initPadding();
padding[3] = Float.parseFloat(value);
} else if (key.equals("margin")) {
margin = parseFloatArray(value, 4);
} else if (key.equals("margin-top")) {
if (margin == null) initMargin();
margin[0] = Float.parseFloat(value);
} else if (key.equals("margin-right")) {
if (margin == null) initMargin();
margin[1] = Float.parseFloat(value);
} else if (key.equals("margin-bottom")) {
if (margin == null) initMargin();
margin[2] = Float.parseFloat(value);
} else if (key.equals("margin-left")) {
if (margin == null) initMargin();
margin[3] = Float.parseFloat(value);
} else if (key.equals("weight")) {
weight = new Integer(value);
} else if (key.equals("opacity")) {
opacity = new Float(value);
} else if (key.equals("text-shadow")) {
} else if (key.equals("box-shadow")) {
} else if (key.equals("shadow")) {
shadow = new Integer(value);
} else if (key.equals("left")) {
leftPosition = new Integer(value);
} else if (key.equals("top")) {
topPosition = new Integer(value);
} else if (key.equals("right")) {
rightPosition = new Integer(value);
} else if (key.equals("bottom")) {
bottomPosition = new Integer(value);
} else if (key.equals("min-width")) {
minWidth = Integer.parseInt(value);
} else if (key.equals("min-height")) {
minHeight = Integer.parseInt(value);
} else if (key.equals("title")) {
title = value;
} else if (key.equals("width")) {
if (value.equals("wrap-content")) {
width = new Integer(LayoutParams.WRAP_CONTENT);
} else if (value.equals("match-parent")) {
width = new Integer(LayoutParams.MATCH_PARENT);
} else {
width = new Integer(value);
}
} else if (key.equals("height")) {
if (value.equals("wrap-content")) {
height = new Integer(LayoutParams.WRAP_CONTENT);
} else if (value.equals("match-parent")) {
height = new Integer(LayoutParams.MATCH_PARENT);
} else {
height = new Integer(value);
}
} else if (key.equals("background-color")) {
backgroundColor = new Integer(Color.parseColor(value));
} else if (key.equals("color")) {
color = new Integer(Color.parseColor(value));
} else if (key.equals("gravity")) {
if (value.equals("bottom")) {
gravity = new Integer(Gravity.BOTTOM);
} else if (value.equals("top")) {
gravity = new Integer(Gravity.TOP);
} else if (value.equals("left")) {
gravity = new Integer(Gravity.LEFT);
} else if (value.equals("right")) {
gravity = new Integer(Gravity.RIGHT);
} else if (value.equals("center")) {
gravity = new Integer(Gravity.CENTER);
} else if (value.equals("center-vertical")) {
gravity = new Integer(Gravity.CENTER_VERTICAL);
} else if (value.equals("center-horizontal")) {
gravity = new Integer(Gravity.CENTER_HORIZONTAL);
}
} else if (key.equals("zoom")) {
if (value.equals("inherit")) {
zoom = null;
} else {
zoom = new Float(value);
}
} else if (key.equals("border")) {
if (value.equals("none")) {
borderWidth = new Integer(0);
} else {
borderWidth = new Integer(1);
borderColor = new Integer(Color.parseColor(value));
}
} else if (key.equals("border-radius")) {
borderRadius = parseFloatArray(value, 4);
} else if (key.equals("font-size")) {
if (value.equals("small")){
fontSize = new Integer(9);
} else if (value.equals("medium")){
fontSize = new Integer(12);
} else if (value.equals("large")){
fontSize = new Integer(15);
} else {
fontSize = new Integer(value);
}
} else if (key.equals("white-space")) {
if (value.equals("normal")) whiteSpace = WhiteSpace.NORMAL;
else if (value.equals("nowrap")) whiteSpace = WhiteSpace.NOWRAP;
} else if (key.equals("text-overflow")) {
if (value.equals("ellipsis")) {
textOverflow = TextOverflow.ELLIPSIS;
}
} else if (key.equals("font-weight")) {
if (value.equals("normal")) {
fontWeight = new Integer(400);
} else if (value.equals("bold")) {
fontWeight = new Integer(700);
} else {
fontWeight = new Integer(value);
}
} else if (key.equals("font-style")) {
if (value.equals("italic")) {
fontStyle = FontStyle.ITALIC;
} else if (value.equals("oblique")) {
fontStyle = FontStyle.OBLIQUE;
} else {
fontStyle = FontStyle.NORMAL;
}
} else if (key.equals("text-align")) {
if (value.equals("left")) {
textAlign = HorizontalAlignment.LEFT;
} else if (value.equals("center")) {
textAlign = HorizontalAlignment.CENTER;
} else if (value.equals("right")) {
textAlign = HorizontalAlignment.RIGHT;
}
} else if (key.equals("font-family")) {
fontFamily = value;
} else if (key.equals("hint")) {
hint = value;
} else if (key.equals("icon-attachment")) {
// right, top, bottom, left
} else if (key.equals("icon")) {
if (value.equals("none")) {
iconFile = "";
} else {
iconFile = value;
}
}
}
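// Editor's note: a minimal usage sketch (not part of the original class) of the intended call pattern:
// style keys are collected with setStyle(key, value) and then pushed onto a view with apply(). The
// method name is hypothetical, the BitmapCache/TextView arguments are assumed to come from the caller,
// and the colour and size values are placeholders.
static void usageSketch(BitmapCache bitmapCache, float displayScale, TextView textView) {
    ViewStyleManager style = new ViewStyleManager(bitmapCache, displayScale, true);
    style.setStyle("background-color", "#3366cc");
    style.setStyle("color", "#ffffff");
    style.setStyle("font-size", "large");
    style.setStyle("white-space", "nowrap");
    style.apply(textView);
}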
public void apply(Dialog dialog) {
if (width != null || height != null) {
ViewGroup.LayoutParams params = dialog.getWindow().getAttributes();
if (width != null) params.width = applyScale(width);
if (height != null) params.height = applyScale(height);
dialog.getWindow().setAttributes((android.view.WindowManager.LayoutParams) params);
}
}
public void apply(View view) {
if (opacity != null) view.setAlpha(opacity);
if (zoom != null) {
view.setScaleX(zoom);
view.setScaleY(zoom);
}
if (shadow != null) view.setElevation(shadow);
// if (title != null) view.setTooltipText(title);
// Scaled parameters
if (padding != null) {
view.setPadding((int)applyScale(padding[3]),
(int)applyScale(padding[0]),
(int)applyScale(padding[1]),
(int)applyScale(padding[2]));
}
if (leftPosition != null) view.setLeft(applyScale(leftPosition));
if (rightPosition != null) view.setRight(applyScale(rightPosition));
if (topPosition != null) view.setTop(applyScale(topPosition));
if (bottomPosition != null) view.setBottom(applyScale(bottomPosition));
if (minWidth > 0) view.setMinimumWidth(applyScale(minWidth));
if (minHeight > 0) view.setMinimumHeight(applyScale(minHeight));
if ((borderColor != null && borderWidth != null) || borderRadius != null) {
view.setBackgroundResource(0);
if (borderRadius != null &&
(backgroundColor == null || backgroundColor == 0) &&
(borderWidth == null || borderWidth == 0)) {
if (backgroundColor != null) view.setBackgroundColor(backgroundColor);
RoundRectShape shape = new RoundRectShape(expandRadii(borderRadius), null, null);
ShapeDrawable sd = new ShapeDrawable(shape);
view.setBackground(sd);
} else {
GradientDrawable gd = new GradientDrawable();
if (backgroundColor != null) gd.setColor(backgroundColor);
if (borderRadius != null) {
gd.setCornerRadius(2); // Might be necessary for zero radiuses to work
gd.setCornerRadii(expandRadii(borderRadius));
}
if (borderColor != null && borderWidth != null) {
gd.setStroke(borderWidth, borderColor);
}
view.setBackground(gd);
}
} else if (backgroundColor != null) {
view.setBackgroundColor(backgroundColor);
}
// Layout parameters
if (weight != null || width != null || height != null ||
margin != null || gravity != null) {
if (view.getParent() instanceof ScrollView) { // stupid hack
FrameLayout.LayoutParams params = new FrameLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
view.setLayoutParams(params);
} else if (view.getParent() instanceof LinearLayout) {
LinearLayout.LayoutParams params = (LinearLayout.LayoutParams)view.getLayoutParams();
if (weight != null) params.weight = weight;
if (margin != null) {
params.topMargin = (int)applyScale(margin[0]);
params.rightMargin = (int)applyScale(margin[1]);
params.bottomMargin = (int)applyScale(margin[2]);
params.leftMargin = (int)applyScale(margin[3]);
}
if (width != null) params.width = applyScale(width);
if (height != null) params.height = applyScale(height);
if (gravity != null) params.gravity = gravity;
view.setLayoutParams(params);
} else if (view.getParent() instanceof FrameLayout) {
FrameLayout.LayoutParams params = (FrameLayout.LayoutParams)view.getLayoutParams();
if (margin != null) {
params.topMargin = (int)applyScale(margin[0]);
params.rightMargin = (int)applyScale(margin[1]);
params.bottomMargin = (int)applyScale(margin[2]);
params.leftMargin = (int)applyScale(margin[3]);
}
if (width != null) params.width = applyScale(width);
if (height != null) params.height = applyScale(height);
if (gravity != null) params.gravity = gravity;
view.setLayoutParams(params);
} else if (view.getParent() instanceof RelativeLayout) {
RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams)view.getLayoutParams();
if (margin != null) {
params.topMargin = (int)applyScale(margin[0]);
params.rightMargin = (int)applyScale(margin[1]);
params.bottomMargin = (int)applyScale(margin[2]);
params.leftMargin = (int)applyScale(margin[3]);
}
if (width != null) params.width = applyScale(width);
if (height != null) params.height = applyScale(height);
view.setLayoutParams(params);
} else {
System.out.println("this style cannot be applied to view that doesn't have valid layout as parent");
}
}
if (view instanceof EditText) {
EditText editText = (EditText) view;
if (whiteSpace != null) {
switch (whiteSpace) {
case NORMAL:
editText.setSingleLine(false);
// editText.setInputType(InputType.TYPE_TEXT_FLAG_MULTI_LINE);
break;
case NOWRAP:
editText.setSingleLine(true);
break;
}
}
}
if (view instanceof TextView) { // also Buttons
TextView textView = (TextView)view;
if (color != null) textView.setTextColor(color);
if (fontSize != null) textView.setTextSize(fontSize);
if (whiteSpace != null) {
switch (whiteSpace) {
case NORMAL:
textView.setSingleLine(false);
break;
case NOWRAP:
textView.setSingleLine(true);
break;
}
}
if (textAlign != null) {
switch (textAlign) {
case LEFT:
textView.setTextAlignment(TextView.TEXT_ALIGNMENT_TEXT_START);
break;
case CENTER:
textView.setTextAlignment(TextView.TEXT_ALIGNMENT_CENTER);
break;
case RIGHT:
textView.setTextAlignment(TextView.TEXT_ALIGNMENT_TEXT_END);
break;
}
}
if (textOverflow != null) {
switch (textOverflow) {
case CLIP:
textView.setEllipsize(null);
break;
case ELLIPSIS:
textView.setEllipsize(TruncateAt.END);
break;
}
}
if (fontFamily != null || fontWeight != null || fontStyle != null) {
int flags = 0;
if (fontWeight != null && fontWeight > 550) flags |= Typeface.BOLD;
if (fontStyle != null && (fontStyle == FontStyle.ITALIC || fontStyle == FontStyle.OBLIQUE)) flags |= Typeface.ITALIC;
if (fontFamily != null) {
textView.setTypeface(Typeface.create(fontFamily, flags), flags);
} else {
textView.setTypeface(null, flags);
}
}
if (hint != null) textView.setHint(hint);
if (iconFile != null) {
BitmapDrawable drawable = null;
if (!iconFile.isEmpty()) {
Bitmap bitmap = bitmapCache.loadBitmap(iconFile);
if (bitmap != null) drawable = new BitmapDrawable(bitmap);
}
textView.setCompoundDrawablesWithIntrinsicBounds(null, drawable, null, null);
}
}
}
public void applyLinkColor(View view) {
if (view instanceof TextView) { // also Buttons
TextView textView = (TextView)view;
if (color != null) textView.setLinkTextColor(color);
}
}
protected int applyScale(int v) {
return (int)(v * displayScale + 0.5f);
}
protected float applyScale(float v) {
return v * displayScale;
}
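// Expands the four per-corner radii into the eight-value x/y radius array expected by RoundRectShape
// and GradientDrawable#setCornerRadii, applying the display scale to each value.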
protected float[] expandRadii(float[] input) {
float[] r = new float[8];
for (int i = 0; i < 4; i++) {
r[2 * i] = r[2 * i + 1] = applyScale(input[i]);
}
return r;
}
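// Parses a space-separated list of floats in CSS-shorthand style; when fewer than 'size' values are supplied,
// the last parsed value is repeated, e.g. "10 20" with size 4 yields {10, 20, 20, 20}.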
protected float[] parseFloatArray(String value, int size) {
String[] values = value.split(" ");
float[] r = new float[size];
float prev = 0.0f;
for (int i = 0; i < size; i++) {
if (i < values.length) prev = Float.valueOf(values[i].trim());
r[i] = prev;
}
return r;
}
}
|
package cgeo.geocaching.connector.oc;
import cgeo.CGeoTestCase;
import cgeo.geocaching.Geocache;
import cgeo.geocaching.cgData;
import cgeo.geocaching.enumerations.LoadFlags;
public class OkapiClientTest extends CGeoTestCase {
public static void testGetOCCache() {
final String geoCode = "OU0331";
Geocache cache = OkapiClient.getCache(geoCode);
assertNotNull(cache);
assertEquals(geoCode, cache.getGeocode());
assertEquals("Oshkosh Municipal Tank", cache.getName());
assertTrue(cache.isDetailed());
// cache should be stored to DB (to listID 0) when loaded above
cache = cgData.loadCache(geoCode, LoadFlags.LOAD_ALL_DB_ONLY);
assertNotNull(cache);
assertEquals(geoCode, cache.getGeocode());
assertEquals("Oshkosh Municipal Tank", cache.getName());
assertTrue(cache.isDetailed());
}
public static void testOCSearchMustWorkWithoutOAuthAccessTokens() {
final String geoCode = "OC1234";
Geocache cache = OkapiClient.getCache(geoCode);
assertNotNull("You must have a valid OKAPI key installed for running this test (but you do not need to set credentials in the app).", cache);
assertEquals("Wupper-Schein", cache.getName());
}
}
|
package org.openntf.domino;
import lotus.domino.View;
public interface ViewColumn extends Base<lotus.domino.ViewColumn>, lotus.domino.ViewColumn {
@Override
public int getAlignment();
@Override
public int getColumnValuesIndex();
@Override
public int getDateFmt();
@Override
public int getFontColor();
@Override
public String getFontFace();
@Override
public int getFontPointSize();
@Override
public int getFontStyle();
@Override
public String getFormula();
@Override
public int getHeaderAlignment();
@Override
public int getHeaderFontColor();
@Override
public String getHeaderFontFace();
@Override
public int getHeaderFontPointSize();
@Override
public int getHeaderFontStyle();
@Override
public String getItemName();
@Override
public int getListSep();
@Override
public int getNumberAttrib();
@Override
public int getNumberDigits();
@Override
public int getNumberFormat();
@Override
public View getParent();
@Override
public int getPosition();
@Override
public String getResortToViewName();
@Override
public int getSecondaryResortColumnIndex();
@Override
public int getTimeDateFmt();
@Override
public int getTimeFmt();
@Override
public int getTimeZoneFmt();
@Override
public String getTitle();
@Override
public int getWidth();
@Override
public boolean isAccentSensitiveSort();
@Override
public boolean isCaseSensitiveSort();
@Override
public boolean isCategory();
@Override
public boolean isConstant();
@Override
public boolean isField();
@Override
public boolean isFontBold();
@Override
public boolean isFontItalic();
@Override
public boolean isFontStrikethrough();
@Override
public boolean isFontUnderline();
@Override
public boolean isFormula();
@Override
public boolean isHeaderFontBold();
@Override
public boolean isHeaderFontItalic();
@Override
public boolean isHeaderFontStrikethrough();
@Override
public boolean isHeaderFontUnderline();
@Override
public boolean isHidden();
@Override
public boolean isHideDetail();
@Override
public boolean isHideFormula();
@Override
public boolean isIcon();
@Override
public boolean isNumberAttribParens();
@Override
public boolean isNumberAttribPercent();
@Override
public boolean isNumberAttribPunctuated();
@Override
public boolean isResize();
@Override
public boolean isResortAscending();
@Override
public boolean isResortDescending();
@Override
public boolean isResortToView();
@Override
public boolean isResponse();
@Override
public boolean isSecondaryResort();
@Override
public boolean isSecondaryResortDescending();
@Override
public boolean isShowTwistie();
@Override
public boolean isSortDescending();
@Override
public boolean isSorted();
@Override
public void setAccentSensitiveSort(boolean flag);
@Override
public void setAlignment(int alignment);
@Override
public void setCaseSensitiveSort(boolean flag);
@Override
public void setDateFmt(int format);
@Override
public void setFontBold(boolean flag);
@Override
public void setFontColor(int color);
@Override
public void setFontFace(String face);
@Override
public void setFontItalic(boolean flag);
@Override
public void setFontPointSize(int size);
@Override
public void setFontStrikethrough(boolean flag);
@Override
public void setFontStyle(int style);
@Override
public void setFontUnderline(boolean flag);
@Override
public void setFormula(String formula);
@Override
public void setHeaderAlignment(int alignment);
@Override
public void setHeaderFontBold(boolean flag);
@Override
public void setHeaderFontColor(int color);
@Override
public void setHeaderFontFace(String face);
@Override
public void setHeaderFontItalic(boolean flag);
@Override
public void setHeaderFontPointSize(int size);
@Override
public void setHeaderFontStrikethrough(boolean flag);
@Override
public void setHeaderFontStyle(int style);
@Override
public void setHeaderFontUnderline(boolean flag);
@Override
public void setHidden(boolean flag);
@Override
public void setHideDetail(boolean flag);
@Override
public void setHideFormula(boolean flag);
@Override
public void setListSep(int separator);
@Override
public void setNumberAttrib(int attributes);
@Override
public void setNumberAttribParens(boolean flag);
@Override
public void setNumberAttribPercent(boolean flag);
@Override
public void setNumberAttribPunctuated(boolean flag);
@Override
public void setNumberDigits(int digits);
@Override
public void setNumberFormat(int format);
@Override
public void setPosition(int position);
@Override
public void setResize(boolean flag);
@Override
public void setResortAscending(boolean flag);
@Override
public void setResortDescending(boolean flag);
@Override
public void setResortToView(boolean flag);
@Override
public void setResortToViewName(String name);
@Override
public void setSecondaryResort(boolean flag);
@Override
public void setSecondaryResortColumnIndex(int index);
@Override
public void setSecondaryResortDescending(boolean flag);
@Override
public void setShowTwistie(boolean flag);
@Override
public void setSortDescending(boolean flag);
@Override
public void setSorted(boolean flag);
@Override
public void setTimeDateFmt(int format);
@Override
public void setTimeFmt(int format);
@Override
public void setTimeZoneFmt(int format);
@Override
public void setTitle(String title);
@Override
public void setWidth(int width);
}
|
//Liangrui Lu
//1366461
//This class is used to email the claim
package com.example.traveltracker;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Locale;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import android.app.Activity;
import android.content.ClipData.Item;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
public class Emailactivity extends Activity{
private Claimlist datafile;
private Claimlist claimlist;
private Claim claim;
private String claimID;
private String claimname;
private String datefrom;
private String dateto;
private String claimdescription;
final Context context = this;
private String test;
private ArrayList<Item> itemlist;
private EditText emailaddress;
//private Button buttonEmailCancle;
private Button emailsend;
private static final String FILENAME = "save.sav";
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.email);
//Intent intent = getIntent();
//final int claimID = intent.getIntExtra("claimID",0);
emailaddress = (EditText)findViewById(R.id.emailaddress);
emailsend = (Button)findViewById(R.id.emailsend);
//connect to a tool which has email function
emailsend.setOnClickListener(new View.OnClickListener(){
@Override
public void onClick(View v) {
Intent emailintent = new Intent(Intent.ACTION_SEND);
emailintent.setType("text/plain");
String[] recipients = {emailaddress.getText().toString()};
emailintent.putExtra(Intent.EXTRA_EMAIL , recipients);
emailintent.putExtra(Intent.EXTRA_SUBJECT,"Travel Claim: "+claim.getPlace());
emailintent.putExtra(Intent.EXTRA_TEXT,content());
try {
startActivity(Intent.createChooser(emailintent, "Sending...Please wait.."));
} catch (android.content.ActivityNotFoundException ex) {
Toast.makeText(Emailactivity.this, "Sending Email failed", Toast.LENGTH_SHORT).show();
}
saveInFile();
}
});
}
//collect all the information about this claim and store it in a buffer as a string
@Override
protected void onStart() {
super.onStart();
claimlist = this.loadFromFile();
datafile = loadFromFile();
Intent intent = getIntent();
final int claimID = intent.getIntExtra("claimID",0);
claim = datafile.getClaimlist().get(claimID);
TextView emailcontent = (TextView)findViewById(R.id.emailcontent);
emailcontent.setText(content());
}
public String content() {
StringBuffer buffer = new StringBuffer();
buffer.append("Travel Claim \n");
claimname = claim.getPlace();
buffer.append("Claim Name: "+claimname+"\n");
datefrom= claim.getDatefrom();
buffer.append("From: "+datefrom+"\n");
dateto= claim.getDateto();
buffer.append("To: "+dateto+"\n");
claimdescription = claim.getClaimdescription();
buffer.append("Description: "+claimdescription+"\n");
buffer.append("Item:\n");
for (int i = 0; i < claim.getItemlist().size();i++){
String itemId = String.valueOf(i+1);
buffer.append(itemId);
buffer.append("\n");
String itemname = claim.getItemlist().get(i).getItem();
buffer.append("Name: "+itemname);
buffer.append("\n");
String itemdate = claim.getItemlist().get(i).getDate();
buffer.append("Date: "+itemdate);
buffer.append("\n");
String itemCategory = claim.getItemlist().get(i).getCategory();
buffer.append("Category: "+itemCategory);
buffer.append("\n");
String itemAmount =String.valueOf(claim.getItemlist().get(i).getAmount());
String itemUnit = claim.getItemlist().get(i).getUnit();
buffer.append("Amount Spend: "+itemAmount+" "+itemUnit);
buffer.append("\n");
String itemDescription = claim.getItemlist().get(i).getDescription();
buffer.append("Description: "+itemDescription);
buffer.append("\n");
}
return buffer.toString();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
//getMenuInflater().inflate(R.menu.emailclaim, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//if (id == R.id.action_settings) {
// return true;
return super.onOptionsItemSelected(item);
}
//Load and save
private Claimlist loadFromFile(){
Gson gson = new Gson();
datafile = new Claimlist();
try{
FileInputStream fis = openFileInput(FILENAME);
InputStreamReader in = new InputStreamReader(fis);
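// The TypeToken supplies the Type object Gson needs to deserialize the saved JSON back into a Claimlist.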
Type typeOfT = new TypeToken<Claimlist>(){}.getType();
datafile = gson.fromJson(in, typeOfT);
fis.close();
} catch(FileNotFoundException e){
e.printStackTrace();
}catch (IOException e){
e.printStackTrace();
}
return datafile;
}
private void saveInFile(){
Gson gson = new Gson();
try{
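// Mode 0 is Context.MODE_PRIVATE, so the save file is only accessible to this application.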
FileOutputStream fos = openFileOutput(FILENAME,0);
OutputStreamWriter osw = new OutputStreamWriter(fos);
gson.toJson(datafile,osw);
osw.flush();
fos.close();
} catch(FileNotFoundException e){
e.printStackTrace();
}catch (IOException e){
e.printStackTrace();
}
}
}
|
package com.thoughtworks.xstream.io.xml;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.io.Writer;
import java.net.URL;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.FactoryConfigurationError;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import com.thoughtworks.xstream.io.HierarchicalStreamReader;
import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
import com.thoughtworks.xstream.io.StreamException;
import com.thoughtworks.xstream.io.naming.NameCoder;
public class DomDriver extends AbstractXmlDriver {
private final String encoding;
private DocumentBuilderFactory documentBuilderFactory;
/**
* Construct a DomDriver.
*/
public DomDriver() {
this(null);
}
/**
* Construct a DomDriver with a specified encoding. The created DomReader will ignore any encoding attribute of the
* XML header though.
*/
public DomDriver(final String encoding) {
this(encoding, new XmlFriendlyNameCoder());
}
/**
* @since 1.4
*/
public DomDriver(final String encoding, final NameCoder nameCoder) {
super(nameCoder);
this.encoding = encoding;
}
/**
* @since 1.2
* @deprecated As of 1.4, use {@link #DomDriver(String, NameCoder)} instead.
*/
@Deprecated
public DomDriver(final String encoding, final XmlFriendlyReplacer replacer) {
this(encoding, (NameCoder)replacer);
}
@Override
public HierarchicalStreamReader createReader(final Reader in) {
return createReader(new InputSource(in));
}
@Override
public HierarchicalStreamReader createReader(final InputStream in) {
return createReader(new InputSource(in));
}
@Override
public HierarchicalStreamReader createReader(final URL in) {
return createReader(new InputSource(in.toExternalForm()));
}
@Override
public HierarchicalStreamReader createReader(final File in) {
return createReader(new InputSource(in.toURI().toASCIIString()));
}
private HierarchicalStreamReader createReader(final InputSource source) {
try {
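// Lazily create the shared DocumentBuilderFactory with double-checked locking so every reader reuses one instance.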
if (documentBuilderFactory == null) {
synchronized (this) {
if (documentBuilderFactory == null) {
documentBuilderFactory = createDocumentBuilderFactory();
}
}
}
final DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
if (encoding != null) {
source.setEncoding(encoding);
}
final Document document = documentBuilder.parse(source);
return new DomReader(document, getNameCoder());
} catch (final FactoryConfigurationError e) {
throw new StreamException(e);
} catch (final ParserConfigurationException e) {
throw new StreamException(e);
} catch (final SAXException e) {
throw new StreamException(e);
} catch (final IOException e) {
throw new StreamException(e);
}
}
@Override
public HierarchicalStreamWriter createWriter(final Writer out) {
return new PrettyPrintWriter(out, getNameCoder());
}
@Override
public HierarchicalStreamWriter createWriter(final OutputStream out) {
try {
return createWriter(encoding != null ? new OutputStreamWriter(out, encoding) : new OutputStreamWriter(out));
} catch (final UnsupportedEncodingException e) {
throw new StreamException(e);
}
}
/**
* Create the DocumentBuilderFactory instance.
*
* @return the new instance
* @since upcoming
*/
protected DocumentBuilderFactory createDocumentBuilderFactory() {
final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
try {
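// Disallowing DOCTYPE declarations hardens the parser against XXE and entity-expansion attacks.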
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
} catch (final ParserConfigurationException e) {
throw new StreamException(e);
}
return factory;
}
}
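// Usage sketch (illustration only, not part of the original file): a DomDriver is normally handed to an
// XStream instance, e.g. XStream xstream = new XStream(new DomDriver("UTF-8")); followed by
// String xml = xstream.toXML(obj); and Object back = xstream.fromXML(xml); the encoding given here affects
// only the OutputStreamWriter used for writing and the encoding hint passed to the InputSource when reading.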
|
package cl.json.social;
import android.content.ActivityNotFoundException;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReadableMap;
public class GooglePlusShare extends SingleShareIntent {
private static final String PACKAGE = "com.google.android.apps.plus";
private static final String PLAY_STORE_LINK = "https://plus.google.com/share?url={url}";
public GooglePlusShare(ReactApplicationContext reactContext) {
super(reactContext);
}
@Override
public void open(ReadableMap options) throws ActivityNotFoundException {
super.open(options);
// extra params here
this.openIntentChooser();
}
@Override
protected String getPackage() {
return PACKAGE;
}
@Override
protected String getDefaultWebLink() {
return PLAY_STORE_LINK;
}
@Override
protected String getPlayStoreLink() {
return PLAY_STORE_LINK;
}
}
|
package clarifai2.test;
import clarifai2.api.ClarifaiBuilder;
import clarifai2.api.ClarifaiClient;
import clarifai2.api.ClarifaiResponse;
import clarifai2.api.request.input.SearchClause;
import clarifai2.api.request.model.Action;
import clarifai2.api.request.model.PredictRequest;
import clarifai2.dto.ClarifaiStatus;
import clarifai2.dto.PointF;
import clarifai2.dto.Radius;
import clarifai2.dto.input.ClarifaiInput;
import clarifai2.dto.input.SearchHit;
import clarifai2.dto.input.image.ClarifaiImage;
import clarifai2.dto.input.image.Crop;
import clarifai2.dto.model.ConceptModel;
import clarifai2.dto.model.DefaultModels;
import clarifai2.dto.model.Model;
import clarifai2.dto.model.ModelTrainingStatus;
import clarifai2.dto.model.ModelVersion;
import clarifai2.dto.model.output.ClarifaiOutput;
import clarifai2.dto.model.output_info.ConceptOutputInfo;
import clarifai2.dto.prediction.Color;
import clarifai2.dto.prediction.Concept;
import clarifai2.dto.prediction.Embedding;
import clarifai2.dto.prediction.Focus;
import clarifai2.dto.prediction.Logo;
import clarifai2.dto.prediction.Region;
import clarifai2.exception.ClarifaiException;
import clarifai2.internal.JSONObjectBuilder;
import com.google.gson.JsonNull;
import com.google.gson.JsonObject;
import com.kevinmost.junit_retry_rule.Retry;
import okhttp3.OkHttpClient;
import org.jetbrains.annotations.NotNull;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static clarifai2.api.request.input.SearchClause.matchConcept;
import static clarifai2.internal.InternalUtil.assertNotNull;
import static clarifai2.internal.InternalUtil.sleep;
import static java.lang.reflect.Modifier.isPublic;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class CommonWorkflowTests extends BaseClarifaiAPITest {
private static long startTime;
@BeforeClass
public static void recordTime() {
startTime = System.nanoTime();
}
@Retry
@Test public void t00_deleteAllInputs() {
assertSuccess(client.deleteAllInputs());
retryAndTimeout(1, TimeUnit.MINUTES, () ->
client.getInputs().build().getPage(1).executeSync().get().isEmpty()
);
}
@Retry
@Test public void t01a_addInputs() throws Exception {
assertSuccess(client.addInputs()
.plus(ClarifaiInput.forImage(ClarifaiImage.of(KOTLIN_LOGO_IMAGE_FILE)
.withCrop(Crop.create()
.top(0.1F)
.left(0.1F)
.bottom(0.9F)
.right(0.9F)
)
)
.withID("foo1")
.withConcepts(Concept.forID("concept1").withValue(false))
)
);
}
@Retry
@Test public void t01b_addInputs_bulk() throws Exception {
final Concept ferrari23 = Concept.forID("ferrari23");
final Concept outdoors23 = Concept.forID("outdoors23");
assertSuccess(client.addInputs()
.plus(
ClarifaiInput.forImage(ClarifaiImage.of(
"https://s3.amazonaws.com/clarifai-img/5e/00/cb/8476bca5632276903b28701736.png"))
.withConcepts(
ferrari23.withValue(true)
),
ClarifaiInput.forImage(ClarifaiImage.of(
"https://s3.amazonaws.com/clarifai-img/00/c3/ad/78d5ae3b3f2a84fe2bfb69dc28.jpg"))
.withConcepts(
ferrari23.withValue(true),
outdoors23.withValue(false)
),
ClarifaiInput.forImage(ClarifaiImage.of(
"https://s3.amazonaws.com/clarifai-img/d4/89/e0/67f7f1622bf586c876875c3fc6.jpg"))
.withConcepts(
ferrari23.withValue(true),
outdoors23
).withGeo(PointF.at(30, -24)),
ClarifaiInput.forImage(ClarifaiImage.of(
"https://s3.amazonaws.com/clarifai-img/cd/1d/05/8b9cd2d37560ef9f6c436debc6.jpg"))
.withConcepts(
ferrari23.withValue(false),
outdoors23
),
ClarifaiInput.forImage(ClarifaiImage.of(
"https://s3.amazonaws.com/clarifai-img/a3/05/dc/b142653346b98ed0a4998c157f.jpg"))
.withConcepts(
ferrari23.withValue(false),
outdoors23
),
ClarifaiInput.forImage(ClarifaiImage.of(
"https://s3.amazonaws.com/clarifai-img/43/2a/89/163ade86b76b4ba8ec67d22e40.jpg"))
.withConcepts(
ferrari23.withValue(false),
outdoors23
),
ClarifaiInput.forImage(ClarifaiImage.of(
"https://s3.amazonaws.com/clarifai-img/d4/89/e0/67f7f1622bf586c876875c3fc6.jpg"))
.withConcepts(
ferrari23.withValue(false),
outdoors23
)
)
);
}
@Retry
@Test public void t01c_addInputWithMetadata() {
assertSuccess(client.addInputs().plus(ClarifaiInput.forImage(ClarifaiImage.of(KOTLIN_LOGO_IMAGE_FILE))
.withID("inputWithMetadata")
.withMetadata(new JSONObjectBuilder()
.add("foo", "bar")
.build()
)
));
}
@Retry
@Test public void t02_addConceptsToInput() {
assertSuccess(client.mergeConceptsForInput("foo1")
.plus(
Concept.forID("concept2"),
Concept.forID("concept3")
)
);
}
@Retry
@Test public void t03_getAllInputs() {
assertSuccess(client.getInputs());
}
@Retry
@Test public void t04_getInputByID() {
assertSuccess(client.getInputByID("foo1"));
}
@Retry
@Test public void t05_deleteInput() {
assertSuccess(client.deleteInput("foo1"));
}
@Retry
@Test public void t06_getInputsStatus() {
assertSuccess(client.getInputsStatus());
}
@Retry
@Test public void t07_getConcepts() {
assertSuccess(client.getConcepts());
}
@Retry
@Test public void t08_getConceptByID() {
assertSuccess(client.getConceptByID("concept2"));
}
@Retry
@Test public void t09_searchConcepts() {
assertSuccess(client.searchConcepts("conc*"));
}
@Retry
@Test public void t09b_searchConcepts_multi_language() {
assertSuccess(client.searchConcepts("*").withLanguage("zh")); // "zh" = Chinese
}
@Retry
@Test public void t10_getAllModels() {
assertSuccess(client.getModels());
}
@Retry
@Test public void t11_deleteAllModels() {
assertSuccess(client.deleteAllModels());
}
@Retry
@Test public void t12a_createModel() {
assertSuccess(client.createModel(getModelID())
.withOutputInfo(ConceptOutputInfo.forConcepts(
Concept.forID("ferrari23")
))
);
}
@Retry
@Test public void t13_getModelByID() {
assertSuccess(client.getModelByID(getModelID()));
}
@Retry
@Test public void t14a_addConceptsToModel() {
assertSuccess(client.modifyModel(getModelID())
.withConcepts(Action.MERGE, Concept.forID("outdoors23"))
);
}
@Retry
@Test public void t14b_addConceptsToModel_00() {
assertSuccess(client.getModelByID(getModelID()).executeSync().get().asConceptModel()
.modify().withConcepts(Action.MERGE, Concept.forID("outdoors23"))
);
}
@Retry
@Test public void t14c_addConceptsToModel_multi_lang() {
assertSuccess(client.getModelByID(getModelID()).executeSync().get().asConceptModel()
.modify().withConcepts(Action.MERGE, Concept.forID("outdoors23")).withLanguage("zh"));
}
@Retry
@Test public void t15_trainModel() {
assertSuccess(client.addInputs()
.plus(ClarifaiInput.forImage(ClarifaiImage.of("https://samples.clarifai.com/penguin.bmp"))
.withConcepts(Concept.forID("outdoors23"))
)
.allowDuplicateURLs(true)
);
assertSuccess(client.trainModel(getModelID()));
retryAndTimeout(2, TimeUnit.MINUTES, () -> {
final ModelVersion version = assertSuccess(client.getModelByID(getModelID())).modelVersion();
assertNotNull(version);
final ModelTrainingStatus status = version.status();
if (!status.isTerminalEvent()) {
return false;
}
if (status == ModelTrainingStatus.TRAINED) {
return true;
}
fail("Version had error while training: " + version.status());
return false;
});
}
@Retry
@Test public void t16a_predictWithModel() {
assertSuccess(client.predict(client.getDefaultModels().generalModel().id())
.withInputs(ClarifaiInput.forImage(ClarifaiImage.of(KOTLIN_LOGO_IMAGE_FILE)))
);
}
@Retry
@Test public void t16b_predictWithModel_00() {
assertSuccess(client.getDefaultModels().generalModel().predict()
.withInputs(ClarifaiInput.forImage(ClarifaiImage.of(METRO_NORTH_IMAGE_URL)
.withCrop(Crop.create()
.top(0.1F)
.bottom(0.8F)
)
)));
}
@Retry
@Test public void t16c_predictBatchWithModel_01() {
List<ClarifaiInput> inputs = new ArrayList<ClarifaiInput>();
inputs.add(ClarifaiInput.forImage(ClarifaiImage.of(METRO_NORTH_IMAGE_URL)).withID("myID1"));
inputs.add(ClarifaiInput.forImage(ClarifaiImage.of(METRO_NORTH_IMAGE_URL)).withID("myID2"));
PredictRequest<Concept> request = client.getDefaultModels().generalModel().predict()
.withInputs(inputs);
assertSuccess(request);
ClarifaiResponse<List<ClarifaiOutput<Concept>>> response = request.executeSync();
assertTrue(response.isSuccessful());
}
@Retry
@Test public void t16d_predictBatchBase64WithModel() {
List<ClarifaiInput> inputs = new ArrayList<ClarifaiInput>();
inputs.add(ClarifaiInput.forImage(ClarifaiImage.of(KOTLIN_LOGO_IMAGE_FILE)).withID("myID1"));
inputs.add(ClarifaiInput.forImage(ClarifaiImage.of(KOTLIN_LOGO_IMAGE_FILE)).withID("myID2"));
PredictRequest<Concept> request = client.getDefaultModels().generalModel().predict()
.withInputs(inputs);
assertSuccess(request);
ClarifaiResponse<List<ClarifaiOutput<Concept>>> response = request.executeSync();
assertTrue(response.isSuccessful());
}
@Retry
@Test public void t16f_predictWithModel_multi_lang() {
assertSuccess(client.predict(client.getDefaultModels().generalModel().id())
.withInputs(ClarifaiInput.forImage(ClarifaiImage.of(KOTLIN_LOGO_IMAGE_FILE)))
.withLanguage("zh")
);
}
@Retry
@Test public void t17a_searchInputsWithModel() {
assertSuccess(client.searchInputs(
SearchClause.matchImageURL(ClarifaiImage.of(METRO_NORTH_IMAGE_URL))
));
}
@Retry
@Test public void t17b_searchInputsWithModel_complexSearch() {
assertSuccess(
client.searchInputs(matchConcept(Concept.forID("outdoors23").withValue(true)))
.and(SearchClause.matchImageURL(ClarifaiImage.of(METRO_NORTH_IMAGE_URL)))
.build()
);
}
@Retry
@Test public void t17c_searchInputsWithModel_metadata() {
final List<SearchHit> hits = assertSuccess(
client.searchInputs(SearchClause.matchMetadata(new JSONObjectBuilder().add("foo", "bar").build()))
);
final ClarifaiInput hit = hits.stream()
.filter(someHit -> "inputWithMetadata".equals(someHit.input().id()))
.findFirst()
.orElseThrow(() -> new AssertionError(""))
.input();
assertEquals("inputWithMetadata", hit.id());
assertEquals(new JSONObjectBuilder().add("foo", "bar").build(), hit.metadata());
}
@Retry
@Test public void t17d_searchInputsWithModel_multi_language() {
assertSuccess(client.searchInputs(
SearchClause.matchImageURL(ClarifaiImage.of(METRO_NORTH_IMAGE_URL))).withLanguage("zh"));
}
@Test public void t17e_searchInputsWithModel_geo() {
assertSuccess(client.addInputs().plus(
ClarifaiInput.forImage(ClarifaiImage.of(METRO_NORTH_IMAGE_URL))
.withGeo(PointF.at(90F, 23F))
));
assertSuccess(
client.searchInputs(matchConcept(Concept.forID("outdoors23").withValue(true)))
.and(SearchClause.matchImageURL(ClarifaiImage.of(METRO_NORTH_IMAGE_URL)))
.and(SearchClause.matchGeo(PointF.at(90F, 23F), Radius.of(5, Radius.Unit.MILE)))
.build()
);
}
@Retry
@Test public void t18_testGeo() {
{
final List<SearchHit> hitsBeforeAdding = assertSuccess(
client.searchInputs(SearchClause.matchGeo(PointF.at(59F, 29.75F), Radius.of(500, Radius.Unit.MILE)))
);
assertEquals(0, hitsBeforeAdding.size());
}
assertSuccess(client.addInputs().plus(
ClarifaiInput.forImage(ClarifaiImage.of(KOTLIN_LOGO_IMAGE_FILE))
.withGeo(PointF.at(60F, 29.75F))
));
{
final List<SearchHit> hitsAfterAdding = assertSuccess(
client.searchInputs(SearchClause.matchGeo(PointF.at(59F, 29.75F), Radius.of(500, Radius.Unit.MILE)))
);
assertEquals(1, hitsAfterAdding.size());
}
{
final List<SearchHit> hits = assertSuccess(
client.searchInputs(SearchClause.matchGeo(PointF.at(3F, 0F), PointF.at(70, 30F)))
);
assertEquals(1, hits.size());
}
}
@Retry
@Test public void t19_testBatch_partialFailure() {
List<ClarifaiInput> batch = new ArrayList<>();
batch.add(ClarifaiInput.forImage(
ClarifaiImage.of("https://s3.amazonaws.com/clarifai-img/5e/00/cb/8476bca5632276903b28701736.png")));
batch.add(ClarifaiInput.forImage(
ClarifaiImage.of("https://s3.amazonaws.com/clarifai-img/00/c3/ad/78d5ae3b3f2a84fe2bfb69dc28.jpg")));
batch.add(ClarifaiInput.forImage(ClarifaiImage.of("https://this_should_fail.jpg")));
ClarifaiResponse<List<ClarifaiOutput<Concept>>> response = client.getDefaultModels().generalModel().predict()
.withInputs(batch).executeSync();
assertTrue(response.isMixedSuccess());
assertNotNull(response.get());
List<ClarifaiOutput<Concept>> concepts = response.get();
assertEquals(concepts.get(2).status().statusCode(), 30002);
}
@Retry
@Test public void t20_testDemographicsModel() {
ClarifaiResponse<List<ClarifaiOutput<Region>>> faceDetects = client.getDefaultModels().demographicsModel().predict()
.withInputs(ClarifaiInput.forImage(ClarifaiImage.of("https://samples.clarifai.com/demographics.jpg")))
.executeSync();
Assert.assertNotNull(faceDetects.get().get(0).data().get(0).crop());
Assert.assertNotNull(faceDetects.get().get(0).data().get(0).ageAppearances());
Assert.assertNotNull(faceDetects.get().get(0).data().get(0).genderAppearances());
Assert.assertNotNull(faceDetects.get().get(0).data().get(0).multiculturalAppearances());
}
@Retry
@Test public void t21_testApparelModel() {
assertSuccess(client.predict(client.getDefaultModels().apparelModel().id())
.withInputs(ClarifaiInput.forImage(ClarifaiImage.of("https://samples.clarifai.com/family.jpg")))
);
}
@Retry
@Test public void t22_testFocusModel() {
ClarifaiResponse<List<ClarifaiOutput<Focus>>> focii = client.getDefaultModels().focusModel().predict()
.withInputs(ClarifaiInput.forImage(ClarifaiImage.of("https://samples.clarifai.com/demographics.jpg")))
.executeSync();
Assert.assertNotNull(focii.get());
Assert.assertNotNull(focii.get().get(0));
Assert.assertNotNull(focii.get().get(0).data());
Assert.assertNotNull(focii.get().get(0).data().get(0));
Assert.assertNotNull(focii.get().get(0).data().get(0).crop());
}
@Retry
@Test public void t23_testgeneralEmbedModel() {
ClarifaiResponse<List<ClarifaiOutput<Embedding>>> embeddings = client.getDefaultModels().generalEmbeddingModel()
.predict()
.withInputs(ClarifaiInput.forImage(ClarifaiImage.of("https://samples.clarifai.com/demographics.jpg")))
.executeSync();
Assert.assertNotNull(embeddings.get());
Assert.assertNotNull(embeddings.get().get(0));
Assert.assertNotNull(embeddings.get().get(0).data());
Assert.assertNotNull(embeddings.get().get(0).data().get(0));
Assert.assertNotNull(embeddings.get().get(0).data().get(0).embedding());
}
@Retry
@Test public void t23_testLogoModel() {
ClarifaiResponse<List<ClarifaiOutput<Logo>>> logos = client.getDefaultModels().logoModel().predict()
.withInputs(ClarifaiInput.forImage(ClarifaiImage.of("https://samples.clarifai.com/nike_building.jpg")))
.executeSync();
Assert.assertNotNull(logos.get());
Assert.assertNotNull(logos.get().get(0));
Assert.assertNotNull(logos.get().get(0).data());
Assert.assertNotNull(logos.get().get(0).data().get(0));
Assert.assertNotNull(logos.get().get(0).data().get(0).boundingBox());
Assert.assertNotNull(logos.get().get(0).data().get(0).concepts());
}
@Retry
@Test public void t23_testColorModel() {
ClarifaiResponse<List<ClarifaiOutput<Color>>> colors = client.getDefaultModels().colorModel().predict()
.withInputs(ClarifaiInput.forImage(ClarifaiImage.of(METRO_NORTH_IMAGE_URL)))
.executeSync();
Assert.assertNotNull(colors.get());
Assert.assertNotNull(colors.get().get(0));
Assert.assertNotNull(colors.get().get(0).data());
Assert.assertNotNull(colors.get().get(0).data().get(0));
Assert.assertNotNull(colors.get().get(0).data().get(0).hex());
Assert.assertNotNull(colors.get().get(0).data().get(0).webSafeHex());
Assert.assertNotNull(colors.get().get(0).data().get(0).webSafeColorName());
}
@Test public void errorsExposedToUser() {
final ClarifaiResponse<ConceptModel> response = client.getDefaultModels().generalModel().modify()
.withConcepts(Action.MERGE, Concept.forID("concept2"))
.executeSync();
if (response.isSuccessful()) {
fail("You shouldn't be able to add concepts to the built-in general model");
}
logger.debug(response.getStatus().toString());
}
@Retry
@Test public void testDeleteBatch() {
assertSuccess(client.addInputs().plus(
ClarifaiInput.forImage(ClarifaiImage.of(KOTLIN_LOGO_IMAGE_FILE)).withID("kotlin"),
ClarifaiInput.forImage(ClarifaiImage.of(METRO_NORTH_IMAGE_FILE)).withID("train")
));
sleep(5000);
assertSuccess(client.deleteInputsBatch().plus("kotlin", "train"));
}
@Test public void testSyncNetworkExceptions() throws ExecutionException, InterruptedException {
final ClarifaiResponse<List<Model<?>>> badResponse = new ClarifaiBuilder(appID, appSecret)
.baseURL(baseURL)
.client(new OkHttpClient.Builder()
.connectTimeout(5, TimeUnit.SECONDS)
.readTimeout(5, TimeUnit.SECONDS)
.writeTimeout(5, TimeUnit.SECONDS)
.addInterceptor(chain -> {
// Don't mess with the token request that happens behind the scenes
if (chain.request().url().pathSegments().contains("token")) {
return chain.proceed(chain.request());
}
// Change the port on our actual requests so that we get IOExceptions
return chain.proceed(chain.request().newBuilder()
.url(chain.request().url().newBuilder().port(383).build())
.build()
);
})
.build()
)
.buildSync()
.getModels()
.getPage(1)
.executeSync();
if (badResponse.isSuccessful()) {
fail("this response used a bad port, it should not have been successful. Response: " + badResponse.get());
}
final ClarifaiStatus details = badResponse.getStatus();
assertTrue(details.networkErrorOccurred());
logger.debug(details.errorDetails());
}
@Test public void testBuildClientAsync() throws InterruptedException, ExecutionException {
final Future<ClarifaiClient> futureClient = new ClarifaiBuilder(appID, appSecret)
.baseURL(baseURL)
.build();
retryAndTimeout(30, TimeUnit.SECONDS, futureClient::isDone);
final ClarifaiClient client = futureClient.get();
logger.debug(client.getToken().toString());
}
@Test(expected = ClarifaiException.class)
public void testClosingClientWorks() {
final ClarifaiClient toBeClosed = new ClarifaiBuilder(appID, appSecret).buildSync();
toBeClosed.close();
toBeClosed.getModels().getPage(1).executeSync();
}
@Retry
@Test
public void testCreateModel() {
final String modelID = "creatingModel" + System.nanoTime();
assertSuccess(client.createModel(modelID).withOutputInfo(
ConceptOutputInfo.forConcepts(
Concept.forID("foo")
)
));
}
@Retry
@Test
public void testCreateModel_multi_lang() {
final String modelID = "creatingModel" + System.nanoTime();
assertSuccess(client.createModel(modelID).withOutputInfo(
ConceptOutputInfo.forConcepts(
Concept.forID("foo")
).withLanguage("zh")
));
}
@Retry
@Test
public void testModifyModel() {
final String modelID = "modifyingModel" + System.nanoTime();
assertSuccess(client.createModel(modelID).withOutputInfo(
ConceptOutputInfo.forConcepts(
Concept.forID("foo")
)
));
assertSuccess(client.modifyModel(modelID)
.withConcepts(Action.OVERWRITE, Concept.forID("bar"))
);
final List<Concept> concepts =
assertSuccess(client.getModelByID(modelID)).asConceptModel().outputInfo().concepts();
assertEquals(1, concepts.size());
assertEquals("bar", concepts.get(0).name());
}
@Retry
@Test
public void testMergeMetadata() {
final String inputID = assertSuccess(client.addInputs()
.allowDuplicateURLs(true)
.plus(ClarifaiInput.forImage(ClarifaiImage.of(METRO_NORTH_IMAGE_URL))
)
).get(0).id();
assertNotNull(inputID);
final JsonObject newMetadata = assertSuccess(
client.addMetadataForInput(
inputID,
new JSONObjectBuilder()
.add("foo", "bar")
.build()
)
).metadata();
assertEquals(new JSONObjectBuilder().add("foo", "bar").build(), newMetadata);
}
@Test public void testMetadataDoesNotAllowNullDictionaryValues() {
thrown.expect(IllegalArgumentException.class);
client.addInputs()
.allowDuplicateURLs(true)
.plus(ClarifaiInput.forImage(ClarifaiImage.of(METRO_NORTH_IMAGE_URL))
// Will throw IAE because we have a null value
.withMetadata(new JSONObjectBuilder().add("foo", JsonNull.INSTANCE).build())
)
.executeSync();
}
@Test public void testDefaultModels() throws InvocationTargetException, IllegalAccessException {
final DefaultModels defaultModels = client.getDefaultModels();
// Use reflection just to ensure we don't miss any models when we add new ones
for (final Method method : DefaultModels.class.getMethods()) {
if (isPublic(method.getModifiers()) && Model.class.isAssignableFrom(method.getReturnType())) {
final Model<?> model = (Model<?>) method.invoke(defaultModels);
assertSuccess(
model.predict().withInputs(ClarifaiInput.forImage(ClarifaiImage.of(METRO_NORTH_IMAGE_URL)))
);
}
}
}
// Workaround: models can't be deleted right now, so we create a model with a different ID on every run
@NotNull private static String getModelID() {
return "mod1ID" + startTime;
}
}
|
package cgeo.geocaching;
import cgeo.geocaching.activity.AbstractActivity;
import cgeo.geocaching.activity.AbstractListActivity;
import cgeo.geocaching.activity.ActivityMixin;
import cgeo.geocaching.apps.cache.navi.NavigationAppFactory;
import cgeo.geocaching.apps.cachelist.CacheListAppFactory;
import cgeo.geocaching.enumerations.CacheListType;
import cgeo.geocaching.enumerations.CacheType;
import cgeo.geocaching.enumerations.LogType;
import cgeo.geocaching.enumerations.StatusCode;
import cgeo.geocaching.files.GPXImporter;
import cgeo.geocaching.filter.AttributeFilter;
import cgeo.geocaching.filter.IFilter;
import cgeo.geocaching.filter.SizeFilter;
import cgeo.geocaching.filter.StateFilter;
import cgeo.geocaching.filter.TrackablesFilter;
import cgeo.geocaching.filter.TypeFilter;
import cgeo.geocaching.geopoint.Geopoint;
import cgeo.geocaching.maps.CGeoMap;
import cgeo.geocaching.network.Parameters;
import cgeo.geocaching.sorting.CacheComparator;
import cgeo.geocaching.sorting.DateComparator;
import cgeo.geocaching.sorting.DifficultyComparator;
import cgeo.geocaching.sorting.EventDateComparator;
import cgeo.geocaching.sorting.FindsComparator;
import cgeo.geocaching.sorting.GeocodeComparator;
import cgeo.geocaching.sorting.InventoryComparator;
import cgeo.geocaching.sorting.NameComparator;
import cgeo.geocaching.sorting.PopularityComparator;
import cgeo.geocaching.sorting.RatingComparator;
import cgeo.geocaching.sorting.SizeComparator;
import cgeo.geocaching.sorting.StateComparator;
import cgeo.geocaching.sorting.TerrainComparator;
import cgeo.geocaching.sorting.VisitComparator;
import cgeo.geocaching.sorting.VoteComparator;
import cgeo.geocaching.ui.CacheListAdapter;
import cgeo.geocaching.utils.RunnableWithArgument;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpResponse;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.res.Configuration;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.ContextMenu;
import android.view.ContextMenu.ContextMenuInfo;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SubMenu;
import android.view.View;
import android.view.WindowManager;
import android.widget.AdapterView.AdapterContextMenuInfo;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.RelativeLayout;
import android.widget.TextView;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class cgeocaches extends AbstractListActivity {
private static final int MAX_LIST_ITEMS = 1000;
private static final String EXTRAS_LIST_TYPE = "type";
private static final int MENU_REFRESH_STORED = 2;
private static final int MENU_CACHE_DETAILS = 4;
private static final int MENU_DROP_CACHES = 5;
private static final int MENU_IMPORT_GPX = 6;
private static final int MENU_CREATE_LIST = 7;
private static final int MENU_DROP_LIST = 8;
private static final int MENU_INVERT_SELECTION = 9;
private static final int MENU_SORT_DISTANCE = 10;
private static final int MENU_SORT_DIFFICULTY = 11;
private static final int MENU_SORT_TERRAIN = 12;
private static final int MENU_SORT_SIZE = 13;
private static final int MENU_SORT_FAVORITES = 14;
private static final int MENU_SORT_NAME = 15;
private static final int MENU_SORT_GEOCODE = 16;
private static final int MENU_SWITCH_LIST = 17;
private static final int MENU_SORT_RATING = 18;
private static final int MENU_SORT_VOTE = 19;
private static final int MENU_SORT_INVENTORY = 20;
private static final int MENU_IMPORT_WEB = 21;
private static final int MENU_EXPORT_NOTES = 22;
private static final int MENU_REMOVE_FROM_HISTORY = 23;
private static final int MENU_DROP_CACHE = 24;
private static final int MENU_MOVE_TO_LIST = 25;
private static final int MENU_FILTER_CLEAR = 26;
private static final int MENU_FILTER_TRACKABLES = 27;
private static final int SUBMENU_FILTER_SIZE = 28;
private static final int SUBMENU_FILTER_TYPE = 29;
private static final int MENU_SWITCH_SELECT_MODE = 52;
private static final int SUBMENU_SHOW_MAP = 54;
private static final int SUBMENU_MANAGE_LISTS = 55;
private static final int SUBMENU_MANAGE_OFFLINE = 56;
private static final int SUBMENU_SORT = 57;
private static final int SUBMENU_FILTER = 58;
private static final int SUBMENU_IMPORT = 59;
private static final int SUBMENU_MANAGE_HISTORY = 60;
private static final int MENU_SORT_DATE = 61;
private static final int MENU_SORT_FINDS = 62;
private static final int MENU_SORT_STATE = 63;
private static final int MENU_RENAME_LIST = 64;
private static final int MENU_DROP_CACHES_AND_LIST = 65;
private static final int MENU_DEFAULT_NAVIGATION = 66;
private static final int SUBMENU_FILTER_ATTRIBUTES = 67;
private static final int SUBMENU_FILTER_STATE = 68;
private String action = null;
private CacheListType type = null;
private Geopoint coords = null;
private CacheType cacheType = Settings.getCacheType();
private String keyword = null;
private String address = null;
private String username = null;
private SearchResult search = null;
private List<cgCache> cacheList = new ArrayList<cgCache>();
private CacheListAdapter adapter = null;
private LayoutInflater inflater = null;
private View listFooter = null;
private TextView listFooterText = null;
private ProgressDialog waitDialog = null;
private Float northHeading = 0f;
private cgGeo geo = null;
private cgDirection dir = null;
private UpdateLocationCallback geoUpdate = new UpdateLocation();
private UpdateDirectionCallback dirUpdate = new UpdateDirection();
private String title = "";
private int detailTotal = 0;
private int detailProgress = 0;
private long detailProgressTime = 0L;
private LoadDetailsThread threadDetails = null;
private LoadFromWebThread threadWeb = null;
private DropDetailsThread threadR = null;
private ExportFieldNotesThread threadF = null;
private RemoveFromHistoryThread threadH = null;
private int listId = 0;
private List<StoredList> lists = null;
private GeocodeComparator gcComparator = new GeocodeComparator();
private Handler loadCachesHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
try {
if (search != null) {
setTitle(title + " [" + SearchResult.getCount(search) + "]");
cacheList.clear();
final List<cgCache> cacheListTmp = app.getCaches(search, false);
if (CollectionUtils.isNotEmpty(cacheListTmp)) {
cacheList.addAll(cacheListTmp);
cacheListTmp.clear();
Collections.sort(cacheList, gcComparator);
}
} else {
setTitle(title);
}
setAdapter();
setDateComparatorForEventList();
if (cacheList == null) {
showToast(res.getString(R.string.err_list_load_fail));
}
setMoreCaches();
if (cacheList != null && SearchResult.getError(search) == StatusCode.UNAPPROVED_LICENSE) {
AlertDialog.Builder dialog = new AlertDialog.Builder(cgeocaches.this);
dialog.setTitle(res.getString(R.string.license));
dialog.setMessage(res.getString(R.string.err_license));
dialog.setCancelable(true);
dialog.setNegativeButton(res.getString(R.string.license_dismiss), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
cgBase.clearCookies();
dialog.cancel();
}
});
dialog.setPositiveButton(res.getString(R.string.license_show), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
cgBase.clearCookies();
// NOTE: the URL literal was truncated in the source; the original cgeo code opened the geocaching.com license agreement page.
startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("http://www.geocaching.com/software/agreement.aspx?ID=0")));
}
});
AlertDialog alert = dialog.create();
alert.show();
} else if (app != null && SearchResult.getError(search) != null) {
showToast(res.getString(R.string.err_download_fail) + " " + SearchResult.getError(search).getErrorString(res) + ".");
hideLoading();
showProgress(false);
finish();
return;
}
if (geo != null && geo.coordsNow != null) {
adapter.setActualCoordinates(geo.coordsNow);
adapter.setActualHeading(northHeading);
}
} catch (Exception e) {
showToast(res.getString(R.string.err_detail_cache_find_any));
Log.e(Settings.tag, "cgeocaches.loadCachesHandler: " + e.toString());
hideLoading();
showProgress(false);
finish();
return;
}
try {
hideLoading();
showProgress(false);
} catch (Exception e2) {
Log.e(Settings.tag, "cgeocaches.loadCachesHandler.2: " + e2.toString());
}
if (adapter != null) {
adapter.setSelectMode(false, true);
}
}
};
private Handler loadNextPageHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
try {
if (search != null) {
setTitle(title + " [" + SearchResult.getCount(search) + "]");
cacheList.clear();
final List<cgCache> cacheListTmp = app.getCaches(search, false);
if (CollectionUtils.isNotEmpty(cacheListTmp)) {
cacheList.addAll(cacheListTmp);
cacheListTmp.clear();
Collections.sort(cacheList, gcComparator);
}
if (adapter != null) {
adapter.reFilter();
}
} else {
setTitle(title);
}
setAdapter();
if (cacheList == null) {
showToast(res.getString(R.string.err_list_load_fail));
}
setMoreCaches();
if (SearchResult.getError(search) != null) {
showToast(res.getString(R.string.err_download_fail) + " " + SearchResult.getError(search).getErrorString(res) + ".");
listFooter.setOnClickListener(new MoreCachesListener());
hideLoading();
showProgress(false);
finish();
return;
}
if (geo != null && geo.coordsNow != null) {
adapter.setActualCoordinates(geo.coordsNow);
adapter.setActualHeading(northHeading);
}
} catch (Exception e) {
showToast(res.getString(R.string.err_detail_cache_find_next));
Log.e(Settings.tag, "cgeocaches.loadNextPageHandler: " + e.toString());
}
hideLoading();
showProgress(false);
if (adapter != null) {
adapter.setSelectMode(false, true);
}
}
};
private Handler loadDetailsHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
setAdapter();
if (msg.what > -1) {
if (waitDialog != null) {
cacheList.get(msg.what).setStatusChecked(false);
if (adapter != null) {
adapter.notifyDataSetChanged();
}
int secondsElapsed = (int) ((System.currentTimeMillis() - detailProgressTime) / 1000);
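// ETA estimate: remaining caches multiplied by the average seconds per cache so far, converted to minutes
// (the ternary guards against dividing by zero before the first cache finishes).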
int minutesRemaining = ((detailTotal - detailProgress) * secondsElapsed / ((detailProgress > 0) ? detailProgress : 1) / 60);
waitDialog.setProgress(detailProgress);
if (minutesRemaining < 1) {
waitDialog.setMessage(res.getString(R.string.caches_downloading) + " " + res.getString(R.string.caches_eta_ltm));
} else if (minutesRemaining == 1) {
waitDialog.setMessage(res.getString(R.string.caches_downloading) + " " + minutesRemaining + " " + res.getString(R.string.caches_eta_min));
} else {
waitDialog.setMessage(res.getString(R.string.caches_downloading) + " " + minutesRemaining + " " + res.getString(R.string.caches_eta_mins));
}
}
} else {
if (cacheList != null && search != null) {
final List<cgCache> cacheListTmp = app.getCaches(search, false);
if (CollectionUtils.isNotEmpty(cacheListTmp)) {
cacheList.clear();
cacheList.addAll(cacheListTmp);
cacheListTmp.clear();
Collections.sort(cacheList, gcComparator);
}
}
if (geo != null && geo.coordsNow != null) {
adapter.setActualCoordinates(geo.coordsNow);
adapter.setActualHeading(northHeading);
}
showProgress(false);
if (waitDialog != null) {
waitDialog.dismiss();
waitDialog.setOnCancelListener(null);
}
if (geo == null) {
geo = app.startGeo(geoUpdate);
}
if (Settings.isLiveList() && Settings.isUseCompass() && dir == null) {
dir = app.startDir(cgeocaches.this, dirUpdate);
}
}
}
};
private Handler downloadFromWebHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
setAdapter();
if (adapter != null) {
adapter.notifyDataSetChanged();
}
if (msg.what == 0) { //no caches
waitDialog.setMessage(res.getString(R.string.web_import_waiting));
} else if (msg.what == 1) { //cache downloading
waitDialog.setMessage(res.getString(R.string.web_downloading) + " " + (String) msg.obj + "...");
} else if (msg.what == 2) { //Cache downloaded
waitDialog.setMessage(res.getString(R.string.web_downloaded) + " " + (String) msg.obj + ".");
refreshCurrentList();
} else if (msg.what == -2) {
if (waitDialog != null) {
waitDialog.dismiss();
waitDialog.setOnCancelListener(null);
}
getWindow().clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
showToast(res.getString(R.string.sendToCgeo_download_fail));
finish();
return;
} else if (msg.what == -3) {
if (waitDialog != null) {
waitDialog.dismiss();
waitDialog.setOnCancelListener(null);
}
getWindow().clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
showToast(res.getString(R.string.sendToCgeo_no_registration));
finish();
return;
} else {
if (adapter != null) {
adapter.setSelectMode(false, true);
}
cacheList.clear();
final List<cgCache> cacheListTmp = app.getCaches(search, false);
if (CollectionUtils.isNotEmpty(cacheListTmp)) {
cacheList.addAll(cacheListTmp);
cacheListTmp.clear();
Collections.sort(cacheList, gcComparator);
}
if (waitDialog != null) {
waitDialog.dismiss();
waitDialog.setOnCancelListener(null);
}
getWindow().clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
}
};
private Handler dropDetailsHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
if (adapter != null) {
adapter.setSelectMode(false, true);
}
refreshCurrentList();
cacheList.clear();
final List<cgCache> cacheListTmp = app.getCaches(search, false);
if (CollectionUtils.isNotEmpty(cacheListTmp)) {
cacheList.addAll(cacheListTmp);
cacheListTmp.clear();
Collections.sort(cacheList, gcComparator);
}
if (waitDialog != null) {
waitDialog.dismiss();
waitDialog.setOnCancelListener(null);
}
}
};
private Handler removeFromHistoryHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
setAdapter();
if (msg.what > -1) {
cacheList.get(msg.what).setStatusChecked(false);
} else {
if (adapter != null) {
adapter.setSelectMode(false, true);
}
// TODO: Reload cacheList
if (waitDialog != null) {
waitDialog.dismiss();
waitDialog.setOnCancelListener(null);
}
}
}
};
private Handler exportFieldNotesHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
setAdapter();
if (msg.what > -1) {
cacheList.get(msg.what).setStatusChecked(false);
waitDialog.setProgress(detailProgress);
} else if (-2 == msg.what) {
showToast(res.getString(R.string.info_fieldnotes_exported_to) + ": " + msg.obj.toString());
} else if (-3 == msg.what) {
showToast(res.getString(R.string.err_fieldnotes_export_failed));
} else {
if (adapter != null) {
adapter.setSelectMode(false, true);
}
if (waitDialog != null) {
waitDialog.dismiss();
waitDialog.setOnCancelListener(null);
}
}
}
};
private Handler importGpxAttachementFinishedHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
refreshCurrentList();
}
};
private ContextMenuInfo lastMenuInfo;
/**
* the navigation menu item for the cache list (not the context menu!), or <code>null</code>
*/
private MenuItem navigationMenu;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// init
app.setAction(action);
setTheme();
setContentView(R.layout.caches);
setTitle("caches");
// get parameters
final Bundle extras = getIntent().getExtras();
if (extras != null) {
Object typeObject = extras.get(EXTRAS_LIST_TYPE);
type = (typeObject instanceof CacheListType) ? (CacheListType) typeObject : CacheListType.OFFLINE;
coords = new Geopoint(extras.getDouble("latitude"), extras.getDouble("longitude"));
cacheType = Settings.getCacheType();
keyword = extras.getString("keyword");
address = extras.getString("address");
username = extras.getString("username");
}
if (Intent.ACTION_VIEW.equals(getIntent().getAction())) {
type = CacheListType.OFFLINE;
if (coords == null) {
coords = new Geopoint(0, 0);
}
}
init();
Thread threadPure;
cgSearchThread thread;
switch (type) {
case OFFLINE:
listId = Settings.getLastList();
if (listId <= 0) {
listId = StoredList.STANDARD_LIST_ID;
title = res.getString(R.string.stored_caches_button);
} else {
final StoredList list = app.getList(listId);
title = list.title;
}
setTitle(title);
showProgress(true);
setLoadingCaches();
threadPure = new LoadByOfflineThread(loadCachesHandler, coords, listId);
threadPure.start();
break;
case HISTORY:
title = res.getString(R.string.caches_history);
setTitle(title);
showProgress(true);
setLoadingCaches();
threadPure = new LoadByHistoryThread(loadCachesHandler);
threadPure.start();
break;
case NEAREST:
action = "pending";
title = res.getString(R.string.caches_nearby);
setTitle(title);
showProgress(true);
setLoadingCaches();
thread = new LoadByCoordsThread(loadCachesHandler, coords);
thread.setRecaptchaHandler(new cgSearchHandler(this, res, thread));
thread.start();
break;
case COORDINATE:
action = "planning";
title = coords.toString();
setTitle(title);
showProgress(true);
setLoadingCaches();
thread = new LoadByCoordsThread(loadCachesHandler, coords);
thread.setRecaptchaHandler(new cgSearchHandler(this, res, thread));
thread.start();
break;
case KEYWORD:
title = keyword;
setTitle(title);
showProgress(true);
setLoadingCaches();
thread = new LoadByKeywordThread(loadCachesHandler, keyword);
thread.setRecaptchaHandler(new cgSearchHandler(this, res, thread));
thread.start();
break;
case ADDRESS:
action = "planning";
if (StringUtils.isNotBlank(address)) {
title = address;
setTitle(title);
showProgress(true);
setLoadingCaches();
} else {
title = coords.toString();
setTitle(title);
showProgress(true);
setLoadingCaches();
}
thread = new LoadByCoordsThread(loadCachesHandler, coords);
thread.setRecaptchaHandler(new cgSearchHandler(this, res, thread));
thread.start();
break;
case USERNAME:
title = username;
setTitle(title);
showProgress(true);
setLoadingCaches();
thread = new LoadByUserNameThread(loadCachesHandler, username);
thread.setRecaptchaHandler(new cgSearchHandler(this, res, thread));
thread.start();
break;
case OWNER:
title = username;
setTitle(title);
showProgress(true);
setLoadingCaches();
thread = new LoadByOwnerThread(loadCachesHandler, username);
thread.setRecaptchaHandler(new cgSearchHandler(this, res, thread));
thread.start();
break;
case MAP:
title = res.getString(R.string.map_map);
setTitle(title);
showProgress(true);
SearchResult result = extras != null ? (SearchResult) extras.get("search") : null;
search = new ParseResult(result);
loadCachesHandler.sendMessage(Message.obtain());
break;
default:
title = "caches";
setTitle(title);
Log.e(Settings.tag, "cgeocaches.onCreate: No action or unknown action specified");
break;
}
prepareFilterBar();
if (Intent.ACTION_VIEW.equals(getIntent().getAction())) {
importGpxAttachement();
}
}
private void importGpxAttachement() {
new AlertDialog.Builder(this)
.setTitle(res.getString(R.string.gpx_import_title))
.setMessage(res.getString(R.string.gpx_import_confirm))
.setCancelable(false)
.setPositiveButton(getString(android.R.string.yes), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
new GPXImporter(cgeocaches.this, listId, importGpxAttachementFinishedHandler).importGPX();
}
})
.setNegativeButton(getString(android.R.string.no), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
})
.create()
.show();
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
init();
}
@Override
public void onResume() {
super.onResume();
init();
if (adapter != null && geo != null && geo.coordsNow != null) {
adapter.setActualCoordinates(geo.coordsNow);
adapter.setActualHeading(northHeading);
}
if (adapter != null) {
adapter.setSelectMode(false, true);
if (geo != null && geo.coordsNow != null) {
adapter.forceSort(geo.coordsNow);
}
}
if (loadCachesHandler != null && search != null) {
loadCachesHandler.sendEmptyMessage(0);
}
// refresh standard list if it has changed (new caches downloaded)
if (type == CacheListType.OFFLINE && listId >= StoredList.STANDARD_LIST_ID && search != null) {
SearchResult newSearch = cgBase.searchByOffline(coords, cacheType, listId);
if (newSearch != null && newSearch.totalCnt != search.totalCnt) {
refreshCurrentList();
}
}
}
@Override
public void onDestroy() {
if (adapter != null) {
adapter = null;
}
if (dir != null) {
dir = app.removeDir();
}
if (geo != null) {
geo = app.removeGeo();
}
super.onDestroy();
}
@Override
public void onStop() {
if (dir != null) {
dir = app.removeDir();
}
if (geo != null) {
geo = app.removeGeo();
}
super.onStop();
}
@Override
public void onPause() {
if (dir != null) {
dir = app.removeDir();
}
if (geo != null) {
geo = app.removeGeo();
}
super.onPause();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
SubMenu subMenuFilter = menu.addSubMenu(0, SUBMENU_FILTER, 0, res.getString(R.string.caches_filter)).setIcon(R.drawable.ic_menu_filter);
subMenuFilter.setHeaderTitle(res.getString(R.string.caches_filter_title));
if (Settings.getCacheType() == CacheType.ALL) {
subMenuFilter.add(0, SUBMENU_FILTER_TYPE, 0, res.getString(R.string.caches_filter_type));
}
subMenuFilter.add(0, SUBMENU_FILTER_SIZE, 0, res.getString(R.string.caches_filter_size));
subMenuFilter.add(0, SUBMENU_FILTER_ATTRIBUTES, 0, res.getString(R.string.cache_attributes));
subMenuFilter.add(0, SUBMENU_FILTER_STATE, 0, res.getString(R.string.cache_status));
subMenuFilter.add(0, MENU_FILTER_TRACKABLES, 0, res.getString(R.string.caches_filter_track));
subMenuFilter.add(0, MENU_FILTER_CLEAR, 0, res.getString(R.string.caches_filter_clear));
SubMenu subMenuSort = menu.addSubMenu(0, SUBMENU_SORT, 0, res.getString(R.string.caches_sort)).setIcon(android.R.drawable.ic_menu_sort_alphabetically);
subMenuSort.setHeaderTitle(res.getString(R.string.caches_sort_title));
// sort the context menu labels alphabetically for easier reading
Map<String, Integer> comparators = new HashMap<String, Integer>();
comparators.put(res.getString(R.string.caches_sort_distance), MENU_SORT_DISTANCE);
comparators.put(res.getString(R.string.caches_sort_difficulty), MENU_SORT_DIFFICULTY);
comparators.put(res.getString(R.string.caches_sort_terrain), MENU_SORT_TERRAIN);
comparators.put(res.getString(R.string.caches_sort_size), MENU_SORT_SIZE);
comparators.put(res.getString(R.string.caches_sort_favorites), MENU_SORT_FAVORITES);
comparators.put(res.getString(R.string.caches_sort_name), MENU_SORT_NAME);
comparators.put(res.getString(R.string.caches_sort_gccode), MENU_SORT_GEOCODE);
comparators.put(res.getString(R.string.caches_sort_rating), MENU_SORT_RATING);
comparators.put(res.getString(R.string.caches_sort_vote), MENU_SORT_VOTE);
comparators.put(res.getString(R.string.caches_sort_inventory), MENU_SORT_INVENTORY);
comparators.put(res.getString(R.string.caches_sort_date), MENU_SORT_DATE);
comparators.put(res.getString(R.string.caches_sort_finds), MENU_SORT_FINDS);
comparators.put(res.getString(R.string.caches_sort_state), MENU_SORT_STATE);
List<String> sortedLabels = new ArrayList<String>(comparators.keySet());
Collections.sort(sortedLabels);
for (String label : sortedLabels) {
Integer id = comparators.get(label);
subMenuSort.add(1, id.intValue(), 0, label).setCheckable(true).setChecked(id.intValue() == MENU_SORT_DISTANCE);
}
subMenuSort.setGroupCheckable(1, true, true);
menu.add(0, MENU_SWITCH_SELECT_MODE, 0, res.getString(R.string.caches_select_mode)).setIcon(android.R.drawable.ic_menu_agenda);
menu.add(0, MENU_INVERT_SELECTION, 0, res.getString(R.string.caches_select_invert)).setIcon(R.drawable.ic_menu_mark);
if (type == CacheListType.OFFLINE) {
SubMenu subMenu = menu.addSubMenu(0, SUBMENU_MANAGE_OFFLINE, 0, res.getString(R.string.caches_manage)).setIcon(android.R.drawable.ic_menu_save);
subMenu.add(0, MENU_DROP_CACHES, 0, res.getString(R.string.caches_drop_all)); // delete saved caches
subMenu.add(0, MENU_DROP_CACHES_AND_LIST, 0, res.getString(R.string.caches_drop_all_and_list));
subMenu.add(0, MENU_REFRESH_STORED, 0, res.getString(R.string.cache_offline_refresh)); // download details for all caches
subMenu.add(0, MENU_MOVE_TO_LIST, 0, res.getString(R.string.cache_menu_move_list));
subMenu.add(0, MENU_EXPORT_NOTES, 0, res.getString(R.string.cache_export_fieldnote)); // export field notes
if (Settings.getWebDeviceCode() == null)
{
menu.add(0, MENU_IMPORT_GPX, 0, res.getString(R.string.gpx_import_title)).setIcon(android.R.drawable.ic_menu_upload); // import gpx file
} else {
SubMenu subMenuImport = menu.addSubMenu(0, SUBMENU_IMPORT, 0, res.getString(R.string.import_title)).setIcon(android.R.drawable.ic_menu_upload); // import
subMenuImport.add(1, MENU_IMPORT_GPX, 0, res.getString(R.string.gpx_import_title)).setCheckable(false).setChecked(false);
subMenuImport.add(1, MENU_IMPORT_WEB, 0, res.getString(R.string.web_import_title)).setCheckable(false).setChecked(false);
}
} else {
if (type == CacheListType.HISTORY)
{
SubMenu subMenu = menu.addSubMenu(0, SUBMENU_MANAGE_HISTORY, 0, res.getString(R.string.caches_manage)).setIcon(android.R.drawable.ic_menu_save);
subMenu.add(0, MENU_REMOVE_FROM_HISTORY, 0, res.getString(R.string.cache_clear_history)); // remove from history
subMenu.add(0, MENU_EXPORT_NOTES, 0, res.getString(R.string.cache_export_fieldnote)); // export field notes
}
menu.add(0, MENU_REFRESH_STORED, 0, res.getString(R.string.caches_store_offline)).setIcon(android.R.drawable.ic_menu_set_as); // download details for all caches
}
navigationMenu = CacheListAppFactory.addMenuItems(menu, this, res);
if (type == CacheListType.OFFLINE) {
SubMenu subMenu = menu.addSubMenu(0, SUBMENU_MANAGE_LISTS, 0, res.getString(R.string.list_menu)).setIcon(android.R.drawable.ic_menu_more);
subMenu.add(0, MENU_CREATE_LIST, 0, res.getString(R.string.list_menu_create));
subMenu.add(0, MENU_DROP_LIST, 0, res.getString(R.string.list_menu_drop));
subMenu.add(0, MENU_RENAME_LIST, 0, res.getString(R.string.list_menu_rename));
subMenu.add(0, MENU_SWITCH_LIST, 0, res.getString(R.string.list_menu_change));
}
return true;
}
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
super.onPrepareOptionsMenu(menu);
try {
if (adapter != null && adapter.getSelectMode()) {
menu.findItem(MENU_SWITCH_SELECT_MODE).setTitle(res.getString(R.string.caches_select_mode_exit))
.setIcon(R.drawable.ic_menu_clear_playlist);
menu.findItem(MENU_INVERT_SELECTION).setVisible(true);
} else {
menu.findItem(MENU_SWITCH_SELECT_MODE).setTitle(res.getString(R.string.caches_select_mode))
.setIcon(android.R.drawable.ic_menu_agenda);
menu.findItem(MENU_INVERT_SELECTION).setVisible(false);
}
boolean hasSelection = adapter != null && adapter.getChecked() > 0;
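// the standard list (id 1) cannot be dropped or renamed, so the related menu items are hidden for it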
boolean isNonDefaultList = listId != 1;
if (type == CacheListType.OFFLINE) { // only offline list
if (hasSelection) {
menu.findItem(MENU_DROP_CACHES).setTitle(res.getString(R.string.caches_drop_selected) + " (" + adapter.getChecked() + ")");
} else {
menu.findItem(MENU_DROP_CACHES).setTitle(res.getString(R.string.caches_drop_all));
}
menu.findItem(MENU_DROP_CACHES_AND_LIST).setVisible(!hasSelection && isNonDefaultList);
if (hasSelection) {
menu.findItem(MENU_REFRESH_STORED).setTitle(res.getString(R.string.caches_refresh_selected) + " (" + adapter.getChecked() + ")");
} else {
menu.findItem(MENU_REFRESH_STORED).setTitle(res.getString(R.string.caches_refresh_all));
}
if (hasSelection) {
menu.findItem(MENU_MOVE_TO_LIST).setTitle(res.getString(R.string.caches_move_selected) + " (" + adapter.getChecked() + ")");
} else {
menu.findItem(MENU_MOVE_TO_LIST).setTitle(res.getString(R.string.caches_move_all));
}
} else { // search and history list (all other than offline)
if (hasSelection) {
menu.findItem(MENU_REFRESH_STORED).setTitle(res.getString(R.string.caches_store_selected) + " (" + adapter.getChecked() + ")");
} else {
menu.findItem(MENU_REFRESH_STORED).setTitle(res.getString(R.string.caches_store_offline));
}
}
// Hide menus if cache-list is empty
int[] hideIfEmptyList = new int[] {
MENU_SWITCH_SELECT_MODE,
SUBMENU_MANAGE_OFFLINE,
SUBMENU_MANAGE_HISTORY,
SUBMENU_SHOW_MAP,
SUBMENU_SORT,
MENU_REFRESH_STORED };
boolean menuVisible = cacheList.size() > 0;
for (int itemId : hideIfEmptyList) {
MenuItem item = menu.findItem(itemId);
if (null != item) {
item.setVisible(menuVisible);
}
}
if (navigationMenu != null) {
navigationMenu.setVisible(menuVisible);
}
MenuItem item = menu.findItem(MENU_DROP_LIST);
if (item != null) {
item.setVisible(isNonDefaultList);
}
item = menu.findItem(MENU_RENAME_LIST);
if (item != null) {
item.setVisible(isNonDefaultList);
}
boolean multipleLists = app.getLists().size() >= 2;
item = menu.findItem(MENU_SWITCH_LIST);
if (item != null) {
item.setVisible(multipleLists);
}
item = menu.findItem(MENU_MOVE_TO_LIST);
if (item != null) {
item.setVisible(multipleLists);
}
item = menu.findItem(MENU_REMOVE_FROM_HISTORY);
if (null != item) {
if (hasSelection) {
item.setTitle(res.getString(R.string.cache_remove_from_history) + " (" + adapter.getChecked() + ")");
} else {
item.setTitle(res.getString(R.string.cache_clear_history));
}
}
item = menu.findItem(MENU_EXPORT_NOTES);
if (null != item) {
// Hide Field Notes export if there are no caches with logs
item.setVisible(false);
for (cgCache cache : cacheList) {
if (cache.isLogOffline()) {
item.setVisible(true);
if (hasSelection) {
item.setTitle(res.getString(R.string.cache_export_fieldnote) + " (" + adapter.getChecked() + ")");
} else {
item.setTitle(res.getString(R.string.cache_export_fieldnote));
}
break;
}
}
}
} catch (Exception e) {
Log.e(Settings.tag, "cgeocaches.onPrepareOptionsMenu: " + e.toString());
}
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int itemId = item.getItemId();
switch (itemId) {
case MENU_SWITCH_SELECT_MODE:
if (adapter != null) {
adapter.switchSelectMode();
}
return true;
case MENU_REFRESH_STORED:
refreshStored();
return true;
case MENU_DROP_CACHES:
dropStored(false);
return false;
case MENU_DROP_CACHES_AND_LIST:
dropStored(true);
return true;
case MENU_IMPORT_GPX:
importGpx();
return false;
case MENU_CREATE_LIST:
createList(null);
return false;
case MENU_DROP_LIST:
removeList(true);
return false;
case MENU_RENAME_LIST:
renameList();
return false;
case MENU_INVERT_SELECTION:
if (adapter != null) {
adapter.invertSelection();
}
return false;
case MENU_SORT_DISTANCE:
setComparator(item, null);
return false;
case MENU_SORT_DIFFICULTY:
setComparator(item, new DifficultyComparator());
return false;
case MENU_SORT_TERRAIN:
setComparator(item, new TerrainComparator());
return false;
case MENU_SORT_SIZE:
setComparator(item, new SizeComparator());
return false;
case MENU_SORT_FAVORITES:
setComparator(item, new PopularityComparator());
return false;
case MENU_SORT_NAME:
setComparator(item, new NameComparator());
return false;
case MENU_SORT_GEOCODE:
setComparator(item, new GeocodeComparator());
return false;
case MENU_SWITCH_LIST:
selectList(null);
return false;
case MENU_SORT_RATING:
setComparator(item, new RatingComparator());
return false;
case MENU_SORT_VOTE:
setComparator(item, new VoteComparator());
return false;
case MENU_SORT_INVENTORY:
setComparator(item, new InventoryComparator());
return false;
case MENU_SORT_DATE:
setComparator(item, new DateComparator());
return true;
case MENU_SORT_FINDS:
setComparator(item, new FindsComparator(app));
return true;
case MENU_SORT_STATE:
setComparator(item, new StateComparator());
return true;
case SUBMENU_FILTER_TYPE:
showFilterMenu(TypeFilter.getAllFilters(), res.getString(R.string.caches_filter_type_title));
return true;
case SUBMENU_FILTER_SIZE:
showFilterMenu(SizeFilter.getAllFilters(), res.getString(R.string.caches_filter_size_title));
return true;
case SUBMENU_FILTER_ATTRIBUTES:
showFilterMenu(AttributeFilter.getAllFilters(), res.getString(R.string.cache_attributes));
return true;
case SUBMENU_FILTER_STATE:
showFilterMenu(StateFilter.getAllFilters(), res.getString(R.string.cache_status));
return true;
case MENU_FILTER_TRACKABLES:
setFilter(new TrackablesFilter(res.getString(R.string.caches_filter_track)));
return true;
case MENU_FILTER_CLEAR:
if (adapter != null) {
setFilter(null);
}
return false;
case MENU_IMPORT_WEB:
importWeb();
return false;
case MENU_EXPORT_NOTES:
exportFieldNotes();
return false;
case MENU_REMOVE_FROM_HISTORY:
removeFromHistoryCheck();
return false;
case MENU_MOVE_TO_LIST:
moveCachesToOtherList();
return true;
}
return CacheListAppFactory.onMenuItemSelected(item, geo, cacheList, this, search);
}
private void showFilterMenu(final IFilter[] filters, final String menuTitle) {
final AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle(menuTitle);
final String[] names = new String[filters.length];
for (int i = 0; i < filters.length; i++) {
names[i] = filters[i].getName();
}
builder.setItems(names, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int item) {
setFilter(filters[item]);
}
});
builder.create().show();
}
private void setComparator(MenuItem item,
CacheComparator comparator) {
if (adapter != null) {
adapter.setComparator(comparator);
}
item.setChecked(true);
}
@Override
public void onCreateContextMenu(final ContextMenu menu, final View view, final ContextMenu.ContextMenuInfo info) {
super.onCreateContextMenu(menu, view, info);
if (adapter == null) {
return;
}
AdapterContextMenuInfo adapterInfo = null;
try {
adapterInfo = (AdapterContextMenuInfo) info;
} catch (Exception e) {
Log.w(Settings.tag, "cgeocaches.onCreateContextMenu: " + e.toString());
}
if (adapterInfo == null || adapterInfo.position >= adapter.getCount()) {
return;
}
final cgCache cache = adapter.getItem(adapterInfo.position);
if (StringUtils.isNotBlank(cache.getName())) {
menu.setHeaderTitle(cache.getName());
} else {
menu.setHeaderTitle(cache.getGeocode());
}
if (cache.getCoords() != null) {
menu.add(0, MENU_DEFAULT_NAVIGATION, 0, NavigationAppFactory.getDefaultNavigationApplication(this).getName());
final SubMenu subMenu = menu.addSubMenu(1, 0, 0, res.getString(R.string.cache_menu_navigate)).setIcon(android.R.drawable.ic_menu_mapmode);
NavigationAppFactory.addMenuItems(subMenu, this);
addVisitMenu(menu, cache);
menu.add(0, MENU_CACHE_DETAILS, 0, res.getString(R.string.cache_menu_details));
}
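// a list id of at least 1 means the cache is stored offline, so offer drop and move options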
if (cache.getListId() >= 1) {
menu.add(0, MENU_DROP_CACHE, 0, res.getString(R.string.cache_offline_drop));
final List<StoredList> cacheLists = app.getLists();
final int listCount = cacheLists.size();
if (listCount > 1) {
menu.add(0, MENU_MOVE_TO_LIST, 0, res.getString(R.string.cache_menu_move_list));
}
}
}
private void moveCachesToOtherList() {
final List<StoredList> cacheLists = app.getLists();
ArrayList<String> listNames = new ArrayList<String>();
for (StoredList list : cacheLists) {
listNames.add(list.getTitleAndCount());
}
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle(res.getString(R.string.cache_menu_move_list));
builder.setItems(listNames.toArray(new String[listNames.size()]), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int item) {
moveCachesToList(cacheLists.get(item));
}
});
builder.create().show();
}
private void moveCachesToList(final StoredList list) {
int newListId = list.id;
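// if no caches are explicitly checked, move every cache currently in the list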
final boolean moveAll = adapter.getChecked() == 0;
for (final cgCache c : Collections.unmodifiableList(cacheList)) {
if (moveAll || c.isStatusChecked()) {
app.moveToList(c.getGeocode(), newListId);
}
}
adapter.resetChecks();
refreshCurrentList();
}
@Override
public boolean onContextItemSelected(MenuItem item) {
final int id = item.getItemId();
ContextMenu.ContextMenuInfo info = item.getMenuInfo();
// restore menu info for sub menu items; see the note below about the Android sub menu bug
if (info == null) {
info = lastMenuInfo;
lastMenuInfo = null;
}
AdapterContextMenuInfo adapterInfo = null;
try {
adapterInfo = (AdapterContextMenuInfo) info;
} catch (Exception e) {
Log.w(Settings.tag, "cgeocaches.onContextItemSelected: " + e.toString());
}
if (id == MENU_DEFAULT_NAVIGATION) {
final cgCache cache = getCacheFromAdapter(adapterInfo);
final SearchResult singleSearch = cgBase.searchByGeocode(cache.getGeocode(), null, 0, false, null);
NavigationAppFactory.startDefaultNavigationApplication(geo, this, cache, singleSearch, null, null);
return true;
} else if (id == MENU_LOG_VISIT) {
return getCacheFromAdapter(adapterInfo).logVisit(this);
} else if (id == MENU_CACHE_DETAILS) {
final Intent cachesIntent = new Intent(this, CacheDetailActivity.class);
final cgCache cache = getCacheFromAdapter(adapterInfo);
cachesIntent.putExtra("geocode", cache.getGeocode().toUpperCase());
cachesIntent.putExtra("name", cache.getName());
startActivity(cachesIntent);
return true;
} else if (id == MENU_DROP_CACHE) {
cgBase.dropCache(app, getCacheFromAdapter(adapterInfo), new Handler() {
@Override
public void handleMessage(Message msg) {
refreshCurrentList();
}
});
return true;
} else if (id == MENU_MOVE_TO_LIST) {
final String geocode = getCacheFromAdapter(adapterInfo).getGeocode();
final List<StoredList> cacheLists = app.getLists();
ArrayList<String> listNames = new ArrayList<String>();
for (StoredList list : cacheLists) {
listNames.add(list.getTitleAndCount());
}
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle(res.getString(R.string.cache_menu_move_list));
builder.setItems(listNames.toArray(new String[listNames.size()]), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int item) {
final int newListId = cacheLists.get(item).id;
app.moveToList(geocode, newListId);
adapter.resetChecks();
refreshCurrentList();
}
});
builder.create().show();
return true;
}
// we must remember the menu info for the sub menu items, to work around
// a bug in Android:
lastMenuInfo = info;
if (adapterInfo != null) {
// create a search for a single cache (as if in details view)
final cgCache cache = getCacheFromAdapter(adapterInfo);
final SearchResult singleSearch = cgBase.searchByGeocode(cache.getGeocode(), null, 0, false, null);
if (NavigationAppFactory.onMenuItemSelected(item, geo, this,
cache, singleSearch, null, null)) {
return true;
}
int logType = id - MENU_LOG_VISIT_OFFLINE;
cache.logOffline(this, LogType.getById(logType));
}
return true;
}
/**
* Extract a cache from adapter data.
*
* @param adapterInfo
* the context menu info identifying the selected list position
* @return the cache at that position in the adapter
*/
private cgCache getCacheFromAdapter(final AdapterContextMenuInfo adapterInfo) {
return adapter.getItem(adapterInfo.position);
}
private boolean setFilter(IFilter filter) {
if (adapter != null) {
adapter.setFilter(filter);
prepareFilterBar();
return true;
}
return false;
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (adapter != null) {
if (adapter.resetChecks()) {
return true;
} else if (adapter.getSelectMode()) {
adapter.setSelectMode(false, true);
return true;
}
}
}
return super.onKeyDown(keyCode, event);
}
private void setAdapter() {
if (listFooter == null) {
if (inflater == null) {
inflater = getLayoutInflater();
}
listFooter = inflater.inflate(R.layout.caches_footer, null);
listFooter.setClickable(true);
listFooter.setOnClickListener(new MoreCachesListener());
}
if (listFooterText == null) {
listFooterText = (TextView) listFooter.findViewById(R.id.more_caches);
}
if (adapter == null) {
final ListView list = getListView();
registerForContextMenu(list);
list.setLongClickable(true);
list.addFooterView(listFooter);
adapter = new CacheListAdapter(this, cacheList, type);
setListAdapter(adapter);
} else {
adapter.notifyDataSetChanged();
}
adapter.reFilter();
if (geo != null) {
adapter.setActualCoordinates(geo.coordsNow);
}
if (dir != null) {
adapter.setActualHeading(dir.directionNow);
}
}
private void setLoadingCaches() {
if (listFooter == null) {
return;
}
if (listFooterText == null) {
return;
}
listFooterText.setText(res.getString(R.string.caches_more_caches_loading));
listFooter.setClickable(false);
listFooter.setOnClickListener(null);
}
private void setMoreCaches() {
if (listFooter == null) {
return;
}
if (listFooterText == null) {
return;
}
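// offer "more caches" only for online lists that have neither reached the local item limit
// nor the total number of results reported by the server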
boolean enableMore = type != CacheListType.OFFLINE && cacheList != null && cacheList.size() < MAX_LIST_ITEMS;
if (enableMore) {
final int count = SearchResult.getTotal(search);
enableMore = enableMore && count > 0 && cacheList.size() < count;
}
if (enableMore) {
listFooterText.setText(res.getString(R.string.caches_more_caches) + " (" + res.getString(R.string.caches_more_caches_currently) + ": " + cacheList.size() + ")");
listFooter.setOnClickListener(new MoreCachesListener());
} else {
if (CollectionUtils.isEmpty(cacheList)) {
listFooterText.setText(res.getString(R.string.caches_no_cache));
} else {
listFooterText.setText(res.getString(R.string.caches_more_caches_no));
}
listFooter.setOnClickListener(null);
}
listFooter.setClickable(enableMore);
}
private void init() {
// sensor & geolocation manager
if (geo == null) {
geo = app.startGeo(geoUpdate);
}
if (Settings.isLiveList() && Settings.isUseCompass() && dir == null) {
dir = app.startDir(this, dirUpdate);
}
if (CollectionUtils.isNotEmpty(cacheList)) {
setMoreCaches();
}
setTitle(title);
setAdapter();
if (geo != null) {
geoUpdate.updateLocation(geo);
}
if (dir != null) {
dirUpdate.updateDirection(dir);
}
}
private void importGpx() {
cgeogpxes.startSubActivity(this, listId);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
refreshCurrentList();
}
public void refreshStored() {
if (adapter != null && adapter.getChecked() > 0) {
// there are some checked caches
detailTotal = adapter.getChecked();
} else {
// no caches checked: download details for all of them (refreshing those already stored)
detailTotal = cacheList.size();
}
detailProgress = 0;
showProgress(false);
waitDialog = new ProgressDialog(this);
waitDialog.setOnCancelListener(new DialogInterface.OnCancelListener() {
public void onCancel(DialogInterface arg0) {
try {
if (threadDetails != null) {
threadDetails.kill();
}
if (geo == null) {
geo = app.startGeo(geoUpdate);
}
if (Settings.isLiveList() && Settings.isUseCompass() && dir == null) {
dir = app.startDir(cgeocaches.this, dirUpdate);
}
} catch (Exception e) {
Log.e(Settings.tag, "cgeocaches.onOptionsItemSelected.onCancel: " + e.toString());
}
}
});
waitDialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
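// rough ETA in minutes, assuming about 25 seconds per cache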
int etaTime = ((detailTotal * 25) / 60);
if (etaTime < 1) {
waitDialog.setMessage(res.getString(R.string.caches_downloading) + " " + res.getString(R.string.caches_eta_ltm));
} else if (etaTime == 1) {
waitDialog.setMessage(res.getString(R.string.caches_downloading) + " " + etaTime + " " + res.getString(R.string.caches_eta_min));
} else {
waitDialog.setMessage(res.getString(R.string.caches_downloading) + " " + etaTime + " " + res.getString(R.string.caches_eta_mins));
}
waitDialog.setCancelable(true);
waitDialog.setMax(detailTotal);
waitDialog.show();
detailProgressTime = System.currentTimeMillis();
threadDetails = new LoadDetailsThread(loadDetailsHandler, listId);
threadDetails.start();
}
public void removeFromHistoryCheck()
{
AlertDialog.Builder dialog = new AlertDialog.Builder(this);
dialog.setCancelable(true);
dialog.setTitle(res.getString(R.string.caches_removing_from_history));
dialog.setMessage((adapter != null && adapter.getChecked() > 0) ? res.getString(R.string.cache_remove_from_history)
: res.getString(R.string.cache_clear_history));
dialog.setPositiveButton(getString(android.R.string.yes), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
removeFromHistory();
dialog.cancel();
}
});
dialog.setNegativeButton(getString(android.R.string.no), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
});
AlertDialog alert = dialog.create();
alert.show();
}
public void removeFromHistory()
{
if (adapter != null && adapter.getChecked() > 0)
{
// there are some checked caches
detailTotal = adapter.getChecked();
}
else
{
// no checked caches, remove all
detailTotal = cacheList.size();
}
detailProgress = 0;
showProgress(false);
waitDialog = new ProgressDialog(this);
waitDialog.setOnCancelListener(new DialogInterface.OnCancelListener() {
public void onCancel(DialogInterface arg0)
{
try
{
if (threadH != null)
{
threadH.kill();
}
} catch (Exception e)
{
Log.e(Settings.tag, "cgeocaches.removeFromHistory.onCancel: " + e.toString());
}
}
});
waitDialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
waitDialog.setMessage(res.getString(R.string.caches_removing_from_history));
waitDialog.setCancelable(true);
waitDialog.setMax(detailTotal);
waitDialog.show();
threadH = new RemoveFromHistoryThread(removeFromHistoryHandler);
threadH.start();
}
public void exportFieldNotes()
{
if (adapter != null && adapter.getChecked() > 0)
{
// there are some checked caches
detailTotal = adapter.getChecked();
}
else
{
// no checked caches, export all
detailTotal = cacheList.size();
}
detailProgress = 0;
showProgress(false);
waitDialog = new ProgressDialog(this);
waitDialog.setOnCancelListener(new DialogInterface.OnCancelListener() {
public void onCancel(DialogInterface arg0)
{
try
{
if (threadF != null)
{
threadF.kill();
}
} catch (Exception e)
{
Log.e(Settings.tag, "cgeocaches.exportFieldNotes.onCancel: " + e.toString());
}
}
});
waitDialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
waitDialog.setMessage(res.getString(R.string.caches_exporting_fieldnote));
waitDialog.setCancelable(true);
waitDialog.setMax(detailTotal);
waitDialog.show();
threadF = new ExportFieldNotesThread(exportFieldNotesHandler);
threadF.start();
}
public void importWeb() {
detailProgress = 0;
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
showProgress(false);
waitDialog = new ProgressDialog(this);
waitDialog.setOnCancelListener(new DialogInterface.OnCancelListener() {
public void onCancel(DialogInterface arg0) {
try {
if (threadWeb != null) {
threadWeb.kill();
}
if (geo == null) {
geo = app.startGeo(geoUpdate);
}
if (Settings.isLiveList() && Settings.isUseCompass() && dir == null) {
dir = app.startDir(cgeocaches.this, dirUpdate);
}
} catch (Exception e) {
Log.e(Settings.tag, "cgeocaches.importWeb.onCancel: " + e.toString());
}
}
});
waitDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
waitDialog.setMessage(res.getString(R.string.web_import_waiting));
waitDialog.setCancelable(true);
waitDialog.show();
threadWeb = new LoadFromWebThread(downloadFromWebHandler, listId);
threadWeb.start();
}
public void dropStored(final boolean removeListAfterwards) {
AlertDialog.Builder dialog = new AlertDialog.Builder(this);
dialog.setCancelable(true);
dialog.setTitle(res.getString(R.string.caches_drop_stored));
if (adapter != null && adapter.getChecked() > 0) {
dialog.setMessage(res.getString(R.string.caches_drop_selected_ask));
} else {
dialog.setMessage(res.getString(R.string.caches_drop_all_ask));
}
dialog.setPositiveButton(getString(android.R.string.yes), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dropSelected();
if (removeListAfterwards) {
removeList(false);
}
dialog.cancel();
}
});
dialog.setNegativeButton(getString(android.R.string.no), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
});
AlertDialog alert = dialog.create();
alert.show();
}
public void dropSelected() {
waitDialog = new ProgressDialog(this);
waitDialog.setMessage(res.getString(R.string.caches_drop_progress));
waitDialog.setOnCancelListener(new DialogInterface.OnCancelListener() {
public void onCancel(DialogInterface arg0) {
try {
if (threadR != null) {
threadR.kill();
}
} catch (Exception e) {
Log.e(Settings.tag, "cgeocaches.onOptionsItemSelected.onCancel: " + e.toString());
}
}
});
waitDialog.setCancelable(true);
waitDialog.show();
threadR = new DropDetailsThread(dropDetailsHandler);
threadR.start();
}
private class UpdateLocation implements UpdateLocationCallback {
@Override
public void updateLocation(cgGeo geo) {
if (geo == null) {
return;
}
if (adapter == null) {
return;
}
try {
if (cacheList != null && geo.coordsNow != null) {
adapter.setActualCoordinates(geo.coordsNow);
}
if (!Settings.isUseCompass() || geo.speedNow > 5) { // use GPS when speed is higher than 18 km/h
if (!Settings.isUseCompass()) {
adapter.setActualHeading(geo.bearingNow);
}
if (northHeading != null) {
adapter.setActualHeading(northHeading);
}
}
} catch (Exception e) {
Log.w(Settings.tag, "Failed to UpdateLocation location.");
}
}
}
private class UpdateDirection implements UpdateDirectionCallback {
@Override
public void updateDirection(cgDirection dir) {
if (!Settings.isLiveList()) {
return;
}
if (dir == null || dir.directionNow == null) {
return;
}
northHeading = dir.directionNow;
if (northHeading != null && adapter != null && (geo == null || geo.speedNow <= 5)) { // use compass when speed is lower than 18 km/h
adapter.setActualHeading(northHeading);
}
}
}
private class LoadByOfflineThread extends Thread {
final private Handler handler;
final private Geopoint coords;
final private int listId;
public LoadByOfflineThread(final Handler handlerIn, final Geopoint coordsIn, int listIdIn) {
handler = handlerIn;
coords = coordsIn;
listId = listIdIn;
}
@Override
public void run() {
search = cgBase.searchByOffline(coords, Settings.getCacheType(), listId);
handler.sendMessage(new Message());
}
}
private class LoadByHistoryThread extends Thread {
final private Handler handler;
public LoadByHistoryThread(Handler handlerIn) {
handler = handlerIn;
}
@Override
public void run() {
search = cgeoapplication.getInstance().getHistoryOfCaches(true, coords != null ? Settings.getCacheType() : CacheType.ALL);
handler.sendMessage(new Message());
}
}
private class LoadNextPageThread extends cgSearchThread {
private final Handler handler;
public LoadNextPageThread(Handler handlerIn) {
handler = handlerIn;
}
@Override
public void run() {
search = cgBase.searchByNextPage(this, (ParseResult) search, 0, Settings.isShowCaptcha());
handler.sendMessage(new Message());
}
}
private class LoadByCoordsThread extends cgSearchThread {
final private Handler handler;
final private Geopoint coords;
public LoadByCoordsThread(final Handler handler, final Geopoint coords) {
setPriority(Thread.MIN_PRIORITY);
this.handler = handler;
this.coords = coords;
if (coords == null) {
showToast(res.getString(R.string.warn_no_coordinates));
finish();
return;
}
}
@Override
public void run() {
search = cgBase.searchByCoords(this, coords, cacheType, 0, Settings.isShowCaptcha());
handler.sendMessage(new Message());
}
}
private class LoadByKeywordThread extends cgSearchThread {
final private Handler handler;
final private String keyword;
public LoadByKeywordThread(final Handler handler, final String keyword) {
setPriority(Thread.MIN_PRIORITY);
this.handler = handler;
this.keyword = keyword;
if (keyword == null) {
showToast(res.getString(R.string.warn_no_keyword));
finish();
return;
}
}
@Override
public void run() {
search = cgBase.searchByKeyword(this, keyword, cacheType, 0, Settings.isShowCaptcha());
handler.sendMessage(new Message());
}
}
private class LoadByUserNameThread extends cgSearchThread {
final private Handler handler;
final private String username;
public LoadByUserNameThread(final Handler handler, final String username) {
setPriority(Thread.MIN_PRIORITY);
this.handler = handler;
this.username = username;
if (StringUtils.isBlank(username)) {
showToast(res.getString(R.string.warn_no_username));
finish();
return;
}
}
@Override
public void run() {
search = cgBase.searchByUsername(this, username, cacheType, 0, Settings.isShowCaptcha());
handler.sendMessage(new Message());
}
}
private class LoadByOwnerThread extends cgSearchThread {
final private Handler handler;
final private String username;
public LoadByOwnerThread(final Handler handler, final String username) {
setPriority(Thread.MIN_PRIORITY);
this.handler = handler;
this.username = username;
if (StringUtils.isBlank(username)) {
showToast(res.getString(R.string.warn_no_username));
finish();
return;
}
}
@Override
public void run() {
Map<String, String> params = new HashMap<String, String>();
params.put("username", username);
if (cacheType != null) {
params.put("cacheType", cacheType.id);
}
search = cgBase.searchByOwner(this, username, cacheType, 0, Settings.isShowCaptcha());
handler.sendMessage(new Message());
}
}
private class LoadDetailsThread extends Thread {
final private Handler handler;
final private int listIdLD;
private volatile boolean needToStop = false;
private int checked = 0;
private long last = 0L;
public LoadDetailsThread(Handler handlerIn, int listId) {
setPriority(Thread.MIN_PRIORITY);
handler = handlerIn;
this.listIdLD = listId;
if (adapter != null) {
checked = adapter.getChecked();
}
}
public void kill() {
needToStop = true;
}
@Override
public void run() {
if (dir != null) {
dir = app.removeDir();
}
if (geo != null) {
geo = app.removeGeo();
}
final List<cgCache> cacheListTemp = new ArrayList<cgCache>(cacheList);
for (cgCache cache : cacheListTemp) {
if (checked > 0 && !cache.isStatusChecked()) {
handler.sendEmptyMessage(0);
yield();
continue;
}
try {
if (needToStop) {
Log.i(Settings.tag, "Stopped storing process.");
break;
}
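// throttle requests: if the previous cache finished less than 1.5 s ago,
// wait a randomized 0.5-2 s before storing the next one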
if ((System.currentTimeMillis() - last) < 1500) {
try {
int delay = 1000 + (int) (Math.random() * 1000) - (int) (System.currentTimeMillis() - last);
if (delay < 0) {
delay = 500;
}
Log.i(Settings.tag, "Waiting for next cache " + delay + " ms");
sleep(delay);
} catch (Exception e) {
Log.e(Settings.tag, "cgeocaches.LoadDetailsThread.sleep: " + e.toString());
}
}
if (needToStop) {
Log.i(Settings.tag, "Stopped storing process.");
break;
}
detailProgress++;
cgBase.storeCache(app, cgeocaches.this, cache, null, listIdLD, null);
handler.sendEmptyMessage(cacheList.indexOf(cache));
yield();
} catch (Exception e) {
Log.e(Settings.tag, "cgeocaches.LoadDetailsThread: " + e.toString());
}
last = System.currentTimeMillis();
}
cacheListTemp.clear();
handler.sendEmptyMessage(-1);
}
}
private class LoadFromWebThread extends Thread {
final private Handler handler;
final private int listIdLFW;
private volatile boolean needToStop = false;
public LoadFromWebThread(Handler handlerIn, int listId) {
setPriority(Thread.MIN_PRIORITY);
handler = handlerIn;
listIdLFW = listId;
}
public void kill() {
needToStop = true;
}
@Override
public void run() {
if (dir != null) {
dir = app.removeDir();
}
if (geo != null) {
geo = app.removeGeo();
}
int delay = -1;
int times = 0;
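// poll send2.cgeo.org for geocodes pushed from the browser: a geocode answer is stored to the list,
// "RG" means the device registration is no longer valid, a short/empty answer means nothing is pending yet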
while (times < 3 * 60 / 5) // maximum: 3 minutes, every 5 seconds
{
if (needToStop)
{
handler.sendEmptyMessage(-1);
break;
}
// fetch the next pending geocode from send2.cgeo.org
String deviceCode = Settings.getWebDeviceCode();
if (deviceCode == null) {
deviceCode = "";
}
final Parameters params = new Parameters("code", deviceCode);
HttpResponse responseFromWeb = cgBase.request("http://send2.cgeo.org/read.html", params, true);
if (responseFromWeb != null && responseFromWeb.getStatusLine().getStatusCode() == 200) {
final String response = cgBase.getResponseData(responseFromWeb);
if (response.length() > 2) {
String GCcode = response;
delay = 1;
Message mes = new Message();
mes.what = 1;
mes.obj = GCcode;
handler.sendMessage(mes);
yield();
cgBase.storeCache(app, cgeocaches.this, null, GCcode,
listIdLFW, null);
Message mes1 = new Message();
mes1.what = 2;
mes1.obj = GCcode;
handler.sendMessage(mes1);
yield();
} else if ("RG".equals(cgBase.getResponseData(responseFromWeb))) {
// Server returned "RG" (registration): this device is no longer registered.
Settings.setWebNameCode(null, null);
needToStop = true;
handler.sendEmptyMessage(-3);
return;
} else {
delay = 0;
handler.sendEmptyMessage(0);
yield();
}
}
if (responseFromWeb == null || responseFromWeb.getStatusLine().getStatusCode() != 200) {
needToStop = true;
handler.sendEmptyMessage(-2);
return;
}
try {
yield();
if (delay == 0)
{
sleep(5000); //No caches 5s
times++;
} else {
sleep(500); //Cache was loaded 0.5s
times = 0;
}
} catch (InterruptedException e) {
Log.e(Settings.tag, "cgeocaches.LoadFromWebThread.sleep: " + e.toString());
}
}
handler.sendEmptyMessage(-1);
}
}
private class DropDetailsThread extends Thread {
final private Handler handler;
private volatile boolean needToStop = false;
private int checked = 0;
public DropDetailsThread(Handler handlerIn) {
setPriority(Thread.MIN_PRIORITY);
handler = handlerIn;
if (adapter != null) {
checked = adapter.getChecked();
}
}
public void kill() {
needToStop = true;
}
@Override
public void run() {
if (dir != null) {
dir = app.removeDir();
}
if (geo != null) {
geo = app.removeGeo();
}
final List<cgCache> cacheListTemp = new ArrayList<cgCache>(cacheList);
for (cgCache cache : cacheListTemp) {
if (checked > 0 && !cache.isStatusChecked()) {
continue;
}
try {
if (needToStop) {
Log.i(Settings.tag, "Stopped dropping process.");
break;
}
app.markDropped(cache.getGeocode());
} catch (Exception e) {
Log.e(Settings.tag, "cgeocaches.DropDetailsThread: " + e.toString());
}
}
cacheListTemp.clear();
handler.sendEmptyMessage(-1);
}
}
private class RemoveFromHistoryThread extends Thread {
final private Handler handler;
private volatile boolean needToStop = false;
private int checked = 0;
public RemoveFromHistoryThread(Handler handlerIn) {
setPriority(Thread.MIN_PRIORITY);
handler = handlerIn;
if (adapter != null) {
checked = adapter.getChecked();
}
}
public void kill() {
needToStop = true;
}
@Override
public void run() {
for (cgCache cache : cacheList) {
if (checked > 0 && !cache.isStatusChecked()) {
handler.sendEmptyMessage(0);
yield();
continue;
}
try {
if (needToStop) {
Log.i(Settings.tag, "Stopped removing process.");
break;
}
app.clearVisitDate(cache.getGeocode());
handler.sendEmptyMessage(cacheList.indexOf(cache));
yield();
} catch (Exception e) {
Log.e(Settings.tag, "cgeocaches.RemoveFromHistoryThread: " + e.toString());
}
}
handler.sendEmptyMessage(-1);
}
}
private class ExportFieldNotesThread extends Thread
{
private final Handler handler;
private volatile boolean needToStop = false;
private int checked = 0;
public ExportFieldNotesThread(Handler handlerIn)
{
setPriority(Thread.MIN_PRIORITY);
handler = handlerIn;
if (adapter != null)
{
checked = adapter.getChecked();
}
}
public void kill()
{
needToStop = true;
}
@Override
public void run()
{
SimpleDateFormat fieldNoteDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
StringBuffer fieldNoteBuffer = new StringBuffer(500);
// We need our own map because LogType would give us localized strings that may
// differ from what gc.com expects in the field note.
// Only the log types that can actually be logged via c:geo are needed here.
Map<LogType, String> logTypes = new HashMap<LogType, String>();
logTypes.put(LogType.LOG_FOUND_IT, "Found it");
logTypes.put(LogType.LOG_DIDNT_FIND_IT, "Didn't find it");
logTypes.put(LogType.LOG_NOTE, "Write Note");
logTypes.put(LogType.LOG_NEEDS_ARCHIVE, "Needs archived");
logTypes.put(LogType.LOG_NEEDS_MAINTENANCE, "Needs Maintenance");
logTypes.put(LogType.LOG_WILL_ATTEND, "Will Attend");
logTypes.put(LogType.LOG_ATTENDED, "Attended");
logTypes.put(LogType.LOG_WEBCAM_PHOTO_TAKEN, "Webcam Photo Taken");
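// each exported line has the form GEOCODE,date,log type,"log text"
// (double quotes inside the text are replaced by single quotes)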
for (cgCache cache : cacheList) {
if (checked > 0 && !cache.isStatusChecked()) {
handler.sendEmptyMessage(0);
yield();
continue;
}
try {
if (needToStop)
{
Log.i(Settings.tag, "Stopped exporting process.");
break;
}
if (cache.isLogOffline())
{
cgLog log = app.loadLogOffline(cache.getGeocode());
if (null != logTypes.get(log.type))
{
fieldNoteBuffer.append(cache.getGeocode())
.append(',')
.append(fieldNoteDateFormat.format(new Date(log.date)))
.append(',')
.append(logTypes.get(log.type))
.append(",\"")
.append(StringUtils.replaceChars(log.log, '"', '\''))
.append("\"\n");
}
}
detailProgress++;
handler.sendEmptyMessage(cacheList.indexOf(cache));
yield();
} catch (Exception e) {
Log.e(Settings.tag, "cgeocaches.ExportFieldNotesThread: " + e.toString());
}
}
if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED))
{
File exportLocation = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/field-notes");
exportLocation.mkdirs();
SimpleDateFormat fileNameDateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
File exportFile = new File(exportLocation + "/" + fileNameDateFormat.format(new Date()) + ".txt");
OutputStream os = null;
Writer fw = null;
try
{
os = new FileOutputStream(exportFile);
fw = new OutputStreamWriter(os, "ISO-8859-1"); // TODO: gc.com doesn't support UTF-8
fw.write(fieldNoteBuffer.toString());
Message.obtain(handler, -2, exportFile).sendToTarget();
} catch (IOException e) {
Log.e(Settings.tag, "cgeocaches.ExportFieldNotesThread: " + e.toString());
handler.sendEmptyMessage(-3);
} finally
{
if (fw != null)
{
try {
fw.close();
} catch (IOException e) {
Log.e(Settings.tag, "cgeocaches.ExportFieldNotesThread: " + e.toString());
}
}
}
}
handler.sendEmptyMessage(-1);
}
}
private class MoreCachesListener implements View.OnClickListener {
@Override
public void onClick(View arg0) {
showProgress(true);
setLoadingCaches();
listFooter.setOnClickListener(null);
LoadNextPageThread thread;
thread = new LoadNextPageThread(loadNextPageHandler);
thread.setRecaptchaHandler(new cgSearchHandler(cgeocaches.this, res, thread));
thread.start();
}
}
private void hideLoading() {
final ListView list = getListView();
final RelativeLayout loading = (RelativeLayout) findViewById(R.id.loading);
if (list.getVisibility() == View.GONE) {
list.setVisibility(View.VISIBLE);
loading.setVisibility(View.GONE);
}
}
/**
* @param view
* unused here but needed since this method is referenced from XML layout
*/
public void selectList(View view) {
if (type != CacheListType.OFFLINE) {
return;
}
lists = app.getLists();
if (lists == null) {
return;
}
final List<CharSequence> listsTitle = new ArrayList<CharSequence>();
for (StoredList list : lists) {
listsTitle.add(list.getTitleAndCount());
}
listsTitle.add("<" + res.getString(R.string.list_menu_create) + ">");
final CharSequence[] items = new CharSequence[listsTitle.size()];
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle(res.getString(R.string.list_title));
builder.setItems(listsTitle.toArray(items), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialogInterface, int itemId) {
if (itemId >= lists.size()) {
// create new list on the fly
createList(new RunnableWithArgument<Integer>() {
@Override
public void run(final Integer arg) {
switchListById(arg.intValue());
}
});
}
else {
switchListById(lists.get(itemId).id);
}
}
});
builder.create().show();
}
public void switchListById(int id) {
StoredList list = null;
if (id >= 0) {
list = app.getList(id);
} else {
return;
}
if (list == null) {
return;
}
listId = list.id;
title = list.title;
Settings.saveLastList(listId);
showProgress(true);
setLoadingCaches();
(new MoveCachesToListThread(listId, new MoveHandler())).start();
}
private class MoveHandler extends Handler {
@Override
public void handleMessage(Message msg) {
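// msg.what carries the id of the list whose contents should be reloaded after the move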
Thread threadPure = new LoadByOfflineThread(loadCachesHandler, coords, msg.what);
threadPure.start();
}
}
private class MoveCachesToListThread extends Thread {
final private int listId;
final private Handler handler;
public MoveCachesToListThread(int listIdIn, Handler handlerIn) {
listId = listIdIn;
handler = handlerIn;
}
@Override
public void run() {
int checked = adapter.getChecked();
if (checked > 0) {
final List<cgCache> cacheListTemp = new ArrayList<cgCache>(cacheList);
for (cgCache cache : cacheListTemp) {
if (cache.isStatusChecked()) {
app.moveToList(cache.getGeocode(), listId);
}
}
}
handler.sendEmptyMessage(listId);
}
}
private void handleListNameInput(final String defaultValue, int dialogTitle, int buttonTitle, final RunnableWithArgument<String> runnable) {
final AlertDialog.Builder alert = new AlertDialog.Builder(this);
final View view = inflater.inflate(R.layout.list_create_dialog, null);
final EditText input = (EditText) view.findViewById(R.id.text);
input.setText(defaultValue);
alert.setTitle(dialogTitle);
alert.setView(view);
alert.setPositiveButton(buttonTitle, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int whichButton) {
// remove whitespace added by the Android keyboard's autocompletion
String listName = StringUtils.trim(input.getText().toString());
if (StringUtils.isNotBlank(listName)) {
runnable.run(listName);
}
}
});
alert.setNegativeButton(res.getString(R.string.list_dialog_cancel), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int whichButton) {
dialog.dismiss();
}
});
alert.show();
}
private void createList(final RunnableWithArgument<Integer> runAfterwards) {
handleListNameInput("", R.string.list_dialog_create_title, R.string.list_dialog_create, new RunnableWithArgument<String>() {
@Override
public void run(final String listName) {
final int newId = app.createList(listName);
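// app.createList returns an id of at least 10 for a successfully created list; smaller values signal failure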
if (newId >= 10) {
showToast(res.getString(R.string.list_dialog_create_ok));
if (runAfterwards != null) {
runAfterwards.run(newId);
}
} else {
showToast(res.getString(R.string.list_dialog_create_err));
}
}
});
}
private void renameList() {
final StoredList list = app.getList(listId);
handleListNameInput(list.title, R.string.list_dialog_rename_title, R.string.list_dialog_rename, new RunnableWithArgument<String>() {
@Override
public void run(final String listName) {
app.renameList(listId, listName);
refreshCurrentList();
}
});
}
private void removeListInternal() {
boolean status = app.removeList(listId);
if (status) {
showToast(res.getString(R.string.list_dialog_remove_ok));
switchListById(1);
} else {
showToast(res.getString(R.string.list_dialog_remove_err));
}
}
private void removeList(final boolean askForConfirmation) {
// If there are no caches on this list, don't bother the user with questions:
// there is no harm in deleting the list, since it can easily be recreated.
if (CollectionUtils.isEmpty(cacheList)) {
removeListInternal();
return;
}
if (!askForConfirmation) {
removeListInternal();
return;
}
// ask for confirmation, since there are caches on the list
final AlertDialog.Builder alert = new AlertDialog.Builder(this);
alert.setTitle(R.string.list_dialog_remove_title);
alert.setMessage(R.string.list_dialog_remove_description);
alert.setPositiveButton(R.string.list_dialog_remove, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int whichButton) {
removeListInternal();
}
});
alert.setNegativeButton(res.getString(R.string.list_dialog_cancel), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int whichButton) {
dialog.dismiss();
}
});
alert.show();
}
/**
* @param view
* unused here but needed since this method is referenced from XML layout
*/
public void goMap(View view) {
if (search == null || CollectionUtils.isEmpty(cacheList)) {
showToast(res.getString(R.string.warn_no_cache_coord));
return;
}
SearchResult searchToUse = search;
// apply filter settings (if there's a filter)
if (adapter != null) {
Set<String> geocodes = new HashSet<String>();
for (cgCache cache : adapter.getFilteredList()) {
geocodes.add(cache.getGeocode());
}
searchToUse = new SearchResult(geocodes);
}
int count = SearchResult.getCount(searchToUse);
String mapTitle = title;
if (count > 0) {
mapTitle = title + " [" + count + "]";
}
CGeoMap.startActivitySearch(this, searchToUse, mapTitle, false);
}
@Override
public void goManual(View view) {
switch (type) {
case OFFLINE:
ActivityMixin.goManual(this, "c:geo-stored");
break;
case HISTORY:
ActivityMixin.goManual(this, "c:geo-history");
break;
default:
ActivityMixin.goManual(this, "c:geo-nearby");
break;
}
}
private void refreshCurrentList() {
switchListById(listId);
}
public static void startActivityOffline(final Context context) {
final Intent cachesIntent = new Intent(context, cgeocaches.class);
cachesIntent.putExtra(EXTRAS_LIST_TYPE, CacheListType.OFFLINE);
context.startActivity(cachesIntent);
}
public static void startActivityCachesAround(final AbstractActivity context, final Geopoint coords) {
final Intent cachesIntent = new Intent(context, cgeocaches.class);
cachesIntent.putExtra(EXTRAS_LIST_TYPE, CacheListType.COORDINATE);
cachesIntent.putExtra("latitude", coords.getLatitude());
cachesIntent.putExtra("longitude", coords.getLongitude());
context.startActivity(cachesIntent);
}
public static void startActivityOwner(final AbstractActivity context, final String userName) {
final Intent cachesIntent = new Intent(context, cgeocaches.class);
cachesIntent.putExtra(EXTRAS_LIST_TYPE, CacheListType.OWNER);
cachesIntent.putExtra("username", userName);
context.startActivity(cachesIntent);
}
public static void startActivityUserName(final AbstractActivity context, final String userName) {
final Intent cachesIntent = new Intent(context, cgeocaches.class);
cachesIntent.putExtra(EXTRAS_LIST_TYPE, CacheListType.USERNAME);
cachesIntent.putExtra("username", userName);
context.startActivity(cachesIntent);
}
private void prepareFilterBar() {
if (Settings.getCacheType() != CacheType.ALL || adapter.isFilter()) {
String filter = "";
String cacheType = Settings.getCacheType().getL10n();
if (adapter.isFilter()) {
filter = ", " + adapter.getFilterName();
}
((TextView) findViewById(R.id.filter_text)).setText(cacheType + filter);
findViewById(R.id.filter_bar).setVisibility(View.VISIBLE);
}
else {
findViewById(R.id.filter_bar).setVisibility(View.GONE);
}
}
/**
* set date comparator for pure event lists
*/
private void setDateComparatorForEventList() {
if (CollectionUtils.isNotEmpty(cacheList)) {
boolean eventsOnly = true;
for (cgCache cache : cacheList) {
if (!cache.isEventCache()) {
eventsOnly = false;
break;
}
}
if (eventsOnly) {
adapter.setComparator(new EventDateComparator());
}
else if (type == CacheListType.HISTORY) {
adapter.setComparator(new VisitComparator());
}
else if (adapter.getCacheComparator() != null && adapter.getCacheComparator() instanceof EventDateComparator) {
adapter.setComparator(null);
}
}
}
public static void startActivityNearest(final Context context, final Geopoint coordsNow) {
final Intent cachesIntent = new Intent(context, cgeocaches.class);
cachesIntent.putExtra(EXTRAS_LIST_TYPE, CacheListType.NEAREST);
cachesIntent.putExtra("latitude", coordsNow.getLatitude());
cachesIntent.putExtra("longitude", coordsNow.getLongitude());
context.startActivity(cachesIntent);
}
public static void startActivityHistory(Context context) {
final Intent cachesIntent = new Intent(context, cgeocaches.class);
cachesIntent.putExtra(EXTRAS_LIST_TYPE, CacheListType.HISTORY);
context.startActivity(cachesIntent);
}
public static void startActivityAddress(Context context, double latitude, double longitude, String address) {
Intent addressIntent = new Intent(context, cgeocaches.class);
addressIntent.putExtra(EXTRAS_LIST_TYPE, CacheListType.ADDRESS);
addressIntent.putExtra("latitude", latitude);
addressIntent.putExtra("longitude", longitude);
addressIntent.putExtra("address", address);
context.startActivity(addressIntent);
}
public static void startActivityCoordinates(final Context context, double latitude, double longitude) {
final Intent cachesIntent = new Intent(context, cgeocaches.class);
cachesIntent.putExtra(EXTRAS_LIST_TYPE, CacheListType.COORDINATE);
cachesIntent.putExtra("latitude", latitude);
cachesIntent.putExtra("longitude", longitude);
context.startActivity(cachesIntent);
}
public static void startActivityKeyword(final Context context, final String keyword) {
final Intent cachesIntent = new Intent(context, cgeocaches.class);
cachesIntent.putExtra(EXTRAS_LIST_TYPE, CacheListType.KEYWORD);
cachesIntent.putExtra("keyword", keyword);
context.startActivity(cachesIntent);
}
public static void startActivityMap(final Context context, final SearchResult search) {
final Intent cachesIntent = new Intent(context, cgeocaches.class);
cachesIntent.putExtra(EXTRAS_LIST_TYPE, CacheListType.MAP);
cachesIntent.putExtra("search", search);
context.startActivity(cachesIntent);
}
}
|
package com.xpn.xwiki.doc;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Element;
import org.dom4j.dom.DOMDocument;
import org.dom4j.dom.DOMElement;
import org.dom4j.io.OutputFormat;
import org.dom4j.io.SAXReader;
import org.suigeneris.jrcs.rcs.Archive;
import org.suigeneris.jrcs.rcs.Version;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.XWikiException;
import com.xpn.xwiki.internal.xml.DOMXMLWriter;
import com.xpn.xwiki.internal.xml.XMLWriter;
public class XWikiAttachment implements Cloneable
{
private static final Log LOG = LogFactory.getLog(XWikiAttachment.class);
private XWikiDocument doc;
private int filesize;
private String filename;
private String author;
private Version version;
private String comment;
private Date date;
private XWikiAttachmentContent attachment_content;
private XWikiAttachmentArchive attachment_archive;
private boolean isMetaDataDirty = false;
public XWikiAttachment(XWikiDocument doc, String filename)
{
this();
setDoc(doc);
setFilename(filename);
}
public XWikiAttachment()
{
this.filesize = 0;
this.filename = "";
this.author = "";
this.comment = "";
this.date = new Date();
}
public long getId()
{
if (this.doc == null) {
return this.filename.hashCode();
} else {
return (this.doc.getFullName() + "/" + this.filename).hashCode();
}
}
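    // setDocId()/setId() below are intentionally empty: the id is always derived from the document
    // and filename (see getId()), and the setters presumably exist only to satisfy the persistence mapping.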
public void setDocId(long id)
{
}
public long getDocId()
{
return this.doc.getId();
}
public void setId(long id)
{
}
/**
* {@inheritDoc}
*
* @see java.lang.Object#clone()
*/
@Override
public Object clone()
{
XWikiAttachment attachment = null;
try {
attachment = getClass().newInstance();
} catch (Exception e) {
// This should not happen
LOG.error("exception while attach.clone", e);
}
attachment.setAuthor(getAuthor());
attachment.setComment(getComment());
attachment.setDate(getDate());
attachment.setDoc(getDoc());
attachment.setFilename(getFilename());
attachment.setFilesize(getFilesize());
attachment.setRCSVersion(getRCSVersion());
if (getAttachment_content() != null) {
attachment.setAttachment_content((XWikiAttachmentContent) getAttachment_content().clone());
attachment.getAttachment_content().setAttachment(attachment);
}
if (getAttachment_archive() != null) {
attachment.setAttachment_archive((XWikiAttachmentArchive) getAttachment_archive().clone());
attachment.getAttachment_archive().setAttachment(attachment);
}
return attachment;
}
/**
     * @return the cached filesize in bytes of the attachment, stored as metadata
*/
public int getFilesize()
{
return this.filesize;
}
/**
     * Set the cached filesize of the attachment, which will be stored as metadata.
     *
     * @param filesize the attachment size in bytes
*/
public void setFilesize(int filesize)
{
if (filesize != this.filesize) {
setMetaDataDirty(true);
}
this.filesize = filesize;
}
/**
* @param context current XWikiContext
     * @return the real filesize in bytes of the attachment. The metadata cannot be trusted as it may have been modified.
* @throws XWikiException
* @since 2.3M2
*/
public int getContentSize(XWikiContext context) throws XWikiException
{
if (this.attachment_content == null) {
this.doc.loadAttachmentContent(this, context);
}
return this.attachment_content.getSize();
}
public String getFilename()
{
return this.filename;
}
public void setFilename(String filename)
{
filename = filename.replaceAll("\\+", " ");
if (!filename.equals(this.filename)) {
setMetaDataDirty(true);
this.filename = filename;
}
}
public String getAuthor()
{
return this.author;
}
public void setAuthor(String author)
{
if (!author.equals(this.author)) {
setMetaDataDirty(true);
}
this.author = author;
}
public String getVersion()
{
return getRCSVersion().toString();
}
public void setVersion(String version)
{
this.version = new Version(version);
}
public String getNextVersion()
{
if (this.version == null) {
return "1.1";
} else {
return ((Version) this.version.clone()).next().toString();
}
}
public Version getRCSVersion()
{
if (this.version == null) {
return new Version("1.1");
}
return (Version) this.version.clone();
}
public void setRCSVersion(Version version)
{
this.version = version;
}
public String getComment()
{
return this.comment != null ? this.comment : "";
}
public void setComment(String comment)
{
if (!getComment().equals(comment)) {
setMetaDataDirty(true);
}
this.comment = comment;
}
public XWikiDocument getDoc()
{
return this.doc;
}
public void setDoc(XWikiDocument doc)
{
this.doc = doc;
}
public Date getDate()
{
return this.date;
}
public void setDate(Date date)
{
// Make sure we drop milliseconds for consistency with the database
if (date != null) {
date.setTime((date.getTime() / 1000) * 1000);
}
this.date = date;
}
public boolean isContentDirty()
{
if (this.attachment_content == null) {
return false;
} else {
return this.attachment_content.isContentDirty();
}
}
public void incrementVersion()
{
if (this.version == null) {
this.version = new Version("1.1");
} else {
this.version = this.version.next();
}
}
public boolean isMetaDataDirty()
{
return this.isMetaDataDirty;
}
public void setMetaDataDirty(boolean metaDataDirty)
{
this.isMetaDataDirty = metaDataDirty;
}
/**
     * Retrieve an attachment as an XML string. Prefer
     * {@link #toXML(com.xpn.xwiki.internal.xml.XMLWriter, boolean, boolean, com.xpn.xwiki.XWikiContext)} to avoid
     * loading the whole attachment into memory when appropriate.
*
* @param bWithAttachmentContent if true, binary content of the attachment is included (base64 encoded)
* @param bWithVersions if true, all archived versions are also included
* @param context current XWikiContext
* @return a string containing an XML representation of the attachment
* @throws XWikiException when an error occurs during wiki operations
*/
public String toStringXML(boolean bWithAttachmentContent, boolean bWithVersions, XWikiContext context)
throws XWikiException
{
// This is very bad. baos holds the entire attachment on the heap, then it makes a copy when toByteArray
// is called, then String forces us to make a copy when we construct a new String.
// Unfortunately this can't be fixed because jrcs demands the content as a String.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
try {
XMLWriter wr = new XMLWriter(baos, new OutputFormat("", true, context.getWiki().getEncoding()));
Document doc = new DOMDocument();
wr.writeDocumentStart(doc);
toXML(wr, bWithAttachmentContent, bWithVersions, context);
wr.writeDocumentEnd(doc);
byte[] array = baos.toByteArray();
baos = null;
return new String(array, context.getWiki().getEncoding());
} catch (IOException e) {
e.printStackTrace();
return "";
}
}
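    /*
     * Illustrative usage sketch (not part of the original class): exporting an attachment as a
     * String, including its binary content but without the version archive. The attachment and
     * context variables are assumed to be available to the caller.
     *
     *   String xml = attachment.toStringXML(true, false, context);
     */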
/**
     * Retrieve an XML representation of the attachment's metadata as an {@link Element}.
     *
     * @return an {@link Element} containing an XML representation of the attachment without its content
* @throws XWikiException when an error occurs during wiki operations
*/
public Element toXML(XWikiContext context) throws XWikiException
{
return toXML(false, false, context);
}
/**
* Write an XML representation of the attachment into an {@link com.xpn.xwiki.internal.xml.XMLWriter}
*
* @param wr the XMLWriter to write to
* @param bWithAttachmentContent if true, binary content of the attachment is included (base64 encoded)
     * @param bWithVersions if true, all archived versions are also included
* @param context current XWikiContext
* @throws IOException when an error occurs during streaming operation
* @throws XWikiException when an error occurs during xwiki operation
* @since 2.3M2
*/
public void toXML(XMLWriter wr, boolean bWithAttachmentContent, boolean bWithVersions, XWikiContext context)
throws IOException, XWikiException
{
        // IMPORTANT: we don't use the SAX APIs here because the specified XMLWriter could be a DOMXMLWriter,
        // kept for backward compatibility reasons
Element docel = new DOMElement("attachment");
wr.writeOpen(docel);
Element el = new DOMElement("filename");
el.addText(getFilename());
wr.write(el);
el = new DOMElement("filesize");
el.addText("" + getFilesize());
wr.write(el);
el = new DOMElement("author");
el.addText(getAuthor());
wr.write(el);
long d = getDate().getTime();
el = new DOMElement("date");
el.addText("" + d);
wr.write(el);
el = new DOMElement("version");
el.addText(getVersion());
wr.write(el);
el = new DOMElement("comment");
el.addText(getComment());
wr.write(el);
if (bWithAttachmentContent) {
el = new DOMElement("content");
// We need to make sure content is loaded
loadContent(context);
XWikiAttachmentContent acontent = getAttachment_content();
if (acontent != null) {
wr.writeBase64(el, getAttachment_content().getContentInputStream());
} else {
el.addText("");
wr.write(el);
}
}
if (bWithVersions) {
// We need to make sure content is loaded
XWikiAttachmentArchive aarchive = loadArchive(context);
if (aarchive != null) {
el = new DOMElement("versions");
try {
el.addText(new String(aarchive.getArchive()));
wr.write(el);
} catch (XWikiException e) {
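                    // the version archive could not be serialized; the <versions> element is simply omitted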
}
}
}
wr.writeClose(docel);
}
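    /*
     * Illustrative usage sketch (not part of the original class): streaming the attachment XML
     * directly to an output stream instead of building a String, which is what this method is
     * intended for. The "out" stream and the context are assumed to be provided by the caller.
     *
     *   XMLWriter writer = new XMLWriter(out, new OutputFormat("", true, context.getWiki().getEncoding()));
     *   attachment.toXML(writer, true, false, context);
     */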
/**
     * Retrieve an XML representation of the attachment as an {@link Element}. Prefer
     * {@link #toXML(com.xpn.xwiki.internal.xml.XMLWriter, boolean, boolean, com.xpn.xwiki.XWikiContext)} to avoid
     * loading the whole attachment into memory when appropriate.
*
* @param bWithAttachmentContent if true, binary content of the attachment is included (base64 encoded)
* @param bWithVersions if true, all archived versions are also included
* @param context current XWikiContext
* @return an {@link Element} containing an XML representation of the attachment
* @throws XWikiException when an error occurs during wiki operations
* @since 2.3M2
*/
public Element toXML(boolean bWithAttachmentContent, boolean bWithVersions, XWikiContext context)
throws XWikiException
{
Document doc = new DOMDocument();
DOMXMLWriter wr = new DOMXMLWriter(doc, new OutputFormat("", true, context.getWiki().getEncoding()));
try {
toXML(wr, bWithAttachmentContent, bWithVersions, context);
} catch (IOException e) {
throw new RuntimeException(e);
}
return doc.getRootElement();
}
public void fromXML(String data) throws XWikiException
{
SAXReader reader = new SAXReader();
Document domdoc = null;
try {
StringReader in = new StringReader(data);
domdoc = reader.read(in);
} catch (DocumentException e) {
throw new XWikiException(XWikiException.MODULE_XWIKI_DOC, XWikiException.ERROR_DOC_XML_PARSING,
"Error parsing xml", e, null);
}
Element docel = domdoc.getRootElement();
fromXML(docel);
}
public void fromXML(Element docel) throws XWikiException
{
setFilename(docel.element("filename").getText());
setFilesize(Integer.parseInt(docel.element("filesize").getText()));
setAuthor(docel.element("author").getText());
setVersion(docel.element("version").getText());
setComment(docel.element("comment").getText());
String sdate = docel.element("date").getText();
Date date = new Date(Long.parseLong(sdate));
setDate(date);
Element contentel = docel.element("content");
if (contentel != null) {
String base64content = contentel.getText();
byte[] content = Base64.decodeBase64(base64content.getBytes());
setContent(content);
}
Element archiveel = docel.element("versions");
if (archiveel != null) {
String archive = archiveel.getText();
setArchive(archive);
}
}
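    /*
     * Illustrative input sketch (not part of the original class): the XML shape accepted by
     * fromXML(String), mirroring the elements written by toXML() above. The values are made up.
     *
     *   <attachment>
     *     <filename>report.pdf</filename>
     *     <filesize>1024</filesize>
     *     <author>XWiki.JohnDoe</author>
     *     <date>1300000000000</date>
     *     <version>1.1</version>
     *     <comment>initial upload</comment>
     *   </attachment>
     */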
public XWikiAttachmentContent getAttachment_content()
{
return this.attachment_content;
}
public void setAttachment_content(XWikiAttachmentContent attachment_content)
{
this.attachment_content = attachment_content;
}
public XWikiAttachmentArchive getAttachment_archive()
{
return this.attachment_archive;
}
public void setAttachment_archive(XWikiAttachmentArchive attachment_archive)
{
this.attachment_archive = attachment_archive;
}
/**
     * Retrieve the content of this attachment as a byte array.
*
* @param context current XWikiContext
* @return a byte array containing the binary data content of the attachment
* @throws XWikiException when an error occurs during wiki operation
* @deprecated use {@link #getContentInputStream(XWikiContext)} instead
*/
@Deprecated
public byte[] getContent(XWikiContext context) throws XWikiException
{
if (this.attachment_content == null) {
this.doc.loadAttachmentContent(this, context);
}
return this.attachment_content.getContent();
}
/**
     * Retrieve the content of this attachment as an input stream.
*
* @param context current XWikiContext
* @return an InputStream to consume for receiving the content of this attachment
* @throws XWikiException when an error occurs during wiki operation
* @since 2.3M2
*/
public InputStream getContentInputStream(XWikiContext context) throws XWikiException
{
if (this.attachment_content == null) {
this.doc.loadAttachmentContent(this, context);
}
return this.attachment_content.getContentInputStream();
}
public Archive getArchive()
{
if (this.attachment_archive == null) {
return null;
} else {
return this.attachment_archive.getRCSArchive();
}
}
public void setArchive(Archive archive)
{
if (this.attachment_archive == null) {
this.attachment_archive = new XWikiAttachmentArchive();
this.attachment_archive.setAttachment(this);
}
this.attachment_archive.setRCSArchive(archive);
}
public void setArchive(String data) throws XWikiException
{
if (this.attachment_archive == null) {
this.attachment_archive = new XWikiAttachmentArchive();
this.attachment_archive.setAttachment(this);
}
this.attachment_archive.setArchive(data.getBytes());
}
public synchronized Version[] getVersions()
{
try {
return getAttachment_archive().getVersions();
} catch (Exception ex) {
LOG.warn(String.format("Cannot retrieve versions of attachment [%s@%s]: %s", getFilename(), getDoc()
.getFullName(), ex.getMessage()));
return new Version[] {new Version(this.getVersion())};
}
}
// We assume versions go from 1.1 to the current one
// This allows not to read the full archive file
public synchronized List<Version> getVersionList() throws XWikiException
{
List<Version> list = new ArrayList<Version>();
Version v = new Version("1.1");
while (true) {
list.add(v);
if (v.toString().equals(this.version.toString())) {
break;
}
            v = v.next();
}
return list;
}
/**
* Set the content of an attachment from a byte array.
*
* @param data a byte array with the binary content of the attachment
* @deprecated use {@link #setContent(java.io.InputStream, int)} instead
*/
@Deprecated
public void setContent(byte[] data)
{
if (this.attachment_content == null) {
this.attachment_content = new XWikiAttachmentContent();
this.attachment_content.setAttachment(this);
}
this.attachment_content.setContent(data);
}
/**
* Set the content of an attachment from an InputStream.
*
* @param is the input stream that will be read
     * @param length the length in bytes to read
* @throws IOException when an error occurs during streaming operation
* @since 2.3M2
*/
public void setContent(InputStream is, int length) throws IOException
{
if (this.attachment_content == null) {
this.attachment_content = new XWikiAttachmentContent();
this.attachment_content.setAttachment(this);
}
this.attachment_content.setContent(is, length);
}
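    /*
     * Illustrative usage sketch (not part of the original class): filling the attachment content
     * from a file without materializing a byte array first. The file variable is assumed.
     *
     *   InputStream in = new FileInputStream(file);
     *   try {
     *       attachment.setContent(in, (int) file.length());
     *   } finally {
     *       in.close();
     *   }
     */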
public void loadContent(XWikiContext context) throws XWikiException
{
if (this.attachment_content == null) {
try {
context.getWiki().getAttachmentStore().loadAttachmentContent(this, context, true);
} catch (Exception ex) {
LOG.warn(String.format("Failed to load content for attachment [%s@%s]. "
+ "This attachment is broken, please consider re-uploading it. " + "Internal error: %s",
getFilename(), (this.doc != null) ? this.doc.getFullName() : "<unknown>", ex.getMessage()));
}
}
}
public XWikiAttachmentArchive loadArchive(XWikiContext context) throws XWikiException
{
if (this.attachment_archive == null) {
try {
this.attachment_archive =
context.getWiki().getAttachmentVersioningStore().loadArchive(this, context, true);
} catch (Exception ex) {
LOG.warn(String.format("Failed to load archive for attachment [%s@%s]. "
+ "This attachment is broken, please consider re-uploading it. " + "Internal error: %s",
getFilename(), (this.doc != null) ? this.doc.getFullName() : "<unknown>", ex.getMessage()));
}
}
return this.attachment_archive;
}
public void updateContentArchive(XWikiContext context) throws XWikiException
{
if (this.attachment_content == null) {
return;
}
loadArchive(context).updateArchive(getContent(context), context);
}
public String getMimeType(XWikiContext context)
{
// Choose the right content type
String mimetype = context.getEngineContext().getMimeType(getFilename().toLowerCase());
if (mimetype != null) {
return mimetype;
} else {
return "application/octet-stream";
}
}
public boolean isImage(XWikiContext context)
{
String contenttype = getMimeType(context);
if (contenttype.startsWith("image/")) {
return true;
} else {
return false;
}
}
public XWikiAttachment getAttachmentRevision(String rev, XWikiContext context) throws XWikiException
{
if (StringUtils.equals(rev, this.getVersion())) {
return this;
}
return loadArchive(context).getRevision(this, rev, context);
}
}
|
package com.exedio.cope.pattern;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import com.exedio.cope.Item;
import com.exedio.cope.ItemAttribute;
import com.exedio.cope.Pattern;
import com.exedio.cope.Query;
import com.exedio.cope.SetValue;
import com.exedio.cope.Type;
import com.exedio.cope.UniqueConstraint;
import com.exedio.cope.UniqueViolationException;
/**
* Makes a set of instances of type <tt>T</tt> available
* on any instance of type <tt>S</tt>.
 * This set cannot contain duplicates,
 * and the user has no control over the order of its elements.
*
* @see VectorRelation
* @author Ralf Wiebicke
*/
public final class Relation<S extends Item, T extends Item> extends Pattern
{
final ItemAttribute<S> source;
final ItemAttribute<T> target;
final UniqueConstraint uniqueConstraint;
public Relation(final ItemAttribute<S> source, final ItemAttribute<T> target)
{
this.source = source;
this.target = target;
this.uniqueConstraint = new UniqueConstraint(source, target);
registerSource(source);
registerSource(target);
}
public static final <S extends Item, T extends Item> Relation<S,T> newRelation(final ItemAttribute<S> source, final ItemAttribute<T> target)
{
return new Relation<S, T>(source, target);
}
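	/*
	 * Illustrative usage sketch (not part of the original class): wiring a relation between two
	 * item types. The customer/product attributes are assumed to be ItemAttribute fields already
	 * declared on the joining item type in whatever way the surrounding COPE model defines them.
	 *
	 *   static final Relation<Customer, Product> purchases = Relation.newRelation(customer, product);
	 *
	 *   // later, from application code:
	 *   purchases.addToTargets(someCustomer, someProduct);
	 *   List<Product> bought = purchases.getTargets(someCustomer);
	 */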
public ItemAttribute<S> getSource()
{
return source;
}
public ItemAttribute<T> getTarget()
{
return target;
}
public UniqueConstraint getUniqueConstraint()
{
return uniqueConstraint;
}
@Override
public void initialize()
{
final String name = getName();
if(!source.isInitialized())
initialize(source, name + "Source");
if(!target.isInitialized())
initialize(target, name + "Target");
initialize(uniqueConstraint, name + "UniqueConstraint");
}
public List<T> getTargets(final S source)
{
return new Query<T>(target, this.source.equal(source)).search();
}
public List<S> getSources(final T target)
{
return new Query<S>(source, this.target.equal(target)).search();
}
public List<T> getTargetsAndCast(final Item source)
{
return getTargets(this.source.cast(source));
}
public List<S> getSourcesAndCast(final Item target)
{
return getSources(this.target.cast(target));
}
/**
* @return <tt>true</tt> if the result of {@link #getTargets} changed as a result of the call.
*/
public boolean addToTargets(final S source, final T target)
{
try
{
getType().newItem(new SetValue[]{
this.source.map(source),
this.target.map(target),
});
return true;
}
catch(UniqueViolationException e)
{
assert uniqueConstraint==e.getConstraint();
return false;
}
}
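	/*
	 * Illustrative behaviour sketch (not part of the original class): the boolean result mirrors
	 * java.util.Set#add, so adding the same pair twice leaves the relation unchanged.
	 *
	 *   boolean added = relation.addToTargets(source, target);      // true on the first call
	 *   boolean addedAgain = relation.addToTargets(source, target); // false, pair already present
	 */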
/**
* @return <tt>true</tt> if the result of {@link #getSources} changed as a result of the call.
*/
public boolean addToSources(final T target, final S source)
{
return addToTargets(source, target);
}
/**
* @return <tt>true</tt> if the result of {@link #getTargets} changed as a result of the call.
*/
public boolean removeFromTargets(final S source, final T target)
{
final Item item = uniqueConstraint.searchUnique(new Object[]{source, target});
if(item==null)
return false;
else
{
item.deleteCopeItem();
return true;
}
}
/**
* @return <tt>true</tt> if the result of {@link #getSources} changed as a result of the call.
*/
public boolean removeFromSources(final T target, final S source)
{
return removeFromTargets(source, target);
}
private <L extends Item, R extends Item> void set(
final ItemAttribute<L> leftAttribute,
final ItemAttribute<R> rightAttribute,
final L leftItem,
final Collection<? extends R> rightItems)
{
final Type<? extends Item> type = getType();
final Collection<? extends Item> oldTupels = type.search(leftAttribute.equal(leftItem));
// TODO for better performance one could modify tuples, if rightAttribute is not FINAL
final HashSet<R> keptRightItems = new HashSet<R>();
for(final Item tupel : oldTupels)
{
final R rightItem = rightAttribute.get(tupel);
if(rightItems.contains(rightItem))
{
if(!keptRightItems.add(rightItem))
assert false;
}
else
{
tupel.deleteCopeItem();
}
}
for(final R rightItem : rightItems)
{
if(!keptRightItems.contains(rightItem))
{
type.newItem(new SetValue[]{
leftAttribute.map(leftItem),
rightAttribute.map(rightItem),
});
}
}
}
public void setTargets(final S source, final Collection<? extends T> targets)
{
set(this.source, this.target, source, targets);
}
public void setSources(final T target, final Collection<? extends S> sources)
{
set(this.target, this.source, target, sources);
}
public void setTargetsAndCast(final Item source, final Collection<?> targets)
{
setTargets(this.source.cast(source), this.target.castCollection(targets));
}
public void setSourcesAndCast(final Item target, final Collection<?> sources)
{
setSources(this.target.cast(target), this.source.castCollection(sources));
}
private static final HashMap<Type<?>, List<Relation>> cacheForGetRelationsBySource = new HashMap<Type<?>, List<Relation>>();
/**
* Returns all relations where <tt>type</tt> is
* the source type {@link #getSource()}.{@link ItemAttribute#getValueType() getValueType()}.
*
* @see #getRelationsByTarget(Type)
* @see Qualifier#getQualifiers(Type)
*/
public static final List<Relation> getRelationsBySource(final Type<?> type)
{
return getRelations(cacheForGetRelationsBySource, type, true);
}
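	/*
	 * Illustrative usage sketch (not part of the original class): discovering all relations that
	 * point away from a given type, e.g. to clear them before removing an item. The item variable
	 * and its getCopeType() accessor are assumed here.
	 *
	 *   for (final Relation relation : Relation.getRelationsBySource(item.getCopeType())) {
	 *       relation.setTargetsAndCast(item, Collections.emptyList());
	 *   }
	 */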
private static final HashMap<Type<?>, List<Relation>> cacheForGetRelationsByTarget = new HashMap<Type<?>, List<Relation>>();
/**
* Returns all relations where <tt>type</tt> is
* the target type {@link #getTarget()}.{@link ItemAttribute#getValueType() getValueType()}.
*
* @see #getRelationsBySource(Type)
* @see Qualifier#getQualifiers(Type)
*/
public static final List<Relation> getRelationsByTarget(final Type<?> type)
{
return getRelations(cacheForGetRelationsByTarget, type, false);
}
private static final List<Relation> getRelations(final HashMap<Type<?>, List<Relation>> cache, final Type<?> type, final boolean source)
{
synchronized(cache)
{
{
final List<Relation> cachedResult = cache.get(type);
if(cachedResult!=null)
return cachedResult;
}
final ArrayList<Relation> resultModifiable = new ArrayList<Relation>();
for(final ItemAttribute<?> ia : type.getReferences())
for(final Pattern pattern : ia.getPatterns())
{
if(pattern instanceof Relation)
{
final Relation relation = (Relation)pattern;
if(type.equals((source ? relation.source : relation.target).getValueType()))
resultModifiable.add(relation);
}
}
resultModifiable.trimToSize();
final List<Relation> result =
!resultModifiable.isEmpty()
? Collections.unmodifiableList(resultModifiable)
: Collections.<Relation>emptyList();
cache.put(type, result);
return result;
}
}
}
|
package com.openxc.remote;
import java.io.IOException;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.google.common.base.Objects;
import com.openxc.measurements.serializers.JsonSerializer;
import com.openxc.measurements.UnrecognizedMeasurementTypeException;
import android.os.Parcel;
import android.os.Parcelable;
import android.util.Log;
/**
* An untyped measurement used only for the AIDL VehicleService interface.
*
 * This class was originally intended to be the abstract parent of numerical, state and
 * boolean measurements. The architecture ended up using only numerical
 * measurements, with other types being coerced to doubles.
*
* A raw measurement can have a value, an event, both or neither. Most
* measurements have only a value - measurements also with an event include
* things like button events (where both the button direction and action need to
* be identified). The value and event are both nullable, for cases where a
* measurement needs to be returned but there is no valid value for it.
*
* This class implements the Parcelable interface, so it can be used directly as
* a return value or function parameter in an AIDL interface.
*
* @see com.openxc.measurements.BaseMeasurement
*/
public class RawMeasurement implements Parcelable {
private static final String TAG = "RawMeasurement";
private String mCachedSerialization;
private double mTimestamp;
private String mName;
private Object mValue;
private Object mEvent;
public RawMeasurement(String name, Object value) {
this();
mName = name;
mValue = value;
}
public RawMeasurement(String name, Object value, Object event) {
this(name, value);
mEvent = event;
}
public RawMeasurement(String name, Object value, Object event,
double timestamp) {
this(name, value, event);
mTimestamp = timestamp;
}
public RawMeasurement(String serialized)
throws UnrecognizedMeasurementTypeException {
deserialize(serialized, this);
}
private RawMeasurement(Parcel in)
throws UnrecognizedMeasurementTypeException {
readFromParcel(in);
}
private RawMeasurement() {
timestamp();
}
public void writeToParcel(Parcel out, int flags) {
out.writeString(getName());
out.writeDouble(getTimestamp());
out.writeValue(getValue());
out.writeValue(getEvent());
}
public void readFromParcel(Parcel in) {
mName = in.readString();
mTimestamp = in.readDouble();
mValue = in.readValue(null);
mEvent = in.readValue(null);
}
public static final Parcelable.Creator<RawMeasurement> CREATOR =
new Parcelable.Creator<RawMeasurement>() {
public RawMeasurement createFromParcel(Parcel in) {
try {
return new RawMeasurement(in);
} catch(UnrecognizedMeasurementTypeException e) {
return new RawMeasurement();
}
}
public RawMeasurement[] newArray(int size) {
return new RawMeasurement[size];
}
};
public String serialize() {
return serialize(false);
}
public String serialize(boolean reserialize) {
if(reserialize || mCachedSerialization == null) {
Double timestamp = isTimestamped() ? getTimestamp() : null;
mCachedSerialization = JsonSerializer.serialize(getName(),
getValue(), getEvent(), timestamp);
}
return mCachedSerialization;
}
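    /*
     * Illustrative usage sketch (not part of the original class): serializing a measurement to its
     * JSON form. The exact field names are produced by JsonSerializer and are not repeated here.
     *
     *   RawMeasurement measurement = new RawMeasurement("steering_wheel_angle", 45.0);
     *   String json = measurement.serialize();
     */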
private static void deserialize(String measurementString,
RawMeasurement measurement)
throws UnrecognizedMeasurementTypeException {
JsonFactory jsonFactory = new JsonFactory();
JsonParser parser;
try {
parser = jsonFactory.createParser(measurementString);
} catch(IOException e) {
String message = "Couldn't decode JSON from: " + measurementString;
Log.w(TAG, message, e);
throw new UnrecognizedMeasurementTypeException(message, e);
}
try {
parser.nextToken();
while(parser.nextToken() != JsonToken.END_OBJECT) {
String field = parser.getCurrentName();
parser.nextToken();
if(JsonSerializer.NAME_FIELD.equals(field)) {
measurement.mName = parser.getText();
} else if(JsonSerializer.VALUE_FIELD.equals(field)) {
measurement.mValue = parseUnknownType(parser);
} else if(JsonSerializer.EVENT_FIELD.equals(field)) {
measurement.mEvent = parseUnknownType(parser);
} else if(JsonSerializer.TIMESTAMP_FIELD.equals(field)) {
measurement.mTimestamp =
parser.getNumberValue().doubleValue();
}
}
if(measurement.mName == null) {
throw new UnrecognizedMeasurementTypeException(
"Missing name in: " + measurementString);
}
if(measurement.mValue == null) {
throw new UnrecognizedMeasurementTypeException(
"Missing value in: " + measurementString);
}
} catch(IOException e) {
String message = "JSON message didn't have the expected format: "
+ measurementString;
Log.w(TAG, message, e);
throw new UnrecognizedMeasurementTypeException(message, e);
}
measurement.mCachedSerialization = measurementString;
}
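    /*
     * Illustrative input sketch (not part of the original class): a JSON document of the shape
     * deserialize() expects, assuming JsonSerializer uses the field names "name", "value", "event"
     * and "timestamp" for the constants referenced above.
     *
     *   {"name": "button_event", "value": "down", "event": "pressed", "timestamp": 1332786189.5}
     *
     *   RawMeasurement measurement = new RawMeasurement(json);
     */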
private static Object parseUnknownType(JsonParser parser) {
Object value = null;
try {
value = parser.getNumberValue();
} catch(JsonParseException e) {
try {
value = parser.getBooleanValue();
} catch(JsonParseException e2) {
try {
value = parser.getText();
} catch(JsonParseException e3) {
} catch(IOException e4) {
}
} catch(IOException e5) {
}
} catch(IOException e) {
}
return value;
}
public String getName() {
return mName;
}
public Object getValue() {
return mValue;
}
public boolean hasEvent() {
return getEvent() != null;
}
public Object getEvent() {
return mEvent;
}
public boolean isTimestamped() {
return getTimestamp() != null && !Double.isNaN(getTimestamp());
}
public Double getTimestamp() {
return mTimestamp;
}
public void timestamp() {
mTimestamp = System.currentTimeMillis() / 1000.0;
}
public void untimestamp() {
mTimestamp = Double.NaN;
}
public int describeContents() {
return 0;
}
@Override
public String toString() {
return Objects.toStringHelper(this)
.add("value", getValue())
.add("event", getEvent())
.toString();
}
}
|
package com.exedio.cope.util;
import java.io.File;
import javax.servlet.FilterConfig;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import com.exedio.cope.Cope;
import com.exedio.cope.Model;
public class ServletUtil
{
private static final String PARAMETER_MODEL = "model";
public static final ConnectToken getConnectedModel(final ServletConfig config)
throws ServletException
{
return getConnectedModel(config.getInitParameter(PARAMETER_MODEL), "servlet", config.getServletName(), config.getServletContext());
}
public static final ConnectToken getConnectedModel(final FilterConfig config)
throws ServletException
{
return getConnectedModel(config.getInitParameter(PARAMETER_MODEL), "filter", config.getFilterName(), config.getServletContext());
}
private static final ConnectToken getConnectedModel(final String initParam, final String kind, final String name, final ServletContext context)
throws ServletException
{
final String modelName;
final String modelNameSource;
if(initParam==null)
{
final String contextParam = context.getInitParameter(PARAMETER_MODEL);
if(contextParam==null)
throw new ServletException(kind + ' ' + name + ": neither init-param nor context-param '"+PARAMETER_MODEL+"' set");
modelName = contextParam;
modelNameSource = "context-param";
}
else
{
modelName = initParam;
modelNameSource = "init-param";
}
final Model result;
try
{
result = Cope.getModel(modelName);
}
catch(IllegalArgumentException e)
{
throw new ServletException(kind + ' ' + name + ", " + modelNameSource + ' ' + PARAMETER_MODEL + ':' + ' ' + e.getMessage(), e);
}
return connect(result, context, kind + ' ' + '"' + name + '"');
}
/**
* Connects the model using the properties from
* the file <tt>cope.properties</tt>
* in the directory <tt>WEB-INF</tt>
* of the web application.
* @see Model#connect(com.exedio.cope.Properties)
* @see ConnectToken#issue(Model,com.exedio.cope.Properties,String)
*/
public static final ConnectToken connect(final Model model, final ServletContext context, final String name)
{
return ConnectToken.issue(model,
new com.exedio.cope.Properties(
new File(context.getRealPath("WEB-INF/cope.properties")), new Properties.Context(){
public String get(final String key)
{
return context.getInitParameter(key);
}
@Override
public String toString()
{
return "javax.servlet.ServletContext.getInitParameter of '" + context.getServletContextName() + '\'';
}
}
), name);
}
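	/*
	 * Illustrative usage sketch (not part of the original class): a servlet obtaining its connect
	 * token in init(). The "model" init-param (or context-param) must name the model as described
	 * above; the connectToken field is assumed, and the token should eventually be returned via the
	 * ConnectToken API when the servlet is destroyed.
	 *
	 *   public void init(final ServletConfig config) throws ServletException
	 *   {
	 *       super.init(config);
	 *       connectToken = ServletUtil.getConnectedModel(config);
	 *   }
	 */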
/**
* @deprecated Use {@link #getConnectedModel(ServletConfig)} instead
*/
@Deprecated
public static final ConnectToken getModel(final ServletConfig config)
throws ServletException
{
return getConnectedModel(config);
}
/**
* @deprecated Use {@link #getConnectedModel(FilterConfig)} instead
*/
@Deprecated
public static final ConnectToken getModel(final FilterConfig config)
throws ServletException
{
return getConnectedModel(config);
}
/**
* @deprecated Renamed to {@link #connect(Model, ServletContext, String)}.
*/
@Deprecated
public static final ConnectToken initialize(final Model model, final ServletContext context, final String name)
{
return connect(model, context, name);
}
}
|
package squeek.spiceoflife;
import java.io.File;
import java.util.Locale;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraftforge.common.config.Configuration;
import net.minecraftforge.common.config.Property;
import squeek.spiceoflife.compat.IByteIO;
import squeek.spiceoflife.compat.PacketDispatcher;
import squeek.spiceoflife.foodtracker.FoodHistory;
import squeek.spiceoflife.foodtracker.FoodModifier;
import squeek.spiceoflife.foodtracker.foodgroups.FoodGroupConfig;
import squeek.spiceoflife.foodtracker.foodgroups.FoodGroupRegistry;
import squeek.spiceoflife.interfaces.IPackable;
import squeek.spiceoflife.interfaces.IPacketProcessor;
import squeek.spiceoflife.network.PacketBase;
import squeek.spiceoflife.network.PacketConfigSync;
import cpw.mods.fml.common.FMLCommonHandler;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
public class ModConfig implements IPackable, IPacketProcessor
{
public static final ModConfig instance = new ModConfig();
protected ModConfig()
{
}
private static Configuration config;
private static final String COMMENT_SERVER_SIDE_OPTIONS =
"These config settings are server-side only\n"
+ "Their values will get synced to all clients on the server";
/*
* MAIN
*/
private static final String CATEGORY_MAIN = " main ";
private static final String CATEGORY_MAIN_COMMENT =
COMMENT_SERVER_SIDE_OPTIONS;
// whether or not food modifier is actually enabled (we either are the server or know the server has it enabled)
public static boolean FOOD_MODIFIER_ENABLED = false;
// the value written in the config file
public static boolean FOOD_MODIFIER_ENABLED_CONFIG_VAL = ModConfig.FOOD_MODIFIER_ENABLED_DEFAULT;
private static final String FOOD_MODIFIER_ENABLED_NAME = "food.modifier.enabled";
private static final boolean FOOD_MODIFIER_ENABLED_DEFAULT = true;
private static final String FOOD_MODIFIER_ENABLED_COMMENT = "If false, disables the entire diminishing returns part of the mod";
/*
* SERVER
*/
private static final String CATEGORY_SERVER = "server";
private static final String CATEGORY_SERVER_COMMENT =
COMMENT_SERVER_SIDE_OPTIONS;
public static int FOOD_HISTORY_LENGTH = ModConfig.FOOD_HISTORY_LENGTH_DEFAULT;
private static final String FOOD_HISTORY_LENGTH_NAME = "food.history.length";
private static final int FOOD_HISTORY_LENGTH_DEFAULT = 12;
private static final String FOOD_HISTORY_LENGTH_COMMENT =
"The maximum amount of eaten foods stored in the history at a time";
public static boolean FOOD_HISTORY_PERSISTS_THROUGH_DEATH = ModConfig.FOOD_HISTORY_PERSISTS_THROUGH_DEATH_DEFAULT;
private static final String FOOD_HISTORY_PERSISTS_THROUGH_DEATH_NAME = "food.history.persists.through.death";
private static final boolean FOOD_HISTORY_PERSISTS_THROUGH_DEATH_DEFAULT = false;
private static final String FOOD_HISTORY_PERSISTS_THROUGH_DEATH_COMMENT =
"If true, food history will not get reset after every death";
public static int FOOD_EATEN_THRESHOLD = ModConfig.FOOD_EATEN_THRESHOLD_DEFAULT;
private static final String FOOD_EATEN_THRESHOLD_NAME = "new.player.food.eaten.threshold";
private static final int FOOD_EATEN_THRESHOLD_DEFAULT = ModConfig.FOOD_HISTORY_LENGTH / 2;
private static final String FOOD_EATEN_THRESHOLD_COMMENT =
"The number of times a new player (by World) needs to eat before this mod has any effect";
public static boolean CLEAR_HISTORY_ON_FOOD_EATEN_THRESHOLD = ModConfig.CLEAR_HISTORY_ON_FOOD_EATEN_THRESHOLD_DEFAULT;
private static final String CLEAR_HISTORY_ON_FOOD_EATEN_THRESHOLD_NAME = "clear.history.after.food.eaten.threshold.reached";
private static final boolean CLEAR_HISTORY_ON_FOOD_EATEN_THRESHOLD_DEFAULT = false;
private static final String CLEAR_HISTORY_ON_FOOD_EATEN_THRESHOLD_COMMENT =
"If true, a player's food history will be empty once they pass the " + FOOD_EATEN_THRESHOLD_NAME + "\n"
+ "If false, any food eaten before the threshold is passed will also count after it is passed";
public static boolean USE_FOOD_GROUPS_AS_WHITELISTS = ModConfig.USE_FOOD_GROUPS_AS_WHITELISTS_DEFAULT;
private static final String USE_FOOD_GROUPS_AS_WHITELISTS_NAME = "use.food.groups.as.whitelists";
private static final boolean USE_FOOD_GROUPS_AS_WHITELISTS_DEFAULT = false;
private static final String USE_FOOD_GROUPS_AS_WHITELISTS_COMMENT =
"If true, any foods not in a food group will be excluded from diminishing returns";
public static RoundingMode FOOD_HUNGER_ROUNDING_MODE = null;
public static String FOOD_HUNGER_ROUNDING_MODE_STRING = ModConfig.FOOD_HUNGER_ROUNDING_MODE_DEFAULT;
private static final String FOOD_HUNGER_ROUNDING_MODE_NAME = "food.hunger.rounding.mode";
private static final String FOOD_HUNGER_ROUNDING_MODE_DEFAULT = "round";
private static final String FOOD_HUNGER_ROUNDING_MODE_COMMENT =
"Rounding mode used on the hunger value of foods\n"
+ "Valid options: 'round', 'floor', 'ceiling'";
public static enum RoundingMode
{
ROUND("round")
{
@Override
public double round(double val)
{
return Math.round(val);
}
},
FLOOR("floor")
{
@Override
public double round(double val)
{
return Math.floor(val);
}
},
CEILING("ceiling")
{
@Override
public double round(double val)
{
return Math.ceil(val);
}
};
public final String id;
private RoundingMode(String id)
{
this.id = id;
}
public abstract double round(double val);
}
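	/*
	 * Illustrative behaviour sketch (not part of the original class) for a hunger value of 2.5:
	 *
	 *   RoundingMode.ROUND.round(2.5)   -> 3.0
	 *   RoundingMode.FLOOR.round(2.5)   -> 2.0
	 *   RoundingMode.CEILING.round(2.5) -> 3.0
	 */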
public static boolean AFFECT_FOOD_HUNGER_VALUES = ModConfig.AFFECT_FOOD_HUNGER_VALUES_DEFAULT;
private static final String AFFECT_FOOD_HUNGER_VALUES_NAME = "affect.food.hunger.values";
private static final boolean AFFECT_FOOD_HUNGER_VALUES_DEFAULT = true;
private static final String AFFECT_FOOD_HUNGER_VALUES_COMMENT =
"If true, foods' hunger value will be multiplied by the current nutritional value\n"
+ "Setting this to false and " + ModConfig.AFFECT_FOOD_SATURATION_MODIFIERS_NAME + " to true will make diminishing returns affect saturation only";
public static boolean AFFECT_NEGATIVE_FOOD_HUNGER_VALUES = ModConfig.AFFECT_NEGATIVE_FOOD_HUNGER_VALUES_DEFAULT;
private static final String AFFECT_NEGATIVE_FOOD_HUNGER_VALUES_NAME = "affect.negative.food.hunger.values";
private static final boolean AFFECT_NEGATIVE_FOOD_HUNGER_VALUES_DEFAULT = false;
private static final String AFFECT_NEGATIVE_FOOD_HUNGER_VALUES_COMMENT =
"If true, foods with negative hunger values will be made more negative as nutritional value decreases\n"
+ "NOTE: " + AFFECT_FOOD_HUNGER_VALUES_NAME + " must be true for this to have any affect";
public static boolean AFFECT_FOOD_SATURATION_MODIFIERS = ModConfig.AFFECT_FOOD_SATURATION_MODIFIERS_DEFAULT;
private static final String AFFECT_FOOD_SATURATION_MODIFIERS_NAME = "affect.food.saturation.modifiers";
private static final boolean AFFECT_FOOD_SATURATION_MODIFIERS_DEFAULT = false;
private static final String AFFECT_FOOD_SATURATION_MODIFIERS_COMMENT =
"If true, foods' saturation modifier will be multiplied by the current nutritional value\n"
+ "NOTE: When " + ModConfig.AFFECT_FOOD_HUNGER_VALUES_NAME + " is true, saturation bonuses of foods will automatically decrease as the hunger value of the food decreases\n"
+ "Setting this to true when " + ModConfig.AFFECT_FOOD_HUNGER_VALUES_NAME + " is true will make saturation bonuses decrease disproportionately more than hunger values\n"
+ "Setting this to true and " + ModConfig.AFFECT_FOOD_HUNGER_VALUES_NAME + " to false will make diminishing returns affect saturation only";
public static boolean AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS = ModConfig.AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS_DEFAULT;
private static final String AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS_NAME = "affect.negative.food.saturation.modifiers";
private static final boolean AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS_DEFAULT = false;
private static final String AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS_COMMENT =
"If true, foods with negative saturation modifiers will be made more negative as nutritional value decreases\n"
+ "NOTE: " + AFFECT_FOOD_SATURATION_MODIFIERS_NAME + " must be true for this to have any affect";
public static float FOOD_EATING_SPEED_MODIFIER = ModConfig.FOOD_EATING_SPEED_MODIFIER_DEFAULT;
private static final String FOOD_EATING_SPEED_MODIFIER_NAME = "food.eating.speed.modifier";
private static final float FOOD_EATING_SPEED_MODIFIER_DEFAULT = 1;
private static final String FOOD_EATING_SPEED_MODIFIER_COMMENT =
"If set to greater than zero, food eating speed will be affected by nutritional value\n"
+ "(meaning the lower the nutrtional value, the longer it will take to eat it)\n"
+ "Eating duration is calcualted using the formula (eating_duration / (nutritional_value^eating_speed_modifier))";
public static int FOOD_EATING_DURATION_MAX = ModConfig.FOOD_EATING_DURATION_MAX_DEFAULT;
private static final String FOOD_EATING_DURATION_MAX_NAME = "food.eating.duration.max";
private static final int FOOD_EATING_DURATION_MAX_DEFAULT = 0;
private static final String FOOD_EATING_DURATION_MAX_COMMENT =
"The maximum time it takes to eat a food after being modified by " + ModConfig.FOOD_EATING_SPEED_MODIFIER_NAME + "\n"
+ "The default eating duration is 32. Set this to 0 to remove the limit on eating speed.\n"
+ "Note: If this is set to 0 and " + ModConfig.FOOD_EATING_SPEED_MODIFIER_NAME + " is > 0, a food with 0% nutrtional value will take nearly infinite time to eat";
public static boolean USE_HUNGER_QUEUE = ModConfig.USE_HUNGER_QUEUE_DEFAULT;
private static final String USE_HUNGER_QUEUE_NAME = "use.hunger.restored.for.food.history.length";
private static final boolean USE_HUNGER_QUEUE_DEFAULT = false;
private static final String USE_HUNGER_QUEUE_COMMENT =
"If true, " + FOOD_HISTORY_LENGTH_NAME + " will use amount of hunger restored instead of number of foods eaten for its maximum length\n"
+ "For example, a " + FOOD_HISTORY_LENGTH_NAME + " length of 12 will store a max of 2 foods that restored 6 hunger each, \n"
+ "3 foods that restored 4 hunger each, 12 foods that restored 1 hunger each, etc\n"
+ "NOTE: " + FOOD_HISTORY_LENGTH_NAME + " uses hunger units, where 1 hunger unit = 1/2 hunger bar";
public static boolean USE_TIME_QUEUE = ModConfig.USE_TIME_QUEUE_DEFAULT;
private static final String USE_TIME_QUEUE_NAME = "use.time.for.food.history.length";
private static final boolean USE_TIME_QUEUE_DEFAULT = false;
private static final String USE_TIME_QUEUE_COMMENT =
"If true, " + FOOD_HISTORY_LENGTH_NAME + " will use time (in Minecraft days) instead of number of foods eaten for its maximum length\n"
+ "For example, a " + FOOD_HISTORY_LENGTH_NAME + " length of 12 will store all foods eaten in the last 12 Minecraft days.\n"
+ "Note: On servers, time only advances for each player while they are logged in unless " + ModConfig.PROGRESS_TIME_WHILE_LOGGED_OFF_NAME + " is set to true\n"
+ "Also note: " + USE_HUNGER_QUEUE_NAME + " must be false for this config option to take effect";
public static boolean PROGRESS_TIME_WHILE_LOGGED_OFF = ModConfig.PROGRESS_TIME_WHILE_LOGGED_OFF_DEFAULT;
private static final String PROGRESS_TIME_WHILE_LOGGED_OFF_NAME = "use.time.progress.time.while.logged.off";
private static final boolean PROGRESS_TIME_WHILE_LOGGED_OFF_DEFAULT = false;
private static final String PROGRESS_TIME_WHILE_LOGGED_OFF_COMMENT =
"If true, food history time will still progress for each player while that player is logged out.\n"
+ "NOTE: " + USE_TIME_QUEUE_NAME + " must be true for this to have any affect";
public static String FOOD_MODIFIER_FORMULA = ModConfig.FOOD_MODIFIER_FORMULA_STRING_DEFAULT;
private static final String FOOD_MODIFIER_FORMULA_STRING_NAME = "food.modifier.formula";
private static final String FOOD_MODIFIER_FORMULA_STRING_DEFAULT = "MAX(0, (1 - count/12))^MIN(8, food_hunger_value)";
private static final String FOOD_MODIFIER_FORMULA_STRING_COMMENT =
"Uses the EvalEx expression parser\n"
+ "See: https://github.com/uklimaschewski/EvalEx for syntax/function documentation\n\n"
+ "Available variables:\n"
+ "\tcount : The number of times the food (or its food group) has been eaten within the food history\n"
+ "\thunger_count : The total amount of hunger that the food (or its food group) has restored within the food history (1 hunger unit = 1/2 hunger bar)\n"
+ "\tsaturation_count : The total amount of saturation that the food (or its food group) has restored within the food history (1 saturation unit = 1/2 saturation bar)\n"
+ "\tmax_history_length : The maximum length of the food history (see " + FOOD_HISTORY_LENGTH_NAME + ")\n"
+ "\tcur_history_length : The current length of the food history (<= max_history_length)\n"
+ "\tfood_hunger_value : The default amount of hunger the food would restore in hunger units (1 hunger unit = 1/2 hunger bar)\n"
+ "\tfood_saturation_mod : The default saturation modifier of the food\n"
+ "\tcur_hunger : The current hunger value of the player in hunger units (20 = full)\n"
+ "\tcur_saturation : The current saturation value of the player\n"
+ "\ttotal_food_eaten : The all-time total number of times any food has been eaten by the player\n"
+ "\tfood_group_count : The number of food groups that the food belongs to\n"
+ "\tdistinct_food_groups_eaten : The number of distinct food groups in the player's current food history\n"
+ "\ttotal_food_groups : The total number of enabled food groups\n";
public static boolean GIVE_FOOD_JOURNAL_ON_START = ModConfig.GIVE_FOOD_JOURNAL_ON_START_DEFAULT;
private static final String GIVE_FOOD_JOURNAL_ON_START_NAME = "give.food.journal.as.starting.item";
private static final boolean GIVE_FOOD_JOURNAL_ON_START_DEFAULT = false;
private static final String GIVE_FOOD_JOURNAL_ON_START_COMMENT =
"If true, a food journal will be given to each player as a starting item";
public static boolean GIVE_FOOD_JOURNAL_ON_DIMINISHING_RETURNS = ModConfig.GIVE_FOOD_JOURNAL_ON_DIMINISHING_RETURNS_DEFAULT;
private static final String GIVE_FOOD_JOURNAL_ON_DIMINISHING_RETURNS_NAME = "give.food.journal.on.dimishing.returns.start";
private static final boolean GIVE_FOOD_JOURNAL_ON_DIMINISHING_RETURNS_DEFAULT = false;
private static final String GIVE_FOOD_JOURNAL_ON_DIMINISHING_RETURNS_COMMENT =
"If true, a food journal will be given to each player once diminishing returns start for them\n"
+ "Not given if a player was given a food journal by " + ModConfig.GIVE_FOOD_JOURNAL_ON_START_NAME;
public static float FOOD_CONTAINERS_CHANCE_TO_DROP_FOOD = ModConfig.FOOD_CONTAINERS_CHANCE_TO_DROP_FOOD_DEFAULT;
private static final String FOOD_CONTAINERS_CHANCE_TO_DROP_FOOD_NAME = "food.containers.chance.to.drop.food";
private static final float FOOD_CONTAINERS_CHANCE_TO_DROP_FOOD_DEFAULT = 0.25f;
private static final String FOOD_CONTAINERS_CHANCE_TO_DROP_FOOD_COMMENT =
"The chance for food to drop from an open food container when the player jumps\n"
+ "Temporarily disabled while a better implementation is written (this config option will do nothing)";
public static int FOOD_CONTAINERS_MAX_STACKSIZE = ModConfig.FOOD_CONTAINERS_MAX_STACKSIZE_DEFAULT;
private static final String FOOD_CONTAINERS_MAX_STACKSIZE_NAME = "food.containers.max.stacksize";
private static final int FOOD_CONTAINERS_MAX_STACKSIZE_DEFAULT = 2;
private static final String FOOD_CONTAINERS_MAX_STACKSIZE_COMMENT =
"The maximum stacksize per slot in a food container";
/*
* CLIENT
*/
private static final String CATEGORY_CLIENT = "client";
private static final String CATEGORY_CLIENT_COMMENT =
"These config settings are client-side only";
public static boolean LEFT_CLICK_OPENS_FOOD_CONTAINERS = ModConfig.LEFT_CLICK_OPENS_FOOD_CONTAINERS_DEFAULT;
private static final String LEFT_CLICK_OPENS_FOOD_CONTAINERS_NAME = "left.click.opens.food.containers";
private static final boolean LEFT_CLICK_OPENS_FOOD_CONTAINERS_DEFAULT = false;
private static final String LEFT_CLICK_OPENS_FOOD_CONTAINERS_COMMENT =
"If true, left clicking the air while holding a food container will open it (so that it can be eaten from)";
/*
* ITEMS
*/
public static final String ITEM_FOOD_JOURNAL_NAME = "bookfoodjournal";
public static final String ITEM_LUNCH_BOX_NAME = "lunchbox";
public static final String ITEM_LUNCH_BAG_NAME = "lunchbag";
/*
* FOOD GROUPS
*/
@Deprecated
private static final String CATEGORY_FOODGROUPS = "foodgroups";
private static final String CATEGORY_FOODGROUPS_COMMENT =
"Food groups are defined using .json files in /config/SpiceOfLife/\n"
+ "See /config/SpiceOfLife/example-food-group.json";
/*
* OBSOLETED
*/
// nothing here
public static void init(File file)
{
config = new Configuration(file);
load();
/*
* MAIN
*/
config.getCategory(CATEGORY_MAIN).setComment(CATEGORY_MAIN_COMMENT);
FOOD_MODIFIER_ENABLED_CONFIG_VAL = config.get(CATEGORY_MAIN, FOOD_MODIFIER_ENABLED_NAME, FOOD_MODIFIER_ENABLED_DEFAULT, FOOD_MODIFIER_ENABLED_COMMENT).getBoolean(FOOD_MODIFIER_ENABLED_DEFAULT);
// only use the config value immediately when server-side; the client assumes false until the server syncs the config
if (FMLCommonHandler.instance().getSide() == Side.SERVER)
FOOD_MODIFIER_ENABLED = FOOD_MODIFIER_ENABLED_CONFIG_VAL;
/*
* SERVER
*/
config.getCategory(CATEGORY_SERVER).setComment(CATEGORY_SERVER_COMMENT);
Property FOOD_MODIFIER_PROPERTY = config.get(CATEGORY_SERVER, FOOD_MODIFIER_FORMULA_STRING_NAME, FOOD_MODIFIER_FORMULA_STRING_DEFAULT, FOOD_MODIFIER_FORMULA_STRING_COMMENT);
// enforce the new default if the config has the old default
if (FOOD_MODIFIER_PROPERTY.getString().equals("MAX(0, (1 - count/12))^MAX(0, food_hunger_value-ROUND(MAX(0, 1 - count/12), 0))"))
FOOD_MODIFIER_PROPERTY.set(FOOD_MODIFIER_FORMULA_STRING_DEFAULT);
FOOD_MODIFIER_FORMULA = FOOD_MODIFIER_PROPERTY.getString();
FOOD_HISTORY_LENGTH = config.get(CATEGORY_SERVER, FOOD_HISTORY_LENGTH_NAME, FOOD_HISTORY_LENGTH_DEFAULT, FOOD_HISTORY_LENGTH_COMMENT).getInt(FOOD_HISTORY_LENGTH_DEFAULT);
FOOD_HISTORY_PERSISTS_THROUGH_DEATH = config.get(CATEGORY_SERVER, FOOD_HISTORY_PERSISTS_THROUGH_DEATH_NAME, FOOD_HISTORY_PERSISTS_THROUGH_DEATH_DEFAULT, FOOD_HISTORY_PERSISTS_THROUGH_DEATH_COMMENT).getBoolean(FOOD_HISTORY_PERSISTS_THROUGH_DEATH_DEFAULT);
FOOD_EATEN_THRESHOLD = config.get(CATEGORY_SERVER, FOOD_EATEN_THRESHOLD_NAME, FOOD_EATEN_THRESHOLD_DEFAULT, FOOD_EATEN_THRESHOLD_COMMENT).getInt(FOOD_EATEN_THRESHOLD_DEFAULT);
CLEAR_HISTORY_ON_FOOD_EATEN_THRESHOLD = config.get(CATEGORY_SERVER, CLEAR_HISTORY_ON_FOOD_EATEN_THRESHOLD_NAME, CLEAR_HISTORY_ON_FOOD_EATEN_THRESHOLD_DEFAULT, CLEAR_HISTORY_ON_FOOD_EATEN_THRESHOLD_COMMENT).getBoolean(CLEAR_HISTORY_ON_FOOD_EATEN_THRESHOLD_DEFAULT);
USE_FOOD_GROUPS_AS_WHITELISTS = config.get(CATEGORY_SERVER, USE_FOOD_GROUPS_AS_WHITELISTS_NAME, USE_FOOD_GROUPS_AS_WHITELISTS_DEFAULT, USE_FOOD_GROUPS_AS_WHITELISTS_COMMENT).getBoolean(USE_FOOD_GROUPS_AS_WHITELISTS_DEFAULT);
AFFECT_FOOD_HUNGER_VALUES = config.get(CATEGORY_SERVER, AFFECT_FOOD_HUNGER_VALUES_NAME, AFFECT_FOOD_HUNGER_VALUES_DEFAULT, AFFECT_FOOD_HUNGER_VALUES_COMMENT).getBoolean(AFFECT_FOOD_HUNGER_VALUES_DEFAULT);
AFFECT_NEGATIVE_FOOD_HUNGER_VALUES = config.get(CATEGORY_SERVER, AFFECT_NEGATIVE_FOOD_HUNGER_VALUES_NAME, AFFECT_NEGATIVE_FOOD_HUNGER_VALUES_DEFAULT, AFFECT_NEGATIVE_FOOD_HUNGER_VALUES_COMMENT).getBoolean(AFFECT_NEGATIVE_FOOD_HUNGER_VALUES_DEFAULT);
AFFECT_FOOD_SATURATION_MODIFIERS = config.get(CATEGORY_SERVER, AFFECT_FOOD_SATURATION_MODIFIERS_NAME, AFFECT_FOOD_SATURATION_MODIFIERS_DEFAULT, AFFECT_FOOD_SATURATION_MODIFIERS_COMMENT).getBoolean(AFFECT_FOOD_SATURATION_MODIFIERS_DEFAULT);
AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS = config.get(CATEGORY_SERVER, AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS_NAME, AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS_DEFAULT, AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS_COMMENT).getBoolean(AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS_DEFAULT);
FOOD_EATING_SPEED_MODIFIER = (float) config.get(CATEGORY_SERVER, FOOD_EATING_SPEED_MODIFIER_NAME, FOOD_EATING_SPEED_MODIFIER_DEFAULT, FOOD_EATING_SPEED_MODIFIER_COMMENT).getDouble(FOOD_EATING_SPEED_MODIFIER_DEFAULT);
FOOD_EATING_DURATION_MAX = config.get(CATEGORY_SERVER, FOOD_EATING_DURATION_MAX_NAME, FOOD_EATING_DURATION_MAX_DEFAULT, FOOD_EATING_DURATION_MAX_COMMENT).getInt(FOOD_EATING_DURATION_MAX_DEFAULT);
USE_HUNGER_QUEUE = config.get(CATEGORY_SERVER, USE_HUNGER_QUEUE_NAME, USE_HUNGER_QUEUE_DEFAULT, USE_HUNGER_QUEUE_COMMENT).getBoolean(USE_HUNGER_QUEUE_DEFAULT);
USE_TIME_QUEUE = config.get(CATEGORY_SERVER, USE_TIME_QUEUE_NAME, USE_TIME_QUEUE_DEFAULT, USE_TIME_QUEUE_COMMENT).getBoolean(USE_TIME_QUEUE_DEFAULT);
PROGRESS_TIME_WHILE_LOGGED_OFF = config.get(CATEGORY_SERVER, PROGRESS_TIME_WHILE_LOGGED_OFF_NAME, PROGRESS_TIME_WHILE_LOGGED_OFF_DEFAULT, PROGRESS_TIME_WHILE_LOGGED_OFF_COMMENT).getBoolean(PROGRESS_TIME_WHILE_LOGGED_OFF_DEFAULT);
GIVE_FOOD_JOURNAL_ON_START = config.get(CATEGORY_SERVER, GIVE_FOOD_JOURNAL_ON_START_NAME, GIVE_FOOD_JOURNAL_ON_START_DEFAULT, GIVE_FOOD_JOURNAL_ON_START_COMMENT).getBoolean(GIVE_FOOD_JOURNAL_ON_START_DEFAULT);
GIVE_FOOD_JOURNAL_ON_DIMINISHING_RETURNS = config.get(CATEGORY_SERVER, GIVE_FOOD_JOURNAL_ON_DIMINISHING_RETURNS_NAME, GIVE_FOOD_JOURNAL_ON_DIMINISHING_RETURNS_DEFAULT, GIVE_FOOD_JOURNAL_ON_DIMINISHING_RETURNS_COMMENT).getBoolean(GIVE_FOOD_JOURNAL_ON_DIMINISHING_RETURNS_DEFAULT);
FOOD_CONTAINERS_CHANCE_TO_DROP_FOOD = (float) config.get(CATEGORY_SERVER, FOOD_CONTAINERS_CHANCE_TO_DROP_FOOD_NAME, FOOD_CONTAINERS_CHANCE_TO_DROP_FOOD_DEFAULT, FOOD_CONTAINERS_CHANCE_TO_DROP_FOOD_COMMENT).getDouble(FOOD_CONTAINERS_CHANCE_TO_DROP_FOOD_DEFAULT);
FOOD_CONTAINERS_MAX_STACKSIZE = config.get(CATEGORY_SERVER, FOOD_CONTAINERS_MAX_STACKSIZE_NAME, FOOD_CONTAINERS_MAX_STACKSIZE_DEFAULT, FOOD_CONTAINERS_MAX_STACKSIZE_COMMENT).getInt(FOOD_CONTAINERS_MAX_STACKSIZE_DEFAULT);
FOOD_HUNGER_ROUNDING_MODE_STRING = config.get(CATEGORY_SERVER, FOOD_HUNGER_ROUNDING_MODE_NAME, FOOD_HUNGER_ROUNDING_MODE_DEFAULT, FOOD_HUNGER_ROUNDING_MODE_COMMENT).getString();
setRoundingMode();
/*
* CLIENT
*/
config.getCategory(CATEGORY_CLIENT).setComment(CATEGORY_CLIENT_COMMENT);
LEFT_CLICK_OPENS_FOOD_CONTAINERS = config.get(CATEGORY_CLIENT, LEFT_CLICK_OPENS_FOOD_CONTAINERS_NAME, LEFT_CLICK_OPENS_FOOD_CONTAINERS_DEFAULT, LEFT_CLICK_OPENS_FOOD_CONTAINERS_COMMENT).getBoolean(LEFT_CLICK_OPENS_FOOD_CONTAINERS_DEFAULT);
/*
* FOOD GROUPS
*/
config.getCategory(CATEGORY_FOODGROUPS).setComment(CATEGORY_FOODGROUPS_COMMENT);
FoodGroupConfig.setup(file.getParentFile());
// remove obsolete config options
config.getCategory(CATEGORY_SERVER).remove("use.food.groups");
config.getCategory(CATEGORY_FOODGROUPS).clear();
// temporarily disable chance to drop food, needs a better implementation
FOOD_CONTAINERS_CHANCE_TO_DROP_FOOD = 0;
save();
}
public static void setRoundingMode()
{
for (RoundingMode roundingMode : RoundingMode.values())
{
if (roundingMode.id.equals(FOOD_HUNGER_ROUNDING_MODE_STRING.toLowerCase(Locale.ROOT)))
{
FOOD_HUNGER_ROUNDING_MODE = roundingMode;
break;
}
}
if (FOOD_HUNGER_ROUNDING_MODE == null)
{
ModSpiceOfLife.Log.warn("Rounding mode '" + FOOD_HUNGER_ROUNDING_MODE_STRING + "' not recognized; defaulting to 'round'");
FOOD_HUNGER_ROUNDING_MODE_STRING = "round";
FOOD_HUNGER_ROUNDING_MODE = RoundingMode.ROUND;
}
}
public static void save()
{
config.save();
}
public static void load()
{
config.load();
}
@Override
public void pack(IByteIO data)
{
data.writeBoolean(FOOD_MODIFIER_ENABLED_CONFIG_VAL);
if (FOOD_MODIFIER_ENABLED_CONFIG_VAL)
{
data.writeUTF(FOOD_MODIFIER_FORMULA);
data.writeShort(FOOD_HISTORY_LENGTH);
data.writeBoolean(FOOD_HISTORY_PERSISTS_THROUGH_DEATH);
data.writeInt(FOOD_EATEN_THRESHOLD);
data.writeBoolean(CLEAR_HISTORY_ON_FOOD_EATEN_THRESHOLD);
data.writeBoolean(USE_FOOD_GROUPS_AS_WHITELISTS);
data.writeBoolean(AFFECT_FOOD_SATURATION_MODIFIERS);
data.writeBoolean(AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS);
data.writeFloat(FOOD_EATING_SPEED_MODIFIER);
data.writeInt(FOOD_EATING_DURATION_MAX);
data.writeBoolean(USE_HUNGER_QUEUE);
data.writeBoolean(USE_TIME_QUEUE);
data.writeBoolean(PROGRESS_TIME_WHILE_LOGGED_OFF);
data.writeUTF(FOOD_HUNGER_ROUNDING_MODE_STRING);
}
data.writeInt(FOOD_CONTAINERS_MAX_STACKSIZE);
data.writeFloat(FOOD_CONTAINERS_CHANCE_TO_DROP_FOOD);
}
@Override
public void unpack(IByteIO data)
{
FOOD_MODIFIER_ENABLED = data.readBoolean();
if (FOOD_MODIFIER_ENABLED)
{
FOOD_MODIFIER_FORMULA = data.readUTF();
FOOD_HISTORY_LENGTH = data.readShort();
FOOD_HISTORY_PERSISTS_THROUGH_DEATH = data.readBoolean();
FOOD_EATEN_THRESHOLD = data.readInt();
CLEAR_HISTORY_ON_FOOD_EATEN_THRESHOLD = data.readBoolean();
USE_FOOD_GROUPS_AS_WHITELISTS = data.readBoolean();
AFFECT_FOOD_SATURATION_MODIFIERS = data.readBoolean();
AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS = data.readBoolean();
FOOD_EATING_SPEED_MODIFIER = data.readFloat();
FOOD_EATING_DURATION_MAX = data.readInt();
USE_HUNGER_QUEUE = data.readBoolean();
USE_TIME_QUEUE = data.readBoolean();
PROGRESS_TIME_WHILE_LOGGED_OFF = data.readBoolean();
FOOD_HUNGER_ROUNDING_MODE_STRING = data.readUTF();
}
FOOD_CONTAINERS_MAX_STACKSIZE = data.readInt();
FOOD_CONTAINERS_CHANCE_TO_DROP_FOOD = data.readFloat();
}
@Override
public PacketBase processAndReply(Side side, EntityPlayer player)
{
if (FOOD_MODIFIER_ENABLED)
{
setRoundingMode();
FoodModifier.onGlobalFormulaChanged();
FoodHistory.get(player).onHistoryTypeChanged();
FoodGroupRegistry.clear();
}
return null;
}
public static void sync(EntityPlayerMP player)
{
PacketDispatcher.get().sendTo(new PacketConfigSync(), player);
}
@SideOnly(Side.CLIENT)
public static void assumeClientOnly()
{
// assume false until the server syncs
FOOD_MODIFIER_ENABLED = false;
}
}
|
package bisq.network.p2p.network;
import bisq.common.UserThread;
import bisq.common.proto.network.NetworkEnvelope;
import bisq.common.util.Utilities;
import javafx.beans.property.DoubleProperty;
import javafx.beans.property.IntegerProperty;
import javafx.beans.property.LongProperty;
import javafx.beans.property.SimpleDoubleProperty;
import javafx.beans.property.SimpleIntegerProperty;
import javafx.beans.property.SimpleLongProperty;
import java.util.Date;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import lombok.extern.slf4j.Slf4j;
/**
* Network statistics per connection. As we are also interested in total network statistics
* we use static properties to get traffic of all connections combined.
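*
* A minimal monitoring sketch (illustrative only; the listener body is a placeholder):
*
*   Statistic.totalSentBytesProperty().addListener((observable, oldValue, newValue) ->
*           System.out.println("total sent bytes: " + newValue));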
*/
@Slf4j
public class Statistic {
// Static
private final static long startTime = System.currentTimeMillis();
private final static LongProperty totalSentBytes = new SimpleLongProperty(0);
private final static DoubleProperty totalSentBytesPerSec = new SimpleDoubleProperty(0);
private final static LongProperty totalReceivedBytes = new SimpleLongProperty(0);
private final static DoubleProperty totalReceivedBytesPerSec = new SimpleDoubleProperty(0);
private final static Map<String, Integer> totalReceivedMessages = new ConcurrentHashMap<>();
private final static Map<String, Integer> totalSentMessages = new ConcurrentHashMap<>();
private final static LongProperty numTotalSentMessages = new SimpleLongProperty(0);
private final static DoubleProperty numTotalSentMessagesPerSec = new SimpleDoubleProperty(0);
private final static LongProperty numTotalReceivedMessages = new SimpleLongProperty(0);
private final static DoubleProperty numTotalReceivedMessagesPerSec = new SimpleDoubleProperty(0);
static {
UserThread.runPeriodically(() -> {
numTotalSentMessages.set(totalSentMessages.values().stream().mapToInt(Integer::intValue).sum());
numTotalReceivedMessages.set(totalReceivedMessages.values().stream().mapToInt(Integer::intValue).sum());
long passed = (System.currentTimeMillis() - startTime) / 1000;
numTotalSentMessagesPerSec.set(((double) numTotalSentMessages.get()) / passed);
numTotalReceivedMessagesPerSec.set(((double) numTotalReceivedMessages.get()) / passed);
totalSentBytesPerSec.set(((double) totalSentBytes.get()) / passed);
totalReceivedBytesPerSec.set(((double) totalReceivedBytes.get()) / passed);
}, 1);
// We log statistics every 5 minutes
UserThread.runPeriodically(() -> {
String ls = System.lineSeparator();
log.info("Accumulated network statistics:" + ls +
"Bytes sent: {};" + ls +
"Number of sent messages/Sent messages: {} / {};" + ls +
"Number of sent messages per sec: {};" + ls +
"Bytes received: {}" + ls +
"Number of received messages/Received messages: {} / {};" + ls +
"Number of received messages per sec: {};" + ls,
Utilities.readableFileSize(totalSentBytes.get()),
numTotalSentMessages.get(), totalSentMessages,
numTotalSentMessagesPerSec.get(),
Utilities.readableFileSize(totalReceivedBytes.get()),
numTotalReceivedMessages.get(), totalReceivedMessages,
numTotalReceivedMessagesPerSec.get());
}, TimeUnit.MINUTES.toSeconds(5));
}
public static LongProperty totalSentBytesProperty() {
return totalSentBytes;
}
public static DoubleProperty totalSentBytesPerSecProperty() {
return totalSentBytesPerSec;
}
public static LongProperty totalReceivedBytesProperty() {
return totalReceivedBytes;
}
public static DoubleProperty totalReceivedBytesPerSecProperty() {
return totalReceivedBytesPerSec;
}
public static LongProperty numTotalSentMessagesProperty() {
return numTotalSentMessages;
}
public static DoubleProperty numTotalSentMessagesPerSecProperty() {
return numTotalSentMessagesPerSec;
}
public static LongProperty numTotalReceivedMessagesProperty() {
return numTotalReceivedMessages;
}
public static DoubleProperty numTotalReceivedMessagesPerSecProperty() {
return numTotalReceivedMessagesPerSec;
}
// Instance fields
private final Date creationDate;
private long lastActivityTimestamp = System.currentTimeMillis();
private final LongProperty sentBytes = new SimpleLongProperty(0);
private final LongProperty receivedBytes = new SimpleLongProperty(0);
private final Map<String, Integer> receivedMessages = new ConcurrentHashMap<>();
private final Map<String, Integer> sentMessages = new ConcurrentHashMap<>();
private final IntegerProperty roundTripTime = new SimpleIntegerProperty(0);
// Constructor
Statistic() {
creationDate = new Date();
}
// Update, increment
void updateLastActivityTimestamp() {
UserThread.execute(() -> lastActivityTimestamp = System.currentTimeMillis());
}
void addSentBytes(int value) {
UserThread.execute(() -> {
sentBytes.set(sentBytes.get() + value);
totalSentBytes.set(totalSentBytes.get() + value);
});
}
void addReceivedBytes(int value) {
UserThread.execute(() -> {
receivedBytes.set(receivedBytes.get() + value);
totalReceivedBytes.set(totalReceivedBytes.get() + value);
});
}
// TODO would need msg inspection to get useful information...
void addReceivedMessage(NetworkEnvelope networkEnvelope) {
String messageClassName = networkEnvelope.getClass().getSimpleName();
int counter = 1;
if (receivedMessages.containsKey(messageClassName)) {
counter = receivedMessages.get(messageClassName) + 1;
}
receivedMessages.put(messageClassName, counter);
counter = 1;
if (totalReceivedMessages.containsKey(messageClassName)) {
counter = totalReceivedMessages.get(messageClassName) + 1;
}
totalReceivedMessages.put(messageClassName, counter);
}
void addSentMessage(NetworkEnvelope networkEnvelope) {
String messageClassName = networkEnvelope.getClass().getSimpleName();
int counter = 1;
if (sentMessages.containsKey(messageClassName)) {
counter = sentMessages.get(messageClassName) + 1;
}
sentMessages.put(messageClassName, counter);
counter = 1;
if (totalSentMessages.containsKey(messageClassName)) {
counter = totalSentMessages.get(messageClassName) + 1;
}
totalSentMessages.put(messageClassName, counter);
}
public void setRoundTripTime(int roundTripTime) {
this.roundTripTime.set(roundTripTime);
}
// Getters
public long getLastActivityTimestamp() {
return lastActivityTimestamp;
}
public long getLastActivityAge() {
return System.currentTimeMillis() - lastActivityTimestamp;
}
public long getSentBytes() {
return sentBytes.get();
}
public LongProperty sentBytesProperty() {
return sentBytes;
}
public long getReceivedBytes() {
return receivedBytes.get();
}
public LongProperty receivedBytesProperty() {
return receivedBytes;
}
public Date getCreationDate() {
return creationDate;
}
public IntegerProperty roundTripTimeProperty() {
return roundTripTime;
}
@Override
public String toString() {
return "Statistic{" +
"\n creationDate=" + creationDate +
",\n lastActivityTimestamp=" + lastActivityTimestamp +
",\n sentBytes=" + sentBytes +
",\n receivedBytes=" + receivedBytes +
",\n receivedMessages=" + receivedMessages +
",\n sentMessages=" + sentMessages +
",\n roundTripTime=" + roundTripTime +
"\n}";
}
}
|
package uk.ac.ebi.phenotype;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.mousephenotype.cda.solr.service.ImpressService;
import org.mousephenotype.cda.solr.service.PhenotypeCenterService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.web.servlet.ServletContextInitializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.FilterType;
import org.springframework.data.solr.core.SolrOperations;
import org.springframework.data.solr.core.SolrTemplate;
import org.springframework.data.solr.repository.config.EnableSolrRepositories;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.validation.constraints.NotNull;
/**
* Read only Solr server bean configuration. The writable Solr servers are configured in IndexerConfig.java of the
* indexer module.
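*
* A minimal consumer sketch (illustrative only; the field is hypothetical, injection by bean name via @Qualifier):
*
*   @Autowired
*   @Qualifier("geneCore")
*   private HttpSolrClient geneCore;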
*/
@Configuration
@EnableSolrRepositories(basePackages = {"org.mousephenotype.cda.solr.repositories"}, multicoreSupport = true)
@ComponentScan(
basePackages = {"org.mousephenotype.cda"},
useDefaultFilters = false,
includeFilters = {@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, value = {ImpressService.class})
})
public class SolrServerConfig {
public static final int QUEUE_SIZE = 10000;
public static final int THREAD_COUNT = 3;
@NotNull
@Value("${solr.host}")
private String solrBaseUrl;
@Autowired
ImpressService impressService;
@NotNull
@Value("${imits.solr.host}")
private String imitsSolrBaseUrl;
// Required for spring-data-solr repositories
@Bean
public SolrClient solrClient() { return new HttpSolrClient(solrBaseUrl); }
@Bean
public SolrOperations solrTemplate() { return new SolrTemplate(solrClient()); }
// Required for spring-data-solr repositories
@Bean(name = "allele2Core")
HttpSolrClient getAllele2Core() {
return new HttpSolrClient(solrBaseUrl + "/allele2");
}
@Bean(name = "productCore")
HttpSolrClient getProductCore() {
return new HttpSolrClient(imitsSolrBaseUrl + "/product");
}
// Read only solr servers
//Phenodigm2 server
@Bean(name = "phenodigmCore")
public HttpSolrClient getPhenodigmCore() {
return new HttpSolrClient(solrBaseUrl + "/phenodigm");
}
//Configuration
@Bean(name = "configurationCore")
public HttpSolrClient getConfigurationCore() {
return new HttpSolrClient(solrBaseUrl + "/configuration");
}
//Allele
@Bean(name = "alleleCore")
public HttpSolrClient getAlleleCore() {
return new HttpSolrClient(solrBaseUrl + "/allele");
}
//Autosuggest
@Bean(name = "autosuggestCore")
HttpSolrClient getAutosuggestCore() {
return new HttpSolrClient(solrBaseUrl + "/autosuggest");
}
//Gene
@Bean(name = "geneCore")
HttpSolrClient getGeneCore() {
return new HttpSolrClient(solrBaseUrl + "/gene");
}
//GenotypePhenotype
// TK: this core seems to be used only in the test packages - remove?
@Bean(name = "genotypePhenotypeCore")
HttpSolrClient getGenotypePhenotypeCore() {
return new HttpSolrClient(solrBaseUrl + "/genotype-phenotype");
}
// Impc images core
@Bean(name = "impcImagesCore")
HttpSolrClient getImpcImagesCore() {
return new HttpSolrClient(solrBaseUrl + "/impc_images");
}
//SangerImages
@Bean(name = "sangerImagesCore")
HttpSolrClient getImagesCore() {
return new HttpSolrClient(solrBaseUrl + "/images");
}
//ANATOMY
@Bean(name = "anatomyCore")
HttpSolrClient getAnatomyCore() { return new HttpSolrClient(solrBaseUrl + "/anatomy"); }
@Bean(name = "mpCore")
HttpSolrClient getMpCore() { return new HttpSolrClient(solrBaseUrl + "/mp"); }
//EMAP
@Bean(name = "emapCore")
HttpSolrClient getEmapCore() {
return new HttpSolrClient(solrBaseUrl + "/emap");
}
@Bean(name = "experimentCore")
HttpSolrClient getExperimentCore() {
return new HttpSolrClient(solrBaseUrl + "/experiment");
}
//Pipeline
@Bean(name = "pipelineCore")
HttpSolrClient getPipelineCore() {
return new HttpSolrClient(solrBaseUrl + "/pipeline");
}
//Preqc
@Bean(name = "preqcCore")
HttpSolrClient getPreqcCore() {
return new HttpSolrClient(solrBaseUrl + "/preqc");
}
// @Bean(name = "preqcCore") //DELETEME
// HttpSolrClient getPreqcCore() {
// return new HttpSolrClient(solrBaseUrl + "/genotype-phenotype");
//StatisticalResult
@Bean(name = "statisticalResultCore")
HttpSolrClient getStatisticalResultCore() {
return new HttpSolrClient(solrBaseUrl + "/statistical-result");
}
@Bean(name = "phenotypeCenterService")
PhenotypeCenterService phenotypeCenterService() {
return new PhenotypeCenterService(solrBaseUrl + "/experiment", impressService);
}
protected final Logger logger = LoggerFactory.getLogger(this.getClass().getCanonicalName());
@Bean
public ServletContextInitializer servletContextInitializer() {
return new ServletContextInitializer() {
@Override
public void onStartup(ServletContext servletContext) throws ServletException {
boolean secure = true;
logger.info("setSecure({})", secure);
servletContext.getSessionCookieConfig().setSecure(secure);
}
};
}
}
|
package org.zanata.common;
import java.io.Serializable;
import org.codehaus.jackson.annotate.JsonCreator;
import org.codehaus.jackson.annotate.JsonValue;
public class LocaleId implements Serializable
{
private static final long serialVersionUID = 1L;
private String id;
// TODO split up to language code, country code, qualifier etc..
public static final LocaleId EN = new LocaleId("en");
public static final LocaleId EN_US = new LocaleId("en-US");
public static final LocaleId DE = new LocaleId("de");
public static final LocaleId FR = new LocaleId("fr");
public static final LocaleId ES = new LocaleId("es");
// JaxB needs a no-arg constructor :(
@SuppressWarnings("unused")
public LocaleId()
{
id = null;
}
@JsonCreator
public LocaleId(String localeId)
{
if (localeId == null)
throw new IllegalArgumentException("localeId");
if (localeId.indexOf('_') != -1)
throw new IllegalArgumentException("expected lang[-country[-modifier]], got " + localeId);
this.id = localeId.intern();
}
@Override
public boolean equals(Object obj)
{
if (obj == this)
return true;
if (!(obj instanceof LocaleId))
return false;
return this.id.equals(((LocaleId) obj).id);
}
@Override
public int hashCode()
{
return id.hashCode();
}
@Override
@JsonValue
public String toString()
{
return id;
}
public static LocaleId fromJavaName(String localeName)
{
return new LocaleId(localeName.replace('_', '-'));
}
public String toJavaName()
{
return id.replace('-', '_');
}
public String getId()
{
return id;
}
}
|
package datastructures;
/**
* Trie implementation that is backed by an array. Use one of the static factory methods to create one of the predefined
* tries.
*
* Static Factory Methods
* createLowercaseTrie() - Memory efficient but only supports a-z.
* createUppercaseTrie() - Memory efficient but only supports A-Z.
* createAsciiTrie() - Supports all printable ASCII characters at the cost of ~3x the memory.
*
* Supported Operations
* void add(String str) - Adds the given string into the trie.
* int getPrefixCount(String str) - Returns the number of strings in the trie with a certain prefix.
* int getWordCount(String str) - Returns the number of words in the trie that exactly match str.
* int size() - Return the total number of strings in the trie.
* ArrayTrie moveTo(char a) - Returns the child trie specified by the given character. This is useful for
* iterating through the tree nodes. For example:
*
* String str = "trie_walk_example";
* ArrayTrie iter = trie; // Point iter to the root of the trie
* for (int i = 0; i < str.length(); i++) {
* iter = iter.moveTo(str.charAt(i)); // advance to the next character
* if (iter == null) break; // We've fallen off the trie
*
* // Do stuff with iter here:
* iter.getPrefixCount();
* iter.getWordCount();
* }
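*
* A short construction sketch (illustrative values):
*
* ArrayTrie trie = ArrayTrie.createLowercaseTrie();
* trie.add("car");
* trie.add("cart");
* trie.getPrefixCount("car"); // 2 -- both words start with "car"
* trie.getWordCount("car");   // 1 -- only "car" matches exactly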
*/
public class ArrayTrie {
// Per-instance alphabet configuration so that differently configured tries can coexist
private final int alphabetSize;
private final char charOffset;
private int wordCount = 0;
private int prefixCount = 0;
private ArrayTrie[] next;
private ArrayTrie(int alphabetSize, char startingChar) {
this.alphabetSize = alphabetSize;
this.charOffset = startingChar;
next = new ArrayTrie[alphabetSize];
}
public static ArrayTrie createLowercaseTrie() {
return new ArrayTrie(26, 'a');
}
public static ArrayTrie createUppercaseTrie() {
return new ArrayTrie(26, 'A');
}
// Supports all printable ascii characters, values 32 - 126
public static ArrayTrie createAsciiTrie() {
return new ArrayTrie(95, ' ');
}
public void add(String str) {
ArrayTrie cur = this;
this.prefixCount++;
for (int i=0; i<str.length(); i++) {
cur = cur.getOrSet(str.charAt(i));
cur.prefixCount++;
}
cur.wordCount++;
}
public int getPrefixCount(String str) {
ArrayTrie cur = this;
for (int i=0; i<str.length(); i++) {
cur = cur.next[str.charAt(i) - charOffset];
if (cur == null) {
break;
}
}
if (cur == null) {
return 0;
}
return cur.prefixCount;
}
public int getWordCount(String str) {
ArrayTrie cur = this;
for (int i=0; i<str.length(); i++) {
cur = cur.next[str.charAt(i) - charOffset];
if (cur == null) {
break;
}
}
if (cur == null) {
return 0;
}
return cur.wordCount;
}
public int size() {
return prefixCount;
}
// TODO - think of a better name for this function
public ArrayTrie moveTo(char a) {
return next[a - charOffset];
}
public int getWordCount() {
return wordCount;
}
public int getPrefixCount() {
return prefixCount;
}
// TODO - safeguard all these array accesses with bound checks
private ArrayTrie getOrSet(char a) {
ArrayTrie t = next[a - charOffset];
if (t == null) {
return next[a - charOffset] = new ArrayTrie(alphabetSize, charOffset);
}
return t;
}
}
|
package org.nanohttpd.protocols.http.request;
/**
* HTTP Request methods, with the ability to decode a <code>String</code> back
* to its enum value.
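*
* A minimal lookup sketch (illustrative only; matching is case-sensitive):
*
*   Method get = Method.lookup("GET");      // Method.GET
*   Method unknown = Method.lookup("FOO");  // null; unknown methods are not errors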
*/
public enum Method {
GET,
PUT,
POST,
DELETE,
HEAD,
OPTIONS,
TRACE,
CONNECT,
PATCH,
PROPFIND,
PROPPATCH,
MKCOL,
MOVE,
COPY,
LOCK,
UNLOCK,
NOTIFY,
SUBSCRIBE;
public static Method lookup(String method) {
if (method == null)
return null;
try {
return valueOf(method);
} catch (IllegalArgumentException e) {
// TODO: Log it?
return null;
}
}
}
|
package bisq.network.p2p.peers;
import bisq.network.p2p.NodeAddress;
import bisq.network.p2p.network.CloseConnectionReason;
import bisq.network.p2p.network.Connection;
import bisq.network.p2p.network.ConnectionListener;
import bisq.network.p2p.network.InboundConnection;
import bisq.network.p2p.network.NetworkNode;
import bisq.network.p2p.network.RuleViolation;
import bisq.network.p2p.peers.peerexchange.Peer;
import bisq.network.p2p.peers.peerexchange.PeerList;
import bisq.network.p2p.seed.SeedNodeRepository;
import bisq.common.ClockWatcher;
import bisq.common.Timer;
import bisq.common.UserThread;
import bisq.common.app.Capabilities;
import bisq.common.app.Capability;
import bisq.common.config.Config;
import bisq.common.persistence.PersistenceManager;
import bisq.common.proto.persistable.PersistedDataHost;
import javax.inject.Inject;
import javax.inject.Named;
import com.google.common.annotations.VisibleForTesting;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import javax.annotation.Nullable;
import static com.google.common.base.Preconditions.checkArgument;
@Slf4j
public final class PeerManager implements ConnectionListener, PersistedDataHost {
// Static
private static final long CHECK_MAX_CONN_DELAY_SEC = 10;
// Use a long delay as the bootstrapping peer might need a while until it knows its onion address
private static final long REMOVE_ANONYMOUS_PEER_SEC = 240;
private static final int MAX_REPORTED_PEERS = 1000;
private static final int MAX_PERSISTED_PEERS = 500;
// max age for reported peers is 14 days
private static final long MAX_AGE = TimeUnit.DAYS.toMillis(14);
// Age of what we consider connected peers still as live peers
private static final long MAX_AGE_LIVE_PEERS = TimeUnit.MINUTES.toMillis(30);
private static final boolean PRINT_REPORTED_PEERS_DETAILS = true;
private boolean shutDownRequested;
// Listener
public interface Listener {
void onAllConnectionsLost();
void onNewConnectionAfterAllConnectionsLost();
void onAwakeFromStandby();
}
// Instance fields
private final NetworkNode networkNode;
private final ClockWatcher clockWatcher;
private final Set<NodeAddress> seedNodeAddresses;
private final PersistenceManager<PeerList> persistenceManager;
private final ClockWatcher.Listener clockWatcherListener;
private final List<Listener> listeners = new CopyOnWriteArrayList<>();
// Persistable peerList
private final PeerList peerList = new PeerList();
// Peers we got reported from other peers
@Getter
private final Set<Peer> reportedPeers = new HashSet<>();
// Most recent peers with activity date of last 30 min.
private final Set<Peer> latestLivePeers = new HashSet<>();
private Timer checkMaxConnectionsTimer;
private boolean stopped;
private boolean lostAllConnections;
private int maxConnections;
@Getter
private int minConnections;
private int disconnectFromSeedNode;
private int maxConnectionsPeer;
private int maxConnectionsNonDirect;
private int maxConnectionsAbsolute;
@Getter
private int peakNumConnections;
@Setter
private boolean allowDisconnectSeedNodes;
@Getter
private int numAllConnectionsLostEvents;
// Constructor
@Inject
public PeerManager(NetworkNode networkNode,
SeedNodeRepository seedNodeRepository,
ClockWatcher clockWatcher,
PersistenceManager<PeerList> persistenceManager,
@Named(Config.MAX_CONNECTIONS) int maxConnections) {
this.networkNode = networkNode;
this.seedNodeAddresses = new HashSet<>(seedNodeRepository.getSeedNodeAddresses());
this.clockWatcher = clockWatcher;
this.persistenceManager = persistenceManager;
this.persistenceManager.initialize(peerList, PersistenceManager.Source.PRIVATE_LOW_PRIO);
this.networkNode.addConnectionListener(this);
setConnectionLimits(maxConnections);
// We check if the app was idle for more than 5 sec.
clockWatcherListener = new ClockWatcher.Listener() {
@Override
public void onSecondTick() {
}
@Override
public void onMinuteTick() {
}
@Override
public void onAwakeFromStandby(long missedMs) {
// 'stopped' was probably set to true during a longer interruption (e.g. when we lost all connections);
// now that we are awake again, we set it back to false.
stopped = false;
listeners.forEach(Listener::onAwakeFromStandby);
}
};
clockWatcher.addListener(clockWatcherListener);
}
public void shutDown() {
shutDownRequested = true;
networkNode.removeConnectionListener(this);
clockWatcher.removeListener(clockWatcherListener);
stopCheckMaxConnectionsTimer();
}
// PersistedDataHost implementation
@Override
public void readPersisted() {
PeerList persisted = persistenceManager.getPersisted();
if (persisted != null) {
peerList.setAll(persisted.getSet());
}
}
// ConnectionListener implementation
@Override
public void onConnection(Connection connection) {
if (isSeedNode(connection)) {
connection.setPeerType(Connection.PeerType.SEED_NODE);
}
doHouseKeeping();
if (lostAllConnections) {
lostAllConnections = false;
stopped = false;
log.info("\n
"Established a new connection from/to {} after all connections lost.\n" +
"
listeners.forEach(Listener::onNewConnectionAfterAllConnectionsLost);
}
connection.getPeersNodeAddressOptional()
.flatMap(this::findPeer)
.ifPresent(Peer::onConnection);
}
@Override
public void onDisconnect(CloseConnectionReason closeConnectionReason, Connection connection) {
log.info("onDisconnect called: nodeAddress={}, closeConnectionReason={}",
connection.getPeersNodeAddressOptional(), closeConnectionReason);
handleConnectionFault(connection);
boolean previousLostAllConnections = lostAllConnections;
lostAllConnections = networkNode.getAllConnections().isEmpty();
if (lostAllConnections) {
stopped = true;
if (!shutDownRequested) {
if (!previousLostAllConnections) {
// If we enter to 'All connections lost' we count the event.
numAllConnectionsLostEvents++;
}
log.warn("\n
"All connections lost\n" +
"
listeners.forEach(Listener::onAllConnectionsLost);
}
}
maybeRemoveBannedPeer(closeConnectionReason, connection);
}
@Override
public void onError(Throwable throwable) {
}
// Connection
public boolean hasSufficientConnections() {
return networkNode.getConfirmedConnections().size() >= minConnections;
}
// Checks if that connection has the peers node address
public boolean isConfirmed(NodeAddress nodeAddress) {
return networkNode.getNodeAddressesOfConfirmedConnections().contains(nodeAddress);
}
public void handleConnectionFault(Connection connection) {
connection.getPeersNodeAddressOptional().ifPresent(nodeAddress -> handleConnectionFault(nodeAddress, connection));
}
public void handleConnectionFault(NodeAddress nodeAddress) {
handleConnectionFault(nodeAddress, null);
}
public void handleConnectionFault(NodeAddress nodeAddress, @Nullable Connection connection) {
boolean doRemovePersistedPeer = false;
removeReportedPeer(nodeAddress);
Optional<Peer> persistedPeerOptional = findPersistedPeer(nodeAddress);
if (persistedPeerOptional.isPresent()) {
Peer persistedPeer = persistedPeerOptional.get();
persistedPeer.onDisconnect();
doRemovePersistedPeer = persistedPeer.tooManyFailedConnectionAttempts();
}
boolean ruleViolation = connection != null && connection.getRuleViolation() != null;
doRemovePersistedPeer = doRemovePersistedPeer || ruleViolation;
if (doRemovePersistedPeer)
removePersistedPeer(nodeAddress);
else
removeTooOldPersistedPeers();
}
public boolean isSeedNode(Connection connection) {
return connection.getPeersNodeAddressOptional().isPresent() &&
seedNodeAddresses.contains(connection.getPeersNodeAddressOptional().get());
}
public boolean isSelf(NodeAddress nodeAddress) {
return nodeAddress.equals(networkNode.getNodeAddress());
}
private boolean isSeedNode(Peer peer) {
return seedNodeAddresses.contains(peer.getNodeAddress());
}
public boolean isSeedNode(NodeAddress nodeAddress) {
return seedNodeAddresses.contains(nodeAddress);
}
public boolean isPeerBanned(CloseConnectionReason closeConnectionReason, Connection connection) {
return closeConnectionReason == CloseConnectionReason.PEER_BANNED &&
connection.getPeersNodeAddressOptional().isPresent();
}
private void maybeRemoveBannedPeer(CloseConnectionReason closeConnectionReason, Connection connection) {
if (connection.getPeersNodeAddressOptional().isPresent() && isPeerBanned(closeConnectionReason, connection)) {
NodeAddress nodeAddress = connection.getPeersNodeAddressOptional().get();
seedNodeAddresses.remove(nodeAddress);
removePersistedPeer(nodeAddress);
removeReportedPeer(nodeAddress);
}
}
// Peer
@SuppressWarnings("unused")
public Optional<Peer> findPeer(NodeAddress peersNodeAddress) {
return getAllPeers().stream()
.filter(peer -> peer.getNodeAddress().equals(peersNodeAddress))
.findAny();
}
public Set<Peer> getAllPeers() {
Set<Peer> allPeers = new HashSet<>(getLivePeers());
allPeers.addAll(getPersistedPeers());
allPeers.addAll(reportedPeers);
return allPeers;
}
public Collection<Peer> getPersistedPeers() {
return peerList.getSet();
}
public void addToReportedPeers(Set<Peer> reportedPeersToAdd,
Connection connection,
Capabilities capabilities) {
applyCapabilities(connection, capabilities);
Set<Peer> peers = reportedPeersToAdd.stream()
.filter(peer -> !isSelf(peer.getNodeAddress()))
.collect(Collectors.toSet());
printNewReportedPeers(peers);
// We check if the reported msg is not violating our rules
if (peers.size() <= (MAX_REPORTED_PEERS + maxConnectionsAbsolute + 10)) {
reportedPeers.addAll(peers);
purgeReportedPeersIfExceeds();
getPersistedPeers().addAll(peers);
purgePersistedPeersIfExceeds();
requestPersistence();
printReportedPeers();
} else {
// If a node tries to send too many entries we treat it as a rule violation.
// The reported list includes the connected list. We use the max value and give some extra headroom.
// A shutdown is triggered after the 2nd time too much data is sent.
connection.reportInvalidRequest(RuleViolation.TOO_MANY_REPORTED_PEERS_SENT);
}
}
// Delivers the live peers from the last 30 min (MAX_AGE_LIVE_PEERS)
// We include older peers to avoid risks for network partitioning
public Set<Peer> getLivePeers() {
return getLivePeers(null);
}
public Set<Peer> getLivePeers(@Nullable NodeAddress excludedNodeAddress) {
int oldNumLatestLivePeers = latestLivePeers.size();
Set<Peer> peers = new HashSet<>(latestLivePeers);
Set<Peer> currentLivePeers = getConnectedReportedPeers().stream()
.filter(e -> !isSeedNode(e))
.filter(e -> !e.getNodeAddress().equals(excludedNodeAddress))
.collect(Collectors.toSet());
peers.addAll(currentLivePeers);
long maxAge = new Date().getTime() - MAX_AGE_LIVE_PEERS;
latestLivePeers.clear();
Set<Peer> recentPeers = peers.stream()
.filter(peer -> peer.getDateAsLong() > maxAge)
.collect(Collectors.toSet());
latestLivePeers.addAll(recentPeers);
if (oldNumLatestLivePeers != latestLivePeers.size())
log.info("Num of latestLivePeers={}", latestLivePeers.size());
return latestLivePeers;
}
// Capabilities
public boolean peerHasCapability(NodeAddress peersNodeAddress, Capability capability) {
return findPeersCapabilities(peersNodeAddress)
.map(capabilities -> capabilities.contains(capability))
.orElse(false);
}
public Optional<Capabilities> findPeersCapabilities(NodeAddress nodeAddress) {
// We first look up our own connections as that is our own data. If not found there we look up the peers,
// which include reported peers.
Optional<Capabilities> optionalCapabilities = networkNode.findPeersCapabilities(nodeAddress);
if (optionalCapabilities.isPresent() && !optionalCapabilities.get().isEmpty()) {
return optionalCapabilities;
}
// Reported peers are not trusted data. We could get capabilities which miss the
// peer's real capabilities, or we could get maliciously altered capabilities telling us the peer supports a
// capability which is in fact not supported. This could lead to connection loss as we might send data not
// recognized by the peer. As we register a listener on the connection if we have not set the capabilities from
// our own sources, they would get fixed as soon as we have a connection with that peer, rendering such an
// attack inefficient.
// Also, this risk only applies to peers that are not updated, so in case that would be abused for an
// attack all users have a strong incentive to update ;-).
return getAllPeers().stream()
.filter(peer -> peer.getNodeAddress().equals(nodeAddress))
.findAny()
.map(Peer::getCapabilities);
}
private void applyCapabilities(Connection connection, Capabilities newCapabilities) {
if (newCapabilities == null || newCapabilities.isEmpty()) {
return;
}
connection.getPeersNodeAddressOptional().ifPresent(nodeAddress -> {
getAllPeers().stream()
.filter(peer -> peer.getNodeAddress().equals(nodeAddress))
.filter(peer -> peer.getCapabilities().hasLess(newCapabilities))
.forEach(peer -> peer.setCapabilities(newCapabilities));
});
requestPersistence();
}
// Housekeeping
private void doHouseKeeping() {
if (checkMaxConnectionsTimer == null) {
printConnectedPeers();
checkMaxConnectionsTimer = UserThread.runAfter(() -> {
stopCheckMaxConnectionsTimer();
if (!stopped) {
Set<Connection> allConnections = new HashSet<>(networkNode.getAllConnections());
int size = allConnections.size();
peakNumConnections = Math.max(peakNumConnections, size);
removeAnonymousPeers();
removeSuperfluousSeedNodes();
removeTooOldReportedPeers();
removeTooOldPersistedPeers();
checkMaxConnections();
} else {
log.debug("We have stopped already. We ignore that checkMaxConnectionsTimer.run call.");
}
}, CHECK_MAX_CONN_DELAY_SEC);
}
}
@VisibleForTesting
boolean checkMaxConnections() {
Set<Connection> allConnections = new HashSet<>(networkNode.getAllConnections());
int size = allConnections.size();
peakNumConnections = Math.max(peakNumConnections, size);
log.info("We have {} connections open. Our limit is {}", size, maxConnections);
if (size <= maxConnections) {
log.debug("We have not exceeded the maxConnections limit of {} " +
"so don't need to close any connections.", size);
return false;
}
log.info("We have too many connections open. " +
"Lets try first to remove the inbound connections of type PEER.");
List<Connection> candidates = allConnections.stream()
.filter(e -> e instanceof InboundConnection)
.filter(e -> e.getPeerType() == Connection.PeerType.PEER)
.collect(Collectors.toList());
if (candidates.isEmpty()) {
log.info("No candidates found. We check if we exceed our " +
"maxConnectionsPeer limit of {}", maxConnectionsPeer);
if (size <= maxConnectionsPeer) {
log.info("We have not exceeded maxConnectionsPeer limit of {} " +
"so don't need to close any connections", maxConnectionsPeer);
return false;
}
log.info("We have exceeded maxConnectionsPeer limit of {}. " +
"Lets try to remove ANY connection of type PEER.", maxConnectionsPeer);
candidates = allConnections.stream()
.filter(e -> e.getPeerType() == Connection.PeerType.PEER)
.collect(Collectors.toList());
if (candidates.isEmpty()) {
log.info("No candidates found. We check if we exceed our " +
"maxConnectionsNonDirect limit of {}", maxConnectionsNonDirect);
if (size <= maxConnectionsNonDirect) {
log.info("We have not exceeded maxConnectionsNonDirect limit of {} " +
"so don't need to close any connections", maxConnectionsNonDirect);
return false;
}
log.info("We have exceeded maxConnectionsNonDirect limit of {} " +
"Lets try to remove any connection which is not " +
"of type DIRECT_MSG_PEER or INITIAL_DATA_REQUEST.", maxConnectionsNonDirect);
candidates = allConnections.stream()
.filter(e -> e.getPeerType() != Connection.PeerType.DIRECT_MSG_PEER &&
e.getPeerType() != Connection.PeerType.INITIAL_DATA_REQUEST)
.collect(Collectors.toList());
if (candidates.isEmpty()) {
log.info("No candidates found. We check if we exceed our " +
"maxConnectionsAbsolute limit of {}", maxConnectionsAbsolute);
if (size <= maxConnectionsAbsolute) {
log.info("We have not exceeded maxConnectionsAbsolute limit of {} " +
"so don't need to close any connections", maxConnectionsAbsolute);
return false;
}
log.info("We reached abs. max. connections. Lets try to remove ANY connection.");
candidates = new ArrayList<>(allConnections);
}
}
}
if (!candidates.isEmpty()) {
candidates.sort(Comparator.comparingLong(o -> o.getStatistic().getLastActivityTimestamp()));
Connection connection = candidates.remove(0);
log.info("checkMaxConnections: Num candidates for shut down={}. We close oldest connection: {}", candidates.size(), connection);
log.debug("We are going to shut down the oldest connection.\n\tconnection={}", connection.toString());
if (!connection.isStopped())
connection.shutDown(CloseConnectionReason.TOO_MANY_CONNECTIONS_OPEN, () -> UserThread.runAfter(this::checkMaxConnections, 100, TimeUnit.MILLISECONDS));
return true;
} else {
log.info("No candidates found to remove.\n\t" +
"size={}, allConnections={}", size, allConnections);
return false;
}
}
private void removeAnonymousPeers() {
networkNode.getAllConnections().stream()
.filter(connection -> !connection.hasPeersNodeAddress())
.forEach(connection -> UserThread.runAfter(() -> {
// We give 240 seconds delay and check again if still no address is set
// Keep the delay long as we don't want to disconnect a peer in case we are a seed node just
// because it needs longer for the hidden service publishing
if (!connection.hasPeersNodeAddress() && !connection.isStopped()) {
log.debug("We close the connection as the peer address is still unknown.\n\t" +
"connection={}", connection);
connection.shutDown(CloseConnectionReason.UNKNOWN_PEER_ADDRESS);
}
}, REMOVE_ANONYMOUS_PEER_SEC));
}
private void removeSuperfluousSeedNodes() {
if (allowDisconnectSeedNodes) {
if (networkNode.getConfirmedConnections().size() > disconnectFromSeedNode) {
List<Connection> seedNodes = networkNode.getConfirmedConnections().stream()
.filter(this::isSeedNode)
.collect(Collectors.toList());
if (!seedNodes.isEmpty()) {
seedNodes.sort(Comparator.comparingLong(o -> o.getStatistic().getLastActivityTimestamp()));
log.debug("Number of seed node connections to disconnect. Current size=" + seedNodes.size());
Connection connection = seedNodes.get(0);
log.debug("We are going to shut down the oldest connection.\n\tconnection={}", connection.toString());
connection.shutDown(CloseConnectionReason.TOO_MANY_SEED_NODES_CONNECTED,
() -> UserThread.runAfter(this::removeSuperfluousSeedNodes, 200, TimeUnit.MILLISECONDS));
}
}
}
}
// Reported peers
private void removeReportedPeer(Peer reportedPeer) {
reportedPeers.remove(reportedPeer);
printReportedPeers();
}
private void removeReportedPeer(NodeAddress nodeAddress) {
List<Peer> reportedPeersClone = new ArrayList<>(reportedPeers);
reportedPeersClone.stream()
.filter(e -> e.getNodeAddress().equals(nodeAddress))
.findAny()
.ifPresent(this::removeReportedPeer);
}
private void removeTooOldReportedPeers() {
List<Peer> reportedPeersClone = new ArrayList<>(reportedPeers);
Set<Peer> reportedPeersToRemove = reportedPeersClone.stream()
.filter(reportedPeer -> new Date().getTime() - reportedPeer.getDate().getTime() > MAX_AGE)
.collect(Collectors.toSet());
reportedPeersToRemove.forEach(this::removeReportedPeer);
}
private void purgeReportedPeersIfExceeds() {
int size = reportedPeers.size();
if (size > MAX_REPORTED_PEERS) {
log.info("We have already {} reported peers which exceeds our limit of {}." +
"We remove random peers from the reported peers list.", size, MAX_REPORTED_PEERS);
int diff = size - MAX_REPORTED_PEERS;
List<Peer> list = new ArrayList<>(reportedPeers);
// we don't use sorting by lastActivityDate to keep it more random
for (int i = 0; i < diff; i++) {
if (!list.isEmpty()) {
Peer toRemove = list.remove(new Random().nextInt(list.size()));
removeReportedPeer(toRemove);
}
}
} else {
log.trace("No need to purge reported peers.\n\tWe don't have more then {} reported peers yet.", MAX_REPORTED_PEERS);
}
}
private void printReportedPeers() {
if (!reportedPeers.isEmpty()) {
if (PRINT_REPORTED_PEERS_DETAILS) {
StringBuilder result = new StringBuilder("\n\n
"Collected reported peers:");
List<Peer> reportedPeersClone = new ArrayList<>(reportedPeers);
reportedPeersClone.forEach(e -> result.append("\n").append(e));
result.append("\n
log.trace(result.toString());
}
log.debug("Number of reported peers: {}", reportedPeers.size());
}
}
private void printNewReportedPeers(Set<Peer> reportedPeers) {
if (PRINT_REPORTED_PEERS_DETAILS) {
StringBuilder result = new StringBuilder("We received new reportedPeers:");
List<Peer> reportedPeersClone = new ArrayList<>(reportedPeers);
reportedPeersClone.forEach(e -> result.append("\n\t").append(e));
log.trace(result.toString());
}
log.debug("Number of new arrived reported peers: {}", reportedPeers.size());
}
// Persisted peers
private boolean removePersistedPeer(Peer persistedPeer) {
if (getPersistedPeers().contains(persistedPeer)) {
getPersistedPeers().remove(persistedPeer);
requestPersistence();
return true;
} else {
return false;
}
}
private void requestPersistence() {
persistenceManager.requestPersistence();
}
@SuppressWarnings("UnusedReturnValue")
private boolean removePersistedPeer(NodeAddress nodeAddress) {
Optional<Peer> optionalPersistedPeer = findPersistedPeer(nodeAddress);
return optionalPersistedPeer.isPresent() && removePersistedPeer(optionalPersistedPeer.get());
}
private Optional<Peer> findPersistedPeer(NodeAddress nodeAddress) {
return getPersistedPeers().stream()
.filter(e -> e.getNodeAddress().equals(nodeAddress))
.findAny();
}
private void removeTooOldPersistedPeers() {
Set<Peer> persistedPeersToRemove = getPersistedPeers().stream()
.filter(reportedPeer -> new Date().getTime() - reportedPeer.getDate().getTime() > MAX_AGE)
.collect(Collectors.toSet());
persistedPeersToRemove.forEach(this::removePersistedPeer);
}
private void purgePersistedPeersIfExceeds() {
int size = getPersistedPeers().size();
int limit = MAX_PERSISTED_PEERS;
if (size > limit) {
log.trace("We have already {} persisted peers which exceeds our limit of {}." +
"We remove random peers from the persisted peers list.", size, limit);
int diff = size - limit;
List<Peer> list = new ArrayList<>(getPersistedPeers());
// we don't use sorting by lastActivityDate to avoid attack vectors and keep it more random
for (int i = 0; i < diff; i++) {
if (!list.isEmpty()) {
Peer toRemove = list.remove(new Random().nextInt(list.size()));
removePersistedPeer(toRemove);
}
}
} else {
log.trace("No need to purge persisted peers.\n\tWe don't have more then {} persisted peers yet.", MAX_PERSISTED_PEERS);
}
}
// Getters
public int getMaxConnections() {
return maxConnectionsAbsolute;
}
// Listeners
public void addListener(Listener listener) {
listeners.add(listener);
}
public void removeListener(Listener listener) {
listeners.remove(listener);
}
// Private misc
// Modify this to change the relationships between connection limits.
// maxConnections default 12
private void setConnectionLimits(int maxConnections) {
this.maxConnections = maxConnections; // app node 12; seedNode 30
minConnections = Math.max(1, (int) Math.round(maxConnections * 0.7)); // app node 1-8; seedNode 21
disconnectFromSeedNode = maxConnections; // app node 12; seedNode 30
maxConnectionsPeer = Math.max(4, (int) Math.round(maxConnections * 1.3)); // app node 16; seedNode 39
maxConnectionsNonDirect = Math.max(8, (int) Math.round(maxConnections * 1.7)); // app node 20; seedNode 51
maxConnectionsAbsolute = Math.max(12, (int) Math.round(maxConnections * 2.5)); // app node 30; seedNode 66
}
private Set<Peer> getConnectedReportedPeers() {
// networkNode.getConfirmedConnections includes:
// filter(connection -> connection.getPeersNodeAddressOptional().isPresent())
return networkNode.getConfirmedConnections().stream()
.map((Connection connection) -> {
Capabilities supportedCapabilities = new Capabilities(connection.getCapabilities());
// If we have a new connection the supportedCapabilities is empty.
// We look up whether we have already stored the supportedCapabilities at the persisted or reported peers
// and if so we use that.
Optional<NodeAddress> peersNodeAddressOptional = connection.getPeersNodeAddressOptional();
checkArgument(peersNodeAddressOptional.isPresent()); // getConfirmedConnections delivers only connections where we know the address
NodeAddress peersNodeAddress = peersNodeAddressOptional.get();
boolean capabilitiesNotFoundInConnection = supportedCapabilities.isEmpty();
if (capabilitiesNotFoundInConnection) {
// If not found in connection we look up if we got the Capabilities set from any of the
// reported or persisted peers
Set<Peer> persistedAndReported = new HashSet<>(getPersistedPeers());
persistedAndReported.addAll(getReportedPeers());
Optional<Peer> candidate = persistedAndReported.stream()
.filter(peer -> peer.getNodeAddress().equals(peersNodeAddress))
.filter(peer -> !peer.getCapabilities().isEmpty())
.findAny();
if (candidate.isPresent()) {
supportedCapabilities = new Capabilities(candidate.get().getCapabilities());
}
}
Peer peer = new Peer(peersNodeAddress, supportedCapabilities);
// If we did not find the capabilities from our own connection we add a listener,
// so once we exchange a message containing the capabilities with that peer
// they get set.
if (capabilitiesNotFoundInConnection) {
connection.addWeakCapabilitiesListener(peer);
}
return peer;
})
.collect(Collectors.toSet());
}
private void stopCheckMaxConnectionsTimer() {
if (checkMaxConnectionsTimer != null) {
checkMaxConnectionsTimer.stop();
checkMaxConnectionsTimer = null;
}
}
private void printConnectedPeers() {
if (!networkNode.getConfirmedConnections().isEmpty()) {
StringBuilder result = new StringBuilder("\n\n
"Connected peers for node " + networkNode.getNodeAddress() + ":");
networkNode.getConfirmedConnections().forEach(e -> result.append("\n")
.append(e.getPeersNodeAddressOptional()).append(" ").append(e.getPeerType()));
result.append("\n
log.debug(result.toString());
}
}
}
|
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.gluu.site.ldap.persistence.LdapEntryManager;
import org.jboss.seam.Component;
import org.jboss.seam.ScopeType;
import org.jboss.seam.annotations.AutoCreate;
import org.jboss.seam.annotations.In;
import org.jboss.seam.annotations.Logger;
import org.jboss.seam.annotations.Name;
import org.jboss.seam.annotations.Scope;
import org.jboss.seam.log.Log;
import org.xdi.model.SchemaEntry;
import org.xdi.util.OxConstants;
import org.xdi.util.StringHelper;
import org.xdi.util.exception.InvalidSchemaUpdateException;
import com.unboundid.ldap.sdk.schema.AttributeTypeDefinition;
import com.unboundid.ldap.sdk.schema.ObjectClassDefinition;
@Scope(ScopeType.STATELESS)
@Name("schemaService")
@AutoCreate
public class SchemaService {
@Logger
private Log log;
@In
private LdapEntryManager ldapEntryManager;
/**
* Load schema from DS
*
* @return Schema
*/
public SchemaEntry getSchema() {
SchemaEntry schemaEntry = ldapEntryManager.find(SchemaEntry.class, getDnForSchema());
return schemaEntry;
}
/**
* Add new object class with specified attributes
*
* @param objectClass
* Object class name
* @param attributeTypes
* Attribute types
*/
public void addObjectClass(String objectClass, String attributeTypes, String schemaAddObjectClassWithoutAttributeTypesDefinition, String schemaAddObjectClassWithAttributeTypesDefinition) {
SchemaEntry schemaEntry = new SchemaEntry();
schemaEntry.setDn(getDnForSchema());
String objectClassDefinition;
if (StringHelper.isEmpty(attributeTypes)) {
objectClassDefinition = String.format(schemaAddObjectClassWithoutAttributeTypesDefinition,
objectClass, objectClass);
} else {
objectClassDefinition = String.format(schemaAddObjectClassWithAttributeTypesDefinition,
objectClass, objectClass, attributeTypes);
}
schemaEntry.addObjectClass(objectClassDefinition);
log.debug("Adding new objectClass: {0}", schemaEntry);
ldapEntryManager.merge(schemaEntry);
}
/**
* Remove object class
*
* @param objectClass
* Object class name
*/
public void removeObjectClass(String objectClass) {
SchemaEntry schema = getSchema();
String objectClassDefinition = getObjectClassDefinition(schema, objectClass);
if (objectClassDefinition != null) {
SchemaEntry schemaEntry = new SchemaEntry();
schemaEntry.setDn(getDnForSchema());
schemaEntry.addObjectClass(objectClassDefinition);
log.debug("Removing objectClass: {0}", schemaEntry);
ldapEntryManager.remove(schemaEntry);
}
}
/**
* Add attribute type to object class
*
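* Illustrative effect on a hypothetical definition string (the attribute "newAttr" is made up):
*   before: ( 1.2.3 NAME 'myClass' SUP top MAY ( attrA $ attrB ) )
*   after:  ( 1.2.3 NAME 'myClass' SUP top MAY ( attrA $ attrB $ newAttr ) )
*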
* @param objectClass
* Object class name
* @param attributeType
* Attribute type name
* @throws Exception
*/
public void addAttributeTypeToObjectClass(String objectClass, String attributeType) throws Exception {
SchemaEntry schema = getSchema();
String objectClassDefinition = getObjectClassDefinition(schema, objectClass);
if (objectClassDefinition == null) {
throw new InvalidSchemaUpdateException(String.format(
"Can't add attributeType %s to objectClass %s because objectClass doesn't exist", attributeType, objectClass));
}
String newObjectClassDefinition = null;
String attributeTypesStartPattern = "MAY ( ";
int index = objectClassDefinition.indexOf(attributeTypesStartPattern);
if (index != -1) {
int index2 = objectClassDefinition.indexOf(")", index);
newObjectClassDefinition = objectClassDefinition.substring(0, index2) + "$ " + attributeType + " "
+ objectClassDefinition.substring(index2);
} else {
attributeTypesStartPattern = "MUST objectClass ";
index = objectClassDefinition.indexOf(attributeTypesStartPattern);
if (index != -1) {
int index2 = index + attributeTypesStartPattern.length();
newObjectClassDefinition = objectClassDefinition.substring(0, index2) + "MAY ( " + attributeType + " ) "
+ objectClassDefinition.substring(index2);
}
}
log.debug("Current object class definition:" + objectClassDefinition);
log.debug("New object class definition:" + newObjectClassDefinition);
if (newObjectClassDefinition == null) {
throw new InvalidSchemaUpdateException(String.format("Invalid objectClass definition format"));
}
// Remove current OC definition
SchemaEntry schemaEntry = new SchemaEntry();
schemaEntry.setDn(getDnForSchema());
schemaEntry.addObjectClass(objectClassDefinition);
log.debug("Removing objectClass: {0}", schemaEntry);
ldapEntryManager.remove(schemaEntry);
// Add updated OC definition
SchemaEntry newSchemaEntry = new SchemaEntry();
newSchemaEntry.setDn(getDnForSchema());
newSchemaEntry.addObjectClass(newObjectClassDefinition);
log.debug("Adding attributeType to objectClass: {0}", newSchemaEntry);
ldapEntryManager.merge(newSchemaEntry);
}
/**
* Remove attribute type from object class
*
* @param objectClass
* Object class name
* @param attributeType
* Attribute type name
* @throws Exception
*/
public void removeAttributeTypeFromObjectClass(String objectClass, String attributeType) throws Exception {
SchemaEntry schema = getSchema();
String objectClassDefinition = getObjectClassDefinition(schema, objectClass);
if (objectClassDefinition == null) {
throw new InvalidSchemaUpdateException(String.format(
"Can't add attributeType %s to objectClass %s because objectClass doesn't exist", attributeType, objectClass));
}
String attributeTypePattern = "$ " + attributeType + " ";
int index = objectClassDefinition.indexOf(attributeTypePattern);
if (index == -1) {
attributeTypePattern = " " + attributeType + " $";
index = objectClassDefinition.indexOf(attributeTypePattern);
if (index == -1) {
attributeTypePattern = " MAY ( " + attributeType + " )";
index = objectClassDefinition.indexOf(attributeTypePattern);
if (index == -1) {
throw new InvalidSchemaUpdateException(String.format("Invalid objectClass definition format"));
}
}
}
String newObjectClassDefinition = objectClassDefinition.substring(0, index)
+ objectClassDefinition.substring(index + attributeTypePattern.length());
SchemaEntry schemaEntry = new SchemaEntry();
schemaEntry.setDn(getDnForSchema());
schemaEntry.addObjectClass(newObjectClassDefinition);
log.debug("Removing attributeType from objectClass: {0}", schemaEntry);
ldapEntryManager.merge(schemaEntry);
}
/**
* Add new attribute type
*/
public void addStringAttribute(String oid, String name, String schemaAddAttributeDefinition) throws Exception {
log.info("getting a new instance SchemaEntry ");
SchemaEntry schemaEntry = new SchemaEntry();
log.info("setting the DN ");
schemaEntry.setDn(getDnForSchema());
log.info("adding attribute name ");
log.info("applicationConfiguration.getSchemaAddAttributeDefinition() : ", schemaAddAttributeDefinition);
log.info("oid : ", oid);
log.info("name : ", name);
schemaEntry.addAttributeType(String.format(schemaAddAttributeDefinition, oid, name));
log.debug("Adding new attributeType: {0}", schemaEntry);
log.info("merging data");
ldapEntryManager.merge(schemaEntry);
}
/**
* Remove string attribute
*
* @param attributeType
* Attribute type name
* @throws Exception
*/
public void removeStringAttribute(String attributeType) throws Exception {
SchemaEntry schema = getSchema();
String attributeTypeDefinition = getAttributeTypeDefinition(schema, attributeType);
if (attributeTypeDefinition != null) {
SchemaEntry schemaEntry = new SchemaEntry();
schemaEntry.setDn(getDnForSchema());
schemaEntry.addAttributeType(attributeTypeDefinition);
log.debug("Removing attributeType: {0}", schemaEntry);
ldapEntryManager.remove(schemaEntry);
}
}
/**
* Get attribute type schema definition string
*
* @param schemaEntry
* Schema
* @param attributeType
* Attribute type name
* @return Attribute type schema definition string
*/
public String getAttributeTypeDefinition(SchemaEntry schemaEntry, String attributeType) {
if ((schemaEntry == null) || (attributeType == null)) {
return null;
}
List<AttributeTypeDefinition> attributeTypes = getAttributeTypeDefinitions(schemaEntry,
Arrays.asList(new String[] { attributeType }));
AttributeTypeDefinition attributeTypeDefinition = getAttributeTypeDefinition(attributeTypes, attributeType);
return (attributeTypeDefinition == null) ? null : attributeTypeDefinition.toString();
}
/**
* Get attribute type schema definition string
*
* @param attributeTypes
* @param attributeType
* Attribute type name
* @return Attribute type schema definition string
*/
public AttributeTypeDefinition getAttributeTypeDefinition(List<AttributeTypeDefinition> attributeTypes, String attributeType) {
if (attributeTypes == null || attributeType == null) {
return null;
}
for (AttributeTypeDefinition definition : attributeTypes) {
for (String name : definition.getNames()) {
if (name.equalsIgnoreCase(attributeType)) {
return definition;
}
}
}
return null;
}
public List<AttributeTypeDefinition> getAttributeTypeDefinitions(SchemaEntry schemaEntry, List<String> attributeNames) {
if (schemaEntry == null) {
return null;
}
String[] attrs = attributeNames.toArray(new String[attributeNames.size()]);
for (int i = 0; i < attrs.length; i++) {
attrs[i] = "'" + attrs[i].toLowerCase() + "'";
}
List<AttributeTypeDefinition> result = new ArrayList<AttributeTypeDefinition>();
for (String attributeTypeDefinition : schemaEntry.getAttributeTypes()) {
for (String name : attrs) {
if (attributeTypeDefinition.toLowerCase().contains(name)) { // Optimization to reduce the number of objects
try {
result.add(new AttributeTypeDefinition(attributeTypeDefinition));
} catch (Exception ex) {
log.error("Failed to get attribute type definition by string {0}", ex, attributeTypeDefinition);
}
}
}
}
return result;
}
/**
* Get object class schema definition string
*
* @param schemaEntry
* Schema
* @param objectClass
* Object class name
* @return Object class schema definition string
*/
public String getObjectClassDefinition(SchemaEntry schemaEntry, String objectClass) {
if ((schemaEntry == null) || (objectClass == null)) {
return null;
}
for (String objectClassDefinition : schemaEntry.getObjectClasses()) {
ObjectClassDefinition definition;
try {
definition = new ObjectClassDefinition(objectClassDefinition);
for (String name : definition.getNames()) {
if (name.equalsIgnoreCase(objectClass)) {
return objectClassDefinition;
}
}
} catch (Exception ex) {
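// Skip definitions that cannot be parsed and continue scanning the remaining object classes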
}
}
return null;
}
/**
* Get all attribute names by specified object classes
*
* @param schemaEntry
* Schema
* @param objectClasses
* Object class names
* @return Set of attribute names allowed by the given object classes
*/
public Set<String> getObjectClassesAttributes(SchemaEntry schemaEntry, String[] objectClasses) {
if ((schemaEntry == null) || (objectClasses == null)) {
return null;
}
Map<String, ObjectClassDefinition> objectClassDefinitions = new HashMap<String, ObjectClassDefinition>();
for (String objectClassDefinition : schemaEntry.getObjectClasses()) {
ObjectClassDefinition definition;
try {
definition = new ObjectClassDefinition(objectClassDefinition);
for (String name : definition.getNames()) {
objectClassDefinitions.put(StringHelper.toLowerCase(name), definition);
}
} catch (Exception ex) {
log.error("Failed to parse LDAP object class definition: '{0}'", ex, objectClassDefinition);
}
}
Set<ObjectClassDefinition> resultObjectClassDefinitions = getSuperiorClasses(objectClassDefinitions, objectClasses, true);
Set<String> resultAttributes = getAttributes(resultObjectClassDefinitions, true, true);
return resultAttributes;
}
private Set<String> getAttributes(Set<ObjectClassDefinition> objectClassDefinitions, boolean includeRequired, boolean includeOptional) {
final LinkedHashSet<String> resultAttributes = new LinkedHashSet<String>();
for (final ObjectClassDefinition objectClassDefinition : objectClassDefinitions) {
if (includeRequired) {
for (String attribute : objectClassDefinition.getRequiredAttributes()) {
resultAttributes.add(StringHelper.toLowerCase(attribute));
}
}
if (includeOptional) {
for (String attribute : objectClassDefinition.getOptionalAttributes()) {
resultAttributes.add(StringHelper.toLowerCase(attribute));
}
}
}
return resultAttributes;
}
public Set<ObjectClassDefinition> getSuperiorClasses(final Map<String, ObjectClassDefinition> objectClassDefinitions,
final String[] superiorClasses, final boolean recursive) {
final LinkedHashSet<ObjectClassDefinition> resultObjectClassDefinitions = new LinkedHashSet<ObjectClassDefinition>();
for (final String superiorClass : superiorClasses) {
final ObjectClassDefinition objectClassDefinition = objectClassDefinitions.get(StringHelper.toLowerCase(superiorClass));
if (objectClassDefinition != null) {
resultObjectClassDefinitions.add(objectClassDefinition);
if (recursive) {
getSuperiorClasses(objectClassDefinitions, objectClassDefinition, resultObjectClassDefinitions);
}
}
}
return Collections.unmodifiableSet(resultObjectClassDefinitions);
}
private static void getSuperiorClasses(final Map<String, ObjectClassDefinition> objectClassDefinitions,
final ObjectClassDefinition objectClassDefinition, final Set<ObjectClassDefinition> resultObjectClassDefinitions) {
for (final String superiorClass : objectClassDefinition.getSuperiorClasses()) {
final ObjectClassDefinition superiorObjectClassDefinition = objectClassDefinitions.get(StringHelper.toLowerCase(superiorClass));
if (superiorObjectClassDefinition != null) {
// Recurse only for definitions not seen before, to avoid infinite recursion
// on cyclic superior class declarations
if (resultObjectClassDefinitions.add(superiorObjectClassDefinition)) {
getSuperiorClasses(objectClassDefinitions, superiorObjectClassDefinition, resultObjectClassDefinitions);
}
}
}
}
/**
* Check if schema contains specified attribute
*
* @param attributeType
* Attribute type name
* @return True if schema contains specified attribute
*/
public boolean containsAttributeTypeInSchema(String attributeType) {
SchemaEntry schema = getSchema();
return getAttributeTypeDefinition(schema, attributeType) != null;
}
/**
* Determine object classes by attribute name
*
* @param schemaEntry
* Schema
* @param attributeType
* Attribute type name
* @return Set of object class names that declare the specified attribute
*/
public Set<String> getObjectClassesByAttribute(SchemaEntry schemaEntry, String attributeType) {
if ((schemaEntry == null) || StringHelper.isEmpty(attributeType)) {
return null;
}
String lowerCaseAttributeType = StringHelper.toLowerCase(attributeType);
Set<String> resultObjectClasses = new HashSet<String>();
for (String objectClassDefinition : schemaEntry.getObjectClasses()) {
ObjectClassDefinition definition;
try {
definition = new ObjectClassDefinition(objectClassDefinition);
Set<String> objectClassAttributeTypes = new HashSet<String>();
for (String name : definition.getOptionalAttributes()) {
objectClassAttributeTypes.add(StringHelper.toLowerCase(name));
}
for (String name : definition.getRequiredAttributes()) {
objectClassAttributeTypes.add(StringHelper.toLowerCase(name));
}
if (objectClassAttributeTypes.contains(lowerCaseAttributeType)) {
String objectClassType = definition.getNameOrOID();
resultObjectClasses.add(objectClassType);
}
} catch (Exception ex) {
log.error("Failed to parse LDAP object class definition: '{0}'", ex, objectClassDefinition);
}
}
return resultObjectClasses;
}
/**
* Build DN string for DS schema
*
* @return DN string for DS schema
*/
public String getDnForSchema() {
return OxConstants.schemaDN;
}
/**
* Get schemaService instance
*
* @return SchemaService instance
*/
public static SchemaService instance() {
return (SchemaService) Component.getInstance(SchemaService.class);
}
}
|
package bisq.network.p2p.network;
import bisq.network.p2p.BundleOfEnvelopes;
import bisq.network.p2p.CloseConnectionMessage;
import bisq.network.p2p.ExtendedDataSizePermission;
import bisq.network.p2p.NodeAddress;
import bisq.network.p2p.PrefixedSealedAndSignedMessage;
import bisq.network.p2p.SendersNodeAddressMessage;
import bisq.network.p2p.SupportedCapabilitiesMessage;
import bisq.network.p2p.peers.BanList;
import bisq.network.p2p.peers.getdata.messages.GetDataRequest;
import bisq.network.p2p.peers.getdata.messages.GetDataResponse;
import bisq.network.p2p.peers.keepalive.messages.KeepAliveMessage;
import bisq.network.p2p.peers.keepalive.messages.Ping;
import bisq.network.p2p.storage.messages.AddDataMessage;
import bisq.network.p2p.storage.messages.AddPersistableNetworkPayloadMessage;
import bisq.network.p2p.storage.messages.RefreshOfferMessage;
import bisq.network.p2p.storage.payload.CapabilityRequiringPayload;
import bisq.network.p2p.storage.payload.PersistableNetworkPayload;
import bisq.network.p2p.storage.payload.ProtectedStoragePayload;
import bisq.common.Proto;
import bisq.common.UserThread;
import bisq.common.app.Capabilities;
import bisq.common.app.Capability;
import bisq.common.app.HasCapabilities;
import bisq.common.app.Version;
import bisq.common.config.Config;
import bisq.common.proto.ProtobufferException;
import bisq.common.proto.network.NetworkEnvelope;
import bisq.common.proto.network.NetworkProtoResolver;
import bisq.common.util.Utilities;
import javax.inject.Inject;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.Uninterruptibles;
import javafx.beans.property.ObjectProperty;
import javafx.beans.property.SimpleObjectProperty;
import java.net.Socket;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InvalidClassException;
import java.io.OptionalDataException;
import java.io.StreamCorruptedException;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.Queue;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.lang.ref.WeakReference;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.jetbrains.annotations.Nullable;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
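/*
 * Overview (summary of the class below): a Connection wraps a peer socket and runs its own input
 * loop on a single-thread executor. Outbound envelopes are written through a
 * SynchronizedProtoOutputStream and are throttled (and, if the peer supports it, bundled into a
 * BundleOfEnvelopes) when sent in rapid succession; inbound envelopes are size- and
 * throttle-checked and then dispatched to the registered MessageListeners on the UserThread.
 */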
@Slf4j
public class Connection implements HasCapabilities, Runnable, MessageListener {
// Enums
public enum PeerType {
SEED_NODE,
PEER,
DIRECT_MSG_PEER,
INITIAL_DATA_REQUEST
}
// Static
@Inject
private static Config config;
// Leaving some constants package-private for tests to know limits.
private static final int PERMITTED_MESSAGE_SIZE = 200 * 1024; // 200 kb
private static final int MAX_PERMITTED_MESSAGE_SIZE = 10 * 1024 * 1024; // 10 MB (425 offers resulted in about 660 kB; an offer is usually about 2 kB, a mailbox message about 3 kB, and mailbox messages will add more to it)
//TODO decrease limits again after testing
private static final int SOCKET_TIMEOUT = (int) TimeUnit.SECONDS.toMillis(120);
public static int getPermittedMessageSize() {
return PERMITTED_MESSAGE_SIZE;
}
// Class fields
private final Socket socket;
// private final MessageListener messageListener;
private final ConnectionListener connectionListener;
@Getter
private final String uid;
private final ExecutorService singleThreadExecutor = Executors.newSingleThreadExecutor(runnable -> new Thread(runnable, "Connection.java executor-service"));
// holder of state shared between InputHandler and Connection
@Getter
private final Statistic statistic;
// set in init
private SynchronizedProtoOutputStream protoOutputStream;
// mutable data, set from other threads but not changed internally.
@Getter
private Optional<NodeAddress> peersNodeAddressOptional = Optional.empty();
@Getter
private volatile boolean stopped;
// Use Peer as default, in case of other types they will set it as soon as possible.
@Getter
private PeerType peerType = PeerType.PEER;
@Getter
private final ObjectProperty<NodeAddress> peersNodeAddressProperty = new SimpleObjectProperty<>();
private final List<Long> messageTimeStamps = new ArrayList<>();
private final CopyOnWriteArraySet<MessageListener> messageListeners = new CopyOnWriteArraySet<>();
private volatile long lastSendTimeStamp = 0;
private final CopyOnWriteArraySet<WeakReference<SupportedCapabilitiesListener>> capabilitiesListeners = new CopyOnWriteArraySet<>();
@Getter
private RuleViolation ruleViolation;
private final ConcurrentHashMap<RuleViolation, Integer> ruleViolations = new ConcurrentHashMap<>();
private final Capabilities capabilities = new Capabilities();
// Constructor
Connection(Socket socket,
MessageListener messageListener,
ConnectionListener connectionListener,
@Nullable NodeAddress peersNodeAddress,
NetworkProtoResolver networkProtoResolver) {
this.socket = socket;
this.connectionListener = connectionListener;
uid = UUID.randomUUID().toString();
statistic = new Statistic();
addMessageListener(messageListener);
this.networkProtoResolver = networkProtoResolver;
init(peersNodeAddress);
}
private void init(@Nullable NodeAddress peersNodeAddress) {
try {
socket.setSoTimeout(SOCKET_TIMEOUT);
// Need to access first the ObjectOutputStream otherwise the ObjectInputStream would block
// See: https://stackoverflow.com/questions/5658089/java-creating-a-new-objectinputstream-blocks/5658109#5658109
// When you construct an ObjectInputStream, in the constructor the class attempts to read a header that
// the associated ObjectOutputStream on the other end of the connection has written.
// It will not return until that header has been read.
protoOutputStream = new SynchronizedProtoOutputStream(socket.getOutputStream(), statistic);
protoInputStream = socket.getInputStream();
// We create a thread for handling inputStream data
singleThreadExecutor.submit(this);
if (peersNodeAddress != null)
setPeersNodeAddress(peersNodeAddress);
UserThread.execute(() -> connectionListener.onConnection(this));
} catch (Throwable e) {
handleException(e);
}
}
// API
@Override
public Capabilities getCapabilities() {
return capabilities;
}
private final Object lock = new Object();
private final Queue<BundleOfEnvelopes> queueOfBundles = new ConcurrentLinkedQueue<>();
private final ScheduledExecutorService bundleSender = Executors.newSingleThreadScheduledExecutor();
// Called from various threads
public void sendMessage(NetworkEnvelope networkEnvelope) {
log.debug(">> Send networkEnvelope of type: " + networkEnvelope.getClass().getSimpleName());
if (!stopped) {
if (noCapabilityRequiredOrCapabilityIsSupported(networkEnvelope)) {
try {
String peersNodeAddress = peersNodeAddressOptional.map(NodeAddress::toString).orElse("null");
protobuf.NetworkEnvelope proto = networkEnvelope.toProtoNetworkEnvelope();
log.trace("Sending message: {}", Utilities.toTruncatedString(proto.toString(), 10000));
if (networkEnvelope instanceof Ping || networkEnvelope instanceof RefreshOfferMessage) {
// pings and offer refresh msg we don't want to log in production
log.trace("\n\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n" +
"Sending direct message to peer" +
"Write object to outputStream to peer: {} (uid={})\ntruncated message={} / size={}" +
"\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n",
peersNodeAddress, uid, proto.toString(), proto.getSerializedSize());
} else if (networkEnvelope instanceof PrefixedSealedAndSignedMessage && peersNodeAddressOptional.isPresent()) {
setPeerType(Connection.PeerType.DIRECT_MSG_PEER);
log.debug("\n\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n" +
"Sending direct message to peer" +
"Write object to outputStream to peer: {} (uid={})\ntruncated message={} / size={}" +
"\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n",
peersNodeAddress, uid, Utilities.toTruncatedString(networkEnvelope), -1);
} else if (networkEnvelope instanceof GetDataResponse && ((GetDataResponse) networkEnvelope).isGetUpdatedDataResponse()) {
setPeerType(Connection.PeerType.PEER);
} else {
log.debug("\n\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n" +
"Write object to outputStream to peer: {} (uid={})\ntruncated message={} / size={}" +
"\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n",
peersNodeAddress, uid, Utilities.toTruncatedString(networkEnvelope), proto.getSerializedSize());
}
// Throttle outbound network_messages
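// Summary of the block below: if this send follows the previous one by less than
// config.sendMsgThrottleTrigger ms and the peer supports BUNDLE_OF_ENVELOPES, the envelope is
// queued into a BundleOfEnvelopes that is scheduled for sending once the throttle interval has
// elapsed; a new bundle is started whenever the current one would exceed ~90% of
// MAX_PERMITTED_MESSAGE_SIZE. Without that capability the sending thread simply sleeps for
// config.sendMsgThrottleSleep ms before writing.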
long now = System.currentTimeMillis();
long elapsed = now - lastSendTimeStamp;
if (elapsed < config.sendMsgThrottleTrigger) {
log.debug("We got 2 sendMessage requests in less than {} ms. We set the thread to sleep " +
"for {} ms to avoid flooding our peer. lastSendTimeStamp={}, now={}, elapsed={}, networkEnvelope={}",
config.sendMsgThrottleTrigger, config.sendMsgThrottleSleep, lastSendTimeStamp, now, elapsed,
networkEnvelope.getClass().getSimpleName());
// check if BundleOfEnvelopes is supported
if (getCapabilities().containsAll(new Capabilities(Capability.BUNDLE_OF_ENVELOPES))) {
synchronized (lock) {
// check if current envelope fits size
// - no? create new envelope
if (queueOfBundles.isEmpty() || queueOfBundles.element().toProtoNetworkEnvelope().getSerializedSize() + networkEnvelope.toProtoNetworkEnvelope().getSerializedSize() > MAX_PERMITTED_MESSAGE_SIZE * 0.9) {
// - no? create a bucket
queueOfBundles.add(new BundleOfEnvelopes());
// - and schedule it for sending
lastSendTimeStamp += config.sendMsgThrottleSleep;
bundleSender.schedule(() -> {
if (!stopped) {
synchronized (lock) {
BundleOfEnvelopes current = queueOfBundles.poll();
if (current != null && !stopped) {
if (current.getEnvelopes().size() == 1) {
protoOutputStream.writeEnvelope(current.getEnvelopes().get(0));
} else {
protoOutputStream.writeEnvelope(current);
}
}
}
}
}, lastSendTimeStamp - now, TimeUnit.MILLISECONDS);
}
// - yes? add to bucket
queueOfBundles.element().add(networkEnvelope);
}
return;
}
Thread.sleep(config.sendMsgThrottleSleep);
}
lastSendTimeStamp = now;
if (!stopped) {
protoOutputStream.writeEnvelope(networkEnvelope);
}
} catch (Throwable t) {
handleException(t);
}
}
} else {
log.debug("called sendMessage but was already stopped");
}
}
public boolean noCapabilityRequiredOrCapabilityIsSupported(Proto msg) {
boolean result;
if (msg instanceof AddDataMessage) {
final ProtectedStoragePayload protectedStoragePayload = (((AddDataMessage) msg).getProtectedStorageEntry()).getProtectedStoragePayload();
result = !(protectedStoragePayload instanceof CapabilityRequiringPayload);
if (!result)
result = capabilities.containsAll(((CapabilityRequiringPayload) protectedStoragePayload).getRequiredCapabilities());
} else if (msg instanceof AddPersistableNetworkPayloadMessage) {
final PersistableNetworkPayload persistableNetworkPayload = ((AddPersistableNetworkPayloadMessage) msg).getPersistableNetworkPayload();
result = !(persistableNetworkPayload instanceof CapabilityRequiringPayload);
if (!result)
result = capabilities.containsAll(((CapabilityRequiringPayload) persistableNetworkPayload).getRequiredCapabilities());
} else if (msg instanceof CapabilityRequiringPayload) {
result = capabilities.containsAll(((CapabilityRequiringPayload) msg).getRequiredCapabilities());
} else {
result = true;
}
if (!result) {
if (capabilities.size() > 1) {
Proto data = msg;
if (msg instanceof AddDataMessage) {
data = ((AddDataMessage) msg).getProtectedStorageEntry().getProtectedStoragePayload();
}
// Monitoring nodes have only one capability set, we don't want to log those
log.debug("We did not send the message because the peer does not support our required capabilities. " +
"messageClass={}, peer={}, peers supportedCapabilities={}",
data.getClass().getSimpleName(), peersNodeAddressOptional, capabilities);
}
}
return result;
}
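// Example: an AddDataMessage whose ProtectedStoragePayload implements CapabilityRequiringPayload is
// reported as sendable only when this connection's advertised capabilities contain all of the
// payload's required capabilities; payloads without capability requirements always pass.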
public void addMessageListener(MessageListener messageListener) {
boolean isNewEntry = messageListeners.add(messageListener);
if (!isNewEntry)
log.warn("Try to add a messageListener which was already added.");
}
public void removeMessageListener(MessageListener messageListener) {
boolean contained = messageListeners.remove(messageListener);
if (!contained)
log.debug("Try to remove a messageListener which was never added.\n\t" +
"That might happen because of async behaviour of CopyOnWriteArraySet");
}
public void addWeakCapabilitiesListener(SupportedCapabilitiesListener listener) {
capabilitiesListeners.add(new WeakReference<>(listener));
}
private boolean violatesThrottleLimit() {
long now = System.currentTimeMillis();
messageTimeStamps.add(now);
// clean list
while (messageTimeStamps.size() > config.msgThrottlePer10Sec)
messageTimeStamps.remove(0);
return violatesThrottleLimit(now, 1, config.msgThrottlePerSec) ||
violatesThrottleLimit(now, 10, config.msgThrottlePer10Sec);
}
private boolean violatesThrottleLimit(long now, int seconds, int messageCountLimit) {
if (messageTimeStamps.size() >= messageCountLimit) {
// find the entry in the message timestamp history which determines whether we overshot the limit or not
long compareValue = messageTimeStamps.get(messageTimeStamps.size() - messageCountLimit);
// if duration < seconds sec we received too much network_messages
if (now - compareValue < TimeUnit.SECONDS.toMillis(seconds)) {
log.error("violatesThrottleLimit {}/{} second(s)", messageCountLimit, seconds);
return true;
}
}
return false;
}
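// Worked example (assumed numbers): with messageCountLimit = 200 and seconds = 10, the timestamp
// 200 entries back is compared against 'now'; if it is less than 10 seconds old, at least 200
// messages (including the current one) arrived within the 10-second window, so the limit is violated.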
// MessageListener implementation
// Only receives non-CloseConnectionMessage network_messages
@Override
public void onMessage(NetworkEnvelope networkEnvelope, Connection connection) {
checkArgument(connection.equals(this));
if (networkEnvelope instanceof BundleOfEnvelopes)
for (NetworkEnvelope current : ((BundleOfEnvelopes) networkEnvelope).getEnvelopes()) {
UserThread.execute(() -> messageListeners.forEach(e -> e.onMessage(current, connection)));
}
else
UserThread.execute(() -> messageListeners.forEach(e -> e.onMessage(networkEnvelope, connection)));
}
// Setters
public void setPeerType(PeerType peerType) {
log.debug("setPeerType: peerType={}, nodeAddressOpt={}", peerType.toString(), peersNodeAddressOptional);
this.peerType = peerType;
}
private void setPeersNodeAddress(NodeAddress peerNodeAddress) {
checkNotNull(peerNodeAddress, "peerAddress must not be null");
peersNodeAddressOptional = Optional.of(peerNodeAddress);
String peersNodeAddress = getPeersNodeAddressOptional().isPresent() ? getPeersNodeAddressOptional().get().getFullAddress() : "";
if (this instanceof InboundConnection) {
log.debug("\n\n
"We got the peers node address set.\n" +
"peersNodeAddress= " + peersNodeAddress +
"\nconnection.uid=" + getUid() +
"\n
}
peersNodeAddressProperty.set(peerNodeAddress);
if (BanList.isBanned(peerNodeAddress)) {
log.warn("We detected a connection to a banned peer. We will close that connection. (setPeersNodeAddress)");
reportInvalidRequest(RuleViolation.PEER_BANNED);
}
}
// Getters
public boolean hasPeersNodeAddress() {
return peersNodeAddressOptional.isPresent();
}
// ShutDown
public void shutDown(CloseConnectionReason closeConnectionReason) {
shutDown(closeConnectionReason, null);
}
public void shutDown(CloseConnectionReason closeConnectionReason, @Nullable Runnable shutDownCompleteHandler) {
log.debug("shutDown: nodeAddressOpt={}, closeConnectionReason={}", this.peersNodeAddressOptional.orElse(null), closeConnectionReason);
if (!stopped) {
String peersNodeAddress = peersNodeAddressOptional.map(NodeAddress::toString).orElse("null");
log.debug("\n\n%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n" +
"ShutDown connection:"
+ "\npeersNodeAddress=" + peersNodeAddress
+ "\ncloseConnectionReason=" + closeConnectionReason
+ "\nuid=" + uid
+ "\n%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n");
if (closeConnectionReason.sendCloseMessage) {
new Thread(() -> {
try {
String reason = closeConnectionReason == CloseConnectionReason.RULE_VIOLATION ?
getRuleViolation().name() : closeConnectionReason.name();
sendMessage(new CloseConnectionMessage(reason));
stopped = true;
//noinspection UnstableApiUsage
Uninterruptibles.sleepUninterruptibly(200, TimeUnit.MILLISECONDS);
} catch (Throwable t) {
log.error(t.getMessage());
t.printStackTrace();
} finally {
stopped = true;
UserThread.execute(() -> doShutDown(closeConnectionReason, shutDownCompleteHandler));
}
}, "Connection:SendCloseConnectionMessage-" + this.uid).start();
} else {
stopped = true;
doShutDown(closeConnectionReason, shutDownCompleteHandler);
}
} else {
//TODO find out why we get called that
log.debug("stopped was already at shutDown call");
UserThread.execute(() -> doShutDown(closeConnectionReason, shutDownCompleteHandler));
}
}
private void doShutDown(CloseConnectionReason closeConnectionReason, @Nullable Runnable shutDownCompleteHandler) {
// Use UserThread.execute as it's not clear if this is called from a non-UserThread
UserThread.execute(() -> connectionListener.onDisconnect(closeConnectionReason, this));
try {
socket.close();
} catch (SocketException e) {
log.trace("SocketException at shutdown might be expected " + e.getMessage());
} catch (IOException e) {
log.error("Exception at shutdown. " + e.getMessage());
e.printStackTrace();
} finally {
protoOutputStream.onConnectionShutdown();
try {
protoInputStream.close();
} catch (IOException e) {
log.error(e.getMessage());
e.printStackTrace();
}
//noinspection UnstableApiUsage
MoreExecutors.shutdownAndAwaitTermination(singleThreadExecutor, 500, TimeUnit.MILLISECONDS);
MoreExecutors.shutdownAndAwaitTermination(bundleSender, 500, TimeUnit.MILLISECONDS);
log.debug("Connection shutdown complete " + this.toString());
// Use UserThread.execute as it's not clear if this is called from a non-UserThread
if (shutDownCompleteHandler != null)
UserThread.execute(shutDownCompleteHandler);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof Connection)) return false;
Connection that = (Connection) o;
return uid.equals(that.uid);
}
@Override
public int hashCode() {
return uid.hashCode();
}
@Override
public String toString() {
return "Connection{" +
"peerAddress=" + peersNodeAddressOptional +
", peerType=" + peerType +
", connectionType=" + (this instanceof InboundConnection ? "InboundConnection" : "OutboundConnection") +
", uid='" + uid + '\'' +
'}';
}
@SuppressWarnings("unused")
public String printDetails() {
String portInfo;
if (socket.getLocalPort() == 0)
portInfo = "port=" + socket.getPort();
else
portInfo = "localPort=" + socket.getLocalPort() + "/port=" + socket.getPort();
return "Connection{" +
"peerAddress=" + peersNodeAddressOptional +
", peerType=" + peerType +
", portInfo=" + portInfo +
", uid='" + uid + '\'' +
", ruleViolation=" + ruleViolation +
", ruleViolations=" + ruleViolations +
", supportedCapabilities=" + capabilities +
", stopped=" + stopped +
'}';
}
// SharedSpace
/**
* Tracks reported rule violations for this connection and shuts the connection down once a
* violation reaches its tolerated maximum; returns true if the connection was shut down.
*/
public boolean reportInvalidRequest(RuleViolation ruleViolation) {
log.warn("We got reported the ruleViolation {} at connection {}", ruleViolation, this);
int numRuleViolations;
numRuleViolations = ruleViolations.getOrDefault(ruleViolation, 0);
numRuleViolations++;
ruleViolations.put(ruleViolation, numRuleViolations);
if (numRuleViolations >= ruleViolation.maxTolerance) {
log.warn("We close connection as we received too many corrupt requests.\n" +
"numRuleViolations={}\n\t" +
"corruptRequest={}\n\t" +
"corruptRequests={}\n\t" +
"connection={}", numRuleViolations, ruleViolation, ruleViolations.toString(), this);
this.ruleViolation = ruleViolation;
if (ruleViolation == RuleViolation.PEER_BANNED) {
log.warn("We close connection due to RuleViolation.PEER_BANNED. peersNodeAddress={}", getPeersNodeAddressOptional());
shutDown(CloseConnectionReason.PEER_BANNED);
} else if (ruleViolation == RuleViolation.INVALID_CLASS) {
log.warn("We close connection due to RuleViolation.INVALID_CLASS");
shutDown(CloseConnectionReason.INVALID_CLASS_RECEIVED);
} else {
log.warn("We close connection due to RuleViolation.RULE_VIOLATION");
shutDown(CloseConnectionReason.RULE_VIOLATION);
}
return true;
} else {
return false;
}
}
private void handleException(Throwable e) {
CloseConnectionReason closeConnectionReason;
// silent fail if we are shutdown
if (stopped)
return;
if (e instanceof SocketException) {
if (socket.isClosed())
closeConnectionReason = CloseConnectionReason.SOCKET_CLOSED;
else
closeConnectionReason = CloseConnectionReason.RESET;
log.info("SocketException (expected if connection lost). closeConnectionReason={}; connection={}", closeConnectionReason, this);
} else if (e instanceof SocketTimeoutException || e instanceof TimeoutException) {
closeConnectionReason = CloseConnectionReason.SOCKET_TIMEOUT;
log.info("Shut down caused by exception {} on connection={}", e.toString(), this);
} else if (e instanceof EOFException) {
closeConnectionReason = CloseConnectionReason.TERMINATED;
log.warn("Shut down caused by exception {} on connection={}", e.toString(), this);
} else if (e instanceof OptionalDataException || e instanceof StreamCorruptedException) {
closeConnectionReason = CloseConnectionReason.CORRUPTED_DATA;
log.warn("Shut down caused by exception {} on connection={}", e.toString(), this);
} else {
closeConnectionReason = CloseConnectionReason.UNKNOWN_EXCEPTION;
log.warn("Unknown reason for exception at socket: {}\n\t" +
"peer={}\n\t" +
"Exception={}",
socket.toString(),
this.peersNodeAddressOptional,
e.toString());
e.printStackTrace();
}
shutDown(closeConnectionReason);
}
// InputHandler
// Runs in same thread as Connection, receives a message, performs several checks on it
// (including throttling limits, validity and statistics)
// and delivers it to the message listener given in the constructor.
private InputStream protoInputStream;
private final NetworkProtoResolver networkProtoResolver;
private long lastReadTimeStamp;
private boolean threadNameSet;
@Override
public void run() {
try {
Thread.currentThread().setName("InputHandler");
while (!stopped && !Thread.currentThread().isInterrupted()) {
if (!threadNameSet && getPeersNodeAddressOptional().isPresent()) {
Thread.currentThread().setName("InputHandler-" + getPeersNodeAddressOptional().get().getFullAddress());
threadNameSet = true;
}
try {
if (socket == null ||
socket.isClosed()) {
log.warn("Socket is null or closed socket={}", socket);
shutDown(CloseConnectionReason.SOCKET_CLOSED);
return;
}
// Throttle inbound network_messages
long now = System.currentTimeMillis();
long elapsed = now - lastReadTimeStamp;
if (elapsed < 10) {
log.debug("We got 2 network_messages received in less than 10 ms. We set the thread to sleep " +
"for 20 ms to avoid getting flooded by our peer. lastReadTimeStamp={}, now={}, elapsed={}",
lastReadTimeStamp, now, elapsed);
Thread.sleep(20);
}
// Reading the protobuffer message from the inputStream
protobuf.NetworkEnvelope proto = protobuf.NetworkEnvelope.parseDelimitedFrom(protoInputStream);
if (proto == null) {
if (protoInputStream.read() == -1)
log.debug("proto is null because protoInputStream.read()=-1 (EOF). That is expected if client got stopped without proper shutdown.");
else
log.warn("proto is null. protoInputStream.read()=" + protoInputStream.read());
shutDown(CloseConnectionReason.NO_PROTO_BUFFER_ENV);
return;
}
NetworkEnvelope networkEnvelope = networkProtoResolver.fromProto(proto);
lastReadTimeStamp = now;
log.debug("<< Received networkEnvelope of type: {}", networkEnvelope.getClass().getSimpleName());
int size = proto.getSerializedSize();
// We comment out that part as only debug and trace log levels are used. For debugging purposes
// we leave the code in, though.
/*if (networkEnvelope instanceof Pong || networkEnvelope instanceof RefreshOfferMessage) {
// We only log Pong and RefreshOfferMsg when in dev environment (trace)
log.trace("\n\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n" +
"New data arrived at inputHandler of connection {}.\n" +
"Received object (truncated)={} / size={}"
+ "\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n",
connection,
Utilities.toTruncatedString(proto.toString()),
size);
} else {
// We want to log all incoming network_messages (except Pong and RefreshOfferMsg)
// so we log before the data type checks
//log.info("size={}; object={}", size, Utilities.toTruncatedString(rawInputObject.toString(), 100));
log.debug("\n\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n" +
"New data arrived at inputHandler of connection {}.\n" +
"Received object (truncated)={} / size={}"
+ "\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n",
connection,
Utilities.toTruncatedString(proto.toString()),
size);
}*/
// We want to track the size of each object even if it is invalid data
statistic.addReceivedBytes(size);
// We want to track the network_messages also before the checks, so do it early...
statistic.addReceivedMessage(networkEnvelope);
// First we check the size
boolean exceeds;
if (networkEnvelope instanceof ExtendedDataSizePermission) {
exceeds = size > MAX_PERMITTED_MESSAGE_SIZE;
} else {
exceeds = size > PERMITTED_MESSAGE_SIZE;
}
if (networkEnvelope instanceof AddPersistableNetworkPayloadMessage &&
!((AddPersistableNetworkPayloadMessage) networkEnvelope).getPersistableNetworkPayload().verifyHashSize()) {
log.warn("PersistableNetworkPayload.verifyHashSize failed. hashSize={}; object={}",
((AddPersistableNetworkPayloadMessage) networkEnvelope).getPersistableNetworkPayload().getHash().length,
Utilities.toTruncatedString(proto));
if (reportInvalidRequest(RuleViolation.MAX_MSG_SIZE_EXCEEDED))
return;
}
if (exceeds) {
log.warn("size > MAX_MSG_SIZE. size={}; object={}", size, Utilities.toTruncatedString(proto));
if (reportInvalidRequest(RuleViolation.MAX_MSG_SIZE_EXCEEDED))
return;
}
if (violatesThrottleLimit() && reportInvalidRequest(RuleViolation.THROTTLE_LIMIT_EXCEEDED))
return;
// Check P2P network ID
if (proto.getMessageVersion() != Version.getP2PMessageVersion()
&& reportInvalidRequest(RuleViolation.WRONG_NETWORK_ID)) {
log.warn("RuleViolation.WRONG_NETWORK_ID. version of message={}, app version={}, " +
"proto.toTruncatedString={}", proto.getMessageVersion(),
Version.getP2PMessageVersion(),
Utilities.toTruncatedString(proto.toString()));
return;
}
if (networkEnvelope instanceof SupportedCapabilitiesMessage) {
Capabilities supportedCapabilities = ((SupportedCapabilitiesMessage) networkEnvelope).getSupportedCapabilities();
if (supportedCapabilities != null) {
if (!capabilities.equals(supportedCapabilities)) {
capabilities.set(supportedCapabilities);
// Capabilities can be empty. We only check for mandatory if we get some capabilities.
if (!capabilities.isEmpty() && !Capabilities.hasMandatoryCapability(capabilities)) {
String senderNodeAddress = networkEnvelope instanceof SendersNodeAddressMessage ?
((SendersNodeAddressMessage) networkEnvelope).getSenderNodeAddress().getFullAddress() :
"[unknown address]";
log.info("We close a connection to old node {}. " +
"Capabilities of old node: {}, networkEnvelope class name={}",
senderNodeAddress, capabilities.prettyPrint(), networkEnvelope.getClass().getSimpleName());
shutDown(CloseConnectionReason.MANDATORY_CAPABILITIES_NOT_SUPPORTED);
return;
}
capabilitiesListeners.forEach(weakListener -> {
SupportedCapabilitiesListener supportedCapabilitiesListener = weakListener.get();
if (supportedCapabilitiesListener != null) {
UserThread.execute(() -> supportedCapabilitiesListener.onChanged(supportedCapabilities));
}
});
}
}
}
if (networkEnvelope instanceof CloseConnectionMessage) {
// If we get a CloseConnectionMessage we shut down
if (log.isDebugEnabled()) {
log.debug("CloseConnectionMessage received. Reason={}\n\t" +
"connection={}", proto.getCloseConnectionMessage().getReason(), this);
}
if (CloseConnectionReason.PEER_BANNED.name().equals(proto.getCloseConnectionMessage().getReason())) {
log.warn("We got shut down because we are banned by the other peer. (InputHandler.run CloseConnectionMessage)");
shutDown(CloseConnectionReason.PEER_BANNED);
} else {
shutDown(CloseConnectionReason.CLOSE_REQUESTED_BY_PEER);
}
return;
} else if (!stopped) {
// We don't want to get the activity ts updated by ping/pong msg
if (!(networkEnvelope instanceof KeepAliveMessage))
statistic.updateLastActivityTimestamp();
if (networkEnvelope instanceof GetDataRequest)
setPeerType(PeerType.INITIAL_DATA_REQUEST);
// First a seed node gets a message from a peer (PreliminaryDataRequest using
// AnonymousMessage interface) which does not have its hidden service
// published, so it does not know its address. As the IncomingConnection does not have the
// peersNodeAddress set that connection cannot be used for outgoing network_messages until we
// get the address set.
// At the data update message (DataRequest using SendersNodeAddressMessage interface)
// after the HS is published we get the peer's address set.
// There are only those network_messages used for new connections to a peer:
// 1. PreliminaryDataRequest
// 2. DataRequest (implements SendersNodeAddressMessage)
// 3. GetPeersRequest (implements SendersNodeAddressMessage)
// 4. DirectMessage (implements SendersNodeAddressMessage)
if (networkEnvelope instanceof SendersNodeAddressMessage) {
NodeAddress senderNodeAddress = ((SendersNodeAddressMessage) networkEnvelope).getSenderNodeAddress();
if (senderNodeAddress != null) {
Optional<NodeAddress> peersNodeAddressOptional = getPeersNodeAddressOptional();
if (peersNodeAddressOptional.isPresent()) {
// If we have already the peers address we check again if it matches our stored one
checkArgument(peersNodeAddressOptional.get().equals(senderNodeAddress),
"senderNodeAddress not matching connections peer address.\n\t" +
"message=" + networkEnvelope);
} else {
// We must not shut down a banned peer at that moment as it would trigger a connection termination
// and we could not send the CloseConnectionMessage.
// We check for a banned peer inside setPeersNodeAddress() and shut down if banned.
setPeersNodeAddress(senderNodeAddress);
}
}
}
if (networkEnvelope instanceof PrefixedSealedAndSignedMessage)
setPeerType(Connection.PeerType.DIRECT_MSG_PEER);
onMessage(networkEnvelope, this);
}
} catch (InvalidClassException e) {
log.error(e.getMessage());
e.printStackTrace();
reportInvalidRequest(RuleViolation.INVALID_CLASS);
} catch (ProtobufferException | NoClassDefFoundError e) {
log.error(e.getMessage());
e.printStackTrace();
reportInvalidRequest(RuleViolation.INVALID_DATA_TYPE);
} catch (Throwable t) {
handleException(t);
}
}
} catch (Throwable t) {
handleException(t);
}
}
}
|
package com.cloud.agent.manager;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.channels.ClosedChannelException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import javax.ejb.Local;
import javax.naming.ConfigurationException;
import org.apache.log4j.Logger;
import com.cloud.agent.AgentManager;
import com.cloud.agent.Listener;
import com.cloud.agent.api.AgentControlAnswer;
import com.cloud.agent.api.AgentControlCommand;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.CheckHealthCommand;
import com.cloud.agent.api.Command;
import com.cloud.agent.api.GetHostStatsAnswer;
import com.cloud.agent.api.GetHostStatsCommand;
import com.cloud.agent.api.MaintainCommand;
import com.cloud.agent.api.PingAnswer;
import com.cloud.agent.api.PingCommand;
import com.cloud.agent.api.PingRoutingCommand;
import com.cloud.agent.api.PoolEjectCommand;
import com.cloud.agent.api.ReadyAnswer;
import com.cloud.agent.api.ReadyCommand;
import com.cloud.agent.api.ShutdownCommand;
import com.cloud.agent.api.StartupAnswer;
import com.cloud.agent.api.StartupCommand;
import com.cloud.agent.api.StartupProxyCommand;
import com.cloud.agent.api.StartupRoutingCommand;
import com.cloud.agent.api.StartupStorageCommand;
import com.cloud.agent.api.UnsupportedAnswer;
import com.cloud.agent.manager.allocator.HostAllocator;
import com.cloud.agent.manager.allocator.PodAllocator;
import com.cloud.agent.transport.Request;
import com.cloud.agent.transport.Response;
import com.cloud.alert.AlertManager;
import com.cloud.api.BaseCmd;
import com.cloud.api.ServerApiException;
import com.cloud.api.commands.AddHostCmd;
import com.cloud.api.commands.AddSecondaryStorageCmd;
import com.cloud.api.commands.CancelMaintenanceCmd;
import com.cloud.api.commands.DeleteHostCmd;
import com.cloud.api.commands.PrepareForMaintenanceCmd;
import com.cloud.api.commands.ReconnectHostCmd;
import com.cloud.api.commands.UpdateHostCmd;
import com.cloud.capacity.CapacityVO;
import com.cloud.capacity.dao.CapacityDao;
import com.cloud.configuration.Config;
import com.cloud.configuration.dao.ConfigurationDao;
import com.cloud.dc.ClusterVO;
import com.cloud.dc.DataCenter;
import com.cloud.dc.DataCenterIpAddressVO;
import com.cloud.dc.DataCenterVO;
import com.cloud.dc.HostPodVO;
import com.cloud.dc.Pod;
import com.cloud.dc.PodCluster;
import com.cloud.dc.dao.ClusterDao;
import com.cloud.dc.dao.DataCenterDao;
import com.cloud.dc.dao.DataCenterIpAddressDaoImpl;
import com.cloud.dc.dao.HostPodDao;
import com.cloud.dc.dao.VlanDao;
import com.cloud.event.dao.EventDao;
import com.cloud.exception.AgentUnavailableException;
import com.cloud.exception.DiscoveryException;
import com.cloud.exception.InternalErrorException;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.exception.OperationTimedoutException;
import com.cloud.exception.UnsupportedVersionException;
import com.cloud.ha.HighAvailabilityManager;
import com.cloud.host.DetailVO;
import com.cloud.host.Host;
import com.cloud.host.Host.Type;
import com.cloud.host.HostStats;
import com.cloud.host.HostVO;
import com.cloud.host.Status;
import com.cloud.host.Status.Event;
import com.cloud.host.dao.DetailsDao;
import com.cloud.host.dao.HostDao;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.hypervisor.kvm.resource.KvmDummyResourceBase;
import com.cloud.maid.StackMaid;
import com.cloud.maint.UpgradeManager;
import com.cloud.network.IPAddressVO;
import com.cloud.network.NetworkManager;
import com.cloud.network.dao.IPAddressDao;
import com.cloud.offering.ServiceOffering;
import com.cloud.resource.Discoverer;
import com.cloud.resource.ServerResource;
import com.cloud.service.ServiceOfferingVO;
import com.cloud.storage.GuestOSCategoryVO;
import com.cloud.storage.Storage;
import com.cloud.storage.StorageManager;
import com.cloud.storage.StoragePoolVO;
import com.cloud.storage.VMTemplateHostVO;
import com.cloud.storage.VMTemplateVO;
import com.cloud.storage.dao.GuestOSCategoryDao;
import com.cloud.storage.dao.StoragePoolDao;
import com.cloud.storage.dao.StoragePoolHostDao;
import com.cloud.storage.dao.VMTemplateDao;
import com.cloud.storage.dao.VMTemplateHostDao;
import com.cloud.storage.dao.VolumeDao;
import com.cloud.storage.resource.DummySecondaryStorageResource;
import com.cloud.template.VirtualMachineTemplate;
import com.cloud.user.dao.UserStatisticsDao;
import com.cloud.uservm.UserVm;
import com.cloud.utils.ActionDelegate;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.Pair;
import com.cloud.utils.component.Adapters;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.component.Inject;
import com.cloud.utils.concurrency.NamedThreadFactory;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.net.MacAddress;
import com.cloud.utils.net.NetUtils;
import com.cloud.utils.nio.HandlerFactory;
import com.cloud.utils.nio.Link;
import com.cloud.utils.nio.NioServer;
import com.cloud.utils.nio.Task;
import com.cloud.vm.State;
import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.VirtualMachineProfile;
import com.cloud.vm.dao.VMInstanceDao;
/**
* Implementation of the Agent Manager. This class controls the connection to
* the agents.
*
* @config {@table || Param Name | Description | Values | Default || || port |
* port to listen on for agent connection. | Integer | 8250 || ||
* workers | # of worker threads | Integer | 5 || || router.template.id
* | default id for template | Integer | 1 || || router.ram.size |
* default ram for router vm in mb | Integer | 128 || ||
* router.ip.address | ip address for the router | ip | 10.1.1.1 || ||
* wait | Time to wait for control commands to return | seconds | 1800
* || || domain | domain for domain routers| String | foo.com || ||
* alert.wait | time to wait before alerting on a disconnected agent |
* seconds | 1800 || || update.wait | time to wait before alerting on a
* updating agent | seconds | 600 || || ping.interval | ping interval in
* seconds | seconds | 60 || || instance.name | Name of the deployment |
* String | required || || start.retry | Number of times to retry start
* | Number | 2 || || ping.timeout | multiplier to ping.interval before
* announcing an agent has timed out | float | 2.0x || ||
* router.stats.interval | interval to report router statistics |
* seconds | 300s || * }
**/
@Local(value = { AgentManager.class })
public class AgentManagerImpl implements AgentManager, HandlerFactory {
private static final Logger s_logger = Logger.getLogger(AgentManagerImpl.class);
protected ConcurrentHashMap<Long, AgentAttache> _agents = new ConcurrentHashMap<Long, AgentAttache>(2047);
protected List<Pair<Integer, Listener>> _hostMonitors = new ArrayList<Pair<Integer, Listener>>(11);
protected List<Pair<Integer, Listener>> _cmdMonitors = new ArrayList<Pair<Integer, Listener>>(11);
protected int _monitorId = 0;
protected NioServer _connection;
@Inject protected HostDao _hostDao = null;
@Inject protected UserStatisticsDao _userStatsDao = null;
@Inject protected DataCenterDao _dcDao = null;
@Inject protected VlanDao _vlanDao = null;
@Inject protected DataCenterIpAddressDaoImpl _privateIPAddressDao = null;
@Inject protected IPAddressDao _publicIPAddressDao = null;
@Inject protected HostPodDao _podDao = null;
protected Adapters<HostAllocator> _hostAllocators = null;
protected Adapters<PodAllocator> _podAllocators = null;
@Inject protected EventDao _eventDao = null;
@Inject protected VMInstanceDao _vmDao = null;
@Inject protected VolumeDao _volDao = null;
@Inject protected CapacityDao _capacityDao = null;
@Inject protected ConfigurationDao _configDao = null;
@Inject protected StoragePoolDao _storagePoolDao = null;
@Inject protected StoragePoolHostDao _storagePoolHostDao = null;
@Inject protected GuestOSCategoryDao _guestOSCategoryDao = null;
@Inject protected DetailsDao _hostDetailsDao = null;
@Inject protected ClusterDao _clusterDao;
protected Adapters<Discoverer> _discoverers = null;
protected int _port;
@Inject
protected HighAvailabilityManager _haMgr = null;
@Inject
protected AlertManager _alertMgr = null;
@Inject
protected NetworkManager _networkMgr = null;
@Inject
protected UpgradeManager _upgradeMgr = null;
@Inject
protected StorageManager _storageMgr = null;
private String _publicNic;
private String _privateNic;
private String _guestNic;
private String _storageNic1;
private String _storageNic2;
protected int _retry = 2;
protected String _name;
protected String _instance;
protected int _wait;
protected int _updateWait;
protected int _alertWait;
protected long _nodeId = -1;
protected int _overProvisioningFactor = 1;
protected float _cpuOverProvisioningFactor = 1;
protected Random _rand = new Random(System.currentTimeMillis());
protected int _pingInterval;
protected long _pingTimeout;
protected AgentMonitor _monitor = null;
protected ExecutorService _executor;
@Inject
protected VMTemplateDao _tmpltDao;
@Inject
protected VMTemplateHostDao _vmTemplateHostDao;
@Override
public boolean configure(final String name, final Map<String, Object> params) throws ConfigurationException {
_name = name;
Request.initBuilder();
final ComponentLocator locator = ComponentLocator.getCurrentLocator();
ConfigurationDao configDao = locator.getDao(ConfigurationDao.class);
if (configDao == null) {
throw new ConfigurationException("Unable to get the configuration dao.");
}
final Map<String, String> configs = configDao.getConfiguration("AgentManager", params);
_publicNic = configDao.getValue(Config.XenPublicNetwork.key());
_privateNic = configDao.getValue(Config.XenPrivateNetwork.key());
_guestNic = configDao.getValue(Config.XenGuestNetwork.key());
_storageNic1 = configDao.getValue(Config.XenStorageNetwork1.key());
_storageNic2 = configDao.getValue(Config.XenStorageNetwork2.key());
_port = NumbersUtil.parseInt(configs.get("port"), 8250);
final int workers = NumbersUtil.parseInt(configs.get("workers"), 5);
String value = configs.get("ping.interval");
_pingInterval = NumbersUtil.parseInt(value, 60);
value = configs.get("wait");
_wait = NumbersUtil.parseInt(value, 1800) * 1000;
value = configs.get("alert.wait");
_alertWait = NumbersUtil.parseInt(value, 1800);
value = configs.get("update.wait");
_updateWait = NumbersUtil.parseInt(value, 600);
value = configs.get("ping.timeout");
final float multiplier = value != null ? Float.parseFloat(value) : 2.5f;
_pingTimeout = (long) (multiplier * _pingInterval);
s_logger.info("Ping Timeout is " + _pingTimeout);
_instance = configs.get("instance.name");
if (_instance == null) {
_instance = "DEFAULT";
}
_hostAllocators = locator.getAdapters(HostAllocator.class);
if (_hostAllocators == null || !_hostAllocators.isSet()) {
throw new ConfigurationException("Unable to find an host allocator.");
}
_podAllocators = locator.getAdapters(PodAllocator.class);
if (_podAllocators == null || !_podAllocators.isSet()) {
throw new ConfigurationException("Unable to find an pod allocator.");
}
_discoverers = locator.getAdapters(Discoverer.class);
if (_nodeId == -1) {
// FIXME: We really should not do this like this. It should be done
// at config time and is stored as a config variable.
_nodeId = MacAddress.getMacAddress().toLong();
}
_hostDao.markHostsAsDisconnected(_nodeId, Status.Up, Status.Connecting, Status.Updating, Status.Disconnected, Status.Down);
_monitor = new AgentMonitor(_nodeId, _hostDao, _volDao, _vmDao, _dcDao, _podDao, this, _alertMgr, _pingTimeout);
registerForHostEvents(_monitor, true, true, false);
_executor = new ThreadPoolExecutor(10, 100, 60l, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(), new NamedThreadFactory("AgentTaskPool"));
String overProvisioningFactorStr = configs.get("storage.overprovisioning.factor");
_overProvisioningFactor = NumbersUtil.parseInt(overProvisioningFactorStr, 1);
String cpuOverProvisioningFactorStr = configs.get("cpu.overprovisioning.factor");
_cpuOverProvisioningFactor = NumbersUtil.parseFloat(cpuOverProvisioningFactorStr, 1);
if(_cpuOverProvisioningFactor < 1){
_cpuOverProvisioningFactor = 1;
}
_connection = new NioServer("AgentManager", _port, workers + 10, this);
s_logger.info("Listening on " + _port + " with " + workers + " workers");
return true;
}
@Override
public Task create(Task.Type type, Link link, byte[] data) {
return new AgentHandler(type, link, data);
}
@Override
public int registerForHostEvents(final Listener listener, boolean connections, boolean commands, boolean priority) {
synchronized (_hostMonitors) {
_monitorId++;
if (connections) {
if (priority) {
_hostMonitors.add(0, new Pair<Integer, Listener>(_monitorId, listener));
} else {
_hostMonitors.add(new Pair<Integer, Listener>(_monitorId, listener));
}
}
if (commands) {
if (priority) {
_cmdMonitors.add(0, new Pair<Integer, Listener>(_monitorId, listener));
} else {
_cmdMonitors.add(new Pair<Integer, Listener>(_monitorId, listener));
}
}
if (s_logger.isDebugEnabled()) {
s_logger.debug("Registering listener " + listener.getClass().getSimpleName() + " with id " + _monitorId);
}
return _monitorId;
}
}
@Override
public void unregisterForHostEvents(final int id) {
s_logger.debug("Deregistering " + id);
// Remove by monitor id; List.remove(int) would otherwise treat the id as a list index
synchronized (_hostMonitors) {
for (int i = _hostMonitors.size() - 1; i >= 0; i--)
if (_hostMonitors.get(i).first() == id)
_hostMonitors.remove(i);
}
}
private AgentControlAnswer handleControlCommand(AgentAttache attache, final AgentControlCommand cmd) {
AgentControlAnswer answer = null;
for (Pair<Integer, Listener> listener : _cmdMonitors) {
answer = listener.second().processControlCommand(attache.getId(), cmd);
if(answer != null)
return answer;
}
s_logger.warn("No handling of agent control command: " + cmd.toString() + " sent from " + attache.getId());
return new AgentControlAnswer(cmd);
}
public void handleCommands(AgentAttache attache, final long sequence, final Command[] cmds) {
for (Pair<Integer, Listener> listener : _cmdMonitors) {
boolean processed = listener.second().processCommands(attache.getId(), sequence, cmds);
if (s_logger.isTraceEnabled()) {
s_logger.trace("SeqA " + attache.getId() + "-" + sequence + ": " + (processed ? "processed" : "not processed") + " by " + listener.getClass());
}
}
}
public AgentAttache findAttache(long hostId) {
return _agents.get(hostId);
}
@Override
public Set<Long> getConnectedHosts() {
// make the returning set be safe for concurrent iteration
final HashSet<Long> result = new HashSet<Long>();
synchronized (_agents) {
final Set<Long> s = _agents.keySet();
for (final Long id : s)
result.add(id);
}
return result;
}
@Override
public Host findHost(final Host.Type type, final DataCenterVO dc, final HostPodVO pod, final StoragePoolVO sp,
final ServiceOffering offering, final VMTemplateVO template, VMInstanceVO vm,
Host currentHost, final Set<Host> avoid) {
VirtualMachineProfile vmc = new VirtualMachineProfile(vm.getType());
Enumeration<HostAllocator> en = _hostAllocators.enumeration();
while (en.hasMoreElements()) {
final HostAllocator allocator = en.nextElement();
final Host host = allocator.allocateTo(vmc, offering, type, dc, pod, sp.getClusterId(), template, avoid);
if (host == null) {
continue;
} else {
return host;
}
}
s_logger.warn("findHost() could not find a non-null host.");
return null;
}
@Override
public List<PodCluster> listByDataCenter(long dcId) {
List<HostPodVO> pods = _podDao.listByDataCenterId(dcId);
ArrayList<PodCluster> pcs = new ArrayList<PodCluster>();
for (HostPodVO pod : pods) {
List<ClusterVO> clusters = _clusterDao.listByPodId(pod.getId());
if (clusters.size() == 0) {
pcs.add(new PodCluster(pod, null));
} else {
for (ClusterVO cluster : clusters) {
pcs.add(new PodCluster(pod, cluster));
}
}
}
return pcs;
}
@Override
public List<PodCluster> listByPod(long podId) {
ArrayList<PodCluster> pcs = new ArrayList<PodCluster>();
HostPodVO pod = _podDao.findById(podId);
if (pod == null) {
return pcs;
}
List<ClusterVO> clusters = _clusterDao.listByPodId(pod.getId());
if (clusters.size() == 0) {
pcs.add(new PodCluster(pod, null));
} else {
for (ClusterVO cluster : clusters) {
pcs.add(new PodCluster(pod, cluster));
}
}
return pcs;
}
protected AgentAttache handleDirectConnect(ServerResource resource, StartupCommand[] startup, Map<String, String> details, boolean old) {
if (startup == null) {
return null;
}
HostVO server = createHost(startup, resource, details, old);
if (server == null) {
return null;
}
long id = server.getId();
AgentAttache attache = createAttache(id, server, resource);
if (!resource.IsRemoteAgent())
attache = notifyMonitorsOfConnection(attache, startup);
else {
_hostDao.updateStatus(server, Event.AgentConnected, _nodeId);
}
return attache;
}
@Override
public List<HostVO> discoverHosts(AddHostCmd cmd) throws IllegalArgumentException, DiscoveryException, InvalidParameterValueException {
Long dcId = cmd.getZoneId();
Long podId = cmd.getPodId();
Long clusterId = cmd.getClusterId();
String clusterName = cmd.getClusterName();
String url = cmd.getUrl();
String username = cmd.getUsername();
String password = cmd.getPassword();
return discoverHosts(dcId, podId, clusterId, clusterName, url, username, password);
}
@Override
public List<HostVO> discoverHosts(AddSecondaryStorageCmd cmd) throws IllegalArgumentException, DiscoveryException, InvalidParameterValueException {
Long dcId = cmd.getZoneId();
String url = cmd.getUrl();
return discoverHosts(dcId, null, null, null, url, null, null);
}
@Override
public List<HostVO> discoverHosts(Long dcId, Long podId, Long clusterId, String clusterName, String url, String username, String password) throws IllegalArgumentException, DiscoveryException, InvalidParameterValueException {
URI uri = null;
//Check if the zone exists in the system
if (_dcDao.findById(dcId) == null ){
throw new InvalidParameterValueException("Can't find zone by id " + dcId);
}
//Check if the pod exists in the system
if (podId != null) {
if (_podDao.findById(podId) == null ){
throw new InvalidParameterValueException("Can't find pod by id " + podId);
}
//check if pod belongs to the zone
HostPodVO pod = _podDao.findById(podId);
if (!Long.valueOf(pod.getDataCenterId()).equals(dcId)) {
throw new InvalidParameterValueException("Pod " + podId + " doesn't belong to the zone " + dcId);
}
}
// Deny to add a secondary storage multiple times for the same zone
if ((username == null) && (_hostDao.findSecondaryStorageHost(dcId) != null)) {
throw new InvalidParameterValueException("A secondary storage host already exists in the specified zone");
}
//Verify cluster information and create a new cluster if needed
if (clusterName != null && clusterId != null) {
throw new InvalidParameterValueException("Can't specify cluster by both id and name");
}
if ((clusterName != null || clusterId != null) && podId == null) {
throw new InvalidParameterValueException("Can't specify cluster without specifying the pod");
}
if (clusterId != null) {
if (_clusterDao.findById(clusterId) == null) {
throw new InvalidParameterValueException("Can't find cluster by id " + clusterId);
}
}
if (clusterName != null) {
ClusterVO cluster = new ClusterVO(dcId, podId, clusterName);
try {
cluster = _clusterDao.persist(cluster);
} catch (Exception e) {
cluster = _clusterDao.findBy(clusterName, podId);
if (cluster == null) {
throw new CloudRuntimeException("Unable to create cluster " + clusterName + " in pod " + podId + " and data center " + dcId, e);
}
}
clusterId = cluster.getId();
}
try {
uri = new URI(url);
if (uri.getScheme() == null)
throw new InvalidParameterValueException("uri.scheme is null " + url + ", add nfs:// as a prefix");
else if (uri.getScheme().equalsIgnoreCase("nfs")) {
if (uri.getHost() == null || uri.getHost().equalsIgnoreCase("") || uri.getPath() == null || uri.getPath().equalsIgnoreCase("")) {
throw new InvalidParameterValueException("Your host and/or path is wrong. Make sure it's of the format nfs://hostname/path");
}
}
} catch (URISyntaxException e) {
throw new InvalidParameterValueException(url + " is not a valid uri");
}
List<HostVO> hosts = new ArrayList<HostVO>();
s_logger.info("Trying to add a new host at " + url + " in data center " + dcId);
Enumeration<Discoverer> en = _discoverers.enumeration();
while (en.hasMoreElements()) {
Discoverer discoverer = en.nextElement();
Map<? extends ServerResource, Map<String, String>> resources = null;
try {
resources = discoverer.find(dcId, podId, clusterId, uri, username, password);
} catch(Exception e) {
s_logger.info("Exception in host discovery process with discoverer: " + discoverer.getName() + ", skip to another discoverer if there is any");
}
if (resources != null) {
for (Map.Entry<? extends ServerResource, Map<String, String>> entry : resources.entrySet()) {
ServerResource resource = entry.getKey();
AgentAttache attache = simulateStart(resource, entry.getValue(), true);
if (attache != null) {
hosts.add(_hostDao.findById(attache.getId()));
}
discoverer.postDiscovery(hosts, _nodeId);
}
s_logger.info("server resources successfully discovered by " + discoverer.getName());
return hosts;
}
}
s_logger.warn("Unable to find the server resources at " + url);
throw new DiscoveryException("Unable to add the host");
}
@Override
@DB
public boolean deleteHost(long hostId) {
Transaction txn = Transaction.currentTxn();
try {
HostVO host = _hostDao.findById(hostId);
if (host == null) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Host: " + hostId + " does not even exist. Delete call is ignored.");
}
return true;
}
if (host.getType() == Type.SecondaryStorage) {
return deleteSecondaryStorageHost(host);
}
if (s_logger.isDebugEnabled()) {
s_logger.debug("Delete Host: " + hostId + " Guid:" + host.getGuid());
}
if (host.getType() == Type.Routing && host.getHypervisorType() == HypervisorType.XenServer ) {
if (host.getClusterId() != null) {
List<HostVO> hosts = _hostDao.listBy(Type.Routing, host.getClusterId(), host.getPodId(), host.getDataCenterId());
boolean success = false;
for( HostVO thost: hosts ) {
long thostId = thost.getId();
if( thostId == hostId ) continue;
PoolEjectCommand eject = new PoolEjectCommand(host.getGuid());
Answer answer = easySend(thostId, eject);
if( answer != null && answer.getResult()) {
s_logger.debug("Eject Host: " + hostId + " from " + thostId + " succeeded");
success = true;
break;
} else {
s_logger.debug("Eject Host: " + hostId + " from " + thostId + " failed due to " + (answer == null ? "null answer" : answer.getDetails()));
}
}
if( !success ){
throw new CloudRuntimeException("Unable to delete host " + hostId + " due to unable to eject it from pool");
}
}
}
txn.start();
_dcDao.releasePrivateIpAddress(host.getPrivateIpAddress(), host.getDataCenterId(), null);
AgentAttache attache = _agents.get(hostId);
handleDisconnect(attache, Status.Event.Remove, false);
//delete host details
_hostDetailsDao.deleteDetails(hostId);
host.setGuid(null);
host.setClusterId(null);
_hostDao.update(host.getId(), host);
_hostDao.remove(hostId);
//delete the associated primary storage from db
ComponentLocator locator = ComponentLocator.getLocator("management-server");
_storagePoolHostDao = locator.getDao(StoragePoolHostDao.class);
if (_storagePoolHostDao == null) {
throw new ConfigurationException("Unable to get storage pool host dao: " + StoragePoolHostDao.class);
}
//1. Get the pool_ids from the host ref table
ArrayList<Long> pool_ids = _storagePoolHostDao.getPoolIds(hostId);
//2.Delete the associated entries in host ref table
_storagePoolHostDao.deletePrimaryRecordsForHost(hostId);
//3. For the pool ids retrieved, delete the local entries in the pool table (type 'FileSystem' or 'LVM')
for( Long poolId : pool_ids) {
StoragePoolVO storagePool = _storagePoolDao.findById(poolId);
if( storagePool.isLocal()) {
storagePool.setUuid(null);
storagePool.setClusterId(null);
_storagePoolDao.update(poolId, storagePool);
_storagePoolDao.remove(poolId);
}
}
txn.commit();
return true;
} catch (Throwable t) {
s_logger.error("Unable to delete host: " + hostId, t);
return false;
}
}
public boolean deleteHost(DeleteHostCmd cmd) throws InvalidParameterValueException{
Long id = cmd.getId();
//Verify that host exists
HostVO host = _hostDao.findById(id);
if (host == null) {
throw new InvalidParameterValueException("Host with id " + id.toString() + " doesn't exist");
}
return deleteHost(id);
}
@DB
protected boolean deleteSecondaryStorageHost(HostVO secStorageHost) {
long zoneId = secStorageHost.getDataCenterId();
long hostId = secStorageHost.getId();
Transaction txn = Transaction.currentTxn();
try {
List<VMInstanceVO> allVmsInZone = _vmDao.listByZoneId(zoneId);
if (!allVmsInZone.isEmpty()) {
s_logger.warn("Cannot delete secondary storage host when there are " + allVmsInZone.size() + " vms in zone " + zoneId);
return false;
}
txn.start();
if (!_hostDao.updateStatus(secStorageHost, Event.MaintenanceRequested, _nodeId)) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Unable to take host " + hostId + " into maintenance mode. Delete call is ignored");
}
return false;
}
if (!_hostDao.updateStatus(secStorageHost, Event.PreparationComplete, _nodeId)) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Unable to take host " + hostId + " into maintenance mode. Delete call is ignored");
}
return false;
}
AgentAttache attache = _agents.get(hostId);
handleDisconnect(attache, Status.Event.Remove, false);
//now delete the host
_hostDao.remove(secStorageHost.getId());
//delete the templates associated with this host
SearchCriteria<VMTemplateHostVO> templateHostSC = _vmTemplateHostDao.createSearchCriteria();
templateHostSC.addAnd("hostId", SearchCriteria.Op.EQ, secStorageHost.getId());
_vmTemplateHostDao.remove(templateHostSC);
/*Disconnected agent needs special handling here*/
secStorageHost.setGuid(null);
txn.commit();
return true;
}catch (Throwable t) {
s_logger.error("Unable to delete sec storage host: " + secStorageHost.getId(), t);
return false;
}
}
@Override
public boolean isVirtualMachineUpgradable(final UserVm vm, final ServiceOffering offering) {
Enumeration<HostAllocator> en = _hostAllocators.enumeration();
boolean isMachineUpgradable = true;
while (isMachineUpgradable && en.hasMoreElements()) {
final HostAllocator allocator = en.nextElement();
isMachineUpgradable = allocator.isVirtualMachineUpgradable(vm, offering);
}
return isMachineUpgradable;
}
protected int getPingInterval() {
return _pingInterval;
}
@Override
public Answer send(Long hostId, Command cmd, int timeout) throws AgentUnavailableException, OperationTimedoutException {
Commands cmds = new Commands(OnError.Revert);
cmds.addCommand(cmd);
send(hostId, cmds, timeout);
Answer[] answers = cmds.getAnswers();
if (answers != null && !(answers[0] instanceof UnsupportedAnswer)) {
return answers[0];
}
if (answers != null && (answers[0] instanceof UnsupportedAnswer)) {
s_logger.warn("Unsupported Command: " + answers[0].getDetails());
return answers[0];
}
return null;
}
@Override
public Answer[] send(Long hostId, Commands commands, int timeout) throws AgentUnavailableException, OperationTimedoutException {
assert hostId != null : "Who's not checking the agent id before sending? ... (finger wagging)";
if (hostId == null) {
throw new AgentUnavailableException(-1);
}
Command[] cmds = commands.toCommands();
assert cmds.length > 0 : "Ask yourself this about a hundred times. Why am I sending zero length commands?";
if (cmds.length == 0) {
commands.setAnswers(new Answer[0]);
}
final AgentAttache agent = getAttache(hostId);
if (agent == null || agent.isClosed()) {
throw new AgentUnavailableException("agent not logged into this management server", hostId);
}
long seq = _hostDao.getNextSequence(hostId);
Request req = new Request(seq, hostId, _nodeId, cmds, commands.stopOnError(), true, commands.revertOnError());
Answer[] answers = agent.send(req, timeout);
commands.setAnswers(answers);
return answers;
}
protected Status investigate(AgentAttache agent) {
Long hostId = agent.getId();
if (s_logger.isDebugEnabled()) {
s_logger.debug("checking if agent (" + hostId + ") is alive");
}
try {
long seq = _hostDao.getNextSequence(hostId);
Request req = new Request(seq, hostId, _nodeId, new CheckHealthCommand(), true);
Answer[] answers = agent.send(req, 50 * 1000);
if (answers != null && answers[0] != null ) {
Status status = answers[0].getResult() ? Status.Up : Status.Down;
if (s_logger.isDebugEnabled()) {
s_logger.debug("agent (" + hostId + ") responded to checkHeathCommand, reporting that agent is " + status);
}
return status;
}
} catch (AgentUnavailableException e) {
s_logger.debug("Agent is unavailable so we move on.");
} catch (OperationTimedoutException e) {
s_logger.debug("Timed Out " + e.getMessage());
}
return _haMgr.investigate(hostId);
}
protected AgentAttache getAttache(final Long hostId) throws AgentUnavailableException {
assert (hostId != null) : "Who didn't check their id value?";
if (hostId == null) {
return null;
}
AgentAttache agent = findAttache(hostId);
if (agent == null) {
s_logger.debug("Unable to find agent for " + hostId);
throw new AgentUnavailableException("Unable to find agent ", hostId);
}
return agent;
}
@Override
public long send(Long hostId, Commands commands, Listener listener) throws AgentUnavailableException {
final AgentAttache agent = getAttache(hostId);
if (agent.isClosed()) {
return -1;
}
Command[] cmds = commands.toCommands();
assert cmds.length > 0 : "Why are you sending zero length commands?";
if (cmds.length == 0) {
return -1;
}
long seq = _hostDao.getNextSequence(hostId);
Request req = new Request(seq, hostId, _nodeId, cmds, commands.stopOnError(), true, commands.revertOnError());
agent.send(req, listener);
return seq;
}
@Override
public long gatherStats(final Long hostId, final Command cmd, final Listener listener) {
try {
return send(hostId, new Commands(cmd), listener);
} catch (final AgentUnavailableException e) {
return -1;
}
}
@Override
public void disconnect(final long hostId, final Status.Event event, final boolean investigate) {
AgentAttache attache = _agents.get(hostId);
if (attache != null ) {
disconnect(attache, event, investigate);
} else {
HostVO host = _hostDao.findById(hostId);
if (host != null && host.getRemoved() == null) {
if(event!=null && event.equals(Event.Remove)) {
host.setGuid(null);
host.setClusterId(null);
}
_hostDao.updateStatus(host, event, _nodeId);
}
}
}
public void disconnect(AgentAttache attache, final Status.Event event, final boolean investigate) {
_executor.submit(new DisconnectTask(attache, event, investigate));
}
protected boolean handleDisconnect(AgentAttache attache, Status.Event event, boolean investigate) {
if( attache == null )
return true;
long hostId = attache.getId();
s_logger.info("Host " + hostId + " is disconnecting with event " + event.toString());
HostVO host = _hostDao.findById(hostId);
if (host == null) {
s_logger.warn("Can't find host with " + hostId);
return false;
}
final Status currentState = host.getStatus();
if (currentState == Status.Down || currentState == Status.Alert || currentState == Status.PrepareForMaintenance) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Host " + hostId + " is already " + currentState.toString());
}
return false;
}
Status nextState = currentState.getNextStatus(event);
if (nextState == null) {
if(!(attache instanceof DirectAgentAttache)) {
return false;
}
s_logger.debug("There is no transition from state " + currentState.toString() + " and event " + event.toString());
assert false : "How did we get here. Look at the FSM";
return false;
}
if (s_logger.isDebugEnabled()) {
s_logger.debug("The next state is " + nextState.toString() + ", current state is " + currentState);
}
// Now we go and correctly diagnose what the actual situation is
if (nextState == Status.Alert && investigate) {
s_logger.info("Investigating why host " + hostId + " has disconnected with event " + event.toString());
final Status determinedState = investigate(attache);
s_logger.info("The state determined is " + (determinedState != null ? determinedState.toString() : "undeterminable"));
if (determinedState == null || determinedState == Status.Down) {
s_logger.error("Host is down: " + host.getId() + "-" + host.getName() + ". Starting HA on the VMs");
event = Event.HostDown;
} else if (determinedState == Status.Up) {
// we effectively pinged from the server here.
s_logger.info("Agent is determined to be up and running");
_hostDao.updateStatus(host, Event.Ping, _nodeId);
return false;
} else if (determinedState == Status.Disconnected) {
s_logger.warn("Agent is disconnected but the host is still up: " + host.getId() + "-" + host.getName());
if (currentState == Status.Disconnected) {
if (((System.currentTimeMillis() >> 10) - host.getLastPinged()) > _alertWait) {
s_logger.warn("Host " + host.getId() + " has been disconnected pass the time it should be disconnected.");
event = Event.WaitedTooLong;
} else {
s_logger.debug("Host has been determined to be disconnected but it hasn't passed the wait time yet.");
return false;
}
} else if (currentState == Status.Updating) {
if (((System.currentTimeMillis() >> 10) - host.getLastPinged()) > _updateWait) {
s_logger.warn("Host " + host.getId() + " has been updating for too long");
event = Event.WaitedTooLong;
} else {
s_logger.debug("Host has been determined to be disconnected but it hasn't passed the wait time yet.");
return false;
}
} else if (currentState == Status.Up) {
DataCenterVO dcVO = _dcDao.findById(host.getDataCenterId());
HostPodVO podVO = _podDao.findById(host.getPodId());
String hostDesc = "name: " + host.getName() + " (id:" + host.getId() + "), availability zone: " + dcVO.getName() + ", pod: "
+ podVO.getName();
_alertMgr.sendAlert(AlertManager.ALERT_TYPE_HOST, host.getDataCenterId(), host.getPodId(), "Host disconnected, " + hostDesc,
"If the agent for host [" + hostDesc + "] is not restarted within " + _alertWait + " seconds, HA will begin on the VMs");
event = Event.AgentDisconnected;
}
} else {
// if we end up here we are in alert state, send an alert
DataCenterVO dcVO = _dcDao.findById(host.getDataCenterId());
HostPodVO podVO = _podDao.findById(host.getPodId());
String hostDesc = "name: " + host.getName() + " (id:" + host.getId() + "), availability zone: " + dcVO.getName() + ", pod: " + podVO.getName();
_alertMgr.sendAlert(AlertManager.ALERT_TYPE_HOST, host.getDataCenterId(), host.getPodId(), "Host in ALERT state, " + hostDesc,
"In availability zone " + host.getDataCenterId() + ", host is in alert state: " + host.getId() + "-" + host.getName());
}
}
if (s_logger.isDebugEnabled()) {
s_logger.debug("Deregistering link for " + hostId + " with state " + nextState);
}
_hostDao.disconnect(host, event, _nodeId);
synchronized (_agents) {
AgentAttache removed = _agents.remove(hostId);
}
host = _hostDao.findById(host.getId());
if (host.getStatus() == Status.Alert || host.getStatus() == Status.Down) {
_haMgr.scheduleRestartForVmsOnHost(host);
}
attache.disconnect(nextState);
for (Pair<Integer, Listener> monitor : _hostMonitors) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Sending Disconnect to listener: " + monitor.second().getClass().getName());
}
monitor.second().processDisconnect(hostId, nextState);
}
return true;
}
protected AgentAttache notifyMonitorsOfConnection(AgentAttache attache, final StartupCommand[] cmd) {
long hostId = attache.getId();
HostVO host = _hostDao.findById(hostId);
for (Pair<Integer, Listener> monitor : _hostMonitors) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Sending Connect to listener: " + monitor.second().getClass().getSimpleName());
}
for (int i = 0; i < cmd.length; i++) {
if (!monitor.second().processConnect(host, cmd[i])) {
s_logger.info("Monitor " + monitor.second().getClass().getSimpleName() + " says not to continue the connect process for " + hostId);
handleDisconnect(attache, Event.AgentDisconnected, false);
return attache;
}
}
}
Long dcId = host.getDataCenterId();
ReadyCommand ready = new ReadyCommand(dcId);
Answer answer = easySend(hostId, ready);
if (answer == null) {
// this is a tricky part for secondary storage:
// mark it as disconnected and wait for the secondary storage VM to come up;
// return the attache instead of null, even though it is disconnected
handleDisconnect(attache, Event.AgentDisconnected, false);
}
_hostDao.updateStatus(host, Event.Ready, _nodeId);
attache.ready();
return attache;
}
@Override
public boolean start() {
startDirectlyConnectedHosts();
if (_monitor != null) {
_monitor.start();
}
_connection.start();
return true;
}
public void startDirectlyConnectedHosts() {
List<HostVO> hosts = _hostDao.findDirectlyConnectedHosts();
for (HostVO host : hosts) {
loadDirectlyConnectedHost(host, null);
}
}
@SuppressWarnings("rawtypes")
protected void loadDirectlyConnectedHost(HostVO host, ActionDelegate<Long> actionDelegate) {
String resourceName = host.getResource();
ServerResource resource = null;
try {
Class<?> clazz = Class.forName(resourceName);
Constructor constructor = clazz.getConstructor();
resource = (ServerResource) constructor.newInstance();
} catch (ClassNotFoundException e) {
s_logger.warn("Unable to find class " + host.getResource(), e);
return;
} catch (InstantiationException e) {
s_logger.warn("Unablet to instantiate class " + host.getResource(), e);
return;
} catch (IllegalAccessException e) {
s_logger.warn("Illegal access " + host.getResource(), e);
return;
} catch (SecurityException e) {
s_logger.warn("Security error on " + host.getResource(), e);
return;
} catch (NoSuchMethodException e) {
s_logger.warn("NoSuchMethodException error on " + host.getResource(), e);
return;
} catch (IllegalArgumentException e) {
s_logger.warn("IllegalArgumentException error on " + host.getResource(), e);
return;
} catch (InvocationTargetException e) {
s_logger.warn("InvocationTargetException error on " + host.getResource(), e);
return;
}
_hostDao.loadDetails(host);
HashMap<String, Object> params = new HashMap<String, Object>(host.getDetails().size() + 5);
params.putAll(host.getDetails());
// private.network.device may change when reconnect
params.remove("private.network.device");
params.put("private.network.device", _privateNic);
params.remove("public.network.device");
params.put("public.network.device", _publicNic);
params.remove("guest.network.device");
params.put("guest.network.device", _guestNic);
params.put("guid", host.getGuid());
params.put("zone", Long.toString(host.getDataCenterId()));
if (host.getPodId() != null) {
params.put("pod", Long.toString(host.getPodId()));
}
if (host.getClusterId() != null) {
params.put("cluster", Long.toString(host.getClusterId()));
}
params.put("secondary.storage.vm", "false");
params.put("max.template.iso.size", _configDao.getValue("max.template.iso.size"));
try {
resource.configure(host.getName(), params);
} catch (ConfigurationException e) {
s_logger.warn("Unable to configure resource due to ", e);
return;
}
if (!resource.start()) {
s_logger.warn("Unable to start the resource");
return;
}
_executor.execute(new SimulateStartTask(host.getId(), resource, host.getDetails(), actionDelegate));
}
protected AgentAttache simulateStart(ServerResource resource, Map<String, String> details, boolean old) throws IllegalArgumentException{
StartupCommand[] cmds = resource.initialize();
if (cmds == null )
return null;
AgentAttache attache = null;
if (s_logger.isDebugEnabled()) {
s_logger.debug("Startup request from directly connected host: " + new Request(0l, -1l, -1l, cmds, true, false, true).toString());
}
try {
attache = handleDirectConnect(resource, cmds, details, old);
} catch (IllegalArgumentException ex) {
s_logger.warn("Unable to connect due to ", ex);
throw ex;
} catch (Exception e) {
s_logger.warn("Unable to connect due to ", e);
}
if (attache == null) {
resource.disconnected();
return null;
}
if( attache.isReady()) {
StartupAnswer[] answers = new StartupAnswer[cmds.length];
for (int i = 0; i < answers.length; i++) {
answers[i] = new StartupAnswer(cmds[i], attache.getId(), _pingInterval);
}
attache.process(answers);
}
return attache;
}
@Override
public boolean stop() {
if (_monitor != null) {
_monitor.signalStop();
}
if (_connection != null) {
_connection.stop();
}
s_logger.info("Disconnecting agents: " + _agents.size());
synchronized (_agents) {
for (final AgentAttache agent : _agents.values()) {
final HostVO host = _hostDao.findById(agent.getId());
if( host == null ) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Cant not find host " + agent.getId());
}
} else {
_hostDao.updateStatus(host, Event.ManagementServerDown, _nodeId);
}
}
}
return true;
}
@Override
public Pair<HostPodVO, Long> findPod(final VirtualMachineTemplate template, ServiceOfferingVO offering, final DataCenterVO dc, final long accountId, Set<Long> avoids) {
final Enumeration en = _podAllocators.enumeration();
while (en.hasMoreElements()) {
final PodAllocator allocator = (PodAllocator) en.nextElement();
final Pair<HostPodVO, Long> pod = allocator.allocateTo(template, offering, dc, accountId, avoids);
if (pod != null) {
return pod;
}
}
return null;
}
public Pod findPod(VirtualMachineProfile vm, DataCenter dc, Set<? extends Pod> avoids) {
for (PodAllocator allocator : _podAllocators) {
Pod pod = allocator.allocateTo(vm, dc, avoids);
if (pod != null) {
s_logger.debug("Pod " + pod.getId() + " is found by " + allocator.getName());
return pod;
}
}
s_logger.debug("Unable to find any pod for " + vm);
return null;
}
@Override
public HostStats getHostStatistics(long hostId) throws InternalErrorException {
HostVO host = _hostDao.findById(hostId);
Answer answer = easySend(hostId, new GetHostStatsCommand(host.getGuid(), host.getName(), hostId));
if (answer != null && (answer instanceof UnsupportedAnswer)) {
return null;
}
if (answer == null || !answer.getResult()) {
String msg = "Unable to obtain host " + hostId + " statistics. ";
s_logger.warn(msg);
return null;
} else {
//now construct the result object
if (answer instanceof GetHostStatsAnswer) {
return ((GetHostStatsAnswer) answer).getHostStats();
}
}
return null;
}
@Override
public Long getGuestOSCategoryId(long hostId) {
HostVO host = _hostDao.findById(hostId);
if (host == null) {
return null;
} else {
_hostDao.loadDetails(host);
DetailVO detail = _hostDetailsDao.findDetail(hostId, "guest.os.category.id");
if (detail == null) {
return null;
} else {
return Long.parseLong(detail.getValue());
}
}
}
@Override
public String getName() {
return _name;
}
protected class DisconnectTask implements Runnable {
AgentAttache _attache;
Status.Event _event;
boolean _investigate;
DisconnectTask(final AgentAttache attache, final Status.Event event, final boolean investigate) {
_attache = attache;
_event = event;
_investigate = investigate;
}
@Override
public void run() {
try {
handleDisconnect(_attache, _event, _investigate);
} catch (final Exception e) {
s_logger.error("Exception caught while handling disconnect: ", e);
} finally {
StackMaid.current().exitCleanup();
}
}
}
@Override
public Answer easySend(final Long hostId, final Command cmd) {
return easySend(hostId, cmd, _wait);
}
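// Illustrative sketch of the easySend contract (hypothetical caller): a null return can mean the host
// was not Up/Connecting, the agent was unavailable, the call timed out, or the command returned a
// failed result; the details are logged here rather than propagated.
//   Answer a = easySend(hostId, new MaintainCommand());
//   if (a == null) { /* treat as failure */ }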
@Override
public Answer easySend(final Long hostId, final Command cmd, int timeout) {
try {
Host h = _hostDao.findById(hostId);
Status status = h.getStatus();
if( !status.equals(Status.Up) && !status.equals(Status.Connecting) ){
return null;
}
final Answer answer = send(hostId, cmd, timeout);
if (answer == null) {
s_logger.warn("send returns null answer");
return null;
}
if (!answer.getResult()) {
s_logger.warn("Unable to execute command: " + cmd.toString() + " due to " + answer.getDetails());
return null;
}
if (s_logger.isDebugEnabled() && answer.getDetails() != null) {
s_logger.debug("Details from executing " + cmd.getClass().toString() + ": " + answer.getDetails());
}
return answer;
} catch (final AgentUnavailableException e) {
s_logger.warn(e.getMessage());
return null;
} catch (final OperationTimedoutException e) {
s_logger.warn("Operation timed out: " + e.getMessage());
return null;
} catch (final Exception e) {
s_logger.warn("Exception while sending", e);
return null;
}
}
@Override
public Answer send(final Long hostId, final Command cmd) throws AgentUnavailableException, OperationTimedoutException {
return send(hostId, cmd, _wait);
}
@Override
public Answer[] send(final Long hostId, Commands cmds) throws AgentUnavailableException, OperationTimedoutException {
return send(hostId, cmds, _wait);
}
@Override
public HostVO reconnectHost(ReconnectHostCmd cmd) throws AgentUnavailableException {
Long hostId = cmd.getId();
HostVO host = _hostDao.findById(hostId);
if (host == null) {
throw new ServerApiException(BaseCmd.PARAM_ERROR, "Host with id " + hostId.toString() + " doesn't exist");
}
boolean result = reconnect(hostId);
if (result) {
return host;
}
throw new ServerApiException(BaseCmd.INTERNAL_ERROR, "Failed to reconnect host with id " + hostId.toString() + ", internal error.");
}
@Override
public boolean reconnect(final long hostId) throws AgentUnavailableException {
HostVO host;
host = _hostDao.findById(hostId);
if (host == null || host.getRemoved() != null) {
s_logger.warn("Unable to find host " + hostId);
return false;
}
if (host.getStatus() != Status.Up && host.getStatus() != Status.Alert) {
s_logger.info("Unable to disconnect host because it is not in the correct state: host=" + hostId + "; Status=" + host.getStatus());
return false;
}
AgentAttache attache = findAttache(hostId);
if (attache == null) {
s_logger.info("Unable to disconnect host because it is not connected to this server: " + hostId);
return false;
}
disconnect(attache, Event.ShutdownRequested, false);
return true;
}
@Override
public boolean cancelMaintenance(final long hostId) {
HostVO host;
host = _hostDao.findById(hostId);
if (host == null || host.getRemoved() != null) {
s_logger.warn("Unable to find host " + hostId);
return true;
}
if (host.getStatus() != Status.PrepareForMaintenance && host.getStatus() != Status.Maintenance && host.getStatus() != Status.ErrorInMaintenance) {
return true;
}
_haMgr.cancelScheduledMigrations(host);
List<VMInstanceVO> vms = _haMgr.findTakenMigrationWork();
for (VMInstanceVO vm : vms) {
if (vm.getHostId() != null && vm.getHostId() == hostId) {
s_logger.info("Unable to cancel migration because the vm is being migrated: " + vm.toString());
return false;
}
}
disconnect(hostId, Event.ResetRequested, false);
return true;
}
@Override
public HostVO cancelMaintenance(CancelMaintenanceCmd cmd) throws InvalidParameterValueException{
Long hostId = cmd.getId();
//verify input parameters
HostVO host = _hostDao.findById(hostId);
if (host == null || host.getRemoved() != null) {
throw new ServerApiException(BaseCmd.PARAM_ERROR, "Host with id " + hostId.toString() + " doesn't exist");
}
boolean success = cancelMaintenance(hostId);
if (!success) {
throw new ServerApiException(BaseCmd.INTERNAL_ERROR, "Internal error cancelling maintenance.");
}
return host;
}
@Override
public boolean executeUserRequest(long hostId, Event event) throws AgentUnavailableException {
if (event == Event.MaintenanceRequested) {
return maintain(hostId);
} else if (event == Event.ResetRequested) {
return cancelMaintenance(hostId);
} else if (event == Event.Remove) {
return deleteHost(hostId);
} else if (event == Event.AgentDisconnected) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Received agent disconnect event for host " + hostId);
}
AgentAttache attache = null;
synchronized (_agents) {
attache = _agents.get(hostId);
}
if (attache != null) {
handleDisconnect(attache, Event.AgentDisconnected, false);
}
return true;
} else if (event == Event.ShutdownRequested) {
return reconnect(hostId);
}
return false;
}
@Override
public boolean maintain(final long hostId) throws AgentUnavailableException {
HostVO host = _hostDao.findById(hostId);
Status state;
Answer answer = easySend(hostId, new MaintainCommand());
if (answer == null || !answer.getResult()) {
s_logger.warn("Unable to put host in maintainance mode: " + hostId);
return false;
}
// Let's put this guy in maintenance state
do {
host = _hostDao.findById(hostId);
if (host == null) {
s_logger.debug("Unable to find host " + hostId);
return false;
}
state = host.getStatus();
if (state == Status.Disconnected || state == Status.Updating) {
s_logger.debug("Unable to put host " + hostId + " in matinenance mode because it is currently in " + state.toString());
throw new AgentUnavailableException("Agent is in " + state.toString() + " state. Please wait for it to become Alert state try again.", hostId);
}
} while (!_hostDao.updateStatus(host, Event.MaintenanceRequested, _nodeId));
AgentAttache attache;
synchronized (_agents) {
attache = _agents.get(hostId);
if (attache != null) {
attache.setMaintenanceMode(true);
}
}
if (attache != null) {
// Now cancel all of the commands except for the active one.
attache.cancelAllCommands(Status.PrepareForMaintenance, false);
}
final Host.Type type = host.getType();
if (type == Host.Type.Routing) {
final List<VMInstanceVO> vms = _vmDao.listByHostId(hostId);
if (vms.size() == 0) {
return true;
}
for (final VMInstanceVO vm : vms) {
_haMgr.scheduleMigration(vm);
}
} else {
final List<Long> ids = _volDao.findVmsStoredOnHost(hostId);
for (final Long id : ids) {
final VMInstanceVO instance = _vmDao.findById(id);
if (instance != null && (instance.getState() == State.Running || instance.getState() == State.Starting)) {
_haMgr.scheduleStop(instance, host.getId(), false);
}
}
}
return true;
}
@Override
public boolean maintain(PrepareForMaintenanceCmd cmd) throws InvalidParameterValueException {
Long hostId = cmd.getId();
HostVO host = _hostDao.findById(hostId);
if (host == null) {
s_logger.debug("Unable to find host " + hostId);
throw new InvalidParameterValueException("Unable to find host with ID: " + hostId + ". Please specify a valid host ID.");
}
if (_hostDao.countBy(host.getClusterId(), Status.PrepareForMaintenance, Status.ErrorInMaintenance, Status.Maintenance) > 0) {
throw new InvalidParameterValueException("There are other servers in maintenance mode.");
}
if (_storageMgr.isLocalStorageActiveOnHost(host)) {
throw new InvalidParameterValueException("There are active VMs using the host's local storage pool. Please stop all VMs on this host that use local storage.");
}
try {
return maintain(hostId);
} catch (AgentUnavailableException e) {
return false;
}
}
public boolean checkCIDR(Host.Type type, HostPodVO pod, String serverPrivateIP, String serverPrivateNetmask) {
if (serverPrivateIP == null) {
return true;
}
// Get the CIDR address and CIDR size
String cidrAddress = pod.getCidrAddress();
long cidrSize = pod.getCidrSize();
// If the server's private IP address is not in the same subnet as the
// pod's CIDR, return false
String cidrSubnet = NetUtils.getCidrSubNet(cidrAddress, cidrSize);
String serverSubnet = NetUtils.getSubNet(serverPrivateIP, serverPrivateNetmask);
if (!cidrSubnet.equals(serverSubnet)) {
return false;
}
// If the server's private netmask is less inclusive than the pod's CIDR
// netmask, return false
String cidrNetmask = NetUtils.getCidrSubNet("255.255.255.255", cidrSize);
long cidrNetmaskNumeric = NetUtils.ip2Long(cidrNetmask);
long serverNetmaskNumeric = NetUtils.ip2Long(serverPrivateNetmask);
if (serverNetmaskNumeric > cidrNetmaskNumeric) {
return false;
}
return true;
}
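// Worked example of the subnet/netmask math used by both checkCIDR variants (hypothetical values):
// with a pod CIDR of 192.168.1.0/24, cidrSubnet is "192.168.1.0"; a server at 192.168.1.42 with
// netmask 255.255.255.0 yields the same subnet, so the first check passes. The netmask comparison
// then rejects masks narrower than the pod's: 255.255.255.128 (/25) is numerically greater than
// 255.255.255.0 (/24), so such a server netmask would be considered less inclusive and fail.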
protected void checkCIDR(Host.Type type, HostPodVO pod, DataCenterVO dc, String serverPrivateIP, String serverPrivateNetmask) throws IllegalArgumentException {
// Skip this check for Storage Agents and Console Proxies
if (type == Host.Type.Storage || type == Host.Type.ConsoleProxy)
return;
if (serverPrivateIP == null) {
return;
}
// Get the CIDR address and CIDR size
String cidrAddress = pod.getCidrAddress();
long cidrSize = pod.getCidrSize();
// If the server's private IP address is not in the same subnet as the
// pod's CIDR, return false
String cidrSubnet = NetUtils.getCidrSubNet(cidrAddress, cidrSize);
String serverSubnet = NetUtils.getSubNet(serverPrivateIP, serverPrivateNetmask);
if (!cidrSubnet.equals(serverSubnet)) {
s_logger.warn("The private ip address of the server (" + serverPrivateIP + ") is not compatible with the CIDR of pod: "
+ pod.getName() + " and zone: " + dc.getName());
throw new IllegalArgumentException("The private ip address of the server (" + serverPrivateIP + ") is not compatible with the CIDR of pod: "
+ pod.getName() + " and zone: " + dc.getName());
}
// If the server's private netmask is less inclusive than the pod's CIDR
// netmask, return false
String cidrNetmask = NetUtils.getCidrSubNet("255.255.255.255", cidrSize);
long cidrNetmaskNumeric = NetUtils.ip2Long(cidrNetmask);
long serverNetmaskNumeric = NetUtils.ip2Long(serverPrivateNetmask);
if (serverNetmaskNumeric > cidrNetmaskNumeric) {
throw new IllegalArgumentException("The private ip address of the server (" + serverPrivateIP + ") is not compatible with the CIDR of pod: "
+ pod.getName() + " and zone: " + dc.getName());
}
}
public void checkIPConflicts(Host.Type type, HostPodVO pod, DataCenterVO dc, String serverPrivateIP, String serverPrivateNetmask, String serverPublicIP,
String serverPublicNetmask) {
// If the server's private IP is the same as its public IP, this host has
// a host-only private network. Don't check for conflicts with the
// private IP address table.
if (serverPrivateIP != serverPublicIP) {
if (!_privateIPAddressDao.mark(dc.getId(), pod.getId(), serverPrivateIP)) {
// If the server's private IP address is already in the
// database, return false
List<DataCenterIpAddressVO> existingPrivateIPs = _privateIPAddressDao.listByPodIdDcIdIpAddress(pod.getId(), dc.getId(), serverPrivateIP);
assert existingPrivateIPs.size() <= 1 : " How can we get more than one ip address with " + serverPrivateIP;
if (existingPrivateIPs.size() > 1) {
throw new IllegalArgumentException("The private ip address of the server (" + serverPrivateIP + ") is already in use in pod: "
+ pod.getName() + " and zone: " + dc.getName());
}
if (existingPrivateIPs.size() == 1) {
DataCenterIpAddressVO vo = existingPrivateIPs.get(0);
if (vo.getInstanceId() != null) {
throw new IllegalArgumentException("The private ip address of the server (" + serverPrivateIP + ") is already in use in pod: "
+ pod.getName() + " and zone: " + dc.getName());
}
}
}
}
if (serverPublicIP != null && !_publicIPAddressDao.mark(dc.getId(), serverPublicIP)) {
// If the server's public IP address is already in the database,
// return false
List<IPAddressVO> existingPublicIPs = _publicIPAddressDao.listByDcIdIpAddress(dc.getId(), serverPublicIP);
if (existingPublicIPs.size() > 0) {
throw new IllegalArgumentException("The public ip address of the server (" + serverPublicIP + ") is already in use in zone: " + dc.getName());
}
}
}
public HostVO createHost(final StartupCommand startup, ServerResource resource, Map<String, String> details, boolean directFirst) throws IllegalArgumentException {
Host.Type type = null;
if (startup instanceof StartupStorageCommand) {
StartupStorageCommand ssCmd = ((StartupStorageCommand) startup);
if (ssCmd.getResourceType() == Storage.StorageResourceType.SECONDARY_STORAGE) {
type = Host.Type.SecondaryStorage;
if (resource != null && resource instanceof DummySecondaryStorageResource){
resource = null;
}
} else {
type = Host.Type.Storage;
}
final Map<String, String> hostDetails = ssCmd.getHostDetails();
if (hostDetails != null) {
if (details != null) {
details.putAll(hostDetails);
} else {
details = hostDetails;
}
}
} else if (startup instanceof StartupRoutingCommand) {
StartupRoutingCommand ssCmd = ((StartupRoutingCommand) startup);
type = Host.Type.Routing;
final Map<String, String> hostDetails = ssCmd.getHostDetails();
if (hostDetails != null) {
if (details != null) {
details.putAll(hostDetails);
} else {
details = hostDetails;
}
}
} else if (startup instanceof StartupProxyCommand) {
type = Host.Type.ConsoleProxy;
} else if (startup instanceof StartupRoutingCommand) {
type = Host.Type.Routing;
} else {
assert false : "Did someone add a new Startup command?";
}
Long id = null;
HostVO server = _hostDao.findByGuid(startup.getGuid());
if (server == null) {
server = _hostDao.findByGuid(startup.getGuidWithoutResource());
}
if (server != null && server.getRemoved() == null) {
id = server.getId();
if (s_logger.isDebugEnabled()) {
s_logger.debug("Found the host " + id + " by guid: " + startup.getGuid());
}
if (directFirst) {
s_logger.debug("Old host reconnected as new");
return null;
}
} else {
server = new HostVO(startup.getGuid());
}
server.setDetails(details);
updateHost(server, startup, type, _nodeId);
if (resource != null) {
server.setResource(resource.getClass().getName());
}
if (id == null) {
if (startup instanceof StartupStorageCommand) {
server = _hostDao.persist(server);
id = server.getId();
} else if (startup instanceof StartupProxyCommand) {
server.setProxyPort(((StartupProxyCommand) startup).getProxyPort());
server = _hostDao.persist(server);
id = server.getId();
} else if (startup instanceof StartupRoutingCommand) {
server = _hostDao.persist(server);
id = server.getId();
}
s_logger.info("New " + server.getType().toString() + " host connected w/ guid " + startup.getGuid() + " and id is " + id);
} else {
if (!_hostDao.connect(server, _nodeId)) {
throw new CloudRuntimeException("Agent cannot connect because the current state is " + server.getStatus().toString());
}
s_logger.info("Old " + server.getType().toString() + " host reconnected w/ id =" + id);
}
createCapacityEntry(startup, server);
return server;
}
public HostVO createHost(final StartupCommand[] startup, ServerResource resource, Map<String, String> details, boolean directFirst) throws IllegalArgumentException {
StartupCommand firstCmd = startup[0];
HostVO result = createHost(firstCmd, resource, details, directFirst);
if( result == null ) {
return null;
}
return result;
}
public AgentAttache handleConnect(final Link link, final StartupCommand[] startup) throws IllegalArgumentException {
HostVO server = createHost(startup, null, null, false);
if ( server == null ) {
return null;
}
long id = server.getId();
AgentAttache attache = createAttache(id, server, link);
attache = notifyMonitorsOfConnection(attache, startup);
return attache;
}
public AgentAttache findAgent(long hostId) {
synchronized (_agents) {
return _agents.get(hostId);
}
}
protected AgentAttache createAttache(long id, HostVO server, Link link) {
s_logger.debug("Adding link for " + id);
final AgentAttache attache = new ConnectedAgentAttache(id, link, server.getStatus() == Status.Maintenance
|| server.getStatus() == Status.ErrorInMaintenance || server.getStatus() == Status.PrepareForMaintenance);
link.attach(attache);
AgentAttache old = null;
synchronized (_agents) {
old = _agents.get(id);
_agents.put(id, attache);
}
return attache;
}
protected AgentAttache createAttache(long id, HostVO server, ServerResource resource) {
s_logger.debug("Adding directly connect host for " + id);
if (resource instanceof DummySecondaryStorageResource || resource instanceof KvmDummyResourceBase) {
return new DummyAttache(id, false);
}
final DirectAgentAttache attache = new DirectAgentAttache(id, resource, server.getStatus() == Status.Maintenance
|| server.getStatus() == Status.ErrorInMaintenance || server.getStatus() == Status.PrepareForMaintenance, this);
AgentAttache old = null;
synchronized (_agents) {
old = _agents.get(id);
_agents.put(id, attache);
}
return attache;
}
@Override
public boolean maintenanceFailed(long hostId) {
HostVO host = _hostDao.findById(hostId);
if( host == null ) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Cant not find host " + hostId);
}
return false;
} else {
return _hostDao.updateStatus(host, Event.UnableToMigrate, _nodeId);
}
}
@Override
public HostVO updateHost(UpdateHostCmd cmd) throws InvalidParameterValueException{
Long hostId = cmd.getId();
Long guestOSCategoryId = cmd.getOsCategoryId();
if (guestOSCategoryId != null) {
// Verify that the host exists
HostVO host = _hostDao.findById(hostId);
if (host == null) {
throw new InvalidParameterValueException("Host with id " + hostId + " doesn't exist");
}
// Verify that the guest OS Category exists
if (guestOSCategoryId > 0) {
if (_guestOSCategoryDao.findById(guestOSCategoryId) == null) {
throw new InvalidParameterValueException("Please specify a valid guest OS category.");
}
}
GuestOSCategoryVO guestOSCategory = _guestOSCategoryDao.findById(guestOSCategoryId);
Map<String, String> hostDetails = _hostDetailsDao.findDetails(hostId);
if (guestOSCategory != null) {
// Save a new entry for guest.os.category.id
hostDetails.put("guest.os.category.id", String.valueOf(guestOSCategory.getId()));
} else {
// Delete any existing entry for guest.os.category.id
hostDetails.remove("guest.os.category.id");
}
_hostDetailsDao.persist(hostId, hostDetails);
}
HostVO updatedHost = _hostDao.findById(hostId);
return updatedHost;
}
protected void updateHost(final HostVO host, final StartupCommand startup, final Host.Type type, final long msId) throws IllegalArgumentException {
s_logger.debug("updateHost() called");
String dataCenter = startup.getDataCenter();
String pod = startup.getPod();
String cluster = startup.getCluster();
if (pod != null && dataCenter != null && pod.equalsIgnoreCase("default") && dataCenter.equalsIgnoreCase("default")) {
List<HostPodVO> pods = _podDao.listAllIncludingRemoved();
for (HostPodVO hpv : pods) {
if (checkCIDR(type, hpv, startup.getPrivateIpAddress(), startup.getPrivateNetmask())) {
pod = hpv.getName();
dataCenter = _dcDao.findById(hpv.getDataCenterId()).getName();
break;
}
}
}
long dcId = -1;
DataCenterVO dc = _dcDao.findByName(dataCenter);
if (dc == null) {
try {
dcId = Long.parseLong(dataCenter);
dc = _dcDao.findById(dcId);
} catch (final NumberFormatException e) {
}
}
if (dc == null) {
throw new IllegalArgumentException("Host " + startup.getPrivateIpAddress() + " sent incorrect data center: " + dataCenter);
}
dcId = dc.getId();
HostPodVO p = _podDao.findByName(pod, dcId);
if (p == null) {
try {
final long podId = Long.parseLong(pod);
p = _podDao.findById(podId);
} catch (final NumberFormatException e) {
}
}
Long podId = null;
if (p == null) {
if (type != Host.Type.SecondaryStorage) {
/*
* s_logger.info("Unable to find the pod so we are creating one."
* ); p = createPod(pod, dcId, startup.getPrivateIpAddress(),
* NetUtils.getCidrSize(startup.getPrivateNetmask())); podId =
* p.getId();
*/
s_logger.error("Host " + startup.getPrivateIpAddress() + " sent incorrect pod: " + pod + " in " + dataCenter);
throw new IllegalArgumentException("Host " + startup.getPrivateIpAddress() + " sent incorrect pod: " + pod + " in " + dataCenter);
}
} else {
podId = p.getId();
}
Long clusterId = null;
if (cluster != null) {
try {
clusterId = Long.valueOf(cluster);
} catch (NumberFormatException e) {
ClusterVO c = _clusterDao.findBy(cluster, podId);
if (c == null) {
c = new ClusterVO(dcId, podId, cluster);
c = _clusterDao.persist(c);
}
clusterId = c.getId();
}
}
if (type == Host.Type.Routing) {
StartupRoutingCommand scc = (StartupRoutingCommand) startup;
HypervisorType hypervisorType = scc.getHypervisorType();
boolean doCidrCheck = true;
// If this command is from the agent simulator, don't do the CIDR
// check
if (scc.getAgentTag() != null && scc.getAgentTag().equalsIgnoreCase("vmops-simulator"))
doCidrCheck = false;
// If this command is from a KVM agent, or from an agent that has a
// null hypervisor type, don't do the CIDR check
if (hypervisorType == null || hypervisorType == HypervisorType.KVM || hypervisorType == HypervisorType.VmWare)
doCidrCheck = false;
if (doCidrCheck)
s_logger.info("Host: " + host.getName() + " connected with hypervisor type: " + hypervisorType + ". Checking CIDR...");
else
s_logger.info("Host: " + host.getName() + " connected with hypervisor type: " + hypervisorType + ". Skipping CIDR check...");
if (doCidrCheck) {
checkCIDR(type, p, dc, scc.getPrivateIpAddress(), scc.getPrivateNetmask());
}
// Check if the private/public IPs of the server are already in the
// private/public IP address tables
checkIPConflicts(type, p, dc, scc.getPrivateIpAddress(), scc.getPrivateNetmask(), scc.getPublicIpAddress(), scc.getPublicNetmask());
}
host.setDataCenterId(dc.getId());
host.setPodId(podId);
host.setClusterId(clusterId);
host.setPrivateIpAddress(startup.getPrivateIpAddress());
host.setPrivateNetmask(startup.getPrivateNetmask());
host.setPrivateMacAddress(startup.getPrivateMacAddress());
host.setPublicIpAddress(startup.getPublicIpAddress());
host.setPublicMacAddress(startup.getPublicMacAddress());
host.setPublicNetmask(startup.getPublicNetmask());
host.setStorageIpAddress(startup.getStorageIpAddress());
host.setStorageMacAddress(startup.getStorageMacAddress());
host.setStorageNetmask(startup.getStorageNetmask());
host.setVersion(startup.getVersion());
host.setName(startup.getName());
host.setType(type);
host.setManagementServerId(msId);
host.setStorageUrl(startup.getIqn());
host.setLastPinged(System.currentTimeMillis() >> 10);
if (startup instanceof StartupRoutingCommand) {
final StartupRoutingCommand scc = (StartupRoutingCommand) startup;
host.setCaps(scc.getCapabilities());
host.setCpus(scc.getCpus());
host.setTotalMemory(scc.getMemory());
host.setSpeed(scc.getSpeed());
HypervisorType hyType = scc.getHypervisorType();
host.setHypervisorType(hyType);
} else if(startup instanceof StartupStorageCommand) {
final StartupStorageCommand ssc = (StartupStorageCommand) startup;
host.setParent(ssc.getParent());
host.setTotalSize(ssc.getTotalSize());
host.setHypervisorType(HypervisorType.None);
if (ssc.getNfsShare() != null) {
host.setStorageUrl(ssc.getNfsShare());
}
}
if (startup.getStorageIpAddressDeux() != null) {
host.setStorageIpAddressDeux(startup.getStorageIpAddressDeux());
host.setStorageMacAddressDeux(startup.getStorageMacAddressDeux());
host.setStorageNetmaskDeux(startup.getStorageNetmaskDeux());
}
}
@Override
public Host findHost(VirtualMachineProfile vm, Set<? extends Host> avoids) {
return null;
}
// create capacity entries if none exist for this server
private void createCapacityEntry(final StartupCommand startup, HostVO server) {
SearchCriteria<CapacityVO> capacitySC = _capacityDao.createSearchCriteria();
capacitySC.addAnd("hostOrPoolId", SearchCriteria.Op.EQ, server.getId());
capacitySC.addAnd("dataCenterId", SearchCriteria.Op.EQ, server.getDataCenterId());
capacitySC.addAnd("podId", SearchCriteria.Op.EQ, server.getPodId());
List<CapacityVO> capacities = _capacityDao.search(capacitySC, null);
// remove old entries, we'll recalculate them anyway
if ((capacities != null) && !capacities.isEmpty()) {
for (CapacityVO capacity : capacities) {
_capacityDao.remove(capacity.getId());
}
}
if (startup instanceof StartupStorageCommand) {
StartupStorageCommand ssCmd = (StartupStorageCommand) startup;
if (ssCmd.getResourceType() == Storage.StorageResourceType.STORAGE_HOST) {
CapacityVO capacity = new CapacityVO(server.getId(), server.getDataCenterId(), server.getPodId(), 0L, server.getTotalSize(),
CapacityVO.CAPACITY_TYPE_STORAGE);
_capacityDao.persist(capacity);
capacity = new CapacityVO(server.getId(), server.getDataCenterId(), server.getPodId(), 0L, server.getTotalSize() * _overProvisioningFactor,
CapacityVO.CAPACITY_TYPE_STORAGE_ALLOCATED);
_capacityDao.persist(capacity);
}
} else if (startup instanceof StartupRoutingCommand) {
CapacityVO capacity = new CapacityVO(server.getId(), server.getDataCenterId(), server.getPodId(), 0L,
server.getTotalMemory(), CapacityVO.CAPACITY_TYPE_MEMORY);
_capacityDao.persist(capacity);
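// Worked example of the CPU capacity value below (hypothetical numbers): 8 cpus * 2000 speed
// * a cpu over-provisioning factor of 2.0 yields a stored capacity of 32000.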
capacity = new CapacityVO(server.getId(), server.getDataCenterId(), server.getPodId(), 0L, (long)(server.getCpus().longValue()
* server.getSpeed().longValue()*_cpuOverProvisioningFactor), CapacityVO.CAPACITY_TYPE_CPU);
_capacityDao.persist(capacity);
}
}
// protected void upgradeAgent(final Link link, final byte[] request, final String reason) {
// if (reason == UnsupportedVersionException.IncompatibleVersion) {
// final UpgradeResponse response = new UpgradeResponse(request, _upgradeMgr.getAgentUrl());
// try {
// link.send(response.toBytes());
// } catch (final ClosedChannelException e) {
// s_logger.warn("Unable to send response due to connection closed: " + response.toString());
// return;
// assert (reason == UnsupportedVersionException.UnknownVersion) : "Unknown reason: " + reason;
// final UpgradeResponse response = new UpgradeResponse(request, _upgradeMgr.getAgentUrl());
// try {
// link.send(response.toBytes());
// } catch (final ClosedChannelException e) {
// s_logger.warn("Unable to send response due to connection closed: " + response.toString());
protected class SimulateStartTask implements Runnable {
ServerResource resource;
Map<String, String> details;
long id;
ActionDelegate<Long> actionDelegate;
public SimulateStartTask(long id, ServerResource resource, Map<String, String> details, ActionDelegate<Long> actionDelegate) {
this.id = id;
this.resource = resource;
this.details = details;
this.actionDelegate = actionDelegate;
}
@Override
public void run() {
try {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Simulating start for resource " + resource.getName() + " id " + id);
}
simulateStart(resource, details, false);
} catch (Exception e) {
s_logger.warn("Unable to simulate start on resource " + id + " name " + resource.getName(), e);
} finally {
if (actionDelegate != null)
actionDelegate.action(Long.valueOf(id));
StackMaid.current().exitCleanup();
}
}
}
public class AgentHandler extends Task {
public AgentHandler(Task.Type type, Link link, byte[] data) {
super(type, link, data);
}
protected void processRequest(final Link link, final Request request) {
AgentAttache attache = (AgentAttache) link.attachment();
final Command[] cmds = request.getCommands();
Command cmd = cmds[0];
boolean logD = true;
Response response = null;
if (attache == null) {
s_logger.debug("Processing sequence " + request.getSequence() + ": Processing " + request.toString());
if (!(cmd instanceof StartupCommand)) {
s_logger.warn("Throwing away a request because it came through as the first command on a connect: " + request.toString());
return;
}
StartupCommand startup = (StartupCommand) cmd;
// if ((_upgradeMgr.registerForUpgrade(-1, startup.getVersion()) == UpgradeManager.State.RequiresUpdate) && (_upgradeMgr.getAgentUrl() != null)) {
// final UpgradeCommand upgrade = new UpgradeCommand(_upgradeMgr.getAgentUrl());
// final Request req = new Request(1, -1, -1, new Command[] { upgrade }, true, true);
// s_logger.info("Agent requires upgrade: " + req.toString());
// try {
// link.send(req.toBytes());
// } catch (ClosedChannelException e) {
// s_logger.warn("Unable to tell agent it should update.");
// return;
try {
StartupCommand[] startups = new StartupCommand[cmds.length];
for (int i = 0; i < cmds.length; i++)
startups[i] = (StartupCommand) cmds[i];
attache = handleConnect(link, startups);
} catch (final IllegalArgumentException e) {
_alertMgr.sendAlert(AlertManager.ALERT_TYPE_HOST, 0, new Long(0), "Agent from " + startup.getPrivateIpAddress()
+ " is unable to connect due to " + e.getMessage(), "Agent from " + startup.getPrivateIpAddress() + " is unable to connect with "
+ request.toString() + " because of " + e.getMessage());
s_logger.warn("Unable to create attache for agent: " + request.toString(), e);
response = new Response(request, new StartupAnswer((StartupCommand) cmd, e.getMessage()), _nodeId, -1);
} catch (final CloudRuntimeException e) {
_alertMgr.sendAlert(AlertManager.ALERT_TYPE_HOST, 0, new Long(0), "Agent from " + startup.getPrivateIpAddress()
+ " is unable to connect due to " + e.getMessage(), "Agent from " + startup.getPrivateIpAddress() + " is unable to connect with "
+ request.toString() + " because of " + e.getMessage());
s_logger.warn("Unable to create attache for agent: " + request.toString(), e);
}
if (attache == null) {
if (response == null) {
s_logger.warn("Unable to create attache for agent: " + request.toString());
response = new Response(request, new StartupAnswer((StartupCommand) request.getCommand(), "Unable to register this agent"), _nodeId, -1);
}
try {
link.send(response.toBytes(), true);
} catch (final ClosedChannelException e) {
s_logger.warn("Response was not sent: " + response.toString());
}
return;
}
}
final long hostId = attache.getId();
if (s_logger.isDebugEnabled()) {
if (cmd instanceof PingRoutingCommand) {
final PingRoutingCommand ping = (PingRoutingCommand) cmd;
if (ping.getNewStates().size() > 0) {
s_logger.debug("SeqA " + hostId + "-" + request.getSequence() + ": Processing " + request.toString());
} else {
logD = false;
s_logger.debug("Ping from " + hostId);
s_logger.trace("SeqA " + hostId + "-" + request.getSequence() + ": Processing " + request.toString());
}
} else if (cmd instanceof PingCommand) {
logD = false;
s_logger.debug("Ping from " + hostId);
s_logger.trace("SeqA " + attache.getId() + "-" + request.getSequence() + ": Processing " + request.toString());
} else {
s_logger.debug("SeqA " + attache.getId() + "-" + request.getSequence() + ": Processing " + request.toString());
}
}
final Answer[] answers = new Answer[cmds.length];
for (int i = 0; i < cmds.length; i++) {
cmd = cmds[i];
Answer answer = null;
try {
if (cmd instanceof StartupRoutingCommand) {
final StartupRoutingCommand startup = (StartupRoutingCommand) cmd;
answer = new StartupAnswer(startup, attache.getId(), getPingInterval());
} else if (cmd instanceof StartupProxyCommand) {
final StartupProxyCommand startup = (StartupProxyCommand) cmd;
answer = new StartupAnswer(startup, attache.getId(), getPingInterval());
} else if (cmd instanceof StartupStorageCommand) {
final StartupStorageCommand startup = (StartupStorageCommand) cmd;
answer = new StartupAnswer(startup, attache.getId(), getPingInterval());
} else if (cmd instanceof ShutdownCommand) {
final ShutdownCommand shutdown = (ShutdownCommand) cmd;
final String reason = shutdown.getReason();
s_logger.info("Host " + attache.getId() + " has informed us that it is shutting down with reason " + reason + " and detail "
+ shutdown.getDetail());
if (reason.equals(ShutdownCommand.Update)) {
disconnect(attache, Event.UpdateNeeded, false);
} else if (reason.equals(ShutdownCommand.Requested)) {
disconnect(attache, Event.ShutdownRequested, false);
}
return;
} else if(cmd instanceof AgentControlCommand) {
answer = handleControlCommand(attache, (AgentControlCommand)cmd);
} else {
handleCommands(attache, request.getSequence(), new Command[] { cmd });
if (cmd instanceof PingCommand) {
long cmdHostId = ((PingCommand) cmd).getHostId();
// if the router is sending a ping, verify the
// gateway was pingable
if (cmd instanceof PingRoutingCommand) {
boolean gatewayAccessible = ((PingRoutingCommand) cmd).isGatewayAccessible();
HostVO host = _hostDao.findById(Long.valueOf(cmdHostId));
if (!gatewayAccessible) {
// alert that host lost connection to
// gateway (cannot ping the default route)
DataCenterVO dcVO = _dcDao.findById(host.getDataCenterId());
HostPodVO podVO = _podDao.findById(host.getPodId());
String hostDesc = "name: " + host.getName() + " (id:" + host.getId() + "), availability zone: " + dcVO.getName()
+ ", pod: " + podVO.getName();
_alertMgr.sendAlert(AlertManager.ALERT_TYPE_ROUTING, host.getDataCenterId(), host.getPodId(),
"Host lost connection to gateway, " + hostDesc, "Host [" + hostDesc
+ "] lost connection to gateway (default route) and is possibly having network connection issues.");
} else {
_alertMgr.clearAlert(AlertManager.ALERT_TYPE_ROUTING, host.getDataCenterId(), host.getPodId());
}
}
answer = new PingAnswer((PingCommand) cmd);
} else if (cmd instanceof ReadyAnswer) {
HostVO host = _hostDao.findById(attache.getId());
if( host == null ) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Cant not find host " + attache.getId());
}
} else {
s_logger.info("Host " + attache.getId() + " is now ready to processing commands.");
_hostDao.updateStatus(host, Event.Ready, _nodeId);
}
} else {
answer = new Answer(cmd);
}
}
} catch (final Throwable th) {
s_logger.warn("Caught: ", th);
answer = new Answer(cmd, false, th.getMessage());
}
answers[i] = answer;
}
response = new Response(request, answers, _nodeId, attache.getId());
if (s_logger.isDebugEnabled()) {
if (logD) {
s_logger.debug("SeqA " + attache.getId() + "-" + response.getSequence() + ": Sending " + response.toString());
} else {
s_logger.trace("SeqA " + attache.getId() + "-" + response.getSequence() + ": Sending " + response.toString());
}
}
try {
link.send(response.toBytes());
} catch (final ClosedChannelException e) {
s_logger.warn("Unable to send response because connection is closed: " + response.toString());
}
}
protected void processResponse(final Link link, final Response response) {
final AgentAttache attache = (AgentAttache) link.attachment();
if (attache == null) {
s_logger.warn("Unable to process: " + response.toString());
return;
}
if (!attache.processAnswers(response.getSequence(), response)) {
s_logger.info("Host " + attache.getId() + " - Seq " + response.getSequence() + ": Response is not processed: " + response.toString());
}
}
@Override
protected void doTask(final Task task) throws Exception {
Transaction txn = Transaction.open(Transaction.CLOUD_DB);
try {
final Type type = task.getType();
if (type == Task.Type.DATA) {
final byte[] data = task.getData();
try {
final Request event = Request.parse(data);
if (event instanceof Response) {
processResponse(task.getLink(), (Response) event);
} else {
processRequest(task.getLink(), event);
}
} catch (final UnsupportedVersionException e) {
s_logger.warn(e.getMessage());
//upgradeAgent(task.getLink(), data, e.getReason());
}
} else if (type == Task.Type.CONNECT) {
} else if (type == Task.Type.DISCONNECT) {
final Link link = task.getLink();
final AgentAttache attache = (AgentAttache) link.attachment();
if (attache != null) {
disconnect(attache, Event.AgentDisconnected, true);
} else {
s_logger.info("Connection from " + link.getIpAddress() + " closed but no cleanup was done.");
link.close();
link.terminated();
}
}
} finally {
StackMaid.current().exitCleanup();
txn.close();
}
}
}
protected AgentManagerImpl() {
}
}
|
package io.spine.server;
import com.google.protobuf.Message;
import io.grpc.stub.StreamObserver;
import io.spine.annotation.Experimental;
import io.spine.annotation.Internal;
import io.spine.core.Ack;
import io.spine.core.BoundedContextName;
import io.spine.core.BoundedContextNames;
import io.spine.core.Event;
import io.spine.option.EntityOption.Visibility;
import io.spine.server.commandbus.CommandBus;
import io.spine.server.commandstore.CommandStore;
import io.spine.server.entity.Entity;
import io.spine.server.entity.Repository;
import io.spine.server.entity.VisibilityGuard;
import io.spine.server.event.EventBus;
import io.spine.server.event.EventFactory;
import io.spine.server.integration.IntegrationBus;
import io.spine.server.integration.IntegrationEvent;
import io.spine.server.integration.grpc.IntegrationEventSubscriberGrpc;
import io.spine.server.rejection.RejectionBus;
import io.spine.server.stand.Stand;
import io.spine.server.storage.StorageFactory;
import io.spine.server.tenant.TenantIndex;
import io.spine.system.server.SystemGateway;
import io.spine.type.TypeName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Optional;
import java.util.Set;
import java.util.function.Supplier;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Suppliers.memoize;
import static io.spine.util.Exceptions.newIllegalStateException;
public abstract class BoundedContext
extends IntegrationEventSubscriberGrpc.IntegrationEventSubscriberImplBase
implements AutoCloseable {
/**
* The name of the bounded context, which is used to distinguish the context in an application
* with several bounded contexts.
*/
private final BoundedContextName name;
/** If {@code true} the bounded context serves many tenants. */
private final boolean multitenant;
private final CommandBus commandBus;
private final EventBus eventBus;
private final IntegrationBus integrationBus;
private final Stand stand;
/** Controls access to entities of all repositories registered with this bounded context. */
private final VisibilityGuard guard = VisibilityGuard.newInstance();
/** Memoized version of the {@code StorageFactory} supplier passed to the constructor. */
private final Supplier<StorageFactory> storageFactory;
private final TenantIndex tenantIndex;
BoundedContext(BoundedContextBuilder builder) {
super();
this.name = builder.getName();
this.multitenant = builder.isMultitenant();
this.storageFactory = memoize(() -> builder.buildStorageFactorySupplier()
.get());
this.commandBus = builder.buildCommandBus();
this.eventBus = builder.buildEventBus();
this.stand = builder.buildStand();
this.tenantIndex = builder.buildTenantIndex();
this.integrationBus = buildIntegrationBus(builder, eventBus, commandBus, name);
}
/**
* Creates a new instance of {@link IntegrationBus} with the given parameters.
*
* @param builder the {@link BoundedContextBuilder} to obtain
* the {@link IntegrationBus.Builder} from
* @param eventBus the initialized {@link EventBus}
* @param commandBus the initialized {@link CommandBus} to obtain the {@link RejectionBus} from
* @param name the name of the constructed bounded context
* @return new instance of {@link IntegrationBus}
*/
private static IntegrationBus buildIntegrationBus(BoundedContextBuilder builder,
EventBus eventBus,
CommandBus commandBus,
BoundedContextName name) {
Optional<IntegrationBus.Builder> busBuilder = builder.getIntegrationBus();
checkState(busBuilder.isPresent());
IntegrationBus result =
busBuilder.get()
.setBoundedContextName(name)
.setEventBus(eventBus)
.setRejectionBus(commandBus.rejectionBus())
.build();
return result;
}
/**
* Creates a new builder for {@code BoundedContext}.
*
* @return new builder instance
*/
public static BoundedContextBuilder newBuilder() {
return new BoundedContextBuilder();
}
/**
* Registers the passed repository with the {@code BoundedContext}.
*
* <p>If the repository does not have a storage assigned, it will be initialized
* using the {@code StorageFactory} associated with this bounded context.
*
* <p>Checks whether there is a default state for entity type.
*
* @param repository the repository to register
* @param <I> the type of IDs used in the repository
* @param <E> the type of entities or aggregates
* @see Repository#initStorage(StorageFactory)
*/
public <I, E extends Entity<I, ?>> void register(Repository<I, E> repository) {
checkNotNull(repository);
repository.setBoundedContext(this);
guard.register(repository);
repository.onRegistered();
}
/**
* Sends an integration event to this {@code BoundedContext}.
*/
@Experimental
@Override
public void notify(IntegrationEvent integrationEvent, StreamObserver<Ack> observer) {
Event event = EventFactory.toEvent(integrationEvent);
eventBus.post(event, observer);
}
/**
* Obtains a set of entity type names by their visibility.
*/
public Set<TypeName> getEntityTypes(Visibility visibility) {
Set<TypeName> result = guard.getEntityTypes(visibility);
return result;
}
/**
* Finds a repository by the state class of entities.
*/
@Internal
public Optional<Repository> findRepository(Class<? extends Message> entityStateClass) {
// See if there is a repository for this state at all.
if (!guard.hasRepository(entityStateClass)) {
throw newIllegalStateException("No repository found for the the entity state class %s",
entityStateClass.getName());
}
Optional<Repository> repository = guard.getRepository(entityStateClass);
return repository;
}
/** Obtains instance of {@link CommandBus} of this {@code BoundedContext}. */
public CommandBus getCommandBus() {
return this.commandBus;
}
/** Obtains instance of {@link EventBus} of this {@code BoundedContext}. */
public EventBus getEventBus() {
return this.eventBus;
}
/** Obtains instance of {@link RejectionBus} of this {@code BoundedContext}. */
public RejectionBus getRejectionBus() {
return this.commandBus.rejectionBus();
}
/** Obtains instance of {@link IntegrationBus} of this {@code BoundedContext}. */
public IntegrationBus getIntegrationBus() {
return this.integrationBus;
}
/** Obtains instance of {@link Stand} of this {@code BoundedContext}. */
public Stand getStand() {
return stand;
}
/**
* Obtains an ID of the bounded context.
*
* <p>The ID allows to identify a bounded context if a multi-context application.
* If the ID was not defined, during the building process, the context would get
* {@link BoundedContextNames#assumingTests()} name.
*
* @return the ID of this {@code BoundedContext}
*/
public BoundedContextName getName() {
return name;
}
/**
* Obtains {@link StorageFactory} associated with this {@code BoundedContext}.
*/
public StorageFactory getStorageFactory() {
return storageFactory.get();
}
/**
* @return {@code true} if the bounded context serves many tenants
*/
public boolean isMultitenant() {
return multitenant;
}
/**
* Obtains a tenant index of this Bounded Context.
*
* <p>If the Bounded Context is single-tenant returns
* {@linkplain io.spine.server.tenant.TenantIndex.Factory#singleTenant() null-object}
* implementation.
*/
@Internal
public TenantIndex getTenantIndex() {
return tenantIndex;
}
/** Obtains instance of {@link SystemGateway} of this {@code BoundedContext}. */
@Internal
public abstract SystemGateway getSystemGateway();
/**
* Closes the {@code BoundedContext} performing all necessary clean-ups.
*
* <p>This method performs the following:
* <ol>
* <li>Closes associated {@link StorageFactory}.
* <li>Closes {@link CommandBus}.
* <li>Closes {@link EventBus}.
* <li>Closes {@link IntegrationBus}.
* <li>Closes {@link CommandStore}.
* <li>Closes {@link io.spine.server.event.EventStore EventStore}.
* <li>Closes {@link Stand}.
* <li>Shuts down all registered repositories. Each registered repository is:
* <ul>
* <li>un-registered from {@link CommandBus}
* <li>un-registered from {@link EventBus}
* <li>detached from its storage
* </ul>
* </ol>
*
* @throws Exception caused by closing one of the components
*/
@Override
public void close() throws Exception {
storageFactory.get()
.close();
commandBus.close();
eventBus.close();
integrationBus.close();
stand.close();
shutDownRepositories();
log().info(closed(nameForLogging()));
}
String nameForLogging() {
return BoundedContext.class.getSimpleName() + ' ' + getName().getValue();
}
/**
* Returns the passed name with the {@code " closed."} suffix appended.
*/
private static String closed(String name) {
return name + " closed.";
}
/**
* Closes all repositories and clears {@link TenantIndex}.
*/
private void shutDownRepositories() {
guard.shutDownRepositories();
if (tenantIndex != null) {
tenantIndex.close();
}
}
private enum LogSingleton {
INSTANCE;
@SuppressWarnings("NonSerializableFieldInSerializableClass")
private final Logger value = LoggerFactory.getLogger(BoundedContext.class);
}
private static Logger log() {
return LogSingleton.INSTANCE.value;
}
}
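/*
* Usage sketch (illustrative only, not part of the original source). The builder configuration
* methods and build() live in BoundedContextBuilder and are assumed here; "TaskRepository" is a
* hypothetical Repository subclass.
*
*   BoundedContextBuilder builder = BoundedContext.newBuilder();
*   BoundedContext context = builder.build();              // assumed builder API
*   context.register(new TaskRepository());                // hypothetical repository
*   Set<TypeName> types = context.getEntityTypes(visibility); // for some EntityOption.Visibility value
*   context.close();                                       // shuts down buses, storage and repositories
*/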
|
package org.pdxfinder.services.ds;
import org.pdxfinder.dao.ModelCreation;
import org.pdxfinder.dao.OntologyTerm;
import org.pdxfinder.dao.Specimen;
import org.pdxfinder.repositories.ModelCreationRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.util.Assert;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
@Component
public class SearchDS {
private final static Logger log = LoggerFactory.getLogger(SearchDS.class);
private Set<ModelForQuery> models;
private Map<String, String> cancerSystemMap = new HashMap<>();
public static List<String> PATIENT_AGE_OPTIONS = Arrays.asList(
"0-9",
"10-19",
"20-29",
"30-39",
"40-49",
"50-59",
"60-69",
"70-79",
"80-89",
"90+",
"NA"
);
public static List<String> DATASOURCE_OPTIONS = Arrays.asList(
"JAX",
"IRCC",
"PDMR",
"PDXNet-HCI-BCM",
"PDXNet-MDAnderson",
"PDXNet-WUSTL",
"PDXNet-Wistar-MDAnderson-Penn"
);
public static List<String> PATIENT_GENDERS = new ArrayList<>();
public static List<String> CANCERS_BY_SYSTEM = Arrays.asList(
"Breast cancer",
"Cardiovascular Cancer",
"Connective and Soft Tissue cancer",
"Digestive System Cancer",
"Endocrine Cancer",
"Eye Cancer",
"Head and Neck Cancer",
"Hematopoietic and Lymphoid System Cancer",
"Nervous System Cancer",
"Peritoneal and Retroperitoneal Cancer",
"Malignant Reproductive System Cancer",
"Respiratory Tract Cancer",
"Thoracic cancer",
"Skin cancer",
"Urinary System Cancer",
"Unclassified"
);
public static List<String> SAMPLE_TUMOR_TYPE_OPTIONS = Arrays.asList(
"Primary",
"Metastatic",
"Recurrent or Relapse",
"Refractory",
"Not Specified"
);
/**
* Populate the complete set of models for searching when this object is instantiated
*/
public SearchDS(ModelCreationRepository modelCreationRepository) {
Assert.notNull(modelCreationRepository, "Model repository cannot be null");
this.models = new HashSet<>();
// Mapping NCIT ontology term labels to display labels
this.cancerSystemMap.put("Malignant Breast Neoplasm", "Breast cancer");
this.cancerSystemMap.put("Malignant Cardiovascular Neoplasm", "Cardiovascular Cancer");
this.cancerSystemMap.put("Connective and Soft Tissue Neoplasm", "Connective and Soft Tissue cancer");
this.cancerSystemMap.put("Malignant Digestive System Neoplasm", "Digestive System Cancer");
this.cancerSystemMap.put("Malignant Endocrine Neoplasm", "Endocrine Cancer");
this.cancerSystemMap.put("Malignant Eye Neoplasm", "Eye Cancer");
this.cancerSystemMap.put("Malignant Head and Neck Neoplasm", "Head and Neck Cancer");
this.cancerSystemMap.put("Hematopoietic and Lymphoid System Neoplasm", "Hematopoietic and Lymphoid System Cancer");
this.cancerSystemMap.put("Malignant Nervous System Neoplasm", "Nervous System Cancer");
this.cancerSystemMap.put("Peritoneal and Retroperitoneal Neoplasms", "Peritoneal and Retroperitoneal Cancer");
this.cancerSystemMap.put("Malignant Reproductive System Neoplasm", "Malignant Reproductive System Cancer");
this.cancerSystemMap.put("Malignant Respiratory Tract Neoplasm", "Respiratory Tract Cancer");
this.cancerSystemMap.put("Thoracic Disorder", "Thoracic cancer");
this.cancerSystemMap.put("Malignant Skin Neoplasm", "Skin cancer");
this.cancerSystemMap.put("Malignant Urinary System Neoplasm", "Urinary System Cancer");
this.cancerSystemMap.put("Unclassified", "Unclassified");
// When this class is instantiated, populate and cache the models set
for (ModelCreation mc : modelCreationRepository.getModelsWithPatientData()) {
ModelForQuery mfq = new ModelForQuery();
mfq.setModelId(mc.getId());
mfq.setExternalId(mc.getSourcePdxId());
mfq.setDatasource(mc.getDataSource());
if (mc.getSample().getPatientSnapshot().getTreatmentNaive() != null) {
mfq.setTreatmentHistory(mc.getSample().getPatientSnapshot().getTreatmentNaive().toString());
} else {
mfq.setTreatmentHistory("Not Specified");
}
if (mc.getSample().getSampleSite() != null) {
mfq.setSampleSampleSite(mc.getSample().getSampleSite().getName());
} else {
mfq.setSampleSampleSite("Not Specified");
}
if (mc.getSample().getType() != null) {
mfq.setSampleTumorType(mc.getSample().getType().getName());
} else {
mfq.setSampleTumorType("Not Specified");
}
// Patient information
mfq.setPatientAge(mc.getSample().getPatientSnapshot().getAgeBin());
mfq.setPatientGender(mc.getSample().getPatientSnapshot().getPatient().getSex());
mfq.setDiagnosis(mc.getSample().getDiagnosis());
mfq.setMappedOntologyTerm(mc.getSample().getSampleToOntologyRelationShip().getOntologyTerm().getLabel());
if (mc.getSample().getPatientSnapshot().getTreatmentNaive() != null) {
mfq.setPatientTreatmentStatus(mc.getSample().getPatientSnapshot().getTreatmentNaive().toString());
}
// Sample information
mfq.setSampleExtractionMethod(mc.getSample().getExtractionMethod());
mfq.setSampleOriginTissue(mc.getSample().getOriginTissue().getName());
mfq.setSampleClassification(mc.getSample().getClassification());
if (mc.getSample().getType() != null) {
mfq.setSampleTumorType(mc.getSample().getType().getName());
}
// Model information
Set<Specimen> specimens = mc.getSpecimens();
if (specimens != null && specimens.size() > 0) {
Specimen s = specimens.iterator().next();
mfq.setModelBackgroundStrain(s.getBackgroundStrain().getSymbol());
mfq.setModelImplantationSite(s.getImplantationSite().getName());
mfq.setModelImplantationType(s.getImplantationType().getName());
}
// Get all ancestor ontology terms into a set specific for this model
Set<OntologyTerm> allOntologyTerms = new HashSet<>();
for (OntologyTerm t : mc.getSample().getSampleToOntologyRelationShip().getOntologyTerm().getSubclassOf()) {
// getAllAncestors() returns null for terms with no parents, so guard before adding
Set<OntologyTerm> ancestors = getAllAncestors(t);
if (ancestors != null) allOntologyTerms.addAll(ancestors);
}
// Add all top level systems (translated) to the Model
for (String s : allOntologyTerms.stream().map(OntologyTerm::getLabel).collect(Collectors.toSet())) {
if (this.cancerSystemMap.keySet().contains(s)) {
if (mfq.getCancerSystem() == null) {
mfq.setCancerSystem(new ArrayList<>());
}
mfq.getCancerSystem().add(this.cancerSystemMap.get(s));
}
}
// Ensure that ALL models have a system -- even if it's not in the ontology nodes specified
if (mfq.getCancerSystem() == null || mfq.getCancerSystem().size() == 0) {
if (mfq.getCancerSystem() == null) {
mfq.setCancerSystem(new ArrayList<>());
}
mfq.getCancerSystem().add(this.cancerSystemMap.get("Unclassified"));
}
// TODO: Complete the organ options
// TODO: Complete the cell type options
// TODO: Complete the patient treatment options
models.add(mfq);
}
// try (FileOutputStream fout = new FileOutputStream("/models.ser"); ObjectOutputStream oos = new ObjectOutputStream(fout)) {
// oos.writeObject(models);
// } catch (IOException e) {
// log.warn("Cannot serialize models to file, startup times will be slow", e);
PATIENT_GENDERS = models.stream().map(ModelForQuery::getPatientGender).distinct().collect(Collectors.toList());
}
/**
* Recursively get all ancestors starting from the supplied ontology term
*
* @param t the starting term in the ontology
* @return a set of ontology terms corresponding to the ancestors of the supplied term, or {@code null} if the term has no ancestors
*/
public Set<OntologyTerm> getAllAncestors(OntologyTerm t) {
Set<OntologyTerm> retSet = new HashSet<>();
if (t.getSubclassOf() == null || t.getSubclassOf().size() == 0) {
return null;
}
for (OntologyTerm st : t.getSubclassOf()) {
retSet.add(st);
Set<OntologyTerm> intSet = getAllAncestors(st);
if (intSet != null) retSet.addAll(intSet);
}
return retSet;
}
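/*
* Worked example (hypothetical term labels): given the hierarchy
*   "Lung Carcinoma" subclassOf "Respiratory Tract Neoplasm" subclassOf "Neoplasm",
* getAllAncestors(lungCarcinoma) returns { "Respiratory Tract Neoplasm", "Neoplasm" },
* while getAllAncestors(neoplasm) returns null because the root term has no parents.
*/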
public Set<ModelForQuery> getModels() {
return models;
}
public void setModels(Set<ModelForQuery> models) {
this.models = models;
}
/**
* The search function accepts a Map of key-value pairs:
* key = what facet to search
* list of values = what values to filter on (using OR)
* <p>
* EX of expected data structure:
* <p>
* patient_age -> { 5-10, 20-40 },
* patient_gender -> { Male },
* sample_origin_tissue -> { Lung, Liver }
* <p>
* would yield results for male patients aged 5-10 OR 20-40 AND whose sample origin tissue is lung OR liver
*
* @param filters map of facets to the values to filter on
* @return set of models derived from filtering the complete set according to the
* filters passed in as arguments
*/
public Set<ModelForQuery> search(Map<SearchFacetName, List<String>> filters) {
Set<ModelForQuery> result = new HashSet<>(models);
// If no filters have been specified, return the complete set
if (filters == null) {
return result;
}
for (SearchFacetName facet : filters.keySet()) {
List<Predicate<ModelForQuery>> preds = new ArrayList<>();
Predicate predicate;
switch (facet) {
case datasource:
predicate = getExactMatchDisjunctionPredicate(filters.get(SearchFacetName.datasource));
result = result.stream().filter(x -> predicate.test(x.getDatasource())).collect(Collectors.toSet());
break;
case patient_age:
predicate = getExactMatchDisjunctionPredicate(filters.get(SearchFacetName.patient_age));
result = result.stream().filter(x -> predicate.test(x.getPatientAge())).collect(Collectors.toSet());
break;
case patient_treatment_status:
predicate = getExactMatchDisjunctionPredicate(filters.get(SearchFacetName.patient_treatment_status));
result = result.stream().filter(x -> predicate.test(x.getPatientTreatmentStatus())).collect(Collectors.toSet());
break;
case patient_gender:
predicate = getExactMatchDisjunctionPredicate(filters.get(SearchFacetName.patient_gender));
result = result.stream().filter(x -> predicate.test(x.getPatientGender())).collect(Collectors.toSet());
break;
case sample_origin_tissue:
predicate = getExactMatchDisjunctionPredicate(filters.get(SearchFacetName.sample_origin_tissue));
result = result.stream().filter(x -> predicate.test(x.getSampleOriginTissue())).collect(Collectors.toSet());
break;
case sample_classification:
predicate = getExactMatchDisjunctionPredicate(filters.get(SearchFacetName.sample_classification));
result = result.stream().filter(x -> predicate.test(x.getSampleClassification())).collect(Collectors.toSet());
break;
case sample_tumor_type:
predicate = getExactMatchDisjunctionPredicate(filters.get(SearchFacetName.sample_tumor_type));
result = result.stream().filter(x -> predicate.test(x.getSampleTumorType())).collect(Collectors.toSet());
break;
case model_implantation_site:
predicate = getExactMatchDisjunctionPredicate(filters.get(SearchFacetName.model_implantation_site));
result = result.stream().filter(x -> predicate.test(x.getModelImplantationSite())).collect(Collectors.toSet());
break;
case model_implantation_type:
predicate = getExactMatchDisjunctionPredicate(filters.get(SearchFacetName.model_implantation_type));
result = result.stream().filter(x -> predicate.test(x.getModelImplantationType())).collect(Collectors.toSet());
break;
case model_background_strain:
predicate = getExactMatchDisjunctionPredicate(filters.get(SearchFacetName.model_background_strain));
result = result.stream().filter(x -> predicate.test(x.getModelBackgroundStrain())).collect(Collectors.toSet());
break;
case system:
Set<ModelForQuery> toRemove = new HashSet<>();
for (ModelForQuery res : result) {
Boolean keep = Boolean.FALSE;
for (String s : filters.get(SearchFacetName.system)) {
if (res.getCancerSystem().contains(s)) {
keep = Boolean.TRUE;
}
}
if (!keep) {
toRemove.add(res);
}
}
result.removeAll(toRemove);
break;
case organ:
predicate = getExactMatchDisjunctionPredicate(filters.get(SearchFacetName.organ));
result = result.stream().filter(x -> predicate.test(x.getCancerOrgan())).collect(Collectors.toSet());
break;
case cell_type:
predicate = getExactMatchDisjunctionPredicate(filters.get(SearchFacetName.cell_type));
result = result.stream().filter(x -> predicate.test(x.getCancerCellType())).collect(Collectors.toSet());
break;
default:
// default case is an unexpected filter option
// Do not filter anything
log.info("Unrecognised facet {} passed to search, skipping.", facet);
break;
}
}
return result;
}
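/*
* Usage sketch (illustrative only): values within a facet are OR'ed, facets are AND'ed together.
* Given a SearchDS instance "searchDS":
*
*   Map<SearchFacetName, List<String>> filters = new HashMap<>();
*   filters.put(SearchFacetName.datasource, Arrays.asList("JAX", "PDMR"));
*   filters.put(SearchFacetName.patient_gender, Arrays.asList("Male"));
*   Set<ModelForQuery> hits = searchDS.search(filters);
*   // models from JAX or PDMR whose patients are male
*/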
/**
* getExactMatchDisjunctionPredicate returns a composed predicate with all the supplied filters "OR"ed together
* using an exact match
* <p>
* NOTE: This is a case sensitive match!
*
* @param filters the set of strings to match against
* @return a composed predicate matching the supplied filters exactly (case-sensitive), combined by disjunction (OR)
*/
Predicate<String> getExactMatchDisjunctionPredicate(List<String> filters) {
List<Predicate<String>> preds = new ArrayList<>();
// Iterate through the filter options passed in for this facet
for (String filter : filters) {
// Create a filter predicate for each option
Predicate<String> pred = s -> s.equals(filter);
// Store all filter options in a list
preds.add(pred);
}
// Create a "combination" predicate containing sub-predicates "OR"ed together
return preds.stream().reduce(Predicate::or).orElse(x -> false);
}
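/*
* Behaviour sketch: the per-filter predicates are OR'ed together with Predicate::or, and
* orElse(x -> false) makes an empty filter list match nothing.
*
*   Predicate<String> p = getExactMatchDisjunctionPredicate(Arrays.asList("JAX", "IRCC"));
*   p.test("JAX");   // true
*   p.test("jax");   // false - the match is case sensitive
*   p.test("PDMR");  // false
*/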
/**
* getContainsMatchDisjunctionPredicate returns a composed predicate with all the supplied filters "OR"ed together
* using a contains match
* <p>
* NOTE: This is a case insensitive match!
*
* @param filters the set of strings to match against
* @return a composed predicate matching the supplied filters case-insensitively, combined by disjunction (OR)
*/
Predicate getContainsMatchDisjunctionPredicate(List<String> filters) {
List<Predicate<String>> preds = new ArrayList<>();
// Iterate through the filter options passed in for this facet
for (String filter : filters) {
// Create a filter predicate for each option
Predicate<String> pred = s -> s.toLowerCase().contains(filter.toLowerCase());
// Store all filter options in a list
preds.add(pred);
}
// Create a "combination" predicate containing sub-predicates "OR"ed together
return preds.stream().reduce(Predicate::or).orElse(x -> false);
}
}
|
package io.spine.type;
import com.google.common.base.Predicate;
import com.google.protobuf.Descriptors.FileDescriptor;
import com.google.protobuf.Message;
import javax.annotation.Nullable;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* A common interface for command messages.
*
* <p>This interface is used by the Model Compiler for marking command messages.
* By convention, command messages are defined in a proto file whose name ends
* with {@code commands.proto}.
*
* @author Alexander Yevsyukov
*/
@SuppressWarnings("InterfaceNeverImplemented") /* See Javadoc */
public interface CommandMessage extends Message {
/**
* Constants and utilities for working with proto command files.
*/
class File {
/** Prevents instantiation of this utility class. */
private File() {
}
/**
* The name suffix for proto files containing command message declarations.
*/
public static final String SUFFIX = "commands.proto";
//TODO:2018-02-12:alexander.yevsyukov: Replace usages of this predicate with cast to
// `CommandMessage` after code generation is updated.
/**
* Returns {@code true} if the passed file defines command messages,
* {@code false} otherwise.
*/
public static final Predicate<FileDescriptor> PREDICATE = new Predicate<FileDescriptor>() {
@Override
public boolean apply(@Nullable FileDescriptor file) {
checkNotNull(file);
final String fqn = file.getName();
final boolean result = fqn.endsWith(SUFFIX);
return result;
}
};
}
}
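/*
* Usage sketch (illustrative only; "MyCommandsProto" is a hypothetical generated outer class):
*
*   FileDescriptor file = MyCommandsProto.getDescriptor();
*   boolean isCommandFile = CommandMessage.File.PREDICATE.apply(file);
*   // true only when the proto file name ends with "commands.proto"
*/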
|
package org.pdxfinder.services.ds;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.neo4j.ogm.json.JSONArray;
import org.neo4j.ogm.json.JSONException;
import org.neo4j.ogm.json.JSONObject;
import org.pdxfinder.graph.dao.DataProjection;
import org.pdxfinder.graph.repositories.DataProjectionRepository;
import org.pdxfinder.services.search.WebFacetSection;
import org.pdxfinder.services.search.WebFacetContainer;
import org.pdxfinder.services.search.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Component;
import org.springframework.util.Assert;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static java.lang.Long.parseLong;
@Component
public class SearchDS {
private final static Logger log = LoggerFactory.getLogger(SearchDS.class);
private DataProjectionRepository dataProjectionRepository;
/**
* The DS is initialized if all filters and search objects are initialized
*/
private boolean INITIALIZED = false;
/**
* A set of MFQ objects. These objects are being returned after performing a search.
*/
private Set<ModelForQuery> models;
/**
* This container defines the structure and content of the filters and tracks which filters are currently selected
*/
private WebFacetContainer webFacetContainer;
//facet key => option
private Map<String, List<FacetOption>> facetOptionMap;
// SEARCH OBJECTS:
/**
* A general one-parameter search object used when searching on a single MFQ object field
*/
private OneParamCheckboxSearch oneParamCheckboxSearch;
/**
* Three param search object for performing a search on gene mutations
*/
private ThreeParamLinkedSearch geneMutationSearch;
/**
* Two param search object for performing a search on dosing studies
*/
private TwoParamUnlinkedSearch dosingStudySearch;
/**
* Two-param linked search for searching on breast cancer biomarkers
*/
private TwoParamLinkedSearch breastCancerMarkersSearch;
public SearchDS(DataProjectionRepository dataProjectionRepository) {
Assert.notNull(dataProjectionRepository, "Data projection repository cannot be null");
this.dataProjectionRepository = dataProjectionRepository;
this.models = new HashSet<>();
}
/**
* Initializes the searchDS, creates filter structure and links search objects to them
*/
public void init(){
//INITIALIZE MODEL FOR QUERY OBJECTS FIRST
initializeModels();
//now we can use MFQ objects to get additional values for filters
webFacetContainer = new WebFacetContainer();
facetOptionMap = new HashMap<>();
WebFacetSection patientTumorSection = new WebFacetSection();
patientTumorSection.setName("PATIENT / TUMOR");
WebFacetSection pdxModelSection = new WebFacetSection();
pdxModelSection.setName("PDX MODEL");
WebFacetSection molecularDataSection = new WebFacetSection();
molecularDataSection.setName("MOLECULAR DATA");
WebFacetSection treatmentInfoSection = new WebFacetSection();
treatmentInfoSection.setName("TREATMENT INFORMATION");
//cancer by system filter def
List<FacetOption> cancerBySystemOptions = new ArrayList<>();
cancerBySystemOptions.add(new FacetOption("Breast Cancer", "Breast_Cancer"));
cancerBySystemOptions.add(new FacetOption("Cardiovascular Cancer", "Cardiovascular_Cancer"));
cancerBySystemOptions.add(new FacetOption("Connective and Soft Tissue Cancer", "Connective_and_Soft_Tissue_Cancer"));
cancerBySystemOptions.add(new FacetOption("Digestive System Cancer", "Digestive_System_Cancer"));
cancerBySystemOptions.add(new FacetOption("Endocrine Cancer", "Endocrine_Cancer"));
cancerBySystemOptions.add(new FacetOption("Eye Cancer", "Eye_Cancer"));
cancerBySystemOptions.add(new FacetOption("Head and Neck Cancer", "Head_and_Neck_Cancer"));
cancerBySystemOptions.add(new FacetOption("Hematopoietic and Lymphoid System Cancer", "Hematopoietic_and_Lymphoid_System_Cancer"));
cancerBySystemOptions.add(new FacetOption("Nervous System Cancer", "Nervous_System_Cancer"));
cancerBySystemOptions.add(new FacetOption("Peritoneal and Retroperitoneal Cancer", "Peritoneal_and_Retroperitoneal_Cancer"));
cancerBySystemOptions.add(new FacetOption("Reproductive System Cancer", "Reproductive_System_Cancer"));
cancerBySystemOptions.add(new FacetOption("Respiratory Tract Cancer", "Respiratory_Tract_Cancer"));
cancerBySystemOptions.add(new FacetOption("Thoracic Cancer", "Thoracic_Cancer"));
cancerBySystemOptions.add(new FacetOption("Skin Cancer", "Skin_Cancer"));
cancerBySystemOptions.add(new FacetOption("Urinary System Cancer", "Urinary_System_Cancer"));
cancerBySystemOptions.add(new FacetOption("Unclassified", "Unclassified"));
OneParamCheckboxFilter cancerBySystem = new OneParamCheckboxFilter("CANCER BY SYSTEM", "cancer_system", false, FilterType.OneParamFilter.get(),
cancerBySystemOptions, new ArrayList<>());
patientTumorSection.addComponent(cancerBySystem);
facetOptionMap.put("cancer_system", cancerBySystemOptions);
//tumor type filter def
List<FacetOption> tumorTypeOptions = new ArrayList<>();
tumorTypeOptions.add(new FacetOption("Primary", "Primary"));
tumorTypeOptions.add(new FacetOption("Metastatic", "Metastatic"));
tumorTypeOptions.add(new FacetOption("Recurrent", "Recurrent"));
tumorTypeOptions.add(new FacetOption("Refractory", "Refractory"));
tumorTypeOptions.add(new FacetOption("Not Specified", "Not_Specified"));
OneParamCheckboxFilter tumorType = new OneParamCheckboxFilter("TUMOR_TYPE", "sample_tumor_type", false, FilterType.OneParamFilter.get(),
tumorTypeOptions, new ArrayList<>());
patientTumorSection.addComponent(tumorType);
facetOptionMap.put("sample_tumor_type", tumorTypeOptions);
//sex filter def
List<FacetOption> patientSexOptions = new ArrayList<>();
patientSexOptions.add(new FacetOption("Male", "Male"));
patientSexOptions.add(new FacetOption("Female", "Female"));
patientSexOptions.add(new FacetOption("Not Specified", "Not_Specified"));
OneParamCheckboxFilter sex = new OneParamCheckboxFilter("SEX", "patient_gender", false, FilterType.OneParamFilter.get(),
patientSexOptions, new ArrayList<>());
patientTumorSection.addComponent(sex);
facetOptionMap.put("patient_gender", patientSexOptions);
//age filter def
List<FacetOption> ageOptions = new ArrayList<>();
ageOptions.add(new FacetOption("0-9", "0-9"));
ageOptions.add(new FacetOption("10-19", "10-19"));
ageOptions.add(new FacetOption("20-29", "20-29"));
ageOptions.add(new FacetOption("30-39", "30-39"));
ageOptions.add(new FacetOption("40-49", "40-49"));
ageOptions.add(new FacetOption("50-59", "50-59"));
ageOptions.add(new FacetOption("60-69", "60-69"));
ageOptions.add(new FacetOption("70-79", "70-79"));
ageOptions.add(new FacetOption("80-89", "80-89"));
ageOptions.add(new FacetOption("90", "90"));
ageOptions.add(new FacetOption("Not Specified", "Not_Specified"));
OneParamCheckboxFilter age = new OneParamCheckboxFilter("AGE", "patient_age", false, FilterType.OneParamFilter.get(),
ageOptions, new ArrayList<>());
patientTumorSection.addComponent(age);
facetOptionMap.put("patient_age",ageOptions);
//treatment status filter
List<FacetOption> patientTreatmentStatusOptions = new ArrayList<>();
patientTreatmentStatusOptions.add(new FacetOption("Treatment Naive", "treatment_naive"));
patientTreatmentStatusOptions.add(new FacetOption("Not Treatment Naive", "not_treatment_naive"));
patientTreatmentStatusOptions.add(new FacetOption("Not Specified", "Not_Specified"));
OneParamCheckboxFilter patientTreatmentStatus = new OneParamCheckboxFilter("TREATMENT STATUS", "patient_treatment_status", false,
FilterType.OneParamFilter.get(), patientTreatmentStatusOptions, new ArrayList<>());
patientTumorSection.addComponent(patientTreatmentStatus);
facetOptionMap.put("patient_treatment_status", patientTreatmentStatusOptions);
//datasource filter def
Set<String> datasourceSet = models.stream()
.map(ModelForQuery::getDatasource)
.collect(Collectors.toSet());
List<String> datasourceList = new ArrayList<>();
datasourceList.addAll(datasourceSet);
Collections.sort(datasourceList);
List<FacetOption> datasourceOptions = new ArrayList<>();
for(String ds : datasourceList){
datasourceOptions.add(new FacetOption(ds, ds));
}
//dataset available filter def
List<FacetOption> datasetAvailableOptions = new ArrayList<>();
datasetAvailableOptions.add(new FacetOption("Gene Mutation", "Gene_Mutation"));
datasetAvailableOptions.add(new FacetOption("Cytogenetics", "Cytogenetics"));
datasetAvailableOptions.add(new FacetOption("Dosing Studies", "Dosing_Studies"));
datasetAvailableOptions.add(new FacetOption("Patient Treatment", "Patient_Treatment"));
OneParamCheckboxFilter datasetAvailable = new OneParamCheckboxFilter("DATASET AVAILABLE", "data_available", false, FilterType.OneParamFilter.get(),
datasetAvailableOptions, new ArrayList<>());
pdxModelSection.addComponent(datasetAvailable);
facetOptionMap.put("data_available", datasetAvailableOptions);
OneParamCheckboxFilter datasource = new OneParamCheckboxFilter("DATASOURCE", "datasource", false, FilterType.OneParamFilter.get(), datasourceOptions, new ArrayList<>());
pdxModelSection.addComponent(datasource);
facetOptionMap.put("datasource", datasourceOptions);
//project filter def
Set<String> projectsSet = new HashSet<>();
for(ModelForQuery mfk : models){
if(mfk.getProjects() != null){
for(String s: mfk.getProjects()){
projectsSet.add(s);
}
}
}
List<String> projectList = new ArrayList<>(projectsSet);
Collections.sort(projectList);
//TODO: skip filter if no projects were defined?
List<FacetOption> projectOptions = new ArrayList<>();
for(String p: projectList){
projectOptions.add(new FacetOption(p, p));
}
OneParamCheckboxFilter projects = new OneParamCheckboxFilter("PROJECT", "project", false, FilterType.OneParamFilter.get(), projectOptions, new ArrayList<>());
pdxModelSection.addComponent(projects);
facetOptionMap.put("project", projectOptions);
//gene mutation filter def
//TODO: look up platforms, genes and variants
TwoParamLinkedFilter geneMutation = new TwoParamLinkedFilter("GENE MUTATION", "mutation", false, FilterType.TwoParamLinkedFilter.get(),
"GENE", "VARIANT",getMutationOptions(), getMutationAndVariantOptions(), new HashMap<>());
molecularDataSection.addComponent(geneMutation);
//Breast cancer markers
//labelIds should be alphabetically ordered (ER, HER, PR) as required by the data projection
List<FacetOption> breastCancerMarkerOptions = new ArrayList<>();
//DP> ERBB2(HER2)--ESR1(ER)--PGR(PR)
//breastCancerMarkerOptions.add(new FacetOption("HER2- ER+ PR+", "ERBB2neg_ESR1neg_PGRpos"));
//breastCancerMarkerOptions.add(new FacetOption("HER2- ER- PR-", "ERBB2neg_ESR1neg_PGRneg"));
//breastCancerMarkerOptions.add(new FacetOption("HER2- ER+ PR-", "ERBB2neg_ESR1pos_PGRneg"));
//breastCancerMarkerOptions.add(new FacetOption("HER2+ ER+ PR+", "ERBB2pos_ESR1pos_PGRpos"));
//breastCancerMarkerOptions.add(new FacetOption("HER2+ ER- PR-", "ERBB2pos_ESR1neg_PGRneg"));
//breastCancerMarkerOptions.add(new FacetOption("HER2+ ER- PR+", "ERneg_HER2pos_PRpos"));
//breastCancerMarkerOptions.add(new FacetOption("HER2+ ER+ PR-", "ERpos_HER2pos_PRneg"));
//breastCancerMarkerOptions.add(new FacetOption("HER2- ER- PR+", "ERneg_HER2neg_PRpos"));
breastCancerMarkerOptions.add(new FacetOption("HER2/ERBB2 negative", "ERBB2neg"));
breastCancerMarkerOptions.add(new FacetOption("HER2/ERBB2 positive", "ERBB2pos"));
breastCancerMarkerOptions.add(new FacetOption("ER/ESR1 negative", "ESR1neg"));
breastCancerMarkerOptions.add(new FacetOption("ER/ESR1 positive", "ESR1pos"));
breastCancerMarkerOptions.add(new FacetOption("PR/PGR negative", "PGRneg"));
breastCancerMarkerOptions.add(new FacetOption("PR/PGR positive", "PGRpos"));
OneParamCheckboxFilter breastCancerMarkers = new OneParamCheckboxFilter("BREAST CANCER BIOMARKERS", "breast_cancer_markers", false, FilterType.OneParamFilter.get(),
breastCancerMarkerOptions, new ArrayList<>());
molecularDataSection.addComponent(breastCancerMarkers);
facetOptionMap.put("breast_cancer_markers",breastCancerMarkerOptions);
//model dosing study def
Map<String, Map<String, Set<Long>>> modelDrugResponses = getModelDrugResponsesFromDP();
List<String> drugNames = new ArrayList<>(modelDrugResponses.keySet());
TwoParamUnlinkedFilter modelDosingStudy = new TwoParamUnlinkedFilter("MODEL DOSING STUDY", "drug", false, FilterType.TwoParamUnlinkedFilter.get(), "DRUG", "RESPONSE", drugNames, Arrays.asList(
"Complete Response",
"Partial Response",
"Progressive Disease",
"Stable Disease",
"Stable Disease And Complete Response"
), new HashMap<>());
treatmentInfoSection.addComponent(modelDosingStudy);
webFacetContainer.addSection(pdxModelSection);
webFacetContainer.addSection(molecularDataSection);
webFacetContainer.addSection(treatmentInfoSection);
webFacetContainer.addSection(patientTumorSection);
//one general search object for searching on MFQ object fields
oneParamCheckboxSearch = new OneParamCheckboxSearch(null, null);
//drug search
dosingStudySearch = new TwoParamUnlinkedSearch();
dosingStudySearch.setData(getModelDrugResponsesFromDP());
//gene mutation search
//the gene mutation filter is a TwoParamLinkedFilter component, but a ThreeParamLinkedSearch must be used because of the hidden platform labelId
geneMutationSearch = new ThreeParamLinkedSearch("geneMutation", "mutation");
geneMutationSearch.setData(getMutationsFromDP());
//breast cancer markers search initialization
breastCancerMarkersSearch = new TwoParamLinkedSearch("breastCancerMarkers", "breast_cancer_markers");
breastCancerMarkersSearch.setData(getBreastCancerMarkersFromDP());
INITIALIZED = true;
}
public WebFacetContainer getUpdatedSelectedFilters(Map<SearchFacetName, List<String>> filters){
//use a clone to avoid keeping filters from previous iterations
WebFacetContainer webFacetContainerClone = new WebFacetContainer();
List<WebFacetSection> sections = new ArrayList<>(webFacetContainer.getWebFacetSections());
webFacetContainerClone.setWebFacetSections(sections);
//reset all previously selected fields and make the component inactive
for(WebFacetSection wfs :webFacetContainerClone.getWebFacetSections()){
for(GeneralFilter filter: wfs.getFilterComponents()){
filter.setActive(false);
if(filter instanceof OneParamCheckboxFilter){
OneParamCheckboxFilter f = (OneParamCheckboxFilter)filter;
f.setSelected(new ArrayList<>());
}
else if(filter instanceof TwoParamUnlinkedFilter){
TwoParamUnlinkedFilter f = (TwoParamUnlinkedFilter) filter;
f.setSelected(new HashMap<>());
}
else if(filter instanceof TwoParamLinkedFilter){
TwoParamLinkedFilter f = (TwoParamLinkedFilter) filter;
f.setSelected(new HashMap<>());
}
}
}
//loop through the selected filters, make them active and initialize their selected list/map
for(Map.Entry<SearchFacetName, List<String>> facet: filters.entrySet()){
String facetName = facet.getKey().getName();
List<String> selected = facet.getValue();
List<String> decodedSelected = new ArrayList<>();
//if options are defined for this facet, keep only the selected values that match a known option labelId
if(facetOptionMap.get(facetName) != null){
for(FacetOption fo: facetOptionMap.get(facetName)){
if(selected.contains(fo.getLabelId()))
decodedSelected.add(fo.getLabelId());
}
}
//no overwrite rule
else{
decodedSelected = selected;
}
for(WebFacetSection wfs :webFacetContainerClone.getWebFacetSections()){
for(GeneralFilter filter: wfs.getFilterComponents()){
if(filter.getUrlParam().equals(facetName)){
filter.setActive(true);
if(filter instanceof OneParamCheckboxFilter){
OneParamCheckboxFilter f = (OneParamCheckboxFilter)filter;
f.setSelected(decodedSelected);
}
else if(filter instanceof TwoParamUnlinkedFilter){
TwoParamUnlinkedFilter f = (TwoParamUnlinkedFilter) filter;
Map<String,List<String>> selectedMap = new HashMap<>();
for(String opt:decodedSelected){
String[] optArr = opt.split("___");
if(selectedMap.containsKey(optArr[0])){
selectedMap.get(optArr[0]).add(optArr[1]);
}
else{
List<String> arrList = new ArrayList<>();
arrList.add(optArr[1]);
selectedMap.put(optArr[0], arrList);
}
}
f.setSelected(selectedMap);
}
else if(filter instanceof TwoParamLinkedFilter){
TwoParamLinkedFilter f = (TwoParamLinkedFilter) filter;
Map<String,List<String>> selectedMap = new HashMap<>();
for(String opt:decodedSelected){
String[] optArr = opt.split("___");
if(selectedMap.containsKey(optArr[0])){
selectedMap.get(optArr[0]).add(optArr[1]);
}
else{
List<String> arrList = new ArrayList<>();
arrList.add(optArr[1]);
selectedMap.put(optArr[0], arrList);
}
}
f.setSelected(selectedMap);
}
}
}
}
}
return webFacetContainerClone;
}
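/*
* Decoding sketch for two-param filters (option values are hypothetical): a selected value of the
* form "KEY___VALUE" is split on "___" and accumulated into a map, so
*   ["Doxorubicin___Complete Response", "Doxorubicin___Stable Disease"]
* becomes
*   { "Doxorubicin" -> ["Complete Response", "Stable Disease"] }
*/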
public Set<ModelForQuery> search(Map<SearchFacetName, List<String>> filters){
synchronized (this){
if(! INITIALIZED ) {
init();
}
}
Set<ModelForQuery> result = new HashSet<>(models);
//empty previously set variants
result.forEach(x -> x.setMutatedVariants(new ArrayList<>()));
//empty previously set drugs
result.forEach(x -> x.setDrugWithResponse(new ArrayList<>()));
//reset breast cancer markers
result.forEach(x ->x.setBreastCancerMarkers(new ArrayList<>()));
// If no filters have been specified, return the complete set
if (filters == null) {
return result;
}
OneParamCheckboxSearch oneParamCheckboxSearch = new OneParamCheckboxSearch("search","search");
for (SearchFacetName facet : filters.keySet()) {
log.info("Models:"+result.size()+" before applying filter: "+facet.getName());
switch(facet){
case query:
//List<String> searchParams, Set<ModelForQuery> mfqSet, Function<ModelForQuery, List<String>> searchFunc
result = oneParamCheckboxSearch.searchOnCollection(facetOptionMap.get("query"), filters.get(SearchFacetName.query), result, ModelForQuery::getAllOntologyTermAncestors);
break;
case datasource:
result = oneParamCheckboxSearch.searchOnString(facetOptionMap.get("datasource"), filters.get(SearchFacetName.datasource), result, ModelForQuery::getDatasource);
break;
case diagnosis:
result = oneParamCheckboxSearch.searchOnString(facetOptionMap.get("diagnosis"), filters.get(SearchFacetName.diagnosis), result, ModelForQuery::getMappedOntologyTerm);
break;
case patient_age:
result = oneParamCheckboxSearch.searchOnString(facetOptionMap.get("patient_age"), filters.get(SearchFacetName.patient_age), result, ModelForQuery::getPatientAge);
break;
case patient_treatment_status:
result = oneParamCheckboxSearch.searchOnString(facetOptionMap.get("patient_treatment_status"), filters.get(SearchFacetName.patient_treatment_status), result, ModelForQuery::getPatientTreatmentStatus);
break;
case patient_gender:
result = oneParamCheckboxSearch.searchOnString(facetOptionMap.get("patient_gender"), filters.get(SearchFacetName.patient_gender), result, ModelForQuery::getPatientGender);
break;
case sample_origin_tissue:
result = oneParamCheckboxSearch.searchOnString(facetOptionMap.get("sample_origin_tissue"), filters.get(SearchFacetName.sample_origin_tissue), result, ModelForQuery::getSampleOriginTissue);
break;
case sample_classification:
result = oneParamCheckboxSearch.searchOnString(facetOptionMap.get("sample_classification"), filters.get(SearchFacetName.sample_classification), result, ModelForQuery::getSampleClassification);
break;
case sample_tumor_type:
result = oneParamCheckboxSearch.searchOnString(facetOptionMap.get("sample_tumor_type"), filters.get(SearchFacetName.sample_tumor_type), result, ModelForQuery::getSampleTumorType);
break;
case model_implantation_site:
result = oneParamCheckboxSearch.searchOnString(facetOptionMap.get("model_implantation_site"), filters.get(SearchFacetName.model_implantation_site), result, ModelForQuery::getModelImplantationSite);
break;
case model_implantation_type:
result = oneParamCheckboxSearch.searchOnString(facetOptionMap.get("model_implantation_type"), filters.get(SearchFacetName.model_implantation_type), result, ModelForQuery::getModelImplantationSite);
break;
case model_host_strain:
result = oneParamCheckboxSearch.searchOnCollection(facetOptionMap.get("model_host_strain"), filters.get(SearchFacetName.model_host_strain), result, ModelForQuery::getModelHostStrain);
break;
case cancer_system:
result = oneParamCheckboxSearch.searchOnCollection(facetOptionMap.get("cancer_system"), filters.get(SearchFacetName.cancer_system), result, ModelForQuery::getCancerSystem);
break;
case organ:
result = oneParamCheckboxSearch.searchOnString(facetOptionMap.get("organ"), filters.get(SearchFacetName.organ), result, ModelForQuery::getCancerOrgan);
break;
case cell_type:
result = oneParamCheckboxSearch.searchOnString(facetOptionMap.get("cell_type"), filters.get(SearchFacetName.cell_type), result, ModelForQuery::getCancerCellType);
break;
case project:
result = oneParamCheckboxSearch.searchOnCollection(facetOptionMap.get("project"), filters.get(SearchFacetName.project), result, ModelForQuery::getProjects);
break;
case data_available:
result = oneParamCheckboxSearch.searchOnCollection(facetOptionMap.get("data_available"), filters.get(SearchFacetName.data_available), result, ModelForQuery::getDataAvailable);
break;
case mutation:
result = geneMutationSearch.search(filters.get(SearchFacetName.mutation), result, ModelForQuery::addMutatedVariant);
break;
case drug:
result = dosingStudySearch.search(filters.get(SearchFacetName.drug), result, ModelForQuery::addDrugWithResponse);
break;
case breast_cancer_markers:
result = breastCancerMarkersSearch.search(filters.get(SearchFacetName.breast_cancer_markers), result, ModelForQuery::addBreastCancerMarkers, ComparisonOperator.AND);
break;
default:
//unexpected filter option
log.warn("Unrecognised facet {} passed to search, skipping", facet.getName());
break;
}
log.info("After applying filter: "+result.size());
}
return result;
}
public Set<ModelForQuery> getModels() {
synchronized (this){
if(! INITIALIZED ) {
init();
}
}
return models;
}
public void setModels(Set<ModelForQuery> models) {
this.models = models;
}
/**
* This method loads the ModelForQuery Data Projection object and initializes the models
*/
private void initializeModels() {
String modelJson = dataProjectionRepository.findByLabel("ModelForQuery").getValue();
try {
JSONArray jarray = new JSONArray(modelJson);
for (int i = 0; i < jarray.length(); i++) {
JSONObject j = jarray.getJSONObject(i);
ModelForQuery mfq = new ModelForQuery();
mfq.setModelId(parseLong(j.getString("modelId")));
mfq.setDatasource(j.getString("datasource"));
mfq.setExternalId(j.getString("externalId"));
if(j.has("patientAge")){
mfq.setPatientAge(j.getString("patientAge"));
}
else{
mfq.setPatientAge("Not Specified");
}
mfq.setPatientGender(j.getString("patientGender"));
if(j.has("patientEthnicity")){
mfq.setPatientEthnicity(j.getString("patientEthnicity"));
}
mfq.setSampleOriginTissue(j.getString("sampleOriginTissue"));
mfq.setSampleSampleSite(j.getString("sampleSampleSite"));
mfq.setSampleExtractionMethod(j.getString("sampleExtractionMethod"));
//mfq.setSampleClassification(j.getString("sampleClassification"));
mfq.setSampleTumorType(j.getString("sampleTumorType"));
mfq.setDiagnosis(j.getString("diagnosis"));
mfq.setMappedOntologyTerm(j.getString("mappedOntologyTerm"));
if(j.has("patientTreatmentStatus")){
mfq.setPatientTreatmentStatus(j.getString("patientTreatmentStatus"));
}
JSONArray ja = j.getJSONArray("cancerSystem");
List<String> cancerSystem = new ArrayList<>();
for (int k = 0; k < ja.length(); k++) {
cancerSystem.add(ja.getString(k));
}
mfq.setCancerSystem(cancerSystem);
ja = j.getJSONArray("allOntologyTermAncestors");
Set<String> ancestors = new HashSet<>();
for (int k = 0; k < ja.length(); k++) {
ancestors.add(ja.getString(k));
}
mfq.setAllOntologyTermAncestors(ancestors);
if(j.has("dataAvailable")){
ja = j.getJSONArray("dataAvailable");
List<String> dataAvailable = new ArrayList<>();
for(int k=0; k<ja.length(); k++){
dataAvailable.add(ja.getString(k));
}
mfq.setDataAvailable(dataAvailable);
}
if(j.has("projects")){
ja = j.getJSONArray("projects");
for(int k = 0; k < ja.length(); k++){
mfq.addProject(ja.getString(k));
}
}
if(j.has("accessModalities")){
mfq.setAccessModalities(j.getString("accessModalities"));
}
else{
mfq.setAccessModalities("");
}
this.models.add(mfq);
}
} catch (JSONException e) {
e.printStackTrace();
}
}
private Map<String, Map<String, Map<String, Set<Long>>>> getMutationsFromDP(){
log.info("Initializing mutations");
//platform=> marker=> variant=>{set of model ids}
Map<String, Map<String, Map<String, Set<Long>>>> mutations = new HashMap<>();
String mut = dataProjectionRepository.findByLabel("PlatformMarkerVariantModel").getValue();
try{
ObjectMapper mapper = new ObjectMapper();
mutations = mapper.readValue(mut, new TypeReference<Map<String, Map<String, Map<String, Set<Long>>>>>(){});
//log.info("Lookup: "+mutations.get("TargetedNGS_MUT").get("RB1").get("N123D").toString());
}
catch(Exception e){
e.printStackTrace();
}
return mutations;
}
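/*
* Shape of the deserialized "PlatformMarkerVariantModel" projection, i.e.
* platform -> marker -> variant -> set of model ids (model ids are illustrative):
*
*   {
*     "TargetedNGS_MUT": {
*       "RB1": { "N123D": [101, 102] }
*     }
*   }
*/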
private Map<String, List<String>> getMutationAndVariantOptions(){
Map<String,Set<String>> tempResults = getMutationOptionsFromDP();
Map<String, List<String>> resultMap = new HashMap<>();
for(Map.Entry<String, Set<String>> entry : tempResults.entrySet()){
resultMap.put(entry.getKey(), new ArrayList<>(new TreeSet<>(entry.getValue())));
}
return resultMap;
}
private List<String> getMutationOptions(){
Map<String,Set<String>> tempResults = getMutationOptionsFromDP();
List<String> resultList = new ArrayList<>();
for(Map.Entry<String, Set<String>> entry : tempResults.entrySet()){
resultList.add(entry.getKey());
}
return resultList;
}
private Map<String, Set<String>> getMutationOptionsFromDP(){
Map<String, Map<String, Map<String, Set<Long>>>> mutations = getMutationsFromDP();
Map<String,Set<String>> tempResults = new HashMap<>();
for(Map.Entry<String, Map<String, Map<String, Set<Long>>>> platform:mutations.entrySet()){
for(Map.Entry<String, Map<String, Set<Long>>> marker:platform.getValue().entrySet()){
for(Map.Entry<String, Set<Long>> variant:marker.getValue().entrySet()){
String m = marker.getKey();
String v = variant.getKey();
if(tempResults.containsKey(m)){
tempResults.get(m).add(v);
}
else{
Set<String> set = new HashSet<>();
set.add(v);
tempResults.put(m, set);
}
}
}
}
return tempResults;
}
private Map<String, Map<String, Set<Long>>> getModelDrugResponsesFromDP(){
log.info("Initializing model drug responses");
Map<String, Map<String, Set<Long>>> modelDrugResponses = new HashMap<>();
DataProjection dataProjection = dataProjectionRepository.findByLabel("ModelDrugData");
String responses = "{}";
if(dataProjection != null){
responses = dataProjection.getValue();
}
try{
ObjectMapper mapper = new ObjectMapper();
modelDrugResponses = mapper.readValue(responses, new TypeReference<Map<String, Map<String, Set<Long>>>>(){});
//log.info("Lookup: "+modelDrugResponses.get("doxorubicincyclophosphamide").get("progressive disease").toString());
}
catch(Exception e){
e.printStackTrace();
}
return modelDrugResponses;
}
private Map<String, Map<String, Set<Long>>> getBreastCancerMarkersFromDP(){
log.info("Initializing breast cancer markers ");
Map<String, Map<String, Set<Long>>> data = new HashMap<>();
DataProjection dataProjection = dataProjectionRepository.findByLabel("cytogenetics");
String responses = "{}";
if(dataProjection != null){
responses = dataProjection.getValue();
}
try{
ObjectMapper mapper = new ObjectMapper();
data = mapper.readValue(responses, new TypeReference<Map<String, Map<String, Set<Long>>>>(){});
}
catch(Exception e){
e.printStackTrace();
}
return data;
}
/**
* getExactMatchDisjunctionPredicate returns a composed predicate with all the supplied filters "OR"ed together
* using an exact match
* <p>
* NOTE: This is a case sensitive match!
*
* @param filters the set of strings to match against
* @return a composed predicate matching the supplied filters exactly (case-sensitive), combined by disjunction (OR)
*/
Predicate<String> getExactMatchDisjunctionPredicate(List<String> filters) {
List<Predicate<String>> preds = new ArrayList<>();
// Iterate through the filter options passed in for this facet
for (String filter : filters) {
// Create a filter predicate for each option
Predicate<String> pred = s -> s.equals(filter);
// Store all filter options in a list
preds.add(pred);
}
// Create a "combination" predicate containing sub-predicates "OR"ed together
return preds.stream().reduce(Predicate::or).orElse(x -> false);
}
/**
* getContainsMatchDisjunctionPredicate returns a composed predicate with all the supplied filters "OR"ed together
* using a contains match
* <p>
* NOTE: This is a case insensitive match!
*
* @param filters the set of strings to match against
* @return a composed predicate matching the supplied filters case-insensitively, combined by disjunction (OR)
*/
Predicate<String> getContainsMatchDisjunctionPredicate(List<String> filters) {
List<Predicate<String>> preds = new ArrayList<>();
// Iterate through the filter options passed in for this facet
for (String filter : filters) {
// Create a filter predicate for each option
Predicate<String> pred = s -> s.toLowerCase().contains(filter.toLowerCase());
// Store all filter options in a list
preds.add(pred);
}
// Create a "combination" predicate containing sub-predicates "OR"ed together
return preds.stream().reduce(Predicate::or).orElse(x -> false);
}
/*
public List<FacetOption> getFacetOptions(SearchFacetName facet,List<String> options, Map<SearchFacetName, List<String>> configuredFacets){
List<FacetOption> facetOptions = new ArrayList<>();
for(String s : options){
FacetOption fo = new FacetOption(s, 0);
fo.setSelected(false);
facetOptions.add(fo);
}
if(configuredFacets.containsKey(facet)){
List<String> selectedFacets = configuredFacets.get(facet);
for(String sf : selectedFacets){
for(FacetOption fo : facetOptions){
if(fo.getName().equals(sf)){
fo.setSelected(true);
}
}
}
}
return facetOptions;
}
*/
/**
* Get the count of models for each diagnosis (including children).
* <p>
* This method will return counts of facet options for the supplied facet
*
* @return a Map of k: diagnosis v: count
*/
@Cacheable("diagnosis_counts")
public Map<String, Integer> getDiagnosisCounts() {
Set<ModelForQuery> allResults = models;
Map<String, Integer> map = new HashMap<>();
// Get the list of diagnoses
Set<String> allDiagnoses = allResults.stream().map(ModelForQuery::getMappedOntologyTerm).collect(Collectors.toSet());
// For each diagnosis, match all results using the same search technique as "query"
for (String diagnosis : allDiagnoses) {
Predicate<String> predicate = getContainsMatchDisjunctionPredicate(Arrays.asList(diagnosis));
// Long i = allResults.stream().map(x -> x.getAllOntologyTermAncestors().stream().filter(predicate).collect(Collectors.toSet())).map(x->((Set)x)).filter(x->x.size()>0).distinct().count();
Long i = allResults.stream()
.filter(x -> x.getAllOntologyTermAncestors().stream().anyMatch(predicate))
.distinct().count();
// Long i = allResults.stream().filter(x -> x.getAllOntologyTermAncestors().contains(diagnosis)).distinct().count();
map.put(diagnosis, i.intValue());
}
return map;
}
}
|
package com.cloud.api.commands;
import org.apache.log4j.Logger;
import com.cloud.api.BaseCmd;
import com.cloud.api.BaseCmd.Manager;
import com.cloud.api.Implementation;
import com.cloud.api.Parameter;
import com.cloud.api.ServerApiException;
import com.cloud.api.response.SuccessResponse;
import com.cloud.storage.DiskOfferingVO;
@Implementation(method="updateDiskOffering", manager=Manager.ConfigManager, description="Updates a disk offering.")
public class UpdateDiskOfferingCmd extends BaseCmd{
public static final Logger s_logger = Logger.getLogger(UpdateDiskOfferingCmd.class.getName());
private static final String s_name = "updatediskofferingresponse";
//////////////// API parameters /////////////////////
@Parameter(name="displaytext", type=CommandType.STRING, description="updates alternate display text of the disk offering with this value")
private String displayText;
@Parameter(name="id", type=CommandType.LONG, required=true, description="ID of the disk offering")
private Long id;
@Parameter(name="name", type=CommandType.STRING, description="updates name of the disk offering with this value")
private String diskOfferingName;
@Parameter(name="tags", type=CommandType.STRING, description="update tags of the disk offering with this value")
private String tags;
/////////////////// Accessors ///////////////////////
public String getDisplayText() {
return displayText;
}
public Long getId() {
return id;
}
public String getDiskOfferingName() {
return diskOfferingName;
}
public String getTags() {
return tags;
}
/////////////// API Implementation///////////////////
@Override
public String getName() {
return s_name;
}
@SuppressWarnings("unchecked")
public SuccessResponse getResponse() {
SuccessResponse response = new SuccessResponse();
DiskOfferingVO responseObject = (DiskOfferingVO)getResponseObject();
if (responseObject != null) {
response.setSuccess(Boolean.TRUE);
} else {
throw new ServerApiException(BaseCmd.INTERNAL_ERROR, "Failed to update disk offering");
}
response.setResponseName(getName());
return response;
}
}
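/*
* Example request sketch (illustrative; the exact endpoint and command dispatch depend on the
* API servlet configuration):
*
*   GET /client/api?command=updateDiskOffering&id=5&name=SmallDisk&displaytext=Small+disk&tags=ssd
*
* "id" is required; "name", "displaytext" and "tags" update the corresponding fields of the
* disk offering.
*/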
|
package com.dmdirc.addons.ui_swing.components.frames;
import com.dmdirc.WritableFrameContainer;
import com.dmdirc.addons.ui_swing.SwingController;
import com.dmdirc.addons.ui_swing.UIUtilities;
import com.dmdirc.addons.ui_swing.actions.CommandAction;
import com.dmdirc.addons.ui_swing.actions.CopyAction;
import com.dmdirc.addons.ui_swing.actions.CutAction;
import com.dmdirc.addons.ui_swing.actions.InputFieldCopyAction;
import com.dmdirc.addons.ui_swing.actions.InputTextFramePasteAction;
import com.dmdirc.addons.ui_swing.components.SwingInputField;
import com.dmdirc.addons.ui_swing.dialogs.paste.PasteDialog;
import com.dmdirc.commandparser.PopupManager;
import com.dmdirc.commandparser.PopupMenu;
import com.dmdirc.commandparser.PopupMenuItem;
import com.dmdirc.commandparser.PopupType;
import com.dmdirc.config.ConfigManager;
import com.dmdirc.interfaces.AwayStateListener;
import com.dmdirc.logger.ErrorLevel;
import com.dmdirc.logger.Logger;
import com.dmdirc.ui.input.InputHandler;
import com.dmdirc.ui.interfaces.InputWindow;
import java.awt.BorderLayout;
import java.awt.Point;
import java.awt.Toolkit;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.io.IOException;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JSeparator;
import javax.swing.KeyStroke;
import javax.swing.event.InternalFrameEvent;
import net.miginfocom.layout.PlatformDefaults;
/**
* Frame with an input field.
*/
public abstract class InputTextFrame extends TextFrame implements InputWindow,
AwayStateListener, MouseListener {
/**
* A version number for this class. It should be changed whenever the class
* structure is changed (or anything else that would prevent serialized
* objects being deserialized with the new class).
*/
private static final long serialVersionUID = 2;
/** Input field panel. */
protected JPanel inputPanel;
/** Away label. */
protected JLabel awayLabel;
/** The InputHandler for our input field. */
private InputHandler inputHandler;
/** Frame input field. */
private SwingInputField inputField;
/** Popupmenu for this frame. */
private JPopupMenu inputFieldPopup;
/** Nick popup menu. */
protected JPopupMenu nickPopup;
/** Away indicator. */
private boolean useAwayIndicator;
/**
* Creates a new instance of InputTextFrame.
*
* @param owner WritableFrameContainer owning this frame.
* @param controller Swing controller
*/
public InputTextFrame(final SwingController controller,
final WritableFrameContainer<? extends InputWindow> owner) {
super(owner, controller);
initComponents();
final ConfigManager config = owner.getConfigManager();
if (!UIUtilities.isGTKUI()) {
//GTK users appear to dislike choice, ignore them if they want some.
getInputField().setBackground(config.getOptionColour(
"ui", "inputbackgroundcolour",
"ui", "backgroundcolour"));
getInputField().setForeground(config.getOptionColour(
"ui", "inputforegroundcolour",
"ui", "foregroundcolour"));
getInputField().setCaretColor(config.getOptionColour(
"ui", "inputforegroundcolour",
"ui", "foregroundcolour"));
}
useAwayIndicator = config.getOptionBool("ui", "awayindicator");
config.addChangeListener("ui", "inputforegroundcolour", this);
config.addChangeListener("ui", "inputbackgroundcolour", this);
config.addChangeListener("ui", "awayindicator", this);
if (getContainer().getServer() != null) {
getContainer().getServer().addAwayStateListener(this);
}
getInputField().getTextField().getInputMap().put(KeyStroke.getKeyStroke(
KeyEvent.VK_C, UIUtilities.getCtrlMask()), "textpaneCopy");
getInputField().getTextField().getActionMap().put("textpaneCopy",
new InputFieldCopyAction(getTextPane(),
getInputField().getTextField()));
}
/**
* Initialises the components for this frame.
*/
private void initComponents() {
setInputField(new SwingInputField(getController().getMainFrame()));
getInputField().addMouseListener(this);
initPopupMenu();
nickPopup = new JPopupMenu();
awayLabel = new JLabel();
awayLabel.setText("(away)");
awayLabel.setVisible(false);
inputPanel = new JPanel(new BorderLayout(
(int) PlatformDefaults.getUnitValueX("related").getValue(),
(int) PlatformDefaults.getUnitValueX("related").getValue()));
inputPanel.add(awayLabel, BorderLayout.LINE_START);
inputPanel.add(inputField, BorderLayout.CENTER);
initInputField();
}
/** Initialises the popupmenu. */
private void initPopupMenu() {
inputFieldPopup = new JPopupMenu();
inputFieldPopup.add(new CutAction(getInputField().getTextField()));
inputFieldPopup.add(new CopyAction(getInputField().getTextField()));
inputFieldPopup.add(new InputTextFramePasteAction(this));
inputFieldPopup.setOpaque(true);
inputFieldPopup.setLightWeightPopupEnabled(true);
}
/**
* Initialises the input field.
*/
private void initInputField() {
UIUtilities.addUndoManager(getInputField().getTextField());
getInputField().getActionMap().put("paste",
new InputTextFramePasteAction(this));
getInputField().getInputMap(WHEN_FOCUSED).put(KeyStroke.getKeyStroke(
"shift INSERT"),
"paste");
getInputField().getInputMap(WHEN_FOCUSED).put(KeyStroke.getKeyStroke(
"ctrl V"),
"paste");
}
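// Note on the bindings above: the InputMap/ActionMap pair routes key strokes to named
// actions, so both Shift+Insert and Ctrl+V resolve to the same "paste" key, which is
// backed by InputTextFramePasteAction; in the constructor, Ctrl+C on the text field is
// routed the same way to the shared "textpaneCopy" action backed by InputFieldCopyAction.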
/**
* Returns the container associated with this frame.
*
* @return This frame's container.
*/
@Override
@SuppressWarnings("unchecked")
public WritableFrameContainer<? extends InputWindow> getContainer() {
return (WritableFrameContainer<? extends InputWindow>) super.getContainer();
}
/**
* Returns the input handler associated with this frame.
*
* @return Input handler for this frame
*/
@Override
public final InputHandler getInputHandler() {
return inputHandler;
}
/**
* Sets the input handler for this frame.
*
* @param newInputHandler input handler to set for this frame
*/
public final void setInputHandler(final InputHandler newInputHandler) {
this.inputHandler = newInputHandler;
inputHandler.addValidationListener(inputField);
inputHandler.setTabCompleter(((WritableFrameContainer<?>) frameParent)
.getTabCompleter());
}
/**
* Returns the input field for this frame.
*
* @return SwingInputField input field for the frame.
*/
public final SwingInputField getInputField() {
return inputField;
}
/**
* Sets the frame's input field.
*
* @param newInputField new input field to use
*/
protected final void setInputField(final SwingInputField newInputField) {
this.inputField = newInputField;
}
/**
* Returns the away label for this server connection.
*
* @return JLabel away label
*/
public JLabel getAwayLabel() {
return awayLabel;
}
/**
* Sets the away indicator on or off.
*
* @param awayState away state
*
* @deprecated Use {@link AwayStateListener}s to listen for changes instead
*/
@Override
@Deprecated
public void setAwayIndicator(final boolean awayState) {
//Ignore
}
/**
* {@inheritDoc}
*
* @param mouseEvent Mouse event
*/
@Override
public void mouseClicked(final MouseEvent mouseEvent) {
if (mouseEvent.getSource() == getTextPane()) {
processMouseEvent(mouseEvent);
}
}
/**
* {@inheritDoc}
*
* @param mouseEvent Mouse event
*/
@Override
public void mousePressed(final MouseEvent mouseEvent) {
processMouseEvent(mouseEvent);
}
/**
* {@inheritDoc}
*
* @param mouseEvent Mouse event
*/
@Override
public void mouseReleased(final MouseEvent mouseEvent) {
processMouseEvent(mouseEvent);
}
/**
* {@inheritDoc}
*
* @param mouseEvent Mouse event
*/
@Override
public void mouseExited(final MouseEvent mouseEvent) {
//Ignore
}
/**
* {@inheritDoc}
*
* @param mouseEvent Mouse event
*/
@Override
public void mouseEntered(final MouseEvent mouseEvent) {
//Ignore
}
/**
* Processes every mouse button event to check for a popup trigger.
*
* @param e mouse event
*/
@Override
public void processMouseEvent(final MouseEvent e) {
if (e.isPopupTrigger() && e.getSource() == getInputField()) {
final Point point = getInputField().getMousePosition();
if (point != null) {
initPopupMenu();
inputFieldPopup.show(this, (int) point.getX(),
(int) point.getY() + getTextPane().getHeight() + (int)
PlatformDefaults.getUnitValueX("related").getValue());
}
}
}
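// processMouseEvent() above rebuilds the popup on every trigger (so it always reflects
// the current actions) and shows it relative to this frame, offset downwards by the
// text pane height plus the platform "related" gap, which places it roughly at the
// mouse position over the input field in frame coordinates.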
/** Checks and pastes text. */
public void doPaste() {
String clipboard = null;
try {
if (!Toolkit.getDefaultToolkit().getSystemClipboard().
isDataFlavorAvailable(DataFlavor.stringFlavor)) {
return;
}
} catch (IllegalStateException ex) {
Logger.userError(ErrorLevel.LOW, "Unable to paste from clipboard.");
return;
}
try {
//get the contents of the input field and combine it with the clipboard
clipboard = (String) Toolkit.getDefaultToolkit().
getSystemClipboard().getData(DataFlavor.stringFlavor);
doPaste(clipboard);
} catch (IOException ex) {
Logger.userError(ErrorLevel.LOW, "Unable to get clipboard contents: " + ex.
getMessage());
} catch (UnsupportedFlavorException ex) {
Logger.userError(ErrorLevel.LOW, "Unsupported clipboard type", ex);
}
}
/**
* Pastes the specified content into the input area.
*
* @param clipboard The contents of the clipboard to be pasted
* @since 0.6.3m1
*/
protected void doPaste(final String clipboard) {
final String inputFieldText = getInputField().getText();
//Get the text that would result from the paste (inputfield
//- selection + clipboard)
final String text = inputFieldText.substring(0, getInputField()
.getSelectionStart()) + clipboard + inputFieldText.substring(
getInputField().getSelectionEnd());
final String[] clipboardLines = getSplitLine(text);
//check whether there's more than one line to paste
if (clipboardLines.length > 1) {
//Clear the input field
inputField.setText("");
final Integer pasteTrigger = getContainer().getConfigManager()
.getOptionInt("ui", "pasteProtectionLimit");
//check whether the number of lines is over the limit
if (pasteTrigger != null && getContainer().getNumLines(text)
> pasteTrigger) {
//show the multi line paste dialog
new PasteDialog(this, text, getController().getMainFrame()).
display();
} else {
//send the lines
for (String clipboardLine : clipboardLines) {
getContainer().sendLine(clipboardLine);
}
}
} else {
//put clipboard text in input field
inputField.replaceSelection(clipboard);
}
}
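// Illustration of the doPaste(String) flow above, assuming a hypothetical
// "ui.pasteProtectionLimit" of 2: pasting "a\nb\nc" into an empty input field yields
// three lines, which exceeds the limit, so the multi-line PasteDialog is shown;
// pasting "a\nb" sends both lines directly; pasting "abc" (a single line) simply
// replaces the current selection in the input field.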
/**
* Splits the line on all line endings.
*
* @param line Line that will be split
*
* @return Split line array
*/
private String[] getSplitLine(final String line) {
return line.replace("\r\n", "\n").replace('\r', '\n').split("\n");
}
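// For example, getSplitLine("one\r\ntwo\rthree") first normalises both Windows (\r\n)
// and old Mac (\r) line endings to \n and then splits, giving {"one", "two", "three"}.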
/** {@inheritDoc} */
@Override
public void configChanged(final String domain, final String key) {
super.configChanged(domain, key);
if ("ui".equals(domain) && getContainer().getConfigManager() != null) {
if (getInputField() != null) {
if ("inputbackgroundcolour".equals(key) || "backgroundcolour".
equals(key)) {
getInputField().setBackground(getContainer()
.getConfigManager().getOptionColour(
"ui", "inputbackgroundcolour",
"ui", "backgroundcolour"));
} else if ("inputforegroundcolour".equals(key) || "foregroundcolour".
equals(key)) {
getInputField().setForeground(getContainer()
.getConfigManager().getOptionColour(
"ui", "inputforegroundcolour",
"ui", "foregroundcolour"));
getInputField().setCaretColor(getContainer()
.getConfigManager().getOptionColour(
"ui", "inputforegroundcolour",
"ui", "foregroundcolour"));
}
}
if ("awayindicator".equals(key)) {
useAwayIndicator = getContainer().getConfigManager()
.getOptionBool("ui", "awayindicator");
}
}
}
/**
* Populates the nicklist popup.
*
* @param nickname Nickname for the popup
*/
protected final void popuplateNicklistPopup(final String nickname) {
final PopupMenu popups = PopupManager.getMenu(PopupType.CHAN_NICK,
getContainer().getConfigManager());
nickPopup = (JPopupMenu) populatePopupMenu(new JPopupMenu(), popups,
nickname);
}
/**
* Populates the specified popupmenu
*
* @param menu Menu component
* @param popup Popup to get info from
* @param arguments Arguments for the command
*
* @return Populated popup
*/
private JComponent populatePopupMenu(final JComponent menu,
final PopupMenu popup, final Object... arguments) {
for (PopupMenuItem menuItem : popup.getItems()) {
if (menuItem.isDivider()) {
menu.add(new JSeparator());
} else if (menuItem.isSubMenu()) {
menu.add(populatePopupMenu(new JMenu(menuItem.getName()),
menuItem.getSubMenu(), arguments));
} else {
menu.add(
new JMenuItem(
new CommandAction(getCommandParser(),
this, menuItem.getName(), menuItem.getCommand(arguments))));
}
}
return menu;
}
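// populatePopupMenu() above walks the PopupMenu model recursively: dividers become
// JSeparators, nested menus become JMenus filled by a recursive call, and plain items
// become JMenuItems backed by a CommandAction built from this frame's command parser
// and the item's command expanded with the supplied arguments (here the nickname).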
/** Request input field focus. */
public void requestInputFieldFocus() {
if (inputField != null) {
inputField.requestFocusInWindow();
}
}
/** {@inheritDoc} */
@Override
public void onAway(final String reason) {
UIUtilities.invokeLater(new Runnable() {
/** {@inheritDoc} */
@Override
public void run() {
if (useAwayIndicator) {
awayLabel.setVisible(true);
}
}
});
}
/** {@inheritDoc} */
@Override
public void onBack() {
UIUtilities.invokeLater(new Runnable() {
/** {@inheritDoc} */
@Override
public void run() {
if (useAwayIndicator) {
awayLabel.setVisible(false);
}
}
});
}
/** {@inheritDoc} */
@Override
public void close() {
super.close();
if (getContainer() != null && getContainer().getServer() != null) {
getContainer().getServer().removeAwayStateListener(this);
}
}
/**
* Activates the input field on frame focus. {@inheritDoc}
*
* @param event Internal frame event
*/
@Override
public void internalFrameActivated(final InternalFrameEvent event) {
super.internalFrameActivated(event);
if (useAwayIndicator && getContainer().getServer() != null) {
awayLabel.setVisible(getContainer().getServer().isAway());
}
inputField.requestFocusInWindow();
}
}
|
package com.redhat.ceylon.compiler.typechecker.analyzer;
import static com.redhat.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.declaredInPackage;
import static com.redhat.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.getPackageTypeDeclaration;
import static com.redhat.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.getTypeArguments;
import static com.redhat.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.getTypeDeclaration;
import static com.redhat.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.getTypeMember;
import static com.redhat.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.getTypedDeclaration;
import static com.redhat.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.setTypeConstructor;
import static com.redhat.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.unwrapAliasedTypeConstructor;
import static com.redhat.ceylon.compiler.typechecker.tree.TreeUtil.formatPath;
import static com.redhat.ceylon.compiler.typechecker.tree.TreeUtil.name;
import static com.redhat.ceylon.compiler.typechecker.tree.TreeUtil.unwrapExpressionUntilTerm;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.appliedType;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.getContainingClassOrInterface;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.getNativeDeclaration;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.getNativeHeader;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.intersection;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.intersectionOfSupertypes;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.isImplemented;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.isNativeImplementation;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.isToplevelAnonymousClass;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.isToplevelClassConstructor;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.isTypeUnknown;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.notOverloaded;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.union;
import static com.redhat.ceylon.model.typechecker.model.SiteVariance.IN;
import static com.redhat.ceylon.model.typechecker.model.SiteVariance.OUT;
import static java.lang.Integer.parseInt;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.redhat.ceylon.common.Backend;
import com.redhat.ceylon.common.BackendSupport;
import com.redhat.ceylon.compiler.typechecker.context.TypecheckerUnit;
import com.redhat.ceylon.compiler.typechecker.parser.CeylonLexer;
import com.redhat.ceylon.compiler.typechecker.tree.Node;
import com.redhat.ceylon.compiler.typechecker.tree.Tree;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.TypeSpecifier;
import com.redhat.ceylon.compiler.typechecker.tree.Visitor;
import com.redhat.ceylon.model.cmr.JDKUtils;
import com.redhat.ceylon.model.typechecker.model.Class;
import com.redhat.ceylon.model.typechecker.model.ClassAlias;
import com.redhat.ceylon.model.typechecker.model.ClassOrInterface;
import com.redhat.ceylon.model.typechecker.model.Constructor;
import com.redhat.ceylon.model.typechecker.model.Declaration;
import com.redhat.ceylon.model.typechecker.model.Function;
import com.redhat.ceylon.model.typechecker.model.FunctionOrValue;
import com.redhat.ceylon.model.typechecker.model.Import;
import com.redhat.ceylon.model.typechecker.model.ImportList;
import com.redhat.ceylon.model.typechecker.model.Interface;
import com.redhat.ceylon.model.typechecker.model.Module;
import com.redhat.ceylon.model.typechecker.model.ModuleImport;
import com.redhat.ceylon.model.typechecker.model.NothingType;
import com.redhat.ceylon.model.typechecker.model.Package;
import com.redhat.ceylon.model.typechecker.model.Parameter;
import com.redhat.ceylon.model.typechecker.model.Scope;
import com.redhat.ceylon.model.typechecker.model.Specification;
import com.redhat.ceylon.model.typechecker.model.Type;
import com.redhat.ceylon.model.typechecker.model.TypeAlias;
import com.redhat.ceylon.model.typechecker.model.TypeDeclaration;
import com.redhat.ceylon.model.typechecker.model.TypeParameter;
import com.redhat.ceylon.model.typechecker.model.TypedDeclaration;
import com.redhat.ceylon.model.typechecker.model.Unit;
import com.redhat.ceylon.model.typechecker.model.UnknownType;
import com.redhat.ceylon.model.typechecker.model.Value;
/**
* Second phase of type analysis.
* Scans the compilation unit looking for literal type
* declarations and maps them to the associated model
* objects. Also builds up a list of imports for the
* compilation unit. Finally, assigns types to the
* associated model objects of declarations declared
* using an explicit type (this must be done in this
* phase, since shared declarations may be used out of
* order in expressions).
*
* @author Gavin King
*
*/
public class TypeVisitor extends Visitor {
private TypecheckerUnit unit;
private final BackendSupport backendSupport;
private boolean inDelegatedConstructor;
private boolean inTypeLiteral;
private boolean inExtendsOrClassAlias;
private Backend inBackend = null;
public TypeVisitor(BackendSupport backendSupport) {
this.backendSupport = backendSupport;
}
public TypeVisitor(TypecheckerUnit unit, BackendSupport backendSupport) {
this.unit = unit;
this.backendSupport = backendSupport;
String nat = unit.getPackage().getModule().getNative();
inBackend = Backend.fromAnnotation(nat);
}
@Override public void visit(Tree.CompilationUnit that) {
unit = that.getUnit();
Backend ib = inBackend;
String nat = unit.getPackage().getModule().getNative();
inBackend = Backend.fromAnnotation(nat);
super.visit(that);
inBackend = ib;
HashSet<String> set = new HashSet<String>();
for (Tree.Import im: that.getImportList().getImports()) {
Tree.ImportPath ip = im.getImportPath();
if (ip!=null) {
String mp = formatPath(ip.getIdentifiers());
if (!set.add(mp)) {
ip.addError("duplicate import: '" + mp + "'");
}
}
}
}
@Override
public void visit(Tree.Import that) {
Package importedPackage =
getPackage(that.getImportPath(), backendSupport);
if (importedPackage!=null) {
that.getImportPath().setModel(importedPackage);
Tree.ImportMemberOrTypeList imtl =
that.getImportMemberOrTypeList();
if (imtl!=null) {
ImportList il = imtl.getImportList();
il.setImportedScope(importedPackage);
Set<String> names = new HashSet<String>();
for (Tree.ImportMemberOrType member:
imtl.getImportMemberOrTypes()) {
names.add(importMember(member, importedPackage, il));
}
if (imtl.getImportWildcard()!=null) {
importAllMembers(importedPackage, names, il);
}
else if (imtl.getImportMemberOrTypes().isEmpty()) {
imtl.addError("empty import list");
}
}
}
}
private void importAllMembers(Package importedPackage,
Set<String> ignoredMembers, ImportList il) {
for (Declaration dec: importedPackage.getMembers()) {
if (dec.isShared() &&
!dec.isAnonymous() &&
!ignoredMembers.contains(dec.getName()) &&
!isNonimportable(importedPackage, dec.getName())) {
addWildcardImport(il, dec);
}
}
}
private void importAllMembers(TypeDeclaration importedType,
Set<String> ignoredMembers, ImportList til) {
for (Declaration dec: importedType.getMembers()) {
if (dec.isShared() &&
(dec.isStaticallyImportable() ||
dec instanceof Constructor) &&
!dec.isAnonymous() &&
!ignoredMembers.contains(dec.getName())) {
addWildcardImport(til, dec, importedType);
}
}
}
private void addWildcardImport(ImportList il, Declaration dec) {
if (!hidesToplevel(dec)) {
Import i = new Import();
i.setAlias(dec.getName());
i.setDeclaration(dec);
i.setWildcardImport(true);
addWildcardImport(il, dec, i);
}
}
private void addWildcardImport(ImportList il, Declaration dec, TypeDeclaration td) {
if (!hidesToplevel(dec)) {
Import i = new Import();
i.setAlias(dec.getName());
i.setDeclaration(dec);
i.setWildcardImport(true);
i.setTypeDeclaration(td);
addWildcardImport(il, dec, i);
}
}
private void addWildcardImport(ImportList il, Declaration dec, Import i) {
if (notOverloaded(dec)) {
String alias = i.getAlias();
if (alias!=null) {
Import o = unit.getImport(dec.getName());
if (o!=null && o.isWildcardImport()) {
if (o.getDeclaration().equals(dec)) {
//this case only happens in the IDE,
//due to reuse of the Unit
unit.getImports().remove(o);
il.getImports().remove(o);
}
else {
i.setAmbiguous(true);
o.setAmbiguous(true);
}
}
unit.getImports().add(i);
il.getImports().add(i);
}
}
}
public static Module getModule(Tree.ImportPath path) {
if (path!=null &&
!path.getIdentifiers().isEmpty()) {
String nameToImport =
formatPath(path.getIdentifiers());
Module module =
path.getUnit().getPackage()
.getModule();
Package pkg = module.getPackage(nameToImport);
if (pkg != null) {
Module mod = pkg.getModule();
if (!pkg.getNameAsString()
.equals(mod.getNameAsString())) {
path.addError("not a module: '" +
nameToImport + "'");
return null;
}
if (mod.equals(module)) {
return mod;
}
//check that the package really does belong to
//an imported module, to work around bug where
//default package thinks it can see stuff in
//all modules in the same source dir
Set<Module> visited = new HashSet<Module>();
for (ModuleImport mi: module.getImports()) {
if (findModuleInTransitiveImports(mi.getModule(),
mod, visited)) {
return mod;
}
}
}
path.addError("module not found in imported modules: '" +
nameToImport + "'", 7000);
}
return null;
}
public static Package getPackage(Tree.ImportPath path,
BackendSupport backendSupport) {
if (path!=null &&
!path.getIdentifiers().isEmpty()) {
String nameToImport =
formatPath(path.getIdentifiers());
Module module =
path.getUnit().getPackage()
.getModule();
Package pkg = module.getPackage(nameToImport);
if (pkg != null) {
if (pkg.getModule().equals(module)) {
return pkg;
}
if (!pkg.isShared()) {
path.addError("imported package is not shared: '" +
nameToImport + "'", 402);
}
// if (module.isDefault() &&
// !pkg.getModule().isDefault() &&
// !pkg.getModule().getNameAsString()
// .equals(Module.LANGUAGE_MODULE_NAME)) {
// path.addError("package belongs to a module and may not be imported by default module: " +
// nameToImport);
//check that the package really does belong to
//an imported module, to work around bug where
//default package thinks it can see stuff in
//all modules in the same source dir
Set<Module> visited = new HashSet<Module>();
for (ModuleImport mi: module.getImports()) {
if (findModuleInTransitiveImports(
mi.getModule(), pkg.getModule(),
visited)) {
return pkg;
}
}
} else {
for (ModuleImport mi: module.getImports()) {
if (mi.isNative()) {
Backend backend =
Backend.fromAnnotation(mi.getNative());
String name = mi.getModule().getNameAsString();
if (!backendSupport.supportsBackend(backend)
&& (nameToImport.equals(name)
|| nameToImport.startsWith(name + "."))) {
return null;
}
if (!backendSupport.supportsBackend(Backend.Java) &&
(JDKUtils.isJDKAnyPackage(nameToImport) ||
JDKUtils.isOracleJDKAnyPackage(nameToImport))) {
return null;
}
}
}
}
String help;
if(module.isDefault())
help = " (define a module and add module import to its module descriptor)";
else
help = " (add module import to module descriptor of '" +
module.getNameAsString() + "')";
path.addError("package not found in imported modules: '" +
nameToImport + "'" + help, 7000);
}
return null;
}
private static boolean findModuleInTransitiveImports(Module moduleToVisit,
Module moduleToFind, Set<Module> visited) {
if (!visited.add(moduleToVisit)) {
return false;
}
else if (moduleToVisit.equals(moduleToFind)) {
return true;
}
else {
for (ModuleImport imp: moduleToVisit.getImports()) {
// skip non-exported modules
if (imp.isExport() &&
findModuleInTransitiveImports(imp.getModule(),
moduleToFind, visited)) {
return true;
}
}
return false;
}
}
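// findModuleInTransitiveImports() above is a depth-first search over module imports:
// the visited set guards against cycles, and only imports flagged as exported are
// followed, so a module only counts as reachable through re-exported dependencies.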
private boolean hidesToplevel(Declaration dec) {
for (Declaration d: unit.getDeclarations()) {
String n = d.getName();
if (d.isToplevel() && n!=null &&
dec.getName().equals(n)) {
return true;
}
}
return false;
}
private boolean checkForHiddenToplevel(Tree.Identifier id,
Import i, Tree.Alias alias) {
for (Declaration d: unit.getDeclarations()) {
String n = d.getName();
Declaration idec = i.getDeclaration();
if (d.isToplevel() && n!=null &&
i.getAlias().equals(n) &&
!idec.equals(d) &&
//in the current package without providing an
//alias:
!isLegalAliasFreeImport(d, idec)) {
if (alias==null) {
id.addError("toplevel declaration with this name declared in this unit: '" + n + "'");
}
else {
alias.addError("toplevel declaration with this name declared in this unit: '" + n + "'");
}
return true;
}
}
return false;
}
private static boolean isLegalAliasFreeImport
(Declaration dec, Declaration importedDec) {
if (importedDec instanceof Value) {
Value value = (Value) importedDec;
TypeDeclaration td = value.getTypeDeclaration();
return td.isAnonymous() && td.equals(dec);
}
else {
return false;
}
}
private void importMembers(Tree.ImportMemberOrType member,
Declaration d) {
Tree.ImportMemberOrTypeList imtl =
member.getImportMemberOrTypeList();
if (imtl!=null) {
if (d instanceof Value) {
Value v = (Value) d;
TypeDeclaration td = v.getTypeDeclaration();
if (td.isAnonymous()) {
d = td;
}
}
if (d instanceof TypeDeclaration) {
Set<String> names = new HashSet<String>();
ImportList til = imtl.getImportList();
TypeDeclaration td = (TypeDeclaration) d;
til.setImportedScope(td);
List<Tree.ImportMemberOrType> imts =
imtl.getImportMemberOrTypes();
for (Tree.ImportMemberOrType imt: imts) {
names.add(importMember(imt, td, til));
}
if (imtl.getImportWildcard()!=null) {
importAllMembers(td, names, til);
}
else if (imts.isEmpty()) {
imtl.addError("empty import list");
}
}
else {
imtl.addError("member alias list must follow a type");
}
}
}
private void checkAliasCase(Tree.Alias alias, Declaration d) {
if (alias!=null) {
Tree.Identifier id = alias.getIdentifier();
int tt = id.getToken().getType();
if (d instanceof TypeDeclaration &&
tt!=CeylonLexer.UIDENTIFIER) {
id.addError("imported type should have uppercase alias: '" +
d.getName() + "'");
}
else if (d instanceof TypedDeclaration &&
tt!=CeylonLexer.LIDENTIFIER) {
id.addError("imported member should have lowercase alias: '" +
d.getName() + "'");
}
}
}
private String importMember(Tree.ImportMemberOrType member,
Package importedPackage, ImportList il) {
Tree.Identifier id =
member.getIdentifier();
if (id==null) {
return null;
}
Import i = new Import();
member.setImportModel(i);
Tree.Alias alias = member.getAlias();
String name = name(id);
if (alias==null) {
i.setAlias(name);
}
else {
i.setAlias(name(alias.getIdentifier()));
}
if (isNonimportable(importedPackage, name)) {
id.addError("root type may not be imported");
return name;
}
Declaration d =
importedPackage.getMember(name, null, false);
if (d==null) {
id.addError("imported declaration not found: '" +
name + "'",
100);
unit.getUnresolvedReferences().add(id);
}
else {
if (!declaredInPackage(d, unit)) {
if (!d.isShared()) {
id.addError("imported declaration is not shared: '" +
name + "'",
400);
}
else if (d.isPackageVisibility()) {
id.addError("imported package private declaration is not visible: '" +
name + "'");
}
else if (d.isProtectedVisibility()) {
id.addError("imported protected declaration is not visible: '" +
name + "'");
}
}
i.setDeclaration(d);
member.setDeclarationModel(d);
if (il.hasImport(d)) {
id.addError("already imported: '" + name + "'");
}
else if (!checkForHiddenToplevel(id, i, alias)) {
addImport(member, il, i);
}
checkAliasCase(alias, d);
}
if (d!=null) {
importMembers(member, d);
}
return name;
}
private String importMember(Tree.ImportMemberOrType member,
TypeDeclaration td, ImportList il) {
Tree.Identifier id =
member.getIdentifier();
if (id==null) {
return null;
}
Import i = new Import();
member.setImportModel(i);
Tree.Alias alias = member.getAlias();
String name = name(id);
if (alias==null) {
i.setAlias(name);
}
else {
i.setAlias(name(alias.getIdentifier()));
}
Declaration m = td.getMember(name, null, false);
if (m==null) {
id.addError("imported declaration not found: '" +
name + "' of '" +
td.getName() + "'",
100);
unit.getUnresolvedReferences().add(id);
}
else {
List<Declaration> members =
m.getContainer().getMembers();
for (Declaration d: members) {
String dn = d.getName();
if (dn!=null &&
dn.equals(name) &&
!d.sameKind(m) &&
!d.isAnonymous()) {
//crazy interop cases like isOpen() + open()
id.addError("ambiguous member declaration: '" +
name + "' of '" +
td.getName() + "'");
return null;
}
}
if (!m.isShared()) {
id.addError("imported declaration is not shared: '" +
name + "' of '" +
td.getName() + "'",
400);
}
else if (!declaredInPackage(m, unit)) {
if (m.isPackageVisibility()) {
id.addError("imported package private declaration is not visible: '" +
name + "' of '" +
td.getName() + "'");
}
else if (m.isProtectedVisibility()) {
id.addError("imported protected declaration is not visible: '" +
name + "' of '" +
td.getName() + "'");
}
}
i.setTypeDeclaration(td);
if (!m.isStaticallyImportable() &&
!isToplevelClassConstructor(td, m) &&
!isToplevelAnonymousClass(m.getContainer())) {
if (alias==null) {
member.addError("does not specify an alias");
}
}
i.setDeclaration(m);
member.setDeclarationModel(m);
if (il.hasImport(m)) {
id.addError("already imported: '" +
name + "' of '" + td.getName() + "'");
}
else {
if (m.isStaticallyImportable() ||
isToplevelClassConstructor(td, m) ||
isToplevelAnonymousClass(m.getContainer())) {
if (!checkForHiddenToplevel(id, i, alias)) {
addImport(member, il, i);
}
}
else {
addMemberImport(member, il, i);
}
}
checkAliasCase(alias, m);
}
if (m!=null) {
importMembers(member, m);
}
//imtl.addError("member aliases may not have member aliases");
return name;
}
private void addImport(Tree.ImportMemberOrType member,
ImportList il, Import i) {
String alias = i.getAlias();
if (alias!=null) {
Map<String, String> mods = unit.getModifiers();
if (mods.containsKey(alias) &&
mods.get(alias).equals(alias)) {
//spec says you can't hide a language modifier
//unless the modifier itself has an alias
//(this is perhaps a little heavy-handed)
member.addError("import hides a language modifier: '" +
alias + "'");
}
else {
Import o = unit.getImport(alias);
if (o==null) {
unit.getImports().add(i);
il.getImports().add(i);
}
else if (o.isWildcardImport()) {
unit.getImports().remove(o);
il.getImports().remove(o);
unit.getImports().add(i);
il.getImports().add(i);
}
else {
member.addError("duplicate import alias: '" +
alias + "'");
}
}
}
}
private void addMemberImport(Tree.ImportMemberOrType member,
ImportList il, Import i) {
String alias = i.getAlias();
if (alias!=null) {
if (il.getImport(alias)==null) {
unit.getImports().add(i);
il.getImports().add(i);
}
else {
member.addError("duplicate member import alias: '" +
alias + "'");
}
}
}
private boolean isNonimportable(Package pkg, String name) {
return pkg.getQualifiedNameString().equals("java.lang") &&
("Object".equals(name) ||
"Throwable".equals(name) ||
"Exception".equals(name));
}
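// isNonimportable() above blocks explicit imports of java.lang.Object, Throwable and
// Exception (see the "root type may not be imported" error in importMember), and the
// wildcard path in importAllMembers(Package, ...) skips the same names, so these root
// types cannot be imported from java.lang either explicitly or via a wildcard.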
@Override public void visit(Tree.Declaration that) {
Backend ib = inBackend;
String nat = that.getDeclarationModel().getNativeBackend();
inBackend = Backend.fromAnnotation(nat);
super.visit(that);
inBackend = ib;
}
public void visit(Tree.GroupedType that) {
super.visit(that);
Tree.StaticType type = that.getType();
if (type!=null) {
that.setTypeModel(type.getTypeModel());
}
}
@Override
public void visit(Tree.UnionType that) {
super.visit(that);
List<Tree.StaticType> sts =
that.getStaticTypes();
List<Type> types =
new ArrayList<Type>
(sts.size());
for (Tree.StaticType st: sts) {
//can't use addToUnion() here
Type t = st.getTypeModel();
if (t!=null) {
types.add(t);
}
}
Type type = union(types, unit);
that.setTypeModel(type);
}
@Override
public void visit(Tree.IntersectionType that) {
super.visit(that);
List<Tree.StaticType> sts =
that.getStaticTypes();
List<Type> types =
new ArrayList<Type>
(sts.size());
for (Tree.StaticType st: sts) {
//can't use addToIntersection() here
Type t = st.getTypeModel();
if (t!=null) {
types.add(t);
}
}
Type type = intersection(types, unit);
that.setTypeModel(type);
}
@Override
public void visit(Tree.SequenceType that) {
super.visit(that);
Tree.StaticType elementType = that.getElementType();
Tree.NaturalLiteral length = that.getLength();
Type et = elementType.getTypeModel();
if (et!=null) {
Type t;
if (length==null) {
t = unit.getSequentialType(et);
}
else {
final int len;
try {
len = parseInt(length.getText());
}
catch (NumberFormatException nfe) {
length.addError("must be a positive decimal integer");
return;
}
if (len<1) {
length.addError("must be positive");
return;
}
if (len>1000) {
length.addError("may not be greater than 1000");
return;
}
Class td = unit.getTupleDeclaration();
t = unit.getEmptyType();
for (int i=0; i<len; i++) {
t = appliedType(td, et, et, t);
}
}
that.setTypeModel(t);
}
}
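// For illustration, a bounded sequence type such as String[2] is desugared above by
// folding Tuple around the empty type: starting from [] the loop applies
// Tuple<String,String,...> twice, producing Tuple<String,String,Tuple<String,String,[]>>,
// i.e. the 2-element tuple [String, String]; a sequence type with no length, such as
// String[], simply becomes the sequential type Sequential<String>.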
@Override
public void visit(Tree.IterableType that) {
super.visit(that);
Tree.Type elem = that.getElementType();
if (elem==null) {
Type nt = unit.getNothingType();
that.setTypeModel(unit.getIterableType(nt));
that.addError("iterable type must have an element type");
}
else {
if (elem instanceof Tree.SequencedType) {
Tree.SequencedType st =
(Tree.SequencedType) elem;
Type et = st.getType().getTypeModel();
if (et!=null) {
Type t =
st.getAtLeastOne() ?
unit.getNonemptyIterableType(et) :
unit.getIterableType(et);
that.setTypeModel(t);
}
}
else {
that.addError("malformed iterable type");
}
}
}
@Override
public void visit(Tree.OptionalType that) {
super.visit(that);
List<Type> types =
new ArrayList<Type>(2);
types.add(unit.getNullType());
Type dt = that.getDefiniteType().getTypeModel();
if (dt!=null) types.add(dt);
that.setTypeModel(union(types, unit));
}
@Override
public void visit(Tree.EntryType that) {
super.visit(that);
Type kt =
that.getKeyType().getTypeModel();
Type vt =
that.getValueType()==null ?
new UnknownType(unit).getType() :
that.getValueType().getTypeModel();
that.setTypeModel(unit.getEntryType(kt, vt));
}
@Override
public void visit(Tree.TypeConstructor that) {
super.visit(that);
TypeAlias ta = that.getDeclarationModel();
ta.setExtendedType(that.getType().getTypeModel());
Type type = ta.getType();
type.setTypeConstructor(true);
that.setTypeModel(type);
}
@Override
public void visit(Tree.FunctionType that) {
super.visit(that);
Tree.StaticType rt =
that.getReturnType();
if (rt!=null) {
List<Tree.Type> argumentTypes =
that.getArgumentTypes();
Type tt = getTupleType(argumentTypes, unit);
Interface cd = unit.getCallableDeclaration();
Type pt =
appliedType(cd, rt.getTypeModel(), tt);
that.setTypeModel(pt);
}
}
@Override
public void visit(Tree.TupleType that) {
super.visit(that);
List<Tree.Type> elementTypes =
that.getElementTypes();
Type tt = getTupleType(elementTypes, unit);
that.setTypeModel(tt);
}
static Type getTupleType(List<Tree.Type> ets,
Unit unit) {
List<Type> args =
new ArrayList<Type>
(ets.size());
boolean sequenced = false;
boolean atleastone = false;
int firstDefaulted = -1;
for (int i=0; i<ets.size(); i++) {
Tree.Type st = ets.get(i);
Type arg = st==null ?
null : st.getTypeModel();
if (arg==null) {
arg = new UnknownType(unit).getType();
}
else if (st instanceof Tree.SpreadType) {
//currently we only allow a
//single spread type, but in
//future we should also allow
//X, Y, *Zs
return st.getTypeModel();
}
else if (st instanceof Tree.DefaultedType) {
if (firstDefaulted==-1) {
firstDefaulted = i;
}
}
else if (st instanceof Tree.SequencedType) {
if (i!=ets.size()-1) {
st.addError("variant element must occur last in a tuple type");
}
else {
sequenced = true;
Tree.SequencedType sst =
(Tree.SequencedType) st;
atleastone = sst.getAtLeastOne();
arg = sst.getType().getTypeModel();
}
if (firstDefaulted!=-1 && atleastone) {
st.addError("nonempty variadic element must occur after defaulted elements in a tuple type");
}
}
else {
if (firstDefaulted!=-1) {
st.addError("required element must occur after defaulted elements in a tuple type");
}
}
args.add(arg);
}
return getTupleType(args, sequenced, atleastone,
firstDefaulted, unit);
}
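// In the overload above, firstDefaulted records the index of the first defaulted (X=)
// element, sequenced/atleastone record whether the tuple ends in a variadic X* or X+
// element, and a spread element short-circuits the whole computation by returning its
// own type model as the tuple type.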
//TODO: big copy/paste from Unit.getTupleType(), to
// eliminate the canonicalization (since aliases
// are not yet resolvable in this phase)
private static Type getTupleType(
List<Type> elemTypes,
boolean variadic, boolean atLeastOne,
int firstDefaulted,
Unit unit) {
Class td = unit.getTupleDeclaration();
Type result = unit.getEmptyType();
Type union = unit.getNothingType();
int last = elemTypes.size()-1;
for (int i=last; i>=0; i--) {
Type elemType = elemTypes.get(i);
List<Type> pair =
new ArrayList<Type>();
//can't use addToUnion() here
pair.add(elemType);
pair.add(union);
union = union(pair, unit);
if (variadic && i==last) {
result = atLeastOne ?
unit.getSequenceType(elemType) :
unit.getSequentialType(elemType);
}
else {
result = appliedType(td, union, elemType,
result);
if (firstDefaulted>=0 && i>=firstDefaulted) {
pair = new ArrayList<Type>();
//can't use addToUnion() here
pair.add(unit.getEmptyType());
pair.add(result);
result = union(pair, unit);
}
}
}
return result;
}
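// Worked example for the fold above: for the tuple type [X, Y=] (firstDefaulted = 1,
// no variadic element) the loop runs right to left; at i = 1 it builds Tuple<Y,Y,[]>
// and, because the element is defaulted, unions it with [] to give []|Tuple<Y,Y,[]>;
// at i = 0 it wraps that in Tuple<X|Y, X, []|Tuple<Y,Y,[]>>, the uncanonicalised
// model of [X, Y=].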
@Override
public void visit(Tree.BaseType that) {
super.visit(that);
Tree.Identifier id = that.getIdentifier();
if (id!=null) {
String name = name(id);
Scope scope = that.getScope();
TypeDeclaration type;
if (that.getPackageQualified()) {
type = getPackageTypeDeclaration(name,
null, false, unit);
}
else {
type = getTypeDeclaration(scope, name,
null, false, unit);
}
if (type==null) {
that.addError("type declaration does not exist: '" +
name + "'", 102);
unit.getUnresolvedReferences().add(id);
}
else {
type = (TypeDeclaration)handleHeader(type, that);
Type outerType =
scope.getDeclaringType(type);
visitSimpleType(that, outerType, type);
}
}
}
public void visit(Tree.SuperType that) {
//if (inExtendsClause) { //can't appear anywhere else in the tree!
Scope scope = that.getScope();
ClassOrInterface ci =
getContainingClassOrInterface(scope);
if (ci!=null) {
if (scope instanceof Constructor) {
that.setTypeModel(intersectionOfSupertypes(ci));
}
else if (ci.isClassOrInterfaceMember()) {
ClassOrInterface oci = (ClassOrInterface)
ci.getContainer();
that.setTypeModel(intersectionOfSupertypes(oci));
}
else {
that.addError("super appears in extends for non-member class");
}
}
}
@Override
public void visit(Tree.MemberLiteral that) {
super.visit(that);
if (that.getType()!=null) {
Type pt =
that.getType().getTypeModel();
if (pt!=null) {
if (that.getTypeArgumentList()!=null &&
isTypeUnknown(pt) &&
!pt.isUnknown()) {
that.getTypeArgumentList()
.addError("qualifying type does not fully-specify type arguments");
}
}
}
}
@Override
public void visit(Tree.QualifiedType that) {
boolean onl = inTypeLiteral;
boolean oiea = inExtendsOrClassAlias;
boolean oidc = inDelegatedConstructor;
inTypeLiteral = false;
inExtendsOrClassAlias = false;
inDelegatedConstructor = false;
super.visit(that);
inExtendsOrClassAlias = oiea;
inDelegatedConstructor = oidc;
inTypeLiteral = onl;
Tree.StaticType ot = that.getOuterType();
Type pt = ot.getTypeModel();
if (pt!=null) {
// if (pt.isTypeConstructor()) {
// ot.addError("qualifying type may not be a type constructor");
if (that.getMetamodel() &&
that.getTypeArgumentList()!=null &&
isTypeUnknown(pt) && !pt.isUnknown()) {
that.getTypeArgumentList()
.addError("qualifying type does not fully-specify type arguments");
}
TypeDeclaration d = pt.getDeclaration();
Tree.Identifier id = that.getIdentifier();
if (id!=null) {
String name = name(id);
TypeDeclaration type =
getTypeMember(d, name,
null, false, unit);
if (type==null) {
if (d.isMemberAmbiguous(name, unit, null, false)) {
that.addError("member type declaration is ambiguous: '" +
name + "' for type '" +
d.getName() + "'");
}
else {
that.addError("member type declaration does not exist: '" +
name + "' in type '" +
d.getName() + "'", 100);
unit.getUnresolvedReferences().add(id);
}
}
else {
visitSimpleType(that, pt, type);
}
}
}
}
@Override
public void visit(Tree.TypeLiteral that) {
inTypeLiteral = true;
super.visit(that);
inTypeLiteral = false;
}
private void visitSimpleType(Tree.SimpleType that,
Type ot, TypeDeclaration dec) {
if (dec instanceof Constructor &&
//in a metamodel type literal, a constructor
//is allowed
!inTypeLiteral &&
//for an extends clause or aliased class,
//either a class with parameters or a
//constructor is allowed
!inExtendsOrClassAlias &&
!inDelegatedConstructor) {
that.addError("constructor is not a type: '" +
dec.getName(unit) + "'");
}
Tree.TypeArgumentList tal =
that.getTypeArgumentList();
if (tal!=null) {
dec = unwrapAliasedTypeConstructor(dec);
}
List<TypeParameter> params =
dec.getTypeParameters();
List<Type> typeArgs =
getTypeArguments(tal, ot, params);
//Note: we actually *check* these type arguments
// later in ExpressionVisitor
Type pt = dec.appliedType(ot, typeArgs);
if (tal==null) {
if (!params.isEmpty()) {
//For now the only type constructors allowed
//as the type of a value are type constructors
//that alias Callable (in future relax this)
//and interpret *every* type with a missing
//type argument list as a type constructor
Interface cd = unit.getCallableDeclaration();
boolean functionTypeConstructor =
dec.isAlias() ?
dec.inherits(cd) :
dec.equals(cd);
if (functionTypeConstructor) {
pt.setTypeConstructor(true);
}
}
}
else {
if (params.isEmpty()) {
that.addError("does not accept type arguments: '" +
dec.getName(unit) +
"' is not a generic type");
}
tal.setTypeModels(typeArgs);
List<Tree.Type> args = tal.getTypes();
for (int i = 0;
i<args.size() &&
i<params.size();
i++) {
Tree.Type t = args.get(i);
if (t instanceof Tree.StaticType) {
Tree.StaticType st =
(Tree.StaticType) t;
Tree.TypeVariance variance =
st.getTypeVariance();
if (variance!=null) {
TypeParameter p = params.get(i);
String var = variance.getText();
if (var.equals("out")) {
pt.setVariance(p, OUT);
}
else if (var.equals("in")) {
pt.setVariance(p, IN);
}
if (!p.isInvariant()) {
//Type doesn't yet know
//how to reason about *runtime*
//instantiations of variant types
//since they are effectively
//invariant
variance.addUnsupportedError(
"use-site variant instantiation of declaration-site variant types is not supported: type parameter '" +
p.getName() + "' of '" +
dec.getName(unit) +
"' is not declared invariant");
}
}
}
}
}
that.setTypeModel(pt);
that.setDeclarationModel(dec);
}
@Override
public void visit(Tree.VoidModifier that) {
Class vtd = unit.getAnythingDeclaration();
if (vtd!=null) {
that.setTypeModel(vtd.getType());
}
}
@Override
public void visit(Tree.SequencedType that) {
super.visit(that);
Type type =
that.getType().getTypeModel();
if (type!=null) {
Type et = that.getAtLeastOne() ?
unit.getSequenceType(type) :
unit.getSequentialType(type);
that.setTypeModel(et);
}
}
@Override
public void visit(Tree.DefaultedType that) {
super.visit(that);
Type type =
that.getType().getTypeModel();
if (type!=null) {
that.setTypeModel(type);
}
}
@Override
public void visit(Tree.SpreadType that) {
super.visit(that);
Tree.Type t = that.getType();
if (t!=null) {
Type type = t.getTypeModel();
if (type!=null) {
that.setTypeModel(type);
}
}
}
@Override
public void visit(Tree.TypedDeclaration that) {
super.visit(that);
Tree.Type type = that.getType();
TypedDeclaration dec = that.getDeclarationModel();
setType(that, type, dec);
if (dec instanceof FunctionOrValue) {
FunctionOrValue mv = (FunctionOrValue) dec;
if (dec.isLate() &&
mv.isParameter()) {
that.addError("parameter may not be annotated late");
}
}
// if (type.getTypeModel().isTypeConstructor()) {
// type.addError("type constructor may not occur as the type of a declaration");
}
@Override
public void visit(Tree.TypedArgument that) {
super.visit(that);
setType(that, that.getType(),
that.getDeclarationModel());
}
/*@Override
public void visit(Tree.FunctionArgument that) {
super.visit(that);
setType(that, that.getType(), that.getDeclarationModel());
}*/
private void setType(Node that, Tree.Type type,
TypedDeclaration td) {
if (type==null) {
that.addError("missing type of declaration: '" +
td.getName() + "'");
}
else if (!(type instanceof Tree.LocalModifier)) { //if the type declaration is missing, we do type inference later
Type t = type.getTypeModel();
if (t!=null) {
td.setType(t);
}
}
}
private void defaultSuperclass(Tree.ExtendedType et,
TypeDeclaration cd) {
if (et==null) {
cd.setExtendedType(unit.getBasicType());
}
}
@Override
public void visit(Tree.ObjectDefinition that) {
Class o = that.getAnonymousClass();
o.setExtendedType(null);
o.getSatisfiedTypes().clear();
defaultSuperclass(that.getExtendedType(), o);
super.visit(that);
handleHeader(that.getDeclarationModel(), that);
Type type = o.getType();
that.getDeclarationModel().setType(type);
that.getType().setTypeModel(type);
}
@Override
public void visit(Tree.ObjectArgument that) {
Class o = that.getAnonymousClass();
o.setExtendedType(null);
o.getSatisfiedTypes().clear();
defaultSuperclass(that.getExtendedType(), o);
super.visit(that);
Type type = o.getType();
that.getDeclarationModel().setType(type);
that.getType().setTypeModel(type);
}
@Override
public void visit(Tree.ObjectExpression that) {
Class o = that.getAnonymousClass();
o.setExtendedType(null);
o.getSatisfiedTypes().clear();
defaultSuperclass(that.getExtendedType(), o);
super.visit(that);
}
@Override
public void visit(Tree.ClassDefinition that) {
Class cd = that.getDeclarationModel();
cd.setExtendedType(null);
cd.getSatisfiedTypes().clear();
Class vd = unit.getAnythingDeclaration();
if (vd != null && !vd.equals(cd)) {
defaultSuperclass(that.getExtendedType(), cd);
}
super.visit(that);
handleHeader(cd, that);
Tree.ParameterList pl = that.getParameterList();
if (pl!=null && cd.hasConstructors()) {
pl.addError("class with parameters may not declare constructors: class '" +
cd.getName() +
"' has a parameter list and a constructor");
}
if (pl==null && !cd.hasConstructors()) {
that.addError("class without parameters must declare at least one constructor: class '" +
cd.getName() +
"' has neither parameter list nor constructors",
1001);
}
}
@Override
public void visit(Tree.InterfaceDefinition that) {
Interface id = that.getDeclarationModel();
id.setExtendedType(null);
id.getSatisfiedTypes().clear();
Class od = unit.getObjectDeclaration();
if (od!=null) {
id.setExtendedType(od.getType());
}
super.visit(that);
}
@Override
public void visit(Tree.TypeParameterDeclaration that) {
TypeParameter p = that.getDeclarationModel();
p.setExtendedType(null);
p.getSatisfiedTypes().clear();
Class vd = unit.getAnythingDeclaration();
if (vd!=null) {
p.setExtendedType(vd.getType());
}
super.visit(that);
Tree.TypeSpecifier ts = that.getTypeSpecifier();
if (ts!=null) {
Tree.StaticType type = ts.getType();
if (type!=null) {
Type dta = type.getTypeModel();
Declaration dec = p.getDeclaration();
if (dta!=null &&
dta.involvesDeclaration(dec)) {
type.addError("default type argument involves parameterized type: '" +
dta.asString(unit) +
"' involves '" + dec.getName(unit) +
"'");
dta = null;
}
/*else if (dta.containsTypeParameters()) {
type.addError("default type argument involves type parameters: " +
dta.asString(unit));
dta = null;
}*/
p.setDefaultTypeArgument(dta);
}
}
}
@Override
public void visit(Tree.TypeParameterList that) {
super.visit(that);
List<Tree.TypeParameterDeclaration> tpds =
that.getTypeParameterDeclarations();
List<TypeParameter> params =
new ArrayList<TypeParameter>
(tpds.size());
for (int i=tpds.size()-1; i>=0; i--) {
Tree.TypeParameterDeclaration tpd = tpds.get(i);
if (tpd!=null) {
TypeParameter tp =
tpd.getDeclarationModel();
Type dta =
tp.getDefaultTypeArgument();
if (dta!=null) {
params.add(tp);
if (dta.involvesTypeParameters(params)) {
tpd.getTypeSpecifier()
.addError("default type argument involves a type parameter not yet declared");
}
}
}
}
}
@Override
public void visit(Tree.ClassDeclaration that) {
ClassAlias td =
(ClassAlias)
that.getDeclarationModel();
td.setExtendedType(null);
super.visit(that);
handleHeader(td, that);
Tree.ClassSpecifier cs = that.getClassSpecifier();
if (cs==null) {
that.addError("missing class body or aliased class reference");
}
else {
Tree.ExtendedType et =
that.getExtendedType();
if (et!=null) {
et.addError("class alias may not extend a type");
}
Tree.SatisfiedTypes sts =
that.getSatisfiedTypes();
if (sts!=null) {
sts.addError("class alias may not satisfy a type");
}
Tree.CaseTypes cts =
that.getCaseTypes();
if (cts!=null) {
that.addError("class alias may not have cases or a self type");
}
Tree.SimpleType ct = cs.getType();
if (ct==null) {
// that.addError("malformed aliased class");
}
else if (!(ct instanceof Tree.StaticType)) {
ct.addError("aliased type must be a class");
}
else {
Type type = ct.getTypeModel();
if (type!=null && !type.isUnknown()) {
TypeDeclaration dec =
type.getDeclaration();
td.setConstructor(dec);
if (dec instanceof Constructor) {
if (dec.isAbstract()) {
ct.addError("aliases a partial constructor: '" +
dec.getName(unit) +
"' is declared abstract");
}
if (td.isShared() && !dec.isShared()) {
ct.addError("shared alias of an unshared constructor: '" +
dec.getName(unit) +
"' is not shared");
}
type = type.getExtendedType();
dec = dec.getExtendedType()
.getDeclaration();
}
if (dec instanceof Class) {
td.setExtendedType(type);
}
else {
ct.addError("not a class: '" +
dec.getName(unit) + "'");
}
TypeDeclaration etd =
ct.getDeclarationModel();
if (etd==td) {
ct.addError("directly aliases itself: '" +
td.getName() + "'");
}
}
}
}
}
@Override
public void visit(Tree.InterfaceDeclaration that) {
Interface id = that.getDeclarationModel();
id.setExtendedType(null);
super.visit(that);
Tree.TypeSpecifier typeSpecifier =
that.getTypeSpecifier();
if (typeSpecifier==null) {
if (!id.isNative()) {
that.addError("missing interface body or aliased interface reference");
}
}
else {
Tree.SatisfiedTypes sts =
that.getSatisfiedTypes();
if (sts!=null) {
sts.addError("interface alias may not satisfy a type");
}
Tree.CaseTypes cts =
that.getCaseTypes();
if (cts!=null) {
that.addError("class alias may not have cases or a self type");
}
Tree.StaticType et =
typeSpecifier.getType();
if (et==null) {
// that.addError("malformed aliased interface");
}
else if (!(et instanceof Tree.StaticType)) {
typeSpecifier
.addError("aliased type must be an interface");
}
else {
Type type = et.getTypeModel();
if (type!=null && !type.isUnknown()) {
TypeDeclaration dec =
type.getDeclaration();
if (dec instanceof Interface) {
id.setExtendedType(type);
}
else {
et.addError("not an interface: '" +
dec.getName(unit) +
"'");
}
}
}
}
}
@Override
public void visit(Tree.TypeAliasDeclaration that) {
TypeAlias ta = that.getDeclarationModel();
ta.setExtendedType(null);
super.visit(that);
Tree.SatisfiedTypes sts = that.getSatisfiedTypes();
if (sts!=null) {
sts.addError("type alias may not satisfy a type");
}
TypeSpecifier typeSpecifier =
that.getTypeSpecifier();
if (typeSpecifier==null) {
that.addError("missing aliased type");
}
else {
Tree.StaticType et = typeSpecifier.getType();
if (et==null) {
that.addError("malformed aliased type");
}
else {
Type type = et.getTypeModel();
if (type!=null) {
setTypeConstructor(et, null);
ta.setExtendedType(type);
}
}
}
}
private boolean isInitializerParameter(FunctionOrValue dec) {
return dec!=null &&
dec.isParameter() &&
dec.getInitializerParameter()
.isHidden();
}
@Override
public void visit(Tree.MethodDeclaration that) {
super.visit(that);
Tree.SpecifierExpression sie =
that.getSpecifierExpression();
Function dec = that.getDeclarationModel();
handleHeader(dec, that);
if (isInitializerParameter(dec)) {
if (sie!=null) {
sie.addError("function is an initializer parameter and may not have an initial value: '" +
dec.getName() + "'");
}
}
if (sie==null && isNativeImplementation(dec)) {
that.addError("missing method body for native function implementation");
}
}
@Override
public void visit(Tree.MethodDefinition that) {
super.visit(that);
Function dec = that.getDeclarationModel();
handleHeader(dec, that);
if (isInitializerParameter(dec)) {
that.getBlock()
.addError("function is an initializer parameter and may not have a body: '" +
dec.getName() + "'");
}
}
@Override
public void visit(Tree.AttributeDeclaration that) {
super.visit(that);
Tree.SpecifierOrInitializerExpression sie =
that.getSpecifierOrInitializerExpression();
Value dec = that.getDeclarationModel();
handleHeader(dec, that);
if (isInitializerParameter(dec)) {
Parameter param = dec.getInitializerParameter();
Tree.Type type = that.getType();
if (type instanceof Tree.SequencedType) {
param.setSequenced(true);
Tree.SequencedType st =
(Tree.SequencedType) type;
param.setAtLeastOne(st.getAtLeastOne());
}
if (sie!=null) {
sie.addError("value is an initializer parameter and may not have an initial value: '" +
dec.getName() + "'");
}
}
if (sie==null && isNativeImplementation(dec)) {
that.addError("missing method body for native value implementation");
}
}
@Override
public void visit(Tree.AttributeGetterDefinition that) {
super.visit(that);
Value dec = that.getDeclarationModel();
handleHeader(dec, that);
if (isInitializerParameter(dec)) {
that.getBlock()
.addError("value is an initializer parameter and may not have a body: '" +
dec.getName() + "'");
}
}
void checkExtendedTypeExpression(Tree.Type type) {
if (type instanceof Tree.QualifiedType) {
Tree.QualifiedType qualifiedType =
(Tree.QualifiedType) type;
Tree.StaticType outerType =
qualifiedType.getOuterType();
if (!(outerType instanceof Tree.SuperType)) {
TypeDeclaration otd =
qualifiedType.getDeclarationModel();
if (otd!=null) {
if (otd.isStaticallyImportable() ||
otd instanceof Constructor) {
checkExtendedTypeExpression(outerType);
}
else {
outerType.addError("illegal qualifier in constructor delegation (must be super)");
}
}
}
}
}
private static void inheritedType(Tree.StaticType st) {
if (st instanceof Tree.SimpleType) {
((Tree.SimpleType) st).setInherited(true);
}
}
@Override
public void visit(Tree.DelegatedConstructor that) {
inDelegatedConstructor = true;
super.visit(that);
inDelegatedConstructor = false;
checkExtendedTypeExpression(that.getType());
inheritedType(that.getType());
}
@Override
public void visit(Tree.ClassSpecifier that) {
inExtendsOrClassAlias = true;
super.visit(that);
inExtendsOrClassAlias = false;
checkExtendedTypeExpression(that.getType());
inheritedType(that.getType());
}
@Override
public void visit(Tree.ExtendedType that) {
inExtendsOrClassAlias =
that.getInvocationExpression()!=null;
super.visit(that);
inExtendsOrClassAlias = false;
inheritedType(that.getType());
checkExtendedTypeExpression(that.getType());
TypeDeclaration td =
(TypeDeclaration)
that.getScope();
if (!td.isAlias()) {
Tree.SimpleType et = that.getType();
if (et!=null) {
Type type = et.getTypeModel();
if (type!=null) {
TypeDeclaration etd =
et.getDeclarationModel();
if (etd!=null &&
!(etd instanceof UnknownType)) {
if (etd instanceof Constructor) {
type = type.getExtendedType();
etd = etd.getExtendedType()
.getDeclaration();
}
if (etd==td) {
//unnecessary, handled by SupertypeVisitor
// et.addError("directly extends itself: '" +
// td.getName() + "'");
}
else if (etd instanceof TypeParameter) {
et.addError("directly extends a type parameter: '" +
type.getDeclaration().getName(unit) +
"'");
}
else if (etd instanceof Interface) {
et.addError("extends an interface: '" +
type.getDeclaration().getName(unit) +
"'");
}
else if (etd instanceof TypeAlias) {
et.addError("extends a type alias: '" +
type.getDeclaration().getName(unit) +
"'");
}
else if (etd instanceof NothingType) {
et.addError("extends the bottom type 'Nothing'");
}
else {
td.setExtendedType(type);
}
}
}
}
}
}
@Override
public void visit(Tree.SatisfiedTypes that) {
super.visit(that);
TypeDeclaration td =
(TypeDeclaration)
that.getScope();
if (td.isAlias()) {
return;
}
List<Tree.StaticType> types = that.getTypes();
List<Type> list =
new ArrayList<Type>
(types.size());
if (types.isEmpty()) {
that.addError("missing types in satisfies");
}
boolean foundTypeParam = false;
boolean foundClass = false;
boolean foundInterface = false;
for (Tree.StaticType st: types) {
inheritedType(st);
Type type = st.getTypeModel();
if (type!=null) {
TypeDeclaration std = type.getDeclaration();
if (std!=null &&
!(std instanceof UnknownType)) {
if (std==td) {
//unnecessary, handled by SupertypeVisitor
// st.addError("directly extends itself: '" +
// td.getName() + "'");
}
else if (std instanceof NothingType) {
st.addError("satisfies the bottom type 'Nothing'");
}
else if (std instanceof TypeAlias) {
st.addError("satisfies a type alias: '" +
type.getDeclaration().getName(unit) +
"'");
}
else if (std instanceof Constructor) {
//nothing to do
}
else if (td instanceof TypeParameter) {
if (foundTypeParam) {
st.addUnsupportedError("type parameter upper bounds are not yet supported in combination with other bounds");
}
else if (std instanceof TypeParameter) {
if (foundClass||foundInterface) {
st.addUnsupportedError("type parameter upper bounds are not yet supported in combination with other bounds");
}
foundTypeParam = true;
list.add(type);
}
else if (std instanceof Class) {
if (foundClass) {
st.addUnsupportedError("multiple class upper bounds are not yet supported");
}
foundClass = true;
list.add(type);
}
else if (std instanceof Interface) {
foundInterface = true;
list.add(type);
}
else {
st.addError("upper bound must be a class, interface, or type parameter");
}
}
else {
if (std instanceof TypeParameter) {
st.addError("directly satisfies type parameter: '" +
std.getName(unit) + "'");
}
else if (std instanceof Class) {
st.addError("satisfies a class: '" +
std.getName(unit) + "'");
}
else if (std instanceof Interface) {
if (td.isDynamic() &&
!std.isDynamic()) {
st.addError("dynamic interface satisfies a non-dynamic interface: '" +
std.getName(unit) + "'");
}
else {
list.add(type);
}
}
else {
st.addError("satisfied type must be an interface");
}
}
}
}
}
td.setSatisfiedTypes(list);
}
/*@Override
public void visit(Tree.TypeConstraint that) {
super.visit(that);
if (that.getSelfType()!=null) {
TypeDeclaration td = (TypeDeclaration) that.getSelfType().getScope();
TypeParameter tp = that.getDeclarationModel();
td.setSelfType(tp.getType());
if (tp.isSelfType()) {
that.addError("type parameter may not act as self type for two different types");
}
else {
tp.setSelfTypedDeclaration(td);
}
}
}*/
@Override
public void visit(Tree.CaseTypes that) {
super.visit(that);
TypeDeclaration td =
(TypeDeclaration)
that.getScope();
List<Tree.BaseMemberExpression> bmes =
that.getBaseMemberExpressions();
List<Tree.StaticType> cts = that.getTypes();
List<Type> list =
new ArrayList<Type>
(bmes.size()+cts.size());
if (td instanceof TypeParameter) {
if (!bmes.isEmpty()) {
that.addError("cases of type parameter must be a types");
}
}
else {
for (Tree.BaseMemberExpression bme: bmes) {
//bmes have not yet been resolved
TypedDeclaration od =
getTypedDeclaration(bme.getScope(),
name(bme.getIdentifier()),
null, false, bme.getUnit());
if (od!=null) {
Type type = od.getType();
if (type!=null) {
list.add(type);
}
}
}
}
for (Tree.StaticType ct: cts) {
inheritedType(ct);
Type type = ct.getTypeModel();
if (type!=null) {
if (!isTypeUnknown(type)) {
if (type.isUnion() ||
type.isIntersection() ||
type.isNothing()) {
//union/intersection types don't have equals()
if (td instanceof TypeParameter) {
ct.addError("enumerated bound must be a class or interface type");
}
else {
ct.addError("case type must be a class, interface, or self type");
}
}
else {
TypeDeclaration ctd = type.getDeclaration();
if (ctd.equals(td)) {
ct.addError("directly enumerates itself: '" +
td.getName() + "'");
}
else if (type.isClassOrInterface()) {
list.add(type);
}
else if (type.isTypeParameter()) {
if (td instanceof TypeParameter) {
list.add(type);
}
else {
TypeParameter tp =
(TypeParameter) ctd;
td.setSelfType(type);
if (tp.isSelfType()) {
ct.addError("type parameter may not act as self type for two different types");
}
else {
tp.setSelfTypedDeclaration(td);
list.add(type);
}
if (cts.size()>1) {
ct.addError("a type may not have more than one self type");
}
}
}
else {
if (td instanceof TypeParameter) {
ct.addError("enumerated bound must be a class or interface type");
}
else {
ct.addError("case type must be a class, interface, or self type");
}
}
}
}
}
}
if (!list.isEmpty()) {
if (list.size() == 1 &&
list.get(0).getDeclaration()
.isSelfType()) {
Scope scope =
list.get(0)
.getDeclaration()
.getContainer();
if (scope instanceof ClassOrInterface) {
ClassOrInterface ci =
(ClassOrInterface) scope;
if (!ci.isAbstract()) {
that.addError("non-abstract class parameterized by self type: '" +
td.getName() + "'", 905);
}
}
}
else {
if (td instanceof ClassOrInterface) {
ClassOrInterface ci =
(ClassOrInterface) td;
if (!ci.isAbstract()) {
that.addError("non-abstract class has enumerated subtypes: '" +
td.getName() + "'", 905);
}
}
}
td.setCaseTypes(list);
}
}
@Override
public void visit(Tree.InitializerParameter that) {
super.visit(that);
Parameter p = that.getParameterModel();
String name = p.getName();
Declaration a =
that.getScope()
.getDirectMember(name, null, false);
if (a==null) {
//Now done in ExpressionVisitor!
// that.addError("parameter declaration does not exist: '" + p.getName() + "'");
}
else if (!isLegalParameter(a)) {
that.addError("parameter is not a reference value or function: '" +
name + "'");
}
else {
if (a.isFormal()) {
that.addError("parameter is a formal attribute: '" +
name + "'", 320);
}
FunctionOrValue mov = (FunctionOrValue) a;
mov.setInitializerParameter(p);
p.setModel(mov);
}
/*if (isGeneric(a)) {
that.addError("parameter declaration is generic: '" +
name + "' may not declare type parameters");
}*/
if (p.isDefaulted()) {
checkDefaultArg(that.getSpecifierExpression(), p);
}
}
public boolean isLegalParameter(Declaration a) {
if (a instanceof Value) {
Value v = (Value) a;
if (v.isTransient()) {
return false;
}
else {
TypeDeclaration td = v.getTypeDeclaration();
return !(td instanceof Class) ||
!td.isAnonymous();
}
}
else if (a instanceof Function) {
return true;
}
else {
return false;
}
}
@Override
public void visit(Tree.AnyAttribute that) {
super.visit(that);
Tree.Type type = that.getType();
if (type instanceof Tree.SequencedType) {
Value v = (Value) that.getDeclarationModel();
Parameter p = v.getInitializerParameter();
if (p==null) {
type.addError("value is not a parameter, so may not be variadic: '" +
v.getName() + "'");
}
else {
p.setSequenced(true);
}
}
}
@Override
public void visit(Tree.AnyMethod that) {
super.visit(that);
Tree.Type type = that.getType();
if (type instanceof Tree.SequencedType) {
type.addError("function type may not be variadic");
}
}
@Override
public void visit(Tree.QualifiedMemberOrTypeExpression that) {
Tree.Primary primary = that.getPrimary();
if (primary instanceof Tree.MemberOrTypeExpression) {
Tree.MemberOrTypeExpression mte =
(Tree.MemberOrTypeExpression) primary;
if (mte instanceof Tree.BaseTypeExpression ||
mte instanceof Tree.QualifiedTypeExpression) {
that.setStaticMethodReference(true);
mte.setStaticMethodReferencePrimary(true);
if (that.getDirectlyInvoked()) {
mte.setDirectlyInvoked(true);
}
}
}
if (primary instanceof Tree.Package) {
((Tree.Package) primary).setQualifier(true);
}
super.visit(that);
}
@Override
public void visit(Tree.InvocationExpression that) {
Tree.Term primary =
unwrapExpressionUntilTerm(that.getPrimary());
if (primary instanceof Tree.MemberOrTypeExpression) {
Tree.MemberOrTypeExpression mte =
(Tree.MemberOrTypeExpression) primary;
mte.setDirectlyInvoked(true);
}
super.visit(that);
}
private static Tree.SpecifierOrInitializerExpression
getSpecifier(Tree.ParameterDeclaration that) {
Tree.TypedDeclaration dec =
that.getTypedDeclaration();
if (dec instanceof Tree.AttributeDeclaration) {
Tree.AttributeDeclaration ad =
(Tree.AttributeDeclaration) dec;
return ad.getSpecifierOrInitializerExpression();
}
else if (dec instanceof Tree.MethodDeclaration) {
Tree.MethodDeclaration md =
(Tree.MethodDeclaration) dec;
return md.getSpecifierExpression();
}
else {
return null;
}
}
private void checkDefaultArg(Tree.SpecifierOrInitializerExpression se,
Parameter p) {
if (se!=null) {
if (se.getScope() instanceof Specification) {
se.addError("parameter of specification statement may not define default value");
}
else {
Declaration d = p.getDeclaration();
if (d.isActual()) {
se.addError("parameter of actual declaration may not define default value: parameter '" +
p.getName() + "' of '" +
p.getDeclaration().getName() +
"'");
}
}
}
}
@Override public void visit(Tree.ParameterDeclaration that) {
super.visit(that);
Parameter p = that.getParameterModel();
if (p.isDefaulted()) {
if (p.getDeclaration().isParameter()) {
getSpecifier(that)
.addError("parameter of callable parameter may not have default argument");
}
checkDefaultArg(getSpecifier(that), p);
}
}
private Declaration handleHeader(Declaration dec,
Node that) {
if (Backend.None.nativeAnnotation.equals(dec.getNativeBackend())
&& !backendSupport.supportsBackend(Backend.None)) {
BackendSupport backend =
inBackend == null ?
backendSupport :
inBackend.backendSupport;
Declaration hdr = dec;
if (!hdr.isNativeHeader()) {
hdr = getNativeHeader(dec.getContainer(), dec.getName());
}
Declaration impl =
getNativeDeclaration(dec, backend);
if (impl==null && hdr != null) {
if (!isImplemented(hdr) && hdr.isShared()) {
that.addError("no native implementation for backend: native '"
+ dec.getName(unit) +
"' is not implemented for one or more backends");
}
} else if (hdr==null) {
that.addError("native implementation must have a header: "
+ dec.getName(unit));
}
return inBackend == null || impl==null ?
dec : impl;
}
return dec;
}
}
|
package com.opensymphony.workflow.designer;
import java.util.List;
import java.util.ArrayList;
import javax.swing.*;
public class GraphTabbedPane extends JTabbedPane
{
private List graphs = new ArrayList();
public WorkflowGraph getCurrentGraph()
{
int index = getSelectedIndex();
if(index == -1 || index >= graphs.size()) return null;
return (WorkflowGraph)graphs.get(index);
}
public boolean selectWorkflow(String workflowName)
{
for(int i = 0; i < getTabCount(); i++)
{
String name = getTitleAt(i);
if(name.equals(workflowName))
{
setSelectedIndex(i);
return true;
}
}
return false;
}
public void addGraph(WorkflowGraph graph)
{
graphs.add(graph);
add(graph.getName(), new JScrollPane(graph));
setSelectedIndex(getComponentCount() - 1);
}
public void removeAll()
{
super.removeAll();
graphs.clear();
}
public WorkflowGraph[] getGraphs()
{
WorkflowGraph[] g = new WorkflowGraph[graphs.size()];
graphs.toArray(g);
return g;
}
public void renameGraph(String name, String newName)
{
for(int i=0;i<graphs.size();i++)
{
WorkflowGraph graph = (WorkflowGraph)graphs.get(i);
if(graph.getName().equals(name))
{
graph.setName(newName);
setTitleAt(i, newName);
return;
}
}
}
}
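// Usage sketch (illustrative; WorkflowGraph is assumed to expose getName()/setName()):
//   GraphTabbedPane pane = new GraphTabbedPane();
//   pane.addGraph(graph);                // adds a tab titled graph.getName() and selects it
//   pane.renameGraph("old", "new");      // updates both the graph model and the tab title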
|
package dr.evomodel.treedatalikelihood;
/**
* BeagleDataLikelihoodDelegate
*
* A DataLikelihoodDelegate that uses BEAGLE
*
* @author Andrew Rambaut
* @author Marc Suchard
* @version $Id$
*/
import beagle.*;
import dr.evomodel.branchmodel.BranchModel;
import dr.evomodel.siteratemodel.SiteRateModel;
import dr.evomodel.treelikelihood.*;
import dr.evolution.alignment.PatternList;
import dr.evolution.alignment.UncertainSiteList;
import dr.evolution.datatype.DataType;
import dr.evolution.tree.Tree;
import dr.evolution.util.TaxonList;
import dr.evomodel.tipstatesmodel.TipStatesModel;
import dr.inference.model.AbstractModel;
import dr.inference.model.Model;
import dr.inference.model.Parameter;
import dr.inference.model.Variable;
import dr.math.matrixAlgebra.Vector;
import dr.util.Citable;
import dr.util.Citation;
import dr.util.CommonCitations;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.logging.Logger;
public class BeagleDataLikelihoodDelegate extends AbstractModel implements DataLikelihoodDelegate, Citable {
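// Checks that the installed BEAGLE library reports major version >= 3 and minor version >= 1;
// used below to decide whether beagle.setCPUThreadCount(...) may be called.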
public static boolean IS_THREAD_COUNT_COMPATIBLE() {
int[] versionNumbers = BeagleInfo.getVersionNumbers();
return versionNumbers.length != 0 && versionNumbers[0] >= 3 && versionNumbers[1] >= 1;
}
// This property is a comma-delimited list of resource numbers (0 == CPU) to
// allocate each BEAGLE instance to. If less than the number of instances then
// will wrap around.
private static final String RESOURCE_AUTO_PROPERTY = "beagle.resource.auto";
private static final String RESOURCE_ORDER_PROPERTY = "beagle.resource.order";
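// Illustrative example of the wrap-around: if "beagle.resource.order" were set to "1,0",
// successive instances would be assigned resources 1, 0, 1, 0, ... via
// resourceOrder.get(instanceCount % resourceOrder.size()).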
private static final String PREFERRED_FLAGS_PROPERTY = "beagle.preferred.flags";
private static final String REQUIRED_FLAGS_PROPERTY = "beagle.required.flags";
private static final String SCALING_PROPERTY = "beagle.scaling";
private static final String RESCALE_FREQUENCY_PROPERTY = "beagle.rescale";
private static final String DELAY_SCALING_PROPERTY = "beagle.delay.scaling";
private static final String EXTRA_BUFFER_COUNT_PROPERTY = "beagle.extra.buffer.count";
private static final String FORCE_VECTORIZATION = "beagle.force.vectorization";
private static final String THREAD_COUNT = "beagle.thread.count";
// Which scheme to use if choice not specified (or 'default' is selected):
private static final PartialsRescalingScheme DEFAULT_RESCALING_SCHEME = PartialsRescalingScheme.DYNAMIC;
private static int instanceCount = 0;
private static List<Integer> resourceOrder = null;
private static List<Integer> preferredOrder = null;
private static List<Integer> requiredOrder = null;
private static List<String> scalingOrder = null;
private static List<Integer> extraBufferOrder = null;
// Default frequency for complete recomputation of scaling factors under the 'dynamic' scheme
private static final int RESCALE_FREQUENCY = 100;
private static final int RESCALE_TIMES = 1;
private static final boolean RESCALING_OFF = false; // a debugging switch
private static final boolean DEBUG = false;
/**
* @param tree Used for configuration - shouldn't be watched for changes
* @param patternList List of patterns
* @param branchModel Specifies the substitution model for each branch
* @param siteRateModel Specifies the rates per site
* @param useAmbiguities Whether to respect state ambiguities in the data
* @param rescalingScheme The partials rescaling scheme to use
* @param delayRescalingUntilUnderflow Whether to postpone rescaling until an underflow occurs
*/
public BeagleDataLikelihoodDelegate(Tree tree,
PatternList patternList,
BranchModel branchModel,
SiteRateModel siteRateModel,
boolean useAmbiguities,
PartialsRescalingScheme rescalingScheme,
boolean delayRescalingUntilUnderflow) {
super("BeagleDataLikelihoodDelegate");
final Logger logger = Logger.getLogger("dr.evomodel");
logger.info("\nUsing BEAGLE DataLikelihood Delegate");
setId(patternList.getId());
this.dataType = patternList.getDataType();
this.patternList = patternList;
patternCount = patternList.getPatternCount();
stateCount = dataType.getStateCount();
// Check for matching state counts
int stateCount2 = branchModel.getRootFrequencyModel().getFrequencyCount();
if (stateCount != stateCount2) {
throw new IllegalArgumentException("Pattern state count (" + stateCount
+ ") does not match substitution model state count (" + stateCount2 + ")");
}
patternWeights = patternList.getPatternWeights();
this.branchModel = branchModel;
addModel(this.branchModel);
this.siteRateModel = siteRateModel;
addModel(this.siteRateModel);
this.categoryCount = this.siteRateModel.getCategoryCount();
nodeCount = tree.getNodeCount();
tipCount = tree.getExternalNodeCount();
internalNodeCount = nodeCount - tipCount;
branchUpdateIndices = new int[nodeCount];
branchLengths = new double[nodeCount];
scaleBufferIndices = new int[internalNodeCount];
storedScaleBufferIndices = new int[internalNodeCount];
operations = new int[internalNodeCount * Beagle.OPERATION_TUPLE_SIZE];
firstRescaleAttempt = true;
try {
int compactPartialsCount = tipCount;
if (useAmbiguities) {
// if we are using ambiguities then we don't use tip partials
compactPartialsCount = 0;
}
// one partials buffer for each tip and two for each internal node (for store restore)
partialBufferHelper = new BufferIndexHelper(nodeCount, tipCount);
// one scaling buffer for each internal node plus an extra for the accumulation, then doubled for store/restore
scaleBufferHelper = new BufferIndexHelper(getScaleBufferCount(), 0);
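// Worked example (assuming BufferIndexHelper(max, min) allocates min + 2 * (max - min) buffers,
// matching the comments above): for a 4-tip tree, nodeCount = 7 and internalNodeCount = 3,
// giving 4 + 2*3 = 10 partials buffers and, with getScaleBufferCount() = 3 + 1 = 4,
// 2*4 = 8 scale buffers.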
evolutionaryProcessDelegate = new HomogenousSubstitutionModelDelegate(tree, branchModel);
// Attempt to get the resource order from the System Property
if (resourceOrder == null) {
resourceOrder = parseSystemPropertyIntegerArray(RESOURCE_ORDER_PROPERTY);
}
if (preferredOrder == null) {
preferredOrder = parseSystemPropertyIntegerArray(PREFERRED_FLAGS_PROPERTY);
}
if (requiredOrder == null) {
requiredOrder = parseSystemPropertyIntegerArray(REQUIRED_FLAGS_PROPERTY);
}
if (scalingOrder == null) {
scalingOrder = parseSystemPropertyStringArray(SCALING_PROPERTY);
}
if (extraBufferOrder == null) {
extraBufferOrder = parseSystemPropertyIntegerArray(EXTRA_BUFFER_COUNT_PROPERTY);
}
// first set the rescaling scheme to use from the parser
this.rescalingScheme = rescalingScheme;
this.delayRescalingUntilUnderflow = delayRescalingUntilUnderflow;
int[] resourceList = null;
long preferenceFlags = 0;
long requirementFlags = 0;
if (scalingOrder.size() > 0) {
this.rescalingScheme = PartialsRescalingScheme.parseFromString(
scalingOrder.get(instanceCount % scalingOrder.size()));
}
if (resourceOrder.size() > 0) {
// added the zero on the end so that a CPU is selected if requested resource fails
resourceList = new int[]{resourceOrder.get(instanceCount % resourceOrder.size()), 0};
if (resourceList[0] > 0) {
preferenceFlags |= BeagleFlag.PROCESSOR_GPU.getMask(); // Add preference weight against CPU
}
}
if (preferredOrder.size() > 0) {
preferenceFlags = preferredOrder.get(instanceCount % preferredOrder.size());
}
if (requiredOrder.size() > 0) {
requirementFlags = requiredOrder.get(instanceCount % requiredOrder.size());
}
// Define default behaviour here
if (this.rescalingScheme == PartialsRescalingScheme.DEFAULT) {
//if GPU: the default is dynamic scaling in BEAST
if (resourceList != null && resourceList[0] > 1) {
this.rescalingScheme = DEFAULT_RESCALING_SCHEME;
} else { // if CPU: just run as fast as possible
// this.rescalingScheme = PartialsRescalingScheme.NONE;
// Dynamic should run as fast as none until first underflow
this.rescalingScheme = DEFAULT_RESCALING_SCHEME;
}
}
// to keep behaviour of the delayed scheme (always + delay)...
if (this.rescalingScheme == PartialsRescalingScheme.DELAYED) {
this.delayRescalingUntilUnderflow = true;
this.rescalingScheme = PartialsRescalingScheme.ALWAYS;
}
if (this.rescalingScheme == PartialsRescalingScheme.AUTO) {
preferenceFlags |= BeagleFlag.SCALING_AUTO.getMask();
useAutoScaling = true;
} else {
// preferenceFlags |= BeagleFlag.SCALING_MANUAL.getMask();
}
String r = System.getProperty(RESCALE_FREQUENCY_PROPERTY);
if (r != null) {
rescalingFrequency = Integer.parseInt(r);
if (rescalingFrequency < 1) {
rescalingFrequency = RESCALE_FREQUENCY;
}
}
String d = System.getProperty(DELAY_SCALING_PROPERTY);
if (d != null) {
this.delayRescalingUntilUnderflow = Boolean.parseBoolean(d);
}
if (preferenceFlags == 0 && resourceList == null) { // else determine dataset characteristics
if (stateCount == 4 && patternList.getPatternCount() < 10000) // TODO determine good cut-off
preferenceFlags |= BeagleFlag.PROCESSOR_CPU.getMask();
}
boolean forceVectorization = false;
String vectorizationString = System.getProperty(FORCE_VECTORIZATION);
if (vectorizationString != null) {
forceVectorization = true;
}
String tc = System.getProperty(THREAD_COUNT);
if (tc != null) {
threadCount = Integer.parseInt(tc);
if (threadCount < 2) {
threadCount = 1;
}
}
if (BeagleFlag.VECTOR_SSE.isSet(preferenceFlags) && (stateCount != 4)
&& !forceVectorization
) {
// @todo SSE doesn't seem to work for larger state spaces so for now we override the
// SSE option.
preferenceFlags &= ~BeagleFlag.VECTOR_SSE.getMask();
preferenceFlags |= BeagleFlag.VECTOR_NONE.getMask();
if (stateCount > 4 && this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
this.rescalingScheme = PartialsRescalingScheme.DELAYED;
}
}
if (!BeagleFlag.PRECISION_SINGLE.isSet(preferenceFlags)) {
// if single precision not explicitly set then prefer double
preferenceFlags |= BeagleFlag.PRECISION_DOUBLE.getMask();
}
if (evolutionaryProcessDelegate.canReturnComplexDiagonalization()) {
requirementFlags |= BeagleFlag.EIGEN_COMPLEX.getMask();
}
if ((resourceList == null &&
(BeagleFlag.PROCESSOR_GPU.isSet(preferenceFlags) ||
BeagleFlag.FRAMEWORK_CUDA.isSet(preferenceFlags) ||
BeagleFlag.FRAMEWORK_OPENCL.isSet(preferenceFlags)))
||
(resourceList != null && resourceList[0] > 0)) {
// non-CPU implementations don't have SSE so remove default preference for SSE
// when using non-CPU preferences or prioritising non-CPU resource
preferenceFlags &= ~BeagleFlag.VECTOR_SSE.getMask();
}
// start auto resource selection
String resourceAuto = System.getProperty(RESOURCE_AUTO_PROPERTY);
if (resourceAuto != null && Boolean.parseBoolean(resourceAuto)) {
long benchmarkFlags = 0;
if (this.rescalingScheme == PartialsRescalingScheme.NONE) {
benchmarkFlags = BeagleBenchmarkFlag.SCALING_NONE.getMask();
} else if (this.rescalingScheme == PartialsRescalingScheme.ALWAYS) {
benchmarkFlags = BeagleBenchmarkFlag.SCALING_ALWAYS.getMask();
} else {
benchmarkFlags = BeagleBenchmarkFlag.SCALING_DYNAMIC.getMask();
}
logger.info("\nRunning benchmarks to automatically select fastest BEAGLE resource for analysis or partition... ");
List<BenchmarkedResourceDetails> benchmarkedResourceDetails =
BeagleFactory.getBenchmarkedResourceDetails(
tipCount,
compactPartialsCount,
stateCount,
patternCount,
categoryCount,
resourceList,
preferenceFlags,
requirementFlags,
1, // eigenModelCount,
1, // partitionCount,
0, // calculateDerivatives,
benchmarkFlags);
logger.info(" Benchmark results, from fastest to slowest:");
for (BenchmarkedResourceDetails benchmarkedResource : benchmarkedResourceDetails) {
logger.info(benchmarkedResource.toString());
}
resourceList = new int[]{benchmarkedResourceDetails.get(0).getResourceNumber()};
}
// end auto resource selection
beagle = BeagleFactory.loadBeagleInstance(
tipCount,
partialBufferHelper.getBufferCount(),
compactPartialsCount,
stateCount,
patternCount,
evolutionaryProcessDelegate.getEigenBufferCount(),
evolutionaryProcessDelegate.getMatrixBufferCount(),
categoryCount,
scaleBufferHelper.getBufferCount(), // Always allocate; they may become necessary
resourceList,
preferenceFlags,
requirementFlags
);
InstanceDetails instanceDetails = beagle.getDetails();
ResourceDetails resourceDetails = null;
if (instanceDetails != null) {
resourceDetails = BeagleFactory.getResourceDetails(instanceDetails.getResourceNumber());
if (resourceDetails != null) {
StringBuilder sb = new StringBuilder(" Using BEAGLE resource ");
sb.append(resourceDetails.getNumber()).append(": ");
sb.append(resourceDetails.getName()).append("\n");
if (resourceDetails.getDescription() != null) {
String[] description = resourceDetails.getDescription().split("\\|");
for (String desc : description) {
if (desc.trim().length() > 0) {
sb.append(" ").append(desc.trim()).append("\n");
}
}
}
sb.append(" with instance flags: ").append(instanceDetails.toString());
logger.info(sb.toString());
} else {
logger.info(" Error retrieving BEAGLE resource for instance: " + instanceDetails.toString());
}
} else {
logger.info(" No external BEAGLE resources available, or resource list/requirements not met, using Java implementation");
}
instanceFlags = instanceDetails.getFlags();
if (IS_THREAD_COUNT_COMPATIBLE() && threadCount > 1) {
beagle.setCPUThreadCount(threadCount);
}
if (patternList instanceof UncertainSiteList) { // TODO Remove
useAmbiguities = true;
}
logger.info(" " + (useAmbiguities ? "Using" : "Ignoring") + " ambiguities in tree likelihood.");
logger.info(" With " + patternList.getPatternCount() + " unique site patterns.");
if (patternList.areUncertain() && !useAmbiguities) {
logger.info(" WARNING: Uncertain site patterns will be ignored.");
}
for (int i = 0; i < tipCount; i++) {
// Find the id of tip i in the patternList
String id = tree.getTaxonId(i);
int index = patternList.getTaxonIndex(id);
if (index == -1) {
throw new TaxonList.MissingTaxonException("Taxon, " + id + ", in tree, " + tree.getId() +
", is not found in patternList, " + patternList.getId());
} else {
if (useAmbiguities) {
setPartials(beagle, patternList, index, i);
} else {
setStates(beagle, patternList, index, i);
}
}
}
beagle.setPatternWeights(patternWeights);
String rescaleMessage = " Using rescaling scheme : " + this.rescalingScheme.getText();
if (this.rescalingScheme == PartialsRescalingScheme.AUTO &&
resourceDetails != null &&
(resourceDetails.getFlags() & BeagleFlag.SCALING_AUTO.getMask()) == 0) {
// If auto scaling in BEAGLE is not supported then do it here
this.rescalingScheme = PartialsRescalingScheme.DYNAMIC;
rescaleMessage = " Auto rescaling not supported in BEAGLE, using : " + this.rescalingScheme.getText();
}
boolean parenthesis = false;
if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
rescaleMessage += " (rescaling every " + rescalingFrequency + " evaluations";
parenthesis = true;
}
if (this.delayRescalingUntilUnderflow) {
rescaleMessage += (parenthesis ? ", " : "(") + "delay rescaling until first underflow";
parenthesis = true;
}
rescaleMessage += (parenthesis ? ")" : "");
logger.info(rescaleMessage);
if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
everUnderflowed = false; // If false, BEAST does not rescale until first under-/over-flow.
}
updateSubstitutionModel = true;
updateSiteModel = true;
} catch (TaxonList.MissingTaxonException mte) {
throw new RuntimeException(mte.toString());
}
instanceCount++;
}
@Override
public String getReport() {
return null;
}
@Override
public TreeTraversal.TraversalType getOptimalTraversalType() {
if ((instanceFlags & BeagleFlag.FRAMEWORK_CPU.getMask()) != 0) {
return TreeTraversal.TraversalType.POST_ORDER;
} else {
return TreeTraversal.TraversalType.REVERSE_LEVEL_ORDER;
}
}
@Override
public int getTraitCount() {
return 1;
}
@Override
public int getTraitDim() {
return patternCount;
}
@Override
public RateRescalingScheme getRateRescalingScheme() {
return RateRescalingScheme.NONE;
}
public PatternList getPatternList() {
return this.patternList;
}
private static List<Integer> parseSystemPropertyIntegerArray(String propertyName) {
List<Integer> order = new ArrayList<Integer>();
String r = System.getProperty(propertyName);
if (r != null) {
String[] parts = r.split(",");
for (String part : parts) {
try {
int n = Integer.parseInt(part.trim());
order.add(n);
} catch (NumberFormatException nfe) {
System.err.println("Invalid entry '" + part + "' in " + propertyName);
}
}
}
return order;
}
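// Usage sketch (illustrative): with -Dbeagle.resource.order=1,0,x this returns [1, 0] and
// prints "Invalid entry 'x' in beagle.resource.order" to standard error; with the property
// unset it returns an empty list.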
private static List<String> parseSystemPropertyStringArray(String propertyName) {
List<String> order = new ArrayList<String>();
String r = System.getProperty(propertyName);
if (r != null) {
String[] parts = r.split(",");
for (String part : parts) {
order.add(part.trim());
}
}
return order;
}
private int getScaleBufferCount() {
return internalNodeCount + 1;
}
/**
* Sets the partials from a sequence in an alignment.
*
* @param beagle beagle
* @param patternList patternList
* @param sequenceIndex sequenceIndex
* @param nodeIndex nodeIndex
*/
private final void setPartials(Beagle beagle,
PatternList patternList,
int sequenceIndex,
int nodeIndex) {
double[] partials = new double[patternCount * stateCount * categoryCount];
int v = 0;
for (int i = 0; i < patternCount; i++) {
if (patternList instanceof UncertainSiteList) {
((UncertainSiteList) patternList).fillPartials(sequenceIndex, i, partials, v);
v += stateCount;
// TODO Add this functionality to SimpleSiteList to avoid if statement here
} else if (patternList.areUncertain()) {
double[] prob = patternList.getUncertainPatternState(sequenceIndex, i);
System.arraycopy(prob, 0, partials, v, stateCount);
v += stateCount;
} else {
int state = patternList.getPatternState(sequenceIndex, i);
boolean[] stateSet = dataType.getStateSet(state);
for (int j = 0; j < stateCount; j++) {
if (stateSet[j]) {
partials[v] = 1.0;
} else {
partials[v] = 0.0;
}
v++;
}
}
}
// if there is more than one category then replicate the partials for each
int n = patternCount * stateCount;
int k = n;
for (int i = 1; i < categoryCount; i++) {
System.arraycopy(partials, 0, partials, k, n);
k += n;
}
beagle.setPartials(nodeIndex, partials);
}
/**
* Sets the partials from a sequence in an alignment.
*/
private final void setPartials(Beagle beagle,
TipStatesModel tipStatesModel,
int nodeIndex) {
double[] partials = new double[patternCount * stateCount * categoryCount];
tipStatesModel.getTipPartials(nodeIndex, partials);
// if there is more than one category then replicate the partials for each
int n = patternCount * stateCount;
int k = n;
for (int i = 1; i < categoryCount; i++) {
System.arraycopy(partials, 0, partials, k, n);
k += n;
}
beagle.setPartials(nodeIndex, partials);
}
/**
* Sets the tip states from a sequence in an alignment.
*
* @param beagle beagle
* @param patternList patternList
* @param sequenceIndex sequenceIndex
* @param nodeIndex nodeIndex
*/
private final void setStates(Beagle beagle,
PatternList patternList,
int sequenceIndex,
int nodeIndex) {
int i;
int[] states = new int[patternCount];
for (i = 0; i < patternCount; i++) {
states[i] = patternList.getPatternState(sequenceIndex, i);
}
beagle.setTipStates(nodeIndex, states);
}
/**
* Calculate the log likelihood of the current state.
*
* @return the log likelihood.
*/
@Override
public double calculateLikelihood(List<BranchOperation> branchOperations, List<NodeOperation> nodeOperations, int rootNodeNumber) throws LikelihoodException {
//recomputeScaleFactors = false;
if (DEBUG) {
System.out.println("Partition: " + this.getModelName());
}
if (!this.delayRescalingUntilUnderflow || everUnderflowed) {
if (this.rescalingScheme == PartialsRescalingScheme.ALWAYS || this.rescalingScheme == PartialsRescalingScheme.DELAYED) {
useScaleFactors = true;
recomputeScaleFactors = true;
} else if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
useScaleFactors = true;
if (rescalingCount > rescalingFrequency) {
if (DEBUG) {
System.out.println("rescalingCount > rescalingFrequency");
}
rescalingCount = 0;
rescalingCountInner = 0;
}
if (DEBUG) {
System.out.println("rescalingCountInner = " + rescalingCountInner);
}
if (rescalingCountInner < RESCALE_TIMES) {
if (DEBUG) {
System.out.println("rescalingCountInner < RESCALE_TIMES");
}
recomputeScaleFactors = true;
rescalingCountInner++;
throw new LikelihoodRescalingException();
}
//underflowHandling takes into account the first evaluation when initiating the MCMC chain
//suggest replacing with boolean initialEvaluation
if (initialEvaluation) {
if (underflowHandling < 1) {
underflowHandling++;
if (DEBUG) {
System.out.println("underflowHandling < 1");
}
} else if (underflowHandling == 1) {
if (DEBUG) {
System.out.println("underflowHandling == 1");
}
recomputeScaleFactors = true;
underflowHandling++;
initialEvaluation = false;
}
}
rescalingCount++;
}
}
if (RESCALING_OFF) { // a debugging switch
useScaleFactors = false;
recomputeScaleFactors = false;
}
int branchUpdateCount = 0;
for (BranchOperation op : branchOperations) {
branchUpdateIndices[branchUpdateCount] = op.getBranchNumber();
branchLengths[branchUpdateCount] = op.getBranchLength();
branchUpdateCount ++;
}
if (updateSubstitutionModel) { // TODO More efficient to update only the substitution model that changed, instead of all
evolutionaryProcessDelegate.updateSubstitutionModels(beagle, flip);
// we are currently assuming a no-category model...
}
if (updateSiteModel) {
double[] categoryRates = this.siteRateModel.getCategoryRates();
if (categoryRates == null) {
// If this returns null then there was a numerical error calculating the category rates
// (probably a very small alpha) so reject the move.
return Double.NEGATIVE_INFINITY;
}
beagle.setCategoryRates(categoryRates);
}
if (branchUpdateCount > 0) {
evolutionaryProcessDelegate.updateTransitionMatrices(
beagle,
branchUpdateIndices,
branchLengths,
branchUpdateCount,
flip);
}
if (flip) {
// Flip all the buffers to be written to first...
for (NodeOperation op : nodeOperations) {
partialBufferHelper.flipOffset(op.getNodeNumber());
}
}
int operationCount = nodeOperations.size();
int k = 0;
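// Each node operation fills Beagle.OPERATION_TUPLE_SIZE consecutive slots (seven in current
// BEAGLE Java bindings): destination partials buffer, scale buffer to write (or NONE),
// scale buffer to read (or NONE), left child partials, left child matrix,
// right child partials, right child matrix.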
for (NodeOperation op : nodeOperations) {
int nodeNum = op.getNodeNumber();
operations[k] = partialBufferHelper.getOffsetIndex(nodeNum);
if (useScaleFactors) {
// get the index of this scaling buffer
int n = nodeNum - tipCount;
if (recomputeScaleFactors) {
// flip the indicator: can take either n or (internalNodeCount + 1) - n
scaleBufferHelper.flipOffset(n);
// store the index
scaleBufferIndices[n] = scaleBufferHelper.getOffsetIndex(n);
operations[k + 1] = scaleBufferIndices[n]; // Write new scaleFactor
operations[k + 2] = Beagle.NONE;
} else {
operations[k + 1] = Beagle.NONE;
operations[k + 2] = scaleBufferIndices[n]; // Read existing scaleFactor
}
} else {
if (useAutoScaling) {
scaleBufferIndices[nodeNum - tipCount] = partialBufferHelper.getOffsetIndex(nodeNum);
}
operations[k + 1] = Beagle.NONE; // Not using scaleFactors
operations[k + 2] = Beagle.NONE;
}
operations[k + 3] = partialBufferHelper.getOffsetIndex(op.getLeftChild()); // source node 1
operations[k + 4] = evolutionaryProcessDelegate.getMatrixIndex(op.getLeftChild()); // source matrix 1
operations[k + 5] = partialBufferHelper.getOffsetIndex(op.getRightChild()); // source node 2
operations[k + 6] = evolutionaryProcessDelegate.getMatrixIndex(op.getRightChild()); // source matrix 2
k += Beagle.OPERATION_TUPLE_SIZE;
}
beagle.updatePartials(operations, operationCount, Beagle.NONE);
int rootIndex = partialBufferHelper.getOffsetIndex(rootNodeNumber);
double[] categoryWeights = this.siteRateModel.getCategoryProportions();
// This should probably explicitly be the state frequencies for the root node...
double[] frequencies = evolutionaryProcessDelegate.getRootStateFrequencies();
int cumulateScaleBufferIndex = Beagle.NONE;
if (useScaleFactors) {
if (recomputeScaleFactors) {
scaleBufferHelper.flipOffset(internalNodeCount);
cumulateScaleBufferIndex = scaleBufferHelper.getOffsetIndex(internalNodeCount);
beagle.resetScaleFactors(cumulateScaleBufferIndex);
beagle.accumulateScaleFactors(scaleBufferIndices, internalNodeCount, cumulateScaleBufferIndex);
} else {
cumulateScaleBufferIndex = scaleBufferHelper.getOffsetIndex(internalNodeCount);
}
} else if (useAutoScaling) {
beagle.accumulateScaleFactors(scaleBufferIndices, internalNodeCount, Beagle.NONE);
}
// these could be set only when they change but store/restore would need to be considered
beagle.setCategoryWeights(0, categoryWeights);
beagle.setStateFrequencies(0, frequencies);
double[] sumLogLikelihoods = new double[1];
if (DEBUG) {
System.out.println("useScaleFactors=" + useScaleFactors + " recomputeScaleFactors=" + recomputeScaleFactors + " (" + getId() + ")");
}
beagle.calculateRootLogLikelihoods(new int[]{rootIndex}, new int[]{0}, new int[]{0},
new int[]{cumulateScaleBufferIndex}, 1, sumLogLikelihoods);
double logL = sumLogLikelihoods[0];
/*if (DEBUG) {
System.out.println(logL);
if (logL > -90000) {
System.exit(0);
}
}*/
if (Double.isNaN(logL) || Double.isInfinite(logL)) {
if (DEBUG) {
System.out.println("Double.isNaN(logL) || Double.isInfinite(logL) (" + getId() + ")");
}
everUnderflowed = true;
logL = Double.NEGATIVE_INFINITY;
if (firstRescaleAttempt && (delayRescalingUntilUnderflow || rescalingScheme == PartialsRescalingScheme.DELAYED)) {
if (rescalingScheme == PartialsRescalingScheme.DYNAMIC || (rescalingCount == 0)) {
// show a message but only every 1000 rescales
if (rescalingMessageCount % 1000 == 0) {
if (rescalingMessageCount > 0) {
Logger.getLogger("dr.evomodel").info("Underflow calculating likelihood (" + rescalingMessageCount + " messages not shown; " + getId() + ").");
} else {
Logger.getLogger("dr.evomodel").info("Underflow calculating likelihood. Attempting a rescaling... (" + getId() + ")");
}
}
rescalingMessageCount += 1;
}
useScaleFactors = true;
recomputeScaleFactors = true;
firstRescaleAttempt = false; // Only try to rescale once
rescalingCount--; // do not count this failed evaluation towards the rescaling frequency
}
// turn off double buffer flipping so the next call overwrites the
// underflowed buffers. Flip will be turned on again in storeState for
// next step
flip = false;
underflowHandling = 0;
throw new LikelihoodUnderflowException();
} else {
firstRescaleAttempt = true;
recomputeScaleFactors = false;
flip = true;
}
updateSubstitutionModel = false;
updateSiteModel = false;
/* No need to rescale partials */
return logL;
}
/**
* Stores the additional state other than model components
*/
@Override
public void storeState() {
partialBufferHelper.storeState();
evolutionaryProcessDelegate.storeState();
if (useScaleFactors || useAutoScaling) { // Only store when actually used
scaleBufferHelper.storeState();
System.arraycopy(scaleBufferIndices, 0, storedScaleBufferIndices, 0, scaleBufferIndices.length);
// storedRescalingCount = rescalingCount;
}
// turn on double buffering flipping (may have been turned off to enable a rescale)
flip = true;
}
/**
* Restore the additional stored state
*/
@Override
public void restoreState() {
updateSiteModel = true; // this is required to upload the categoryRates to BEAGLE after the restore
partialBufferHelper.restoreState();
evolutionaryProcessDelegate.restoreState();
if (useScaleFactors || useAutoScaling) {
scaleBufferHelper.restoreState();
int[] tmp = storedScaleBufferIndices;
storedScaleBufferIndices = scaleBufferIndices;
scaleBufferIndices = tmp;
// rescalingCount = storedRescalingCount;
}
}
@Override
public void setCallback(TreeDataLikelihood treeDataLikelihood) {
// Callback not necessary
}
@Override
public int vectorizeNodeOperations(List<ProcessOnTreeDelegate.NodeOperation> nodeOperations, int[] operations) {
throw new RuntimeException("Not yet implemented");
}
@Override
protected void acceptState() {
}
// INSTANCE CITABLE
@Override
public Citation.Category getCategory() {
return Citation.Category.FRAMEWORK;
}
@Override
public String getDescription() {
return "Using BEAGLE likelihood calculation library";
}
@Override
public List<Citation> getCitations() {
return Collections.singletonList(CommonCitations.AYRES_2012_BEAGLE);
}
// INSTANCE VARIABLES
private final int nodeCount;
private final int tipCount;
private final int internalNodeCount;
private final int[] branchUpdateIndices;
private final double[] branchLengths;
private int[] scaleBufferIndices;
private int[] storedScaleBufferIndices;
private final int[] operations;
private boolean flip = true;
private final BufferIndexHelper partialBufferHelper;
private final BufferIndexHelper scaleBufferHelper;
private PartialsRescalingScheme rescalingScheme;
private int rescalingFrequency = RESCALE_FREQUENCY;
private boolean delayRescalingUntilUnderflow = true;
private boolean useScaleFactors = false;
private boolean useAutoScaling = false;
private boolean recomputeScaleFactors = false;
private boolean everUnderflowed = false;
private int rescalingCount = 0;
private int rescalingCountInner = 0;
private int threadCount = 1;
private long instanceFlags;
private boolean firstRescaleAttempt = false;
private int rescalingMessageCount = 0;
//integer to keep track of setting recomputeScaleFactors correctly after an underflow
private int underflowHandling = 0;
/**
* the patternList
*/
private final PatternList patternList;
/**
* the data type
*/
private final DataType dataType;
/**
* the pattern weights
*/
private final double[] patternWeights;
/**
* the number of patterns
*/
private final int patternCount;
/**
* the number of states in the data
*/
private final int stateCount;
/**
* the branch-site model for these sites
*/
private final BranchModel branchModel;
/**
* A delegate to handle substitution models on branches
*/
private final EvolutionaryProcessDelegate evolutionaryProcessDelegate;
/**
* the site model for these sites
*/
private final SiteRateModel siteRateModel;
/**
* the pattern likelihoods
*/
private double[] patternLogLikelihoods = null;
/**
* the number of rate categories
*/
private final int categoryCount;
/**
* an array used to transfer tip partials
*/
private double[] tipPartials;
/**
* an array used to transfer tip states
*/
private int[] tipStates;
/**
* the BEAGLE library instance
*/
private final Beagle beagle;
/**
* Flag to specify that the substitution model has changed
*/
private boolean updateSubstitutionModel;
/**
* Flag to specify that the site model has changed
*/
private boolean updateSiteModel;
/**
* Flag to take into account the first likelihood evaluation when initiating the MCMC chain
*/
private boolean initialEvaluation = true;
}
|
package edu.washington.escience.myria.parallel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableMap;
import edu.washington.escience.myria.DbException;
import edu.washington.escience.myria.MyriaConstants;
import edu.washington.escience.myria.MyriaConstants.FTMODE;
import edu.washington.escience.myria.operator.RootOperator;
import edu.washington.escience.myria.operator.StreamingState;
import edu.washington.escience.myria.operator.TupleSource;
import edu.washington.escience.myria.operator.network.Consumer;
import edu.washington.escience.myria.operator.network.Producer;
import edu.washington.escience.myria.operator.network.RecoverProducer;
import edu.washington.escience.myria.parallel.ipc.FlowControlBagInputBuffer;
import edu.washington.escience.myria.parallel.ipc.IPCEvent;
import edu.washington.escience.myria.parallel.ipc.IPCEventListener;
import edu.washington.escience.myria.parallel.ipc.StreamOutputChannel;
import edu.washington.escience.myria.storage.TupleBatch;
import edu.washington.escience.myria.util.DateTimeUtils;
/**
* A {@link WorkerQueryPartition} is a partition of a query plan at a single worker.
* */
public class WorkerQueryPartition implements QueryPartition {
/**
* logger.
* */
private static final Logger LOGGER = LoggerFactory.getLogger(WorkerQueryPartition.class);
/**
* The query ID.
* */
private final long queryID;
/**
* All tasks.
* */
private final Set<QuerySubTreeTask> tasks;
/**
* Number of finished tasks.
* */
private final AtomicInteger numFinishedTasks;
/**
* The owner {@link Worker}.
* */
private final Worker ownerWorker;
/**
* The ftMode.
* */
private final FTMODE ftMode;
/**
* The profiling mode.
*/
private final boolean profilingMode;
/**
* The priority; currently unused.
* */
private volatile int priority;
/**
* The current pause future if the query is paused, otherwise null.
* */
private final AtomicReference<QueryFuture> pauseFuture = new AtomicReference<QueryFuture>(null);
/**
* the future for the query's execution.
* */
private final DefaultQueryFuture executionFuture = new DefaultQueryFuture(this, true);
/**
* record all failed tasks.
* */
private final ConcurrentLinkedQueue<QuerySubTreeTask> failTasks = new ConcurrentLinkedQueue<QuerySubTreeTask>();
/**
* The set of currently missing (failed) workers.
* */
private final Set<Integer> missingWorkers;
/**
* Record milliseconds so that we can normalize the time in {@link ProfilingLogger}.
*/
private volatile long startMilliseconds = 0;
/**
* The future listener that processes the completion events of the execution of all the query's tasks.
*/
private final TaskFutureListener taskExecutionListener = new TaskFutureListener() {
@Override
public void operationComplete(final TaskFuture future) throws Exception {
QuerySubTreeTask drivingTask = future.getTask();
int currentNumFinished = numFinishedTasks.incrementAndGet();
executionFuture.setProgress(1, currentNumFinished, tasks.size());
Throwable failureReason = future.getCause();
if (!future.isSuccess()) {
failTasks.add(drivingTask);
if (!(failureReason instanceof QueryKilledException)) {
// The task is a failure, not killed.
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("got a failed task, root op = " + drivingTask.getRootOp().getOpName() + ", cause ",
failureReason);
}
for (QuerySubTreeTask t : tasks) {
// kill other tasks
t.kill();
}
}
}
if (currentNumFinished >= tasks.size()) {
queryStatistics.markQueryEnd();
if (LOGGER.isInfoEnabled()) {
LOGGER.info("Query #" + queryID + " executed for "
+ DateTimeUtils.nanoElapseToHumanReadable(queryStatistics.getQueryExecutionElapse()));
}
if (isProfilingMode()) {
try {
getOwnerWorker().getProfilingLogger().flush(queryID);
} catch (DbException e) {
LOGGER.error("Error flushing profiling logger.", e);
}
}
if (failTasks.isEmpty()) {
executionFuture.setSuccess();
} else {
Throwable existingCause = executionFuture.getCause();
Throwable newCause = failTasks.peek().getExecutionFuture().getCause();
if (existingCause == null) {
executionFuture.setFailure(newCause);
} else {
existingCause.addSuppressed(newCause);
}
}
} else {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("New finished task: {}. {} remain.", drivingTask, (tasks.size() - currentNumFinished));
}
}
}
};
/***
* Statistics of this query partition.
*/
private final QueryExecutionStatistics queryStatistics = new QueryExecutionStatistics();
/**
* @param plan the plan of this query partition.
* @param queryID the id of the query.
* @param ownerWorker the worker on which this query partition is going to run
* */
public WorkerQueryPartition(final SingleQueryPlanWithArgs plan, final long queryID, final Worker ownerWorker) {
this.queryID = queryID;
ftMode = plan.getFTMode();
profilingMode = plan.isProfilingMode();
List<RootOperator> operators = plan.getRootOps();
tasks = new HashSet<QuerySubTreeTask>(operators.size());
numFinishedTasks = new AtomicInteger(0);
this.ownerWorker = ownerWorker;
missingWorkers = Collections.newSetFromMap(new ConcurrentHashMap<Integer, Boolean>());
for (final RootOperator taskRootOp : operators) {
createTask(taskRootOp);
}
}
/**
* create a task.
*
* @param root the root operator of this task.
* @return the task.
*/
public QuerySubTreeTask createTask(final RootOperator root) {
final QuerySubTreeTask drivingTask =
new QuerySubTreeTask(ownerWorker.getIPCConnectionPool().getMyIPCID(), this, root, ownerWorker
.getQueryExecutor());
TaskFuture taskExecutionFuture = drivingTask.getExecutionFuture();
taskExecutionFuture.addListener(taskExecutionListener);
tasks.add(drivingTask);
HashSet<Consumer> consumerSet = new HashSet<Consumer>();
consumerSet.addAll(drivingTask.getInputChannels().values());
for (final Consumer c : consumerSet) {
FlowControlBagInputBuffer<TupleBatch> inputBuffer =
new FlowControlBagInputBuffer<TupleBatch>(ownerWorker.getIPCConnectionPool(), c
.getInputChannelIDs(ownerWorker.getIPCConnectionPool().getMyIPCID()), ownerWorker
.getInputBufferCapacity(), ownerWorker.getInputBufferRecoverTrigger(), ownerWorker.getIPCConnectionPool());
inputBuffer.addListener(FlowControlBagInputBuffer.NEW_INPUT_DATA, new IPCEventListener() {
@Override
public void triggered(final IPCEvent event) {
drivingTask.notifyNewInput();
}
});
c.setInputBuffer(inputBuffer);
}
return drivingTask;
}
@Override
public final QueryFuture getExecutionFuture() {
return executionFuture;
}
@Override
public final void init() {
for (QuerySubTreeTask t : tasks) {
init(t);
}
}
/**
* initialize a task.
*
* @param t the task
* */
public final void init(final QuerySubTreeTask t) {
TaskResourceManager resourceManager = new TaskResourceManager(ownerWorker.getIPCConnectionPool(), t);
ImmutableMap.Builder<String, Object> b = ImmutableMap.builder();
t.init(resourceManager, b.putAll(ownerWorker.getExecEnvVars()).build());
}
@Override
public final long getQueryID() {
return queryID;
}
@Override
public final int compareTo(final QueryPartition o) {
if (o == null) {
return -1;
}
return priority - o.getPriority();
}
@Override
public final void setPriority(final int priority) {
this.priority = priority;
}
@Override
public final String toString() {
return tasks + ", priority:" + priority;
}
@Override
public final void startExecution() {
if (LOGGER.isInfoEnabled()) {
LOGGER.info("Query : " + getQueryID() + " start processing.");
}
startMilliseconds = System.currentTimeMillis();
queryStatistics.markQueryStart();
for (QuerySubTreeTask t : tasks) {
t.start();
}
}
@Override
public final int getPriority() {
return priority;
}
@Override
public final QueryFuture pause() {
final QueryFuture pauseF = new DefaultQueryFuture(this, true);
while (!pauseFuture.compareAndSet(null, pauseF)) {
QueryFuture current = pauseFuture.get();
if (current != null) {
// already paused by another thread; do not perform the actual pause
return current;
}
}
return pauseF;
}
@Override
public final QueryFuture resume() {
QueryFuture pf = pauseFuture.getAndSet(null);
DefaultQueryFuture rf = new DefaultQueryFuture(this, true);
if (pf == null) {
// query is not in pause, return success directly.
rf.setSuccess();
return rf;
}
// TODO do the resume stuff
return rf;
}
@Override
public final void kill() {
for (QuerySubTreeTask task : tasks) {
task.kill();
}
}
@Override
public final boolean isPaused() {
return pauseFuture.get() != null;
}
@Override
public final QueryExecutionStatistics getExecutionStatistics() {
return queryStatistics;
}
@Override
public FTMODE getFTMode() {
return ftMode;
}
@Override
public boolean isProfilingMode() {
return profilingMode;
}
@Override
public Set<Integer> getMissingWorkers() {
return missingWorkers;
}
/**
* when a REMOVE_WORKER message is received, give tasks another chance to decide if they are ready to generate
* EOS/EOI.
*/
public void triggerTasks() {
for (QuerySubTreeTask task : tasks) {
task.notifyNewInput();
}
}
/**
* enable/disable output channels of the root(producer) of each task.
*
* @param workerId the worker that changed its status.
* @param enable enable/disable all the channels that belong to the worker.
* */
public void updateProducerChannels(final int workerId, final boolean enable) {
for (QuerySubTreeTask task : tasks) {
task.updateProducerChannels(workerId, enable);
}
}
/**
* add a recovery task for the failed worker.
*
* @param workerId the id of the failed worker.
*/
public void addRecoveryTasks(final int workerId) {
List<RootOperator> recoveryTasks = new ArrayList<RootOperator>();
for (QuerySubTreeTask task : tasks) {
if (task.getRootOp() instanceof Producer) {
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("adding recovery task for " + task.getRootOp().getOpName());
}
List<StreamingState> buffers = ((Producer) task.getRootOp()).getTriedToSendTuples();
List<Integer> indices = ((Producer) task.getRootOp()).getChannelIndicesOfAWorker(workerId);
StreamOutputChannel<TupleBatch>[] channels = ((Producer) task.getRootOp()).getChannels();
for (int i = 0; i < indices.size(); ++i) {
int j = indices.get(i);
/* buffers.get(j) might be an empty List<TupleBatch>, so need to set its schema explicitly. */
TupleSource scan = new TupleSource(buffers.get(j).exportState(), buffers.get(j).getSchema());
scan.setOpName("tuplesource for " + task.getRootOp().getOpName() + channels[j].getID());
RecoverProducer rp =
new RecoverProducer(scan, ExchangePairID.fromExisting(channels[j].getID().getStreamID()), channels[j]
.getID().getRemoteID(), (Producer) task.getRootOp(), j);
rp.setOpName("recProducer_for_" + task.getRootOp().getOpName());
recoveryTasks.add(rp);
scan.setFragmentId(0 - recoveryTasks.size());
rp.setFragmentId(0 - recoveryTasks.size());
}
}
}
final List<QuerySubTreeTask> list = new ArrayList<QuerySubTreeTask>();
for (RootOperator cp : recoveryTasks) {
QuerySubTreeTask recoveryTask = createTask(cp);
list.add(recoveryTask);
}
new Thread() {
@Override
public void run() {
while (true) {
if (ownerWorker.getIPCConnectionPool().isRemoteAlive(workerId)) {
/* waiting for ADD_WORKER to be received */
for (QuerySubTreeTask task : list) {
init(task);
/* input might be null but we still need it to run */
task.notifyNewInput();
}
break;
}
try {
Thread.sleep(MyriaConstants.SHORT_WAITING_INTERVAL_100_MS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
}
}.start();
}
/**
* @return the owner worker
*/
public Worker getOwnerWorker() {
return ownerWorker;
}
/**
* @return the time in milliseconds when the partition was initialized.
*/
public long getBeginMilliseconds() {
return startMilliseconds;
}
}
|
package org.peerbox.watchservice;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.hive2hive.core.exceptions.NoPeerConnectionException;
import org.hive2hive.core.exceptions.NoSessionException;
import org.hive2hive.processframework.exceptions.InvalidProcessStateException;
import org.hive2hive.processframework.exceptions.ProcessExecutionException;
import org.peerbox.app.manager.file.IFileManager;
import org.peerbox.watchservice.filetree.composite.FileComponent;
import org.peerbox.watchservice.filetree.composite.FolderComposite;
import org.peerbox.watchservice.states.AbstractActionState;
import org.peerbox.watchservice.states.EstablishedState;
import org.peerbox.watchservice.states.ExecutionHandle;
import org.peerbox.watchservice.states.InitialState;
import org.peerbox.watchservice.states.LocalMoveState;
import org.peerbox.watchservice.states.RemoteCreateState;
import org.peerbox.watchservice.states.RemoteUpdateState;
import org.peerbox.watchservice.states.StateType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The Action class provides a systematic and loosely coupled way to change the
* state of an object, following the state pattern.
*
*
* @author albrecht, anliker, winzenried
*
*/
public class Action implements IAction {
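// Illustrative call sequence (a sketch based on the handlers below; 'fileManager' is a
// hypothetical IFileManager instance):
//   action.handleLocalCreateEvent();   // not executing: currentState advances
//   action.execute(fileManager);       // marks the action as executing
//   action.handleLocalUpdateEvent();   // executing: only nextState advances
//   action.onSucceeded();              // currentState = nextState, execution flag reset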
private final static Logger logger = LoggerFactory.getLogger(Action.class);
private FileComponent file;
private final AtomicLong timestamp;
private volatile AbstractActionState currentState;
private volatile AbstractActionState nextState;
private volatile boolean isExecuting = false;
private volatile boolean changedWhileExecuted = false;
private volatile int executionAttempts = 0;
private IFileEventManager fileEventManager;
private final Lock lock = new ReentrantLock();
public Action() {
this(null);
}
/**
* Initializes the timestamp and sets currentState to the initial state.
*/
public Action(final IFileEventManager fileEventManager) {
this.currentState = new InitialState(this);
this.nextState = new EstablishedState(this);
timestamp = new AtomicLong(Long.MAX_VALUE);
this.fileEventManager = fileEventManager;
updateTimestamp();
}
/**
* Changes the current state to the Create state if the current state allows it.
*/
@Override
public void handleLocalCreateEvent() {
logger.trace("handleLocalCreateEvent - File: {}, isExecuting({})",
getFile().getPath(), isExecuting());
try {
acquireLock();
if (isExecuting()) {
if(currentState.getStateType() == StateType.REMOTE_CREATE){
RemoteCreateState castedState = (RemoteCreateState)currentState;
castedState.setLocalCreateHappened(true);
}
nextState = nextState.changeStateOnLocalCreate();
checkIfChanged();
} else {
updateTimestamp();
currentState = currentState.handleLocalCreate();
nextState = currentState.getDefaultState();
}
} finally {
releaseLock();
}
}
/**
* Changes the current state to the Modify state if the current state allows it.
*/
@Override
public void handleLocalUpdateEvent() {
logger.trace("handleLocalUpdateEvent - File: {}, isExecuting({})",
getFile().getPath(), isExecuting());
try {
acquireLock();
if (isExecuting()) {
if(currentState.getStateType() == StateType.REMOTE_CREATE){
// RemoteCreateState castedState = (RemoteCreateState)currentState;
// if(!castedState.localCreateHappened()){
// nextState = nextState.changeStateOnLocalUpdate();
// checkIfChanged();
// } else {
// logger.debug("File {}: LocalUpdateEvent after LocalCreateEvent "
// + "in RemoteCreateState - ignored!", file.getPath());
} else if(currentState.getStateType() == StateType.REMOTE_UPDATE){
RemoteUpdateState castedState = (RemoteUpdateState)currentState;
if(castedState.getLocalUpdateHappened()){
nextState = nextState.changeStateOnLocalUpdate();
checkIfChanged();
} else {
castedState.setLocalUpdateHappened(true);
logger.debug("File {}: First LocalUpdateEvent "
+ "in RemoteUpdateState - ignored!", file.getPath());
}
} else {
nextState = nextState.changeStateOnLocalUpdate();
checkIfChanged();
}
} else {
updateTimestamp();
if (currentState instanceof LocalMoveState) {
nextState = nextState.changeStateOnLocalUpdate();
} else {
currentState = currentState.handleLocalUpdate();
nextState = currentState.getDefaultState();
}
}
} finally {
releaseLock();
}
}
/**
* Changes the current state to the Delete state if the current state allows it.
*/
@Override
public void handleLocalDeleteEvent() {
logger.trace("handleLocalDeleteEvent - File: {}, isExecuting({})",
getFile().getPath(), isExecuting());
try {
acquireLock();
if (isExecuting()) {
nextState = nextState.changeStateOnLocalDelete();
checkIfChanged();
} else {
updateTimestamp();
currentState = currentState.handleLocalDelete();
nextState = currentState.getDefaultState();
}
} finally {
releaseLock();
}
}
@Override
public void handleLocalHardDeleteEvent(){
logger.trace("handleLocalHardDeleteEvent - File: {}, isExecuting({})",
getFile().getPath(), isExecuting());
if (getFile().isFolder()) {
logger.trace("Folder {} - delete children", getFile().getPath());
FolderComposite folder = (FolderComposite) getFile();
Map<Path, FileComponent> children = folder.getChildren();
for (Map.Entry<Path, FileComponent> childEntry : children.entrySet()) {
FileComponent child = childEntry.getValue();
child.getAction().handleLocalHardDeleteEvent();
}
}
try {
acquireLock();
if (isExecuting()) {
nextState = nextState.changeStateOnLocalHardDelete();
checkIfChanged();
} else {
updateTimestamp();
currentState = currentState.handleLocalHardDelete();
nextState = currentState.getDefaultState();
}
try {
if (!Files.exists(getFile().getPath())) {
return;
}
Files.delete(getFile().getPath());
logger.trace("DELETED FROM DISK: {}", getFile().getPath());
} catch (IOException e) {
logger.warn("Could not delete file: {} ({})",
getFile().getPath(), e.getMessage(), e);
}
} finally {
releaseLock();
}
}
@Override
public void handleLocalMoveEvent(Path oldFilePath) {
logger.debug("handleLocalMoveEvent - File: {}, isExecuting({})",
getFile().getPath(), isExecuting());
try {
acquireLock();
if (isExecuting()) {
nextState = nextState.changeStateOnLocalMove(oldFilePath);
checkIfChanged();
} else {
updateTimestamp();
if (oldFilePath.equals(getFile().getPath())) {
logger.trace("File {}: Move to same location due to update!",
getFile().getPath());
fileEventManager.getFileTree().getDeletedByContentHash()
.get(getFile().getContentHash()).remove(oldFilePath);
return;
}
currentState = currentState.handleLocalMove(oldFilePath);
nextState = currentState.getDefaultState();
}
} finally {
releaseLock();
}
}
@Override
public void handleRemoteCreateEvent() {
logger.trace("handleRemoteCreateEvent - File: {}, isExecuting({})",
getFile().getPath(), isExecuting());
try {
acquireLock();
if (isExecuting()) {
nextState = nextState.changeStateOnRemoteCreate();
checkIfChanged();
} else {
updateTimestamp();
currentState = currentState.handleRemoteCreate();
nextState = currentState.getDefaultState();
}
} finally {
releaseLock();
}
}
@Override
public void handleRemoteUpdateEvent() {
logger.trace("handleRemoteUpdateEvent - File: {}, isExecuting({})",
getFile().getPath(), isExecuting());
try {
acquireLock();
if (isExecuting()) {
nextState = nextState.changeStateOnRemoteUpdate();
checkIfChanged();
} else {
updateTimestamp();
currentState = currentState.handleRemoteUpdate();
nextState = currentState.getDefaultState();
}
} finally {
releaseLock();
}
}
@Override
public void handleRemoteDeleteEvent() {
logger.trace("handleRemoteDeleteEvent - File: {}, isExecuting({})",
getFile().getPath(), isExecuting());
if (getFile().isFolder()) {
logger.trace("Folder {} - delete children", getFile().getPath());
FolderComposite folder = (FolderComposite) getFile();
Map<Path, FileComponent> children = folder.getChildren();
for (Map.Entry<Path, FileComponent> childEntry : children.entrySet()) {
FileComponent child = childEntry.getValue();
child.getAction().handleRemoteDeleteEvent();
}
}
try {
acquireLock();
if (isExecuting()) {
nextState = nextState.changeStateOnRemoteDelete();
checkIfChanged();
} else {
updateTimestamp();
currentState = currentState.handleRemoteDelete();
nextState = currentState.getDefaultState();
}
} finally {
releaseLock();
}
}
@Override
public void handleRemoteMoveEvent(Path path) {
logger.trace("handleRemoteMoveEvent - File: {}, isExecuting({})",
getFile().getPath(), isExecuting());
try {
acquireLock();
Path srcPath = getFile().getPath();
if (isExecuting()) {
nextState = nextState.changeStateOnRemoteMove(path);
checkIfChanged();
} else {
updateTimestamp();
currentState = currentState.handleRemoteMove(path);
nextState = currentState.getDefaultState();
try {
if (!Files.exists(srcPath)) {
return;
}
Files.move(srcPath, path);
} catch (IOException e) {
logger.warn("Could not move file: from src={} to dst={} ({})",
srcPath, path, e.getMessage(), e);
}
}
} finally {
releaseLock();
}
}
private void checkIfChanged() {
if (!(nextState instanceof EstablishedState)) {
logger.trace("File {}: Next state is {}, keep track of change",
getFile().getPath(), getNextStateName());
changedWhileExecuted = true;
} else {
logger.trace("File {}: Next state is {}, no change detected",
getFile().getPath(), getNextStateName());
}
}
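/*
* Note on the state-transition protocol, as read from this class: while an action
* is executing, incoming events only advance nextState, and checkIfChanged() sets
* changedWhileExecuted whenever the resulting nextState is not an EstablishedState.
* onSucceeded() then promotes nextState to currentState and clears the flag, so a
* change that arrived mid-execution is recorded rather than silently dropped.
*/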
@Override
public ExecutionHandle execute(IFileManager fileManager)
throws NoSessionException, NoPeerConnectionException,
InvalidProcessStateException, ProcessExecutionException {
if (isExecuting()) {
throw new IllegalStateException("Action is already executing.");
}
ExecutionHandle ehandle = null;
try {
acquireLock();
setIsExecuting(true);
++executionAttempts;
ehandle = currentState.execute(fileManager);
if(ehandle == null){
setIsExecuting(false);
}
return ehandle;
// setIsExecuting(false);
// return null;
} finally {
releaseLock();
}
}
@Override
public void onSucceeded() {
logger.trace("onSucceeded: File {} - Switch state from {} to {}",
getFile().getPath(), getCurrentStateName(), getNextStateName());
try {
acquireLock();
currentState = nextState;
nextState = nextState.getDefaultState();
setIsExecuting(false);
changedWhileExecuted = false;
executionAttempts = 0;
} finally {
releaseLock();
}
}
@Override
public void onFailed() {
try {
acquireLock();
setIsExecuting(false);
} finally {
releaseLock();
}
}
private void acquireLock() {
logger.trace("File {}: Wait for own lock at t={} in State {}",
getFile().getPath(), System.currentTimeMillis(), getCurrentState().getStateType());
lock.lock();
logger.trace("File {}: Received own lock at t={} in State {}",
getFile().getPath(), System.currentTimeMillis(), getCurrentState().getStateType());
}
private void releaseLock() {
lock.unlock();
logger.trace("File {}: Released own lock at t={} in State {}",
getFile().getPath(), System.currentTimeMillis(), getCurrentState().getStateType());
}
/**
* @return current state object
*/
@Override
public AbstractActionState getCurrentState() {
return currentState;
}
@Override
public String getCurrentStateName() {
return currentState != null ? currentState.getClass().getSimpleName() : "null";
}
@Override
public AbstractActionState getNextState() {
return nextState;
}
@Override
public String getNextStateName() {
return nextState != null ? nextState.getClass().getSimpleName() : "null";
}
@Override
public boolean getChangedWhileExecuted() {
return changedWhileExecuted;
}
@Override
public int getExecutionAttempts() {
return executionAttempts;
}
@Override
public boolean isExecuting() {
return isExecuting;
}
private void setIsExecuting(final boolean isExecuting) {
this.isExecuting = isExecuting;
}
@Override
public long getTimestamp() {
return timestamp.get();
}
@Override
public void updateTimestamp() {
timestamp.set(System.currentTimeMillis());
}
@Override
public IFileEventManager getFileEventManager() {
return fileEventManager;
}
@Override
public void setFileEventManager(final IFileEventManager fileEventManager) {
this.fileEventManager = fileEventManager;
}
@Override
public FileComponent getFile() {
return file;
}
@Override
public void setFile(final FileComponent file) {
this.file = file;
}
@Override
public void setCurrentState(AbstractActionState state) {
this.currentState = state;
}
@Override
public String toString() {
return String.format("Action[currentState(%s), nextState(%s), isExecuting(%s), changedWhileExecuted(%s), executionAttempts(%d),]",
getCurrentStateName(), getNextStateName(), isExecuting(), changedWhileExecuted, executionAttempts);
}
}
|
package com.wegas.core.ejb;
import com.wegas.core.event.internal.PlayerAction;
import com.wegas.core.event.internal.ResetEvent;
import com.wegas.core.event.internal.lifecycle.EntityCreated;
import com.wegas.core.event.internal.lifecycle.PreEntityRemoved;
import com.wegas.core.exception.client.WegasErrorMessage;
import com.wegas.core.exception.internal.WegasNoResultException;
import com.wegas.core.persistence.game.*;
import com.wegas.core.security.ejb.RoleFacade;
import com.wegas.core.security.ejb.UserFacade;
import com.wegas.core.security.guest.GuestJpaAccount;
import com.wegas.core.security.persistence.Permission;
import com.wegas.core.security.persistence.Role;
import com.wegas.core.security.persistence.User;
import org.slf4j.LoggerFactory;
import javax.ejb.EJB;
import javax.ejb.LocalBean;
import javax.ejb.Stateless;
import javax.enterprise.event.Event;
import javax.inject.Inject;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* @author Francois-Xavier Aeberhard <fx@red-agent.com>
* @author Cyril Junod <cyril.junod at gmail.com>
*/
@Stateless
@LocalBean
public class GameFacade extends BaseFacade<Game> {
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(GameFacade.class);
/**
* Fired once game created
*/
@Inject
private Event<EntityCreated<Game>> gameCreatedEvent;
/**
* Fired pre Game removed
*/
@Inject
private Event<PreEntityRemoved<Game>> gameRemovedEvent;
@EJB
private GameModelFacade gameModelFacade;
@EJB
private RoleFacade roleFacade;
@EJB
private TeamFacade teamFacade;
@EJB
private UserFacade userFacade;
@Inject
private Event<PlayerAction> playerActionEvent;
@Inject
private Event<ResetEvent> resetEvent;
public GameFacade() {
super(Game.class);
}
/**
* @param gameModelId
* @param game
* @throws IOException
*/
public void publishAndCreate(final Long gameModelId, final Game game) throws IOException {
GameModel gm = gameModelFacade.duplicate(gameModelId);
gm.setName(gameModelFacade.find(gameModelId).getName());// @HACK Set name back to the original
gm.setComments(""); // Clear comments
gm.setTemplate(false);
this.create(gm, game);
}
@Override
public void create(final Game game) {
this.create(game.getGameModel().getId(), game);
}
/**
* @param gameModelId
* @param game
*/
public void create(final Long gameModelId, final Game game) {
this.create(gameModelFacade.find(gameModelId), game);
}
/**
* @param gameModel
* @param game
*/
public void create(final GameModel gameModel, final Game game) {
final User currentUser = userFacade.getCurrentUser();
if (game.getToken() == null) {
game.setToken(this.createUniqueToken(game));
} else if (this.findByToken(game.getToken()) != null) {
throw WegasErrorMessage.error("This token is already in use.");
}
game.setCreatedBy(!(currentUser.getMainAccount() instanceof GuestJpaAccount) ? currentUser : null); // @hack @fixme, guest are not stored in the db so link wont work
gameModel.addGame(game);
gameModelFacade.reset(gameModel); // Reset the game so the default player will have instances
userFacade.addAccountPermission(currentUser.getMainAccount(),
"Game:View,Edit:g" + game.getId());
userFacade.addAccountPermission(currentUser.getMainAccount(),
"Game:View:g" + game.getId()); // Grant play to creator
try { // By default games can be joined w/ token
roleFacade.findByName("Public").addPermission("Game:Token:g" + game.getId());
} catch (WegasNoResultException ex) {
logger.error("Unable to find Role: Public");
}
gameCreatedEvent.fire(new EntityCreated<>(game));
}
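/*
* Permission-string convention as it appears in this class (the exact grammar is
* defined elsewhere in the security layer; the game id 42 below is illustrative):
*
* "Game:View,Edit:g" + game.getId() -> edit rights on one game, e.g. "Game:View,Edit:g42"
* "Game:View:g" + game.getId() -> play/view rights granted to the creator, e.g. "Game:View:g42"
* "Game:Token:g" + game.getId() -> join-by-token rights added to the "Public" role
*/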
/**
* @param game
* @return
*/
public String createUniqueToken(Game game) {
String prefixKey = game.getShortName().toLowerCase().replace(" ", "-");
boolean foundUniqueKey = false;
int counter = 0;
String key = null;
int length = 2;
int maxRequest = 400;
while (!foundUniqueKey) {
if (counter > maxRequest) {
length += 1;
maxRequest += 400;
}
String genLetter = this.genRandomLetter(length);
key = prefixKey + "-" + genLetter;
Game foundGameByToken = this.findByToken(key);
if (foundGameByToken == null) {
foundUniqueKey = true;
}
counter += 1;
}
return key;
}
private String genRandomLetter(long length) {
final String tokenElements = "abcdefghijklmnopqrstuvwxyz";
final Integer digits = tokenElements.length();
length = Math.min(50, length); // max 50 length;
StringBuilder sb = new StringBuilder();
Integer random = (int) (Math.random() * digits);
sb.append(tokenElements.charAt(random));
if (length > 1) {
sb.append(genRandomLetter(length - 1));
}
return sb.toString();
}
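/*
* Illustrative sketch of createUniqueToken() (not part of the facade; values are
* hypothetical): for a game whose short name is "My Game" the prefix becomes
* "my-game", so a generated token looks like "my-game-qk". The random suffix starts
* at 2 letters and grows by one letter after every 400 failed uniqueness checks.
*
* String token = gameFacade.createUniqueToken(game); // e.g. "my-game-qk"
*/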
@Override
public Game update(final Long entityId, final Game entity) {
String token = entity.getToken().toLowerCase().replace(" ", "-");
if (token.length() == 0) {
throw WegasErrorMessage.error("Access key cannot be empty");
}
Game theGame = this.findByToken(entity.getToken());
if (theGame != null && !theGame.getId().equals(entity.getId())) {
throw WegasErrorMessage.error("This access key is already in use");
}
return super.update(entityId, entity);
}
@Override
public void remove(final Game entity) {
gameRemovedEvent.fire(new PreEntityRemoved<>(entity));
// Backward compatibility with game models that do not have a DebugGame
if (entity.getGameModel().getGames().size() <= 1
&& !(entity.getGameModel().getGames().get(0) instanceof DebugGame)) {
gameModelFacade.remove(entity.getGameModel());
} else {
super.remove(entity);
}
//for (Team t : entity.getTeams()) {
// teamFacade.remove(t);
userFacade.deleteAccountPermissionByInstance("g" + entity.getId());
userFacade.deleteRolePermissionsByInstance("g" + entity.getId());
}
/**
* Search for a game with token
*
* @param token
* @return first game found or null
*/
public Game findByToken(final String token) {
final TypedQuery<Game> tq = getEntityManager().createNamedQuery("game.findByToken", Game.class).setParameter("token", token).setParameter("status", Game.Status.LIVE);
try {
return tq.getSingleResult();
} catch (NoResultException ex) {
return null;
}
}
/**
* @param search
* @return
*/
public List<Game> findByName(String search) {
final CriteriaBuilder cb = getEntityManager().getCriteriaBuilder();
final CriteriaQuery cq = cb.createQuery();
final Root<Game> game = cq.from(Game.class);
cq.where(cb.like(game.get("name"), search));
Query q = getEntityManager().createQuery(cq);
return (List<Game>) q.getResultList();
}
/**
* @param gameModelId
* @param orderBy
* @return
*/
public List<Game> findByGameModelId(final Long gameModelId, final String orderBy) {
return getEntityManager().createQuery("SELECT game FROM Game game "
+ "WHERE TYPE(game) != DebugGame AND game.gameModel.id = :gameModelId ORDER BY game.createdTime DESC", Game.class)
.setParameter("gameModelId", gameModelId)
.getResultList();
}
/**
* @param status
* @return
*/
public List<Game> findAll(final Game.Status status) {
return getEntityManager().createNamedQuery("game.findByStatus", Game.class).setParameter("status", status).getResultList();
}
/**
* @param userId
* @return
*/
public List<Game> findRegisteredGames(final Long userId) {
final Query getByGameId = getEntityManager().createQuery("SELECT game, p FROM Game game "
+ "LEFT JOIN game.teams t LEFT JOIN t.players p "
+ "WHERE t.gameId = game.id AND p.teamId = t.id "
+ "AND p.user.id = :userId AND "
+ "(game.status = com.wegas.core.persistence.game.Game.Status.LIVE OR game.status = com.wegas.core.persistence.game.Game.Status.BIN) "
+ "ORDER BY p.joinTime ASC", Game.class)
.setParameter("userId", userId);
return this.findRegisterdGames(getByGameId);
}
/**
* @param userId
* @param gameModelId
* @return
*/
public List<Game> findRegisteredGames(final Long userId, final Long gameModelId) {
final Query getByGameId = getEntityManager().createQuery("SELECT game, p FROM Game game "
+ "LEFT JOIN game.teams t LEFT JOIN t.players p "
+ "WHERE t.gameId = game.id AND p.teamId = t.id AND p.user.id = :userId AND game.gameModel.id = :gameModelId "
+ "AND game.status = com.wegas.core.persistence.game.Game.Status.LIVE "
+ "ORDER BY p.joinTime ASC", Game.class)
.setParameter("userId", userId)
.setParameter("gameModelId", gameModelId);
return this.findRegisterdGames(getByGameId);
}
/**
* @param q
* @return
*/
private List<Game> findRegisterdGames(final Query q) {
final List<Game> games = new ArrayList<>();
for (Object ret : q.getResultList()) { // @hack Replace created time by player joined time
final Object[] r = (Object[]) ret;
final Game game = (Game) r[0];
this.getEntityManager().detach(game);
game.setCreatedTime(((Player) r[1]).getJoinTime());
games.add(game);
}
return games;
}
/**
* @param roleName
* @return
*/
public Collection<Game> findPublicGamesByRole(String roleName) {
Collection<Game> games = new ArrayList<>();
try {
Role role;
role = roleFacade.findByName(roleName);
for (Permission permission : role.getPermissions()) {
if (permission.getValue().startsWith("Game:View")) {
Long gameId = Long.parseLong(permission.getValue().split(":g")[1]);
Game game = this.find(gameId);
if (game.getStatus() == Game.Status.LIVE) {
games.add(game);
}
}
}
} catch (WegasNoResultException ex) {
logger.error("FindPublicGamesByRole: " + roleName + " role not found");
}
return games;
}
/**
* @param team
* @param player
*/
public void joinTeam(Team team, Player player) {
team.addPlayer(player);
getEntityManager().persist(player);
team.getGame().getGameModel().propagateDefaultInstance(player);
playerActionEvent.fire(new PlayerAction(player));
}
/**
* @param teamId
* @param p
* @return
*/
public Player joinTeam(Long teamId, Player p) {
// logger.log(Level.INFO, "Adding user " + userId + " to team: " + teamId + ".");
this.joinTeam(teamFacade.find(teamId), p);
return p;
}
/**
* @param team
* @param user
* @return
*/
public Player joinTeam(Team team, User user) {
// logger.log(Level.INFO, "Adding user " + userId + " to team: " + teamId + ".");
Player p = new Player(user, team);
this.joinTeam(team, p);
this.addRights(user, p.getGame());
return p;
}
/**
* @param teamId
* @param userId
* @return
*/
public Player joinTeam(Long teamId, Long userId) {
// logger.log(Level.INFO, "Adding user " + userId + " to team: " + teamId + ".");
return this.joinTeam(teamFacade.find(teamId), userFacade.find(userId));
}
/**
* @param user
* @param game
*/
public void addRights(User user, Game game) {
user.getMainAccount().addPermission(
"Game:View:g" + game.getId(), // Add "View" right on game,
"GameModel:View:gm" + game.getGameModel().getId()); // and also "View" right on its associated game model
}
/**
* Bin given game, changing it's status to {@link Game.Status#BIN}
*
* @param entity Game
*/
public void bin(Game entity) {
entity.setStatus(Game.Status.BIN);
}
/**
* Set game status, changing to {@link Game.Status#LIVE}
*
* @param entity Game
*/
public void live(Game entity) {
entity.setStatus(Game.Status.LIVE);
}
/**
* Set game status, changing to {@link Game.Status#DELETE}
*
* @param entity GameModel
*/
public void delete(Game entity) {
entity.setStatus(Game.Status.DELETE);
}
/**
* Reset a game
*
* @param game the game to reset
*/
public void reset(final Game game) {
// Need to flush so prepersist events will be thrown (for example Game will add default teams)
getEntityManager().flush();
game.getGameModel().propagateDefaultInstance(game);
getEntityManager().flush(); // DA FU ()
// Send a reset event (for the state machine and others)
resetEvent.fire(new ResetEvent(game));
}
/**
* Reset a game
*
* @param gameId id of the game to reset
*/
public void reset(Long gameId) {
this.reset(this.find(gameId));
}
}
|
package com.bakerframework.baker.view;
import android.content.Context;
import android.content.res.Configuration;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.widget.GridView;
public class ShelfView extends GridView {
public ShelfView(Context context, AttributeSet attrs) {
super(context, attrs);
updateColumnCount();
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
updateColumnCount();
super.onConfigurationChanged(newConfig);
}
private void updateColumnCount() {
// Get metrics information
DisplayMetrics metrics = getResources().getDisplayMetrics();
int width = metrics.widthPixels;
// Each issue card needs at least 450px width to display correctly
int columnCount = ((int) Math.max(Math.floor(width / 450), 1));
setNumColumns(columnCount);
}
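/*
* Worked example of the column calculation above: on a 1080px-wide screen,
* floor(1080 / 450) = 2, so two columns are shown; on a 720px screen,
* floor(720 / 450) = 1. The Math.max(..., 1) guard keeps at least one column
* even on screens narrower than 450px.
*/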
}
|
package info.tregmine.basiccommands;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;
import org.bukkit.ChatColor;
import org.bukkit.Color;
import org.bukkit.FireworkEffect;
//import org.bukkit.FireworkEffect;
import org.bukkit.GameMode;
import org.bukkit.Location;
import org.bukkit.World;
//import org.bukkit.World;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
//import org.bukkit.entity.CreatureType;
//import org.bukkit.entity.Chicken;
import org.bukkit.entity.Entity;
import org.bukkit.entity.EntityType;
//import org.bukkit.entity.Firework;
//import org.bukkit.entity.Firework;
//import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Monster;
//import org.bukkit.Color;
import org.bukkit.entity.Player;
import org.bukkit.entity.Slime;
import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.entity.EnderDragon;
//import org.bukkit.inventory.ItemStack;
//import org.bukkit.inventory.meta.FireworkMeta;
import info.tregmine.Tregmine;
import info.tregmine.api.TregminePlayer;
//import info.tregmine.api.TregminePlayer;
public class BasicCommands extends JavaPlugin {
public final Logger log = Logger.getLogger("Minecraft");
public Tregmine tregmine = null;
// public Map<String, FireworkEffect.Builder> fireWorkEffect = new HashMap<String, FireworkEffect.Builder>();
// public Map<String, FireworkMeta> fireWorkMeta = new HashMap<String, FireworkMeta>();
// public Map<String, ItemStack> fireWork = new HashMap<String, ItemStack>();
// public Map<String, Boolean> property = new HashMap<String, Boolean>();
public Map<String, info.tregmine.api.firework.createFirework> firework = new HashMap<String, info.tregmine.api.firework.createFirework>();
@Override
public void onEnable(){
Plugin test = this.getServer().getPluginManager().getPlugin("Tregmine");
if(this.tregmine == null) {
if(test != null) {
this.tregmine = ((Tregmine)test);
} else {
log.info(this.getDescription().getName() + " " + this.getDescription().getVersion() + " - could not find Tregmine");
this.getServer().getPluginManager().disablePlugin(this);
}
}
getServer().getPluginManager().registerEvents(new BasicCommandsBlock(this), this);
}
@Override
public void onDisable(){
}
@Override
public boolean onCommand(CommandSender sender, Command command, String commandLabel, String[] args) {
String commandName = command.getName().toLowerCase();
if(!(sender instanceof Player)) {
if (commandName.matches("kick")) {
Player victim = this.getServer().matchPlayer(args[0]).get(0);
if (victim != null) {
info.tregmine.api.TregminePlayer victimPlayer = this.tregmine.tregminePlayer.get(victim.getName());
this.getServer().broadcastMessage("GOD kicked " + victimPlayer.getChatName() + ChatColor.AQUA + " for 1 min.");
victim.kickPlayer("kicked by GOD.");
}
return true;
}
return false;
}
final Player player = (Player) sender;
info.tregmine.api.TregminePlayer tregminePlayer = this.tregmine.tregminePlayer.get(player.getName());
boolean isAdmin = tregminePlayer.isAdmin();
boolean isDonator = tregminePlayer.isDonator();
boolean isMentor = tregminePlayer.getMetaBoolean("mentor");
if (commandName.matches("fw")) {
}
if (commandName.matches("keyword")) {
if (args.length != 1) {
return false;
}
if (args[0].length() < 1) {
tregminePlayer.sendMessage(ChatColor.RED + "Your keyword must be at least " +
"1 characters long.");
return true;
}
tregminePlayer.setMetaString("keyword", args[0].toLowerCase());
tregminePlayer.sendMessage(ChatColor.YELLOW + "From now on you can only log in by using ip " + args[0].toLowerCase() + ".mc.tregmine.info");
return true;
}
if (commandName.matches("password")) {
if (args.length != 1) {
return false;
}
if (args[0].length() < 6) {
tregminePlayer.sendMessage(ChatColor.RED + "Your password must be at least " +
"6 characters long.");
return true;
}
tregminePlayer.setPassword(args[0]);
tregminePlayer.sendMessage(ChatColor.YELLOW + "Your password has been changed.");
return true;
}
if (commandName.matches("creative") && (tregminePlayer.isAdmin() || tregminePlayer.getMetaBoolean("builder"))) {
final TregminePlayer tregPlayer = tregmine.getPlayer(player);
tregPlayer.setGameMode(GameMode.CREATIVE);
tregPlayer.sendMessage(ChatColor.YELLOW + "You are now in creative mode.");
}
if (commandName.matches("survival") && (tregminePlayer.isAdmin() || tregminePlayer.getMetaBoolean("builder"))) {
final TregminePlayer tregPlayer = tregmine.getPlayer(player);
tregPlayer.setGameMode(GameMode.SURVIVAL);
tregPlayer.sendMessage(ChatColor.YELLOW + "You are now in survival mode.");
}
if (commandName.matches("pos")) {
Location loc = player.getLocation();
Location spawn = player.getWorld().getSpawnLocation();
double distance = info.tregmine.api.math.Distance.calc2d(spawn, loc);
player.sendMessage(ChatColor.DARK_AQUA + "World: " + ChatColor.WHITE + player.getWorld().getName());
this.log.info("World: " + player.getWorld().getName());
player.sendMessage(ChatColor.DARK_AQUA + "X: " + ChatColor.WHITE + loc.getX() + ChatColor.RED + " (" + loc.getBlockX() + ")" );
this.log.info("X: " + loc.getX() + " (" + loc.getBlockX() + ")" );
player.sendMessage(ChatColor.DARK_AQUA + "Y: " + ChatColor.WHITE + loc.getY() + ChatColor.RED + " (" + loc.getBlockY() + ")" );
this.log.info("Y: " + loc.getY() + " (" + loc.getBlockY() + ")" );
player.sendMessage(ChatColor.DARK_AQUA + "Z: " + ChatColor.WHITE + loc.getZ() + ChatColor.RED + " (" + loc.getBlockZ() + ")" );
this.log.info("Z: " + loc.getZ() + " (" + loc.getBlockZ() + ")" );
player.sendMessage(ChatColor.DARK_AQUA + "Yaw: " + ChatColor.WHITE + loc.getYaw());
this.log.info("Yaw: " + loc.getYaw());
player.sendMessage(ChatColor.DARK_AQUA + "Pitch: " + ChatColor.WHITE + loc.getPitch());
this.log.info("Pitch: " + loc.getPitch() );
player.sendMessage(ChatColor.DARK_AQUA + "Blocks from spawn: " + ChatColor.WHITE + distance);
return true;
}
if (commandName.matches("cname") && tregminePlayer.isAdmin()) {
ChatColor color = ChatColor.getByChar(args[0]);
tregminePlayer.setTemporaryChatName(color + args[1]);
tregminePlayer.sendMessage("You are now: " + tregminePlayer.getChatName());
this.log.info(tregminePlayer.getName() + "changed name to" + tregminePlayer.getChatName());
}
if (commandName.matches("t") && tregminePlayer.isAdmin()) {
Player victim = this.getServer().matchPlayer(args[0]).get(0);
victim.getWorld().strikeLightningEffect(victim.getLocation());
return true;
}
if (commandName.matches("td") && tregminePlayer.isOp()) {
Player victim = this.getServer().matchPlayer(args[0]).get(0);
victim.getWorld().strikeLightningEffect(victim.getLocation());
victim.setHealth(0);
return true;
}
if (commandName.matches("time") && tregminePlayer.isDonator()) {
if (args.length == 1) {
if (args[0].matches("day")) {
player.setPlayerTime(6000, false);
} else if (args[0].matches("night")) {
player.setPlayerTime(18000, false);
} else if (args[0].matches("normal")) {
player.resetPlayerTime();
}
}
else {
player.sendMessage(ChatColor.YELLOW + "Say /time day|night|normal");
}
log.info(player.getName() + "TIME");
return true;
}
if (commandName.matches("tpblock") && isDonator) {
if ("on".matches(args[0])) {
tregminePlayer.setMetaString("tpblock", "true");
player.sendMessage("Teleportation is now blocked to you.");
return true;
}
if ("off".matches(args[0])) {
tregminePlayer.setMetaString("tpblock", "false");
player.sendMessage("Teleportation is now allowed to you.");
return true;
}
if ("status".matches(args[0])) {
player.sendMessage("Your tpblock is set to " + tregminePlayer.getMetaString("tpblock") + ".");
return true;
}
player.sendMessage(ChatColor.RED + "The commands are /tpblock on, /tpblock off and /tpblock status.");
return true;
}
if (commandName.matches("normal")) {
info.tregmine.api.TregminePlayer tregPlayer = this.tregmine.tregminePlayer.get(player.getName());
if (isAdmin) {
tregPlayer.setTempMetaString("admin", "false");
tregPlayer.setTemporaryChatName(ChatColor.GOLD + tregPlayer.getName());
player.sendMessage(ChatColor.YELLOW + "You are no longer admin, until you reconnect!");
} else if (tregPlayer.getMetaBoolean("builder")) {
tregPlayer.setTempMetaString("builder", "false");
tregPlayer.setTemporaryChatName(ChatColor.GOLD + tregPlayer.getName());
player.sendMessage(ChatColor.YELLOW + "You are no longer builder, until you reconnect!");
} else if (tregPlayer.isGuardian()) {
Player[] players = tregmine.getServer().getOnlinePlayers();
TregminePlayer maxRank = null;
for (Player srvPlayer : players) {
TregminePlayer guardian = tregmine.getPlayer(srvPlayer);
if (!guardian.isGuardian()) {
continue;
}
TregminePlayer.GuardianState state = guardian.getGuardianState();
if (state == TregminePlayer.GuardianState.QUEUED) {
if (maxRank == null || guardian.getGuardianRank() > maxRank.getGuardianRank()) {
maxRank = guardian;
}
}
}
if (maxRank != null) {
tregPlayer.setGuardianState(TregminePlayer.GuardianState.INACTIVE);
tregPlayer.sendMessage(ChatColor.BLUE + "You are now in normal mode, and no longer have to response to help requests.");
maxRank.setGuardianState(TregminePlayer.GuardianState.ACTIVE);
maxRank.sendMessage(ChatColor.BLUE + "You are now on active duty and should respond to help requests.");
} else {
tregPlayer.sendMessage(ChatColor.BLUE + "Not enough guardians are on to manage the server. We need you to keep working. Sorry. :/");
}
}
return true;
}
info.tregmine.api.TregminePlayer tP = this.tregmine.tregminePlayer.get(player.getName());
if (commandName.matches("nuke") && (tP.isGuardian() || tP.isAdmin())) {
player.sendMessage("You nuked all mobs in this world!");
for (Entity ent : player.getWorld().getLivingEntities()) {
if(ent instanceof Monster) {
Monster mob = (Monster) ent;
mob.setHealth(0);
}
// if(ent instanceof Chicken) {
// Chicken chicken = (Chicken) ent;
// chicken.setHealth(0);
if(ent instanceof org.bukkit.entity.Animals) {
org.bukkit.entity.Animals animal = (org.bukkit.entity.Animals) ent;
animal.setHealth(0);
}
if(ent instanceof Slime) {
Slime slime = (Slime) ent;
slime.setHealth(0);
}
if (ent instanceof EnderDragon) {
EnderDragon dragon = (EnderDragon)ent;
dragon.setHealth(0);
}
}
return true;
}
if (commandName.matches("user") && args.length > 0 && (isAdmin || isMentor)) {
if (args[0].matches("reload")) {
this.tregmine.tregminePlayer.get(this.getServer().matchPlayer(args[1]).get(0).getDisplayName()).load();
player.sendMessage("Player reloaded "+ this.getServer().matchPlayer(args[1]).get(0).getDisplayName());
return true;
}
if (args[0].matches("make")) {
Player victim = this.getServer().matchPlayer(args[2]).get(0);
info.tregmine.api.TregminePlayer victimPlayer = this.tregmine.tregminePlayer.get(victim.getName());
TregminePlayer vtregPlayer = this.tregmine.tregminePlayer.get(victim.getName());
if (args[1].matches("settler")) {
vtregPlayer.setMetaString("color", "trial");
vtregPlayer.setMetaString("trusted", "true");
vtregPlayer.setTemporaryChatName(vtregPlayer.getNameColor() + vtregPlayer.getName());
player.sendMessage(ChatColor.AQUA + "You made " + victimPlayer.getChatName() + ChatColor.AQUA + " settler of this server." );
victim.sendMessage("Welcome! You are now made settler.");
this.log.info(victim.getName() + " was given settler rights by " + player.getName() + ".");
return true;
}
if (args[1].matches("warn")) {
vtregPlayer.setMetaString("color", "warned");
player.sendMessage(ChatColor.AQUA + "You warned " + victimPlayer.getChatName() + ".");
victim.sendMessage("You are now warned");
this.log.info(victim.getName() + " was warned by " + player.getName() + ".");
vtregPlayer.setTemporaryChatName(vtregPlayer.getNameColor() + vtregPlayer.getName());
return true;
}
if (args[1].matches("hardwarn")) {
vtregPlayer.setMetaString("color", "warned");
vtregPlayer.setMetaString("trusted", "false");
player.sendMessage(ChatColor.AQUA + "You warned " + victimPlayer.getChatName() + " and removed his building rights." );
victim.sendMessage("You are now warned and bereft of your building rights.");
this.log.info(victim.getName() + " was hardwarned by " + player.getName() + ".");
vtregPlayer.setTemporaryChatName(vtregPlayer.getNameColor() + vtregPlayer.getName());
return true;
}
if (args[1].matches("trial")) {
player.sendMessage(ChatColor.RED + "Please use /user make settler name");
}
if (args[1].matches("resident") && player.isOp() ) {
vtregPlayer.setMetaString("color", "trusted");
vtregPlayer.setMetaString("trusted", "true");
this.log.info(victim.getName() + " was given trusted rights by " + tregminePlayer.getChatName() + ".");
player.sendMessage(ChatColor.AQUA + "You made " + victimPlayer.getChatName() + ChatColor.AQUA + " a resident." );
victim.sendMessage("Welcome! You are now a resident");
vtregPlayer.setTemporaryChatName(vtregPlayer.getNameColor() + vtregPlayer.getName());
return true;
}
if (args[1].matches("donator") && player.isOp() ) {
vtregPlayer.setMetaString("donator", "true");
// vtregPlayer.setMetaString("compass", "true");
vtregPlayer.setFlying(true);
vtregPlayer.setMetaString("color", "donator");
player.sendMessage(ChatColor.AQUA + "You made " + vtregPlayer.getChatName() + " a donator." );
this.log.info(victim.getName() + " was made donator by" + tregminePlayer.getChatName() + ".");
victim.sendMessage("Congratulations, you are now a donator!");
vtregPlayer.setTemporaryChatName(vtregPlayer.getNameColor() + vtregPlayer.getName());
return true;
}
if (args[1].matches("child")) {
vtregPlayer.setMetaString("color", "child");
player.sendMessage(ChatColor.AQUA + "You made " + vtregPlayer.getChatName() + " a child." );
this.log.info(victim.getName() + " was made child by" + tregminePlayer.getChatName() + ".");
vtregPlayer.setTemporaryChatName(vtregPlayer.getNameColor() + vtregPlayer.getName());
return true;
}
if (args[1].matches("guardian") && player.isOp() ) {
if(vtregPlayer.isDonator()) {
vtregPlayer.setMetaString("police", "true");
vtregPlayer.setMetaString("color", "police");
player.sendMessage(ChatColor.AQUA + "You made " + vtregPlayer.getChatName() + " a police." );
this.log.info(victim.getName() + " was made police by" + tregminePlayer.getChatName() + ".");
victim.sendMessage("Congratulations, you are now a police!");
} else {
player.sendMessage(ChatColor.AQUA + "Sorry this person is not a " + ChatColor.GOLD + " donator." );
}
vtregPlayer.setTemporaryChatName(vtregPlayer.getNameColor() + vtregPlayer.getName());
return true;
}
}
}
if (commandName.matches("kick") && (isAdmin || isMentor)) {
Player victim = this.getServer().matchPlayer(args[0]).get(0);
if (victim != null) {
info.tregmine.api.TregminePlayer victimPlayer = this.tregmine.tregminePlayer.get(victim.getName());
if(victim.getName().matches("einand")) {
this.getServer().broadcastMessage("Never try to kick a god!");
player.kickPlayer("Never kick a god!");
return true;
}
this.getServer().broadcastMessage(tregminePlayer.getChatName() + ChatColor.AQUA + " kicked " + victimPlayer.getChatName() + ChatColor.AQUA + " for 1 minute.");
this.log.info(victim.getName() + " kicked by " + player.getName());
victim.kickPlayer("kicked by " + player.getName());
}
return true;
}
if (commandName.matches("ride.") && tregminePlayer.isOp() ) {
Player v = this.getServer().matchPlayer(args[0]).get(0);
Player v2 = this.getServer().matchPlayer(args[1]).get(0);
v2.setPassenger(v);
return true;
}
if (commandName.matches("eject")) {
Player v = this.getServer().matchPlayer(args[0]).get(0);
v.eject();
}
if (commandName.matches("newspawn") && isAdmin) {
player.getWorld().setSpawnLocation(player.getLocation().getBlockX(), player.getLocation().getBlockY(), player.getLocation().getBlockZ());
return true;
}
if (commandName.matches("sendto") && isAdmin) {
Player victim = this.getServer().matchPlayer(args[0]).get(0);
if (victim != null){
Location cpspawn = this.getServer().getWorld(args[1]).getSpawnLocation();
player.teleport(cpspawn);
}
return true;
}
if (commandName.matches("test")) {
player.sendMessage("Admin: " + tregminePlayer.isAdmin());
player.sendMessage("Donator: " +tregminePlayer.isDonator());
player.sendMessage("Trusted: " +tregminePlayer.isTrusted());
}
if (commandName.matches("createmob") && isAdmin) {
int amount = 1;
EntityType mobtyp;
try {
amount = Integer.parseInt( args[1] );
} catch (Exception e) {
amount = 1;
}
try {
String mobname = args[0]; //args[0].substring(0,1).toUpperCase() + args[0].substring(1).toLowerCase();
mobtyp = EntityType.fromName(mobname);
} catch (Exception e) {
player.sendMessage(ChatColor.RED + "Sorry that mob doesn't exist.");
return true;
}
if (mobtyp != null) {
for (int i = 0; i<amount;i++) {
if (mobtyp.isSpawnable() && mobtyp.isAlive()) {
// player.getWorld().spawnCreature(player.getLocation(), mobtyp);
player.getWorld().spawnEntity(player.getLocation(), mobtyp);
}
}
player.sendMessage(ChatColor.YELLOW + "You created " + amount + " " + mobtyp.getName() + ".");
this.log.info(player.getName() + " created " + amount + " " + mobtyp.getName());
}
else {
StringBuilder buf = new StringBuilder();
String delim = "";
for (EntityType mob : EntityType.values()) {
if (mob.isSpawnable() && mob.isAlive()) {
buf.append(delim);
buf.append(mob.getName());
delim = ", ";
}
}
player.sendMessage("Valid names are: ");
player.sendMessage(buf.toString());
}
return true;
}
if (commandName.matches("ban") && (isAdmin || isMentor)) {
if (this.getServer().matchPlayer(args[0]).size() > 1) {
player.sendMessage("Found more then one player that contain that letters");
}
Player victim = this.getServer().matchPlayer(args[0]).get(0);
if (victim != null) {
info.tregmine.api.TregminePlayer victimPlayer = this.tregmine.tregminePlayer.get(victim.getName());
if(victim.getName().matches("einand")) {
this.getServer().broadcastMessage("Never ban a god!");
player.kickPlayer("Never ban a god!");
return true;
}
victimPlayer.setMetaString("banned", "true");
this.getServer().broadcastMessage(victimPlayer.getChatName() + ChatColor.RED + " was banned by " + tregminePlayer.getChatName() + ".");
this.log.info(victim.getName() + " Banned by " + player.getName());
victim.kickPlayer("banned by " + player.getName());
}
return true;
}
if (commandName.matches("clean")) {
player.getInventory().clear();
return true;
}
return false;
}
@Override
public void onLoad() {
final World world = this.getServer().getWorld("world");
this.getServer().getScheduler().scheduleSyncRepeatingTask(this, new Runnable() {
public void run() {
Location loc = world.getSpawnLocation();
info.tregmine.api.firework.createFirework firework = new info.tregmine.api.firework.createFirework();
firework.addColor(Color.BLUE);
firework.addColor(Color.YELLOW);
firework.addType(FireworkEffect.Type.STAR);
firework.shot(loc);
}
},100L,200L);
}
}
|
package ru.apetrov;
import java.io.IOException;
import java.io.InputStream;
public class CheckByteStream{
public boolean isNumber(InputStream in) throws IOException{
boolean result = false;
if (in.read() % 2 == 0){
result = true;
}
in.close();
return result;
}
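/*
* Usage sketch (the stream contents are hypothetical): isNumber reads a single byte
* and reports whether its value is even. Note that the stream is closed here, so the
* caller must not reuse it.
*
* boolean even = new CheckByteStream().isNumber(new java.io.ByteArrayInputStream(new byte[]{4})); // true
* boolean odd = new CheckByteStream().isNumber(new java.io.ByteArrayInputStream(new byte[]{7})); // false
*/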
}
|
package net.hillsdon.reviki.vc.impl;
import net.hillsdon.reviki.web.urls.Configuration;
import net.hillsdon.reviki.web.urls.UnknownWikiException;
import net.hillsdon.reviki.web.urls.WikiUrls;
import net.hillsdon.reviki.web.urls.impl.PageStoreConfiguration;
import net.hillsdon.reviki.wiki.renderer.creole.LinkResolutionContext;
import net.hillsdon.reviki.wiki.renderer.creole.PageLinkTarget;
public class SVNPathLinkTarget extends PageLinkTarget {
private final String _repositoryURL;
private final String _path;
public SVNPathLinkTarget(final String repositoryURL, final String path) {
_repositoryURL = repositoryURL;
_path = path;
}
@Override
public boolean isLinkToCurrentWiki() {
return false;
}
@Override
protected String getWiki(LinkResolutionContext resolver) throws UnknownWikiException {
return getWiki(resolver.getConfiguration());
}
private String getWiki(Configuration config) throws UnknownWikiException {
if (config instanceof PageStoreConfiguration) {
String url = _repositoryURL + _path;
if (_repositoryURL.endsWith("/") && _path.startsWith("/")) {
url = _repositoryURL + _path.substring(1);
}
PageStoreConfiguration configuration = (PageStoreConfiguration) config;
for (WikiUrls wiki: configuration.getApplicationUrls().getAvailableWikiUrls()) {
String wikiUrl = wiki.getWiki().getUrl().toString();
if (!wikiUrl.endsWith("/")) {
wikiUrl = wikiUrl + "/";
}
if (url.startsWith(wikiUrl)) {
return wiki.getWikiName();
}
}
throw new UnknownWikiException();
}
return null;
}
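/*
* Illustrative example (URLs are hypothetical): with _repositoryURL =
* "http://svn.example.com/repos" and _path = "/wikiA/SomePage", the combined URL is
* "http://svn.example.com/repos/wikiA/SomePage". It is compared, via startsWith,
* against each configured wiki's URL (wiki.getWiki().getUrl(), normalised to end
* with "/"); the first match wins, otherwise an UnknownWikiException is thrown.
*/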
@Override
public String getPageName() {
return new PageReferenceImpl(_path).getName();
}
@Override
protected String getRevision() {
return null;
}
@Override
protected String getFragment() {
return null;
}
}
|
package com.adobe.phonegap.push;
import android.annotation.SuppressLint;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.res.AssetManager;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.NotificationCompat;
import android.text.Html;
import android.util.Log;
import com.google.android.gcm.GCMBaseIntentService;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Random;
@SuppressLint("NewApi")
public class GCMIntentService extends GCMBaseIntentService implements PushConstants {
private static final String LOG_TAG = "PushPlugin_GCMIntentService";
private static HashMap<Integer, ArrayList<String>> messageMap = new HashMap<Integer, ArrayList<String>>();
public void setNotification(int notId, String message){
ArrayList<String> messageList = messageMap.get(notId);
if(messageList == null) {
messageList = new ArrayList<String>();
messageMap.put(notId, messageList);
}
if(message.isEmpty()){
messageList.clear();
}else{
messageList.add(message);
}
}
public GCMIntentService() {
super("GCMIntentService");
}
@Override
public void onRegistered(Context context, String regId) {
Log.v(LOG_TAG, "onRegistered: " + regId);
try {
JSONObject json = new JSONObject().put(REGISTRATION_ID, regId);
Log.v(LOG_TAG, "onRegistered: " + json.toString());
PushPlugin.sendEvent( json );
}
catch(JSONException e) {
// No message to the user is sent, JSON failed
Log.e(LOG_TAG, "onRegistered: JSON exception");
}
}
@Override
public void onUnregistered(Context context, String regId) {
Log.d(LOG_TAG, "onUnregistered - regId: " + regId);
}
@Override
protected void onMessage(Context context, Intent intent) {
Log.d(LOG_TAG, "onMessage - context: " + context);
// Extract the payload from the message
Bundle extras = intent.getExtras();
if (extras != null) {
// if we are in the foreground, just surface the payload, else post it to the statusbar
if (PushPlugin.isInForeground()) {
extras.putBoolean(FOREGROUND, true);
PushPlugin.sendExtras(extras);
}
else {
extras.putBoolean(FOREGROUND, false);
// Send a notification if there is a message
String message = this.getMessageText(extras);
String title = getString(extras, TITLE, "");
if ((message != null && message.length() != 0) ||
(title != null && title.length() != 0)) {
createNotification(context, extras);
}
}
}
}
public void createNotification(Context context, Bundle extras) {
NotificationManager mNotificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
String appName = getAppName(this);
String packageName = context.getPackageName();
Resources resources = context.getResources();
int notId = parseInt(NOT_ID, extras);
Intent notificationIntent = new Intent(this, PushHandlerActivity.class);
notificationIntent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP | Intent.FLAG_ACTIVITY_CLEAR_TOP);
notificationIntent.putExtra(PUSH_BUNDLE, extras);
notificationIntent.putExtra(NOT_ID, notId);
int requestCode = new Random().nextInt();
PendingIntent contentIntent = PendingIntent.getActivity(this, requestCode, notificationIntent, PendingIntent.FLAG_UPDATE_CURRENT);
NotificationCompat.Builder mBuilder =
new NotificationCompat.Builder(context)
.setWhen(System.currentTimeMillis())
.setContentTitle(getString(extras, TITLE))
.setTicker(getString(extras, TITLE))
.setContentIntent(contentIntent)
.setAutoCancel(true);
SharedPreferences prefs = context.getSharedPreferences(PushPlugin.COM_ADOBE_PHONEGAP_PUSH, Context.MODE_PRIVATE);
String localIcon = prefs.getString(ICON, null);
String localIconColor = prefs.getString(ICON_COLOR, null);
boolean soundOption = prefs.getBoolean(SOUND, true);
boolean vibrateOption = prefs.getBoolean(VIBRATE, true);
Log.d(LOG_TAG, "stored icon=" + localIcon);
Log.d(LOG_TAG, "stored iconColor=" + localIconColor);
Log.d(LOG_TAG, "stored sound=" + soundOption);
Log.d(LOG_TAG, "stored vibrate=" + vibrateOption);
/*
* Notification Vibration
*/
setNotificationVibration(extras, vibrateOption, mBuilder);
/*
* Notification Icon Color
*
* Sets the small-icon background color of the notification.
* To use, add the `iconColor` key to plugin android options
*
*/
setNotificationIconColor(getString(extras,"color"), mBuilder, localIconColor);
/*
* Notification Icon
*
* Sets the small-icon of the notification.
*
* - checks the plugin options for `icon` key
* - if none, uses the application icon
*
* The icon value must be a string that maps to a drawable resource.
* If no resource is found, falls back to the application icon.
*
*/
setNotificationSmallIcon(context, extras, packageName, resources, mBuilder, localIcon);
/*
* Notification Large-Icon
*
* Sets the large-icon of the notification
*
* - checks the gcm data for the `image` key
* - checks to see if remote image, loads it.
* - checks to see if assets image, Loads It.
* - checks to see if resource image, LOADS IT!
* - if none, we don't set the large icon
*
*/
setNotificationLargeIcon(extras, packageName, resources, mBuilder);
/*
* Notification Sound
*/
if (soundOption) {
setNotificationSound(context, extras, mBuilder);
}
/*
* LED Notification
*/
setNotificationLedColor(extras, mBuilder);
/*
* Priority Notification
*/
setNotificationPriority(extras, mBuilder);
/*
* Notification message
*/
setNotificationMessage(notId, extras, mBuilder);
/*
* Notification count
*/
setNotificationCount(extras, mBuilder);
/*
* Notification actions
*/
createActions(extras, mBuilder, resources, packageName);
mNotificationManager.notify(appName, notId, mBuilder.build());
}
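/*
* Illustrative push payload consumed by createNotification() and its helpers. The
* field names below are hypothetical stand-ins for the TITLE, MESSAGE, NOT_ID,
* STYLE, SUMMARY_TEXT, LED_COLOR, VIBRATION_PATTERN, PRIORITY, IMAGE and ACTIONS
* constants defined in PushConstants; only the structure is taken from this class.
*
* {
* "title": "Hello",
* "message": "You have 3 new items",
* "notId": "7",
* "style": "inbox",
* "summaryText": "%n% new items",
* "ledColor": "[255,0,255,0]",
* "vibrationPattern": "[100,200,100,200]",
* "priority": "1",
* "image": "http://example.com/large-icon.png",
* "actions": "[{\"icon\":\"ic_reply\",\"title\":\"Reply\",\"callback\":\"app.reply\"}]"
* }
*/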
private void createActions(Bundle extras, NotificationCompat.Builder mBuilder, Resources resources, String packageName) {
Log.d(LOG_TAG, "create actions");
String actions = getString(extras, ACTIONS);
if (actions != null) {
try {
JSONArray actionsArray = new JSONArray(actions);
for (int i=0; i < actionsArray.length(); i++) {
Log.d(LOG_TAG, "adding action");
JSONObject action = actionsArray.getJSONObject(i);
Log.d(LOG_TAG, "adding callback = " + action.getString(CALLBACK));
Intent intent = new Intent(this, PushHandlerActivity.class);
intent.putExtra(CALLBACK, action.getString(CALLBACK));
intent.putExtra(PUSH_BUNDLE, extras);
PendingIntent pIntent = PendingIntent.getActivity(this, i, intent, PendingIntent.FLAG_UPDATE_CURRENT);
mBuilder.addAction(resources.getIdentifier(action.getString(ICON), DRAWABLE, packageName),
action.getString(TITLE), pIntent);
}
} catch(JSONException e) {
// nope
}
}
}
private void setNotificationCount(Bundle extras, NotificationCompat.Builder mBuilder) {
String msgcnt = getString(extras, MSGCNT);
if (msgcnt == null) {
msgcnt = getString(extras, BADGE);
}
if (msgcnt != null) {
mBuilder.setNumber(Integer.parseInt(msgcnt));
}
}
private void setNotificationVibration(Bundle extras, Boolean vibrateOption, NotificationCompat.Builder mBuilder) {
String vibrationPattern = getString(extras, VIBRATION_PATTERN);
if (vibrationPattern != null) {
String[] items = vibrationPattern.replaceAll("\\[", "").replaceAll("\\]", "").split(",");
long[] results = new long[items.length];
for (int i = 0; i < items.length; i++) {
try {
results[i] = Long.parseLong(items[i]);
} catch (NumberFormatException nfe) {}
}
mBuilder.setVibrate(results);
} else {
if (vibrateOption) {
mBuilder.setDefaults(Notification.DEFAULT_VIBRATE);
}
}
}
private void setNotificationMessage(int notId, Bundle extras, NotificationCompat.Builder mBuilder) {
String message = getMessageText(extras);
String style = getString(extras, STYLE, STYLE_TEXT);
if(STYLE_INBOX.equals(style)) {
setNotification(notId, message);
mBuilder.setContentText(message);
ArrayList<String> messageList = messageMap.get(notId);
Integer sizeList = messageList.size();
if (sizeList > 1) {
String sizeListMessage = sizeList.toString();
String stacking = sizeList + " more";
if (getString(extras, SUMMARY_TEXT) != null) {
stacking = getString(extras, SUMMARY_TEXT);
stacking = stacking.replace("%n%", sizeListMessage);
}
NotificationCompat.InboxStyle notificationInbox = new NotificationCompat.InboxStyle()
.setBigContentTitle(getString(extras, TITLE))
.setSummaryText(stacking);
for (int i = messageList.size() - 1; i >= 0; i--) {
notificationInbox.addLine(Html.fromHtml(messageList.get(i)));
}
mBuilder.setStyle(notificationInbox);
} else {
NotificationCompat.BigTextStyle bigText = new NotificationCompat.BigTextStyle();
if (message != null) {
bigText.bigText(message);
bigText.setBigContentTitle(getString(extras, TITLE));
mBuilder.setStyle(bigText);
}
}
} else if (STYLE_PICTURE.equals(style)) {
setNotification(notId, "");
NotificationCompat.BigPictureStyle bigPicture = new NotificationCompat.BigPictureStyle();
bigPicture.bigPicture(getBitmapFromURL(getString(extras, PICTURE)));
bigPicture.setBigContentTitle(getString(extras, TITLE));
bigPicture.setSummaryText(getString(extras, SUMMARY_TEXT));
mBuilder.setContentTitle(getString(extras, TITLE));
mBuilder.setContentText(message);
mBuilder.setStyle(bigPicture);
} else {
setNotification(notId, "");
NotificationCompat.BigTextStyle bigText = new NotificationCompat.BigTextStyle();
if (message != null) {
mBuilder.setContentText(Html.fromHtml(message));
bigText.bigText(message);
bigText.setBigContentTitle(getString(extras, TITLE));
String summaryText = getString(extras, SUMMARY_TEXT);
if (summaryText != null) {
bigText.setSummaryText(summaryText);
}
mBuilder.setStyle(bigText);
}
/*
else {
mBuilder.setContentText("<missing message content>");
}
*/
}
}
private String getString(Bundle extras,String key) {
String message = extras.getString(key);
if (message == null) {
message = extras.getString(GCM_NOTIFICATION+"."+key);
}
return message;
}
private String getString(Bundle extras,String key, String defaultString) {
String message = extras.getString(key);
if (message == null) {
message = extras.getString(GCM_NOTIFICATION+"."+key, defaultString);
}
return message;
}
private String getMessageText(Bundle extras) {
String message = getString(extras, MESSAGE);
if (message == null) {
message = getString(extras, BODY);
}
return message;
}
private void setNotificationSound(Context context, Bundle extras, NotificationCompat.Builder mBuilder) {
String soundname = getString(extras, SOUNDNAME);
if (soundname == null) {
soundname = getString(extras, SOUND);
}
if (soundname != null) {
Uri sound = Uri.parse(ContentResolver.SCHEME_ANDROID_RESOURCE
+ "://" + context.getPackageName() + "/raw/" + soundname);
Log.d(LOG_TAG, sound.toString());
mBuilder.setSound(sound);
} else {
mBuilder.setSound(android.provider.Settings.System.DEFAULT_NOTIFICATION_URI);
}
}
private void setNotificationLedColor(Bundle extras, NotificationCompat.Builder mBuilder) {
String ledColor = getString(extras, LED_COLOR);
if (ledColor != null) {
// Parse the ledColor string into an int array
String[] items = ledColor.replaceAll("\\[", "").replaceAll("\\]", "").split(",");
int[] results = new int[items.length];
for (int i = 0; i < items.length; i++) {
try {
results[i] = Integer.parseInt(items[i]);
} catch (NumberFormatException nfe) {}
}
if (results.length == 4) {
mBuilder.setLights(Color.argb(results[0], results[1], results[2], results[3]), 500, 500);
} else {
Log.e(LOG_TAG, "ledColor parameter must be an array of length == 4 (ARGB)");
}
}
}
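/*
* Example ledColor value (hypothetical): "[255,0,255,0]" is parsed into {255,0,255,0}
* and passed to Color.argb(alpha, red, green, blue), i.e. an opaque green LED blinking
* 500ms on / 500ms off; any array whose length is not exactly 4 is rejected with the
* error logged above.
*/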
private void setNotificationPriority(Bundle extras, NotificationCompat.Builder mBuilder) {
String priorityStr = getString(extras, PRIORITY);
if (priorityStr != null) {
try {
Integer priority = Integer.parseInt(priorityStr);
if (priority >= NotificationCompat.PRIORITY_MIN && priority <= NotificationCompat.PRIORITY_MAX) {
mBuilder.setPriority(priority);
} else {
Log.e(LOG_TAG, "Priority parameter must be between -2 and 2");
}
} catch (NumberFormatException e) {
e.printStackTrace();
}
}
}
private void setNotificationLargeIcon(Bundle extras, String packageName, Resources resources, NotificationCompat.Builder mBuilder) {
String gcmLargeIcon = getString(extras, IMAGE); // from gcm
if (gcmLargeIcon != null) {
if (gcmLargeIcon.startsWith("http:
mBuilder.setLargeIcon(getBitmapFromURL(gcmLargeIcon));
Log.d(LOG_TAG, "using remote large-icon from gcm");
} else {
AssetManager assetManager = getAssets();
InputStream istr;
try {
istr = assetManager.open(gcmLargeIcon);
Bitmap bitmap = BitmapFactory.decodeStream(istr);
mBuilder.setLargeIcon(bitmap);
Log.d(LOG_TAG, "using assets large-icon from gcm");
} catch (IOException e) {
int largeIconId = 0;
largeIconId = resources.getIdentifier(gcmLargeIcon, DRAWABLE, packageName);
if (largeIconId != 0) {
Bitmap largeIconBitmap = BitmapFactory.decodeResource(resources, largeIconId);
mBuilder.setLargeIcon(largeIconBitmap);
Log.d(LOG_TAG, "using resources large-icon from gcm");
} else {
Log.d(LOG_TAG, "Not setting large icon");
}
}
}
}
}
private void setNotificationSmallIcon(Context context, Bundle extras, String packageName, Resources resources, NotificationCompat.Builder mBuilder, String localIcon) {
int iconId = 0;
String icon = getString(extras, ICON);
if (icon != null) {
iconId = resources.getIdentifier(icon, DRAWABLE, packageName);
Log.d(LOG_TAG, "using icon from plugin options");
}
else if (localIcon != null) {
iconId = resources.getIdentifier(localIcon, DRAWABLE, packageName);
Log.d(LOG_TAG, "using icon from plugin options");
}
if (iconId == 0) {
Log.d(LOG_TAG, "no icon resource found - using application icon");
iconId = context.getApplicationInfo().icon;
}
mBuilder.setSmallIcon(iconId);
}
private void setNotificationIconColor(String color, NotificationCompat.Builder mBuilder, String localIconColor) {
int iconColor = 0;
if (color != null) {
try {
iconColor = Color.parseColor(color);
} catch (IllegalArgumentException e) {
Log.e(LOG_TAG, "couldn't parse color from android options");
}
}
else if (localIconColor != null) {
try {
iconColor = Color.parseColor(localIconColor);
} catch (IllegalArgumentException e) {
Log.e(LOG_TAG, "couldn't parse color from android options");
}
}
if (iconColor != 0) {
mBuilder.setColor(iconColor);
}
}
public Bitmap getBitmapFromURL(String strURL) {
try {
URL url = new URL(strURL);
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setDoInput(true);
connection.connect();
InputStream input = connection.getInputStream();
return BitmapFactory.decodeStream(input);
} catch (IOException e) {
e.printStackTrace();
return null;
}
}
private static String getAppName(Context context) {
CharSequence appName = context.getPackageManager().getApplicationLabel(context.getApplicationInfo());
return (String)appName;
}
@Override
public void onError(Context context, String errorId) {
Log.e(LOG_TAG, "onError - errorId: " + errorId);
// if we are in the foreground, just send the error
if (PushPlugin.isInForeground()) {
PushPlugin.sendError(errorId);
}
}
private int parseInt(String value, Bundle extras) {
int retval = 0;
try {
retval = Integer.parseInt(getString(extras, value));
}
catch(NumberFormatException e) {
Log.e(LOG_TAG, "Number format exception - Error parsing " + value + ": " + e.getMessage());
}
catch(Exception e) {
Log.e(LOG_TAG, "Number format exception - Error parsing " + value + ": " + e.getMessage());
}
return retval;
}
}
|
package api.web.gw2.mapping.core;
import api.web.gw2.mapping.v2.account.wallet.CurrencyAmount;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Type;
import java.net.MalformedURLException;
import java.net.URL;
import java.time.Duration;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Currency;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.OptionalDouble;
import java.util.OptionalInt;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
abstract class JsonpAbstractMarshaller {
/**
* The logger instance.
*/
protected final Logger logger = Logger.getLogger(getClass().getName());
/**
* Creates a new empty instance.
*/
public JsonpAbstractMarshaller() {
}
/**
* Load an object from JSON.
* @param <T> The type to use.
* @param targetClass The target class.
* @param input The source stream.
* @return A {@code T} instance, may be {@code null}.
* @throws NullPointerException If {@code targetClass} or {@code input} is {@code null}.
* @throws IOException In case of IO error.
*/
public abstract <T> T loadObject(final Class<T> targetClass, final InputStream input) throws NullPointerException, IOException;
/**
* Load a collection from JSON.
* @param <T> The type to use.
* @param targetClass The target class.
* @param input The source stream.
* @return A {@code Collection<T>} instance, may be {@code null}.
* @throws NullPointerException If {@code targetClass} or {@code input} is {@code null}.
* @throws IOException In case of IO error.
*/
public abstract <T> Collection<T> loadObjectArray(final Class<T> targetClass, final InputStream input) throws IOException;
/**
* Load a runtime object from JSON.
* @param <T> The type to use.
* @param selector The name of the selector value.
* @param pattern The pattern which allows to construct the class name.
* @param input The source stream.
* @return A {@code T} instance, may be {@code null}.
* @throws NullPointerException If {@code selector}, {@code pattern} or {@code input} is {@code null}.
* @throws IOException In case of IO error.
*/
public abstract <T> T loadRuntimeObject(final String selector, final String pattern, final InputStream input) throws IOException;
/**
* Creates a concrete instance of the desired interface using the {@code ServiceLoader}.
* @param <T> The target type.
* @param targetClass The target class.
* @return A {@code T} instance, may be {@code null}.
*/
protected final <T> T createConcreteEmptyInstance(final Class<T> targetClass) {
// Get service loader for target class.
final ServiceLoader<T> serviceLoader = ServiceLoader.load(targetClass);
// Return the first concrete instance registered for the target class, if any.
final Iterator<T> iterator = serviceLoader.iterator();
return iterator.hasNext() ? iterator.next() : null;
}
/**
* Tries to locate the given field in the selected class or its ancestor classes.
* @param fieldName The field name.
* @param targetClass The target class.
* @return A {@code Field} instance, may be {@code null} if field was not found.
*/
protected final Field lookupField(final String fieldName, final Class targetClass) {
Field field = null;
for (Class aClass = targetClass; aClass != null && field == null; aClass = aClass.getSuperclass()) {
try {
field = aClass.getDeclaredField(fieldName);
} catch (NoSuchFieldException ex) {
logger.log(Level.FINEST, "Could not find field \"{0}\" in class {1}, attempting to use the class hierarchy.", new Object[]{fieldName, targetClass});
}
}
return field;
}
/**
* Converts a JSON key into a Java field name.
* @param key The JSON key.
* @return A {@code String}, never {@code null}.
* @throws NullPointerException If {@code key} is {@code null}.
*/
protected static final String jsonKeyToJavaFieldName(final String key) throws NullPointerException {
Objects.requireNonNull(key);
final String[] tokens = key.split("_"); // NOI18N.
final StringBuilder buffer = new StringBuilder(key.length());
buffer.append(tokens[0]);
Arrays.stream(tokens, 1, tokens.length)
.forEach(token -> {
final String head = token.substring(0, 1).toUpperCase();
final String tail = token.substring(1, token.length());
buffer.append(head);
buffer.append(tail);
});
return buffer.toString();
}
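// Illustrative usage sketch (the key names below are assumptions, not taken from the sources):
// jsonKeyToJavaFieldName("infix_upgrade") would return "infixUpgrade", while a key with no
// underscore such as "id" is returned unchanged.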
/**
* Convert an enum value to a proper Java class name (by removing '_' and setting the proper letter case).
* @param value The source enum value.
* @return A {@code String} instance, never {@code null}.
* @throws NullPointerException If {@code value} is {@code null}.
*/
protected static final String javaEnumToJavaClassName(final Enum value) throws NullPointerException {
Objects.requireNonNull(value);
final String name = value.name().toLowerCase();
final String[] tokens = name.split("_"); // NOI18N.
final StringBuilder buffer = new StringBuilder(name.length());
Arrays.stream(tokens)
.forEach(token -> {
String head = token.substring(0, 1).toUpperCase();
String tail = token.substring(1, token.length());
buffer.append(head);
buffer.append(tail);
});
return buffer.toString();
}
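// Illustrative sketch (the enum constant is an assumption, not from the sources): for an enum
// value named SOME_VALUE, name() is lower-cased to "some_value", split on '_' and each token
// capitalised, so this method would return "SomeValue".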
/**
* Gets the default value for given field.
* <br/>This method is usually called when encountering a {@code null} value.
* @param field The field on which the value will be set.
* @return An {@code Object} instance, may be {@code null}.
* <br/>The value to return will be determined from the annotations and the class of the target field.
* @throws NullPointerException If {@code field} is {@code null}.
*/
protected final Object defaultValueForField(final Field field) throws NullPointerException {
Objects.requireNonNull(field);
final Type fieldType = field.getType();
boolean isOptional = field.getAnnotation(OptionalValue.class) != null;
boolean isId = field.getAnnotation(IdValue.class) != null;
boolean isLevel = field.getAnnotation(LevelValue.class) != null;
boolean isCurrency = field.getAnnotation(CoinValue.class) != null;
// boolean isDistance = field.getAnnotation(DistanceValue.class) != null;
boolean isLocalizedResource = field.getAnnotation(LocalizedResource.class) != null;
boolean isQuantity = field.getAnnotation(QuantityValue.class) != null;
boolean isDate = field.getAnnotation(DateValue.class) != null;
boolean isDuration = field.getAnnotation(DurationValue.class) != null;
boolean isURL = field.getAnnotation(URLValue.class) != null;
boolean isPercent = field.getAnnotation(PercentValue.class) != null;
boolean isList = field.getAnnotation(ListValue.class) != null;
boolean isSet = field.getAnnotation(SetValue.class) != null;
boolean isMap = field.getAnnotation(MapValue.class) != null;
boolean isCoord2D = field.getAnnotation(Coord2DValue.class) != null;
boolean isCoord3D = field.getAnnotation(Coord3DValue.class) != null;
Object result = null;
// Use the annotation of the field.
if (isOptional) {
if (isList || isSet || isMap) {
result = Optional.empty();
} else if (isQuantity || isLevel) {
result = OptionalInt.empty();
} else if (isPercent) {
result = OptionalDouble.empty();
} else if (isId) {
final boolean isIntegerId = field.getAnnotation(IdValue.class).flavor() == IdValue.Flavor.INTEGER;
result = isIntegerId ? OptionalInt.empty() : Optional.empty();
} else {
result = Optional.empty();
}
} else if (isLevel) {
result = LevelValue.MIN_LEVEL;
} else if (isCurrency) {
result = CoinAmount.ZERO;
// } else if (isDistance) {
// result = QuantityAmount.ZERO;
} else if (isCoord2D) {
result = Point2D.ORIGIN;
} else if (isCoord3D) {
result = Point3D.ORIGIN;
} else if (isQuantity) {
result = 0;
// result = DistanceAmount.ZERO;
} else if (isDate) {
result = DateValue.DEFAULT;
} else if (isDuration) {
result = Duration.ZERO;
} // Now use the class of the field.
else if (isLocalizedResource || fieldType == String.class) {
result = ""; // NOI8N.
} else if (fieldType == Integer.TYPE) {
result = 0;
} else if (fieldType == Long.TYPE) {
result = 0L;
} else if (fieldType == Float.TYPE) {
result = 0F;
} else if (fieldType == Double.TYPE) {
result = 0D;
} else if (fieldType == Boolean.TYPE) {
result = Boolean.FALSE;
} else if (isSet || fieldType == Set.class) {
result = Collections.EMPTY_SET;
} else if (isList || fieldType == List.class) {
result = Collections.EMPTY_LIST;
} else if (isMap || fieldType == Map.class) {
result = Collections.EMPTY_MAP;
}
return result;
}
/**
* Convert a value obtained from JSON to value that can suit the target field.
* <br/>This method is called before setting a value into a field.
* @param field The target field.
* @param value The value obtained from JSON.
* @return An {@code Object}, may be {@code null}.
* <br/>The value to return will be determined from the annotations and the class of the target field.
* @throws NullPointerException If {@code field} is {@code null}.
* @throws MalformedURLException If URL cannot be parsed from input object.
*/
protected final Object valueForField(final Field field, final Object value) throws NullPointerException, MalformedURLException, IllegalAccessException, IllegalArgumentException, NoSuchMethodException, InvocationTargetException, ClassNotFoundException {
Objects.requireNonNull(field);
// @todo Not all types used yet.
boolean isOptional = field.getAnnotation(OptionalValue.class) != null;
boolean isId = field.getAnnotation(IdValue.class) != null;
boolean isLevel = field.getAnnotation(LevelValue.class) != null;
boolean isCurrency = field.getAnnotation(CoinValue.class) != null;
// boolean isDistance = field.getAnnotation(DistanceValue.class) != null;
boolean isLocalizedResource = field.getAnnotation(LocalizedResource.class) != null;
boolean isQuantity = field.getAnnotation(QuantityValue.class) != null;
boolean isDate = field.getAnnotation(DateValue.class) != null;
boolean isDuration = field.getAnnotation(DurationValue.class) != null;
boolean isURL = field.getAnnotation(URLValue.class) != null;
boolean isPercent = field.getAnnotation(PercentValue.class) != null;
boolean isList = field.getAnnotation(ListValue.class) != null;
boolean isSet = field.getAnnotation(SetValue.class) != null;
boolean isMap = field.getAnnotation(MapValue.class) != null;
boolean isEnum = field.getAnnotation(EnumValue.class) != null;
boolean isCoord2D = field.getAnnotation(Coord2DValue.class) != null;
boolean isCoord3D = field.getAnnotation(Coord3DValue.class) != null;
Object result = value;
// Base types.
if (isOptional && value == null) {
result = null;
} else if (isURL) {
final String path = (String) value;
result = new URL(path);
} else if (isDuration) {
final Number number = (Number) value;
// @todo In game some skill cast time can be 3/4 seconds. Need to check if it's the same for buffs.
final int quantity = number.intValue();
final DurationValue annotation = field.getAnnotation(DurationValue.class);
final DurationValue.Flavor flavor = annotation.flavor();
switch (flavor) {
case MILLIS: {
result = Duration.ofMillis(quantity);
}
break;
case SECONDS:
default: {
result = Duration.ofSeconds(quantity);
}
}
} else if (isDate) {
final String string = (String) value;
result = ZonedDateTime.parse(string);
} else if (isList) {
final List list = (List) value;
result = Collections.unmodifiableList(list);
} else if (isSet) {
final Set set = new HashSet((List) value);
result = Collections.unmodifiableSet(set);
} else if (isMap) {
final Map map = (Map) value;
result = Collections.unmodifiableMap(map);
} else if (isCurrency) {
final Number number = (Number) value;
result = CoinAmount.ofCopper(number.intValue());
} else if (isCoord2D) {
final List<? extends Number> list = (List) value;
final double x = list.get(0).doubleValue();
final double y = list.get(1).doubleValue();
result = new Point2D(x, y);
} else if (isCoord3D) {
final List<? extends Number> list = (List) value;
final double x = list.get(0).doubleValue();
final double y = list.get(1).doubleValue();
final double z = list.get(2).doubleValue();
result = new Point3D(x, y, z);
}
// As we rely heavily on enums, we need to convert base types obtained from JSON into valid enum values.
if (isEnum) {
// Do a second pass on the collection to marshall its content into enum values.
if (isSet || isList) {
final Collection<?> source = (Collection) result;
final List destination = new ArrayList(source.size());
// Cannot use stream because of exceptions raised in marshallEnumValue().
for (final Object v : source) {
final Object target = marshallEnumValue(field, v);
destination.add(target);
}
result = (isList) ? Collections.unmodifiableList(destination) : Collections.unmodifiableSet(new HashSet(destination));
} // @todo What about maps?
// Single value.
else {
result = marshallEnumValue(field, value);
}
}
// Wrap the result into an Optional instance.
// Provided default values may already be wrapped into Optional instances.
if (isOptional && !(result instanceof Optional || result instanceof OptionalInt)) {
if (isList || isSet || isMap) {
result = Optional.ofNullable(result);
} else if (isQuantity || isLevel) {
result = OptionalInt.of((Integer) result);
} else if (isPercent) {
result = OptionalDouble.of((Double) result);
} else if (isId) {
final boolean isIntegerId = field.getAnnotation(IdValue.class).flavor() == IdValue.Flavor.INTEGER;
result = isIntegerId ? OptionalInt.of((Integer) result) : Optional.ofNullable(result);
} else {
result = Optional.ofNullable(result);
}
}
// LOGGER.log(Level.INFO, "{0} declaring class: {1}", new Object[]{field.getName(), field.getDeclaringClass()});
// LOGGER.log(Level.INFO, "{0} type: {1}", new Object[]{field.getName(), field.getType()});
// LOGGER.log(Level.INFO, "{0} annotated type: {1}", new Object[]{field.getName(), field.getAnnotatedType()});
// LOGGER.log(Level.INFO, "{0} value returned as: {1} ({2})", new Object[]{field.getName(), result, result.getClass()});
return result;
}
protected final Object marshallEnumValue(final Field field, final Object value) throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
Objects.requireNonNull(field);
Objects.requireNonNull(value);
// For simple field this should be the enum type.
// The class obtained here is not good when the field holds an optional or a collection.
Class enumClass = field.getType();
final boolean isList = field.getAnnotation(ListValue.class) != null;
final boolean isSet = field.getAnnotation(SetValue.class) != null;
final boolean isMap = field.getAnnotation(MapValue.class) != null;
final boolean isOptional = field.getAnnotation(OptionalValue.class) != null;
if (isList || isSet || isMap || isOptional) {
final Class[] classes = findClassesForField(field);
enumClass = classes[0];
}
// Will not work if value is not of the proper type (usually String or integer).
final Object result = EnumValueFactory.INSTANCE.mapEnumValue(enumClass, value);
Objects.requireNonNull(result);
// Issue a warning if the value returned is the unknown value.
if ("UNKOWN".equals(((Enum) result).name())) { // NOI18N.
logger.log(Level.WARNING, "Field \"{0}\": unable to marshall enum value \"{1}\", defaulting to value \"{2}\" instead.", new Object[]{field.getName(), value, result}); // NOI18N.
}
logger.log(Level.FINEST, "Field \"{0}\": marshalled enum value \"{1}\" into value \"{2}\".", new Object[]{field.getName(), value, result}); // NOI18N.
return result;
}
/**
* Common method used to retrieve proper class(es) for a given field.
* <br>Classes for the fields are determined by the annotations set on this field.
* <br>Currently this method is able to process the following annotations:
* <ul>
* <li>{@code OptionalValue}</li>
* <li>{@code ListValue}</li>
* <li>{@code SetValue}</li>
* <li>{@code MapValue}</li>
* </ul>
* @param field The source field.
* @return A {@code Class[]} instance, never {@code null}
* <br>If the given field is marked with the {@code MapValue} annotation, the array will be of size 2:
* <ul>
* <li>Class at index 0 is the class of the keys to the map.</li>
* <li>Class at index 1 is the class of the values to the map.</li>
* </ul>
* Otherwise the array returned will be of size 1.
* @throws ClassNotFoundException If one of the target classes cannot be found.
* @throws NullPointerException If {@code field} is {@code null}.
* @see OptionalValue
* @see ListValue
* @see SetValue
* @see MapValue
*/
protected Class[] findClassesForField(final Field field) throws ClassNotFoundException, NullPointerException {
Objects.requireNonNull(field);
// @todo Find interface class.
// @todo Check the validity of this.
final boolean isList = field.getAnnotation(ListValue.class) != null;
final boolean isSet = field.getAnnotation(SetValue.class) != null;
final boolean isMap = field.getAnnotation(MapValue.class) != null;
final boolean isOptional = field.getAnnotation(OptionalValue.class) != null;
String typename = field.getGenericType().getTypeName();
if (isOptional) {
typename = typename.replaceAll("java\\.util\\.Optional<", ""); // NOI18N.
}
if (isSet) {
typename = typename.replaceAll("java\\.util\\.Set<", ""); // NOI18N.
}
if (isList) {
typename = typename.replaceAll("java\\.util\\.List<", ""); // NOI18N.
}
if (isMap) {
typename = typename.replaceAll("java\\.util\\.Map<", ""); // NOI18N.
}
// Remove trailing >.
typename = typename.replaceAll(">+", ""); // NOI18N.
final String[] subTargetClassNames = typename.split(",\\s*");
final Class[] subTargetClasses = new Class[subTargetClassNames.length];
for (int index = 0; index < subTargetClassNames.length; index++) {
subTargetClasses[index] = Class.forName(subTargetClassNames[index]);
}
return subTargetClasses;
}
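// Illustrative sketch of findClassesForField (the field declarations are assumptions, not taken
// from the sources); the unwrapping above is driven by the annotations on the field:
// @ListValue List<CurrencyAmount> wallet -> { CurrencyAmount.class }
// @MapValue Map<String, CurrencyAmount> prices -> { String.class, CurrencyAmount.class }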
/**
* Logs a warning about a missing field.
* @param key The source key from the JSON.
* @param fieldName The name of the target field that is missing.
* @param targetClass The target class.
*/
protected final void logWarningMissingField(final String key, final String fieldName, final Class targetClass) {
final String message = String.format("No matching field \"%s\" found for JSON key \"%s\" in class %s.", fieldName, key, targetClass.getName()); // NOI18N.
logger.warning(message);
}
}
|
package cmonster.browsers;
import cmonster.cookies.Cookie;
import cmonster.cookies.DecryptedCookie;
import cmonster.cookies.EncryptedCookie;
import cmonster.utils.OS;
import com.sun.jna.platform.win32.Crypt32Util;
import org.apache.maven.shared.utils.io.DirectoryScanner;
import javax.crypto.Cipher;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.SecretKeySpec;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.file.Files;
import java.sql.*;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
public class ChromeBrowser extends Browser {
private String chromeKeyringPassword = null;
/**
* Returns a set of cookie store locations
*/
@Override
protected Set<File> getCookieStores() {
HashSet<File> cookieStores = new HashSet<>();
String userHome = System.getProperty("user.home");
String[] cookieDirectories = {
"/AppData/Local/Google/Chrome/User Data",
"/Application Data/Google/Chrome/User Data",
"/Library/Application Support/Google/Chrome",
"/.config/chromium"
};
for (String cookieDirectory : cookieDirectories) {
String baseDir = userHome + cookieDirectory;
String[] files = getCookieDbFiles(baseDir);
if (files != null && files.length > 0) {
for (String file : files) {
cookieStores.add(new File(baseDir + "/" + file));
}
}
}
return cookieStores;
}
/**
* In some cases people may have configured browser profiles, which creates custom cookie files
* @param baseDir
* @author <a href="mailto:kbalbertyu@gmail.com">Albert Yu</a> 5/26/2017 1:40 PM
*/
private String[] getCookieDbFiles(String baseDir) {
String[] files = null;
File filePath = new File(baseDir);
if (filePath.exists() && filePath.isDirectory()) {
DirectoryScanner ds = new DirectoryScanner();
String[] includes = {"*/Cookies"};
ds.setIncludes(includes);
ds.setBasedir(new File(baseDir));
ds.setCaseSensitive(true);
ds.scan();
files = ds.getIncludedFiles();
}
return files;
}
/**
* Processes all cookies in the cookie store for a given domain or all
* domains if domainFilter is null
*/
@Override
protected Set<Cookie> processCookies(File cookieStore, String domainFilter) {
HashSet<Cookie> cookies = new HashSet<>();
if (cookieStore.exists()) {
Connection connection = null;
try {
cookieStoreCopy.delete();
Files.copy(cookieStore.toPath(), cookieStoreCopy.toPath());
// load the sqlite-JDBC driver using the current class loader
Class.forName("org.sqlite.JDBC");
// create a database connection
connection = DriverManager.getConnection("jdbc:sqlite:" + cookieStoreCopy.getAbsolutePath());
Statement statement = connection.createStatement();
statement.setQueryTimeout(30); // set timeout to 30 seconds
ResultSet result;
if (domainFilter == null || domainFilter.isEmpty()) {
result = statement.executeQuery("select * from cookies");
} else {
result = statement.executeQuery("select * from cookies where host_key like \"%" + domainFilter + "%\"");
}
while (result.next()) {
String name = result.getString("name");
parseCookieFromResult(cookieStore, name, cookies, result);
}
} catch (Exception e) {
e.printStackTrace();
// if the error message is "out of memory",
// it probably means no database file is found
} finally {
try {
if (connection != null) {
connection.close();
}
} catch (SQLException e) {
// connection close failed
}
}
}
return cookies;
}
/**
* Returns cookies for cookie key with given domain
*/
@Override
public Set<Cookie> getCookiesForDomain(String name, String domain) {
HashSet<Cookie> cookies = new HashSet<>();
for(File cookieStore : getCookieStores()){
cookies.addAll(getCookiesByName(cookieStore, name, domain));
}
return cookies;
}
private Set<Cookie> getCookiesByName(File cookieStore, String name, String domainFilter) {
HashSet<Cookie> cookies = new HashSet<>();
if (cookieStore.exists()) {
Connection connection = null;
try {
cookieStoreCopy.delete();
Files.copy(cookieStore.toPath(), cookieStoreCopy.toPath());
// load the sqlite-JDBC driver using the current class loader
Class.forName("org.sqlite.JDBC");
// create a database connection
connection = DriverManager.getConnection("jdbc:sqlite:" + cookieStoreCopy.getAbsolutePath());
Statement statement = connection.createStatement();
statement.setQueryTimeout(30); // set timeout to 30 seconds
ResultSet result;
if (domainFilter == null || domainFilter.isEmpty()) {
result = statement.executeQuery(String.format("select * from cookies where name = '%s'", name));
} else {
result = statement.executeQuery("select * from cookies where name = '" + name + "' and host_key like '%" + domainFilter + "'");
}
while (result.next()) {
parseCookieFromResult(cookieStore, name, cookies, result);
}
} catch (Exception e) {
e.printStackTrace();
// if the error message is "out of memory",
// it probably means no database file is found
} finally {
try {
if (connection != null) {
connection.close();
}
} catch (SQLException e) {
// connection close failed
}
}
}
return cookies;
}
private void parseCookieFromResult(File cookieStore, String name, HashSet<Cookie> cookies, ResultSet result) throws SQLException {
byte[] encryptedBytes = result.getBytes("encrypted_value");
String path = result.getString("path");
String domain = result.getString("host_key");
boolean secure = result.getBoolean("secure");
boolean httpOnly = result.getBoolean("httponly");
Date expires = result.getDate("expires_utc");
EncryptedCookie encryptedCookie = new EncryptedCookie(name,
encryptedBytes,
expires,
path,
domain,
secure,
httpOnly,
cookieStore);
DecryptedCookie decryptedCookie = decrypt(encryptedCookie);
if (decryptedCookie != null) {
cookies.add(decryptedCookie);
} else {
cookies.add(encryptedCookie);
}
cookieStoreCopy.delete();
}
/**
* Decrypts an encrypted cookie
*/
@Override
protected DecryptedCookie decrypt(EncryptedCookie encryptedCookie) {
byte[] decryptedBytes = null;
if (OS.isWindows()) {
try {
decryptedBytes = Crypt32Util.cryptUnprotectData(encryptedCookie.getEncryptedValue());
} catch (Exception e) {
decryptedBytes = null;
}
} else if (OS.isLinux()) {
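// The values below mirror Chromium's Linux cookie encryption scheme: an AES-128 key is derived
// with PBKDF2-HMAC-SHA1 from the fixed password "peanuts" and salt "saltysalt" using a single
// iteration, and decryption uses CBC mode with a 16-space IV.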
try {
byte[] salt = "saltysalt".getBytes();
char[] password = "peanuts".toCharArray();
char[] iv = new char[16];
Arrays.fill(iv, ' ');
int keyLength = 16;
int iterations = 1;
PBEKeySpec spec = new PBEKeySpec(password, salt, iterations, keyLength * 8);
SecretKeyFactory pbkdf2 = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1");
byte[] aesKey = pbkdf2.generateSecret(spec).getEncoded();
SecretKeySpec keySpec = new SecretKeySpec(aesKey, "AES");
Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
cipher.init(Cipher.DECRYPT_MODE, keySpec, new IvParameterSpec(new String(iv).getBytes()));
// if cookies are encrypted, "v10" is the prefix (it has to be removed before decryption)
byte[] encryptedBytes = encryptedCookie.getEncryptedValue();
if (new String(encryptedCookie.getEncryptedValue()).startsWith("v10")) {
encryptedBytes = Arrays.copyOfRange(encryptedBytes, 3, encryptedBytes.length);
}
decryptedBytes = cipher.doFinal(encryptedBytes);
} catch (Exception e) {
decryptedBytes = null;
}
} else if (OS.isMac()) {
// access the decryption password from the keyring manager
if (chromeKeyringPassword == null) try {
chromeKeyringPassword = getMacKeyringPassword("Chrome Safe Storage");
} catch (IOException ignored) {
}
try {
byte[] salt = "saltysalt".getBytes();
char[] password = chromeKeyringPassword.toCharArray();
char[] iv = new char[16];
Arrays.fill(iv, ' ');
int keyLength = 16;
int iterations = 1003;
PBEKeySpec spec = new PBEKeySpec(password, salt, iterations, keyLength * 8);
SecretKeyFactory pbkdf2 = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1");
byte[] aesKey = pbkdf2.generateSecret(spec).getEncoded();
SecretKeySpec keySpec = new SecretKeySpec(aesKey, "AES");
Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
cipher.init(Cipher.DECRYPT_MODE, keySpec, new IvParameterSpec(new String(iv).getBytes()));
// if cookies are encrypted, "v10" is the prefix (it has to be removed before decryption)
byte[] encryptedBytes = encryptedCookie.getEncryptedValue();
if (new String(encryptedCookie.getEncryptedValue()).startsWith("v10")) {
encryptedBytes = Arrays.copyOfRange(encryptedBytes, 3, encryptedBytes.length);
}
decryptedBytes = cipher.doFinal(encryptedBytes);
} catch (Exception e) {
decryptedBytes = null;
}
}
if (decryptedBytes == null) {
return null;
} else {
return new DecryptedCookie(encryptedCookie.getName(),
encryptedCookie.getEncryptedValue(),
new String(decryptedBytes),
encryptedCookie.getExpires(),
encryptedCookie.getPath(),
encryptedCookie.getDomain(),
encryptedCookie.isSecure(),
encryptedCookie.isHttpOnly(),
encryptedCookie.getCookieStore());
}
}
/**
* Accesses the apple keyring to retrieve the Chrome decryption password
*
* @param application the name of the keychain entry to look up (e.g. "Chrome Safe Storage")
* @return the password stored for the given application
* @throws IOException if the security command cannot be executed
*/
private static String getMacKeyringPassword(String application) throws IOException {
Runtime rt = Runtime.getRuntime();
String[] commands = {"security", "find-generic-password", "-w", "-s", application};
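// For reference (illustrative, not from the original sources), the equivalent shell command is:
// security find-generic-password -w -s "<application>"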
Process proc = rt.exec(commands);
BufferedReader stdInput = new BufferedReader(new InputStreamReader(proc.getInputStream()));
StringBuilder result = new StringBuilder();
String s;
while ((s = stdInput.readLine()) != null) {
result.append(s);
}
return result.toString();
}
}
|
package jolie;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import jolie.lang.Constants;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import jolie.lang.parse.Scanner;
/**
* A parser for JOLIE's command line arguments,
* providing methods for accessing them.
* @author Fabrizio Montesi
*/
public class CommandLineParser
{
private final static Pattern pathSeparatorPattern = Pattern.compile( jolie.lang.Constants.pathSeparator );
private final int connectionsLimit;
private final String[] includePaths;
private final URL[] libURLs;
private final InputStream programStream;
private final String programFilepath;
private final String[] arguments;
private final Map< String, Scanner.Token > constants = new HashMap< String, Scanner.Token >();
private final boolean verbose;
/**
* Returns the arguments passed to the JOLIE program.
* @return the arguments passed to the JOLIE program
*/
public String[] arguments()
{
return arguments;
}
/**
* Returns the file path of the JOLIE program to execute.
* @return the file path of the JOLIE program to execute
*/
public String programFilepath()
{
return programFilepath;
}
/**
* Returns an InputStream for the program code to execute.
* @return an InputStream for the program code to execute
*/
public InputStream programStream()
{
return programStream;
}
/**
* Returns the library URLs passed by command line with the -l option.
* @return the library URLs passed by command line
*/
public URL[] libURLs()
{
return libURLs;
}
/**
* Returns the include paths passed by command line with the -i option.
* @return the include paths passed by command line
*/
public String[] includePaths()
{
return includePaths;
}
/**
* Returns the connection limit parameter
* passed by command line with the --connlimit option.
* @return the connection limit parameter passed by command line
*/
public int connectionsLimit()
{
return connectionsLimit;
}
private static String getOptionString( String option, String description )
{
return( '\t' + option + "\t\t" + description + '\n' );
}
private String getVersionString()
{
return( Constants.VERSION + " " + Constants.COPYRIGHT );
}
/**
* Returns a map containing the constants defined by command line.
* @return a map containing the constants defined by command line
*/
public Map< String, Scanner.Token > definedConstants()
{
return constants;
}
private String getHelpString()
{
StringBuilder helpBuilder = new StringBuilder();
helpBuilder.append( getVersionString() );
helpBuilder.append( "\n\nUsage: jolie [options] behaviour_file [options] [program arguments]\n\n" );
helpBuilder.append( "Available options:\n" );
helpBuilder.append(
getOptionString( "-h, --help", "Display this help information" ) );
//TODO include doc for -l and -i
helpBuilder.append(
getOptionString( "-C ConstantIdentifier=ConstantValue", "Sets constant ConstantIdentifier to ConstantValue before starting execution" ) );
helpBuilder.append(
getOptionString( "--connlimit [number]", "Set the maximum number of active connection threads" ) );
helpBuilder.append(
getOptionString( "--verbose", "Activate verbose mode" ) );
helpBuilder.append(
getOptionString( "--version", "Display this program version information" ) );
return helpBuilder.toString();
}
private void parseCommandLineConstant( String input )
throws IOException
{
Scanner scanner = new Scanner( new ByteArrayInputStream( input.getBytes() ), "Command line" );
Scanner.Token token = scanner.getToken();
if ( token.is( Scanner.TokenType.ID ) ) {
String id = token.content();
token = scanner.getToken();
if ( token.isNot( Scanner.TokenType.ASSIGN ) ) {
throw new IOException( "expected = after constant identifier " + id + ", found token type " + token.type() );
}
token = scanner.getToken();
if ( token.isValidConstant() == false ) {
throw new IOException( "expected constant value for constant identifier " + id + ", found token type " + token.type() );
}
constants.put( id, token );
} else {
throw new IOException( "expected constant identifier, found token type " + token.type() );
}
}
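/*
 * Illustrative example (the constant name and value are assumptions, not from the sources):
 * passing -C Verbosity=5 on the command line makes this method store the scanner token for the
 * value 5 under the key "Verbosity" in the constants map.
 */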
/**
* Returns <code>true</code> if the verbose option has been specified, false otherwise.
* @return <code>true</code> if the verbose option has been specified, false otherwise
*/
public boolean verbose()
{
return verbose;
}
/**
* Constructor
* @param args the command line arguments
* @param classLoader the ClassLoader to use for finding resources
* @throws jolie.CommandLineException if the command line is not valid or asks for simple information (like --help and --version).
*/
public CommandLineParser( String[] args, ClassLoader classLoader )
throws CommandLineException, IOException
{
boolean bVerbose = false;
List< String > argumentsList = new ArrayList< String >();
LinkedList< String > includeList = new LinkedList< String >();
List< String > libList = new ArrayList< String >();
int cLimit = -1;
String pwd = new File( "" ).getCanonicalPath();
includeList.add( pwd );
includeList.add( "include" );
libList.add( pwd );
libList.add( "ext" );
libList.add( "lib" );
String olFilepath = null;
for( int i = 0; i < args.length; i++ ) {
if ( "--help".equals( args[ i ] ) || "-h".equals( args[ i ] ) ) {
throw new CommandLineException( getHelpString() );
} else if ( "-C".equals( args[ i ] ) ) {
i++;
try {
parseCommandLineConstant( args[ i ] );
} catch( IOException e ) {
throw new CommandLineException( "Invalid constant definition, reason: " + e.getMessage() );
}
} else if ( "-i".equals( args[ i ] ) ) {
i++;
String[] tmp = pathSeparatorPattern.split( args[ i ] );
for( String s : tmp ) {
includeList.add( s );
}
} else if ( "-l".equals( args[ i ] ) ) {
i++;
String[] tmp = pathSeparatorPattern.split( args[ i ] );
for( String s : tmp ) {
libList.add( s );
}
} else if ( "--connlimit".equals( args[ i ] ) ) {
i++;
cLimit = Integer.parseInt( args[ i ] );
} else if ( "--verbose".equals( args[ i ] ) ) {
bVerbose = true;
} else if ( "--version".equals( args[ i ] ) ) {
throw new CommandLineException( getVersionString() );
} else if ( args[ i ].endsWith( ".ol" ) ) {
if ( olFilepath == null ) {
olFilepath = args[ i ];
} else {
throw new CommandLineException( "You can specify only an input file." );
}
} else {
for( int j = i; j < args.length; j++ ) {
argumentsList.add( args[ j ] );
}
}/* else
throw new CommandLineException( "Unrecognized command line token: " + args[ i ] );*/
}
verbose = bVerbose;
arguments = argumentsList.toArray( new String[ argumentsList.size() ] );
if ( olFilepath == null ) {
throw new CommandLineException( "Input file not specified." );
}
programFilepath = olFilepath;
connectionsLimit = cLimit;
List< URL > urls = new ArrayList< URL >();
for( String path : libList ) {
if ( path.endsWith( ".jar" ) ) {
urls.add( new URL( "jar:file:" + path + "!/" ) );
} else if ( new File( path ).isDirectory() ) {
urls.add( new URL( "file:" + path + "/" ) );
} else if ( path.endsWith( Constants.fileSeparator + "*" ) ) {
File dir = new File( path.substring( 0, path.length() - 2 ) );
String jars[] = dir.list( new FilenameFilter() {
public boolean accept( File dir, String filename ) {
return filename.endsWith( ".jar" );
}
} );
if ( jars != null ) {
for( String jarPath : jars ) {
urls.add( new URL( "jar:file:" + dir.getCanonicalPath() + Constants.fileSeparator + jarPath + "!/" ) );
}
}
}
}
libURLs = urls.toArray( new URL[]{} );
programStream = getOLStream( olFilepath, includeList, classLoader );
if ( programStream == null ) {
throw new FileNotFoundException( olFilepath );
}
includePaths = includeList.toArray( new String[]{} );
}
private InputStream getOLStream( String olFilepath, LinkedList< String > includePaths, ClassLoader classLoader )
throws FileNotFoundException, IOException
{
InputStream olStream = null;
File f = new File( olFilepath );
if ( f.exists() ) {
olStream = new FileInputStream( f );
} else {
for( int i = 0; i < includePaths.size() && olStream == null; i++ ) {
f = new File(
includePaths.get( i ) +
jolie.lang.Constants.fileSeparator +
olFilepath
);
if ( f.exists() ) {
olStream = new BufferedInputStream( new FileInputStream( f ) );
}
}
if ( olStream == null ) {
URL olURL = classLoader.getResource( olFilepath );
if ( olURL != null ) {
olStream = olURL.openStream();
}
}
}
if ( olStream != null && f.getParent() != null ) {
includePaths.addFirst( f.getParent() );
}
return olStream;
}
}
|
/**
* This is a standard class to use Object Relational Mapping
* Use generic constructor in subclasses to configure prefix and caseMod
* Avoid primitive types in attributes of the subclasses
*
* @author Murilo Augusto Castagnoli de Quadros
* @since 2014
* @email macquadros@gmail.com
*/
package murilo.libs.relational;
import static murilo.libs.utils.Utils.camelToSnakeCase;
import static murilo.libs.utils.Utils.firstLetterToUpperCase;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import com.mysql.jdbc.ResultSet;
public class ObjectRelational implements Cloneable, Serializable {
private static final long serialVersionUID = 1L;
public static final int SNAKELOWERCASE_TO_CAMELCASE = 1,
SNAKEUPPERCASE_TO_CAMELCASE = 2, UPPERCASE_TO_LOWERCASE = 3,
NONE = 0;
private transient Field[] fields;
private String prefix;
private int selectedCase;
/**
* Set a prefix in each column of table
*
* @param prefix
*/
public void setPrefix(String prefix) {
this.prefix = prefix;
}
/**
* From Database format to Java Object format
*
* @param mod
* values {SNAKELOWERCASE_TO_CAMELCASE,
* SNAKEUPPERCASE_TO_CAMELCASE, UPPERCASE_TO_LOWERCASE, NONE}
*/
public void setCaseMod(int mod) {
selectedCase = mod;
}
public void initialize(ResultSet resultSet)
throws IllegalArgumentException, IllegalAccessException,
SQLException, NoSuchMethodException, SecurityException,
InvocationTargetException {
fields = this.getClass().getDeclaredFields();
for (Field field : fields) {
if (!Modifier.isStatic(field.getModifiers())) {
String columnName = getColumnName(field.getName());
setFieldValue(field, resultSet.getObject(columnName));
}
}
}
public Map<String, String> export() throws IllegalArgumentException,
IllegalAccessException, NoSuchMethodException, SecurityException,
InvocationTargetException {
if (fields == null)
fields = this.getClass().getDeclaredFields();
Map<String, String> map = new HashMap<String, String>();
for (Field field : fields) {
if (!Modifier.isStatic(field.getModifiers())) {
String columnName = getColumnName(field.getName());
map.put(columnName, getFieldValueAsString(field));
}
}
return map;
}
/**
* Get the column name compatible with table
*
* @param fieldName
* @return column name
*/
public String getColumnName(String fieldName) {
String columnName = setSelectedPrefix(fieldName);
columnName = getStringInSelectedCase(columnName);
return columnName;
}
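/*
 * Illustrative example (assumes camelToSnakeCase inserts '_' before upper-case letters; not
 * taken from the sources): with prefix "tb_" and SNAKELOWERCASE_TO_CAMELCASE selected, the
 * field name "firstName" maps to the column name "tb_first_name".
 */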
/**
* Get a field declared in the class by name
*
* @param fieldName
* @return field
* @throws NoSuchFieldException
* @throws SecurityException
*/
public Field getField(String fieldName) throws NoSuchFieldException,
SecurityException {
return this.getClass().getDeclaredField(fieldName);
}
public void setFieldValue(Field field, Object value)
throws IllegalArgumentException, IllegalAccessException,
NoSuchMethodException, SecurityException, InvocationTargetException {
if (Modifier.isPublic(field.getModifiers())) {
field.set(this, value);
} else if (value != null) {
Method method = this.getClass().getMethod(
convertToSetMethod(field), value.getClass());
method.invoke(this, value);
}
}
public Object getFieldValue(Field field) throws IllegalArgumentException,
IllegalAccessException, InvocationTargetException,
NoSuchMethodException, SecurityException {
Object result = null;
if (Modifier.isPublic(field.getModifiers())) {
result = field.get(this);
} else {
result = this.getClass().getMethod(convertToGetMethod(field))
.invoke(this);
}
return result;
}
public String getFieldValueAsString(Field field)
throws IllegalArgumentException, IllegalAccessException,
NoSuchMethodException, SecurityException, InvocationTargetException {
String result = null;
Object res = getFieldValue(field);
if (res != null)
result = res.toString();
return result;
}
/**
* Convert a field name in a set method name using Java standard code
* definition
*
* @param field
* @return set method name
*/
private String convertToSetMethod(Field field) {
return "set" + firstLetterToUpperCase(field.getName());
}
/**
* Convert a field name in a get method name using Java standard code
* definition
*
* @param field
* @return get method name
*/
private String convertToGetMethod(Field field) {
return "get" + firstLetterToUpperCase(field.getName());
}
/**
* Convert a string to be compatible with database columns
*
* @param string
* @return string in the selected format
*/
private String getStringInSelectedCase(String string) {
String result;
switch (selectedCase) {
case SNAKELOWERCASE_TO_CAMELCASE:
result = camelToSnakeCase(string).toLowerCase();
break;
case SNAKEUPPERCASE_TO_CAMELCASE:
result = camelToSnakeCase(string).toUpperCase();
break;
case UPPERCASE_TO_LOWERCASE:
result = string.toUpperCase();
break;
default:
result = string;
}
return result;
}
/**
* Apply prefix in a column name
*
* @param columnName
* @return column name with prefix
*/
private String setSelectedPrefix(String columnName) {
if (prefix != null && !prefix.isEmpty()) {
columnName = prefix.concat(columnName);
}
return columnName;
}
@Override
public String toString() {
if (fields == null)
fields = this.getClass().getDeclaredFields();
String string = getClass().toString();
string = string.concat(" {");
for (Field field : fields) {
if (!Modifier.isStatic(field.getModifiers())) {
string = string.concat(field.getName());
string = string.concat("=");
try {
string += getFieldValueAsString(field);
} catch (IllegalArgumentException | IllegalAccessException
| NoSuchMethodException | SecurityException
| InvocationTargetException e) {
string = string
.concat("[error: the value is not accessible]");
e.printStackTrace();
}
string = string.concat(", ");
}
}
string = string.concat("}");
return string.replace(", }", "}");
}
@Override
public ObjectRelational clone() throws CloneNotSupportedException {
return (ObjectRelational) super.clone();
}
@Override
@SuppressWarnings("unchecked")
public boolean equals(Object obj) {
boolean result = true;
if (obj instanceof EncapsulatedObjectRelational<?>)
obj = ((EncapsulatedObjectRelational<ObjectRelational>) obj).get();
Class<?> clazz = this.getClass();
if (clazz.equals(obj.getClass())) {
Field[] fields = clazz.getDeclaredFields();
for (int i = 0; i < fields.length; i++) {
if (Modifier.isStatic(fields[i].getModifiers()))
continue;
try {
if (!this.getFieldValue(fields[i]).equals(
((ObjectRelational) obj).getFieldValue(fields[i]))) {
result = false;
break;
}
} catch (NullPointerException e) {
try {
if (this.getFieldValue(fields[i]) != ((ObjectRelational) obj)
.getFieldValue(fields[i])) {
result = false;
break;
}
} catch (Exception e1) {
e1.printStackTrace();
}
} catch (Exception e) {
e.printStackTrace();
}
}
} else {
result = false;
}
return result;
}
}
|
package gov.nih.nci.calab.ui.submit;
/**
* This class associates an assay result file with a characterization.
*
* @author pansu
*/
/* CVS $Id: LoadCharacterizationTableAction.java,v 1.2 2006-09-15 21:06:04 pansu Exp $ */
import gov.nih.nci.calab.ui.core.AbstractDispatchAction;
import gov.nih.nci.calab.ui.core.InitSessionSetup;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.upload.FormFile;
import org.apache.struts.validator.DynaValidatorForm;
public class LoadCharacterizationTableAction extends AbstractDispatchAction {
public ActionForward submit(ActionMapping mapping, ActionForm form,
HttpServletRequest request, HttpServletResponse response)
throws Exception {
ActionForward forward = null;
DynaValidatorForm theForm = (DynaValidatorForm) form;
String fileSource = (String) theForm.get("fileSource");
if (fileSource.equals("new")) {
FormFile file = (FormFile) theForm.get("file");
//TODO write file to filesystem and database
} else {
String fileId = (String) theForm.get("fileId");
//TODO load file from database
}
//TODO get the fileName
String fileName="this is a test";
request.setAttribute("characterizationFile", fileName);
String title = (String) theForm.get("title");
String description = (String) theForm.get("description");
String comments = (String) theForm.get("comments");
String keywords = (String) theForm.get("keywords");
String visibilities = (String) theForm.get("visibilities");
String[] keywordList = keywords.split("\r\n");
String forwardPage = (String) theForm.get("forwardPage");
// TODO add service codes
forward = mapping.findForward(forwardPage);
return forward;
}
public ActionForward setup(ActionMapping mapping, ActionForm form,
HttpServletRequest request, HttpServletResponse response)
throws Exception {
HttpSession session = request.getSession();
InitSessionSetup.getInstance().clearWorkflowSession(session);
InitSessionSetup.getInstance().clearSearchSession(session);
InitSessionSetup.getInstance().clearInventorySession(session);
return mapping.getInputForward();
}
public boolean loginRequired() {
return true;
}
}
|
package modules.admin.User.actions;
import org.skyve.domain.messages.Message;
import org.skyve.domain.messages.ValidationException;
import org.skyve.metadata.controller.ServerSideAction;
import org.skyve.metadata.controller.ServerSideActionResult;
import org.skyve.web.WebContext;
import modules.admin.User.UserBizlet;
import modules.admin.User.UserExtension;
import modules.admin.domain.Group;
import modules.admin.domain.User.GroupSelection;
import modules.admin.domain.User.WizardState;
public class Next implements ServerSideAction<UserExtension> {
private static final long serialVersionUID = -4667349358677521637L;
@Override
public ServerSideActionResult<UserExtension> execute(UserExtension adminUser, WebContext webContext)
throws Exception {
next(adminUser);
return new ServerSideActionResult<>(adminUser);
}
public static void next(UserExtension adminUser) throws Exception{
ValidationException e = new ValidationException();
if(WizardState.confirmContact.equals(adminUser.getWizardState())){
e.getMessages().add(new Message("You must either search for an existing contact or choose to create a new contact."));
} else if(WizardState.createContact.equals(adminUser.getWizardState())){
// validate previous data entry
UserBizlet.validateUserContact(adminUser, e);
// propose a new username
if(adminUser.getContact()!=null && adminUser.getContact().getEmail1()!=null) {
adminUser.setUserName(adminUser.getContact().getEmail1());
} else {
adminUser.setUserName(GenerateUniqueUserName.generateUniqueUserNameFromContactName(adminUser));
}
adminUser.setWizardState(WizardState.confirmUserNameAndPassword);
} else if(WizardState.confirmUserNameAndPassword.equals(adminUser.getWizardState())){
// validate previous data entry
UserBizlet.validateUserNameAndPassword(adminUser,e );
// create a new empty group for group creation, if selected
if(GroupSelection.newGroup.equals(adminUser.getGroupSelection())) {
adminUser.setNewGroup(Group.newInstance());
} else {
adminUser.setNewGroup(null);
}
adminUser.setWizardState(WizardState.confirmGroupMemberships);
}
// throw any validation exceptions collected so far
if(e.getMessages().size()>0){
throw e;
}
}
}
|
package arez.doc.examples.step4;
import arez.annotations.Action;
import arez.annotations.ArezComponent;
import arez.annotations.Feature;
import arez.annotations.Observable;
import arez.annotations.Observe;
@ArezComponent
public abstract class TrainTicket
{
public static TrainTicket create( int remainingRides )
{
return new Arez_TrainTicket( remainingRides );
}
@Observable( initializer = Feature.ENABLE )
public abstract int getRemainingRides();
public abstract void setRemainingRides( int remainingRides );
@Action
public void rideTrain()
{
setRemainingRides( getRemainingRides() - 1 );
}
@Observe
void notifyUserWhenTicketExpires()
{
if ( 0 == getRemainingRides() )
{
NotifyTool.notifyUserTicketExpired( this );
}
}
}
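// Illustrative usage sketch (assumed, not part of the original example):
// TrainTicket ticket = TrainTicket.create( 2 );
// ticket.rideTrain(); // remainingRides becomes 1
// ticket.rideTrain(); // remainingRides becomes 0 and notifyUserWhenTicketExpires() reacts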
|
package beast.evolution.substitutionmodel;
import beast.core.Description;
import beast.core.Input;
import beast.core.Input.Validate;
import beast.core.parameter.RealParameter;
import beast.core.util.Log;
import beast.evolution.datatype.DataType;
import beast.evolution.datatype.TwoStateCovarion;
/**
* <p/>
* a the rate of the slow rate class
* 1 the rate of the fast rate class
* p0 the equilibrium frequency of zero states
* p1 1 - p0, the equilibrium frequency of one states
* f0 the equilibrium frequency of slow rate class
* f1 1 - f0, the equilibrium frequency of fast rate class
* s, s1, s2 the rate of switching
* <p/>
* then the (unnormalized) instantaneous rate matrix (unnormalized Q) should be (depending on mode)
* <p/>
*
* mode = BEAST -- using classic BEAST implementation, reversible iff hfrequencies = (0.5, 0.5)
* FLAGS: reversible = false, TSParameterisation = false
*
* [ -(a*p1)-s , a*p1 , s , 0 ]
* [ a*p0 , -(a*p0)-s , 0 , s ]
* [ s , 0 , -p1-s , p1 ]
* [ 0 , s , p0 , -p0-s ]
*
* equilibrium frequencies
* [ p0 * f0, p1 * f0, p0 * f1, p1 * f1 ]
*
* mode = REVERSIBLE -- brings in hfrequencies in rate matrix
* reversible = true, TSParameterisation = false
* [ - , a , s , 0 ]
* [ a , - , 0 , s ]
* [ s , 0 , - , 1 ]
* [ 0 , s , 1 , - ]
*
* which with frequencies becomes
*
* [ -(a*p1*f0)-s*f0 , a*p1*f0 , s*f0 , 0 ]
* [ a*p0*f0 , -(a*p0*f0)-s*f0 , 0 , s*f0 ]
* [ s*f1 , 0 , -p1*f1-s*f1 , p1*f1 ]
* [ 0 , s*f1 , p0*f1 , -p0*f1-s*f1 ]
*
* equilibrium frequencies
* [ p0 * f0, p1 * f0, p0 * f1, p1 * f1 ]
*
* mode = TUFFLEYSTEEL uses alternative parameterisation: hfrequencies is ignored, and switch parameter is set to dimension = 2
* [ -(a*p1)-s1 , a*p1 , s1 , 0 ]
* [ a*p0 , -(a*p0)-s1 , 0 , s1 ]
* [ s2 , 0 , -p1-s2 , p1 ]
* [ 0 , s2 , p0 , -p0-s2 ]
*
* equilibrium frequencies
* [ f0 * s2/(s1+s2), f1 * s2/(s1+s2), f0 * s1/(s1+s2), f1 * s1/(s1+s2) ]
*
*
* Note: to use Tuffley & Steel's methods, set a = 0.
*/
@Description("Covarion model for Binary data")
public class BinaryCovarion extends GeneralSubstitutionModel {
public Input<RealParameter> alphaInput = new Input<RealParameter>("alpha", "the rate of evolution in slow mode", Validate.REQUIRED);
public Input<RealParameter> switchRateInput = new Input<RealParameter>("switchRate", "the rate of flipping between slow and fast modes", Validate.REQUIRED);
public Input<RealParameter> frequenciesInput = new Input<RealParameter>("vfrequencies", "the frequencies of the visible states", Validate.REQUIRED);
public Input<RealParameter> hfrequenciesInput = new Input<RealParameter>("hfrequencies", "the frequencies of the hidden rates");
public enum MODE {BEAST, REVERSIBLE, TUFFLEYSTEEL};
public Input<MODE> modeInput = new Input<>("mode","one of BEAST, REVERSIBLE, TUFFLEYSTEEL "
+ "BEAST = implementation as in BEAST 1 "
+ "REVERSIBLE = like BEAST 1 implementation, but using frequencies to make it reversible "
+ "TUFFLEYSTEEL = Tuffley & Steel (1996) implementation (no rates for ", MODE.BEAST,MODE.values());
private RealParameter alpha;
private RealParameter switchRate;
private RealParameter frequencies;
private RealParameter hiddenFrequencies;
protected double[][] unnormalizedQ;
protected double[][] storedUnnormalizedQ;
int stateCount;
MODE mode = modeInput.get();
public BinaryCovarion() {
ratesInput.setRule(Validate.OPTIONAL);
frequenciesInput.setRule(Validate.OPTIONAL);
}
@Override
public void initAndValidate() throws Exception {
alpha = alphaInput.get();
switchRate = switchRateInput.get();
frequencies = frequenciesInput.get();
hiddenFrequencies = hfrequenciesInput.get();
if (mode.equals(MODE.BEAST) || mode.equals(MODE.REVERSIBLE)) {
if (switchRate.getDimension() != 1) {
throw new Exception("switchRate should have dimension 1");
}
} else {
if (switchRate.getDimension() != 2) {
throw new Exception("switchRate should have dimension 2");
}
}
if (alpha.getDimension() != 1) {
throw new Exception("alpha should have dimension 1");
}
if (frequencies.getDimension() != 2) {
throw new Exception("frequencies should have dimension 2");
}
if (mode.equals(MODE.BEAST) || mode.equals(MODE.REVERSIBLE)) {
if (hfrequenciesInput.get() == null) {
throw new Exception("hiddenFrequenciesshould should be specified");
}
if (hiddenFrequencies.getDimension() != 2) {
throw new Exception("hiddenFrequenciesshould have dimension 2");
}
} else {
if (hfrequenciesInput.get() != null) {
Log.warning.println("WARNING: hfrequencies is specified, but the BinaryCovarion model ignores it.");
}
}
nrOfStates = 4;
unnormalizedQ = new double[4][4];
storedUnnormalizedQ = new double[4][4];
updateMatrix = true;
eigenSystem = createEigenSystem();
rateMatrix = new double[nrOfStates][nrOfStates];
relativeRates = new double[4 * 3];
storedRelativeRates = new double[4 * 3];
}
@Override
public boolean canHandleDataType(DataType dataType) {
return dataType.getClass().equals(TwoStateCovarion.class);
}
@Override
protected void setupRelativeRates() {
}
@Override
protected void setupRateMatrix() {
setupUnnormalizedQMatrix();
for (int i = 0; i < 4; i++) {
for (int j = 0; j < 4; j++) {
rateMatrix[i][j] = unnormalizedQ[i][j];
}
}
// bring in frequencies
// for (int i = 0; i < m_nStates; i++) {
// for (int j = i + 1; j < m_nStates; j++) {
// m_rateMatrix[i][j] *= fFreqs[j];
// m_rateMatrix[j][i] *= fFreqs[i];
// set up diagonal
for (int i = 0; i < nrOfStates; i++) {
double fSum = 0.0;
for (int j = 0; j < nrOfStates; j++) {
if (i != j)
fSum += rateMatrix[i][j];
}
rateMatrix[i][i] = -fSum;
}
// normalise rate matrix to one expected substitution per unit time
normalize(rateMatrix, getFrequencies());
} // setupRateMatrix
@Override
public double[] getFrequencies() {
double[] fFreqs = new double[4];
if (mode.equals(MODE.BEAST) || mode.equals(MODE.REVERSIBLE)) {
fFreqs[0] = frequencies.getValue(0) * hiddenFrequencies.getValue(0);
fFreqs[1] = frequencies.getValue(1) * hiddenFrequencies.getValue(0);
fFreqs[2] = frequencies.getValue(0) * hiddenFrequencies.getValue(1);
fFreqs[3] = frequencies.getValue(1) * hiddenFrequencies.getValue(1);
} else {
// Hidden-class equilibrium frequencies: s2/(s1+s2) for the slow class and
// s1/(s1+s2) for the fast class (see the TUFFLEYSTEEL matrix in the class comment).
double h0 = switchRate.getValue(1) / (switchRate.getValue(0) + switchRate.getValue(1));
double h1 = switchRate.getValue(0) / (switchRate.getValue(0) + switchRate.getValue(1));
fFreqs[0] = frequencies.getValue(0) * h0;
fFreqs[1] = frequencies.getValue(1) * h0;
fFreqs[2] = frequencies.getValue(0) * h1;
fFreqs[3] = frequencies.getValue(1) * h1;
}
return fFreqs;
}
protected void setupUnnormalizedQMatrix() {
switch (mode) {
case BEAST: {
double a = alpha.getValue(0);
double s = switchRate.getValue(0);
double f0 = hiddenFrequencies.getValue(0);
double f1 = hiddenFrequencies.getValue(1);
double p0 = frequencies.getValue(0);
double p1 = frequencies.getValue(1);
assert Math.abs(1.0 - f0 - f1) < 1e-8;
assert Math.abs(1.0 - p0 - p1) < 1e-8;
unnormalizedQ[0][1] = a * p1;
unnormalizedQ[0][2] = s;
unnormalizedQ[0][3] = 0.0;
unnormalizedQ[1][0] = a * p0;
unnormalizedQ[1][2] = 0.0;
unnormalizedQ[1][3] = s;
unnormalizedQ[2][0] = s;
unnormalizedQ[2][1] = 0.0;
unnormalizedQ[2][3] = p1;
unnormalizedQ[3][0] = 0.0;
unnormalizedQ[3][1] = s;
unnormalizedQ[3][2] = p0;
}
break;
case REVERSIBLE: {
double a = alpha.getValue(0);
double s = switchRate.getValue(0);
double f0 = hiddenFrequencies.getValue(0);
double f1 = hiddenFrequencies.getValue(1);
double p0 = frequencies.getValue(0);
double p1 = frequencies.getValue(1);
assert Math.abs(1.0 - f0 - f1) < 1e-8;
assert Math.abs(1.0 - p0 - p1) < 1e-8;
unnormalizedQ[0][1] = a * p1 * f0;
unnormalizedQ[0][2] = s * f0;
unnormalizedQ[0][3] = 0.0;
unnormalizedQ[1][0] = a * p0 * f0;
unnormalizedQ[1][2] = 0.0;
unnormalizedQ[1][3] = s * f0;
unnormalizedQ[2][0] = s * f1;
unnormalizedQ[2][1] = 0.0;
unnormalizedQ[2][3] = p1 * f1;
unnormalizedQ[3][0] = 0.0;
unnormalizedQ[3][1] = s * f1;
unnormalizedQ[3][2] = p0 * f1;
}
break;
case TUFFLEYSTEEL: {
double a = alpha.getValue(0);
double s1 = switchRate.getValue(0);
double s2 = switchRate.getValue(1);
double p0 = frequencies.getValue(0);
double p1 = frequencies.getValue(1);
assert Math.abs(1.0 - p0 - p1) < 1e-8;
unnormalizedQ[0][1] = a * p1;
unnormalizedQ[0][2] = s1;
unnormalizedQ[0][3] = 0.0;
unnormalizedQ[1][0] = a * p0;
unnormalizedQ[1][2] = 0.0;
unnormalizedQ[1][3] = s1;
unnormalizedQ[2][0] = s2;
unnormalizedQ[2][1] = 0.0;
unnormalizedQ[2][3] = p1;
unnormalizedQ[3][0] = 0.0;
unnormalizedQ[3][1] = s2;
unnormalizedQ[3][2] = p0;
}
break;
}
}
/**
* Normalize rate matrix to one expected substitution per unit time
*
* @param matrix the matrix to normalize to one expected substitution
* @param pi the equilibrium distribution of states
*/
private void normalize(double[][] matrix, double[] pi) {
double subst = 0.0;
int dimension = pi.length;
for (int i = 0; i < dimension; i++) {
subst += -matrix[i][i] * pi[i];
}
// normalize, including switches
for (int i = 0; i < dimension; i++) {
for (int j = 0; j < dimension; j++) {
matrix[i][j] = matrix[i][j] / subst;
}
}
double switchingProportion = 0.0;
switchingProportion += matrix[0][2] * pi[2];
switchingProportion += matrix[2][0] * pi[0];
switchingProportion += matrix[1][3] * pi[3];
switchingProportion += matrix[3][1] * pi[1];
//System.out.println("switchingProportion=" + switchingProportion);
// normalize, removing switches
for (int i = 0; i < dimension; i++) {
for (int j = 0; j < dimension; j++) {
matrix[i][j] = matrix[i][j] / (1.0 - switchingProportion);
}
}
}
}
|
package simulation;
import network.Network;
import network.Node;
import network.exceptions.NodeExistsException;
import network.exceptions.NodeNotFoundException;
import simulation.implementations.policies.shortestpath.ShortestPathAttribute;
import simulation.implementations.policies.shortestpath.ShortestPathAttributeFactory;
import simulation.implementations.policies.shortestpath.ShortestPathLabel;
import java.util.HashMap;
import java.util.Map;
public class NetworkCreator {
private NetworkCreator() {} // should not be instantiated
private static RouteTable createRouteTableForNode(Network network, int nodeId) {
return new RouteTable(network.getNode(nodeId).getOutNeighbours(), new ShortestPathAttributeFactory());
}
private static void setRoute(RouteTable routeTable, Network network, int destId, int neighbourId,
int length, int[] path) {
routeTable.setAttribute(network.getNode(destId), network.getNode(neighbourId),
new ShortestPathAttribute(length));
// create array of nodes for the path
Node[] pathNodes = new Node[path.length];
for (int i = 0; i < pathNodes.length; i++) {
pathNodes[i] = new Node(network, path[i]);
}
routeTable.setPath(network.getNode(destId), network.getNode(neighbourId), new PathAttribute(pathNodes));
}
static Network createNetwork0() throws NodeExistsException, NodeNotFoundException {
Network network = new Network();
network.addNode(0);
network.addNode(1);
network.link(0, 1, new ShortestPathLabel(1));
return network;
}
static Map<Node, RouteTable> expectedRouteTableForNetwork0(Network network) {
Map<Node, RouteTable> expectedTables = new HashMap<>();
RouteTable routeTable;
// node 0 route table
routeTable = createRouteTableForNode(network, 0);
setRoute(routeTable, network, 1, 1, 1, new int[]{1});
expectedTables.put(network.getNode(0), routeTable);
// node 1 route table
routeTable = createRouteTableForNode(network, 1);
expectedTables.put(network.getNode(1), routeTable);
return expectedTables;
}
static Network createNetwork1() throws NodeExistsException, NodeNotFoundException {
Network network = new Network();
network.addNode(0);
network.addNode(1);
network.addNode(2);
network.link(0, 1, new ShortestPathLabel(1));
network.link(1, 2, new ShortestPathLabel(1));
network.link(0, 2, new ShortestPathLabel(0));
return network;
}
static Network createNetwork2() throws NodeExistsException, NodeNotFoundException {
Network network = new Network();
network.addNode(0);
network.addNode(1);
network.addNode(2);
network.addNode(3);
network.addNode(4);
network.addNode(5);
network.link(0, 1, new ShortestPathLabel(1));
network.link(0, 2, new ShortestPathLabel(1));
network.link(1, 2, new ShortestPathLabel(1));
network.link(2, 3, new ShortestPathLabel(0));
network.link(2, 4, new ShortestPathLabel(-1));
network.link(3, 5, new ShortestPathLabel(0));
network.link(4, 5, new ShortestPathLabel(3));
return network;
}
static Network createNetwork3() throws NodeExistsException, NodeNotFoundException {
Network network = new Network();
network.addNode(0);
network.addNode(1);
network.addNode(2);
network.addNode(3);
network.link(1, 0, new ShortestPathLabel(0));
network.link(2, 0, new ShortestPathLabel(0));
network.link(3, 0, new ShortestPathLabel(0));
network.link(1, 2, new ShortestPathLabel(1));
network.link(1, 2, new ShortestPathLabel(-1));
network.link(2, 3, new ShortestPathLabel(1));
network.link(3, 1, new ShortestPathLabel(-2));
return network;
}
}
|
package bencoding.alarmmanager;
import org.appcelerator.titanium.TiApplication;
import android.R;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.net.Uri;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
public class AlarmNotificationListener extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
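// The broadcast Intent is expected to carry the notification description in its extras:
// "notification_request_code", "notification_title", "notification_msg",
// "notification_root_classname", "notification_has_icon" / "notification_icon",
// "notification_sound", "notification_play_sound", "notification_vibrate" and
// "notification_show_lights"; each key is read below.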
NotificationManager notificationManager = null;
utils.debugLog("In Alarm Notification Listener");
Bundle bundle = intent.getExtras();
if(bundle.get("notification_request_code") == null){
utils.infoLog("notification_request_code is null assume cancelled");
return;
}
int requestCode = bundle.getInt("notification_request_code", AlarmmanagerModule.DEFAULT_REQUEST_CODE);
utils.debugLog("requestCode is " + requestCode);
String contentTitle = bundle.getString("notification_title");
utils.debugLog("contentTitle is " + contentTitle);
String contentText = bundle.getString("notification_msg");
utils.debugLog("contentText is " + contentText);
String className = bundle.getString("notification_root_classname");
utils.debugLog("className is " + className);
boolean hasIcon = bundle.getBoolean("notification_has_icon", true);
int icon = R.drawable.stat_notify_more;
if(hasIcon){
icon = bundle.getInt("notification_icon",R.drawable.stat_notify_more);
utils.debugLog("User provided an icon of " + icon);
}else{
utils.debugLog("No icon provided, default will be used");
}
String soundPath = bundle.getString("notification_sound");
//Add default notification flags
boolean playSound = bundle.getBoolean("notification_play_sound",false);
utils.debugLog("On notification play sound? " + new Boolean(playSound).toString());
boolean doVibrate = bundle.getBoolean("notification_vibrate",false);
utils.debugLog("On notification vibrate? " + new Boolean(doVibrate).toString());
boolean showLights = bundle.getBoolean("notification_show_lights",false);
utils.debugLog("On notification show lights? " + new Boolean(showLights).toString());
notificationManager = (NotificationManager) TiApplication.getInstance().getSystemService(TiApplication.NOTIFICATION_SERVICE);
utils.debugLog("NotificationManager created");
Intent notifyIntent = createIntent(className);
Notification notification = new Notification(icon, contentTitle, System.currentTimeMillis());
// FLAG_AUTO_CANCEL belongs on the Notification itself (set in createNotifyFlags below),
// not on the PendingIntent, so only the PendingIntent flag is passed here.
PendingIntent sender = PendingIntent.getActivity(TiApplication.getInstance().getApplicationContext(),
requestCode, notifyIntent,
PendingIntent.FLAG_UPDATE_CURRENT);
utils.debugLog("setting notification flags");
notification = createNotifyFlags(notification,playSound,soundPath,doVibrate,showLights);
utils.debugLog("setLatestEventInfo");
notification.setLatestEventInfo(TiApplication.getInstance().getApplicationContext(), contentTitle,contentText, sender);
utils.debugLog("Notifying using requestCode =" + requestCode);
notificationManager.notify(requestCode, notification);
utils.infoLog("You should now see a notification");
}
private Notification createNotifyFlags(Notification notification, boolean playSound, String soundPath, boolean doVibrate, boolean showLights){
//Set the notifications flags
if(playSound){
if(!utils.isEmptyString(soundPath)){
notification.sound = Uri.parse(soundPath);
}else{
notification.defaults |= Notification.DEFAULT_SOUND;
}
}
if(doVibrate){
notification.defaults |=Notification.DEFAULT_VIBRATE;
}
if(showLights){
notification.defaults |=Notification.DEFAULT_LIGHTS;
}
//Set alarm flags
notification.flags |= Notification.FLAG_ONLY_ALERT_ONCE | Notification.FLAG_AUTO_CANCEL;
return notification;
}
private Intent createIntent(String className){
Intent intent = null;
try {
if(utils.isEmptyString(className)){
utils.debugLog("Using application context");
intent = new Intent(TiApplication.getInstance().getApplicationContext(),TiApplication.getInstance().getRootOrCurrentActivity().getClass());
}else{
utils.debugLog("Trying to get a class for name '" + className + "'");
@SuppressWarnings("rawtypes") Class intentClass = Class.forName(className);
intent = new Intent(TiApplication.getInstance().getApplicationContext(), intentClass);
}
} catch (ClassNotFoundException e) {
utils.errorLog(e);
}
return intent;
}
}
|
package br.com.carlosrafaelgn.fplay;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.ColorStateList;
import android.database.DataSetObserver;
import android.text.InputType;
import android.text.method.LinkMovementMethod;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.RadioButton;
import android.widget.RelativeLayout;
import android.widget.Spinner;
import android.widget.SpinnerAdapter;
import android.widget.TextView;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import br.com.carlosrafaelgn.fplay.activity.MainHandler;
import br.com.carlosrafaelgn.fplay.list.BaseList;
import br.com.carlosrafaelgn.fplay.list.FileSt;
import br.com.carlosrafaelgn.fplay.list.IcecastRadioStationList;
import br.com.carlosrafaelgn.fplay.list.RadioStation;
import br.com.carlosrafaelgn.fplay.list.RadioStationGenre;
import br.com.carlosrafaelgn.fplay.list.RadioStationList;
import br.com.carlosrafaelgn.fplay.list.ShoutcastRadioStationList;
import br.com.carlosrafaelgn.fplay.playback.Player;
import br.com.carlosrafaelgn.fplay.ui.BackgroundActivityMonitor;
import br.com.carlosrafaelgn.fplay.ui.BgButton;
import br.com.carlosrafaelgn.fplay.ui.BgColorStateList;
import br.com.carlosrafaelgn.fplay.ui.BgListView;
import br.com.carlosrafaelgn.fplay.ui.FastAnimator;
import br.com.carlosrafaelgn.fplay.ui.RadioStationView;
import br.com.carlosrafaelgn.fplay.ui.UI;
import br.com.carlosrafaelgn.fplay.ui.drawable.ColorDrawable;
import br.com.carlosrafaelgn.fplay.ui.drawable.TextIconDrawable;
import br.com.carlosrafaelgn.fplay.util.SafeURLSpan;
import br.com.carlosrafaelgn.fplay.util.TypedRawArrayList;
public final class ActivityBrowserRadio extends ActivityBrowserView implements View.OnClickListener, DialogInterface.OnClickListener, DialogInterface.OnCancelListener, DialogInterface.OnDismissListener, BgListView.OnBgListViewKeyDownObserver, RadioStationList.OnBaseListSelectionChangedListener<RadioStation>, RadioStationList.RadioStationAddedObserver, FastAnimator.Observer, AdapterView.OnItemSelectedListener {
private static final class RadioStationAdapter implements SpinnerAdapter {
private Context context;
private ColorStateList defaultTextColors;
public RadioStationGenre[] genres;
public RadioStationAdapter(Context context, ColorStateList defaultTextColors, RadioStationGenre[] genres) {
this.context = context;
this.defaultTextColors = defaultTextColors;
this.genres = genres;
}
public void release() {
context = null;
defaultTextColors = null;
genres = null;
}
@Override
public int getCount() {
return (genres == null ? 0 : genres.length);
}
@Override
public Object getItem(int position) {
return (genres == null ? null : genres[position]);
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public int getItemViewType(int position) {
return 0;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
TextView txt = (TextView)convertView;
if (txt == null) {
txt = new TextView(context);
txt.setPadding(UI.dialogMargin, UI.dialogMargin, UI.dialogMargin, UI.dialogMargin);
txt.setTypeface(UI.defaultTypeface);
txt.setTextSize(TypedValue.COMPLEX_UNIT_PX, UI.dialogTextSize);
txt.setTextColor(defaultTextColors);
}
txt.setText(genres == null ? "" : genres[position].name);
return txt;
}
@Override
public int getViewTypeCount() {
return 1;
}
@Override
public boolean hasStableIds() {
return true;
}
@Override
public boolean isEmpty() {
return false;
}
@Override
public void registerDataSetObserver(DataSetObserver observer) {
}
@Override
public void unregisterDataSetObserver(DataSetObserver observer) {
}
@Override
public View getDropDownView(int position, View convertView, ViewGroup parent) {
TextView txt = (TextView)convertView;
if (txt == null) {
txt = new TextView(context);
txt.setPadding(UI.dialogMargin, UI.dialogDropDownVerticalMargin, UI.dialogMargin, UI.dialogDropDownVerticalMargin);
txt.setTypeface(UI.defaultTypeface);
txt.setTextSize(TypedValue.COMPLEX_UNIT_PX, UI.dialogTextSize);
txt.setTextColor(defaultTextColors);
}
txt.setText(genres == null ? "" : genres[position].name);
return txt;
}
}
private final boolean useShoutcast;
private TextView sep2;
private BgListView list;
private RadioStationGenre[] genres;
private RadioStationAdapter adapter, adapterSecondary;
private RadioStationList radioStationList;
private RelativeLayout panelSecondary, panelLoading;
private RadioButton chkGenre, chkTerm;
private Spinner btnGenre, btnGenreSecondary;
private EditText txtTerm;
private BgButton btnGoBack, btnFavorite, btnSearch, btnGoBackToPlayer, btnAdd, btnPlay;
private boolean loading, isAtFavorites, isCreatingLayout, isHidingLoadingPanel, ignoreFirstNotification;
private FastAnimator animator, loadingPanelAnimatorHide, loadingPanelAnimatorShow;
private CharSequence msgNoFavorites, msgNoStations, msgLoading;
public ActivityBrowserRadio(boolean useShoutcast) {
this.useShoutcast = useShoutcast;
}
@Override
public CharSequence getTitle() {
return getText(R.string.add_radio);
}
private void updateButtons() {
UI.animationReset();
if (!isAtFavorites != (btnFavorite.getVisibility() == View.VISIBLE)) {
if (isAtFavorites) {
UI.animationAddViewToHide(btnFavorite);
UI.animationAddViewToHide(btnSearch);
btnGoBack.setNextFocusRightId(R.id.list);
UI.setNextFocusForwardId(btnGoBack, R.id.list);
} else {
UI.animationAddViewToShow(btnFavorite);
UI.animationAddViewToShow(btnSearch);
btnGoBack.setNextFocusRightId(R.id.btnFavorite);
UI.setNextFocusForwardId(btnGoBack, R.id.btnFavorite);
}
}
final int s = radioStationList.getSelection();
if ((s >= 0) != (btnAdd.getVisibility() == View.VISIBLE)) {
if (s >= 0) {
UI.animationAddViewToShow(btnAdd);
UI.animationAddViewToShow(sep2);
UI.animationAddViewToShow(btnPlay);
btnGoBack.setNextFocusLeftId(R.id.btnPlay);
btnGoBackToPlayer.setNextFocusRightId(R.id.btnAdd);
UI.setNextFocusForwardId(btnGoBackToPlayer, R.id.btnAdd);
} else {
UI.animationAddViewToHide(btnAdd);
UI.animationAddViewToHide(sep2);
UI.animationAddViewToHide(btnPlay);
btnGoBack.setNextFocusLeftId(R.id.btnGoBackToPlayer);
btnGoBackToPlayer.setNextFocusRightId(R.id.btnGoBack);
UI.setNextFocusForwardId(btnGoBackToPlayer, R.id.btnGoBack);
}
}
UI.animationCommit(isCreatingLayout, null);
}
private void addPlaySelectedItem(final boolean play) {
if (radioStationList.getSelection() < 0)
return;
final RadioStation radioStation = radioStationList.getItemT(radioStationList.getSelection());
if (radioStation.m3uUri == null || radioStation.m3uUri.length() == 0) {
UI.toast(getApplication(), R.string.error_file_not_found);
return;
}
Player.songs.addingStarted();
BackgroundActivityMonitor.start(getHostActivity());
try {
(new Thread("Checked Radio Station Adder Thread") {
@Override
public void run() {
InputStream is = null;
InputStreamReader isr = null;
BufferedReader br = null;
HttpURLConnection urlConnection = null;
try {
if (Player.state >= Player.STATE_TERMINATING)
return;
urlConnection = (HttpURLConnection)(new URL(radioStation.m3uUri)).openConnection();
final int s = urlConnection.getResponseCode();
if (s == 200) {
is = urlConnection.getInputStream();
isr = new InputStreamReader(is, "UTF-8");
br = new BufferedReader(isr, 1024);
TypedRawArrayList<String> lines = new TypedRawArrayList<>(String.class, 8);
String line;
while ((line = br.readLine()) != null) {
line = line.trim();
if (line.length() > 0 && line.charAt(0) != '#' &&
(line.regionMatches(true, 0, "http://", 0, 7) ||
line.regionMatches(true, 0, "https://", 0, 8)))
lines.add(line);
}
if (Player.state >= Player.STATE_TERMINATING)
return;
if (lines.size() == 0) {
MainHandler.toast(R.string.error_gen);
} else {
//instead of just using the first available address, let's use
//one from the middle ;)
Player.songs.addFiles(new FileSt[] { new FileSt(lines.get(lines.size() >> 1), radioStation.title, null, 0) }, null, 1, play, false, true, false);
}
} else {
MainHandler.toast((s >= 400 && s < 500) ? R.string.error_file_not_found : R.string.error_gen);
}
} catch (Throwable ex) {
MainHandler.toast(ex);
} finally {
Player.songs.addingEnded();
try {
if (urlConnection != null)
urlConnection.disconnect();
} catch (Throwable ex) {
ex.printStackTrace();
}
try {
if (is != null)
is.close();
} catch (Throwable ex) {
ex.printStackTrace();
}
try {
if (isr != null)
isr.close();
} catch (Throwable ex) {
ex.printStackTrace();
}
try {
if (br != null)
br.close();
} catch (Throwable ex) {
ex.printStackTrace();
}
System.gc();
}
}
}).start();
} catch (Throwable ex) {
Player.songs.addingEnded();
UI.toast(getApplication(), ex.getMessage());
}
}
@Override
public void loadingProcessChanged(boolean started) {
if (UI.browserActivity != this)
return;
loading = started;
if (panelLoading != null) {
if (loadingPanelAnimatorHide != null && !isCreatingLayout) {
panelLoading.setVisibility(View.VISIBLE);
loadingPanelAnimatorHide.end();
loadingPanelAnimatorShow.end();
(started ? loadingPanelAnimatorShow : loadingPanelAnimatorHide).start();
isHidingLoadingPanel = !started;
} else {
panelLoading.setVisibility(started ? View.VISIBLE : View.GONE);
}
}
if (list != null) {
list.setCustomEmptyText(started ? msgLoading : (isAtFavorites ? msgNoFavorites : msgNoStations));
if (animator != null) {
if (started) {
list.setVisibility(View.INVISIBLE);
} else if (list.getVisibility() != View.VISIBLE) {
animator.end();
list.setVisibility(View.VISIBLE);
animator.start();
}
}
}
//if (!started)
// updateButtons();
}
@Override
public View createView() {
return new RadioStationView(Player.getService());
}
@Override
public void processItemCheckboxClick(int position) {
final RadioStation station = radioStationList.getItemT(position);
if (station.isFavorite)
radioStationList.addFavoriteStation(station);
else
radioStationList.removeFavoriteStation(station);
}
@Override
public void processItemClick(int position) {
//UI.doubleClickMode is ignored for radio stations!
if (radioStationList.getSelection() == position)
addPlaySelectedItem(true);
else
radioStationList.setSelection(position, true);
}
@Override
public void processItemLongClick(int position) {
if (radioStationList.getSelection() != position)
radioStationList.setSelection(position, true);
}
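// The persisted genre index packs two values into a single int: the primary (parent) genre
// index in the low 16 bits and the secondary (child) genre index in the high 16 bits, i.e.
// index = parent | (child << 16). For example, parent 3 with child 2 is stored as 0x00020003.
// The helpers below validate and unpack that encoding.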
private int validateGenreIndex(int index) {
if (genres == null)
return 0;
int parent = index & 0xffff;
if (parent >= genres.length)
parent = genres.length - 1;
if (index <= 0xffff)
return parent;
final RadioStationGenre genre = genres[parent];
if (genre.children == null || genre.children.length == 0)
return parent;
int child = (index >>> 16);
if (child >= genre.children.length)
child = genre.children.length - 1;
return parent | (child << 16);
}
private int getPrimaryGenreIndex() {
if (genres == null)
return -1;
final int index = (useShoutcast ? Player.radioLastGenreShoutcast : Player.radioLastGenre);
final int parent = index & 0xffff;
return ((parent >= genres.length) ? (genres.length - 1) : parent);
}
private int getSecondaryGenreIndex() {
if (genres == null)
return -1;
int index = (useShoutcast ? Player.radioLastGenreShoutcast : Player.radioLastGenre);
final int parent = index & 0xffff;
final RadioStationGenre genre = genres[(parent >= genres.length) ? (genres.length - 1) : parent];
if (index <= 0xffff || genre.children == null || genre.children.length == 0)
return 0;
index = (index >>> 16);
return ((index >= genre.children.length) ? (genre.children.length - 1) : index);
}
private RadioStationGenre getGenre() {
if (genres == null)
return null;
int index = (useShoutcast ? Player.radioLastGenreShoutcast : Player.radioLastGenre);
final int parent = index & 0xffff;
final RadioStationGenre genre = genres[(parent >= genres.length) ? (genres.length - 1) : parent];
if (index <= 0xffff || genre.children == null || genre.children.length == 0)
return genre;
index = (index >>> 16);
return genre.children[(index >= genre.children.length) ? (genre.children.length - 1) : index];
}
private void doSearch() {
final int selection = radioStationList.getSelection();
if (Player.radioSearchTerm != null) {
Player.radioSearchTerm = Player.radioSearchTerm.trim();
if (Player.radioSearchTerm.length() < 1)
Player.radioSearchTerm = null;
}
if (Player.lastRadioSearchWasByGenre || Player.radioSearchTerm == null)
radioStationList.fetchStations(getApplication(), getGenre(), null);
else
radioStationList.fetchStations(getApplication(), null, Player.radioSearchTerm);
//do not call updateButtons() if onSelectionChanged() got called before!
if (selection < 0)
updateButtons();
}
@Override
public boolean onBgListViewKeyDown(BgListView list, int keyCode) {
int p;
switch (keyCode) {
case UI.KEY_LEFT:
if (btnSearch != null && btnGoBack != null)
((btnSearch.getVisibility() == View.VISIBLE) ? btnSearch : btnGoBack).requestFocus();
return true;
case UI.KEY_RIGHT:
if (btnGoBackToPlayer != null)
btnGoBackToPlayer.requestFocus();
return true;
case UI.KEY_ENTER:
if (radioStationList != null) {
p = radioStationList.getSelection();
if (p >= 0)
processItemClick(p);
}
return true;
case UI.KEY_EXTRA:
if (radioStationList != null) {
p = radioStationList.getSelection();
if (p >= 0) {
final RadioStation station = radioStationList.getItemT(p);
station.isFavorite = !station.isFavorite;
processItemCheckboxClick(p);
if (list != null) {
final RadioStationView view = (RadioStationView)list.getViewForPosition(p);
if (view != null) {
view.refreshItemFavoriteButton();
break;
}
}
radioStationList.notifyCheckedChanged();
}
}
return true;
}
return false;
}
@Override
public void onSelectionChanged(BaseList<RadioStation> list) {
updateButtons();
}
@Override
public void onClick(View view) {
if (view == btnGoBack) {
if (isAtFavorites) {
isAtFavorites = false;
doSearch();
} else {
finish(0, view, true);
}
} else if (view == btnFavorite) {
final int selection = radioStationList.getSelection();
isAtFavorites = true;
radioStationList.cancel();
radioStationList.fetchFavorites(getApplication());
//do not call updateButtons() if onSelectionChanged() got called before!
if (selection < 0)
updateButtons();
} else if (view == btnSearch) {
final Context ctx = getHostActivity();
final LinearLayout l = (LinearLayout)UI.createDialogView(ctx, null);
LinearLayout.LayoutParams p = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
chkGenre = new RadioButton(ctx);
chkGenre.setText(R.string.genre);
chkGenre.setChecked(Player.lastRadioSearchWasByGenre);
chkGenre.setOnClickListener(this);
chkGenre.setTextSize(TypedValue.COMPLEX_UNIT_PX, UI.dialogTextSize);
chkGenre.setLayoutParams(p);
p = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
p.topMargin = UI.dialogMargin;
btnGenre = new Spinner(ctx);
btnGenre.setContentDescription(ctx.getText(R.string.genre));
btnGenre.setLayoutParams(p);
btnGenre.setVisibility(Player.lastRadioSearchWasByGenre ? View.VISIBLE : View.GONE);
if (useShoutcast) {
p = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
p.topMargin = UI.dialogMargin;
btnGenreSecondary = new Spinner(ctx);
btnGenreSecondary.setContentDescription(ctx.getText(R.string.genre));
btnGenreSecondary.setLayoutParams(p);
btnGenreSecondary.setVisibility(Player.lastRadioSearchWasByGenre ? View.VISIBLE : View.GONE);
} else {
btnGenreSecondary = null;
}
p = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
p.topMargin = UI.dialogMargin << 1;
chkTerm = new RadioButton(ctx);
chkTerm.setText(R.string.search_term);
chkTerm.setChecked(!Player.lastRadioSearchWasByGenre);
chkTerm.setOnClickListener(this);
chkTerm.setTextSize(TypedValue.COMPLEX_UNIT_PX, UI.dialogTextSize);
chkTerm.setLayoutParams(p);
p = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
p.topMargin = UI.dialogMargin;
txtTerm = new EditText(ctx);
txtTerm.setContentDescription(ctx.getText(R.string.search_term));
txtTerm.setText(Player.radioSearchTerm == null ? "" : Player.radioSearchTerm);
txtTerm.setOnClickListener(this);
txtTerm.setTextSize(TypedValue.COMPLEX_UNIT_PX, UI.dialogTextSize);
txtTerm.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_FLAG_CAP_SENTENCES);
txtTerm.setSingleLine();
txtTerm.setLayoutParams(p);
txtTerm.setVisibility(!Player.lastRadioSearchWasByGenre ? View.VISIBLE : View.GONE);
p = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
p.topMargin = UI.dialogMargin;
p.bottomMargin = UI.dialogMargin;
final TextView lbl = new TextView(ctx);
lbl.setAutoLinkMask(0);
lbl.setLinksClickable(true);
lbl.setLinkTextColor(new BgColorStateList(UI.isAndroidThemeLight() ? 0xff0099cc : 0xff33b5e5));
lbl.setTextSize(TypedValue.COMPLEX_UNIT_PX, UI._14sp);
lbl.setGravity(Gravity.CENTER_HORIZONTAL);
lbl.setText(SafeURLSpan.parseSafeHtml(getText(R.string.by_dir_xiph_org)));
lbl.setMovementMethod(LinkMovementMethod.getInstance());
lbl.setLayoutParams(p);
l.addView(chkGenre);
l.addView(btnGenre);
if (btnGenreSecondary != null)
l.addView(btnGenreSecondary);
l.addView(chkTerm);
l.addView(txtTerm);
l.addView(lbl);
final ColorStateList defaultTextColors = txtTerm.getTextColors();
final int primaryGenreIndex = getPrimaryGenreIndex();
adapter = new RadioStationAdapter(getApplication(), defaultTextColors, genres);
btnGenre.setAdapter(adapter);
btnGenre.setSelection(primaryGenreIndex);
if (btnGenreSecondary != null) {
ignoreFirstNotification = true;
btnGenre.setOnItemSelectedListener(this);
adapterSecondary = new RadioStationAdapter(getApplication(), defaultTextColors, genres[primaryGenreIndex].children);
btnGenreSecondary.setAdapter(adapterSecondary);
btnGenreSecondary.setSelection(getSecondaryGenreIndex());
}
UI.disableEdgeEffect(ctx);
AlertDialog dialog = (new AlertDialog.Builder(ctx))
.setTitle(getText(R.string.search))
.setView(l)
.setPositiveButton(R.string.search, this)
.setNegativeButton(R.string.cancel, this)
.create();
dialog.setOnCancelListener(this);
dialog.setOnDismissListener(this);
UI.prepareDialogAndShow(dialog);
} else if (view == btnGoBackToPlayer) {
finish(-1, view, false);
} else if (view == btnAdd) {
addPlaySelectedItem(false);
} else if (view == btnPlay) {
addPlaySelectedItem(true);
} else if (view == chkGenre || view == btnGenre) {
chkGenre.setChecked(true);
chkTerm.setChecked(false);
if (txtTerm != null)
txtTerm.setVisibility(View.GONE);
if (btnGenre != null)
btnGenre.setVisibility(View.VISIBLE);
if (btnGenreSecondary != null)
btnGenreSecondary.setVisibility(View.VISIBLE);
} else if (view == chkTerm || view == txtTerm) {
chkGenre.setChecked(false);
chkTerm.setChecked(true);
if (btnGenre != null)
btnGenre.setVisibility(View.GONE);
if (btnGenreSecondary != null)
btnGenreSecondary.setVisibility(View.GONE);
if (txtTerm != null) {
txtTerm.setVisibility(View.VISIBLE);
txtTerm.requestFocus();
}
} else if (view == list) {
if (!isAtFavorites && !loading && (radioStationList == null || radioStationList.getCount() == 0))
onClick(btnFavorite);
}
}
@Override
public void onClick(DialogInterface dialog, int which) {
if (which == AlertDialog.BUTTON_POSITIVE) {
if (chkGenre != null)
Player.lastRadioSearchWasByGenre = chkGenre.isChecked();
if (btnGenre != null) {
if (useShoutcast) {
Player.radioLastGenreShoutcast = btnGenre.getSelectedItemPosition();
if (btnGenreSecondary != null)
Player.radioLastGenreShoutcast |= (btnGenreSecondary.getSelectedItemPosition() << 16);
} else {
Player.radioLastGenre = btnGenre.getSelectedItemPosition();
}
}
if (txtTerm != null)
Player.radioSearchTerm = txtTerm.getText().toString();
doSearch();
}
chkGenre = null;
btnGenre = null;
btnGenreSecondary = null;
if (adapter != null) {
adapter.release();
adapter = null;
}
if (adapterSecondary != null) {
adapterSecondary.release();
adapterSecondary = null;
}
chkTerm = null;
txtTerm = null;
}
@Override
public void onCancel(DialogInterface dialog) {
onClick(dialog, AlertDialog.BUTTON_NEGATIVE);
}
@Override
public void onDismiss(DialogInterface dialog) {
UI.reenableEdgeEffect(getHostActivity());
}
@Override
protected boolean onBackPressed() {
if (UI.backKeyAlwaysReturnsToPlayerWhenBrowsing) {
finish(-1, null, false);
return true;
}
if (!isAtFavorites)
return false;
onClick(btnGoBack);
return true;
}
@Override
protected void onCreate() {
UI.browserActivity = this;
radioStationList = (useShoutcast ? new ShoutcastRadioStationList("-", getText(R.string.no_description).toString()) : new IcecastRadioStationList(getText(R.string.tags).toString(), "-", getText(R.string.no_description).toString(), getText(R.string.no_tags).toString()));
radioStationList.setOnBaseListSelectionChangedListener(this);
}
@SuppressWarnings("deprecation")
@Override
protected void onCreateLayout(boolean firstCreation) {
setContentView(R.layout.activity_browser_radio);
UI.smallTextAndColor((TextView)findViewById(R.id.lblLoadingSmall));
msgLoading = getText(R.string.loading);
msgNoFavorites = getText(R.string.no_favorites);
msgNoStations = getText(R.string.no_stations);
list = (BgListView)findViewById(R.id.list);
list.setOnKeyDownObserver(this);
list.setScrollBarType((UI.browserScrollBarType == BgListView.SCROLLBAR_INDEXED) ? BgListView.SCROLLBAR_LARGE : UI.browserScrollBarType);
list.setCustomEmptyText(msgLoading);
list.setEmptyListOnClickListener(this);
panelLoading = (RelativeLayout)findViewById(R.id.panelLoading);
if (UI.animationEnabled) {
list.setVisibility(View.GONE);
loadingPanelAnimatorHide = new FastAnimator(panelLoading, true, this, 0);
loadingPanelAnimatorShow = new FastAnimator(panelLoading, false, null, 0);
radioStationList.radioStationAddedObserver = this;
((View)list.getParent()).setBackgroundDrawable(new ColorDrawable(UI.color_list_bg));
animator = new FastAnimator(list, false, null, 0);
final TextView lblLoading = (TextView)findViewById(R.id.lblLoading);
lblLoading.setTextColor(UI.color_text_disabled);
UI.largeText(lblLoading);
lblLoading.setVisibility(View.VISIBLE);
} else if (firstCreation) {
list.setCustomEmptyText(msgLoading);
}
radioStationList.setObserver(list);
btnGoBack = (BgButton)findViewById(R.id.btnGoBack);
btnGoBack.setOnClickListener(this);
btnGoBack.setIcon(UI.ICON_GOBACK);
btnFavorite = (BgButton)findViewById(R.id.btnFavorite);
btnFavorite.setOnClickListener(this);
btnFavorite.setIcon(UI.ICON_FAVORITE_ON);
btnSearch = (BgButton)findViewById(R.id.btnSearch);
btnSearch.setOnClickListener(this);
btnSearch.setIcon(UI.ICON_SEARCH);
panelSecondary = (RelativeLayout)findViewById(R.id.panelSecondary);
btnGoBackToPlayer = (BgButton)findViewById(R.id.btnGoBackToPlayer);
btnGoBackToPlayer.setTextColor(UI.colorState_text_reactive);
btnGoBackToPlayer.setOnClickListener(this);
btnGoBackToPlayer.setCompoundDrawables(new TextIconDrawable(UI.ICON_LIST, UI.color_text, UI.defaultControlContentsSize), null, null, null);
btnGoBackToPlayer.setDefaultHeight();
btnAdd = (BgButton)findViewById(R.id.btnAdd);
btnAdd.setTextColor(UI.colorState_text_reactive);
btnAdd.setOnClickListener(this);
btnAdd.setIcon(UI.ICON_ADD);
sep2 = (TextView)findViewById(R.id.sep2);
RelativeLayout.LayoutParams rp = new RelativeLayout.LayoutParams(UI.strokeSize, UI.defaultControlContentsSize);
rp.addRule(RelativeLayout.CENTER_VERTICAL, RelativeLayout.TRUE);
rp.addRule(RelativeLayout.LEFT_OF, R.id.btnPlay);
rp.leftMargin = UI.controlMargin;
rp.rightMargin = UI.controlMargin;
sep2.setLayoutParams(rp);
sep2.setBackgroundDrawable(new ColorDrawable(UI.color_highlight));
btnPlay = (BgButton)findViewById(R.id.btnPlay);
btnPlay.setTextColor(UI.colorState_text_reactive);
btnPlay.setOnClickListener(this);
btnPlay.setIcon(UI.ICON_PLAY);
UI.prepareControlContainer(findViewById(R.id.panelControls), false, true);
UI.prepareControlContainer(panelSecondary, true, false);
if (UI.isLargeScreen)
UI.prepareViewPaddingForLargeScreen(list, 0, 0);
}
@Override
protected void onPostCreateLayout(boolean firstCreation) {
genres = RadioStationGenre.loadGenres(getHostActivity(), useShoutcast);
if (genres == null)
genres = new RadioStationGenre[] { new RadioStationGenre() };
if (useShoutcast)
Player.radioLastGenreShoutcast = validateGenreIndex(Player.radioLastGenreShoutcast);
else
Player.radioLastGenre = validateGenreIndex(Player.radioLastGenre);
doSearch();
isCreatingLayout = false;
}
@Override
protected void onPause() {
radioStationList.saveFavorites(getApplication());
radioStationList.setObserver(null);
}
@Override
protected void onResume() {
UI.browserActivity = this;
radioStationList.setObserver(list);
if (loading != radioStationList.isLoading())
loadingProcessChanged(radioStationList.isLoading());
}
@Override
protected void onOrientationChanged() {
if (list != null && UI.isLargeScreen)
UI.prepareViewPaddingForLargeScreen(list, 0, 0);
}
@Override
protected void onCleanupLayout() {
UI.animationReset();
if (animator != null) {
animator.release();
animator = null;
}
if (loadingPanelAnimatorHide != null) {
loadingPanelAnimatorHide.release();
loadingPanelAnimatorHide = null;
}
if (loadingPanelAnimatorShow != null) {
loadingPanelAnimatorShow.release();
loadingPanelAnimatorShow = null;
}
list = null;
genres = null;
panelLoading = null;
panelSecondary = null;
btnGoBack = null;
btnFavorite = null;
btnSearch = null;
btnGoBackToPlayer = null;
btnAdd = null;
sep2 = null;
btnPlay = null;
msgNoFavorites = null;
msgNoStations = null;
msgLoading = null;
}
@Override
protected void onDestroy() {
UI.browserActivity = null;
if (radioStationList != null) {
radioStationList.cancel();
radioStationList.setOnBaseListSelectionChangedListener(null);
radioStationList.radioStationAddedObserver = null;
radioStationList = null;
}
}
@Override
public void onRadioStationAdded() {
if (list != null && animator != null && list.getVisibility() != View.VISIBLE) {
animator.end();
list.setVisibility(View.VISIBLE);
animator.start();
}
}
@Override
public void onUpdate(FastAnimator animator, float value) {
}
@Override
public void onEnd(FastAnimator animator) {
if (isHidingLoadingPanel && panelLoading != null) {
isHidingLoadingPanel = false;
panelLoading.setVisibility(View.GONE);
}
}
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
if (parent == btnGenre && btnGenre != null && btnGenreSecondary != null && adapterSecondary != null && genres != null && position >= 0 && position < genres.length) {
if (ignoreFirstNotification) {
ignoreFirstNotification = false;
return;
}
adapterSecondary.genres = genres[position].children;
btnGenreSecondary.setSelection(0);
//since RadioStationAdapter does not keep track of its DataSetObservers,
//we must reset the adapter here
btnGenreSecondary.setAdapter(adapterSecondary);
}
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
}
|
package com.sapienter.jbilling.server.pricing.db;
import com.sapienter.jbilling.server.item.CurrencyBL;
import com.sapienter.jbilling.server.item.PricingField;
import com.sapienter.jbilling.server.item.tasks.PricingResult;
import com.sapienter.jbilling.server.order.Usage;
import com.sapienter.jbilling.server.order.db.OrderDTO;
import com.sapienter.jbilling.server.pricing.PriceModelWS;
import com.sapienter.jbilling.server.pricing.strategy.PricingStrategy;
import com.sapienter.jbilling.server.user.UserBL;
import com.sapienter.jbilling.server.util.db.CurrencyDTO;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.annotations.CollectionOfElements;
import org.hibernate.annotations.Fetch;
import org.hibernate.annotations.FetchMode;
import org.hibernate.annotations.MapKey;
import org.hibernate.annotations.Sort;
import org.hibernate.annotations.SortType;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToOne;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import javax.persistence.TableGenerator;
import javax.persistence.Transient;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
/**
* @author Brian Cowdery
* @since 30-07-2010
*/
@Entity
@Table(name = "price_model")
@TableGenerator(
name = "price_model_GEN",
table = "jbilling_seqs",
pkColumnName = "name",
valueColumnName = "next_id",
pkColumnValue = "price_model",
allocationSize = 100
)
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)
public class PriceModelDTO implements Serializable {
public static final String ATTRIBUTE_WILDCARD = "*";
private Integer id;
private PriceModelStrategy type;
private SortedMap<String, String> attributes = new TreeMap<String, String>();
private BigDecimal rate;
private CurrencyDTO currency;
// price model chaining
private PriceModelDTO next;
public PriceModelDTO() {
}
public PriceModelDTO(PriceModelStrategy type, BigDecimal rate, CurrencyDTO currency) {
this.type = type;
this.rate = rate;
this.currency = currency;
}
public PriceModelDTO(PriceModelWS ws, CurrencyDTO currency) {
setId(ws.getId());
setType(PriceModelStrategy.valueOf(ws.getType()));
setAttributes(new TreeMap<String, String>(ws.getAttributes()));
setRate(ws.getRateAsDecimal());
setCurrency(currency);
}
/**
* Copy constructor.
*
* @param model model to copy
*/
public PriceModelDTO(PriceModelDTO model) {
this.id = model.getId();
this.type = model.getType();
this.attributes = new TreeMap<String, String>(model.getAttributes());
this.rate = model.getRate();
this.currency = model.getCurrency();
if (model.getNext() != null) {
this.next = new PriceModelDTO(model.getNext());
}
}
@Id
@GeneratedValue(strategy = GenerationType.TABLE, generator = "price_model_GEN")
@Column(name = "id", unique = true, nullable = false)
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
@Enumerated(EnumType.STRING)
@Column(name = "strategy_type", nullable = false, length = 25)
public PriceModelStrategy getType() {
return type;
}
public void setType(PriceModelStrategy type) {
this.type = type;
}
@Transient
public PricingStrategy getStrategy() {
return getType() != null ? getType().getStrategy() : null;
}
@CollectionOfElements(fetch = FetchType.EAGER)
@JoinTable(name = "price_model_attribute", joinColumns = @JoinColumn(name = "price_model_id"))
@MapKey(columns = @Column(name = "attribute_name", nullable = true, length = 255))
@Column(name = "attribute_value", nullable = true, length = 255)
@Sort(type = SortType.NATURAL)
@Fetch(FetchMode.SELECT)
public SortedMap<String, String> getAttributes() {
return attributes;
}
public void setAttributes(SortedMap<String, String> attributes) {
this.attributes = attributes;
setAttributeWildcards();
}
/**
* Sets the given attribute. If the attribute is null, it will be persisted as a wildcard "*".
*
* @param name attribute name
* @param value attribute value
*/
public void addAttribute(String name, String value) {
this.attributes.put(name, (value != null ? value : ATTRIBUTE_WILDCARD));
}
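// Example (illustrative): addAttribute("unit", "GB") stores {"unit" -> "GB"}, while
// addAttribute("unit", null) stores {"unit" -> "*"}, so a null value never reaches the
// persisted attribute map.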
/**
* Replaces null values in the attribute list with a wildcard character. Null values cannot be
* persisted using the @CollectionOfElements, and make for uglier 'optional' attribute queries.
*/
public void setAttributeWildcards() {
if (getAttributes() != null && !getAttributes().isEmpty()) {
for (Map.Entry<String, String> entry : getAttributes().entrySet())
if (entry.getValue() == null)
entry.setValue(ATTRIBUTE_WILDCARD);
}
}
/**
* Returns the pricing rate. If the strategy type defines an overriding rate, the
* strategy rate will be returned.
*
* @return pricing rate.
*/
@Column(name = "rate", nullable = true, precision = 10, scale = 22)
public BigDecimal getRate() {
return rate;
}
public void setRate(BigDecimal rate) {
this.rate = rate;
}
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "currency_id", nullable = true)
public CurrencyDTO getCurrency() {
return currency;
}
public void setCurrency(CurrencyDTO currency) {
this.currency = currency;
}
@OneToOne(cascade = CascadeType.ALL, fetch = FetchType.EAGER)
@JoinColumn(name = "next_model_id", nullable = true)
public PriceModelDTO getNext() {
return next;
}
public void setNext(PriceModelDTO next) {
this.next = next;
}
/**
* Applies this pricing to the given PricingResult.
*
* This method will automatically convert the calculated price to the currency of the given
* PricingResult if the set currencies differ.
*
* @see com.sapienter.jbilling.server.pricing.strategy.PricingStrategy
* @param pricingOrder target order for this pricing request (may be null)
* @param quantity quantity of item being priced
* @param result pricing result to apply pricing to
* @param fields pricing fields submitted with this pricing request
* @param usage total item usage for this billing period
* @param singlePurchase true if pricing a single purchase/addition to an order, false if pricing a quantity that already exists on the pricingOrder.
* @param pricingDate pricing date
*/
@Transient
public void applyTo(OrderDTO pricingOrder, BigDecimal quantity, PricingResult result, List<PricingField> fields,
Usage usage, boolean singlePurchase, Date pricingDate) {
// each model in the chain
for (PriceModelDTO next = this; next != null; next = next.getNext()) {
// apply pricing
next.getType().getStrategy().applyTo(pricingOrder, result, fields, next, quantity, usage, singlePurchase);
// convert currency if necessary
if (result.getUserId() != null
&& result.getCurrencyId() != null
&& result.getPrice() != null
&& next.getCurrency() != null
&& next.getCurrency().getId() != result.getCurrencyId()) {
Integer entityId = new UserBL().getEntityId(result.getUserId());
if(pricingDate == null) {
pricingDate = new Date();
}
final BigDecimal converted = new CurrencyBL().convert(next.getCurrency().getId(), result.getCurrencyId(),
result.getPrice(), pricingDate, entityId);
result.setPrice(converted);
}
}
if (result.getPrice() == null) {
result.setPrice(BigDecimal.ZERO);
}
}
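// Hypothetical usage sketch (order, result, fields and usage are assumed to be built by the
// caller; only the constructor and applyTo signature come from this class):
//
//   PriceModelDTO model = new PriceModelDTO(someStrategy, new BigDecimal("0.05"), someCurrency);
//   model.applyTo(order, quantity, result, fields, usage, true, new Date());
//   // result.getPrice() now holds the price produced by the whole chain, converted into the
//   // currency of the PricingResult when the model currency differs.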
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PriceModelDTO that = (PriceModelDTO) o;
if (attributes != null ? !attributes.equals(that.attributes) : that.attributes != null) return false;
if (currency != null ? !currency.equals(that.currency) : that.currency != null) return false;
if (id != null ? !id.equals(that.id) : that.id != null) return false;
if (rate != null ? !rate.equals(that.rate) : that.rate != null) return false;
if (type != that.type) return false;
return true;
}
@Override
public int hashCode() {
int result = id != null ? id.hashCode() : 0;
result = 31 * result + (type != null ? type.hashCode() : 0);
result = 31 * result + (attributes != null ? attributes.hashCode() : 0);
result = 31 * result + (rate != null ? rate.hashCode() : 0);
result = 31 * result + (currency != null ? currency.hashCode() : 0);
return result;
}
@Override
public String toString() {
return "PriceModelDTO{"
+ "id=" + id
+ ", type=" + type
+ ", attributes=" + attributes
+ ", rate=" + rate
+ ", currencyId=" + (currency != null ? currency.getId() : null)
+ ", next=" + next
+ '}';
}
}
|
package org.broad.igv.batch;
import org.apache.commons.lang.StringUtils;
import org.broad.igv.Globals;
import org.broad.igv.PreferenceManager;
import org.broad.igv.dev.api.batch.Command;
import org.broad.igv.feature.RegionOfInterest;
import org.broad.igv.feature.genome.GenomeManager;
import org.broad.igv.track.RegionScoreType;
import org.broad.igv.track.Track;
import org.broad.igv.ui.AbstractHeadedTest;
import org.broad.igv.ui.IGV;
import org.broad.igv.ui.IGVTestHeadless;
import org.broad.igv.ui.panel.FrameManager;
import org.broad.igv.util.ResourceLocator;
import org.broad.igv.util.TestUtils;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import org.junit.rules.Timeout;
import javax.imageio.ImageIO;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.File;
import java.util.*;
import java.util.List;
import static junit.framework.Assert.*;
public class CommandExecutorTest extends AbstractHeadedTest {
CommandExecutor exec = new CommandExecutor();
private final String snapshotDir = TestUtils.TMP_OUTPUT_DIR;
@Rule
public TestRule testTimeout = new Timeout((int) 1800000);
@Before
public void setUp() throws Exception {
super.setUp();
Globals.setBatch(true);
igv.loadGenome(TestUtils.defaultGenome, null);
igv.newSession();
exec.setSnapshotDirectory(snapshotDir);
}
@Test
public void testRegionNoname() throws Exception{
tstRegion(null);
}
@Test
public void testRegionName() throws Exception{
tstRegion("myregion");
}
public void tstRegion(String desc) throws Exception {
String regionStr = "chr1:50-1000";
String descstr = desc != null ? desc : "";
exec.execute("region " + regionStr + " " + descstr);
Collection<RegionOfInterest> regions = IGV.getInstance().getSession().getAllRegionsOfInterest();
assertEquals(1, regions.size());
ArrayList<RegionOfInterest> regionsAL = (ArrayList<RegionOfInterest>) regions;
RegionOfInterest region = regionsAL.get(0);
assertEquals("chr1", region.getChr());
assertEquals(49, region.getStart());
assertEquals(1000, region.getEnd());
assertEquals(descstr, region.getDescription());
}
@Test
@Ignore
public void stressTestSnapshotsHG00171() throws Exception{
PreferenceManager.getInstance().put(PreferenceManager.SAM_MAX_VISIBLE_RANGE, "1000");
String interv0 = "chr1:151666000-152666000";
String interv1 = "chr1:154666000-155666000";
String[] intervals = {interv0, interv1};
String filePath = TestUtils.LARGE_DATA_DIR + "HG00171.hg18.bam";
stressTstSnapshots(filePath, intervals);
}
@Test
@Ignore
public void stressTestSnapshotsBodymap() throws Exception{
PreferenceManager.getInstance().put(PreferenceManager.SAM_DOWNSAMPLE_READS, "true");
PreferenceManager.getInstance().put(PreferenceManager.SAM_SAMPLING_COUNT, "100");
PreferenceManager.getInstance().put(PreferenceManager.SAM_MAX_VISIBLE_RANGE, "1000");
String interv0 = "chr12:97,509,534-97,521,909"; //SLC25A3
String interv1 = "chrX:153,366,844-153,374,196"; //SLC10A3
String[] intervals = {interv0, interv1};
String filePath = TestUtils.DATA_DIR + "sessions/bodymap_3tissue.xml";
stressTstSnapshots(filePath, intervals);
}
/**
* Take a large number of snapshots, make sure they all
* actually show data.
* @throws Exception
*/
public void stressTstSnapshots(String filePath, String[] intervals) throws Exception{
exec.execute("load " + filePath);
//exec.execute(("setSleepInterval 10000"));
//For each interval we base our expected size on the first snapshot
Map<String, Long> intervalSizeMap = new HashMap<String, Long>(intervals.length);
Long expSize;
long margin;
int numTrials = 50;
for(int tri=0; tri < numTrials; tri++){
int intInd = tri % intervals.length;
String interval = intervals[intInd];
expSize = intervalSizeMap.get(interval);
exec.execute("goto " + interval);
String outFileName = outFileBase + tri + ".png";
File outFile = new File(snapshotDir, outFileName);
if(outFile.exists()) outFile.delete();
tstSnapshot(outFileName);
long size = outFile.length();
if(expSize == null){
expSize = size;
intervalSizeMap.put(interval, expSize);
}
margin = expSize / 10;
long sizeDiff = Math.abs(size - expSize);
//break;
assertTrue(String.format("File size much different than expected. Trial %d, Diff = %d, margin = %d", tri, sizeDiff, margin), sizeDiff < margin);
}
}
@Test
public void testSetMaxDepth() throws Exception {
setCheckMaxDepth(5);
setCheckMaxDepth(50);
}
private void setCheckMaxDepth(int maxDepth) {
String res = exec.execute("maxDepth " + maxDepth);
assertFalse(res.contains("ERROR"));
int newMaxDepth = PreferenceManager.getInstance().getAsInt(PreferenceManager.SAM_SAMPLING_COUNT);
assertEquals(maxDepth, newMaxDepth);
}
@Test
public void testSortByRegionScoreType() throws Exception {
Timer deadlockChecker = TestUtils.startDeadlockChecker(1000);
String sessionPath = TestUtils.DATA_DIR + "sessions/BRCA_loh2.xml";
TestUtils.loadSession(igv, sessionPath);
Collection<RegionOfInterest> rois = igv.getSession().getAllRegionsOfInterest();
List<Track> tracks;
int count = 0;
for (RegionOfInterest roi : rois) {
for (RegionScoreType type : RegionScoreType.values()) {
igv.sortAllTracksByAttributes(new String[]{"NAME"}, new boolean[]{false});
String typeStr = type.toString().toUpperCase();
if (count % 2 == 0) {
typeStr = typeStr.toLowerCase();
}
String resp = exec.execute("sort " + typeStr + " " + roi.getLocusString());
assertEquals("OK", resp);
tracks = igv.getAllTracks();
IGVTestHeadless.checkIsSorted(tracks, roi, type, FrameManager.getDefaultFrame().getZoom());
count++;
}
}
deadlockChecker.cancel();
deadlockChecker.purge();
}
private final String outFileBase = "testSnap";
@Test
public void testSnapShotPng() throws Exception {
String outFileName = outFileBase + ".Png";
tstSnapshot(outFileName);
}
@Test
public void testSnapshotTracksOnly() throws Exception {
String outFileName = outFileBase + "_track.png";
tstSnapshot(outFileName, true, "trackpanels");
}
@Test
public void testSnapShotJpeg() throws Exception {
tstSnapshot(outFileBase + ".jpeg");
}
@Test
public void testSnapShotJpg() throws Exception {
tstSnapshot(outFileBase + ".jpg");
}
@Test
public void testSnapShotSvg() throws Exception {
String outFileName = outFileBase + ".svG";
tstSnapshot(outFileName);
}
@Test
public void testSnapShotFails() throws Exception {
String[] exts = new String[]{"abc", "svt", "pnq"};
for (String ext : exts) {
String outFileName = outFileBase + "." + ext;
tstSnapshot(outFileName, false, null);
}
}
public File tstSnapshot(String outFileName) throws Exception {
return tstSnapshot(outFileName, true, null);
}
public File tstSnapshot(String outFileName, boolean shouldSucceed, String moreargs) throws Exception {
File out = new File(snapshotDir, outFileName);
assertFalse(out.exists());
String toexec = "snapshot " + outFileName;
if (moreargs != null && moreargs.length() > 0) {
toexec += " " + moreargs;
}
exec.execute(toexec);
assertEquals(shouldSucceed, out.exists());
return out;
}
public static final String urlPathSpaces = "ftp://ftp.broadinstitute.org/distribution/igv/TEST/cpgIslands%20with%20spaces.hg18.bed";
public static final String dirPathSpaces = TestUtils.DATA_DIR + "folder with spaces";
public static final String fileName01 = "test.wig";
public static final String fileNamePerc = "%test%2D.wig";
public static final String fileNamePlus = "test+wp.wig";
@Test
public void testLoadFileSpaces() throws Exception{
tstLoadFileSpaces(fileName01);
}
@Test
public void testLoadFileSpacesPerc() throws Exception{
tstLoadFileSpaces(fileNamePerc);
}
@Test
public void testLoadFileSpacesPlus() throws Exception{
tstLoadFileSpaces(fileNamePlus);
}
@Test
public void testLoadFileURLSpaces() throws Exception{
tstLoadFileURLSpaces(fileName01);
}
@Test
public void testLoadFileURLSpacesPerc() throws Exception{
tstLoadFileURLSpaces(fileNamePerc);
}
@Test
public void testLoadFileURLSpacesPlus() throws Exception{
tstLoadFileURLSpaces(fileNamePlus);
}
private void tstLoadFileURLSpaces(String filename) throws Exception{
String fileURL = "file://" + org.broad.igv.util.StringUtils.encodeURL(new File(dirPathSpaces, filename).getAbsolutePath());
exec.execute("load " + fileURL);
TestUtils.assertTrackLoaded(IGV.getInstance(), filename);
}
private void tstLoadFileSpaces(String filename) throws Exception{
File file = new File(dirPathSpaces, filename);
exec.execute("load \"" + file.getPath() + "\"");
TestUtils.assertTrackLoaded(IGV.getInstance(), filename);
}
@Test
public void testLoadURL() throws Exception {
//This is mostly to ruggedize test setup. The setup may load
//reference/sequence tracks, we'd like to be able to change
//test setup and not worry about this test.
int beginTracks = igv.getAllTracks().size();
String urlPath = urlPathSpaces;
exec.loadFiles(urlPath, null, true, "hasSpaces");
String localPath = TestUtils.DATA_DIR + "bed/test.bed";
exec.loadFiles(localPath, null, true, null);
assertEquals(2, igv.getAllTracks().size() - beginTracks);
}
@Test
public void testSetDataRange() throws Exception {
String dataFile = TestUtils.DATA_DIR + "igv/recombRate.ens.igv.txt";
exec.loadFiles(dataFile, null, true, null);
String[] goodArgSet = new String[]{"0,5.0 ", "0,1,5", "-1,0,1", "-1.32,10.21"};
for (String arg : goodArgSet) {
String resp = exec.execute("setDataRange " + arg);
assertEquals("OK", resp);
}
String[] badArgSet = new String[]{"0 ", "-1,0,2,3", "o,1o"};
for (String arg : badArgSet) {
String resp = exec.execute("setDataRange " + arg);
assertTrue(resp.toLowerCase().startsWith("error"));
}
}
@Test
public void testLoadGenomesById() throws Exception {
String[] genomeIds = new String[]{"hg19", "mm10", "rn5", "canFam2", "bosTau7", "sacCer3", "WS220"};
for (String genomeId : genomeIds) {
String result = exec.execute("genome " + genomeId);
assertEquals("OK", result);
assertEquals(genomeId, GenomeManager.getInstance().getCurrentGenome().getId());
}
}
@Test
public void testLoadGenomeFile() throws Exception {
String[] genomePaths = new String[]{TestUtils.DATA_DIR + "genomes/hg18.unittest.genome"};
String[] genomeIds = new String[]{"hg18.unittest"};
int ind = 0;
for (String genomePath : genomePaths) {
String result = exec.execute("genome " + genomePath);
assertEquals("OK", result);
assertEquals(genomeIds[ind++], GenomeManager.getInstance().getCurrentGenome().getId());
}
}
@Test
public void testLoadGenomeFastaFile() throws Exception {
String[] genomePaths = new String[]{TestUtils.DATA_DIR + "fasta/ecoli_out.padded.fasta"};
String[] genomeIds = genomePaths;
int ind = 0;
for (String genomePath : genomePaths) {
String result = exec.execute("genome " + genomePath);
assertEquals("OK", result);
assertEquals(genomeIds[ind++], GenomeManager.getInstance().getCurrentGenome().getId());
}
}
@Test
public void testLoadGenomesFail() throws Exception {
String startId = genome.getId();
String[] genomeIds = new String[]{"hg1920", "inch10", "doctor5"};
for (String genomeId : genomeIds) {
String result = exec.execute("genome " + genomeId);
assertTrue(result.toLowerCase().startsWith(("error")));
assertEquals(startId, GenomeManager.getInstance().getCurrentGenome().getId());
}
}
@Test
public void testPreference() throws Exception{
String key = PreferenceManager.DATA_SERVER_URL_KEY;
String val = "myDataServerURL";
assertNotSame(val, PreferenceManager.getInstance().getDataServerURL());
exec.execute(String.format("preference %s %s", key, val));
assertEquals(val, PreferenceManager.getInstance().getDataServerURL());
}
@Test
public void testSnapshotsize() throws Exception{
String filePath = TestUtils.DATA_DIR + "bam/NA12878.SLX.sample.bam";
int numLoads = 1;
for(int ii = 0; ii < numLoads; ii++){
IGV.getInstance().loadResources(Arrays.asList(new ResourceLocator(filePath)));
}
exec.execute("goto chr1:9,713,386-9,733,865");
int minHeight = (int) Toolkit.getDefaultToolkit().getScreenSize().getHeight() - 150;
int maxHeight = minHeight + 200;
String outFileName = minHeight + ".png";
exec.execute("maxpanelheight " + maxHeight);
exec.execute("snapshot " + outFileName);
File outputFile = new File(snapshotDir, outFileName);
BufferedImage image = ImageIO.read(outputFile);
assertTrue("Output image height " + image.getHeight() + " is not at least " + minHeight, image.getHeight() > minHeight);
int remAlphaMask = 0x00ffffff;
int numBlackPix = 0;
for(int yy = image.getMinY(); yy < image.getHeight(); yy++){
for(int xx = image.getMinX(); xx < image.getWidth(); xx++){
int color = image.getRGB(xx, yy) & remAlphaMask;
numBlackPix += color == 0 ? 1 : 0;
}
}
//Just making sure we don't trivially satisfy the problem
assertTrue(numBlackPix > 100);
int totalPix = image.getHeight()*image.getWidth();
assertTrue("Too much of the snapshot is black", numBlackPix < totalPix * 0.1);
}
@Test
public void testCustomCommand_Echo() throws Exception {
String cmd = EchoCommand.class.getName();
String otherArgs = "fly high free bird";
String fullCmd = String.format("%s %s", cmd, otherArgs);
String response = exec.execute(fullCmd);
assertEquals(otherArgs, response);
}
public static class EchoCommand implements Command {
@Override
public String run(List<String> args) {
return StringUtils.join(args, " ");
}
}
}
|
package edu.smcm.gamedev.butterseal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.maps.MapLayer;
import com.badlogic.gdx.maps.MapProperties;
import com.badlogic.gdx.maps.tiled.TiledMap;
import com.badlogic.gdx.maps.tiled.TiledMapTile;
import com.badlogic.gdx.maps.tiled.TiledMapTileLayer;
import com.badlogic.gdx.maps.tiled.TiledMapTileLayer.Cell;
import com.badlogic.gdx.maps.tiled.TmxMapLoader;
import com.badlogic.gdx.maps.tiled.renderers.OrthogonalTiledMapRenderer;
public enum BSMap {
HOME(null, null, new Runnable() {
public void run() {
// game logic
}
}),
ICE_CAVE_ENTRY(null, null, new Runnable() {
public void run() {
// game logic
}
}),
ICE_CAVE(BSAssets.ICE_CAVE, "ice-cave", new Runnable() {
public void run() {
// game logic
}
});
BSGameState state;
TiledMap map;
OrthogonalTiledMapRenderer renderer;
BSMap(BSAssets asset, String key, Runnable action) {
// Uses the post-1.0 libgdx Tiled API (TmxMapLoader/OrthogonalTiledMapRenderer); the older
// TiledLoader/TileAtlas/TileMapRenderer classes are no longer available. A unit scale of
// 1/16f assumes 16x16-pixel tiles. Constants without an asset (HOME, ICE_CAVE_ENTRY) skip loading.
if (asset != null) {
this.map = new TmxMapLoader().load(asset.getAssetPath());
this.renderer = new OrthogonalTiledMapRenderer(this.map, 1 / 16f);
}
}
void draw(OrthographicCamera camera) {
if (this.renderer == null) {
return;
}
this.renderer.setView(camera);
this.renderer.render();
}
public List<Map<String, String>> getTileProperties(BSPlayer player) {
// MapProperties is libgdx's key/value container for tile properties; copy each tile's
// properties into a plain Map<String, String> so callers do not depend on the libgdx type.
List<Map<String, String>> ret = new ArrayList<Map<String, String>>(this.map.getLayers().getCount());
for (MapLayer layer : this.map.getLayers()) {
if (!(layer instanceof TiledMapTileLayer)) {
continue;
}
Cell cell = ((TiledMapTileLayer) layer).getCell((int) player.currentTile.x, (int) player.currentTile.y);
if (cell == null || cell.getTile() == null) {
continue;
}
TiledMapTile tile = cell.getTile();
MapProperties tileProperties = tile.getProperties();
Map<String, String> properties = new HashMap<String, String>();
Iterator<String> keys = tileProperties.getKeys();
while (keys.hasNext()) {
String name = keys.next();
Object value = tileProperties.get(name);
properties.put(name, value == null ? null : value.toString());
}
ret.add(properties);
}
return ret;
}
public void draw() {
// TODO Auto-generated method stub
}
}
// Local Variables:
// indent-tabs-mode: nil
// End:
|
package cz.cuni.mff.spl.evaluator.graphs;
import java.util.Arrays;
import java.util.ArrayList;
public class EmpiricalDistribution{
public static final int DEFAULT_BIN_COUNT = 200;
private int binCount;
public EmpiricalDistribution(int binCount){
this.binCount = binCount;
}
public EmpiricalDistribution(){
this(DEFAULT_BIN_COUNT);
}
/**
* Function takes a double array of data and outputs an nx2 arrayList
* containing the y and x coordinates [y,x] of the input's empirical
* distribution.
*/
public ArrayList<ArrayList<Double>> load(double[] a){
int n = a.length;
Arrays.sort(a);
// use doubles for min/max: truncating to int can push data values outside [min, max]
// and index past the end of the counts array
double min = a[0];
double max = a[n-1];
double range = max - min;
if (n < 100){
binCount = 200;
}
double binSize = range/(double)binCount;
ArrayList<ArrayList<Double>> output = new ArrayList<ArrayList<Double>>();
Double[] counts = new Double[binCount+1];
double inc = 1/(double)n;
for (int i = 0; i < counts.length; i++){
counts[i] = 0D;
}
//for each data point, add one to the appropriate bin
for (int i = 0; i < n; i++){
counts[(int)((a[i] - min)/binSize)]++;
}
double current = 0D;
// the EDF must start at 0 (on the y axis) just before the first data point
ArrayList<Double> firstEntry = new ArrayList<Double>();
firstEntry.add(0D);
firstEntry.add(min - binSize);
output.add(firstEntry);
//for each bin increase the step by 1/n * (number of datapoints in bin)
// need to add the point before and after the step to maintain the step
//function, y coordinates - output(i,0), x coordinates - output(i,1)
for (int i = 0; i < counts.length; i++){
//if no data points in bin, line is flat, no step
if (counts[i] == 0){
ArrayList<Double> entry = new ArrayList<Double>();
entry.add(current);
entry.add(Double.valueOf(i*binSize + min));
output.add(entry);
}else{
// add a plot point before the step
ArrayList<Double> entry = new ArrayList<Double>();
entry.add(current);
entry.add(Double.valueOf(i*binSize + min));
output.add(entry);
// add a plot point after the step
current += counts[i]*inc;
ArrayList<Double> entry2 = new ArrayList<Double>();
entry2.add(current);
entry2.add(Double.valueOf(i*binSize + min));
output.add(entry2);
}
}
return output;
}
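// Minimal usage sketch (hypothetical sample values): prints the x and y coordinates of the
// step-function points returned by load(). Assumes the sample is not constant (range > 0).
public static void main(String[] args) {
double[] sample = {1.0, 2.0, 2.0, 3.0, 5.0};
ArrayList<ArrayList<Double>> edf = new EmpiricalDistribution(10).load(sample);
for (ArrayList<Double> point : edf) {
// point.get(0) is the cumulative probability, point.get(1) the x coordinate
System.out.println(point.get(1) + "\t" + point.get(0));
}
}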
}
|
package net.katsuster.strview.media.mp4;
import java.io.*;
import net.katsuster.strview.media.bit.*;
import net.katsuster.strview.util.bit.*;
/**
* <p>
* MPEG4 media file format hdlr (handler reference) box
* </p>
*
* <p>
*
* </p>
* <ul>
* <li>ISO/IEC 14496-12: ISO base media file format</li>
* </ul>
*/
public class MP4HeaderHdlr extends MP4HeaderFull
implements Cloneable {
public UInt pre_defined;
public UInt handler_type;
public UInt[] reserved;
public UInt namelen;
public LargeBitList name;
public MP4HeaderHdlr() {
pre_defined = new UInt("pre_defined");
handler_type = new UInt("handler_type");
reserved = new UInt[3];
namelen = new UInt("namelen");
name = new SubLargeBitList("name");
}
@Override
public MP4HeaderHdlr clone()
throws CloneNotSupportedException {
MP4HeaderHdlr obj =
(MP4HeaderHdlr)super.clone();
int i;
obj.pre_defined = (UInt)pre_defined.clone();
obj.handler_type = (UInt)handler_type.clone();
obj.reserved = reserved.clone();
for (i = 0; i < obj.reserved.length; i++) {
obj.reserved[i] = (UInt)reserved[i].clone();
}
obj.namelen = (UInt)namelen.clone();
obj.name = (LargeBitList) name.clone();
return obj;
}
@Override
public boolean isRecursive() {
return false;
}
@Override
public void readBits(BitStreamReader b) {
readBits(b, this);
}
public static void readBits(BitStreamReader b,
MP4HeaderHdlr d) {
int size;
int i;
MP4HeaderFull.readBits(b, d);
d.pre_defined = b.readUInt(32, d.pre_defined );
d.handler_type = b.readUInt(32, d.handler_type);
d.reserved = new UInt[3];
for (i = 0; i < d.reserved.length; i++) {
d.reserved[i] = new UInt("reserved[" + i + "]");
d.reserved[i] = b.readUInt(32, d.reserved[i]);
}
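// The rest of the box is treated as the handler name: total box bits excluding the 4-byte
// size field, minus the bits already consumed since the start of the type field.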
size = ((d.size.intValue() - 4) << 3)
- (int)(b.position() - d.type.getRange().getStart());
d.name = b.readBitList(size, d.name);
}
@Override
public void writeBits(BitStreamWriter b) {
writeBits(b, this);
}
public static void writeBits(BitStreamWriter b,
MP4HeaderHdlr d) {
int i;
MP4HeaderFull.writeBits(b, d);
b.writeUInt(32, d.pre_defined );
b.writeUInt(32, d.handler_type, d.getHandlerTypeName());
for (i = 0; i < d.reserved.length; i++) {
b.writeUInt(32, d.reserved[i]);
}
b.writeBitList((int)d.name.length(), d.name, d.getNameName());
}
public String getHandlerTypeName() {
return MP4Consts.getTypeName(handler_type.intValue());
}
public String getNameName() {
return getArrayName(name, "US-ASCII");
}
}
|
package org.jdesktop.swingx.autocomplete;
import java.awt.event.ActionEvent;
import java.awt.event.FocusAdapter;
import java.awt.event.FocusEvent;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.List;
import javax.swing.Action;
import javax.swing.ActionMap;
import javax.swing.ComboBoxEditor;
import javax.swing.InputMap;
import javax.swing.JComboBox;
import javax.swing.JList;
import javax.swing.KeyStroke;
import javax.swing.UIManager;
import javax.swing.text.DefaultEditorKit;
import javax.swing.text.JTextComponent;
import javax.swing.text.TextAction;
/**
* This class contains only static utility methods that can be used to set up
* automatic completion for some Swing components.
* <p>Usage examples:</p>
* <p><code>
* JComboBox comboBox = [...];<br/>
* AutoCompleteDecorator.<b>decorate</b>(comboBox);<br/>
* <br/>
* List items = [...];<br/>
* JTextField textField = [...];<br/>
* AutoCompleteDecorator.<b>decorate</b>(textField, items);
* <br/>
* JList list = [...];<br/>
* JTextField textField = [...];<br/>
* AutoCompleteDecorator.<b>decorate</b>(list, textField);
* </code></p>
*
* @author Thomas Bierhance
*/
public class AutoCompleteDecorator {
/**
* Enables automatic completion for the given JTextComponent based on the
* items contained in the given <tt>List</tt>.
* @param textComponent the text component that will be used for automatic
* completion.
* @param items contains the items that are used for autocompletion
* @param strictMatching <tt>true</tt>, if only given items should be allowed to be entered
*/
public static void decorate(JTextComponent textComponent, List items, boolean strictMatching) {
decorate(textComponent, items, strictMatching, ObjectToStringConverter.DEFAULT_IMPLEMENTATION);
}
/**
* Enables automatic completion for the given JTextComponent based on the
* items contained in the given <tt>List</tt>.
* @param items contains the items that are used for autocompletion
* @param textComponent the text component that will be used for automatic
* completion.
* @param strictMatching <tt>true</tt>, if only given items should be allowed to be entered
* @param stringConverter the converter used to transform items to strings
*/
public static void decorate(JTextComponent textComponent, List items, boolean strictMatching, ObjectToStringConverter stringConverter) {
AbstractAutoCompleteAdaptor adaptor = new TextComponentAdaptor(textComponent, items);
AutoCompleteDocument document = new AutoCompleteDocument(adaptor, strictMatching, stringConverter);
decorate(textComponent, document, adaptor);
}
/**
* Enables automatic completion for the given JTextComponent based on the
* items contained in the given JList. The two components will be
* synchronized. The automatic completion will always be strict.
* @param list a <tt>JList</tt> containing the items for automatic completion
* @param textComponent the text component that will be enabled for automatic
* completion
*/
public static void decorate(JList list, JTextComponent textComponent) {
decorate(list, textComponent, ObjectToStringConverter.DEFAULT_IMPLEMENTATION);
}
/**
* Enables automatic completion for the given JTextComponent based on the
* items contained in the given JList. The two components will be
* synchronized. The automatic completion will always be strict.
* @param list a <tt>JList</tt> containing the items for automatic completion
* @param textComponent the text component that will be used for automatic
* completion
* @param stringConverter the converter used to transform items to strings
*/
public static void decorate(JList list, JTextComponent textComponent, ObjectToStringConverter stringConverter) {
AbstractAutoCompleteAdaptor adaptor = new ListAdaptor(list, textComponent, stringConverter);
AutoCompleteDocument document = new AutoCompleteDocument(adaptor, true, stringConverter);
decorate(textComponent, document, adaptor);
}
/**
* Enables automatic completion for the given JComboBox. The automatic
* completion will be strict (only items from the combo box can be selected)
* if the combo box is not editable.
* @param comboBox a combo box
*/
public static void decorate(final JComboBox comboBox) {
decorate(comboBox, ObjectToStringConverter.DEFAULT_IMPLEMENTATION);
}
/**
* Enables automatic completion for the given JComboBox. The automatic
* completion will be strict (only items from the combo box can be selected)
* if the combo box is not editable.
* @param comboBox a combo box
* @param stringConverter the converter used to transform items to strings
*/
public static void decorate(final JComboBox comboBox, final ObjectToStringConverter stringConverter) {
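// A non-editable combo box implies strict matching: only items from the model may be entered.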
boolean strictMatching = !comboBox.isEditable();
// has to be editable
comboBox.setEditable(true);
// configure the text component (the combo box's editor component)
JTextComponent editorComponent = (JTextComponent) comboBox.getEditor().getEditorComponent();
final AbstractAutoCompleteAdaptor adaptor = new ComboBoxAdaptor(comboBox);
final AutoCompleteDocument document = new AutoCompleteDocument(adaptor, strictMatching, stringConverter);
decorate(editorComponent, document, adaptor);
// show the popup list when the user presses a key
final KeyListener keyListener = new KeyAdapter() {
public void keyPressed(KeyEvent keyEvent) {
// don't popup on action keys (cursor movements, etc...)
if (keyEvent.isActionKey()) return;
// don't popup if the combobox isn't visible anyway
if (comboBox.isDisplayable() && !comboBox.isPopupVisible()) {
int keyCode = keyEvent.getKeyCode();
// don't popup when the user hits shift, ctrl or alt
if (keyCode==KeyEvent.VK_SHIFT || keyCode==KeyEvent.VK_CONTROL || keyCode==KeyEvent.VK_ALT) return;
// don't popup when the user hits escape (see issue #311)
if (keyCode==KeyEvent.VK_ESCAPE) return;
comboBox.setPopupVisible(true);
}
}
};
editorComponent.addKeyListener(keyListener);
if (stringConverter!=ObjectToStringConverter.DEFAULT_IMPLEMENTATION) {
comboBox.setEditor(new AutoCompleteComboBoxEditor(comboBox.getEditor(), stringConverter));
}
// Changing the l&f can change the combo box's editor, which in turn
// would not be autocompletion-enabled. The new editor needs to be set up.
comboBox.addPropertyChangeListener("editor", new PropertyChangeListener() {
public void propertyChange(PropertyChangeEvent e) {
ComboBoxEditor editor = (ComboBoxEditor) e.getNewValue();
if (editor!=null && editor.getEditorComponent()!=null) {
if (!(editor instanceof AutoCompleteComboBoxEditor)
&& stringConverter!=ObjectToStringConverter.DEFAULT_IMPLEMENTATION) {
comboBox.setEditor(new AutoCompleteComboBoxEditor(editor, stringConverter));
// Don't do the decorate step here because calling setEditor will trigger
// the propertychange listener a second time, which will do the decorate
// and addKeyListener step.
} else {
decorate((JTextComponent) editor.getEditorComponent(), document, adaptor);
editor.getEditorComponent().addKeyListener(keyListener);
}
}
}
});
}
/**
* Decorates a given text component for automatic completion using the
* given AutoCompleteDocument and AbstractAutoCompleteAdaptor.
*
*
* @param textComponent a text component that should be decorated
* @param document the AutoCompleteDocument to be installed on the text component
* @param adaptor the AbstractAutoCompleteAdaptor to be used
*/
public static void decorate(JTextComponent textComponent, AutoCompleteDocument document, final AbstractAutoCompleteAdaptor adaptor) {
// install the document on the text component
textComponent.setDocument(document);
// mark entire text when the text component gains focus
// otherwise the last mark would have been retained which is quite confusing
textComponent.addFocusListener(new FocusAdapter() {
public void focusGained(FocusEvent e) {
JTextComponent textComponent = (JTextComponent) e.getSource();
adaptor.markEntireText();
}
});
// Tweak some key bindings
InputMap editorInputMap = textComponent.getInputMap();
if (document.isStrictMatching()) {
// move the selection to the left on VK_BACK_SPACE
editorInputMap.put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_BACK_SPACE, 0), DefaultEditorKit.selectionBackwardAction);
// ignore VK_DELETE and CTRL+VK_X and beep instead when strict matching
editorInputMap.put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_DELETE, 0), errorFeedbackAction);
editorInputMap.put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_X, java.awt.event.InputEvent.CTRL_DOWN_MASK), errorFeedbackAction);
} else {
ActionMap editorActionMap = textComponent.getActionMap();
// leave VK_DELETE and CTRL+VK_X as is
// VK_BACKSPACE will move the selection to the left if the selected item is in the list
// it will delete the previous character otherwise
editorInputMap.put(KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_BACK_SPACE, 0), "nonstrict-backspace");
editorActionMap.put("nonstrict-backspace", new NonStrictBackspaceAction(
editorActionMap.get(DefaultEditorKit.deletePrevCharAction),
editorActionMap.get(DefaultEditorKit.selectionBackwardAction),
adaptor));
}
}
static class NonStrictBackspaceAction extends TextAction {
Action backspace;
Action selectionBackward;
AbstractAutoCompleteAdaptor adaptor;
public NonStrictBackspaceAction(Action backspace, Action selectionBackward, AbstractAutoCompleteAdaptor adaptor) {
super("nonstrict-backspace");
this.backspace = backspace;
this.selectionBackward = selectionBackward;
this.adaptor = adaptor;
}
public void actionPerformed(ActionEvent e) {
if (adaptor.listContainsSelectedItem()) {
selectionBackward.actionPerformed(e);
} else {
backspace.actionPerformed(e);
}
}
}
/**
* A TextAction that provides an error feedback for the text component that invoked
* the action. The error feedback is most likely a "beep".
*/
static Object errorFeedbackAction = new TextAction("provide-error-feedback") {
public void actionPerformed(ActionEvent e) {
UIManager.getLookAndFeel().provideErrorFeedback(getTextComponent(e));
}
};
}
|
package org.jivesoftware.wildfire.server;
import com.jcraft.jzlib.JZlib;
import com.jcraft.jzlib.ZInputStream;
import org.dom4j.DocumentException;
import org.dom4j.Element;
import org.dom4j.io.XMPPPacketReader;
import org.jivesoftware.util.JiveGlobals;
import org.jivesoftware.util.LocaleUtils;
import org.jivesoftware.util.Log;
import org.jivesoftware.util.StringUtils;
import org.jivesoftware.wildfire.*;
import org.jivesoftware.wildfire.auth.UnauthorizedException;
import org.jivesoftware.wildfire.net.DNSUtil;
import org.jivesoftware.wildfire.net.MXParser;
import org.jivesoftware.wildfire.net.SocketConnection;
import org.jivesoftware.wildfire.spi.BasicStreamIDFactory;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import org.xmpp.packet.*;
import javax.net.ssl.SSLHandshakeException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.regex.Pattern;
/**
* Server-to-server communication is done using two TCP connections between the servers. One
* connection is used for sending packets while the other connection is used for receiving packets.
* The <tt>OutgoingServerSession</tt> represents the connection to a remote server that will only
* be used for sending packets.<p>
*
* Currently only the Server Dialback method is being used for authenticating with the remote
* server. Use {@link #authenticateDomain(String, String)} to create a new connection to a remote
* server that will be used for sending packets to the remote server from the specified domain.
* Only the authenticated domains with the remote server will be able to effectively send packets
* to the remote server. The remote server will reject and close the connection if a
* non-authenticated domain tries to send a packet through this connection.<p>
*
* Once the connection has been established with the remote server and at least a domain has been
* authenticated then a new route will be added to the routing table for this connection. For
* optimization reasons the same outgoing connection will be used even if the remote server has
* several hostnames. However, different routes will be created in the routing table for each
* hostname of the remote server.
*
* @author Gaston Dombiak
*/
public class OutgoingServerSession extends Session {
/**
* Regular expression to ensure that the hostname contains letters.
*/
private static Pattern pattern = Pattern.compile("[a-zA-Z]");
private Collection<String> authenticatedDomains = new ArrayList<String>();
private Collection<String> hostnames = new ArrayList<String>();
private OutgoingServerSocketReader socketReader;
/**
* Flag that indicates if the session was created using server dialback.
*/
private boolean usingServerDialback = true;
/**
* Creates a new outgoing connection to the specified hostname if none exists. The port of
* the remote server can be configured by setting the <b>xmpp.server.socket.remotePort</b>
* property; otherwise the standard port 5269 will be used. Whether a new connection was
* created or one already existed, the specified hostname will be authenticated with the
* remote server. Once authenticated, the remote server will start accepting packets from
* the specified domain.<p>
*
* The Server Dialback method is currently the only implemented method for server-to-server
* authentication. This implies that the remote server will ask the Authoritative Server
* to verify the domain to authenticate. Most probably this server will act as the
* Authoritative Server. See {@link IncomingServerSession} for more information.
*
* @param domain the local domain to authenticate with the remote server.
* @param hostname the hostname of the remote server.
* @return True if the domain was authenticated by the remote server.
*/
public static boolean authenticateDomain(String domain, String hostname) {
if (hostname == null || hostname.length() == 0 || hostname.trim().indexOf(' ') > -1) {
// Do nothing if the target hostname is empty, null or contains whitespaces
return false;
}
try {
// Check if the remote hostname is in the blacklist
if (!RemoteServerManager.canAccess(hostname)) {
return false;
}
// Check if a session, that is using server dialback, already exists to the desired
// hostname (i.e. remote server). If none exists then create a new session. The same
// session will be used for the same hostname for all the domains to authenticate
SessionManager sessionManager = SessionManager.getInstance();
OutgoingServerSession session = sessionManager.getOutgoingServerSession(hostname);
if (session == null) {
// Check whether the remote server has previously authenticated with this server
for (IncomingServerSession incomingSession : sessionManager
.getIncomingServerSessions(hostname)) {
for (String otherHostname : incomingSession.getValidatedDomains()) {
session = sessionManager.getOutgoingServerSession(otherHostname);
if (session != null) {
if (session.usingServerDialback) {
// A session to the same remote server but with different hostname
// was found. Use this session and add the new hostname to the
// session
session.addHostname(hostname);
break;
}
else {
session = null;
}
}
}
}
}
if (session == null) {
int port = RemoteServerManager.getPortForServer(hostname);
// No session was found to the remote server so make sure that only one is created
synchronized (hostname.intern()) {
session = sessionManager.getOutgoingServerSession(hostname);
if (session == null) {
session = createOutgoingSession(domain, hostname, port);
if (session != null) {
// Add the new hostname to the list of names that the server may have
session.addHostname(hostname);
// Add the validated domain as an authenticated domain
session.addAuthenticatedDomain(domain);
// Notify the SessionManager that a new session has been created
sessionManager.outgoingServerSessionCreated(session);
return true;
}
else {
// Ensure that the hostname is not an IP address (i.e. it contains letters)
if (!pattern.matcher(hostname).find()) {
return false;
}
// Check if hostname is a subdomain of an existing outgoing session
for (String otherHost : sessionManager.getOutgoingServers()) {
if (hostname.contains(otherHost)) {
session = sessionManager.getOutgoingServerSession(otherHost);
// Add the new hostname to the found session
session.addHostname(hostname);
return true;
}
}
// Try to establish a connection to candidate hostnames. Iterate on the
// substring after the . and try to establish a connection. If a
// connection is established then the same session will be used for
// sending packets to the "candidate hostname" as well as for the
// requested hostname (i.e. the subdomain of the candidate hostname)
// This trick is useful when remote servers haven't registered DNS entries
// for their subdomains
int index = hostname.indexOf('.');
while (index > -1 && index < hostname.length()) {
String newHostname = hostname.substring(index + 1);
String serverName = XMPPServer.getInstance().getServerInfo()
.getName();
if ("com".equals(newHostname) || "net".equals(newHostname) ||
"org".equals(newHostname) ||
"gov".equals(newHostname) ||
"edu".equals(newHostname) ||
serverName.equals(newHostname)) {
return false;
}
session = createOutgoingSession(domain, newHostname, port);
if (session != null) {
// Add the new hostname to the list of names that the server may have
session.addHostname(hostname);
// Add the validated domain as an authenticated domain
session.addAuthenticatedDomain(domain);
// Notify the SessionManager that a new session has been created
sessionManager.outgoingServerSessionCreated(session);
// Add the new hostname to the found session
session.addHostname(newHostname);
return true;
}
else {
index = hostname.indexOf('.', index + 1);
}
}
return false;
}
}
}
}
// A session already exists. The session was established using server dialback so
// it is possible to do piggybacking to authenticate more domains
if (session.getAuthenticatedDomains().contains(domain)) {
// Do nothing since the domain has already been authenticated
return true;
}
// A session already exists so authenticate the domain using that session
return session.authenticateSubdomain(domain, hostname);
}
catch (Exception e) {
Log.error("Error authenticating domain with remote server: " + hostname, e);
}
return false;
}
/**
* Establishes a new outgoing session to a remote server. If the remote server supports TLS
* and SASL then the new outgoing connection will be secured with TLS and authenticated
* using SASL. However, if TLS or SASL is not supported by the remote server or if an
* error occurred while securing or authenticating the connection using SASL then server
* dialback method will be used.
*
* @param domain the local domain to authenticate with the remote server.
* @param hostname the hostname of the remote server.
* @param port default port to use to establish the connection.
* @return new outgoing session to a remote server.
*/
private static OutgoingServerSession createOutgoingSession(String domain, String hostname,
int port) {
boolean useTLS = JiveGlobals.getBooleanProperty("xmpp.server.tls.enabled", true);
RemoteServerConfiguration configuration = RemoteServerManager.getConfiguration(hostname);
if (configuration != null) {
// TODO Use the specific TLS configuration for this remote server
//useTLS = configuration.isTLSEnabled();
}
if (useTLS) {
// Connect to remote server using TLS + SASL
SocketConnection connection = null;
String realHostname = null;
int realPort = port;
Socket socket = new Socket();
try {
Log.debug("OS - Trying to connect to " + hostname + ":" + port);
// Get the real hostname to connect to using DNS lookup of the specified hostname
DNSUtil.HostAddress address = DNSUtil.resolveXMPPServerDomain(hostname, port);
realHostname = address.getHost();
realPort = address.getPort();
// Establish a TCP connection to the Receiving Server
socket.connect(new InetSocketAddress(realHostname, realPort),
RemoteServerManager.getSocketTimeout());
Log.debug("OS - Plain connection to " + hostname + ":" + port + " successful");
}
catch (Exception e) {
Log.error("Error trying to connect to remote server: " + hostname +
"(DNS lookup: " + realHostname + ":" + realPort + ")", e);
return null;
}
try {
connection =
new SocketConnection(XMPPServer.getInstance().getPacketDeliverer(), socket,
false);
// Send the stream header
StringBuilder openingStream = new StringBuilder();
openingStream.append("<stream:stream");
openingStream.append(" xmlns:stream=\"http://etherx.jabber.org/streams\"");
openingStream.append(" xmlns=\"jabber:server\"");
openingStream.append(" to=\"").append(hostname).append("\"");
openingStream.append(" version=\"1.0\">");
connection.deliverRawText(openingStream.toString());
XMPPPacketReader reader = new XMPPPacketReader();
reader.getXPPParser().setInput(new InputStreamReader(socket.getInputStream(),
CHARSET));
// Get the answer from the Receiving Server
XmlPullParser xpp = reader.getXPPParser();
for (int eventType = xpp.getEventType(); eventType != XmlPullParser.START_TAG;) {
eventType = xpp.next();
}
String serverVersion = xpp.getAttributeValue("", "version");
// Check if the remote server is XMPP 1.0 compliant
if (serverVersion != null && decodeVersion(serverVersion)[0] >= 1) {
// Get the stream features
Element features = reader.parseDocument().getRootElement();
// Check if TLS is enabled
if (features != null && features.element("starttls") != null) {
// Secure the connection with TLS and authenticate using SASL
OutgoingServerSession answer;
answer = secureAndAuthenticate(hostname, connection, reader, openingStream,
domain);
if (answer != null) {
// Everything went fine so return the secured and
// authenticated connection
return answer;
}
}
else {
Log.debug("OS - Error, <starttls> was not received");
}
}
// Something went wrong so close the connection and try server dialback over
// a plain connection
if (connection != null) {
connection.close();
}
}
catch (SSLHandshakeException e) {
Log.debug("Handshake error while creating secured outgoing session to remote " +
"server: " + hostname + "(DNS lookup: " + realHostname + ":" + realPort +
")", e);
// Close the connection
if (connection != null) {
connection.close();
}
}
catch (XmlPullParserException e) {
Log.warn("Error creating secured outgoing session to remote server: " + hostname +
"(DNS lookup: " + realHostname + ":" + realPort + ")", e);
// Close the connection
if (connection != null) {
connection.close();
}
}
catch (Exception e) {
Log.error("Error creating secured outgoing session to remote server: " + hostname +
"(DNS lookup: " + realHostname + ":" + realPort + ")", e);
// Close the connection
if (connection != null) {
connection.close();
}
}
}
if (ServerDialback.isEnabled()) {
Log.debug("OS - Going to try connecting using server dialback");
// Use server dialback over a plain connection
return new ServerDialback().createOutgoingSession(domain, hostname, port);
}
return null;
}
private static OutgoingServerSession secureAndAuthenticate(String hostname,
SocketConnection connection, XMPPPacketReader reader, StringBuilder openingStream,
String domain) throws Exception {
Element features;
Log.debug("OS - Indicating we want TLS to " + hostname);
connection.deliverRawText("<starttls xmlns='urn:ietf:params:xml:ns:xmpp-tls'/>");
MXParser xpp = (MXParser) reader.getXPPParser();
// Wait for the <proceed> response
Element proceed = reader.parseDocument().getRootElement();
if (proceed != null && proceed.getName().equals("proceed")) {
Log.debug("OS - Negotiating TLS with " + hostname);
connection.startTLS(true, hostname);
Log.debug("OS - TLS negotiation with " + hostname + " was successful");
// TLS negotiation was successful so initiate a new stream
connection.deliverRawText(openingStream.toString());
// Reset the parser to use the new secured reader
xpp.setInput(new InputStreamReader(connection.getTLSStreamHandler().getInputStream(),
CHARSET));
// Skip new stream element
for (int eventType = xpp.getEventType(); eventType != XmlPullParser.START_TAG;) {
eventType = xpp.next();
}
// Get new stream features
features = reader.parseDocument().getRootElement();
if (features != null && features.element("mechanisms") != null) {
// Check if we can use stream compression
String policyName = JiveGlobals.getProperty("xmpp.server.compression.policy",
Connection.CompressionPolicy.disabled.toString());
Connection.CompressionPolicy compressionPolicy =
Connection.CompressionPolicy.valueOf(policyName);
if (Connection.CompressionPolicy.optional == compressionPolicy) {
// Verify if the remote server supports stream compression
Element compression = features.element("compression");
if (compression != null) {
boolean zlibSupported = false;
Iterator it = compression.elementIterator("method");
while (it.hasNext()) {
Element method = (Element) it.next();
if ("zlib".equals(method.getTextTrim())) {
zlibSupported = true;
}
}
if (zlibSupported) {
// Request Stream Compression
connection.deliverRawText("<compress xmlns='http://jabber.org/protocol/compress'><method>zlib</method></compress>");
// Check if we are good to start compression
Element answer = reader.parseDocument().getRootElement();
if ("compressed".equals(answer.getName())) {
// Server confirmed that we can use zlib compression
connection.startCompression();
Log.debug("OS - Stream compression was successful with " + hostname);
// Stream compression was successful so initiate a new stream
connection.deliverRawText(openingStream.toString());
// Reset the parser to use stream compression over TLS
ZInputStream in = new ZInputStream(
connection.getTLSStreamHandler().getInputStream());
in.setFlushMode(JZlib.Z_PARTIAL_FLUSH);
xpp.setInput(new InputStreamReader(in, CHARSET));
// Skip the opening stream sent by the server
for (int eventType = xpp.getEventType(); eventType != XmlPullParser.START_TAG;)
{
eventType = xpp.next();
}
// Get new stream features
features = reader.parseDocument().getRootElement();
if (features == null || features.element("mechanisms") == null) {
Log.debug("OS - Error, EXTERNAL SASL was not offered by " + hostname);
return null;
}
}
else {
Log.debug("OS - Stream compression was rejected by " + hostname);
}
}
else {
Log.debug(
"OS - Stream compression found but zlib method is not supported by" +
hostname);
}
}
else {
Log.debug("OS - Stream compression not supoprted by " + hostname);
}
}
Iterator it = features.element("mechanisms").elementIterator();
while (it.hasNext()) {
Element mechanism = (Element) it.next();
if ("EXTERNAL".equals(mechanism.getTextTrim())) {
Log.debug("OS - Starting EXTERNAL SASL with " + hostname);
if (doExternalAuthentication(domain, connection, reader)) {
Log.debug("OS - EXTERNAL SASL with " + hostname + " was successful");
// SASL was successful so initiate a new stream
connection.deliverRawText(openingStream.toString());
// Reset the parser
xpp.resetInput();
// Skip the opening stream sent by the server
for (int eventType = xpp.getEventType();
eventType != XmlPullParser.START_TAG;) {
eventType = xpp.next();
}
// SASL authentication was successful so create new
// OutgoingServerSession
String id = xpp.getAttributeValue("", "id");
StreamID streamID = new BasicStreamIDFactory().createStreamID(id);
OutgoingServerSession session = new OutgoingServerSession(domain,
connection, new OutgoingServerSocketReader(reader), streamID);
connection.init(session);
// Set the hostname as the address of the session
session.setAddress(new JID(null, hostname, null));
// Set that the session was created using TLS+SASL (no server dialback)
session.usingServerDialback = false;
return session;
}
else {
Log.debug("OS - Error, EXTERNAL SASL authentication with " + hostname +
" failed");
return null;
}
}
}
Log.debug("OS - Error, EXTERNAL SASL was not offered by " + hostname);
}
else {
Log.debug("OS - Error, no SASL mechanisms were offered by " + hostname);
}
}
else {
Log.debug("OS - Error, <proceed> was not received");
}
return null;
}
private static boolean doExternalAuthentication(String domain, SocketConnection connection,
XMPPPacketReader reader) throws DocumentException, IOException, XmlPullParserException {
StringBuilder sb = new StringBuilder();
sb.append("<auth xmlns=\"urn:ietf:params:xml:ns:xmpp-sasl\" mechanism=\"EXTERNAL\">");
sb.append(StringUtils.encodeBase64(domain));
sb.append("</auth>");
connection.deliverRawText(sb.toString());
Element response = reader.parseDocument().getRootElement();
if (response != null && "success".equals(response.getName())) {
return true;
}
return false;
}
OutgoingServerSession(String serverName, Connection connection,
OutgoingServerSocketReader socketReader, StreamID streamID) {
super(serverName, connection, streamID);
this.socketReader = socketReader;
socketReader.setSession(this);
}
public void process(Packet packet) throws UnauthorizedException, PacketException {
try {
String senderDomain = packet.getFrom().getDomain();
if (!getAuthenticatedDomains().contains(senderDomain)) {
synchronized (senderDomain.intern()) {
if (!getAuthenticatedDomains().contains(senderDomain) &&
!authenticateSubdomain(senderDomain, packet.getTo().getDomain())) {
// Return error since sender domain was not validated by remote server
returnErrorToSender(packet);
return;
}
}
}
if (conn != null && !conn.isClosed()) {
conn.deliver(packet);
}
}
catch (Exception e) {
Log.error(LocaleUtils.getLocalizedString("admin.error"), e);
}
}
/**
* Authenticates a subdomain of this server with the specified remote server over an existing
* outgoing connection. If the existing session was using server dialback then a new db:result
* is going to be sent to the remote server. But if the existing session was TLS+SASL based
* then just assume that the subdomain was authenticated by the remote server.
*
* @param domain the local subdomain to authenticate with the remote server.
* @param hostname the hostname of the remote server.
* @return True if the subdomain was authenticated by the remote server.
*/
private boolean authenticateSubdomain(String domain, String hostname) {
if (!usingServerDialback) {
// Using SASL so just assume that the domain was validated
// (note: this may not be correct)
addAuthenticatedDomain(domain);
return true;
}
ServerDialback method = new ServerDialback(getConnection(), domain);
if (method.authenticateDomain(socketReader, domain, hostname, getStreamID().getID())) {
// Add the validated domain as an authenticated domain
addAuthenticatedDomain(domain);
return true;
}
return false;
}
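/**
* Returns a remote-server-not-found error to the original sender of the packet, preserving
* the packet type (IQ, Presence or Message) and swapping the to and from addresses.
*/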
private void returnErrorToSender(Packet packet) {
RoutingTable routingTable = XMPPServer.getInstance().getRoutingTable();
try {
if (packet instanceof IQ) {
IQ reply = new IQ();
reply.setID(((IQ) packet).getID());
reply.setTo(packet.getFrom());
reply.setFrom(packet.getTo());
reply.setChildElement(((IQ) packet).getChildElement().createCopy());
reply.setError(PacketError.Condition.remote_server_not_found);
ChannelHandler route = routingTable.getRoute(reply.getTo());
if (route != null) {
route.process(reply);
}
}
else if (packet instanceof Presence) {
Presence reply = new Presence();
reply.setID(packet.getID());
reply.setTo(packet.getFrom());
reply.setFrom(packet.getTo());
reply.setError(PacketError.Condition.remote_server_not_found);
ChannelHandler route = routingTable.getRoute(reply.getTo());
if (route != null) {
route.process(reply);
}
}
else if (packet instanceof Message) {
Message reply = new Message();
reply.setID(packet.getID());
reply.setTo(packet.getFrom());
reply.setFrom(packet.getTo());
reply.setType(((Message)packet).getType());
reply.setThread(((Message)packet).getThread());
reply.setError(PacketError.Condition.remote_server_not_found);
ChannelHandler route = routingTable.getRoute(reply.getTo());
if (route != null) {
route.process(reply);
}
}
}
catch (UnauthorizedException e) {
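// Ignore: the error reply could not be routed back to the sender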
}
catch (Exception e) {
Log.warn("Error returning error to sender. Original packet: " + packet, e);
}
}
/**
* Returns a collection with all the domains, subdomains and virtual hosts that were
* authenticated. The remote server will accept packets sent from any of these domains,
* subdomains and virtual hosts.
*
* @return domains, subdomains and virtual hosts that were validated.
*/
public Collection<String> getAuthenticatedDomains() {
return Collections.unmodifiableCollection(authenticatedDomains);
}
/**
* Adds a new authenticated domain, subdomain or virtual host to the list of
* authenticated domains for the remote server. The remote server will accept packets
* sent from this new authenticated domain.
*
* @param domain the new authenticated domain, subdomain or virtual host to add.
*/
public void addAuthenticatedDomain(String domain) {
authenticatedDomains.add(domain);
}
/**
* Removes an authenticated domain from the list of authenticated domains. The remote
* server will no longer be able to accept packets sent from the removed domain, subdomain or
* virtual host.
*
* @param domain the domain, subdomain or virtual host to remove from the list of
* authenticated domains.
*/
public void removeAuthenticatedDomain(String domain) {
authenticatedDomains.remove(domain);
}
/**
* Returns the list of hostnames related to the remote server. This tracking is useful for
* reusing the same session for the same remote server even if the server has many names.
*
* @return the list of hostnames related to the remote server.
*/
public Collection<String> getHostnames() {
return Collections.unmodifiableCollection(hostnames);
}
/**
* Adds a new hostname to the list of known hostnames of the remote server. This tracking is
* useful for reusing the same session for the same remote server even if the server has
* many names.
*
* @param hostname the new known name of the remote server
*/
private void addHostname(String hostname) {
if (hostnames.add(hostname)) {
// Register the outgoing session in the SessionManager. If the session
// was already registered nothing happens
sessionManager.registerOutgoingServerSession(hostname, this);
// Add a new route for this new session
XMPPServer.getInstance().getRoutingTable().addRoute(new JID(hostname), this);
}
}
public String getAvailableStreamFeatures() {
// Nothing special to add
return null;
}
}
|
package com.intellij.psi.impl.source.xml;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.xml.*;
import com.intellij.util.IncorrectOperationException;
import java.util.ArrayList;
import java.util.List;
public class XmlTagValueImpl implements XmlTagValue{
private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.xml.XmlTagValueImpl");
private final XmlTag myTag;
private final XmlTagChild[] myElements;
private XmlText[] myTextElements = null;
private String myText = null;
private String myTrimmedText = null;
public XmlTagValueImpl(XmlTagChild[] bodyElements, XmlTag tag) {
myTag = tag;
myElements = bodyElements;
}
public XmlTagChild[] getChildren() {
return myElements;
}
public XmlText[] getTextElements() {
if(myTextElements != null) return myTextElements;
final List<XmlText> textElements = new ArrayList<XmlText>();
for (int i = 0; i < myElements.length; i++) {
final XmlTagChild element = myElements[i];
if(element instanceof XmlText) textElements.add((XmlText)element);
}
return myTextElements = textElements.toArray(new XmlText[textElements.size()]);
}
public String getText() {
if(myText != null) return myText;
final StringBuffer consolidatedText = new StringBuffer();
for (int i = 0; i < myElements.length; i++) {
final XmlTagChild element = myElements[i];
consolidatedText.append(element.getText());
}
// cache the consolidated text, as the other getters do
return myText = consolidatedText.toString();
}
public TextRange getTextRange() {
if(myElements.length == 0){
final ASTNode child = XmlChildRole.START_TAG_END_FINDER.findChild( (ASTNode)myTag);
if(child != null)
return new TextRange(child.getStartOffset() + 1, child.getStartOffset() + 1);
return new TextRange(myTag.getTextRange().getEndOffset(), myTag.getTextRange().getEndOffset());
}
return new TextRange(myElements[0].getTextRange().getStartOffset(), myElements[myElements.length - 1].getTextRange().getEndOffset());
}
public String getTrimmedText() {
if(myTrimmedText != null) return myTrimmedText;
final StringBuffer consolidatedText = new StringBuffer();
final XmlText[] textElements = getTextElements();
for (int i = 0; i < textElements.length; i++) {
final XmlText textElement = textElements[i];
consolidatedText.append(textElement.getValue());
}
return myTrimmedText = consolidatedText.toString().trim();
}
public void setText(String value) {
try {
if(myElements.length > 0){
myTag.deleteChildRange(myElements[0], myElements[myElements.length - 1]);
}
if(value != null && value.length() > 0) {
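// Insert a placeholder text node first, then set the real content through setValue()
// on the attached XmlText.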
XmlText displayText = myTag.getManager().getElementFactory().createDisplayText("x");
displayText = (XmlText)myTag.add(displayText);
displayText.setValue(value);
}
}
catch (IncorrectOperationException e) {
LOG.error(e);
}
}
}
|
package org.orbeon.oxf.xforms.submission;
import org.apache.http.entity.mime.MultipartEntity;
import org.apache.log4j.Logger;
import org.dom4j.*;
import org.dom4j.io.DocumentSource;
import org.orbeon.oxf.common.OXFException;
import org.orbeon.oxf.pipeline.api.ExternalContext;
import org.orbeon.oxf.util.*;
import org.orbeon.oxf.xforms.*;
import org.orbeon.oxf.xforms.analysis.model.Instance;
import org.orbeon.oxf.xforms.event.*;
import org.orbeon.oxf.xforms.event.events.*;
import org.orbeon.oxf.xforms.function.XFormsFunction;
import org.orbeon.oxf.xforms.xbl.Scope;
import org.orbeon.oxf.xforms.xbl.XBLContainer;
import org.orbeon.oxf.xml.NamespaceMapping;
import org.orbeon.oxf.xml.TransformerUtils;
import org.orbeon.oxf.xml.XMLConstants;
import org.orbeon.oxf.xml.XMLUtils;
import org.orbeon.oxf.xml.dom4j.Dom4jUtils;
import org.orbeon.oxf.xml.dom4j.LocationData;
import org.orbeon.saxon.functions.FunctionLibrary;
import org.orbeon.saxon.om.DocumentInfo;
import org.orbeon.saxon.om.Item;
import org.orbeon.saxon.om.NodeInfo;
import org.orbeon.saxon.om.VirtualNode;
import scala.collection.immutable.Seq;
import javax.xml.transform.Transformer;
import javax.xml.transform.stream.StreamResult;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.Callable;
/**
* Represents an XForms model submission instance.
*
* TODO: Refactor handling of serialization to separate classes.
*/
public class XFormsModelSubmission implements XFormsEventTarget, XFormsEventObserver {
public static final String LOGGING_CATEGORY = "submission";
public final static Logger logger = LoggerFactory.createLogger(XFormsModelSubmission.class);
private final org.orbeon.oxf.xforms.analysis.model.Submission staticSubmission;
private final String id;
private final Element submissionElement;
private final XBLContainer container;
private final XFormsContainingDocument containingDocument;
private final XFormsModel model;
private boolean submissionElementExtracted = false;
private String avtActionOrResource; // required unless there is a nested xf:resource element;
private String avtMethod; // required
private String avtValidate;
private String avtRelevant;
private String avtXXFormsCalculate;
private String avtXXFormsUploads;
private String avtSerialization;
private String targetref;// this is an XPath expression when used with replace="instance|text" (other meaning possible post-XForms 1.1 for replace="all")
private String avtMode;
private String avtVersion;
private String avtEncoding;
private String avtMediatype;
private String avtIndent;
private String avtOmitxmldeclaration;
private String avtStandalone;
// private String cdatasectionelements;
private String replace = XFormsConstants.XFORMS_SUBMIT_REPLACE_ALL;
private String replaceInstanceId;
private String xxfReplaceInstanceId;
private String avtSeparator = "&";// XForms 1.1 changes back the default to the ampersand as of February 2009
// private String includenamespaceprefixes;
private String avtXXFormsUsername;
private String avtXXFormsPassword;
private String avtXXFormsPreemptiveAuthentication;
private String avtXXFormsDomain;
private String avtXXFormsReadonly;
private String avtXXFormsShared;
private String avtXXFormsCache;
private String avtXXFormsTarget;
private String resolvedXXFormsTarget;
private String avtXXFormsHandleXInclude;
private boolean xxfShowProgress;
private boolean fURLNorewrite;
private String urlType;
// All the submission types in the order they must be checked
private final Submission[] submissions;
public XFormsModelSubmission(XBLContainer container, org.orbeon.oxf.xforms.analysis.model.Submission staticSubmission, XFormsModel model) {
this.staticSubmission = staticSubmission;
this.id = staticSubmission.staticId();
this.submissionElement = staticSubmission.element();
this.container = container;
this.containingDocument = container.getContainingDocument();
this.model = model;
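// The first entry whose isMatch() returns true handles the submission (see doSubmit())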
this.submissions = new Submission[] {
new EchoSubmission(this),
new ClientGetAllSubmission(this),
new FilterPortletSubmission(this),
new CacheableSubmission(this),
new LocalPortletSubmission(this),
new RequestDispatcherSubmission(this),
new RegularSubmission(this)
};
}
public XFormsContainingDocument containingDocument() {
return containingDocument;
}
public Element getSubmissionElement() {
return submissionElement;
}
public boolean isShowProgress() {
return xxfShowProgress;
}
public boolean isURLNorewrite() {
return fURLNorewrite;
}
public String getUrlType() {
return urlType;
}
public String getReplace() {
return replace;
}
public String getTargetref() {
return targetref;
}
public String getResolvedXXFormsTarget() {
return resolvedXXFormsTarget;
}
private void extractSubmissionElement() {
if (!submissionElementExtracted) {
avtActionOrResource = submissionElement.attributeValue(XFormsConstants.RESOURCE_QNAME);
if (avtActionOrResource == null) // @resource has precedence over @action
avtActionOrResource = submissionElement.attributeValue("action");
if (avtActionOrResource == null) {
// TODO: support XForms 1.1 nested xf:resource element
throw new XFormsSubmissionException(this, "xf:submission: action attribute or resource attribute is missing.",
"processing xf:submission attributes");
}
avtMethod = submissionElement.attributeValue("method");
if (avtMethod == null) {
// TODO: support XForms 1.1 nested xf:method element
throw new XFormsSubmissionException(this, "xf:submission: method attribute is missing.",
"processing xf:submission attributes");
}
avtValidate = submissionElement.attributeValue("validate");
avtRelevant = submissionElement.attributeValue("relevant");
avtXXFormsCalculate = submissionElement.attributeValue(XFormsConstants.XXFORMS_CALCULATE_QNAME);
avtXXFormsUploads = submissionElement.attributeValue(XFormsConstants.XXFORMS_UPLOADS_QNAME);
avtSerialization = submissionElement.attributeValue("serialization");
// @targetref is the new name as of May 2009, and @target is still supported for backward compatibility
targetref = submissionElement.attributeValue("targetref");
if (targetref == null)
targetref = submissionElement.attributeValue(XFormsConstants.TARGET_QNAME);
avtMode = submissionElement.attributeValue("mode");
avtVersion = submissionElement.attributeValue("version");
avtIndent = submissionElement.attributeValue("indent");
avtMediatype = submissionElement.attributeValue("mediatype");
avtEncoding = submissionElement.attributeValue("encoding");
avtOmitxmldeclaration = submissionElement.attributeValue("omit-xml-declaration");
avtStandalone = submissionElement.attributeValue("standalone");
// TODO
// cdatasectionelements = submissionElement.attributeValue("cdata-section-elements");
if (submissionElement.attributeValue("replace") != null) {
replace = submissionElement.attributeValue("replace");
if (replace.equals("instance")) {
replaceInstanceId = submissionElement.attributeValue("instance");
xxfReplaceInstanceId = submissionElement.attributeValue(XFormsConstants.XXFORMS_INSTANCE_QNAME);
}
}
if (submissionElement.attributeValue("separator") != null) {
avtSeparator = submissionElement.attributeValue("separator");
}
// TODO
// includenamespaceprefixes = submissionElement.attributeValue("includenamespaceprefixes");
// Extension attributes
avtXXFormsUsername = submissionElement.attributeValue(XFormsConstants.XXFORMS_USERNAME_QNAME);
avtXXFormsPassword = submissionElement.attributeValue(XFormsConstants.XXFORMS_PASSWORD_QNAME);
avtXXFormsPreemptiveAuthentication = submissionElement.attributeValue(XFormsConstants.XXFORMS_PREEMPTIVE_AUTHENTICATION_QNAME);
avtXXFormsDomain = submissionElement.attributeValue(XFormsConstants.XXFORMS_DOMAIN_QNAME);
avtXXFormsReadonly = submissionElement.attributeValue(XFormsConstants.XXFORMS_READONLY_ATTRIBUTE_QNAME);
avtXXFormsShared = submissionElement.attributeValue(XFormsConstants.XXFORMS_SHARED_QNAME);
avtXXFormsCache = submissionElement.attributeValue(XFormsConstants.XXFORMS_CACHE_QNAME);
avtXXFormsTarget = submissionElement.attributeValue(XFormsConstants.XXFORMS_TARGET_QNAME);
avtXXFormsHandleXInclude = submissionElement.attributeValue(XFormsConstants.XXFORMS_XINCLUDE);
// Whether we must show progress or not
xxfShowProgress = !"false".equals(submissionElement.attributeValue(XFormsConstants.XXFORMS_SHOW_PROGRESS_QNAME));
// Whether or not to rewrite URLs
fURLNorewrite = XFormsUtils.resolveUrlNorewrite(submissionElement);
// URL type
urlType = submissionElement.attributeValue(XMLConstants.FORMATTING_URL_TYPE_QNAME);
// Remember that we did this
submissionElementExtracted = true;
}
}
public String getId() {
return id;
}
public String getPrefixedId() {
return XFormsUtils.getPrefixedId(getEffectiveId());
}
public Scope scope() {
return staticSubmission.scope();
}
public String getEffectiveId() {
return XFormsUtils.getRelatedEffectiveId(model.getEffectiveId(), getId());
}
public XBLContainer container() {
return getModel().container();
}
public LocationData getLocationData() {
return (LocationData) submissionElement.getData();
}
public XFormsEventObserver parentEventObserver() {
return model;
}
public XFormsModel getModel() {
return model;
}
public void performDefaultAction(XFormsEvent event) {
final String eventName = event.name();
if (XFormsEvents.XFORMS_SUBMIT.equals(eventName) || XFormsEvents.XXFORMS_SUBMIT.equals(eventName)) {
// 11.1 The xforms-submit Event
// Bubbles: Yes / Cancelable: Yes / Context Info: None
doSubmit(event);
} else if (XFormsEvents.XXFORMS_ACTION_ERROR.equals(eventName)) {
final XXFormsActionErrorEvent ev = (XXFormsActionErrorEvent) event;
XFormsError.handleNonFatalActionError(this, ev.throwable());
}
}
private void doSubmit(XFormsEvent event) {
containingDocument.setGotSubmission();
final IndentedLogger indentedLogger = getIndentedLogger();
// Variables declared here as they are used in a catch/finally block
SubmissionParameters p = null;
String resolvedActionOrResource = null;
// Make sure submission element info is extracted
extractSubmissionElement();
Runnable submitDoneOrErrorRunnable = null;
try {
try {
// Big bag of initial runtime parameters
p = new SubmissionParameters(event.name());
if (indentedLogger.isDebugEnabled()) {
final String message = p.isDeferredSubmissionFirstPass ? "submission first pass" : p.isDeferredSubmissionSecondPass ? "submission second pass" : "submission";
indentedLogger.startHandleOperation("", message, "id", getEffectiveId());
}
// If a submission requiring a second pass was already set, then we ignore a subsequent submission but
// issue a warning
{
final XFormsModelSubmission existingSubmission = containingDocument.getClientActiveSubmissionFirstPass();
if (p.isDeferredSubmission && existingSubmission != null) {
indentedLogger.logWarning("", "another submission requiring a second pass already exists",
"existing submission", existingSubmission.getEffectiveId(),
"new submission", this.getEffectiveId());
return;
}
}
// We can do this first, because the check just depends on the controls, instance to submit, and pending
// submissions if any. This does not depend on the actual state of the instance.
if (p.serialize && p.resolvedXXFormsUploads && XFormsSubmissionUtils.hasBoundRelevantPendingUploadControls(containingDocument, p.refInstance)) {
throw new XFormsSubmissionException(this, "xf:submission: instance to submit has at least one pending upload.",
"checking pending uploads",
new XFormsSubmitErrorEvent(XFormsModelSubmission.this, XFormsSubmitErrorEvent.XXFORMS_PENDING_UPLOADS(), null));
}
// "The data model is updated"
final XFormsModel modelForInstance;
if (p.refInstance != null) {
modelForInstance = p.refInstance.model();
if (modelForInstance != null) {
// NOTE: XForms 1.1 says that we should rebuild/recalculate the "model containing this submission".
// Here, we rebuild/recalculate instead the model containing the submission's single-node binding.
// This can be different than the model containing the submission if using e.g. xxf:instance().
// NOTE: XForms 1.1 seems to say this should happen regardless of whether we serialize or not. If
// the instance is not serialized and if no instance data is otherwise used for the submission,
// this seems however unneeded so we optimize out.
if (p.resolvedValidate || p.resolvedRelevant || p.resolvedXXFormsCalculate) {
// Rebuild impacts validation, relevance and calculated values (set by recalculate)
modelForInstance.doRebuild();
}
if (p.resolvedRelevant || p.resolvedXXFormsCalculate) {
// Recalculate impacts relevance and calculated values
modelForInstance.doRecalculate(false);
}
}
} else {
// Case where no instance was found
modelForInstance = null;
}
// Resolve the target AVT because XFormsServer requires it for deferred submission
resolvedXXFormsTarget = XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtXXFormsTarget);
// Deferred submission: end of the first pass
if (p.isDeferredSubmissionFirstPass) {
// Create (but abandon) document to submit here because in case of error, an Ajax response will still be produced
if (p.serialize) {
createDocumentToSubmit(indentedLogger, p.refNodeInfo, p.refInstance, modelForInstance, p.resolvedValidate, p.resolvedRelevant);
}
// When replace="all", we wait for the submission of an XXFormsSubmissionEvent from the client
containingDocument.setActiveSubmissionFirstPass(this);
return;
}
// Compute parameters only needed during second pass
final SecondPassParameters p2 = new SecondPassParameters(p);
resolvedActionOrResource = p2.actionOrResource; // in case of exception
// Get serialization requested from @method and @serialization attributes
final String requestedSerialization = XFormsSubmissionUtils.getRequestedSerialization(p.serialization, p.resolvedMethod);
if (requestedSerialization == null)
throw new XFormsSubmissionException(this, "xf:submission: invalid submission method requested: " + p.resolvedMethod, "serializing instance");
final Document documentToSubmit;
if (p.serialize) {
// Check if a submission requires file upload information
if (requestedSerialization.startsWith("multipart/")) {
// Annotate before re-rooting/pruning
XFormsSubmissionUtils.annotateBoundRelevantUploadControls(containingDocument, p.refInstance);
}
// Create document to submit
documentToSubmit = createDocumentToSubmit(indentedLogger, p.refNodeInfo, p.refInstance, modelForInstance, p.resolvedValidate, p.resolvedRelevant);
} else {
// Don't recreate document
documentToSubmit = null;
}
final String overriddenSerializedData;
if (!p.isDeferredSubmissionSecondPass) {
if (p.serialize) {
// Fire xforms-submit-serialize
// "The event xforms-submit-serialize is dispatched. If the submission-body property of the event
// is changed from the initial value of empty string, then the content of the submission-body
// property string is used as the submission serialization. Otherwise, the submission serialization
// consists of a serialization of the selected instance data according to the rules stated at 11.9
// Submission Options."
final XFormsSubmitSerializeEvent serializeEvent = new XFormsSubmitSerializeEvent(XFormsModelSubmission.this, p.refNodeInfo, requestedSerialization);
Dispatch.dispatchEvent(serializeEvent);
// TODO: rest of submission should happen upon default action of event
overriddenSerializedData = serializeEvent.submissionBodyAsString();
} else {
overriddenSerializedData = null;
}
} else {
// Two reasons: 1. We don't want to modify the document state. 2. This can be called outside of the
// document lock; see XFormsServer.
overriddenSerializedData = null;
}
// Serialize
final SerializationParameters sp = new SerializationParameters(p, p2, requestedSerialization, documentToSubmit, overriddenSerializedData);
// Result information
SubmissionResult submissionResult = null;
// Iterate through submissions and run the first match
for (final Submission submission : submissions) {
if (submission.isMatch(p, p2, sp)) {
if (indentedLogger.isDebugEnabled())
indentedLogger.startHandleOperation("", "connecting", "type", submission.getType());
try {
submissionResult = submission.connect(p, p2, sp);
break;
} finally {
if (indentedLogger.isDebugEnabled())
indentedLogger.endHandleOperation();
}
}
}
// NOTE: handleSubmissionResult() catches Throwable and returns a Runnable
if (submissionResult != null)// submissionResult is null in case the submission is running asynchronously, AND when ???
submitDoneOrErrorRunnable = handleSubmissionResult(p, p2, submissionResult, true); // true because function context might have changed
} catch (final Throwable throwable) {
final SubmissionParameters pVal = p;
final String resolvedActionOrResourceVal = resolvedActionOrResource;
submitDoneOrErrorRunnable = new Runnable() {
public void run() {
if (pVal != null && pVal.isDeferredSubmissionSecondPass && XFormsProperties.isLocalSubmissionForward(containingDocument)) {
// It doesn't serve any purpose here to dispatch an event, so we just propagate the exception
throw new XFormsSubmissionException(XFormsModelSubmission.this, throwable, "Error while processing xf:submission", "processing submission");
} else {
// Any exception will cause an error event to be dispatched
sendSubmitError(resolvedActionOrResourceVal, throwable);
}
}
};
}
} finally {
// Log total time spent in submission
if (p != null && indentedLogger.isDebugEnabled()) {
indentedLogger.endHandleOperation();
}
}
// Execute post-submission code if any
// This typically dispatches xforms-submit-done/xforms-submit-error, or may throw another exception
if (submitDoneOrErrorRunnable != null) {
// We run this outside the catch block above so that a problem occurring while dispatching
// xforms-submit-done does not itself get turned into an xforms-submit-error.
// This will also close the connection result if needed.
submitDoneOrErrorRunnable.run();
}
}
/*
* Process the response of an asynchronous submission.
*/
public void doSubmitReplace(SubmissionResult submissionResult) {
assert submissionResult != null;
// Big bag of initial runtime parameters
final SubmissionParameters p = new SubmissionParameters(null);
final SecondPassParameters p2 = new SecondPassParameters(p);
final Runnable submitDoneRunnable = handleSubmissionResult(p, p2, submissionResult, false);
// Execute submit done runnable if any
if (submitDoneRunnable != null) {
// Do this outside the handleSubmissionResult() catch block so that a problem occurring while
// dispatching does not itself get turned into an xforms-submit-error.
submitDoneRunnable.run();
}
}
private Runnable handleSubmissionResult(SubmissionParameters p, SecondPassParameters p2, final SubmissionResult submissionResult, boolean initializeXPathContext) {
assert p != null;
assert p2 != null;
assert submissionResult != null;
Runnable submitDoneOrErrorRunnable = null;
try {
final IndentedLogger indentedLogger = getIndentedLogger();
if (indentedLogger.isDebugEnabled())
indentedLogger.startHandleOperation("", "handling result");
try {
// Get fresh XPath context if requested
if (initializeXPathContext)
p.initializeXPathContext();
// Process the different types of response
if (submissionResult.getThrowable() != null) {
// Propagate throwable, which might have come from a separate thread
submitDoneOrErrorRunnable = new Runnable() {
public void run() { sendSubmitError(submissionResult.getThrowable(), submissionResult); }
};
} else {
// Replacer provided, perform replacement
assert submissionResult.getReplacer() != null;
submitDoneOrErrorRunnable = submissionResult.getReplacer().replace(submissionResult.getConnectionResult(), p, p2);
}
} finally {
if (indentedLogger.isDebugEnabled())
indentedLogger.endHandleOperation();
}
} catch (final Throwable throwable) {
// Any exception will cause an error event to be dispatched
submitDoneOrErrorRunnable = new Runnable() {
public void run() { sendSubmitError(throwable, submissionResult); }
};
}
// Create wrapping runnable to make sure the submission result is closed
final Runnable finalSubmitDoneOrErrorRunnable = submitDoneOrErrorRunnable;
return new Runnable() {
public void run() {
try {
finalSubmitDoneOrErrorRunnable.run();
} finally {
// Close only after the submission result has run
submissionResult.close();
}
}
};
}
/**
* Run the given submission callable. This must be a callable for a replace="all" submission.
*
* @param callable callable to run
* @param response response to write to if needed
*/
public static void runDeferredSubmission(Callable<SubmissionResult> callable, ExternalContext.Response response) {
// Run submission
try {
final SubmissionResult result = callable.call();
if (result != null) {
// Callable did not do all the work, so complete it here
try {
if (result.getReplacer() != null) {
// Replacer provided, perform replacement
if (result.getReplacer() instanceof AllReplacer)
AllReplacer.replace(result.getConnectionResult(), response);
else
assert result.getReplacer() instanceof NoneReplacer;
} else if (result.getThrowable() != null) {
// Propagate throwable, which might have come from a separate thread
throw new OXFException(result.getThrowable());
} else {
// Should not happen
}
} finally {
result.close();
}
}
} catch (Exception e) {
// Something bad happened
throw new OXFException(e);
}
}
public Runnable sendSubmitDone(final ConnectionResult connectionResult) {
return new Runnable() {
public void run() {
// After a submission, the context might have changed
model.resetAndEvaluateVariables();
Dispatch.dispatchEvent(new XFormsSubmitDoneEvent(XFormsModelSubmission.this, connectionResult));
}
};
}
private void sendSubmitError(Throwable throwable, SubmissionResult submissionResult) {
// After a submission, the context might have changed
model.resetAndEvaluateVariables();
// Try to get error event from exception
XFormsSubmitErrorEvent submitErrorEvent = null;
if (throwable instanceof XFormsSubmissionException) {
final XFormsSubmissionException submissionException = (XFormsSubmissionException) throwable;
submitErrorEvent = submissionException.getXFormsSubmitErrorEvent();
}
// If no event obtained, create default event
if (submitErrorEvent == null) {
submitErrorEvent = new XFormsSubmitErrorEvent(XFormsModelSubmission.this,
XFormsSubmitErrorEvent.XXFORMS_INTERNAL_ERROR(), submissionResult.getConnectionResult());
}
// Dispatch event
submitErrorEvent.logThrowable(throwable);
Dispatch.dispatchEvent(submitErrorEvent);
}
private void sendSubmitError(String resolvedActionOrResource, Throwable throwable) {
// After a submission, the context might have changed
model.resetAndEvaluateVariables();
// Try to get error event from exception
XFormsSubmitErrorEvent submitErrorEvent = null;
if (throwable instanceof XFormsSubmissionException) {
final XFormsSubmissionException submissionException = (XFormsSubmissionException) throwable;
submitErrorEvent = submissionException.getXFormsSubmitErrorEvent();
}
// If no event obtained, create default event
if (submitErrorEvent == null) {
submitErrorEvent = new XFormsSubmitErrorEvent(XFormsModelSubmission.this, resolvedActionOrResource,
XFormsSubmitErrorEvent.XXFORMS_INTERNAL_ERROR(), 0);
}
// Dispatch event
submitErrorEvent.logThrowable(throwable);
Dispatch.dispatchEvent(submitErrorEvent);
}
public Replacer getReplacer(ConnectionResult connectionResult, SubmissionParameters p) throws IOException {
// NOTE: This can be called from other threads so it must NOT modify the XFCD or submission
if (connectionResult != null) {
// Handle response
final Replacer replacer;
if (connectionResult.dontHandleResponse) {
// Always return a replacer even if it does nothing, this way we don't have to deal with null
replacer = new NoneReplacer(this, containingDocument);
} else if (NetUtils.isSuccessCode(connectionResult.statusCode)) {
// Successful response
if (connectionResult.hasContent()) {
// There is a body
// Get replacer
if (p.isReplaceAll) {
replacer = new AllReplacer(this, containingDocument);
} else if (p.isReplaceInstance) {
replacer = new InstanceReplacer(this, containingDocument);
} else if (p.isReplaceText) {
replacer = new TextReplacer(this, containingDocument);
} else if (p.isReplaceNone) {
replacer = new NoneReplacer(this, containingDocument);
} else {
throw new XFormsSubmissionException(this, "xf:submission: invalid replace attribute: " + replace, "processing instance replacement",
new XFormsSubmitErrorEvent(this, XFormsSubmitErrorEvent.XXFORMS_INTERNAL_ERROR(), connectionResult));
}
} else {
// There is no body, notify that processing is terminated
if (p.isReplaceInstance || p.isReplaceText) {
// XForms 1.1 says it is fine not to have a body, but in most cases you will want to know that
// no instance replacement took place
final IndentedLogger indentedLogger = getIndentedLogger();
indentedLogger.logWarning("", "instance or text replacement did not take place upon successful response because no body was provided.",
"submission id", getEffectiveId());
}
// "For a success response not including a body, submission processing concludes after dispatching
// xforms-submit-done"
replacer = new NoneReplacer(this, containingDocument);
}
} else if (connectionResult.statusCode == 302 || connectionResult.statusCode == 301) {
// Got a redirect
// Currently we don't know how to handle a redirect for replace != "all"
if (!p.isReplaceAll)
throw new XFormsSubmissionException(this, "xf:submission for submission id: " + id + ", redirect code received with replace=\"" + replace + "\"", "processing submission response",
new XFormsSubmitErrorEvent(this, XFormsSubmitErrorEvent.RESOURCE_ERROR(), connectionResult));
replacer = new RedirectReplacer(this, containingDocument);
} else {
// Error code received
throw new XFormsSubmissionException(this, "xf:submission for submission id: " + id + ", error code received when submitting instance: " + connectionResult.statusCode, "processing submission response",
new XFormsSubmitErrorEvent(this, XFormsSubmitErrorEvent.RESOURCE_ERROR(), connectionResult));
}
return replacer;
} else {
return null;
}
}
public class SubmissionParameters {
// @replace attribute
final boolean isReplaceAll = replace.equals(XFormsConstants.XFORMS_SUBMIT_REPLACE_ALL);
final boolean isReplaceInstance = replace.equals(XFormsConstants.XFORMS_SUBMIT_REPLACE_INSTANCE);
final boolean isReplaceText = replace.equals(XFormsConstants.XFORMS_SUBMIT_REPLACE_TEXT);
final boolean isReplaceNone = replace.equals(XFormsConstants.XFORMS_SUBMIT_REPLACE_NONE);
// Current node for xf:submission and instance containing the node to submit
NodeInfo refNodeInfo;
XFormsInstance refInstance;
Item submissionElementContextItem;
final String resolvedMethod;
final String actualHttpMethod;
final String resolvedMediatype;
final String serialization;
final boolean serialize;
final boolean resolvedValidate;
final boolean resolvedRelevant;
final boolean resolvedXXFormsCalculate;
final boolean resolvedXXFormsUploads;
final boolean isHandlingClientGetAll;
// XPath function library and namespace mappings
final FunctionLibrary functionLibrary = XFormsContainingDocument.getFunctionLibrary();
final NamespaceMapping namespaceMapping = container.getNamespaceMappings(submissionElement);
// XPath context
XPathCache.XPathContext xpathContext;
final boolean isNoscript;
final boolean isAllowDeferredSubmission;
final boolean isPossibleDeferredSubmission;
final boolean isDeferredSubmission;
final boolean isDeferredSubmissionFirstPass;
final boolean isDeferredSubmissionSecondPass;
public void initializeXPathContext() {
final BindingContext bindingContext; {
model.resetAndEvaluateVariables();
final XFormsContextStack contextStack = model.getContextStack();
contextStack.pushBinding(getSubmissionElement(), getEffectiveId(), model.getResolutionScope());
bindingContext = contextStack.getCurrentBindingContext();
}
final XFormsFunction.Context functionContext = model.getContextStack().getFunctionContext(getEffectiveId());
refNodeInfo = (NodeInfo) bindingContext.getSingleItem();
submissionElementContextItem = bindingContext.contextItem();
// NOTE: Current instance may be null if the document submitted is not part of an instance
refInstance = bindingContext.instanceOrNull();
xpathContext = new XPathCache.XPathContext(namespaceMapping, bindingContext.getInScopeVariables(), functionLibrary, functionContext, null, getLocationData());
}
public SubmissionParameters(String eventName) {
initializeXPathContext();
// Check that we have a current node and that it is pointing to a document or an element
if (refNodeInfo == null)
throw new XFormsSubmissionException(XFormsModelSubmission.this, "Empty single-node binding on xf:submission for submission id: " + id, "getting submission single-node binding",
new XFormsSubmitErrorEvent(XFormsModelSubmission.this, XFormsSubmitErrorEvent.NO_DATA(), null));
if (!(refNodeInfo instanceof DocumentInfo || refNodeInfo.getNodeKind() == org.w3c.dom.Node.ELEMENT_NODE)) {
throw new XFormsSubmissionException(XFormsModelSubmission.this, "xf:submission: single-node binding must refer to a document node or an element.", "getting submission single-node binding",
new XFormsSubmitErrorEvent(XFormsModelSubmission.this, XFormsSubmitErrorEvent.NO_DATA(), null));
}
{
// Resolved method AVT
final String resolvedMethodQName = XFormsUtils.resolveAttributeValueTemplates(containingDocument, xpathContext, refNodeInfo , avtMethod);
resolvedMethod = Dom4jUtils.qNameToExplodedQName(Dom4jUtils.extractTextValueQName(namespaceMapping.mapping, resolvedMethodQName, true));
// Get actual method based on the method attribute
actualHttpMethod = XFormsSubmissionUtils.getActualHttpMethod(resolvedMethod);
// Get mediatype
resolvedMediatype = XFormsUtils.resolveAttributeValueTemplates(containingDocument, xpathContext, refNodeInfo, avtMediatype);
// Serialization
serialization = XFormsUtils.resolveAttributeValueTemplates(containingDocument, xpathContext, refNodeInfo, avtSerialization);
if (serialization != null) {
serialize = !serialization.equals("none");
} else {
// For backward compatibility only, support @serialize if there is no @serialization attribute (was in early XForms 1.1 draft)
serialize = !"false".equals(submissionElement.attributeValue("serialize"));
}
// Resolve validate and relevant AVTs
final String resolvedValidateString = XFormsUtils.resolveAttributeValueTemplates(containingDocument, xpathContext, refNodeInfo , avtValidate);
// "The default value is "false" if the value of serialization is "none" and "true" otherwise"
resolvedValidate = serialize && !"false".equals(resolvedValidateString);
final String resolvedRelevantString = XFormsUtils.resolveAttributeValueTemplates(containingDocument, xpathContext, refNodeInfo , avtRelevant);
// "The default value is "false" if the value of serialization is "none" and "true" otherwise"
resolvedRelevant = serialize && !"false".equals(resolvedRelevantString);
final String resolvedCalculateString = XFormsUtils.resolveAttributeValueTemplates(containingDocument, xpathContext, refNodeInfo , avtXXFormsCalculate);
resolvedXXFormsCalculate = serialize && !"false".equals(resolvedCalculateString);
final String resolvedUploadsString = XFormsUtils.resolveAttributeValueTemplates(containingDocument, xpathContext, refNodeInfo , avtXXFormsUploads);
resolvedXXFormsUploads = serialize && !"false".equals(resolvedUploadsString);
}
isHandlingClientGetAll = XFormsProperties.isOptimizeGetAllSubmission(containingDocument) && actualHttpMethod.equals("GET")
&& isReplaceAll
&& (resolvedMediatype == null || !resolvedMediatype.startsWith(NetUtils.APPLICATION_SOAP_XML)) // can't let SOAP requests be handled by the browser
&& avtXXFormsUsername == null // can't optimize if there are authentication credentials
&& avtXXFormsTarget == null // can't optimize if there is a target
&& Dom4jUtils.elements(getSubmissionElement(), XFormsConstants.XFORMS_HEADER_QNAME).size() == 0; // can't optimize if there are headers specified
// In noscript mode, or in "Ajax portlet" mode, there is no deferred submission process
// Also don't allow deferred submissions when the incoming method is a GET. This is an indirect way of
// allowing things like using the XForms engine to generate a PDF with an HTTP GET.
// NOTE: Method can be null e.g. in a portlet render request
final String method = NetUtils.getExternalContext().getRequest().getMethod();
isNoscript = containingDocument.getStaticState().isNoscript();
isAllowDeferredSubmission = !isNoscript && !(method != null && method.equals("GET"));
isPossibleDeferredSubmission = isReplaceAll && !isHandlingClientGetAll && !containingDocument.isInitializing();
isDeferredSubmission = isAllowDeferredSubmission && isPossibleDeferredSubmission;
isDeferredSubmissionFirstPass = isDeferredSubmission && XFormsEvents.XFORMS_SUBMIT.equals(eventName);
isDeferredSubmissionSecondPass = isDeferredSubmission && !isDeferredSubmissionFirstPass; // here we get XXFORMS_SUBMIT
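// Summary (illustrative, not normative): for a deferred replace="all" submission, the xforms-submit event
// only runs the first pass (the document to submit is created and abandoned, and this submission is
// registered as the active submission); the client then sends xxforms-submit, which runs the second pass
// and performs the actual connection. Non-deferred submissions run start to finish in a single call.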
}
}
public class SecondPassParameters {
// This mostly consists of AVTs that can be evaluated only during the second pass of the submission
final String actionOrResource;
final String mode;
final String version;
final String encoding;
final String separator;
final boolean indent;
final boolean omitxmldeclaration;
final Boolean standalone;
final Connection.Credentials credentials;
final boolean isReadonly;
final boolean isCache;
final long timeToLive;
final boolean isHandleXInclude;
final boolean isAsynchronous;
public SecondPassParameters(SubmissionParameters p) {
{
final String temp = XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtActionOrResource);
if (temp == null) {
// This can be null if, e.g. you have an AVT like resource="{()}"
throw new XFormsSubmissionException(XFormsModelSubmission.this, "xf:submission: mandatory resource or action evaluated to an empty sequence for attribute value: " + avtActionOrResource,
"resolving resource URI");
}
actionOrResource = NetUtils.encodeHRRI(temp, true);
// TODO: see if we can resolve xml:base early to detect absolute URLs early as well
// actionOrResource = XFormsUtils.resolveXMLBase(containingDocument, getSubmissionElement(), NetUtils.encodeHRRI(temp, true)).toString();
}
mode = XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtMode);
version = XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtVersion);
separator = XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtSeparator);
{
final String temp = XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtEncoding);
encoding = (temp != null) ? temp : "UTF-8";
}
{
final String temp = XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtIndent);
indent = Boolean.valueOf(temp);
}
{
final String temp = XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtOmitxmldeclaration);
omitxmldeclaration = Boolean.valueOf(temp);
}
{
final String temp = XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtStandalone);
standalone = (temp != null) ? Boolean.valueOf(temp) : null;
}
final String username = XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtXXFormsUsername);
final String password = XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtXXFormsPassword);
final String preemptiveAuthentication = XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtXXFormsPreemptiveAuthentication);
final String domain = XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtXXFormsDomain);
if (username == null)
credentials = null;
else
credentials = new Connection.Credentials(username, password, preemptiveAuthentication, domain);
{
final String temp = XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtXXFormsReadonly);
isReadonly = (temp != null) ? Boolean.valueOf(temp) : false;
}
if (avtXXFormsCache != null) {
final String temp = XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtXXFormsCache);
// New attribute
isCache = Boolean.valueOf(temp);
} else {
// For backward compatibility
isCache = "application".equals(XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtXXFormsShared));
}
timeToLive = Instance.timeToLiveOrDefault(getSubmissionElement());
// Default is "false" for security reasons
final String tempHandleXInclude = XFormsUtils.resolveAttributeValueTemplates(containingDocument, p.xpathContext, p.refNodeInfo, avtXXFormsHandleXInclude);
isHandleXInclude = Boolean.valueOf(tempHandleXInclude);
// Check read-only and cache hints
if (isCache) {
if (!(p.actualHttpMethod.equals("GET") || p.actualHttpMethod.equals("POST") || p.actualHttpMethod.equals("PUT")))
throw new XFormsSubmissionException(XFormsModelSubmission.this, "xf:submission: xxf:cache=\"true\" or xxf:shared=\"application\" can be set only with method=\"get|post|put\".",
"checking read-only and shared hints");
if (!p.isReplaceInstance)
throw new XFormsSubmissionException(XFormsModelSubmission.this, "xf:submission: xxf:cache=\"true\" or xxf:shared=\"application\" can be set only with replace=\"instance\".",
"checking read-only and shared hints");
} else if (isReadonly) {
if (!p.isReplaceInstance)
throw new XFormsSubmissionException(XFormsModelSubmission.this, "xf:submission: xxf:readonly=\"true\" can be \"true\" only with replace=\"instance\".",
"checking read-only and shared hints");
}
// Get async/sync
// NOTE: XForms 1.1 defaults to asynchronous, but we don't fully support it, so we default to synchronous instead
final boolean isRequestedAsynchronousMode = "asynchronous".equals(mode);
isAsynchronous = !p.isReplaceAll && isRequestedAsynchronousMode;
if (isRequestedAsynchronousMode && p.isReplaceAll) {
// For now we don't support replace="all"
throw new XFormsSubmissionException(XFormsModelSubmission.this, "xf:submission: mode=\"asynchronous\" cannot be \"true\" with replace=\"all\".", "checking asynchronous mode");
}
}
protected SecondPassParameters(SecondPassParameters other, boolean isAsynchronous, boolean isReadonly) {
this.actionOrResource = other.actionOrResource;
this.version = other.version;
this.encoding = other.encoding;
this.separator = other.separator;
this.indent = other.indent;
this.omitxmldeclaration = other.omitxmldeclaration;
this.standalone = other.standalone;
this.credentials = other.credentials;
this.isCache = other.isCache;
this.timeToLive = other.timeToLive;
this.isHandleXInclude = other.isHandleXInclude;
this.mode = isAsynchronous ? "asynchronous" : "synchronous";
this.isAsynchronous = isAsynchronous;
this.isReadonly = isReadonly;
}
public SecondPassParameters amend(boolean isAsynchronous, boolean isReadonly){
return new SecondPassParameters(this, isAsynchronous, isReadonly);
}
}
public class SerializationParameters {
final byte[] messageBody;// TODO: provide option for body to be a stream
final String queryString;
final String actualRequestMediatype;
public SerializationParameters(SubmissionParameters p, SecondPassParameters p2, String requestedSerialization, Document documentToSubmit, String overriddenSerializedData) throws Exception {
if (p.serialize) {
final String defaultMediatypeForSerialization;
if (overriddenSerializedData != null && !overriddenSerializedData.equals("")) {
// Form author set data to serialize
if (Connection.requiresRequestBody(p.actualHttpMethod)) {
queryString = null;
messageBody = overriddenSerializedData.getBytes("UTF-8");
defaultMediatypeForSerialization = "application/xml";
} else {
queryString = URLEncoder.encode(overriddenSerializedData, "UTF-8");
messageBody = null;
defaultMediatypeForSerialization = null;
}
} else if (requestedSerialization.equals("application/x-www-form-urlencoded")) {
// Perform "application/x-www-form-urlencoded" serialization
if (Connection.requiresRequestBody(p.actualHttpMethod)) {
queryString = null;
messageBody = XFormsSubmissionUtils.createWwwFormUrlEncoded(documentToSubmit, p2.separator).getBytes("UTF-8");// the resulting string is already ASCII in fact
defaultMediatypeForSerialization = "application/x-www-form-urlencoded";
} else {
queryString = XFormsSubmissionUtils.createWwwFormUrlEncoded(documentToSubmit, p2.separator);
messageBody = null;
defaultMediatypeForSerialization = null;
}
} else if (requestedSerialization.equals("application/xml")) {
// Serialize XML to a stream of bytes
try {
final Transformer identity = TransformerUtils.getIdentityTransformer();
TransformerUtils.applyOutputProperties(identity,
"xml", p2.version, null, null, p2.encoding, p2.omitxmldeclaration, p2.standalone, p2.indent, 4);
// TODO: use cdata-section-elements
final ByteArrayOutputStream os = new ByteArrayOutputStream();
identity.transform(new DocumentSource(documentToSubmit), new StreamResult(os));
messageBody = os.toByteArray();
} catch (Exception e) {
throw new XFormsSubmissionException(XFormsModelSubmission.this, e, "xf:submission: exception while serializing instance to XML.", "serializing instance");
}
defaultMediatypeForSerialization = "application/xml";
queryString = null;
} else if (requestedSerialization.equals("multipart/related")) {
// TODO
throw new XFormsSubmissionException(XFormsModelSubmission.this, "xf:submission: submission serialization not yet implemented: " + requestedSerialization, "serializing instance");
} else if (requestedSerialization.equals("multipart/form-data")) {
// Build multipart/form-data body
// Create and set body
final MultipartEntity multipartFormData = XFormsSubmissionUtils.createMultipartFormData(documentToSubmit);
final ByteArrayOutputStream os = new ByteArrayOutputStream();
multipartFormData.writeTo(os);
messageBody = os.toByteArray();
queryString = null;
// The mediatype also contains the boundary
defaultMediatypeForSerialization = multipartFormData.getContentType().getValue();
} else if (requestedSerialization.equals("application/octet-stream")) {
// Binary serialization
final QName nodeType = InstanceData.getType(documentToSubmit.getRootElement());
if (XMLConstants.XS_BASE64BINARY_QNAME.equals(nodeType)) {
// TODO
throw new XFormsSubmissionException(XFormsModelSubmission.this, "xf:submission: binary serialization with base64Binary type is not yet implemented.", "serializing instance");
} else {
// Default to anyURI
// TODO: PERFORMANCE: Must pass InputStream all the way to the submission instead of storing into byte[] in memory!
// NOTE: We support a relative path, in which case the path is resolved as a service URL
final String resolvedURI =
XFormsUtils.resolveServiceURL(
containingDocument,
getSubmissionElement(),
documentToSubmit.getRootElement().getStringValue(),
ExternalContext.Response.REWRITE_MODE_ABSOLUTE);
messageBody = SubmissionUtils.readByteArray(model, resolvedURI);
}
defaultMediatypeForSerialization = "application/octet-stream";
queryString = null;
} else if (requestedSerialization.equals("text/html") || requestedSerialization.equals("application/xhtml+xml")) {
// HTML or XHTML serialization
try {
final Transformer identity = TransformerUtils.getIdentityTransformer();
TransformerUtils.applyOutputProperties(identity,
requestedSerialization.equals("text/html") ? "html" : "xhtml", p2.version, null, null,
p2.encoding, p2.omitxmldeclaration, p2.standalone, p2.indent, 4);
// TODO: use cdata-section-elements
final ByteArrayOutputStream os = new ByteArrayOutputStream();
identity.transform(new DocumentSource(documentToSubmit), new StreamResult(os));
messageBody = os.toByteArray();
} catch (Exception e) {
throw new XFormsSubmissionException(XFormsModelSubmission.this, e, "xf:submission: exception while serializing instance to HTML or XHTML.", "serializing instance");
}
defaultMediatypeForSerialization = requestedSerialization;
queryString = null;
} else if (XMLUtils.isTextOrJSONContentType(requestedSerialization)) {
// Text serialization
try {
final Transformer identity = TransformerUtils.getIdentityTransformer();
TransformerUtils.applyOutputProperties(identity,
"text", null, null, null, p2.encoding, true, false, false, 0);
final ByteArrayOutputStream os = new ByteArrayOutputStream();
identity.transform(new DocumentSource(documentToSubmit), new StreamResult(os));
messageBody = os.toByteArray();
} catch (Exception e) {
throw new XFormsSubmissionException(XFormsModelSubmission.this, e, "xf:submission: exception while serializing instance to text.", "serializing instance");
}
defaultMediatypeForSerialization = requestedSerialization;
queryString = null;
} else {
throw new XFormsSubmissionException(XFormsModelSubmission.this, "xf:submission: invalid submission serialization requested: " + requestedSerialization, "serializing instance");
}
// Actual request mediatype: the one specified by @mediatype, or the default mediatype for the serialization otherwise
actualRequestMediatype = (p.resolvedMediatype == null) ? defaultMediatypeForSerialization : p.resolvedMediatype;
} else {
queryString = null;
messageBody = null;
actualRequestMediatype = null;
}
}
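// Illustrative sketch only, not the actual XFormsSubmissionUtils.createWwwFormUrlEncoded() implementation
// used above: it approximates the XForms 1.1 "application/x-www-form-urlencoded" serialization by emitting
// one name=value pair per leaf element, joined by the submission separator. The helper name and traversal
// are assumptions; namespace handling, includenamespaceprefixes and exact document order are ignored.
private String wwwFormUrlEncodedSketch(Document document, String separator) {
try {
final StringBuilder sb = new StringBuilder();
final java.util.LinkedList<Element> queue = new java.util.LinkedList<Element>();
queue.add(document.getRootElement());
while (!queue.isEmpty()) {
final Element current = queue.removeFirst();
final java.util.List<?> children = current.elements();
if (children.isEmpty()) {
// Leaf element: contributes one name=value pair
if (sb.length() > 0)
sb.append(separator);
sb.append(URLEncoder.encode(current.getName(), "UTF-8"));
sb.append('=');
sb.append(URLEncoder.encode(current.getStringValue(), "UTF-8"));
} else {
// Non-leaf element: descend into child elements
for (final Object child : children)
queue.add((Element) child);
}
}
return sb.toString();
} catch (final java.io.UnsupportedEncodingException e) {
throw new OXFException(e);
}
}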
}
public XFormsInstance findReplaceInstanceNoTargetref(XFormsInstance refInstance) {
final XFormsInstance replaceInstance;
if (xxfReplaceInstanceId != null)
replaceInstance = containingDocument.findInstanceOrNull(xxfReplaceInstanceId);
else if (replaceInstanceId != null)
replaceInstance = model.getInstance(replaceInstanceId);
else if (refInstance == null)
replaceInstance = model.getDefaultInstance();
else
replaceInstance = refInstance;
return replaceInstance;
}
public NodeInfo evaluateTargetRef(XPathCache.XPathContext xpathContext,
XFormsInstance defaultReplaceInstance, Item submissionElementContextItem) {
final Object destinationObject;
if (targetref == null) {
// There is no explicit @targetref, so the target is implicitly the root element of either the instance
// pointed to by @ref, or the instance specified by @instance or @xxf:instance.
destinationObject = defaultReplaceInstance.instanceRoot();
} else {
// There is an explicit @targetref, which must be evaluated.
// "The in-scope evaluation context of the submission element is used to evaluate the expression." BUT ALSO "The
// evaluation context for this attribute is the in-scope evaluation context for the submission element, except
// the context node is modified to be the document element of the instance identified by the instance attribute
// if it is specified."
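// Illustrative example with hypothetical attribute values: given
//   <xf:submission ref="instance('data')" instance="results" targetref="result[1]" .../>
// the targetref expression "result[1]" is evaluated with the root element of the "results" instance as
// its context node, because the instance attribute is specified.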
final boolean hasInstanceAttribute = xxfReplaceInstanceId != null || replaceInstanceId != null;
final Item targetRefContextItem = hasInstanceAttribute
? defaultReplaceInstance.instanceRoot() : submissionElementContextItem;
// Evaluate destination node
// "This attribute is evaluated only once a successful submission response has been received and if the replace
// attribute value is "instance" or "text". The first node rule is applied to the result."
destinationObject = XPathCache.evaluateSingle(xpathContext, targetRefContextItem, targetref, containingDocument().getRequestStats().getReporter());
}
// TODO: Also detect readonly node/ancestor situation
if (destinationObject instanceof NodeInfo && ((NodeInfo) destinationObject).getNodeKind() == org.w3c.dom.Node.ELEMENT_NODE)
return (NodeInfo) destinationObject;
else
return null;
}
public void performTargetAction(XFormsEvent event) {
// NOP
}
private Document createDocumentToSubmit(IndentedLogger indentedLogger, NodeInfo currentNodeInfo,
XFormsInstance currentInstance, XFormsModel modelForInstance, boolean resolvedValidate, boolean resolvedRelevant) {
final Document documentToSubmit;
// Revalidate instance
// NOTE: We need to do this before pruning so that bind/@type works correctly. XForms 1.1 seems to say that this
// must be done after pruning, but then it is not clear how XML Schema validation would work.
// Also, if validate="false" or if serialization="none", then we do not revalidate. Now whether this optimization
// is acceptable depends on whether validate="false" only means "don't check the instance's validity" or also
// don't even recalculate. If the latter, then this also means that type annotations won't be updated, which
// can impact serializations that use type information, for example multipart. But in that case, here we decide
// the optimization is worth it anyway.
if (resolvedValidate && modelForInstance != null)
modelForInstance.doRevalidate();
// Get selected nodes (re-root and prune)
documentToSubmit = reRootAndPrune(currentNodeInfo, resolvedRelevant);
// Check that there are no validation errors
// NOTE: If the instance is read-only, it can't have MIPs at the moment, and can't fail validation/requiredness, so we don't go through the process at all.
final boolean instanceSatisfiesValidRequired
= (currentInstance != null && currentInstance.readonly())
|| !resolvedValidate
|| XFormsSubmissionUtils.isSatisfiesValid(indentedLogger, documentToSubmit, true);
if (!instanceSatisfiesValidRequired) {
if (indentedLogger.isDebugEnabled()) {
final String documentString = TransformerUtils.tinyTreeToString(currentNodeInfo);
indentedLogger.logDebug("", "instance document or subset thereof cannot be submitted",
"document", documentString);
}
throw new XFormsSubmissionException(this, "xf:submission: instance to submit does not satisfy valid and/or required model item properties.",
"checking instance validity",
new XFormsSubmitErrorEvent(XFormsModelSubmission.this, XFormsSubmitErrorEvent.VALIDATION_ERROR(), null));
}
return documentToSubmit;
}
private Document reRootAndPrune(final NodeInfo currentNodeInfo, boolean resolvedRelevant) {
final Document documentToSubmit;
if (currentNodeInfo instanceof VirtualNode) {
final Node currentNode = (Node) ((VirtualNode) currentNodeInfo).getUnderlyingNode();
// "A node from the instance data is selected, based on attributes on the submission
// element. The indicated node and all nodes for which it is an ancestor are considered for
// the remainder of the submit process. "
if (currentNode instanceof Element) {
// Create subset of document
documentToSubmit = Dom4jUtils.createDocumentCopyParentNamespaces((Element) currentNode);
} else {
// Use entire instance document
documentToSubmit = Dom4jUtils.createDocumentCopyElement(currentNode.getDocument().getRootElement());
}
if (resolvedRelevant) {
// "Any node which is considered not relevant as defined in 6.1.4 is removed."
final Node[] nodeToDetach = new Node[1];
do {
// NOTE: This is not very efficient, but at least we avoid the NPEs that we would get by
// detaching elements from within accept(). A more efficient algorithm to prune non-relevant
// nodes should be implemented; see the illustrative sketch after this method.
nodeToDetach[0] = null;
documentToSubmit.accept(new VisitorSupport() {
public final void visit(Element element) {
checkInstanceData(element);
}
public final void visit(Attribute attribute) {
checkInstanceData(attribute);
}
private void checkInstanceData(Node node) {
if (nodeToDetach[0] == null) {
// Check "relevant" MIP and remove non-relevant nodes
if (!InstanceData.getInheritedRelevant(node))
nodeToDetach[0] = node;
}
}
});
if (nodeToDetach[0] != null)
nodeToDetach[0].detach();
} while (nodeToDetach[0] != null);
}
// TODO: handle includenamespaceprefixes
} else {
// Submitting read-only instance backed by TinyTree (no MIPs to check)
if (currentNodeInfo.getNodeKind() == org.w3c.dom.Node.ELEMENT_NODE) {
documentToSubmit = TransformerUtils.tinyTreeToDom4j2(currentNodeInfo);
} else {
documentToSubmit = TransformerUtils.tinyTreeToDom4j2(currentNodeInfo.getRoot());
}
}
return documentToSubmit;
}
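// Illustrative sketch only, not the original algorithm: one possible single-pass alternative to the
// repeated accept()/detach() loop in reRootAndPrune() above. It first collects all non-relevant nodes and
// only detaches them once the traversal is over, so the visitor never mutates the tree it is walking.
// The helper name pruneNonRelevantSketch is an assumption; it relies only on dom4j and InstanceData calls
// already used above. Detaching a node whose ancestor was also collected is harmless.
private static void pruneNonRelevantSketch(Document documentToSubmit) {
final java.util.List<Node> nodesToDetach = new java.util.ArrayList<Node>();
documentToSubmit.accept(new VisitorSupport() {
public void visit(Element element) {
checkNode(element);
}
public void visit(Attribute attribute) {
checkNode(attribute);
}
private void checkNode(Node node) {
// Check "relevant" MIP and remember non-relevant nodes for later removal
if (!InstanceData.getInheritedRelevant(node))
nodesToDetach.add(node);
}
});
// Detach after the traversal so the visitor never sees a partially modified tree
for (final Node node : nodesToDetach)
node.detach();
}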
public IndentedLogger getIndentedLogger() {
return containingDocument.getIndentedLogger(XFormsModelSubmission.LOGGING_CATEGORY);
}
public IndentedLogger getDetailsLogger(final XFormsModelSubmission.SubmissionParameters p, final XFormsModelSubmission.SecondPassParameters p2) {
return getNewLogger(p, p2, getIndentedLogger(), isLogDetails());
}
public IndentedLogger getTimingLogger(final XFormsModelSubmission.SubmissionParameters p, final XFormsModelSubmission.SecondPassParameters p2) {
final IndentedLogger indentedLogger = getIndentedLogger();
return getNewLogger(p, p2, indentedLogger, indentedLogger.isDebugEnabled());
}
private static IndentedLogger getNewLogger(final XFormsModelSubmission.SubmissionParameters p, final XFormsModelSubmission.SecondPassParameters p2,
IndentedLogger indentedLogger, boolean newDebugEnabled) {
if (p2.isAsynchronous && !p.isReplaceNone) {
// Background asynchronous submission creates a new logger with its own independent indentation
final IndentedLogger.Indentation newIndentation = new IndentedLogger.Indentation(indentedLogger.getIndentation().indentation);
return new IndentedLogger(indentedLogger, newIndentation, newDebugEnabled);
} else if (indentedLogger.isDebugEnabled() != newDebugEnabled) {
// Keep shared indentation but use new debug setting
return new IndentedLogger(indentedLogger, indentedLogger.getIndentation(), newDebugEnabled);
} else {
// Synchronous submission or foreground asynchronous submission uses current logger
return indentedLogger;
}
}
private static boolean isLogDetails() {
return XFormsProperties.getDebugLogging().contains("submission-details");
}
// Only allow xxforms-submit from client
private static final Set<String> ALLOWED_EXTERNAL_EVENTS = new HashSet<String>();
static {
ALLOWED_EXTERNAL_EVENTS.add(XFormsEvents.XXFORMS_SUBMIT);
}
public boolean allowExternalEvent(String eventName) {
return ALLOWED_EXTERNAL_EVENTS.contains(eventName);
}
public void addListener(String eventName, EventListener listener) {
throw new UnsupportedOperationException();
}
public void removeListener(String eventName, EventListener listener) {
throw new UnsupportedOperationException();
}
public Seq<EventListener> getListeners(String eventName) {
return scala.collection.immutable.List.empty();
}
}
|
package com.intellij.xml.impl.schema;
import com.intellij.codeInsight.daemon.Validator;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiSubstitutor;
import com.intellij.psi.meta.PsiMetaData;
import com.intellij.psi.scope.PsiScopeProcessor;
import com.intellij.psi.scope.util.PsiScopesUtil;
import com.intellij.psi.util.CachedValue;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlDocument;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import com.intellij.xml.XmlElementDescriptor;
import com.intellij.xml.XmlNSDescriptor;
import com.intellij.xml.impl.ExternalDocumentValidator;
import com.intellij.xml.util.XmlUtil;
import java.util.*;
/**
* @author Mike
*/
@SuppressWarnings({"HardCodedStringLiteral"})
public class XmlNSDescriptorImpl implements XmlNSDescriptor,Validator {
private static final Set<String> STD_TYPES = new HashSet<String>();
XmlFile myFile;
private String myTargetNamespace;
public XmlNSDescriptorImpl(XmlFile file) {
init(file.getDocument());
}
public XmlNSDescriptorImpl() {
}
public XmlFile getDescriptorFile() {
return myFile;
}
public boolean isHierarhyEnabled() {
return true;
}
public String getDefaultNamespace(){
return myTargetNamespace != null ? myTargetNamespace : "";
}
private final Map<Pair<String, String>, CachedValue<XmlElementDescriptor>> myDescriptorsMap = new HashMap<Pair<String,String>, CachedValue<XmlElementDescriptor>>();
private final Map<Pair<String, XmlTag>, CachedValue<TypeDescriptor>> myTypesMap = new HashMap<Pair<String,XmlTag>, CachedValue<TypeDescriptor>>();
public XmlElementDescriptor getElementDescriptor(String localName, String namespace) {
return getElementDescriptor(localName, namespace, new HashSet<XmlNSDescriptorImpl>(),false);
}
public XmlElementDescriptor getElementDescriptor(String localName, String namespace, Set<XmlNSDescriptorImpl> visited, boolean reference) {
if(visited.contains(this)) return null;
final Pair<String, String> pair = new Pair<String, String>(namespace, localName);
final CachedValue<XmlElementDescriptor> descriptor = myDescriptorsMap.get(pair);
if(descriptor != null) return descriptor.getValue();
XmlDocument document = myFile.getDocument();
XmlTag rootTag = document.getRootTag();
if (rootTag == null) return null;
XmlTag[] tags = rootTag.getSubTags();
visited.add( this );
for (final XmlTag tag : tags) {
if (equalsToSchemaName(tag, "element")) {
String name = tag.getAttributeValue("name");
if (name != null) {
if (checkElementNameEquivalence(localName, namespace, name, tag)) {
final CachedValue<XmlElementDescriptor> cachedValue =
tag.getManager().getCachedValuesManager().createCachedValue(new CachedValueProvider<XmlElementDescriptor>() {
public Result<XmlElementDescriptor> compute() {
final XmlElementDescriptor xmlElementDescriptor = createElementDescriptor(tag);
return new Result<XmlElementDescriptor>(xmlElementDescriptor, xmlElementDescriptor.getDependences());
}
}, false);
myDescriptorsMap.put(pair, cachedValue);
return cachedValue.getValue();
}
}
}
else if (equalsToSchemaName(tag, "include") ||
(reference &&
equalsToSchemaName(tag, "import") &&
namespace.equals(tag.getAttributeValue("namespace"))
)
) {
final XmlAttribute schemaLocation = tag.getAttribute("schemaLocation", tag.getNamespace());
if (schemaLocation != null) {
final XmlFile xmlFile = XmlUtil.findXmlFile(rootTag.getContainingFile(), schemaLocation.getValue());
if (xmlFile != null) {
final XmlDocument includedDocument = xmlFile.getDocument();
if (includedDocument != null) {
final PsiMetaData data = includedDocument.getMetaData();
if (data instanceof XmlNSDescriptorImpl) {
final XmlElementDescriptor elementDescriptor =
((XmlNSDescriptorImpl)data).getElementDescriptor(localName, namespace, visited, reference);
if (elementDescriptor != null) {
final CachedValue<XmlElementDescriptor> value = includedDocument.getManager().getCachedValuesManager()
.createCachedValue(new CachedValueProvider<XmlElementDescriptor>() {
public Result<XmlElementDescriptor> compute() {
return new Result<XmlElementDescriptor>(elementDescriptor, elementDescriptor.getDependences());
}
}, false);
return value.getValue();
}
}
}
}
}
}
}
return null;
}
protected XmlElementDescriptor createElementDescriptor(final XmlTag tag) {
return new XmlElementDescriptorImpl(tag);
}
private boolean checkElementNameEquivalence(String localName, String namespace, String fqn, XmlTag context){
final String localAttrName = XmlUtil.findLocalNameByQualifiedName(fqn);
if (!localAttrName.equals(localName)) return false;
if(myTargetNamespace == null){
final String attrNamespace = context.getNamespaceByPrefix(XmlUtil.findPrefixByQualifiedName(fqn));
if(attrNamespace.equals(namespace))
return true;
}
else return myTargetNamespace.equals(namespace);
return false;
}
public XmlAttributeDescriptorImpl getAttribute(String localName, String namespace) {
return getAttributeImpl(localName, namespace,null);
}
private XmlAttributeDescriptorImpl getAttributeImpl(String localName, String namespace, Set<XmlTag> visited) {
XmlDocument document = myFile.getDocument();
XmlTag rootTag = document.getRootTag();
if (rootTag == null) return null;
XmlNSDescriptorImpl nsDescriptor = (XmlNSDescriptorImpl)rootTag.getNSDescriptor(namespace, true);
if (nsDescriptor != this && nsDescriptor != null) {
return nsDescriptor.getAttribute(
localName,
namespace
);
}
if (visited == null) visited = new HashSet<XmlTag>(1);
else if(visited.contains(rootTag)) return null;
visited.add(rootTag);
XmlTag[] tags = rootTag.getSubTags();
for (XmlTag tag : tags) {
if (equalsToSchemaName(tag, "attribute")) {
String name = tag.getAttributeValue("name");
if (name != null) {
if (checkElementNameEquivalence(localName, namespace, name, tag)) {
return createAttributeDescriptor(tag);
}
}
}
else if (equalsToSchemaName(tag, "include") ||
(equalsToSchemaName(tag, "import") &&
namespace.equals(tag.getAttributeValue("namespace"))
)
) {
final XmlAttribute schemaLocation = tag.getAttribute("schemaLocation", tag.getNamespace());
if (schemaLocation != null) {
final XmlFile xmlFile = XmlUtil.findXmlFile(rootTag.getContainingFile(), schemaLocation.getValue());
if (xmlFile != null) {
final XmlDocument includedDocument = xmlFile.getDocument();
if (includedDocument != null) {
final PsiMetaData data = includedDocument.getMetaData();
if (data instanceof XmlNSDescriptorImpl) {
final XmlAttributeDescriptorImpl attributeDescriptor = ((XmlNSDescriptorImpl)data).getAttributeImpl(localName, namespace,visited);
if (attributeDescriptor != null) {
final CachedValue<XmlAttributeDescriptorImpl> value =
includedDocument.getManager().getCachedValuesManager().createCachedValue(
new CachedValueProvider<XmlAttributeDescriptorImpl>() {
public Result<XmlAttributeDescriptorImpl> compute() {
return new Result<XmlAttributeDescriptorImpl>(attributeDescriptor, attributeDescriptor.getDependences());
}
},
false
);
return value.getValue();
}
}
}
}
}
}
}
return null;
}
protected XmlAttributeDescriptorImpl createAttributeDescriptor(final XmlTag tag) {
return new XmlAttributeDescriptorImpl(tag);
}
protected TypeDescriptor getTypeDescriptor(XmlTag descriptorTag) {
String type = descriptorTag.getAttributeValue("type");
if (type != null) {
return getTypeDescriptor(type, descriptorTag);
}
return findTypeDescriptor(descriptorTag, null);
}
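// Illustrative note (not from the original sources): when the name's prefix resolves to the XML Schema
// namespace (or the prefix is "xsd") and the name itself is one of the built-in names in STD_TYPES
// (for example "string"), a StdTypeDescriptor is returned directly; otherwise the complexType/simpleType
// declarations of this schema and of its include/import targets are searched via findTypeDescriptor().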
public TypeDescriptor getTypeDescriptor(final String name, XmlTag context) {
if(checkSchemaNamespace(name, context) && STD_TYPES.contains(name)){
return new StdTypeDescriptor(name);
}
final XmlDocument document = myFile.getDocument();
if (document == null) return null;
return findTypeDescriptor(document.getRootTag(), name);
}
public XmlElementDescriptor getDescriptorByType(String qName, XmlTag instanceTag){
final XmlDocument document = myFile.getDocument();
if(document == null) return null;
final XmlTag tag = document.getRootTag();
if(tag == null) return null;
final TypeDescriptor typeDescriptor = findTypeDescriptor(tag, qName);
if(!(typeDescriptor instanceof ComplexTypeDescriptor)) return null;
return new XmlElementDescriptorByType(instanceTag, (ComplexTypeDescriptor)typeDescriptor);
}
private boolean checkSchemaNamespace(String name, XmlTag context){
final String namespace = context.getNamespaceByPrefix(XmlUtil.findPrefixByQualifiedName(name));
if(namespace != null && namespace.length() > 0){
return checkSchemaNamespace(namespace);
}
return "xsd".equals(XmlUtil.findPrefixByQualifiedName(name));
}
private static boolean checkSchemaNamespace(String namespace) {
return XmlUtil.XML_SCHEMA_URI.equals(namespace) ||
XmlUtil.XML_SCHEMA_URI2.equals(namespace) ||
XmlUtil.XML_SCHEMA_URI3.equals(namespace);
}
private static boolean checkSchemaNamespace(XmlTag context){
final String namespace = context.getNamespace();
if(namespace != null && namespace.length() > 0){
return checkSchemaNamespace(namespace);
}
return context.getName().startsWith("xsd:");
}
private static XmlNSDescriptorImpl getNSDescriptorToSearchIn(XmlTag rootTag, final String name, XmlNSDescriptorImpl defaultNSDescriptor) {
if (name == null) return defaultNSDescriptor;
final String namespacePrefix = XmlUtil.findPrefixByQualifiedName(name);
if (namespacePrefix != null && namespacePrefix.length() > 0) {
final String namespace = rootTag.getNamespaceByPrefix(namespacePrefix);
final XmlNSDescriptor nsDescriptor = rootTag.getNSDescriptor(namespace, true);
if (nsDescriptor instanceof XmlNSDescriptorImpl) {
return (XmlNSDescriptorImpl)nsDescriptor;
}
}
return defaultNSDescriptor;
}
protected TypeDescriptor findTypeDescriptor(XmlTag rootTag, final String name) {
return findTypeDescriptorImpl(rootTag, name, null);
}
protected TypeDescriptor findTypeDescriptorImpl(XmlTag rootTag, final String name, Set<XmlTag> visited) {
XmlNSDescriptorImpl nsDescriptor = getNSDescriptorToSearchIn(rootTag, name, this);
if (nsDescriptor != this) {
return nsDescriptor.findTypeDescriptor(
nsDescriptor.getDescriptorFile().getDocument().getRootTag(),
XmlUtil.findLocalNameByQualifiedName(name)
);
}
final Pair<String, XmlTag> pair = new Pair<String, XmlTag>(name, rootTag);
final CachedValue<TypeDescriptor> descriptor = myTypesMap.get(pair);
if(descriptor != null) return descriptor.getValue();
if (rootTag == null) return null;
XmlTag[] tags = rootTag.getSubTags();
if (visited == null) visited = new HashSet<XmlTag>(1);
else if (visited.contains(rootTag)) return null;
visited.add(rootTag);
for (final XmlTag tag : tags) {
if (equalsToSchemaName(tag, "complexType")) {
if (name == null) {
CachedValue<TypeDescriptor> value = createAndPutTypesCachedValue(tag, pair);
return value.getValue();
}
String nameAttribute = tag.getAttributeValue("name");
if (nameAttribute != null) {
if (nameAttribute.equals(name)
|| (name.indexOf(":") >= 0 && nameAttribute.equals(name.substring(name.indexOf(":") + 1)))
) {
CachedValue<TypeDescriptor> cachedValue = createAndPutTypesCachedValue(tag, pair);
return cachedValue.getValue();
}
}
}
else if (equalsToSchemaName(tag, "simpleType")) {
if (name == null) {
CachedValue<TypeDescriptor> value = createAndPutTypesCachedValueSimpleType(tag, pair);
return value.getValue();
}
String nameAttribute = tag.getAttributeValue("name");
if (name.equals(nameAttribute)
|| name.indexOf(":") >= 0 && name.substring(name.indexOf(":") + 1).equals(nameAttribute)
) {
CachedValue<TypeDescriptor> cachedValue = createAndPutTypesCachedValue(tag, pair);
return cachedValue.getValue();
}
}
else if (equalsToSchemaName(tag, "include") ||
(equalsToSchemaName(tag, "import") &&
rootTag.getNamespaceByPrefix(
XmlUtil.findPrefixByQualifiedName(name)
).equals(tag.getAttributeValue("namespace"))
)
) {
final String schemaLocation = tag.getAttributeValue("schemaLocation");
if (schemaLocation != null) {
final XmlFile xmlFile = XmlUtil.findXmlFile(rootTag.getContainingFile(), schemaLocation);
if (xmlFile != null) {
final XmlDocument document = xmlFile.getDocument();
if (document != null) {
final XmlTag rTag = document.getRootTag();
if ("import".equals(tag.getLocalName())) {
final XmlNSDescriptor importedDescriptor = (XmlNSDescriptor)document.getMetaData();
nsDescriptor = (importedDescriptor instanceof XmlNSDescriptorImpl) ?
(XmlNSDescriptorImpl)importedDescriptor :
this;
}
else {
nsDescriptor = this;
}
final Set<XmlTag> visited1 = visited;
final XmlNSDescriptorImpl nsDescriptor1 = nsDescriptor;
final CachedValue<TypeDescriptor> value =
tag.getManager().getCachedValuesManager().createCachedValue(new CachedValueProvider<TypeDescriptor>() {
public Result<TypeDescriptor> compute() {
final TypeDescriptor complexTypeDescriptor =
(nsDescriptor1 != XmlNSDescriptorImpl.this)?
nsDescriptor1.findTypeDescriptor(rTag, name):
nsDescriptor1.findTypeDescriptorImpl(rTag, name,visited1);
return new Result<TypeDescriptor>(complexTypeDescriptor, new Object[]{rTag});
}
}, false
);
if (value.getValue() != null) {
myTypesMap.put(pair, value);
return value.getValue();
}
}
}
}
}
}
return null;
}
private CachedValue<TypeDescriptor> createAndPutTypesCachedValueSimpleType(final XmlTag tag, final Pair<String, XmlTag> pair) {
final CachedValue<TypeDescriptor> value = tag.getManager().getCachedValuesManager().createCachedValue(new CachedValueProvider<TypeDescriptor>() {
public CachedValueProvider.Result<TypeDescriptor> compute() {
final SimpleTypeDescriptor simpleTypeDescriptor = new SimpleTypeDescriptor(tag);
return new Result<TypeDescriptor>(simpleTypeDescriptor, new Object[]{tag});
}
}, false);
myTypesMap.put(pair, value);
return value;
}
private CachedValue<TypeDescriptor> createAndPutTypesCachedValue(final XmlTag tag, final Pair<String, XmlTag> pair) {
final CachedValue<TypeDescriptor> value = tag.getManager().getCachedValuesManager().createCachedValue(new CachedValueProvider<TypeDescriptor>() {
public CachedValueProvider.Result<TypeDescriptor> compute() {
final ComplexTypeDescriptor complexTypeDescriptor = new ComplexTypeDescriptor(XmlNSDescriptorImpl.this, tag);
return new Result<TypeDescriptor>(complexTypeDescriptor, new Object[]{tag});
}
}, false);
myTypesMap.put(pair, value);
return value;
}
public XmlElementDescriptor getElementDescriptor(XmlTag tag) {
PsiElement parent = tag.getParent();
final String namespace = tag.getNamespace();
while(parent instanceof XmlTag && !namespace.equals(((XmlTag)parent).getNamespace()))
parent = parent.getContext();
if (parent instanceof XmlTag) {
final XmlTag parentTag = (XmlTag)parent;
final XmlElementDescriptor parentDescriptor = parentTag.getDescriptor();
if(parentDescriptor != null){
return parentDescriptor.getElementDescriptor(tag);
}
else{
return null;
}
}
else {
return getElementDescriptor(tag.getLocalName(), tag.getNamespace());
}
}
public XmlElementDescriptor[] getRootElementsDescriptors(final XmlDocument doc) {
final List<XmlElementDescriptor> result = new ArrayList<XmlElementDescriptor>();
XmlDocument document = myFile.getDocument();
XmlTag rootTag = document.getRootTag();
if (rootTag == null) return null;
XmlTag[] tags = rootTag.getSubTags();
for (XmlTag tag : tags) {
if (equalsToSchemaName(tag, "element")) {
String name = tag.getAttributeValue("name");
if (name != null) {
final XmlElementDescriptor elementDescriptor = getElementDescriptor(name, getDefaultNamespace());
if (elementDescriptor != null) {
result.add(elementDescriptor);
}
}
}
}
return result.toArray(new XmlElementDescriptor[result.size()]);
}
protected static boolean equalsToSchemaName(XmlTag tag, String schemaName) {
return schemaName.equals(tag.getLocalName()) && checkSchemaNamespace(tag);
}
private static XmlTag findSpecialTag(String name, String specialName, XmlTag rootTag, XmlNSDescriptorImpl descriptor, Set<XmlTag> visited) {
XmlNSDescriptorImpl nsDescriptor = getNSDescriptorToSearchIn(rootTag, name, descriptor);
if (nsDescriptor != descriptor) {
return findSpecialTag(
XmlUtil.findLocalNameByQualifiedName(name),
specialName,
nsDescriptor.getDescriptorFile().getDocument().getRootTag(),
nsDescriptor,
visited
);
}
if (visited == null) visited = new HashSet<XmlTag>(1);
else if (visited.contains(rootTag)) return null;
visited.add(rootTag);
XmlTag[] tags = rootTag.getSubTags();
for (XmlTag tag : tags) {
if (equalsToSchemaName(tag, specialName)) {
String attribute = tag.getAttributeValue("name");
if (name.equals(attribute)
|| name.indexOf(":") >= 0 && name.substring(name.indexOf(":") + 1).equals(attribute)) {
return tag;
}
}
else if (equalsToSchemaName(tag, "include") ||
(equalsToSchemaName(tag, "import") &&
rootTag.getNamespaceByPrefix(
XmlUtil.findPrefixByQualifiedName(name)
).equals(tag.getAttributeValue("namespace"))
)
) {
final String schemaLocation = tag.getAttributeValue("schemaLocation");
if (schemaLocation != null) {
final XmlFile xmlFile = XmlUtil.findXmlFile(rootTag.getContainingFile(), schemaLocation);
if (xmlFile != null) {
final XmlDocument document = xmlFile.getDocument();
if (document != null) {
final XmlTag rTag = findSpecialTag(name, specialName, document.getRootTag(), descriptor, visited);
if (rTag != null) return rTag;
}
}
}
}
}
return null;
}
public XmlTag findGroup(String name) {
return findSpecialTag(name,"group",myFile.getDocument().getRootTag(), this, null);
}
public XmlTag findAttributeGroup(String name) {
return findSpecialTag(name,"attributeGroup",myFile.getDocument().getRootTag(),this, null);
}
private Map<String,List<XmlTag>> mySubstitutions;
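// Illustrative note (not from the original sources): substitution group lookup. Given a schema declaring
//   <xsd:element name="address" type="AddressType"/>
//   <xsd:element name="usAddress" type="USAddressType" substitutionGroup="address"/>
// getSubstitutes("address", targetNamespace) returns the descriptor created for the "usAddress" element.
// The mySubstitutions map above is built lazily on the first call and is keyed by the local name of the
// substitutionGroup attribute value.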
public XmlElementDescriptor[] getSubstitutes(String localName, String namespace) {
List<XmlElementDescriptor> result = new ArrayList<XmlElementDescriptor>();
if (mySubstitutions ==null) {
XmlDocument document = myFile.getDocument();
mySubstitutions = new HashMap<String, List<XmlTag>>();
XmlTag rootTag = document.getRootTag();
XmlTag[] tags = rootTag.getSubTags();
for (XmlTag tag : tags) {
if (equalsToSchemaName(tag, "element")) {
final String substAttr = tag.getAttributeValue("substitutionGroup");
if (substAttr != null) {
String substLocalName = XmlUtil.findLocalNameByQualifiedName(substAttr);
List<XmlTag> list = mySubstitutions.get(substLocalName);
if (list == null) {
list = new LinkedList<XmlTag>();
mySubstitutions.put(substLocalName, list);
}
list.add(tag);
}
}
}
}
List<XmlTag> substitutions = mySubstitutions.get(localName);
if (substitutions==null) return XmlElementDescriptor.EMPTY_ARRAY;
for (XmlTag tag : substitutions) {
final String substAttr = tag.getAttributeValue("substitutionGroup");
if (substAttr != null && checkElementNameEquivalence(localName, namespace, substAttr, tag)) {
result.add(createElementDescriptor(tag));
}
}
return result.toArray(new XmlElementDescriptor[result.size()]);
}
public static String getSchemaNamespace(XmlFile file) {
return XmlUtil.findNamespacePrefixByURI(file, "http:
}
public PsiElement getDeclaration(){
return myFile.getDocument();
}
public boolean processDeclarations(PsiElement context, PsiScopeProcessor processor, PsiSubstitutor substitutor, PsiElement lastElement, PsiElement place){
return PsiScopesUtil.walkChildrenScopes(context, processor, substitutor, lastElement, place);
}
public String getName(PsiElement context){
return getName();
}
public String getName(){
return "";
}
public void init(PsiElement element){
myFile = (XmlFile) element.getContainingFile();
final XmlDocument document = myFile.getDocument();
if (document != null) {
final XmlTag rootTag = document.getRootTag();
if (rootTag != null) {
myTargetNamespace = rootTag.getAttributeValue("targetNamespace");
}
}
}
public Object[] getDependences(){
return new Object[]{myFile, };
}
static {
STD_TYPES.add("string");
STD_TYPES.add("normalizedString");
STD_TYPES.add("token");
STD_TYPES.add("byte");
STD_TYPES.add("unsignedByte");
STD_TYPES.add("base64Binary");
STD_TYPES.add("hexBinary");
STD_TYPES.add("integer");
STD_TYPES.add("positiveInteger");
STD_TYPES.add("negativeInteger");
STD_TYPES.add("nonNegativeInteger");
STD_TYPES.add("nonPositiveInteger");
STD_TYPES.add("int");
STD_TYPES.add("unsignedInt");
STD_TYPES.add("long");
STD_TYPES.add("unsignedLong");
STD_TYPES.add("short");
STD_TYPES.add("unsignedShort");
STD_TYPES.add("decimal");
STD_TYPES.add("float");
STD_TYPES.add("double");
STD_TYPES.add("boolean");
STD_TYPES.add("time");
STD_TYPES.add("dateTime");
STD_TYPES.add("duration");
STD_TYPES.add("date");
STD_TYPES.add("gMonth");
STD_TYPES.add("gYear");
STD_TYPES.add("gYearMonth");
STD_TYPES.add("gDay");
STD_TYPES.add("gMonthDay");
STD_TYPES.add("Name");
STD_TYPES.add("QName");
STD_TYPES.add("NCName");
STD_TYPES.add("anyURI");
STD_TYPES.add("language");
STD_TYPES.add("ID");
STD_TYPES.add("IDREF");
STD_TYPES.add("IDREFS");
STD_TYPES.add("ENTITY");
STD_TYPES.add("ENTITIES");
STD_TYPES.add("NOTATION");
STD_TYPES.add("NMTOKEN");
STD_TYPES.add("NMTOKENS");
}
public void validate(PsiElement context, Validator.ValidationHost host) {
ExternalDocumentValidator.doValidation(context,host);
}
protected boolean supportsStdAttributes() {
return true;
}
}
|
package org.helioviewer.gl3d.model;
import org.helioviewer.gl3d.scenegraph.GL3DGroup;
import org.helioviewer.gl3d.scenegraph.GL3DState;
/**
* Grouping object for all artificial objects, that is, visual assistance objects
* that do not represent any real data.
*
* @author Simon Spoerri (simon.spoerri@fhnw.ch)
*
*/
public class GL3DArtificialObjects extends GL3DGroup {
public GL3DArtificialObjects() {
super("Artificial Objects");
}
@Override
public void shapeDraw(GL3DState state) {
super.shapeDraw(state);
}
}
|
package org.videolan.media.content;
import java.util.ArrayList;
public class PlayerManager {
private static PlayerManager instance = new PlayerManager();
public static PlayerManager getInstance() {
return instance;
}
private ArrayList registeredPlayers = new ArrayList(1);
private BDHandler playlistPlayer = null;
private BDHandler videoDripPlayer = null;
//private ArrayList audioPlayerList = new ArrayList(8);
private Object playlistPlayerLock = new Object();
private Object videoDripPlayerLock = new Object();
//private Object audioPlayerLock = new Object();
private Object stoppingLock = new Object();
private boolean stopping = false;
public void releaseAllPlayers(boolean unconditional) {
BDHandler[] players = null;
synchronized (registeredPlayers) {
players = (BDHandler[])registeredPlayers.toArray(new BDHandler[0]);
}
for (int i = 0; i < players.length; i++) {
if (unconditional) {
players[i].close();
} else if (players[i].getOwnerContext() != null && players[i].getOwnerContext().isReleased()) {
players[i].close();
}
}
}
protected void releaseResource(BDHandler player) {
if (player instanceof org.videolan.media.content.playlist.Handler) {
synchronized (playlistPlayerLock) {
if (player == playlistPlayer) {
playlistPlayer = null;
}
}
return;
}
if (player instanceof org.videolan.media.content.sound.Handler) {
return;
}
if (player instanceof org.videolan.media.content.audio.Handler) {
return;
}
System.err.println("unknown player type: " + player.getClass().getName());
}
protected boolean allocateResource(BDHandler player) {
if (player instanceof org.videolan.media.content.playlist.Handler) {
synchronized (stoppingLock) {
stopping = true;
}
synchronized (playlistPlayerLock) {
if (playlistPlayer != null && player != playlistPlayer) {
playlistPlayer.stop();
playlistPlayer.deallocate();
}
playlistPlayer = player;
}
synchronized (stoppingLock) {
stopping = false;
}
return true;
}
if (player instanceof org.videolan.media.content.sound.Handler) {
return true;
}
if (player instanceof org.videolan.media.content.audio.Handler) {
return true;
}
System.err.println("unknown player type: " + player.getClass().getName());
return false;
}
protected void unregisterPlayer(BDHandler player)
{
synchronized (registeredPlayers) {
if (registeredPlayers.contains(player)) {
registeredPlayers.remove(player);
}
}
}
protected void registerPlayer(BDHandler player)
{
synchronized (registeredPlayers) {
if (!registeredPlayers.contains(player)) {
registeredPlayers.add(player);
}
}
}
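// Hedged lifecycle sketch (added for illustration; not part of the original manager):
// how a BDHandler would typically be driven through this class. Only methods defined
// in this file are called; the ordering shown here is an assumption.
protected boolean runPlayerSketch(BDHandler player) {
registerPlayer(player); // make the player visible to releaseAllPlayers()
boolean allocated = allocateResource(player); // may stop/deallocate a previous playlist player
if (allocated) {
releaseResource(player); // give the playlist slot back when playback is done
}
unregisterPlayer(player);
return allocated;
}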
public void onPlaylistEnd(int playlist) {
synchronized (stoppingLock) {
if (stopping) return;
synchronized (playlistPlayerLock) {
if (playlistPlayer != null)
playlistPlayer.endOfMedia(playlist);
}
}
}
public void onPlaylistTime(int pts) {
synchronized (stoppingLock) {
if (stopping) return;
synchronized (playlistPlayerLock) {
if (playlistPlayer != null)
playlistPlayer.updateTime(pts);
}
}
}
public void onChapterReach(int param) {
synchronized (stoppingLock) {
if (stopping) return;
synchronized (playlistPlayerLock) {
if (playlistPlayer != null)
playlistPlayer.doChapterReach(param);
}
}
}
public void onMarkReach(int param) {
synchronized (stoppingLock) {
if (stopping) return;
synchronized (playlistPlayerLock) {
if (playlistPlayer != null)
playlistPlayer.doMarkReach(param);
}
}
}
public void onPlaylistStart(int param) {
synchronized (stoppingLock) {
if (stopping) return;
synchronized (playlistPlayerLock) {
if (playlistPlayer != null)
playlistPlayer.doPlaylistStart(param);
}
}
}
public void onPlayItemReach(int param) {
synchronized (stoppingLock) {
if (stopping) return;
synchronized (playlistPlayerLock) {
if (playlistPlayer != null)
playlistPlayer.doPlayItemReach(param);
}
}
}
public void onAngleChange(int param) {
synchronized (stoppingLock) {
if (stopping) return;
synchronized (playlistPlayerLock) {
if (playlistPlayer != null)
playlistPlayer.doAngleChange(param);
}
}
}
public void onRateChange(float rate) {
synchronized (stoppingLock) {
if (stopping) return;
synchronized (playlistPlayerLock) {
if (playlistPlayer != null)
playlistPlayer.updateRate(rate);
}
}
}
public void onSubtitleChange(int param) {
synchronized (stoppingLock) {
if (stopping) return;
synchronized (playlistPlayerLock) {
if (playlistPlayer != null)
playlistPlayer.doSubtitleChange(param);
}
}
}
public void onPiPChange(int param) {
synchronized (stoppingLock) {
if (stopping) return;
synchronized (playlistPlayerLock) {
if (playlistPlayer != null)
playlistPlayer.doPiPChange(param);
}
}
}
}
|
package semanticMarkup.ling.learn;
import static org.junit.Assert.*;
import java.io.IOException;
import java.util.AbstractCollection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import semanticMarkup.know.lib.WordNetPOSKnowledgeBase;
import semanticMarkup.ling.learn.auxiliary.GetNounsAfterPtnReturnValue;
import semanticMarkup.ling.learn.auxiliary.StringAndInt;
import semanticMarkup.ling.learn.dataholder.DataHolder;
import semanticMarkup.ling.learn.dataholder.SentenceStructure;
import semanticMarkup.ling.learn.dataholder.WordPOSKey;
import semanticMarkup.ling.learn.knowledge.Constant;
import semanticMarkup.ling.learn.utility.LearnerUtility;
import semanticMarkup.ling.transform.ITokenizer;
import semanticMarkup.ling.transform.lib.OpenNLPSentencesTokenizer;
import semanticMarkup.ling.transform.lib.OpenNLPTokenizer;
public class LearnerTest {
private Learner tester;
@Before
public void initialize() {
this.tester = learnerFactory();
}
// @Test
// public void testLearn() {
// Configuration myConfiguration = new Configuration();
// Utility myUtility = new Utility(myConfiguration);
// DataHolder results = new DataHolder(myConfiguration, myUtility);
// Map<String, String> myHeuristicNounTable = results
// .getHeuristicNounTable();
// myHeuristicNounTable.put("word1", "type1");
// List<Sentence> mySentenceTable = results.getSentenceHolder();
// mySentenceTable.add(new Sentence(0, "source1", "sentence1",
// "originalSentence", "lead1", "status1", "tag1", "modifier1",
// "type1"));
// // Learner tester = new Learner("plain","res/WordNet/WordNet-3.0/dict");
// // assertEquals ("learner", results, tester.Learn(tms));
// // results = tester.Learn(tms);
// // assertEquals ("learner", results, tester.Learn(tms));
// @Test
// public void testPopulateUnknownWordsTable() {
// fail("Not yet implemented");
@Test
public void testDiscountPOS() {
// case "all"
// see doItCaseHandle case 2
}
@Test
public void testResolveConfict() {
// see doItCaseHandle case 2
}
@Test
public void testChangePOS(){
// see doItCaseHandle case 2
}
@Test
public void testUpdatePOS(){
// see doItCaseHandle case 2
}
// @Test
// public void testGetParentSentenceTag() {
// fail("Not yet implemented");
// @Test
// public void testTagSentWithMT() {
// fail("Not yet implemented");
// @Test
// public void testProcessNewWord() {
// fail("Not yet implemented");
// @Test
// public void testSingularPluralVariations() {
// fail("Not yet implemented");
// @Test
// public void testUpdateUnknownWords() {
// fail("Not yet implemented");
// @Test
// public void testAddHeuristicsNouns() {
// fail("Not yet implemented");
// @Test
// public void testAddDescriptors() {
// fail("Not yet implemented");
// @Test
// public void testAddNouns() {
// fail("Not yet implemented");
// @Test
// public void testGetHeuristicsNouns() {
// fail("Not yet implemented");
@Test
public void testGetHeuristicsNounsHelper() {
HashSet<String> words = new HashSet<String>();
words.add("septa");
words.add("word1");
words.add("septum");
assertEquals("getHeuristicsNouns - handleSpecialCase 1", "septa[p]",
tester.getHeuristicsNounsHelper("septa[s]", words));
}
@Test
public void testGetPresentAbsentNouns() {
// Method getPresentAbsentNouns
assertEquals("getPresentAbsentNouns - no present/absent", "",
tester.getPresentAbsentNouns("only one pair of abcly presen"));
assertEquals("getPresentAbsentNouns - and|or|to", "",
tester.getPresentAbsentNouns("only one pair of and present"));
assertEquals("getPresentAbsentNouns - STOP words", "",
tester.getPresentAbsentNouns("only one pair of without absent"));
assertEquals(
"getPresentAbsentNoun - always|often|seldom|sometimes|[a-z]+lys",
"",
tester.getPresentAbsentNouns("only one pair of abcly present"));
assertEquals("getPresentAbsentNouns - PENDINGS", "circuli[p]",
tester.getPresentAbsentNouns("only one pair of circuli absent"));
assertEquals("getPresentAbsentNouns - end with ss", "glass[s]",
tester.getPresentAbsentNouns("only one pair of glass absent"));
assertEquals(
"getPresentAbsentNouns - end with none ss",
"computers[p]",
tester.getPresentAbsentNouns("only one pair of computers absent"));
assertEquals("getPresentAbsentNouns - teeth", "teeth[p]",
tester.getPresentAbsentNouns("only one pair of teeth present"));
assertEquals("getPresentAbsentNouns - not SENDINGS", "serum[s]",
tester.getPresentAbsentNouns("only one pair of serum absent"));
assertEquals(
"getPresentAbsentNouns - SENDINGS",
"computer[s]",
tester.getPresentAbsentNouns("only one pair of computer absent"));
}
// @Test
// public void testCharacterHeuristics() {
// fail("Not yet implemented");
// @Test
// public void testAdd2HeuristicNounTable() {
// fail("Not yet implemented");
// @Test
// public void testFilterOutDescriptors() {
// fail("Not yet implemented");
@Test
public void testGetTaxonNameNouns() {
// Nouns rule 0: Taxon name nouns
Set<String> taxonNames = new HashSet<String>();
// Method getTaxonNameNouns
assertEquals("getTaxonNameNouns - not match", taxonNames,
tester.getTaxonNameNouns("word word word"));
assertEquals("getTaxonNameNouns - empty taxon name", taxonNames,
tester.getTaxonNameNouns("< i >< / i >"));
taxonNames.add("word1 word2 word3");
taxonNames.add("word1");
taxonNames.add("word2");
taxonNames.add("word3");
taxonNames.add("word4 word5");
taxonNames.add("word4");
taxonNames.add("word5");
assertEquals(
"getTaxonNameNouns - match",
taxonNames,
tester.getTaxonNameNouns("< i >word1 word2 word3< / i>, < i >word4 word5< /i>"));
}
@Test
public void testGetNounsMecklesCartilage() {
// Nouns rule 0.5: Method getNounsMecklesCartilage
Set<String> nouns = new HashSet<String>();
assertEquals("getTaxonNameNouns - not match", nouns,
tester.getNounsMecklesCartilage("word word word"));
nouns.add("meckel
nouns.add("meckels");
nouns.add("meckel");
assertEquals("getTaxonNameNouns - match", nouns,
tester.getNounsMecklesCartilage("word Meckel#s word"));
}
@Test
public void testGetNounsRule1() {
// Method getNounsRule1
// Set<String> descriptorMap = new HashSet<String>();
Set<String> nouns1 = new HashSet<String>();
nouns1.add("term1");
assertEquals(
"getNounsRule1",
nouns1,
tester.getNounsRule1(
"Chang_2004.xml_ ffa60eb1-4320-4e69-b151-75a2615dca4b_29482156-8083-430c-91f4-e80209b50138.txt-0",
"term1", new HashMap<String, Boolean>()));
}
@Test
public void testGetNounsRule2() {
// Method getNounsRule2
Set<String> nouns2 = new HashSet<String>();
assertEquals("getNounsRule2 - not match", nouns2,
tester.getNounsRule2("word word word soe width nea"));
nouns2.add("nouna");
assertEquals("getNounsRule2 - match 1", nouns2,
tester.getNounsRule2("word word word some nouna"));
nouns2.add("nounb");
assertEquals(
"getNounsRule2 - match 2",
nouns2,
tester.getNounsRule2("word some nouna near word some width near word third nounb near end"));
assertEquals(
"getNounsRule2 - match 2",
nouns2,
tester.getNounsRule2("word some nouna near word some width near word third nounb near end nounc abction of end"));
}
@Test
public void testGetNounsRule3Helper() {
// Method getNounsRule3
Set<String> nouns3 = new HashSet<String>();
nouns3.add("II");
nouns3.add("IX");
assertEquals(
"getNounsRule3",
nouns3,
tester.getNounsRule3Helper("posterior and dorsal to foramen for nerve II (i.e. a posterior oblique myodome IX)"));
nouns3.remove("II");
nouns3.remove("IX");
nouns3.add("Meckelian");
assertEquals(
"getNounsRule3",
nouns3,
tester.getNounsRule3Helper("Pronounced dorsal process on Meckelian element"));
}
@Test
public void testGetNounsRule4() {
// Method getNounsRule4
Set<String> nouns4 = new HashSet<String>();
assertEquals("getNounsRule4 - not match", nouns4,
tester.getNounsRule4("word word word noun one"));
nouns4.add("nouna");
assertEquals("getNounsRule4 - not match", nouns4,
tester.getNounsRule4("word word word nouna 1"));
nouns4.remove("nouna");
nouns4.add("nounb");
assertEquals(
"getNounsRule4 - not match",
nouns4,
tester.getNounsRule4("word word word page 1 word above 2 word NoUnb 2 end"));
}
@Test
public void testGetDescriptorsRule1() {
// Method getDescriptorsRule1
Set<String> descriptors1 = new HashSet<String>();
descriptors1.add("absent");
assertEquals("getDescriptorsRule1", descriptors1,
tester.getDescriptorsRule1(
"Brazeau_2009.xml_states200_state202.txt-0", "absent",
new HashSet<String>()));
descriptors1.remove("absent");
descriptors1.add("present");
Set<String> nouns = new HashSet<String>();
nouns.add("present");
assertEquals("getDescriptorsRule1", new HashSet<String>(),
tester.getDescriptorsRule1(
"Brazeau_2009.xml_states200_state203.txt-0", "present",
nouns));
assertEquals("getDescriptorsRule1", descriptors1,
tester.getDescriptorsRule1(
"Brazeau_2009.xml_states200_state203.txt-0", "present",
new HashSet<String>()));
}
// @Test
// public void testGetDescriptorsRule2() {
// fail("Not yet implemented");
@Test
public void testIsDescriptor() {
// Method filterOutDescriptors
Set<String> rNouns = new HashSet<String>();
Set<String> rDescriptors = new HashSet<String>();
Set<String> results = new HashSet<String>();
rNouns.add("noun1");
rNouns.add("descriptor2");
rNouns.add("noun2");
rDescriptors.add("descriptor1");
rDescriptors.add("descriptor2");
rDescriptors.add("descriptor3");
results.add("noun1");
results.add("noun2");
assertEquals("filterOutDescriptors", results,
tester.filterOutDescriptors(rNouns, rDescriptors));
}
@Test
public void testIsMatched() {
// Method isMatched
Map<String, Boolean> descriptorMap = new HashMap<String, Boolean>();
descriptorMap.put("term1", false);
assertEquals("isMatched", false, descriptorMap.get("term1"));
assertEquals("isMatched", true, tester.isMatched(
"begin word word was term1 word word end", "term1",
descriptorMap));
assertEquals("isMatched", true, descriptorMap.get("term1"));
}
@Test
public void testIsIsAndOrSentence(){
String sentencePtn = null;
String ptn1 = null;
String ptn2 = null;
List<String> words = new ArrayList<String>();
// // case 1
// words.clear();
// words.addAll(Arrays.asList("posterior and <M>dorsal</M> to foramen <B>for</B> nerve <N>ii</N>".split(" ")));
// sentencePtn = "q&mqqbqn";
// ptn1="^(?:[mbq,]{0,10}[onp]+(?:,|(?=&)))+&(?:[mbq,]{0,10}[onp]+)"; // n,n,n&n
// ptn2="^(?:[mbq,]{0,10}(?:,|(?=&)))+&(?:[mbq,]{0,10})[onp]+"; // m,m,&mn
// assertEquals("isIsAndOrSentence case 1", false,
// tester.isIsAndOrSentenceHelper(words, sentencePtn, ptn1, ptn2));
// case 2
words.clear();
words.addAll(Arrays.asList("elongate and <O>passes</O> <B>anterolaterally</B> through orbital <B>?</B> oor".split(" ")));
sentencePtn = "q&obqqbq";
ptn1="^(?:[mbq,]{0,10}[onp]+(?:,|(?=&)))+&(?:[mbq,]{0,10}[onp]+)"; // n,n,n&n
ptn2="^(?:[mbq,]{0,10}(?:,|(?=&)))+&(?:[mbq,]{0,10})[onp]+"; // m,m,&mn
assertEquals("isIsAndOrSentence case 2", true,
tester.isIsAndOrSentenceHelper(words, sentencePtn, ptn1, ptn2));
// case 3
words.clear();
words.addAll(Arrays.asList("<O>divides</O> <B>within</B> otic <N>capsule</N> <B>at</B> <B>the</B> <N>level</N> <B>of</B> <B>the</B> postorbital process".split(" ")));
sentencePtn = "q,obqnbbnbbqq";
ptn1="^(?:[mbq,]{0,10}[onp]+(?:,|(?=&)))+&(?:[mbq,]{0,10}[onp]+)"; // n,n,n&n
ptn2="^(?:[mbq,]{0,10}(?:,|(?=&)))+&(?:[mbq,]{0,10})[onp]+"; // m,m,&mn
assertEquals("isIsAndOrSentence case 3", false,
tester.isIsAndOrSentenceHelper(words, sentencePtn, ptn1, ptn2));
}
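// Hedged illustration (added; not one of the original tests): the ptn1/ptn2 strings
// above are ordinary java.util.regex patterns applied to the compacted sentence
// pattern (e.g. "q&obqqbq"). This only shows the matching call itself; the real
// decision logic lives in Learner.isIsAndOrSentenceHelper.
private static boolean andOrPatternFindsSketch(String sentencePtn, String regex) {
return java.util.regex.Pattern.compile(regex).matcher(sentencePtn).find();
}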
// @Test
// public void testAddStopWords() {
// fail("Not yet implemented");
// @Test
// public void testAddCharacters() {
// fail("Not yet implemented");
// @Test
// public void testAddNumbers() {
// fail("Not yet implemented");
// @Test
// public void testAddClusterstrings() {
// fail("Not yet implemented");
// @Test
// public void testAddProperNouns() {
// fail("Not yet implemented");
@Test
public void testPosBySuffix() {
// Pattern 1: ^[a-z_]+(er|est|fid|form|ish|less|like|ly|merous|most|shaped)$
// Pattern 2: ^[._.][a-z]+
tester.posBySuffix();
}
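// Hedged illustration (added; not one of the original tests): the first suffix
// pattern quoted in the comment above, applied directly with String.matches. That
// posBySuffixCase1Helper uses exactly this expression is an assumption; the helper
// also applies further checks (see testContainSuffix), so matching the raw pattern
// alone is not equivalent to its result.
private static boolean suffixPattern1Sketch(String word) {
return word.matches("^[a-z_]+(er|est|fid|form|ish|less|like|ly|merous|most|shaped)$");
}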
@Test
public void testPosBySuffixCase1Helper(){
assertEquals("posBySuffix Case1 - match", true, tester.posBySuffixCase1Helper("approximately"));
assertEquals("posBySuffix Case1 - not match", false, tester.posBySuffixCase1Helper("bigger"));
assertEquals("posBySuffix Case1 - match", true, tester.posBySuffixCase1Helper("bifid"));
assertEquals("posBySuffix Case1 - not match", false, tester.posBySuffixCase1Helper("per"));
}
@Test
public void testPosBySuffixCase2Helper(){
assertEquals("posBySuffix Case2 - match", true, tester.posBySuffixCase2Helper("_nerved"));
assertEquals("posBySuffix Case2 - not match", false, tester.posBySuffixCase2Helper("nerved"));
}
@Test
public void testContainSuffix() {
// test method containSuffix
assertEquals("containSuffix less", true,
tester.containSuffix("less", "", "less"));
assertEquals("containSuffix ly", true,
tester.containSuffix("slightly", "slight", "ly"));
assertEquals("containSuffix er", false,
tester.containSuffix("fewer", "few", "er"));
assertEquals("containSuffix est", true,
tester.containSuffix("fastest", "fast", "est"));
assertEquals("containSuffix base is in WN", true,
tester.containSuffix("platform", "plat", "form"));
assertEquals("containSuffix sole adj", true,
tester.containSuffix("scalelike", "scale", "like"));
// case 3.1.2 and case 3.3.3 not tested
assertEquals("containSuffix 111", false,
tester.containSuffix("anterolaterally", "anterolateral", "ly")); // 111
assertEquals("containSuffix 121", false,
tester.containSuffix("mesially", "mesial", "ly")); // 121
assertEquals("containSuffix 122", false,
tester.containSuffix("per", "p", "er")); // 122
assertEquals("containSuffix 212", false,
tester.containSuffix("border", "bord", "er")); // 212
assertEquals("containSuffix 212", false,
tester.containSuffix("bigger", "bigg", "er")); // 212
assertEquals("containSuffix 221", true,
tester.containSuffix("anteriorly", "anterior", "ly")); // 221
assertEquals("containSuffix 222", false,
tester.containSuffix("corner", "corn", "er")); // 222
assertEquals("containSuffix 222", true,
tester.containSuffix("lower", "low", "er")); // 222
assertEquals("containSuffix 223", true,
tester.containSuffix("bifid", "bi", "fid")); // 223
}
@Test
public void testMarkupByPattern() {
Learner myTester = learnerFactory();
myTester.getDataHolder().add2Holder(DataHolder.SENTENCE,
Arrays.asList(new String[] {"source1", "sentence1", "x=word word word", "lead1", "status1", "tag1", "modifier1", "type1"}));
myTester.markupByPattern();
List<SentenceStructure> targetSentenceHolder = new LinkedList<SentenceStructure>();
targetSentenceHolder.add(new SentenceStructure(0, "source1", "sentence1", "x=word word word", "lead1", "status1", "chromosome", "", "type1"));
assertEquals("markupByPattern", targetSentenceHolder, myTester.getDataHolder().getSentenceHolder());
}
@Test
public void testMarkupByPatternHelper(){
// case 1
SentenceStructure mySentence1 = new SentenceStructure(0, "source1", "sentence1", "x=word word word", "lead1", "status1", "tag1", "modifier1", "type1");
SentenceStructure target1 = new SentenceStructure(0, "source1", "sentence1", "x=word word word", "lead1", "status1", "chromosome", "", "type1");
tester.markupByPatternHelper(mySentence1);
assertEquals("markupByPatternHelper - case 1", target1,mySentence1);
// case 2
SentenceStructure mySentence2 = new SentenceStructure(1, "source2", "sentence2", "2n=abc...", "lead2", "status2", "tag2", "modifier2", null);
SentenceStructure target2 = new SentenceStructure(1, "source2", "sentence2", "2n=abc...", "lead2", "status2", "chromosome", "", null);
tester.markupByPatternHelper(mySentence2);
assertEquals("markupByPatternHelper - case 2", target2,mySentence2);
// case 3
SentenceStructure mySentence3 = new SentenceStructure(2, "source", "sentence", "x word word", "lead", "status", "tag", "modifier", null);
SentenceStructure target3 = new SentenceStructure(2, "source", "sentence", "x word word", "lead", "status", "chromosome", "", null);
tester.markupByPatternHelper(mySentence3);
assertEquals("markupByPatternHelper - case 3", target3, mySentence3);
// case 4
SentenceStructure mySentence4 = new SentenceStructure(3, "source", "sentence", "2n word word", "lead",null, "tag", "modifier", null);
SentenceStructure target4 = new SentenceStructure(3, "source", "sentence", "2n word word", "lead", null, "chromosome", "", null);
tester.markupByPatternHelper(mySentence4);
assertEquals("markupByPatternHelper - case 4", target4, mySentence4);
// case 5
SentenceStructure mySentence5 = new SentenceStructure(4, "source", "sentence", "2 nword word", "lead", "status", "tag", "modifier", "");
SentenceStructure target5 = new SentenceStructure(4, "source", "sentence", "2 nword word", "lead", "status", "chromosome", "", "");
tester.markupByPatternHelper(mySentence5);
assertEquals("markupByPatternHelper - case 5", target5, mySentence5);
// case 6
SentenceStructure mySentence6 = new SentenceStructure(5, "source", "sentence", "fl. word word", "lead", "status", null, null, "");
SentenceStructure target6 = new SentenceStructure(5, "source", "sentence", "fl. word word", "lead", "status", "flowerTime", "", "");
tester.markupByPatternHelper(mySentence6);
assertEquals("markupByPatternHelper - case 6", target6, mySentence6);
// case 7
SentenceStructure mySentence7 = new SentenceStructure(6, "source", "sentence", "fr.word word", "lead", "status", null, "", "");
SentenceStructure target7 = new SentenceStructure(6, "source", "sentence", "fr.word word", "lead", "status", "fruitTime", "", "");
tester.markupByPatternHelper(mySentence7);
assertEquals("markupByPatternHelper - case 7", target7, mySentence7);
}
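// Hedged sketch (added; not one of the original tests): the tagging rules exercised
// by the seven cases above, written as a plain regex dispatch. The exact patterns
// used by Learner.markupByPatternHelper are an assumption inferred from those cases.
private static String patternTagSketch(String sentence) {
if (sentence.matches("^x=.*") || sentence.matches("^2n=.*")
|| sentence.matches("^x\\s.*") || sentence.matches("^2n\\s.*")
|| sentence.matches("^2\\s?n.*")) {
return "chromosome"; // e.g. "x=word word word", "2n=abc...", "2 nword word"
}
if (sentence.matches("^fl\\.\\s?.*")) return "flowerTime"; // e.g. "fl. word word"
if (sentence.matches("^fr\\.\\s?.*")) return "fruitTime"; // e.g. "fr.word word"
return null; // no pattern applies
}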
@Test
public void testMarkupIgnore() {
Learner myTester = learnerFactory();
myTester.getDataHolder().add2Holder(DataHolder.SENTENCE,
Arrays.asList(new String[] {"source1", "sentence1", "IGNOREPTN", "lead1", "status1", "tag1", "modifier1", "type1"}));
myTester.markupIgnore();
List<SentenceStructure> targetSentenceHolder = new LinkedList<SentenceStructure>();
targetSentenceHolder.add(new SentenceStructure(0, "source1", "sentence1", "IGNOREPTN", "lead1", "status1", "ignore", "", "type1"));
assertEquals("markupIgnore", targetSentenceHolder, myTester.getDataHolder().getSentenceHolder());
}
@Test
public void testMarkupIgnoreHelper() {
SentenceStructure mySentence1 = new SentenceStructure(0, "source", "sentence", "IGNOREPTN", "lead", "status", null, "", "");
SentenceStructure target1 = new SentenceStructure(0, "source", "sentence", "IGNOREPTN", "lead", "status", "ignore", "", "");
tester.markupIgnoreHelper(mySentence1);
assertEquals("markupIgnoreHelper", target1, mySentence1);
SentenceStructure mySentence2 = new SentenceStructure(1, "source", "sentence", " IGNOREPTN", "lead", "status", null, "", "");
SentenceStructure target2 = new SentenceStructure(1, "source", "sentence", " IGNOREPTN", "lead", "status", "ignore", "", "");
tester.markupIgnoreHelper(mySentence2);
assertEquals("markupIgnoreHelper", target2, mySentence2);
}
// @Test
// public void testDiscover() {
// fail("Not yet implemented");
// @Test
// public void testRuleBasedLearn() {
// fail("Not yet implemented");
// @Test
// public void testDoIt() {
// fail("Not yet implemented");
// @Test
// public void testGetPOSptn() {
// fail("Not yet implemented");
// @Test
// public void testCheckPOSInfo() {
// fail("Not yet implemented");
// @Test
// public void testTagIt() {
// fail("Not yet implemented");
// @Test
// public void testMatchPattern() {
// fail("Not yet implemented");
@Test
public void testBuildPattern() {
Learner myTester = learnerFactory();
// Method buildPattern
// assertEquals(
// "buildPattern",
// "(?:^\\b(?:one|two|three)\\b|^\\w+\\s\\b(?:one|two|three)\\b|^\\w+\\s\\w+\\s\\b(?:one|two|three)\\b)",
// tester.buildPattern("one two three".split(" ")));
HashSet<String> wordSet= new HashSet<String>();
wordSet.add("teeth");
wordSet.add("unicuspid");
wordSet.add("with");
myTester.setCheckedWordSet(wordSet);
assertEquals("buildPattern", null,
myTester.buildPattern("teeth ; 9".split(" ")));
assertEquals("buildPattern",
"(?:^\\b(?:variously|arranged)\\b|^\\w+\\s\\b(?:variously|arranged)\\b|^\\w+\\s\\w+\\s\\b(?:variously|arranged)\\b).*$",
myTester.buildPattern("teeth variously arranged".split(" ")));
wordSet.add("circuli");
wordSet.add("present");
wordSet.add("on");
wordSet.add("hyohyoidei");
wordSet.add("muscle");
assertEquals("buildPattern",
"(?:^\\b(?:does|not|cross)\\b|^\\w+\\s\\b(?:does|not|cross)\\b|^\\w+\\s\\w+\\s\\b(?:does|not|cross)\\b).*$",
myTester.buildPattern("does not cross".split(" ")));
wordSet.addAll(Arrays.asList("lepidotrichia:of:passes:between:bases".split(":")));
assertEquals("buildPattern",
"(?:^\\b(?:ankylosed|to)\\b|^\\w+\\s\\b(?:ankylosed|to)\\b|^\\w+\\s\\w+\\s\\b(?:ankylosed|to)\\b).*$",
myTester.buildPattern("teeth ankylosed to".split(" ")));
}
@Test
public void testGetPOSptn(){
Learner myTester = learnerFactory();
myTester.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"teeth", "p", "role", "1", "1", "", ""}));
myTester.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"unicuspid", "p", "role", "1", "3", "", ""}));
myTester.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"with", "b", "role", "1", "1", "", ""}));
assertEquals("getPOSptn", "p?b", myTester.getPOSptn(Arrays.asList("teeth unicuspid with".split(" "))));
}
@Test
public void testDoItCaseHandle(){
// case x: boundary case
Learner myTesterBoundary = learnerFactory();
assertEquals("CaseHandle - boundary case", null, myTesterBoundary.doItCaseHandle(null, null));
assertEquals("CaseHandle - boundary case", new StringAndInt("",0), myTesterBoundary.doItCaseHandle("", ""));
// case 1
Learner myTester1 = learnerFactory();
myTester1.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"submandibular", "s", "", "0", "0", null, null}));
myTester1.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"submandibulars", "p", "", "0", "0", null, null}));
assertEquals("CaseHandle - case 1", new StringAndInt("submandibulars",0), myTester1.doItCaseHandle("submandibulars", "submandibulars"));
// case 2
Learner myTester2 = learnerFactory();
myTester2.getDataHolder().add2Holder(DataHolder.SENTENCE,
Arrays.asList(new String[] {"src",
"<N>stems</N> <B>usually</B> erect , sometimes prostrate to ascending <B>(</B> underground <N>stems</N> sometimes woody <O>caudices</O> or rhizomes , sometimes fleshy <B>)</B> . ",
"Stems usually erect, sometimes prostrate to ascending (underground stems sometimes woody caudices or rhizomes, sometimes fleshy ).",
"lead","status",null,"m","type"}));
myTester2.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"stems", "p", "", "0", "0", null, null}));
myTester2.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"stem", "s", "", "0", "0", null, null}));
myTester2.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"usually", "s", "", "0", "0", null, null}));
String sentence =
"stems usually erect , sometimes prostrate to ascending ( underground stems sometimes woody caudices or rhizomes , sometimes fleshy ) .";
String lead = "stems usually erect";
assertEquals("CaseHandle - case 2", new StringAndInt("stems",1), myTester2.doItCaseHandle(sentence, lead));
assertEquals("CaseHandle - case 2, updatePOS - case 2.1, resolveConfict, changePOS - case 2", true, myTester2.getDataHolder().getWordPOSHolder().containsKey(new WordPOSKey("usually", "b")));
assertEquals("CaseHandle - case 2, discountPOS - all", false, myTester2.getDataHolder().getWordPOSHolder().containsKey(new WordPOSKey("usually", "s")));
// case 3.2
// This also tests method markKnown() - case 1.1
Learner myTester32 = learnerFactory();
myTester32.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"teeth", "p", "role", "1", "1", "", ""}));
myTester32.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"with", "b", "role", "1", "1", "", ""}));
myTester32.getDataHolder().add2Holder(DataHolder.UNKNOWNWORD, Arrays.asList(new String[] {"bicuspid", "unknown"}));
myTester32.getDataHolder().add2Holder(DataHolder.UNKNOWNWORD, Arrays.asList(new String[] {"multicuspid", "unknown"}));
myTester32.getDataHolder().add2Holder(DataHolder.UNKNOWNWORD, Arrays.asList(new String[] {"tricuspid", "unknown"}));
myTester32.getDataHolder().add2Holder(DataHolder.SINGULAR_PLURAL, Arrays.asList(new String[] {"tooth", "teeth"}));
assertEquals("CaseHandle - case 3.2", new StringAndInt("teeth",4),
myTester32.doItCaseHandle("teeth unicuspid with crowns posteriorly curved along the main axis of the mandible , organized into a long series of equally_ sized teeth",
"teeth unicuspid with"));
// case 4
// case 4.2
Learner myTester42 = learnerFactory();
// test case 1
myTester42.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"teeth", "p", "role", "1", "1", "", ""}));
myTester42.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"variously", "b", "role", "0", "0", "", ""}));
myTester42.getDataHolder().add2Holder(DataHolder.SINGULAR_PLURAL, Arrays.asList(new String[] {"tooth", "teeth"}));
myTester42.getDataHolder().add2Holder(DataHolder.SINGULAR_PLURAL, Arrays.asList(new String[] {"base", "bases"}));
assertEquals("CaseHandle - case 4.2", new StringAndInt("teeth",0),
myTester42.doItCaseHandle("teeth variously arranged , but never very numerous , equally_ sized and regularly curved posteriorly along main axis of mandible",
"teeth variously arranged"));
//case 4.2 - test case 2
myTester42.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"muscle", "s", "role", "0", "0", "", ""}));
myTester42.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"with", "b", "role", "0", "0", "", ""}));
assertEquals("CaseHandle - case 4.2", new StringAndInt("hyohyoidei muscle",1),
myTester42.doItCaseHandle("hyohyoidei muscle with a broad origin across the entire ventral surface and lateral margins of the ventrolateral wings of the urohyal",
"hyohyoidei muscle with"));
//case 4.2 - test case 2
myTester42.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"bases", "p", "role", "0", "0", "", ""}));
myTester42.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"of", "b", "role", "2", "2", "", ""}));
assertEquals("CaseHandle - case 4.2", new StringAndInt("bases",0),
myTester42.doItCaseHandle("bases of tooth whorls", "bases of"));
// case 5.1.3 and case x
Learner myTester513x = learnerFactory();
myTester513x.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"styles", "p", "role", "1", "1", "", ""}));
myTester513x.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"style", "s", "role", "1", "1", "", ""}));
myTester513x.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"branches", "p", "role", "23", "23", "", ""}));
myTester513x.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"branch", "s", "role", "23", "23", "", ""}));
StringAndInt result513x = myTester513x.doItCaseHandle("styles branches :", "styles branches");
StringAndInt target513x = new StringAndInt("branches",1);
assertEquals("CaseHandle - case 5.1.3 and case x", result513x, target513x);
Learner myTester52 = learnerFactory();
myTester52.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"basal", "b", "role", "30", "30", "", ""}));
myTester52.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"leaf", "s", "role", "0", "0", "", ""}));
myTester52.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"blades", "p", "role", "63", "63", "", ""}));
myTester52.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"linear_lanceolate", "b", "role", "2", "2", "", ""}));
myTester52.getDataHolder().add2Holder(DataHolder.MODIFIER,
Arrays.asList(new String[] {"basal", "1", "false"}));
StringAndInt result52 = myTester52.doItCaseHandle(
"basal leaf blades linear_lanceolate , 3 ?10 cm , margins entire or with remote linear lobes , apices acute ;",
"basal leaf blades");
StringAndInt target52 = new StringAndInt("basal leaf blades", 0);
assertEquals("CaseHandle - case 5.2", result52, target52);
// case 6.2
Learner myTester62 = learnerFactory();
myTester62.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"cauline", "b", "role", "1", "1", "", ""}));
myTester62.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"much", "s", "role", "1", "1", "", ""}));
myTester62.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"reduced", "b", "role", "11", "11", "", ""}));
myTester62.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"distally", "b", "role", "2", "2", "", ""}));
StringAndInt returnedValue62 = myTester62.doItCaseHandle(
"principal cauline much reduced distally , sessile , bases decurrent or not , as spiny wings ;",
"principal cauline much");
assertEquals("CaseHandle - case 6.2", "principal cauline much", returnedValue62.getString());
// assertEquals(myTester7.doItCase7Helper("^s(\\?)$", "s?");
// // case 7
// Learner myTester7 = new Learner(myConfiguration, myUtility);
// assertEquals(myTester7.doItCase7Helper("^s(\\?)$", "s?");
// case 9
Learner myTester9 = learnerFactory();
myTester9.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"basal", "b", "role", "24", "24", "", ""}));
myTester9.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"leaves", "p", "role", "112", "112", "", ""}));
myTester9.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"leaf", "s", "role", "112", "112", "", ""}));
assertEquals("CaseHandle - case 9", new StringAndInt("basal leaves",0),
myTester9.doItCaseHandle("basal leaves :", "basal leaves"));
// case 10
// case 10.1.1
Learner myTester10_1_1 = learnerFactory();
myTester10_1_1.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"of", "b", "role", "0", "0", "", ""}));
assertEquals("CaseHandle - case 10.1.1", new StringAndInt("teeth",2),
myTester10_1_1.doItCaseHandle("teeth of dentary",
"teeth of"));
myTester10_1_1.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"on", "b", "role", "4", "4", "", ""}));
assertEquals("CaseHandle - case 10.1.1", new StringAndInt("foramina",2),
myTester10_1_1.doItCaseHandle("foramina on external surface of lower jaw",
"foramina on"));
// case 10.1.2
Learner myTester10_1_2 = learnerFactory();
myTester10_1_2.addStopWords();
assertEquals("CaseHandle - case 10.1.1", new StringAndInt("stems",2),
myTester10_1_2.doItCaseHandle("stems 1 ?several , erect or ascending , densely gray_tomentose ",
"stems NUM several"));
// case 10.2
Learner myTester10_2 = learnerFactory();
myTester10_2.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"between", "b", "role", "0", "0", "", ""}));
myTester10_2.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"scales", "p", "role", "0", "0", "", ""}));
myTester10_2.getDataHolder().add2Holder(DataHolder.SINGULAR_PLURAL, Arrays.asList(new String[] {"scale", "scales"}));
assertEquals("CaseHandle - case 10.2", new StringAndInt("",0),
myTester10_2.doItCaseHandle("passes between scales",
"passes between"));
// case 0
Learner myTester0 = learnerFactory();
myTester0.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"does", "b", "role", "0", "0", "", ""}));
myTester0.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"not", "b", "role", "0", "0", "", ""}));
assertEquals("CaseHandle - case 0", new StringAndInt("",0),
myTester0.doItCaseHandle("does not cross over the anterodorsal corner of opercular bone",
"does not cross"));
}
@Test
public void testIsFollowedByNoun() {
Learner myTester = learnerFactory();
myTester.getDataHolder().add2Holder(DataHolder.WORDPOS, Arrays.asList(new String[] {"rhombic", "b", "role", "0", "0", null, null}));
myTester.getDataHolder().add2Holder(DataHolder.WORDPOS, Arrays.asList(new String[] {"bones", "p", "role", "0", "0", null, null}));
myTester.getDataHolder().add2Holder(DataHolder.WORDPOS, Arrays.asList(new String[] {"radial", "s", "role", "0", "0", null, null}));
assertEquals("isFollowedByNoun - null case", false, myTester.isFollowedByNoun(null, null));
assertEquals("isFollowedByNoun - empty case", false, myTester.isFollowedByNoun("", ""));
assertEquals("isFollowedByNoun", true, myTester.isFollowedByNoun("foramina on dermal cheek bones", "foramina on"));
assertEquals("isFollowedByNoun", true, myTester.isFollowedByNoun("foramina on bones", "foramina on"));
assertEquals("isFollowedByNoun", false, myTester.isFollowedByNoun("teeth of dentary", "teeth of"));
}
@Test
public void testGetNounsAfterPtn() {
Learner myTester = learnerFactory();
myTester.getDataHolder().add2Holder(DataHolder.WORDPOS, Arrays.asList(new String[] {"margins", "p", "role", "0", "0", null, null}));
myTester.getDataHolder().add2Holder(DataHolder.WORDPOS, Arrays.asList(new String[] {"often", "b", "role", "0", "0", null, null}));
myTester.getDataHolder().add2Holder(DataHolder.WORDPOS, Arrays.asList(new String[] {"??", "b", "role", "0", "0", null, null}));
myTester.getDataHolder().add2Holder(DataHolder.WORDPOS, Arrays.asList(new String[] {"deeply", "b", "role", "0", "0", null, null}));
List<String> nouns = new ArrayList<String>();
nouns.add("margins");
List<String> nounPtn = new ArrayList<String>();
nounPtn.add("p");
String bWord = "often";
GetNounsAfterPtnReturnValue target = new GetNounsAfterPtnReturnValue(nouns, nounPtn, bWord);
assertEquals("getNounsAfterPtn", target, myTester.getNounsAfterPtn("proximal blade margins often ?? deeply lobed , ( spiny in c . benedicta ) , distal ?smaller , often entire , faces glabrous or ?tomentose , sometimes also villous , strigose , or puberulent , often glandular_punctate .", 2));
}
@Test
public void testTagSentence() {
Learner myTester = learnerFactory();
myTester.getConfiguration().setMaxTagLength(10);
myTester.getDataHolder().add2Holder(DataHolder.SENTENCE, Arrays.asList(new String[] {"src", "sent", "osent","lead","status","tag","m","type"}));
assertEquals("tagIt - case 1", false, myTester.tagSentence(0, ""));
assertEquals("tagIt - case 2", false, myTester.tagSentence(0, "page"));
assertEquals("tagIt - case 3", true, myTester.tagSentence(0, "teeth"));
assertEquals("tagIt - max tag length", "teeth", myTester.getDataHolder().getSentenceHolder().get(0).getTag());
assertEquals("tagIt - case 3", true, myTester.tagSentence(0, "abcdefghijkl"));
//myTester.tagSentence(0, "abcdefghijkl");
assertEquals("tagIt - max tag length", "abcdefghij", myTester.getDataHolder().getSentenceHolder().get(0).getTag());
}
@Test
public void testDoItMarkup() {
Learner myTester = learnerFactory();
myTester.getDataHolder().add2Holder(DataHolder.SENTENCE, Arrays.asList(new String[] {
"src", "sent nor", "osent","lead","status",null,"m","type"}));
myTester.getDataHolder().add2Holder(DataHolder.SENTENCE, Arrays.asList(new String[] {
"src", "sent and", "osent","lead","status","","m","type"}));
myTester.getDataHolder().add2Holder(DataHolder.SENTENCE, Arrays.asList(new String[] {
"src", "sent", "osent","lead","status","unknown","m","type"}));
// assertEquals("doItMarkup - case 1", 0, myTester.doItMarkup());
assertEquals("doItMarkup - Helper - true", true, myTester.doItMarkupHelper(null));
assertEquals("doItMarkup - Helper - true", true, myTester.doItMarkupHelper(""));
assertEquals("doItMarkup - Helper - true", true, myTester.doItMarkupHelper("unknown"));
assertEquals("doItMarkup - Helper - false", false, myTester.doItMarkupHelper("abc"));
assertEquals("doItMarkup - case 1 - true", true, myTester.doItMarkupCase1Helper("postcleithra 2 and 3 fused into a single ossification"));
assertEquals("doItMarkup - case 1 - false", false, myTester.doItMarkupCase1Helper("postcleithra 2 3 fused into a single ossification"));
assertEquals("doItMarkup - case 2 - true", true, myTester.doItMarkupCase2Helper("ossified as autogenous units"));
assertEquals("doItMarkup - case 2 - false", false, myTester.doItMarkupCase2Helper("ossified autogenous units"));
}
@Test
public void testHasHead(){
assertEquals("hasHead - null", false,
tester.hasHead( null,
Arrays.asList("passing through most".split(" "))));
assertEquals("hasHead - not has", false,
tester.hasHead( Arrays.asList("passing through".split(" ")),
Arrays.asList("passing throug most".split(" "))));
assertEquals("hasHead - empty head", true,
tester.hasHead( new ArrayList<String>(),
Arrays.asList("passing through most".split(" "))));
assertEquals("hasHead - has", true,
tester.hasHead( Arrays.asList("passing through".split(" ")),
Arrays.asList("passing through most".split(" "))));
assertEquals("hasHead - head same as list", true,
tester.hasHead( Arrays.asList("passing through most".split(" ")),
Arrays.asList("passing through most".split(" "))));
}
@Test
public void testWrapupMarkup() {
// // case 1
// Learner myTester1 = learnerFactory();
// myTester1.getDataHolder().getSentenceHolder().add(new SentenceStructure(7, "src", "sent", "osent","sensory line not null","status","notnull","modifer","type"));
// myTester1.getDataHolder().getSentenceHolder().add(new SentenceStructure(192, "src", "sent", "osent","sensory line ignore","status","ignore","modifer","type"));
// myTester1.getDataHolder().getSentenceHolder().add(new SentenceStructure(193, "src", "sent", "osent","sensory line canal","status",null,"modifer","type"));
// myTester1.getDataHolder().getSentenceHolder().add(new SentenceStructure(267, "src", "sent", "osent","sensory line canals","status",null,"modifer","type"));
// myTester1.getDataHolder().getSentenceHolder().add(new SentenceStructure(269, "src", "sent", "osent","opening via tubular","status",null,"modifer","type"));
// myTester1.getDataHolder().add2Holder(DataHolder.WORDPOS, Arrays.asList(new String[] {"line", "s", "*", "1", "1", "", null}));
// myTester1.getDataHolder().add2Holder(DataHolder.WORDPOS, Arrays.asList(new String[] {"canals", "p", "*", "1", "1", "", null}));
// myTester1.wrapupMarkup();
// assertEquals("wrapupmarkup - case 1 - tag sentence", "sensory line canal", myTester1.getDataHolder().getSentence(193).getTag());
// assertEquals("wrapupmarkup - case 1 - tag sentence", "sensory line", myTester1.getDataHolder().getSentence(267).getTag());
// // case 2
// Learner myTester2 = learnerFactory();
// myTester2.getDataHolder().getSentenceHolder().add(new SentenceStructure(115, "src", "sent", "osent","midsagittal fontanel absent","status",null,"modifer","type"));
// myTester2.getDataHolder().getSentenceHolder().add(new SentenceStructure(116, "src", "sent", "osent","midsagittal fontanel present","status",null,"modifer","type"));
// myTester2.getDataHolder().add2Holder(DataHolder.WORDPOS, Arrays.asList(new String[] {"fontanel", "s", "*", "1", "1", "", null}));
// myTester2.getDataHolder().add2Holder(DataHolder.WORDPOS, Arrays.asList(new String[] {"absent", "b", "*", "1", "1", "", null}));
// myTester2.getDataHolder().add2Holder(DataHolder.WORDPOS, Arrays.asList(new String[] {"present", "b", "*", "1", "1", "", null}));
// myTester2.wrapupMarkup();
// assertEquals("wrapupmarkup - case 2 - tag sentence", "midsagittal fontanel", myTester2.getDataHolder().getSentence(115).getTag());
// assertEquals("wrapupmarkup - case 2 - tag sentence", "midsagittal fontanel", myTester2.getDataHolder().getSentence(116).getTag());
}
@Test
public void testOneLeadMarkup(){
Learner myTester = learnerFactory();
myTester.getDataHolder().getSentenceHolder().add(new SentenceStructure(0, "src", "sent", "osent","lead1 lead2","status","tag tag","modifer","type"));
myTester.getDataHolder().getSentenceHolder().add(new SentenceStructure(1, "src", "sent", "osent","midsagittal fontanel present","status",null,"modifer","type"));
myTester.getDataHolder().getSentenceHolder().add(new SentenceStructure(2, "src", "sent", "osent","midsagittal fontanel present","status","tag1","modifer","type"));
myTester.getDataHolder().getSentenceHolder().add(new SentenceStructure(3, "src", "sent", "osent","tagx","status",null,"modifer","type"));
myTester.getDataHolder().getSentenceHolder().add(new SentenceStructure(4, "src", "sent", "osent","tagx tagx","status",null,"modifer","type"));
myTester.getDataHolder().getSentenceHolder().add(new SentenceStructure(5, "src", "sent", "osent","midsagittal fontanel present","status","tagx","modifer","type"));
myTester.getDataHolder().getSentenceHolder().add(new SentenceStructure(6, "src", "sent", "osent","midsagittal fontanel","status","tag2","modifer","type"));
myTester.oneLeadWordMarkup(myTester.getDataHolder().getCurrentTags());
assertEquals("oneLeadMarkup", "tagx", myTester.getDataHolder().getSentence(3).getTag());
}
@Test
public void testUnknownWordBootstrapping(){
// // 1. Preprocessing
// Learner myTester1 = learnerFactory();
// myTester1.getDataHolder().add2Holder(DataHolder.UNKNOWNWORD, Arrays.asList("word1 unknown".split(" ")));
// Set<String> expected = new HashSet<String>();
//// expected.add("")
// assertEquals("unknownWordBootstrappingGetUnknownWord", expected , myTester1.unknownWordBootstrappingGetUnknownWord("(ee)"));
// 3. Postprocessing
Learner myTester3 = learnerFactory();
myTester3.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"word1", "p", "role", "0", "0", "", ""}));
myTester3.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"word2", "b", "role", "0", "0", "", ""}));
myTester3.getDataHolder().add2Holder(DataHolder.WORDPOS,
Arrays.asList(new String[] {"word3", "s", "role", "0", "0", "", ""}));
myTester3.getDataHolder().add2Holder(DataHolder.UNKNOWNWORD, Arrays.asList("word1 word1".split(" ")));
myTester3.getDataHolder().add2Holder(DataHolder.UNKNOWNWORD, Arrays.asList("word2 unknown".split(" ")));
myTester3.getDataHolder().add2Holder(DataHolder.UNKNOWNWORD, Arrays.asList("_wORd3 unknown".split(" ")));
myTester3.getDataHolder().add2Holder(DataHolder.UNKNOWNWORD, Arrays.asList("word?_4 unknown".split(" ")));
myTester3.getDataHolder().add2Holder(DataHolder.UNKNOWNWORD, Arrays.asList("nor unknown".split(" ")));
myTester3.getDataHolder().add2Holder(DataHolder.UNKNOWNWORD, Arrays.asList("word_6 unknown".split(" ")));
myTester3.getDataHolder().getSentenceHolder().add(new SentenceStructure(0, "src", "word1 word_6 word2", "osent","lead","status","tag","modifer","type"));
myTester3.getDataHolder().getSentenceHolder().add(new SentenceStructure(1, "src", "word_6 word2", "osent","lead","status","tag","modifer","type"));
myTester3.getDataHolder().getSentenceHolder().add(new SentenceStructure(2, "src", "word1 word6 word2", "osent","lead","status","tag","modifer","type"));
myTester3.unknownWordBootstrappingPostprocessing();
assertEquals("unknownWordBootstrapping - Postprocessing", "word1 <B>word_6</B> word2", myTester3.getDataHolder().getSentence(0).getSentence());
assertEquals("unknownWordBootstrapping - Postprocessing", "<B>word_6</B> word2", myTester3.getDataHolder().getSentence(1).getSentence());
assertEquals("unknownWordBootstrapping - Postprocessing", "word1 word6 word2", myTester3.getDataHolder().getSentence(2).getSentence());
myTester3.unknownWordBootstrappingPostprocessing();
}
@Test
public void testDittoHelper() {
String nPhrasePattern = "(?:<[A-Z]*[NO]+[A-Z]*>[^<]+?<\\/[A-Z]*[NO]+[A-Z]*>\\s*)+";
String mPhrasePattern = "(?:<[A-Z]*M[A-Z]*>[^<]+?<\\/[A-Z]*M[A-Z]*>\\s*)+";
Learner myTester = learnerFactory();
assertEquals("ditto helper", 0, myTester.dittoHelper(myTester.getDataHolder(), 0, "prismatic calcified <N>cartilage</N>", nPhrasePattern, mPhrasePattern));
assertEquals("ditto helper", 1, myTester.dittoHelper(
myTester.getDataHolder(), 0, "<B>absent</B>", nPhrasePattern,
mPhrasePattern));
assertEquals("ditto helper", 21,
myTester.dittoHelper(myTester.getDataHolder(), 0,
"<B>in</B> tubes below visceral surface <B>of</B> <M>dermal</M> <N>bone</N>",
nPhrasePattern, mPhrasePattern));
}
@Test
public void testPhraseClauseHelper() {
Learner myTester = learnerFactory();
String sentence = "mid and distal <B>progressively</B> smaller , <B>becoming</B> <B>sessile</B> , <B>narrower</B> , <N>bases</N> obtuse to acuminate , <M><B>cauline</B></M> <B>usually</B> 15 or fewer <B>.</B>";
assertEquals("phraseChauseHelper - empty return", new ArrayList<String>(), myTester.phraseClauseHelper(sentence));
sentence = "<M><B>cauline</B></M> <B>linear</B> or <B>oblong</B> , <B>crowded</B> or well separated , <B>usually</B> <B>not</B> surpassing <N>heads</N> <B>.</B>";
List<String> target = new ArrayList<String>(2);
target.add("");
target.add("heads");
assertEquals("phraseChauseHelper", target, myTester.phraseClauseHelper(sentence));
sentence = "distal <M><B>cauline</B></M> <B>sessile</B> , ?<N>decurrent</N> <B>.</B>";
target.clear();
target.add("");
target.add("decurrent");
assertEquals("phraseChauseHelper", target, myTester.phraseClauseHelper(sentence));
}
@Test
public void testPronounCharacterSubjectHelper() {
Learner myTester = learnerFactory();
List<String> target = new ArrayList<String>(2);
String lead;
String sentence;
String modifier;
String tag;
// null
lead = "prismatic calcified cartilage";
sentence = "prismatic calcified <N>cartilage</N>";
modifier = null;
tag = null;
assertEquals("pronounCharacterSubjectHelper null", null, myTester.pronounCharacterSubjectHelper(lead, sentence, modifier, tag));
// case 1.1.1
lead = "size of";
sentence = "<B>size</B> <B>of</B> <N>lateral</N> <B>gular</B>";
modifier = "";
tag = "ditto";
target.clear();
target.add("");
target.add("lateral");
assertEquals("pronounCharacterSubjectHelper case 1.1.1", target, myTester.pronounCharacterSubjectHelper(lead, sentence, modifier, tag));
// case 1.2.1.1
lead = "body scale profile";
sentence = "<M>body</M> <N>scale</N> <B>profile</B>";
modifier = "body";
tag = "scale";
target.clear();
target.add("body ");
target.add("scale");
assertEquals("pronounCharacterSubjectHelper case 1.2.1.1", target, myTester.pronounCharacterSubjectHelper(lead, sentence, modifier, tag));
// case 1.2.1.1
lead = "lyre_ shaped";
sentence = "<N>lyre_</N> <B>shaped</B>";
modifier = "";
tag = "lyre_";
target.clear();
target.add("");
target.add("ditto");
assertEquals("pronounCharacterSubjectHelper case 1.2.1.2", target, myTester.pronounCharacterSubjectHelper(lead, sentence, modifier, tag));
// case 1.2.2
lead = "shape of";
sentence = "<B>shape</B> <B>of</B> opercular <N>ossification</N>";
modifier = "";
tag = "ditto";
target.clear();
target.add("");
target.add("ditto");
assertEquals("pronounCharacterSubjectHelper case 1.2.2", target, myTester.pronounCharacterSubjectHelper(lead, sentence, modifier, tag));
}
@Test
public void testPronounCharacterSubjectHelper4() {
Learner myTester = learnerFactory();
List<String> target = new ArrayList<String>(2);
String lead;
String sentence;
String modifier;
String tag;
// null
lead = "prismatic calcified cartilage";
sentence = "prismatic calcified <N>cartilage</N>";
modifier = null;
tag = null;
assertEquals("pronounCharacterSubjectHelper null", null, myTester.pronounCharacterSubjectHelper4(lead, sentence, modifier, tag));
// lead = "skull shape";
// sentence = "<N>skull</N> <B>shape</B>";
// modifier = "";
// tag = "skull";
// target.clear();
// target.add("");
// target.add("skull");
// assertEquals("pronounCharacterSubjectHelper4", target, myTester.pronounCharacterSubjectHelper(lead, sentence, modifier, tag));
}
@Test
public void testAndOrTagCase1Helper() {
Learner myTester = learnerFactory();
String sPattern = Constant.SEGANDORPTN;
String wPattern = Constant.ANDORPTN;
Set<String> token = new HashSet<String>();
token.addAll(Arrays.asList("and or nor".split(" ")));
token.add("\\");
token.add("and / or");
// test case 1
String pattern = "qqn&p";
List<String> words = new ArrayList<String>();
words.addAll(Arrays.asList("smaller undifferentiated <N>plates</N> or tesserae".split(" ")));
List<List<String>> target = new ArrayList<List<String>>();
List<String> mPatterns = new ArrayList<String>();
mPatterns.add("qq");
List<String> mSegments = new ArrayList<String>();
mSegments.add("smaller undifferentiated");
List<String> sPatterns = new ArrayList<String>();
sPatterns.addAll(Arrays.asList("n p".split(" ")));
List<String> sSegments = new ArrayList<String>();
sSegments.addAll(Arrays.asList("<N>plates</N> tesserae".split(" ")));
List<String> tagAndModifier1 = new ArrayList<String>();
tagAndModifier1.add("");
tagAndModifier1.add("smaller undifferentiated plates or tesserae");
List<String> tagAndModifier2 = new ArrayList<String>();
List<String> update1 = new ArrayList<String>();
List<String> update2 = new ArrayList<String>();
update2.add("tesserae");
target.add(mPatterns);
target.add(mSegments);
target.add(sPatterns);
target.add(sSegments);
target.add(tagAndModifier1);
target.add(tagAndModifier2);
target.add(update1);
target.add(update2);
assertEquals("andOrTagCase1Helper", target, myTester.andOrTagCase1Helper(pattern, wPattern, words, token));
// List<List<String>> returned = myTester.andOrTagCase1Helper(pattern, wPattern, words, token);
// System.out.println(returned);
// test case 2
pattern = "n&qqnbq";
words.clear();
words.addAll(Arrays.asList("<N>perforate</N> or fenestrate anterodorsal <N>portion</N> <B>of</B> palatoquadrate".split(" ")));
mPatterns.clear();
mSegments.clear();
sPatterns.clear();
sSegments.clear();
mPatterns.add("qq");
mSegments.add("fenestrate anterodorsal");
sPatterns.addAll(Arrays.asList("n n".split(" ")));
sSegments.addAll(Arrays.asList("<N>perforate</N> <N>portion</N>".split(" ")));
tagAndModifier1.clear();
tagAndModifier1.add("");
tagAndModifier1.add("perforate or fenestrate anterodorsal portion");
tagAndModifier2.clear();
update1.clear();
update2.clear();
assertEquals("andOrTagCase1Helper", target, myTester.andOrTagCase1Helper(pattern, wPattern, words, token));
}
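// For this input finalizeCompoundModifier is expected to return the compound
// modifier unchanged (expected value taken from the assertion below).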
@Test
public void testFinalizeCompoundModifier() {
Learner myTester = learnerFactory();
// case 1
String modifier = "maxillary and [dentary] tooth_ bearing";
String tag = "elements";
String sentence = "maxillary and dentary <B>tooth_</B> bearing <N>elements</N>";
assertEquals("finalizeCompoundModifier case 1", modifier,
myTester.finalizeCompoundModifier(myTester.getDataHolder(), modifier, tag, sentence));
}
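// getMCount appears to count sentences whose tagged text contains the given
// word inside <M> markers; five of the six sentences added below contain
// <M><B>marginal</B></M>, so the expected count is 5.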
@Test
public void testGetMCount(){
Learner myTester = learnerFactory();
DataHolder myDataHolder = myTester.getDataHolder();
myDataHolder.add2Holder(DataHolder.SENTENCE,
Arrays.asList(new String[] {"source1", "<B>number</B> <B>of</B> <M><B>marginal</B></M> <N>bones</N> <B>alongside</B> postparietal", "o1", "lead1", "status1", "tag1", "modifier1", "type1"}));
myDataHolder.add2Holder(DataHolder.SENTENCE,
Arrays.asList(new String[] {"source1", "through <M><B>marginal</B></M> <N>bones</N> <B>alongside</B> postparietal", "o1", "lead1", "status1", "tag1", "modifier1", "type1"}));
myDataHolder.add2Holder(DataHolder.SENTENCE,
Arrays.asList(new String[] {"source1", "<M><B>marginal</B></M> <N>teeth</N> <B>on</B> dentary", "o1", "lead1", "status1", "tag1", "modifier1", "type1"}));
myDataHolder.add2Holder(DataHolder.SENTENCE,
Arrays.asList(new String[] {"source1", "<B>broad</B> <M><B>marginal</B></M> <N>tooth</N> <B>field</B>", "o1", "lead1", "status1", "tag1", "modifier1", "type1"}));
myDataHolder.add2Holder(DataHolder.SENTENCE,
Arrays.asList(new String[] {"source1", "<B>narrow</B> <M><B>marginal</B></M> <N>tooth</N> <N>row</N>", "o1", "lead1", "status1", "tag1", "modifier1", "type1"}));
myDataHolder.add2Holder(DataHolder.SENTENCE,
Arrays.asList(new String[] {"source1", "anterodorsal <B>peg_</B> like <N>process</N> <B>on</B> <N>scale</N>", "o1", "lead1", "status1", "tag1", "modifier1", "type1"}));
assertEquals("getMCount", 5, myTester.getMCount(myDataHolder, "marginal"));
}
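// Builds WORDPOS rows that mark "structure2" (p) and "structure3" (s) as
// structures while "structure1" (b) is not, then adds SENTENCE rows pairing
// tags with those structures; for this data the expected common-structure
// result is the set {"tag3"} (taken from the assertion below).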
@Test
public void testGetCommonStructures() {
Learner myTester = learnerFactory();
DataHolder myDataHolder = myTester.getDataHolder();
// "structure2" and "structure3" are common structures, "structure1" is
// not
myTester.getDataHolder().add2Holder(
DataHolder.WORDPOS,
Arrays.asList(new String[] { "structure1", "b", "role", "1",
"1", "", "" }));
myTester.getDataHolder().add2Holder(
DataHolder.WORDPOS,
Arrays.asList(new String[] { "structure2", "p", "role", "1",
"1", "", "" }));
myTester.getDataHolder().add2Holder(
DataHolder.WORDPOS,
Arrays.asList(new String[] { "structure3", "s", "role", "1",
"1", "", "" }));
myDataHolder.add2Holder(
DataHolder.SENTENCE,
Arrays.asList(new String[] { "src", "sent", "osent", "lead",
"status", "tag1", "structure1", "type" }));
myDataHolder.add2Holder(
DataHolder.SENTENCE,
Arrays.asList(new String[] { "src", "sent", "osent", "lead",
"status", "tag2", "structure2", "type" }));
myDataHolder.add2Holder(
DataHolder.SENTENCE,
Arrays.asList(new String[] { "src", "sent", "osent", "lead",
"status", "tag3", "structure2", "type" }));
myDataHolder.add2Holder(
DataHolder.SENTENCE,
Arrays.asList(new String[] { "src", "sent", "osent", "lead",
"status", "tag3", "structure3", "type" }));
Set<String> target = new HashSet<String>(Arrays.asList("tag3"));
assertEquals("getCommonStructures", target,
myTester.getCommonStructures(myDataHolder));
}
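// normalizeItem appears to singularize nouns ("fins" -> "fin"), including the
// final conjunct of an and-joined phrase, while leaving non-plural words such
// as "general" unchanged.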
@Test
public void testNormalizeItem() {
Learner myTester = learnerFactory();
DataHolder myDataHolder = myTester.getDataHolder();
assertEquals("normalizeItem case 2", "general", myTester.normalizeItem("general"));
assertEquals("normalizeItem case 2", "fin", myTester.normalizeItem("fins"));
assertEquals("normalizeItem case 2", "squamosal and quadratojugal and bone",
myTester.normalizeItem("squamosal and quadratojugal and bones"));
}
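// adjectiveSubjectsPart1 is expected to collect certain <M>-tagged modifier
// words from sentences whose tag column is empty; for the four sentences
// added below the expected set is {"open", "anterior", "paired"} (taken from
// the assertion).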
@Test
public void testAdjectiveSubjectsHelper(){
Learner myTester = learnerFactory();
DataHolder myDataHolder = myTester.getDataHolder();
Set<String> typeModifiers = new HashSet<String>();
Set<String> target = new HashSet<String>();
target.addAll(Arrays.asList("open anterior paired".split(" ")));
myDataHolder.add2Holder(
DataHolder.SENTENCE,
Arrays.asList(new String[] { "src", "endolymphatic <N>ducts</N> <M><B>open</B></M> <B>in</B> <M>dermal</M> <N>skull</N> roof", "osent", "lead",
"status", "", "structure3", "type" }));
myDataHolder.add2Holder(
DataHolder.SENTENCE,
Arrays.asList(new String[] { "src", "restricted to <B>the</B> <M>anterior</M> <B>third</B> <B>of</B> <B>the</B> <N>jaw</N>", "osent", "lead",
"status", "", "structure3", "type" }));
myDataHolder.add2Holder(
DataHolder.SENTENCE,
Arrays.asList(new String[] { "src", "<B>series</B> <B>of</B> <M>paired</M> <B>median</B> <N>skull</N> roofng <N>bones</N> <B>that</B> meet <B>at</B> <B>the</B> <M>dorsal</M> midline <B>of</B> <B>the</B> <N>skull</N>", "osent", "lead",
"status", "", "structure3", "type" }));
myDataHolder.add2Holder(
DataHolder.SENTENCE,
Arrays.asList(new String[] { "src", "anterior dorsal fontanelle", "osent", "lead",
"status", "", "structure3", "type" }));
assertEquals("adjectiveSubjectsHelper", target,
myTester.adjectiveSubjectsPart1(myDataHolder, typeModifiers));
}
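// adjectiveSubjectsPart2Helper1 appears to report whether a sentence contains
// an <M>-tagged word that is also in the supplied typeModifiers set.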
@Test
public void testAdjectiveSubjectsPart2Helper1(){
Learner myTester = learnerFactory();
DataHolder myDataHolder = myTester.getDataHolder();
Set<String> typeModifiers = new HashSet<String>();
typeModifiers.addAll(Arrays.asList("open|paired|anterior|through".split("\\|")));
assertEquals("AdjectiveSubjectsPart2Helper1", true, myTester.adjectiveSubjectsPart2Helper1("restricted to <B>the</B> <M>anterior</M> <B>third</B> <B>of</B> <B>the</B> <N>jaw</N>", typeModifiers));
assertEquals("AdjectiveSubjectsPart2Helper1", false, myTester.adjectiveSubjectsPart2Helper1("restricted to <B>the</B> <B>third</B> <B>of</B> <B>the</B> <N>jaw</N>", typeModifiers));
assertEquals("AdjectiveSubjectsPart2Helper1", true, myTester.adjectiveSubjectsPart2Helper1("<B>series</B> <B>of</B> <M>paired</M> <B>median</B> <N>skull</N> roofng <N>bones</N> <B>that</B> meet <B>at</B> <B>the</B> <M>dorsal</M> midline <B>of</B> <B>the</B> <N>skull</N>", typeModifiers));
assertEquals("AdjectiveSubjectsPart2Helper1", false, myTester.adjectiveSubjectsPart2Helper1("<B>series</B> <B>of paired median</B> <N>skull</N> roofng <N>bones</N> <B>that</B> meet <B>at</B> <B>the</B> <M>dorsal</M> midline <B>of</B> <B>the</B> <N>skull</N>", typeModifiers));
}
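// Shared factory for the tests above: wires a Learner to the OpenNLP
// tokenizer and sentence detector plus a WordNet POS knowledge base, all
// configured through Configuration.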
private Learner learnerFactory() {
Learner tester;
Configuration myConfiguration = new Configuration();
ITokenizer tokenizer = new OpenNLPTokenizer(
myConfiguration.getOpenNLPTokenizerDir());
ITokenizer sentenceDetector = new OpenNLPSentencesTokenizer(
myConfiguration.getOpenNLPSentenceDetectorDir());
WordNetPOSKnowledgeBase wordNetPOSKnowledgeBase = null;
try {
wordNetPOSKnowledgeBase = new WordNetPOSKnowledgeBase(myConfiguration.getWordNetDictDir(), false);
} catch (IOException e) {
// the WordNet dictionary could not be loaded; tests that rely on POS lookups may fail
e.printStackTrace();
}
LearnerUtility myLearnerUtility = new LearnerUtility(sentenceDetector,
tokenizer, wordNetPOSKnowledgeBase);
tester = new Learner(myConfiguration, tokenizer, myLearnerUtility);
return tester;
}
}